From e7829a4de67624cc53ec6b614b2ab1f2e8d1db8d Mon Sep 17 00:00:00 2001
From: Chirag Aggarwal
Date: Tue, 6 Jan 2026 11:28:37 +0530
Subject: [PATCH 01/41] refactor: pull.ts file and introduce Pull class

---
 bun.lock               |   5 +-
 lib/commands/config.ts | 390 ++++++++++++++++
 lib/commands/errors.ts |  93 ++++
 lib/commands/pull.ts   | 978 +++++++++++++++++++++++++++--------
 package.json           |   3 +-
 5 files changed, 1132 insertions(+), 337 deletions(-)
 create mode 100644 lib/commands/config.ts
 create mode 100644 lib/commands/errors.ts

diff --git a/bun.lock b/bun.lock
index 57bb97a5..82d5a534 100644
--- a/bun.lock
+++ b/bun.lock
@@ -5,7 +5,6 @@
       "name": "appwrite-cli",
       "dependencies": {
         "@appwrite.io/console": "^2.1.0",
-        "@types/bun": "^1.3.5",
         "chalk": "4.1.2",
         "chokidar": "^3.6.0",
         "cli-progress": "^3.12.0",
@@ -21,8 +20,10 @@
         "tail": "^2.2.6",
         "tar": "^6.1.11",
         "undici": "^5.28.2",
+        "zod": "^4.3.5",
       },
       "devDependencies": {
+        "@types/bun": "^1.3.5",
         "@types/cli-progress": "^3.11.5",
         "@types/inquirer": "^8.2.10",
         "@types/json-bigint": "^1.0.4",
@@ -531,6 +532,8 @@
     "yargs-parser": ["yargs-parser@20.2.9", "", {}, "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w=="],

+    "zod": ["zod@4.3.5", "", {}, "sha512-k7Nwx6vuWx1IJ9Bjuf4Zt1PEllcwe7cls3VNzm4CQ1/hgtFUK2bRNG3rvnpPUhFjmqJKAKtjV576KnUkHocg/g=="],
+
     "@isaacs/fs-minipass/minipass": ["minipass@7.1.2", "", {}, "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw=="],

     "@yao-pkg/pkg/tar": ["tar@7.5.2", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.1.0", "yallist": "^5.0.0" } }, "sha512-7NyxrTE4Anh8km8iEy7o0QYPs+0JKBTj5ZaqHg6B39erLg0qYXN3BijtShwbsNSvQ+LN75+KV+C4QR/f6Gwnpg=="],

diff --git a/lib/commands/config.ts b/lib/commands/config.ts
new file mode 100644
index 00000000..03e0cf9c
--- /dev/null
+++ b/lib/commands/config.ts
@@ -0,0 +1,390 @@
+import { z } from "zod";
+
+const INT64_MIN = BigInt("-9223372036854775808");
+const INT64_MAX = BigInt("9223372036854775807");
+
+const createSettingsObject = (settings: any) => {
+  return {
+    services: {
+      account: settings.serviceStatusForAccount,
+      avatars: settings.serviceStatusForAvatars,
+      databases: settings.serviceStatusForDatabases,
+      locale: settings.serviceStatusForLocale,
+      health: settings.serviceStatusForHealth,
+      storage: settings.serviceStatusForStorage,
+      teams: settings.serviceStatusForTeams,
+      users: settings.serviceStatusForUsers,
+      sites: settings.serviceStatusForSites,
+      functions: settings.serviceStatusForFunctions,
+      graphql: settings.serviceStatusForGraphql,
+      messaging: settings.serviceStatusForMessaging,
+    },
+    auth: {
+      methods: {
+        jwt: settings.authJWT,
+        phone: settings.authPhone,
+        invites: settings.authInvites,
+        anonymous: settings.authAnonymous,
+        "email-otp": settings.authEmailOtp,
+        "magic-url": settings.authUsersAuthMagicURL,
+        "email-password": settings.authEmailPassword,
+      },
+      security: {
+        duration: settings.authDuration,
+        limit: settings.authLimit,
+        sessionsLimit: settings.authSessionsLimit,
+        passwordHistory: settings.authPasswordHistory,
+        passwordDictionary: settings.authPasswordDictionary,
+        personalDataCheck: settings.authPersonalDataCheck,
+        sessionAlerts: settings.authSessionAlerts,
+        mockNumbers: settings.authMockNumbers,
+      },
+    },
+  };
+};
+
+const SiteSchema = z
+  .object({
+    path: z.string().optional(),
+    $id: z.string(),
+    name: z.string(),
+    enabled: z.boolean().optional(),
+    logging: z.boolean().optional(),
+    timeout: z.number().optional(),
+    framework: z.string().optional(),
+    buildRuntime: z.string().optional(),
+    adapter: z.string().optional(),
+    installCommand: z.string().optional(),
+    buildCommand: z.string().optional(),
+    outputDirectory: z.string().optional(),
+    fallbackFile: z.string().optional(),
+    specification: z.string().optional(),
+  })
+  .strict();
+
+const FunctionSchema = z
+  .object({
+    path: z.string().optional(),
+    $id: z.string(),
+    execute: z.array(z.string()).optional(),
+    name: z.string(),
+    enabled: z.boolean().optional(),
+    logging: z.boolean().optional(),
+    runtime: z.string().optional(),
+    specification: z.string().optional(),
+    scopes: z.array(z.string()).optional(),
+    events: z.array(z.string()).optional(),
+    schedule: z.string().optional(),
+    timeout: z.number().optional(),
+    entrypoint: z.string().optional(),
+    commands: z.string().optional(),
+  })
+  .strict();
+
+const int64Schema = z.preprocess(
+  (val) => {
+    if (typeof val === "bigint") {
+      return val;
+    }
+
+    if (typeof val === "object" && val !== null) {
+      if (typeof val.valueOf === "function") {
+        try {
+          const valueOfResult = val.valueOf();
+          const bigIntVal = BigInt(valueOfResult as string | number | bigint);
+          return bigIntVal;
+        } catch (e) {
+          return undefined;
+        }
+      }
+
+      const num = Number(val);
+      return !isNaN(num) ? BigInt(Math.trunc(num)) : undefined;
+    }
+
+    if (typeof val === "string") {
+      try {
+        return BigInt(val);
+      } catch (e) {
+        return undefined;
+      }
+    }
+
+    if (typeof val === "number") {
+      return BigInt(Math.trunc(val));
+    }
+
+    return val;
+  },
+  z
+    .bigint()
+    .nullable()
+    .optional()
+    .superRefine((val, ctx) => {
+      if (val === undefined || val === null) return;
+
+      if (val < INT64_MIN || val > INT64_MAX) {
+        ctx.addIssue({
+          code: z.ZodIssueCode.custom,
+          message: `must be between ${INT64_MIN} and ${INT64_MAX} (64-bit signed integer range)`,
+        });
+      }
+    }),
+);
+
+const AttributeSchemaBase = z
+  .object({
+    key: z.string(),
+    type: z.enum([
+      "string",
+      "integer",
+      "double",
+      "boolean",
+      "datetime",
+      "relationship",
+      "linestring",
+      "point",
+      "polygon",
+    ]),
+    required: z.boolean().optional(),
+    array: z.boolean().optional(),
+    size: z.number().optional(),
+    default: z.any().optional(),
+    min: int64Schema,
+    max: int64Schema,
+    format: z
+      .union([
+        z.enum(["email", "enum", "url", "ip", "datetime"]),
+        z.literal(""),
+      ])
+      .optional(),
+    elements: z.array(z.string()).optional(),
+    relatedCollection: z.string().optional(),
+    relationType: z.string().optional(),
+    twoWay: z.boolean().optional(),
+    twoWayKey: z.string().optional(),
+    onDelete: z.string().optional(),
+    side: z.string().optional(),
+    attributes: z.array(z.string()).optional(),
+    orders: z.array(z.string()).optional(),
+    encrypt: z.boolean().optional(),
+  })
+  .strict();
+
+const AttributeSchema = AttributeSchemaBase.refine(
+  (data) => {
+    if (data.required === true && data.default !== null) {
+      return false;
+    }
+    return true;
+  },
+  {
+    message: "When 'required' is true, 'default' must be null",
+    path: ["default"],
+  },
+);
+
+const IndexSchema = z
+  .object({
+    key: z.string(),
+    type: z.string(),
+    status: z.string().optional(),
+    attributes: z.array(z.string()),
+    orders: z.array(z.string()).optional(),
+  })
+  .strict();
+
+const CollectionSchema = z
+  .object({
+    $id: z.string(),
+    $permissions: z.array(z.string()).optional(),
+    databaseId: z.string(),
+    name: z.string(),
+    enabled: z.boolean().optional(),
+    documentSecurity: z.boolean().default(true),
+    attributes: z.array(AttributeSchema).optional(),
+    indexes: z.array(IndexSchema).optional(),
+  })
+  .strict()
+  .superRefine((data, ctx) => {
+    if (data.attributes && data.attributes.length > 0) {
+      const seenKeys = new Set();
+      const duplicateKeys = new Set();
+
+      data.attributes.forEach((attr, index) => {
+        if (seenKeys.has(attr.key)) {
+          duplicateKeys.add(attr.key);
+          ctx.addIssue({
+            code: z.ZodIssueCode.custom,
+            message: `Attribute with the key '${attr.key}' already exists. Attribute keys must be unique, try again with a different key.`,
+            path: ["attributes", index, "key"],
+          });
+        } else {
+          seenKeys.add(attr.key);
+        }
+      });
+    }
+
+    if (data.indexes && data.indexes.length > 0) {
+      const seenKeys = new Set();
+      const duplicateKeys = new Set();
+
+      data.indexes.forEach((index, indexPos) => {
+        if (seenKeys.has(index.key)) {
+          duplicateKeys.add(index.key);
+          ctx.addIssue({
+            code: z.ZodIssueCode.custom,
+            message: `Index with the key '${index.key}' already exists. Index keys must be unique, try again with a different key.`,
+            path: ["indexes", indexPos, "key"],
+          });
+        } else {
+          seenKeys.add(index.key);
+        }
+      });
+    }
+  });
+
+const DatabaseSchema = z
+  .object({
+    $id: z.string(),
+    name: z.string(),
+    enabled: z.boolean().optional(),
+  })
+  .strict();
+
+const BucketSchema = z
+  .object({
+    $id: z.string(),
+    $permissions: z.array(z.string()).optional(),
+    fileSecurity: z.boolean().optional(),
+    name: z.string(),
+    enabled: z.boolean().optional(),
+    maximumFileSize: z.number().optional(),
+    allowedFileExtensions: z.array(z.string()).optional(),
+    compression: z.string().optional(),
+    encryption: z.boolean().optional(),
+    antivirus: z.boolean().optional(),
+  })
+  .strict();
+
+const TopicSchema = z
+  .object({
+    $id: z.string(),
+    name: z.string(),
+    subscribe: z.array(z.string()).optional(),
+  })
+  .strict();
+
+const TeamSchema = z
+  .object({
+    $id: z.string(),
+    name: z.string(),
+  })
+  .strict();
+
+const MessageSchema = z
+  .object({
+    $id: z.string(),
+    name: z.string(),
+    emailTotal: z.number().optional(),
+    smsTotal: z.number().optional(),
+    pushTotal: z.number().optional(),
+    subscribe: z.array(z.string()).optional(),
+  })
+  .strict();
+
+const SettingsSchema = z
+  .object({
+    services: z
+      .object({
+        account: z.boolean().optional(),
+        avatars: z.boolean().optional(),
+        databases: z.boolean().optional(),
+        locale: z.boolean().optional(),
+        health: z.boolean().optional(),
+        storage: z.boolean().optional(),
+        teams: z.boolean().optional(),
+        users: z.boolean().optional(),
+        sites: z.boolean().optional(),
+        functions: z.boolean().optional(),
+        graphql: z.boolean().optional(),
+        messaging: z.boolean().optional(),
+      })
+      .strict()
+      .optional(),
+    auth: z
+      .object({
+        methods: z
+          .object({
+            jwt: z.boolean().optional(),
+            phone: z.boolean().optional(),
+            invites: z.boolean().optional(),
+            anonymous: z.boolean().optional(),
+            "email-otp": z.boolean().optional(),
+            "magic-url": z.boolean().optional(),
+            "email-password": z.boolean().optional(),
+          })
+          .strict()
+          .optional(),
+        security: z
+          .object({
+            duration: z.number().optional(),
+            limit: z.number().optional(),
+            sessionsLimit: z.number().optional(),
+            passwordHistory: z.number().optional(),
+            passwordDictionary: z.boolean().optional(),
+            personalDataCheck: z.boolean().optional(),
+            sessionAlerts: z.boolean().optional(),
+            mockNumbers: z
+              .array(
+                z
+                  .object({
+                    phone: z.string(),
+                    otp: z.string(),
+                  })
+                  .strict(),
+              )
+              .optional(),
+          })
+          .strict()
+          .optional(),
+      })
+      .strict()
+      .optional(),
+  })
+  .strict();
+
+const configSchema = z
+  .object({
+    projectId: z.string(),
+    projectName: z.string().optional(),
+    endpoint: z.string().optional(),
+    settings: SettingsSchema.optional(),
+    functions: z.array(FunctionSchema).optional(),
+    sites: z.array(SiteSchema).optional(),
+    databases: z.array(DatabaseSchema).optional(),
+    collections: z.array(CollectionSchema).optional(),
+    topics: z.array(TopicSchema).optional(),
+    teams: z.array(TeamSchema).optional(),
+    buckets: z.array(BucketSchema).optional(),
+    messages: z.array(MessageSchema).optional(),
+  })
+  .strict();
+
+export type ConfigType = z.infer<typeof configSchema>;
+export {
+  configSchema,
+  SiteSchema,
+  FunctionSchema,
+  CollectionSchema,
+  DatabaseSchema,
+  BucketSchema,
+  TopicSchema,
+  TeamSchema,
+  MessageSchema,
+  SettingsSchema,
+  AttributeSchema,
+  AttributeSchemaBase,
+  IndexSchema,
+  createSettingsObject,
+};
diff --git a/lib/commands/errors.ts b/lib/commands/errors.ts
new file mode 100644
index 00000000..44610c29
--- /dev/null
+++ b/lib/commands/errors.ts
@@ -0,0 +1,93 @@
+/**
+ * Error thrown when destructive changes are detected during push operations
+ * and the force flag is not enabled.
+ */
+export class DestructiveChangeError extends Error {
+  constructor(
+    message: string,
+    private metadata: {
+      changes: Array<{
+        type: string;
+        resource: string;
+        field: string;
+        oldValue?: any;
+        newValue?: any;
+      }>;
+      affectedResources: number;
+    },
+  ) {
+    super(message);
+    this.name = "DestructiveChangeError";
+    Error.captureStackTrace(this, DestructiveChangeError);
+  }
+
+  /**
+   * Get detailed metadata about the destructive changes
+   */
+  public getMetadata() {
+    return this.metadata;
+  }
+}
+
+/**
+ * Error thrown when configuration validation fails
+ */
+export class ConfigValidationError extends Error {
+  constructor(
+    message: string,
+    private validationErrors: Array<{
+      path: string;
+      message: string;
+    }>,
+  ) {
+    super(message);
+    this.name = "ConfigValidationError";
+    Error.captureStackTrace(this, ConfigValidationError);
+  }
+
+  /**
+   * Get detailed validation errors
+   */
+  public getValidationErrors() {
+    return this.validationErrors;
+  }
+}
+
+/**
+ * Error thrown when a requested resource is not found
+ */
+export class ResourceNotFoundError extends Error {
+  constructor(
+    message: string,
+    public resourceType: string,
+    public resourceId: string,
+  ) {
+    super(message);
+    this.name = "ResourceNotFoundError";
+    Error.captureStackTrace(this, ResourceNotFoundError);
+  }
+}
+
+/**
+ * Error thrown when authentication fails or is missing
+ */
+export class AuthenticationError extends Error {
+  constructor(message: string) {
+    super(message);
+    this.name = "AuthenticationError";
+    Error.captureStackTrace(this, AuthenticationError);
+  }
+}
+
+/**
+ * Error thrown when project is not initialized
+ */
+export class ProjectNotInitializedError extends Error {
+  constructor(
+    message: string = "Project configuration not found. 
Project must be initialized first.", + ) { + super(message); + this.name = "ProjectNotInitializedError"; + Error.captureStackTrace(this, ProjectNotInitializedError); + } +} diff --git a/lib/commands/pull.ts b/lib/commands/pull.ts index ccf60cec..f1812d7a 100644 --- a/lib/commands/pull.ts +++ b/lib/commands/pull.ts @@ -4,19 +4,26 @@ import tar from "tar"; import { Command } from "commander"; import inquirer from "inquirer"; import { - getMessagingService, - getTeamsService, - getProjectsService, + Databases, + Functions, + Messaging, + Projects, + Sites, + Storage, + TablesDB, + Teams, + Client, +} from "@appwrite.io/console"; +import { getFunctionsService, getSitesService, getDatabasesService, getTablesDBService, - getStorageService, } from "../services.js"; +import { sdkForProject, sdkForConsole } from "../sdks.js"; import { localConfig } from "../config.js"; import { paginate } from "../paginate.js"; import { - questionsPullCollection, questionsPullFunctions, questionsPullFunctionsCode, questionsPullSites, @@ -32,9 +39,22 @@ import { actionRunner, commandDescriptions, } from "../parser.js"; - -interface PullResourcesOptions { - skipDeprecated?: boolean; +import type { ConfigType } from "./config.js"; +import { createSettingsObject } from "./config.js"; +import { ProjectNotInitializedError } from "./errors.js"; + +export interface PullOptions { + all?: boolean; + settings?: boolean; + functions?: boolean; + sites?: boolean; + collections?: boolean; + tables?: boolean; + buckets?: boolean; + teams?: boolean; + topics?: boolean; + withVariables?: boolean; + noCode?: boolean; } interface PullFunctionsOptions { @@ -47,6 +67,528 @@ interface PullSitesOptions { withVariables?: boolean; } +interface PullResourcesOptions { + skipDeprecated?: boolean; +} + +async function createPullInstance(): Promise { + const projectClient = await sdkForProject(); + const consoleClient = await sdkForConsole(); + const pullInstance = new Pull(projectClient, consoleClient); + + pullInstance.setConfigDirectoryPath(localConfig.configDirectoryPath); + return pullInstance; +} + +export class Pull { + private projectClient: Client; + private consoleClient: Client; + private configDirectoryPath: string; + + constructor(projectClient: Client, consoleClient: Client) { + this.projectClient = projectClient; + this.consoleClient = consoleClient; + this.configDirectoryPath = process.cwd(); + } + + /** + * Set the base directory path for config files and resources + */ + public setConfigDirectoryPath(path: string): void { + this.configDirectoryPath = path; + } + + /** + * Pull resources from Appwrite project and return updated config + * + * @param config - Current configuration object + * @param options - Pull options specifying which resources to pull + * @returns Updated configuration object with pulled resources + */ + public async pullResources( + config: ConfigType, + options: PullOptions = { all: true }, + ): Promise { + if (!config.projectId) { + throw new ProjectNotInitializedError(); + } + + const updatedConfig: ConfigType = { ...config }; + const shouldPullAll = options.all === true; + + if (shouldPullAll || options.settings) { + const settings = await this.pullSettings(config.projectId); + updatedConfig.settings = settings.settings; + updatedConfig.projectName = settings.projectName; + } + + if (shouldPullAll || options.functions) { + const functions = await this.pullFunctions({ + code: options.noCode === true ? 
false : true, + withVariables: options.withVariables, + }); + updatedConfig.functions = functions; + } + + if (shouldPullAll || options.sites) { + const sites = await this.pullSites({ + code: options.noCode === true ? false : true, + withVariables: options.withVariables, + }); + updatedConfig.sites = sites; + } + + if (shouldPullAll || options.tables) { + const { databases, tables } = await this.pullTables(); + updatedConfig.databases = databases; + updatedConfig.collections = tables; + } + + if (options.collections) { + const { databases, collections } = await this.pullCollections(); + updatedConfig.databases = databases; + updatedConfig.collections = collections; + } + + if (shouldPullAll || options.buckets) { + const buckets = await this.pullBuckets(); + updatedConfig.buckets = buckets; + } + + if (shouldPullAll || options.teams) { + const teams = await this.pullTeams(); + updatedConfig.teams = teams; + } + + if (shouldPullAll || options.topics) { + const topics = await this.pullMessagingTopics(); + updatedConfig.topics = topics; + } + + return updatedConfig; + } + + /** + * Pull project settings + */ + public async pullSettings(projectId: string): Promise { + const projectsService = new Projects(this.consoleClient); + const response = await projectsService.get(projectId); + + return { + projectName: response.name, + settings: createSettingsObject(response), + }; + } + + /** + * Pull functions from the project + */ + public async pullFunctions( + options: PullFunctionsOptions = {}, + ): Promise { + const originalCwd = process.cwd(); + process.chdir(this.configDirectoryPath); + + try { + const functionsService = new Functions(this.projectClient); + + const fetchResponse = await functionsService.list([ + JSON.stringify({ method: "limit", values: [1] }), + ]); + + if (fetchResponse["functions"].length <= 0) { + return []; + } + + const { functions } = await paginate( + async () => new Functions(this.projectClient).list(), + {}, + 100, + "functions", + ); + + const result: any[] = []; + + for (const func of functions) { + const funcPath = func.path || `functions/${func.name}`; + func["path"] = funcPath; + + const holdingVars = func["vars"]; + delete func["vars"]; + + result.push(func); + + if (!fs.existsSync(funcPath)) { + fs.mkdirSync(funcPath, { recursive: true }); + } + + if (options.code === false) { + continue; + } + + let deploymentId: string | null = null; + try { + const deployments = await functionsService.listDeployments({ + functionId: func["$id"], + queries: [ + JSON.stringify({ method: "limit", values: [1] }), + JSON.stringify({ + method: "orderDesc", + values: ["$id"], + }), + ], + }); + + if (deployments["total"] > 0) { + deploymentId = deployments["deployments"][0]["$id"]; + } + } catch {} + + if (deploymentId === null) { + continue; + } + + const compressedFileName = `${func["$id"]}-${+new Date()}.tar.gz`; + const downloadUrl = functionsService.getDeploymentDownload({ + functionId: func["$id"], + deploymentId: deploymentId, + }); + + const downloadBuffer = await this.projectClient.call( + "get", + new URL(downloadUrl), + {}, + {}, + "arrayBuffer", + ); + + fs.writeFileSync( + compressedFileName, + Buffer.from(downloadBuffer as any), + ); + + tar.extract({ + sync: true, + cwd: funcPath, + file: compressedFileName, + strict: false, + }); + + fs.rmSync(compressedFileName); + + if (options.withVariables) { + const envFileLocation = `${funcPath}/.env`; + try { + fs.rmSync(envFileLocation); + } catch {} + + fs.writeFileSync( + envFileLocation, + holdingVars.map((r: any) => 
`${r.key}=${r.value}\n`).join(""), + ); + } + } + + return result; + } finally { + process.chdir(originalCwd); + } + } + + /** + * Pull sites from the project + */ + public async pullSites(options: PullSitesOptions = {}): Promise { + const originalCwd = process.cwd(); + process.chdir(this.configDirectoryPath); + + try { + const sitesService = new Sites(this.projectClient); + + const fetchResponse = await sitesService.list([ + JSON.stringify({ method: "limit", values: [1] }), + ]); + + if (fetchResponse["sites"].length <= 0) { + return []; + } + + const { sites } = await paginate( + async () => new Sites(this.projectClient).list(), + {}, + 100, + "sites", + ); + + const result: any[] = []; + + for (const site of sites) { + const sitePath = site.path || `sites/${site.name}`; + site["path"] = sitePath; + + const holdingVars = site["vars"]; + delete site["vars"]; + + result.push(site); + + if (!fs.existsSync(sitePath)) { + fs.mkdirSync(sitePath, { recursive: true }); + } + + if (options.code === false) { + continue; + } + + let deploymentId: string | null = null; + try { + const deployments = await sitesService.listDeployments({ + siteId: site["$id"], + queries: [ + JSON.stringify({ method: "limit", values: [1] }), + JSON.stringify({ + method: "orderDesc", + values: ["$id"], + }), + ], + }); + + if (deployments["total"] > 0) { + deploymentId = deployments["deployments"][0]["$id"]; + } + } catch {} + + if (deploymentId === null) { + continue; + } + + const compressedFileName = `${site["$id"]}-${+new Date()}.tar.gz`; + const downloadUrl = sitesService.getDeploymentDownload({ + siteId: site["$id"], + deploymentId: deploymentId, + }); + + const downloadBuffer = await this.projectClient.call( + "get", + new URL(downloadUrl), + {}, + {}, + "arrayBuffer", + ); + + fs.writeFileSync( + compressedFileName, + Buffer.from(downloadBuffer as any), + ); + + tar.extract({ + sync: true, + cwd: sitePath, + file: compressedFileName, + strict: false, + }); + + fs.rmSync(compressedFileName); + + if (options.withVariables) { + const envFileLocation = `${sitePath}/.env`; + try { + fs.rmSync(envFileLocation); + } catch {} + + fs.writeFileSync( + envFileLocation, + holdingVars.map((r: any) => `${r.key}=${r.value}\n`).join(""), + ); + } + } + + return result; + } finally { + process.chdir(originalCwd); + } + } + + /** + * Pull collections from the project (deprecated) + */ + public async pullCollections(): Promise<{ + databases: any[]; + collections: any[]; + }> { + const databasesService = new Databases(this.projectClient); + + const fetchResponse = await databasesService.list([ + JSON.stringify({ method: "limit", values: [1] }), + ]); + + if (fetchResponse["databases"].length <= 0) { + return { databases: [], collections: [] }; + } + + const { databases } = await paginate( + async () => new Databases(this.projectClient).list(), + {}, + 100, + "databases", + ); + + const allDatabases: any[] = []; + const allCollections: any[] = []; + + for (const database of databases) { + allDatabases.push(database); + + const { collections } = await paginate( + async () => + new Databases(this.projectClient).listCollections(database.$id), + {}, + 100, + "collections", + ); + + for (const collection of collections) { + allCollections.push({ + ...collection, + $createdAt: undefined, + $updatedAt: undefined, + }); + } + } + + return { + databases: allDatabases, + collections: allCollections, + }; + } + + /** + * Pull tables from the project + */ + public async pullTables(): Promise<{ + databases: any[]; + tables: any[]; + }> { + const 
tablesDBService = new TablesDB(this.projectClient); + + const fetchResponse = await tablesDBService.list([ + JSON.stringify({ method: "limit", values: [1] }), + ]); + + if (fetchResponse["databases"].length <= 0) { + return { databases: [], tables: [] }; + } + + const { databases } = await paginate( + async () => new TablesDB(this.projectClient).list(), + {}, + 100, + "databases", + ); + + const allDatabases: any[] = []; + const allTables: any[] = []; + + for (const database of databases) { + allDatabases.push(database); + + const { tables } = await paginate( + async () => new TablesDB(this.projectClient).listTables(database.$id), + {}, + 100, + "tables", + ); + + for (const table of tables) { + allTables.push({ + ...table, + $createdAt: undefined, + $updatedAt: undefined, + }); + } + } + + return { + databases: allDatabases, + tables: allTables, + }; + } + + /** + * Pull storage buckets from the project + */ + public async pullBuckets(): Promise { + const storageService = new Storage(this.projectClient); + + const fetchResponse = await storageService.listBuckets([ + JSON.stringify({ method: "limit", values: [1] }), + ]); + + if (fetchResponse["buckets"].length <= 0) { + return []; + } + + const { buckets } = await paginate( + async () => new Storage(this.projectClient).listBuckets(), + {}, + 100, + "buckets", + ); + + return buckets; + } + + /** + * Pull teams from the project + */ + public async pullTeams(): Promise { + const teamsService = new Teams(this.projectClient); + + const fetchResponse = await teamsService.list([ + JSON.stringify({ method: "limit", values: [1] }), + ]); + + if (fetchResponse["teams"].length <= 0) { + return []; + } + + const { teams } = await paginate( + async () => new Teams(this.projectClient).list(), + {}, + 100, + "teams", + ); + + return teams; + } + + /** + * Pull messaging topics from the project + */ + public async pullMessagingTopics(): Promise { + const messagingService = new Messaging(this.projectClient); + + const fetchResponse = await messagingService.listTopics([ + JSON.stringify({ method: "limit", values: [1] }), + ]); + + if (fetchResponse["topics"].length <= 0) { + return []; + } + + const { topics } = await paginate( + async () => new Messaging(this.projectClient).listTopics(), + {}, + 100, + "topics", + ); + + return topics; + } +} + +/** Helper methods for CLI commands */ + export const pullResources = async ({ skipDeprecated = false, }: PullResourcesOptions = {}): Promise => { @@ -92,12 +634,14 @@ const pullSettings = async (): Promise => { log("Pulling project settings ..."); try { - const projectsService = await getProjectsService(); - let response = await projectsService.get( - localConfig.getProject().projectId, - ); + const pullInstance = await createPullInstance(); + const projectId = localConfig.getProject().projectId; + const settings = await pullInstance.pullSettings(projectId); - localConfig.setProject(response.$id, response.name, response); + localConfig.setProject(projectId, settings.projectName, { + name: settings.projectName, + ...settings.settings, + }); success(`Successfully pulled ${chalk.bold("all")} project settings.`); } catch (e) { @@ -109,10 +653,7 @@ const pullFunctions = async ({ code, withVariables, }: PullFunctionsOptions = {}): Promise => { - process.chdir(localConfig.configDirectoryPath); - log("Fetching functions ..."); - let total = 0; const functionsService = await getFunctionsService(); const fetchResponse = await functionsService.list([ @@ -120,11 +661,11 @@ const pullFunctions = async ({ ]); if 
(fetchResponse["functions"].length <= 0) { log("No functions found."); - success(`Successfully pulled ${chalk.bold(total)} functions.`); + success(`Successfully pulled ${chalk.bold(0)} functions.`); return; } - const functions = cliConfig.all + const functionsToCheck = cliConfig.all ? ( await paginate( async () => (await getFunctionsService()).list(), @@ -136,115 +677,45 @@ const pullFunctions = async ({ : (await inquirer.prompt(questionsPullFunctions)).functions; let allowCodePull: boolean | null = cliConfig.force === true ? true : null; + if (code !== false && allowCodePull === null) { + const codeAnswer = await inquirer.prompt(questionsPullFunctionsCode); + allowCodePull = codeAnswer.override; + } - for (let func of functions) { - total++; - log(`Pulling function ${chalk.bold(func["name"])} ...`); - - const localFunction = localConfig.getFunction(func.$id); - - func["path"] = localFunction["path"]; - if (!localFunction["path"]) { - func["path"] = `functions/${func.name}`; - } - const holdingVars = func["vars"]; - // We don't save var in to the config - delete func["vars"]; - localConfig.addFunction(func); - - if (!fs.existsSync(func["path"])) { - fs.mkdirSync(func["path"], { recursive: true }); - } - - if (code === false) { - warn("Source code download skipped."); - continue; - } - - if (allowCodePull === null) { - const codeAnswer = await inquirer.prompt(questionsPullFunctionsCode); - allowCodePull = codeAnswer.override; - } - - if (!allowCodePull) { - continue; - } - - let deploymentId: string | null = null; - - try { - const fetchResponse = await functionsService.listDeployments({ - functionId: func["$id"], - queries: [ - JSON.stringify({ method: "limit", values: [1] }), - JSON.stringify({ method: "orderDesc", values: ["$id"] }), - ], - }); - - if (fetchResponse["total"] > 0) { - deploymentId = fetchResponse["deployments"][0]["$id"]; - } - } catch {} - - if (deploymentId === null) { - log( - "Source code download skipped because function doesn't have any available deployment", - ); - continue; - } - - log("Pulling latest deployment code ..."); - - const compressedFileName = `${func["$id"]}-${+new Date()}.tar.gz`; - const downloadUrl = functionsService.getDeploymentDownload({ - functionId: func["$id"], - deploymentId: deploymentId, - }); - - const client = (await getFunctionsService()).client; - const downloadBuffer = await client.call( - "get", - new URL(downloadUrl), - {}, - {}, - "arrayBuffer", - ); - - fs.writeFileSync(compressedFileName, Buffer.from(downloadBuffer as any)); + const shouldPullCode = code !== false && allowCodePull === true; - tar.extract({ - sync: true, - cwd: func["path"], - file: compressedFileName, - strict: false, - }); + const pullInstance = await createPullInstance(); + const functions = await pullInstance.pullFunctions({ + code: shouldPullCode, + withVariables, + }); - fs.rmSync(compressedFileName); + const selectedFunctionIds = new Set(functionsToCheck.map((f: any) => f.$id)); + const filteredFunctions = functions.filter((f) => + selectedFunctionIds.has(f.$id), + ); - if (withVariables) { - const envFileLocation = `${func["path"]}/.env`; - try { - fs.rmSync(envFileLocation); - } catch {} + for (const func of filteredFunctions) { + log(`Pulling function ${chalk.bold(func["name"])} ...`); + const localFunction = localConfig.getFunction(func.$id); + func["path"] = localFunction["path"] || func["path"]; + localConfig.addFunction(func); + } - fs.writeFileSync( - envFileLocation, - holdingVars.map((r: any) => `${r.key}=${r.value}\n`).join(""), - ); - } + if 
(!shouldPullCode) { + warn("Source code download skipped."); } - success(`Successfully pulled ${chalk.bold(total)} functions.`); + success( + `Successfully pulled ${chalk.bold(filteredFunctions.length)} functions.`, + ); }; const pullSites = async ({ code, withVariables, }: PullSitesOptions = {}): Promise => { - process.chdir(localConfig.configDirectoryPath); - log("Fetching sites ..."); - let total = 0; const sitesService = await getSitesService(); const fetchResponse = await sitesService.list([ @@ -252,11 +723,11 @@ const pullSites = async ({ ]); if (fetchResponse["sites"].length <= 0) { log("No sites found."); - success(`Successfully pulled ${chalk.bold(total)} sites.`); + success(`Successfully pulled ${chalk.bold(0)} sites.`); return; } - const sites = cliConfig.all + const sitesToCheck = cliConfig.all ? ( await paginate( async () => (await getSitesService()).list(), @@ -268,105 +739,34 @@ const pullSites = async ({ : (await inquirer.prompt(questionsPullSites)).sites; let allowCodePull: boolean | null = cliConfig.force === true ? true : null; + if (code !== false && allowCodePull === null) { + const codeAnswer = await inquirer.prompt(questionsPullSitesCode); + allowCodePull = codeAnswer.override; + } - for (let site of sites) { - total++; - log(`Pulling site ${chalk.bold(site["name"])} ...`); - - const localSite = localConfig.getSite(site.$id); - - site["path"] = localSite["path"]; - if (!localSite["path"]) { - site["path"] = `sites/${site.name}`; - } - const holdingVars = site["vars"]; - // We don't save var in to the config - delete site["vars"]; - localConfig.addSite(site); - - if (!fs.existsSync(site["path"])) { - fs.mkdirSync(site["path"], { recursive: true }); - } - - if (code === false) { - warn("Source code download skipped."); - continue; - } - - if (allowCodePull === null) { - const codeAnswer = await inquirer.prompt(questionsPullSitesCode); - allowCodePull = codeAnswer.override; - } - - if (!allowCodePull) { - continue; - } - - let deploymentId: string | null = null; - - try { - const fetchResponse = await sitesService.listDeployments({ - siteId: site["$id"], - queries: [ - JSON.stringify({ method: "limit", values: [1] }), - JSON.stringify({ method: "orderDesc", values: ["$id"] }), - ], - }); - - if (fetchResponse["total"] > 0) { - deploymentId = fetchResponse["deployments"][0]["$id"]; - } - } catch {} - - if (deploymentId === null) { - log( - "Source code download skipped because site doesn't have any available deployment", - ); - continue; - } - - log("Pulling latest deployment code ..."); - - const compressedFileName = `${site["$id"]}-${+new Date()}.tar.gz`; - const downloadUrl = sitesService.getDeploymentDownload({ - siteId: site["$id"], - deploymentId: deploymentId, - }); - - const client = (await getSitesService()).client; - const downloadBuffer = await client.call( - "get", - new URL(downloadUrl), - {}, - {}, - "arrayBuffer", - ); - - fs.writeFileSync(compressedFileName, Buffer.from(downloadBuffer as any)); + const shouldPullCode = code !== false && allowCodePull === true; - tar.extract({ - sync: true, - cwd: site["path"], - file: compressedFileName, - strict: false, - }); + const pullInstance = await createPullInstance(); + const sites = await pullInstance.pullSites({ + code: shouldPullCode, + withVariables, + }); - fs.rmSync(compressedFileName); + const selectedSiteIds = new Set(sitesToCheck.map((s: any) => s.$id)); + const filteredSites = sites.filter((s) => selectedSiteIds.has(s.$id)); - if (withVariables) { - const envFileLocation = `${site["path"]}/.env`; - try 
{ - fs.rmSync(envFileLocation); - } catch {} + for (const site of filteredSites) { + log(`Pulling site ${chalk.bold(site["name"])} ...`); + const localSite = localConfig.getSite(site.$id); + site["path"] = localSite["path"] || site["path"]; + localConfig.addSite(site); + } - fs.writeFileSync( - envFileLocation, - holdingVars.map((r: any) => `${r.key}=${r.value}\n`).join(""), - ); - } + if (!shouldPullCode) { + warn("Source code download skipped."); } - success(`Successfully pulled ${chalk.bold(total)} sites.`); + success(`Successfully pulled ${chalk.bold(filteredSites.length)} sites.`); }; const pullCollection = async (): Promise => { @@ -374,8 +774,6 @@ const pullCollection = async (): Promise => { "appwrite pull collection has been deprecated. Please consider using 'appwrite pull tables' instead", ); log("Fetching collections ..."); - let totalDatabases = 0; - let totalCollections = 0; const databasesService = await getDatabasesService(); const fetchResponse = await databasesService.list([ @@ -384,64 +782,32 @@ const pullCollection = async (): Promise => { if (fetchResponse["databases"].length <= 0) { log("No collections found."); success( - `Successfully pulled ${chalk.bold(totalCollections)} collections from ${chalk.bold(totalDatabases)} databases.`, + `Successfully pulled ${chalk.bold(0)} collections from ${chalk.bold(0)} databases.`, ); return; } - let databases: string[] = cliConfig.ids; - - if (databases.length === 0) { - if (cliConfig.all) { - databases = ( - await paginate( - async () => (await getDatabasesService()).list(), - {}, - 100, - "databases", - ) - ).databases.map((database: any) => database.$id); - } else { - databases = (await inquirer.prompt(questionsPullCollection)).databases; - } - } - - for (const databaseId of databases) { - const database = await databasesService.get(databaseId); + const pullInstance = await createPullInstance(); + const { databases, collections } = await pullInstance.pullCollections(); - totalDatabases++; + for (const database of databases) { log( `Pulling all collections from ${chalk.bold(database["name"])} database ...`, ); - localConfig.addDatabase(database); + } - const { collections } = await paginate( - async () => (await getDatabasesService()).listCollections(databaseId), - {}, - 100, - "collections", - ); - - for (const collection of collections) { - totalCollections++; - localConfig.addCollection({ - ...collection, - $createdAt: undefined, - $updatedAt: undefined, - }); - } + for (const collection of collections) { + localConfig.addCollection(collection); } success( - `Successfully pulled ${chalk.bold(totalCollections)} collections from ${chalk.bold(totalDatabases)} databases.`, + `Successfully pulled ${chalk.bold(collections.length)} collections from ${chalk.bold(databases.length)} databases.`, ); }; const pullTable = async (): Promise => { log("Fetching tables ..."); - let totalTablesDBs = 0; - let totalTables = 0; const tablesDBService = await getTablesDBService(); const fetchResponse = await tablesDBService.list([ @@ -450,148 +816,90 @@ const pullTable = async (): Promise => { if (fetchResponse["databases"].length <= 0) { log("No tables found."); success( - `Successfully pulled ${chalk.bold(totalTables)} tables from ${chalk.bold(totalTablesDBs)} tableDBs.`, + `Successfully pulled ${chalk.bold(0)} tables from ${chalk.bold(0)} tableDBs.`, ); return; } - let databases: string[] = cliConfig.ids; - - if (databases.length === 0) { - if (cliConfig.all) { - databases = ( - await paginate( - async () => (await getTablesDBService()).list(), 
- {}, - 100, - "databases", - ) - ).databases.map((database: any) => database.$id); - } else { - databases = (await inquirer.prompt(questionsPullCollection)).databases; - } - } - - for (const databaseId of databases) { - const database = await tablesDBService.get(databaseId); + const pullInstance = await createPullInstance(); + const { databases, tables } = await pullInstance.pullTables(); - totalTablesDBs++; + for (const database of databases) { log(`Pulling all tables from ${chalk.bold(database["name"])} database ...`); - localConfig.addTablesDB(database); + } - const { tables } = await paginate( - async () => (await getTablesDBService()).listTables(databaseId), - {}, - 100, - "tables", - ); - - for (const table of tables) { - totalTables++; - localConfig.addTable({ - ...table, - $createdAt: undefined, - $updatedAt: undefined, - }); - } + for (const table of tables) { + localConfig.addTable(table); } success( - `Successfully pulled ${chalk.bold(totalTables)} tables from ${chalk.bold(totalTablesDBs)} tableDBs.`, + `Successfully pulled ${chalk.bold(tables.length)} tables from ${chalk.bold(databases.length)} tableDBs.`, ); }; const pullBucket = async (): Promise => { log("Fetching buckets ..."); - let total = 0; - const storageService = await getStorageService(); - const fetchResponse = await storageService.listBuckets([ - JSON.stringify({ method: "limit", values: [1] }), - ]); - if (fetchResponse["buckets"].length <= 0) { + const pullInstance = await createPullInstance(); + const buckets = await pullInstance.pullBuckets(); + + if (buckets.length === 0) { log("No buckets found."); - success(`Successfully pulled ${chalk.bold(total)} buckets.`); + success(`Successfully pulled ${chalk.bold(0)} buckets.`); return; } - const { buckets } = await paginate( - async () => (await getStorageService()).listBuckets(), - {}, - 100, - "buckets", - ); - for (const bucket of buckets) { - total++; log(`Pulling bucket ${chalk.bold(bucket["name"])} ...`); localConfig.addBucket(bucket); } - success(`Successfully pulled ${chalk.bold(total)} buckets.`); + success(`Successfully pulled ${chalk.bold(buckets.length)} buckets.`); }; const pullTeam = async (): Promise => { log("Fetching teams ..."); - let total = 0; - const teamsService = await getTeamsService(); - const fetchResponse = await teamsService.list([ - JSON.stringify({ method: "limit", values: [1] }), - ]); - if (fetchResponse["teams"].length <= 0) { + const pullInstance = await createPullInstance(); + const teams = await pullInstance.pullTeams(); + + if (teams.length === 0) { log("No teams found."); - success(`Successfully pulled ${chalk.bold(total)} teams.`); + success(`Successfully pulled ${chalk.bold(0)} teams.`); return; } - const { teams } = await paginate( - async () => (await getTeamsService()).list(), - {}, - 100, - "teams", - ); - for (const team of teams) { - total++; log(`Pulling team ${chalk.bold(team["name"])} ...`); localConfig.addTeam(team); } - success(`Successfully pulled ${chalk.bold(total)} teams.`); + success(`Successfully pulled ${chalk.bold(teams.length)} teams.`); }; const pullMessagingTopic = async (): Promise => { log("Fetching topics ..."); - let total = 0; - const messagingService = await getMessagingService(); - const fetchResponse = await messagingService.listTopics([ - JSON.stringify({ method: "limit", values: [1] }), - ]); - if (fetchResponse["topics"].length <= 0) { + const pullInstance = await createPullInstance(); + const topics = await pullInstance.pullMessagingTopics(); + + if (topics.length === 0) { log("No topics 
found."); - success(`Successfully pulled ${chalk.bold(total)} topics.`); + success(`Successfully pulled ${chalk.bold(0)} topics.`); return; } - const { topics } = await paginate( - async () => (await getMessagingService()).listTopics(), - {}, - 100, - "topics", - ); - for (const topic of topics) { - total++; log(`Pulling topic ${chalk.bold(topic["name"])} ...`); localConfig.addMessagingTopic(topic); } - success(`Successfully pulled ${chalk.bold(total)} topics.`); + success(`Successfully pulled ${chalk.bold(topics.length)} topics.`); }; +/** Commander.js exports */ + export const pull = new Command("pull") .description(commandDescriptions["pull"]) .action(actionRunner(() => pullResources({ skipDeprecated: true }))); diff --git a/package.json b/package.json index 5383b646..301a58ad 100644 --- a/package.json +++ b/package.json @@ -43,7 +43,8 @@ "json-bigint": "^1.0.0", "tail": "^2.2.6", "tar": "^6.1.11", - "undici": "^5.28.2" + "undici": "^5.28.2", + "zod": "^4.3.5" }, "devDependencies": { "@types/bun": "^1.3.5", From 34e6d2e60050fa8db03bf86859dc18f0905e642e Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Tue, 6 Jan 2026 11:52:59 +0530 Subject: [PATCH 02/41] only download code for selected functions and sites --- lib/commands/pull.ts | 36 ++++++++++++++++++++---------------- 1 file changed, 20 insertions(+), 16 deletions(-) diff --git a/lib/commands/pull.ts b/lib/commands/pull.ts index f1812d7a..7386a562 100644 --- a/lib/commands/pull.ts +++ b/lib/commands/pull.ts @@ -60,11 +60,13 @@ export interface PullOptions { interface PullFunctionsOptions { code?: boolean; withVariables?: boolean; + functionIds?: string[]; } interface PullSitesOptions { code?: boolean; withVariables?: boolean; + siteIds?: string[]; } interface PullResourcesOptions { @@ -201,13 +203,17 @@ export class Pull { return []; } - const { functions } = await paginate( + const { functions: allFunctions } = await paginate( async () => new Functions(this.projectClient).list(), {}, 100, "functions", ); + const functions = options.functionIds + ? allFunctions.filter((f) => options.functionIds!.includes(f.$id)) + : allFunctions; + const result: any[] = []; for (const func of functions) { @@ -314,13 +320,17 @@ export class Pull { return []; } - const { sites } = await paginate( + const { sites: allSites } = await paginate( async () => new Sites(this.projectClient).list(), {}, 100, "sites", ); + const sites = options.siteIds + ? 
allSites.filter((s) => options.siteIds!.includes(s.$id)) + : allSites; + const result: any[] = []; for (const site of sites) { @@ -683,19 +693,16 @@ const pullFunctions = async ({ } const shouldPullCode = code !== false && allowCodePull === true; + const selectedFunctionIds = functionsToCheck.map((f: any) => f.$id); const pullInstance = await createPullInstance(); const functions = await pullInstance.pullFunctions({ code: shouldPullCode, withVariables, + functionIds: selectedFunctionIds, }); - const selectedFunctionIds = new Set(functionsToCheck.map((f: any) => f.$id)); - const filteredFunctions = functions.filter((f) => - selectedFunctionIds.has(f.$id), - ); - - for (const func of filteredFunctions) { + for (const func of functions) { log(`Pulling function ${chalk.bold(func["name"])} ...`); const localFunction = localConfig.getFunction(func.$id); func["path"] = localFunction["path"] || func["path"]; @@ -706,9 +713,7 @@ const pullFunctions = async ({ warn("Source code download skipped."); } - success( - `Successfully pulled ${chalk.bold(filteredFunctions.length)} functions.`, - ); + success(`Successfully pulled ${chalk.bold(functions.length)} functions.`); }; const pullSites = async ({ @@ -745,17 +750,16 @@ const pullSites = async ({ } const shouldPullCode = code !== false && allowCodePull === true; + const selectedSiteIds = sitesToCheck.map((s: any) => s.$id); const pullInstance = await createPullInstance(); const sites = await pullInstance.pullSites({ code: shouldPullCode, withVariables, + siteIds: selectedSiteIds, }); - const selectedSiteIds = new Set(sitesToCheck.map((s: any) => s.$id)); - const filteredSites = sites.filter((s) => selectedSiteIds.has(s.$id)); - - for (const site of filteredSites) { + for (const site of sites) { log(`Pulling site ${chalk.bold(site["name"])} ...`); const localSite = localConfig.getSite(site.$id); site["path"] = localSite["path"] || site["path"]; @@ -766,7 +770,7 @@ const pullSites = async ({ warn("Source code download skipped."); } - success(`Successfully pulled ${chalk.bold(filteredSites.length)} sites.`); + success(`Successfully pulled ${chalk.bold(sites.length)} sites.`); }; const pullCollection = async (): Promise => { From 7dad33d74ff0567d58441b5fe6cfaad52a5e4558 Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Tue, 6 Jan 2026 11:54:25 +0530 Subject: [PATCH 03/41] null fallback for holdingVars --- lib/commands/pull.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/commands/pull.ts b/lib/commands/pull.ts index 7386a562..7f3c15b4 100644 --- a/lib/commands/pull.ts +++ b/lib/commands/pull.ts @@ -220,7 +220,7 @@ export class Pull { const funcPath = func.path || `functions/${func.name}`; func["path"] = funcPath; - const holdingVars = func["vars"]; + const holdingVars = func["vars"] || []; delete func["vars"]; result.push(func); @@ -337,7 +337,7 @@ export class Pull { const sitePath = site.path || `sites/${site.name}`; site["path"] = sitePath; - const holdingVars = site["vars"]; + const holdingVars = site["vars"] || []; delete site["vars"]; result.push(site); From 6c22b4041087e02bccc5c86d4859ad3913e0c0a7 Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Tue, 6 Jan 2026 12:03:13 +0530 Subject: [PATCH 04/41] refactor duplicated deployment download code --- lib/commands/pull.ts | 227 ++++++++++++++++++++----------------------- 1 file changed, 103 insertions(+), 124 deletions(-) diff --git a/lib/commands/pull.ts b/lib/commands/pull.ts index 7f3c15b4..44db3ad2 100644 --- a/lib/commands/pull.ts +++ b/lib/commands/pull.ts @@ 
-100,6 +100,73 @@ export class Pull { this.configDirectoryPath = path; } + /** + * Download and extract deployment code for a resource + */ + private async downloadDeploymentCode(params: { + resourceId: string; + resourcePath: string; + holdingVars: any[]; + withVariables?: boolean; + listDeployments: () => Promise; + getDownloadUrl: (deploymentId: string) => string; + }): Promise { + const { + resourceId, + resourcePath, + holdingVars, + withVariables, + listDeployments, + getDownloadUrl, + } = params; + + let deploymentId: string | null = null; + try { + const deployments = await listDeployments(); + if (deployments["total"] > 0) { + deploymentId = deployments["deployments"][0]["$id"]; + } + } catch {} + + if (deploymentId === null) { + return; + } + + const compressedFileName = `${resourceId}-${+new Date()}.tar.gz`; + const downloadUrl = getDownloadUrl(deploymentId); + + const downloadBuffer = await this.projectClient.call( + "get", + new URL(downloadUrl), + {}, + {}, + "arrayBuffer", + ); + + fs.writeFileSync(compressedFileName, Buffer.from(downloadBuffer as any)); + + tar.extract({ + sync: true, + cwd: resourcePath, + file: compressedFileName, + strict: false, + }); + + fs.rmSync(compressedFileName); + + if (withVariables) { + const envFileLocation = `${resourcePath}/.env`; + try { + fs.rmSync(envFileLocation); + } catch {} + + fs.writeFileSync( + envFileLocation, + holdingVars.map((r: any) => `${r.key}=${r.value}\n`).join(""), + ); + } + } + /** * Pull resources from Appwrite project and return updated config * @@ -229,70 +296,26 @@ export class Pull { fs.mkdirSync(funcPath, { recursive: true }); } - if (options.code === false) { - continue; - } - - let deploymentId: string | null = null; - try { - const deployments = await functionsService.listDeployments({ - functionId: func["$id"], - queries: [ - JSON.stringify({ method: "limit", values: [1] }), - JSON.stringify({ - method: "orderDesc", - values: ["$id"], + if (options.code !== false) { + await this.downloadDeploymentCode({ + resourceId: func["$id"], + resourcePath: funcPath, + holdingVars, + withVariables: options.withVariables, + listDeployments: () => + functionsService.listDeployments({ + functionId: func["$id"], + queries: [ + JSON.stringify({ method: "limit", values: [1] }), + JSON.stringify({ method: "orderDesc", values: ["$id"] }), + ], + }), + getDownloadUrl: (deploymentId) => + functionsService.getDeploymentDownload({ + functionId: func["$id"], + deploymentId, }), - ], }); - - if (deployments["total"] > 0) { - deploymentId = deployments["deployments"][0]["$id"]; - } - } catch {} - - if (deploymentId === null) { - continue; - } - - const compressedFileName = `${func["$id"]}-${+new Date()}.tar.gz`; - const downloadUrl = functionsService.getDeploymentDownload({ - functionId: func["$id"], - deploymentId: deploymentId, - }); - - const downloadBuffer = await this.projectClient.call( - "get", - new URL(downloadUrl), - {}, - {}, - "arrayBuffer", - ); - - fs.writeFileSync( - compressedFileName, - Buffer.from(downloadBuffer as any), - ); - - tar.extract({ - sync: true, - cwd: funcPath, - file: compressedFileName, - strict: false, - }); - - fs.rmSync(compressedFileName); - - if (options.withVariables) { - const envFileLocation = `${funcPath}/.env`; - try { - fs.rmSync(envFileLocation); - } catch {} - - fs.writeFileSync( - envFileLocation, - holdingVars.map((r: any) => `${r.key}=${r.value}\n`).join(""), - ); } } @@ -346,70 +369,26 @@ export class Pull { fs.mkdirSync(sitePath, { recursive: true }); } - if (options.code === false) { - 
continue; - } - - let deploymentId: string | null = null; - try { - const deployments = await sitesService.listDeployments({ - siteId: site["$id"], - queries: [ - JSON.stringify({ method: "limit", values: [1] }), - JSON.stringify({ - method: "orderDesc", - values: ["$id"], + if (options.code !== false) { + await this.downloadDeploymentCode({ + resourceId: site["$id"], + resourcePath: sitePath, + holdingVars, + withVariables: options.withVariables, + listDeployments: () => + sitesService.listDeployments({ + siteId: site["$id"], + queries: [ + JSON.stringify({ method: "limit", values: [1] }), + JSON.stringify({ method: "orderDesc", values: ["$id"] }), + ], + }), + getDownloadUrl: (deploymentId) => + sitesService.getDeploymentDownload({ + siteId: site["$id"], + deploymentId, }), - ], }); - - if (deployments["total"] > 0) { - deploymentId = deployments["deployments"][0]["$id"]; - } - } catch {} - - if (deploymentId === null) { - continue; - } - - const compressedFileName = `${site["$id"]}-${+new Date()}.tar.gz`; - const downloadUrl = sitesService.getDeploymentDownload({ - siteId: site["$id"], - deploymentId: deploymentId, - }); - - const downloadBuffer = await this.projectClient.call( - "get", - new URL(downloadUrl), - {}, - {}, - "arrayBuffer", - ); - - fs.writeFileSync( - compressedFileName, - Buffer.from(downloadBuffer as any), - ); - - tar.extract({ - sync: true, - cwd: sitePath, - file: compressedFileName, - strict: false, - }); - - fs.rmSync(compressedFileName); - - if (options.withVariables) { - const envFileLocation = `${sitePath}/.env`; - try { - fs.rmSync(envFileLocation); - } catch {} - - fs.writeFileSync( - envFileLocation, - holdingVars.map((r: any) => `${r.key}=${r.value}\n`).join(""), - ); } } From befc1de4e5a26a07999c57cccfc4c9b34875f74c Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Tue, 6 Jan 2026 12:05:54 +0530 Subject: [PATCH 05/41] type safety --- lib/commands/pull.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/commands/pull.ts b/lib/commands/pull.ts index 44db3ad2..e90ebe48 100644 --- a/lib/commands/pull.ts +++ b/lib/commands/pull.ts @@ -106,7 +106,7 @@ export class Pull { private async downloadDeploymentCode(params: { resourceId: string; resourcePath: string; - holdingVars: any[]; + holdingVars: { key: string; value: string }[]; withVariables?: boolean; listDeployments: () => Promise; getDownloadUrl: (deploymentId: string) => string; @@ -162,7 +162,7 @@ export class Pull { fs.writeFileSync( envFileLocation, - holdingVars.map((r: any) => `${r.key}=${r.value}\n`).join(""), + holdingVars.map((r) => `${r.key}=${r.value}\n`).join(""), ); } } From efe8d5437eea30c2be3fbffc4d90da48ce0b070e Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Tue, 6 Jan 2026 12:07:59 +0530 Subject: [PATCH 06/41] more type safety --- lib/commands/config.ts | 5 ++++- lib/commands/pull.ts | 22 ++++++++++++++++------ 2 files changed, 20 insertions(+), 7 deletions(-) diff --git a/lib/commands/config.ts b/lib/commands/config.ts index 03e0cf9c..a21483b2 100644 --- a/lib/commands/config.ts +++ b/lib/commands/config.ts @@ -1,9 +1,12 @@ import { z } from "zod"; +import type { ProjectSettings, RawProjectSettings } from "../types.js"; const INT64_MIN = BigInt("-9223372036854775808"); const INT64_MAX = BigInt("9223372036854775807"); -const createSettingsObject = (settings: any) => { +const createSettingsObject = ( + settings: RawProjectSettings, +): ProjectSettings => { return { services: { account: settings.serviceStatusForAccount, diff --git a/lib/commands/pull.ts 
b/lib/commands/pull.ts index e90ebe48..f6db100c 100644 --- a/lib/commands/pull.ts +++ b/lib/commands/pull.ts @@ -42,6 +42,7 @@ import { import type { ConfigType } from "./config.js"; import { createSettingsObject } from "./config.js"; import { ProjectNotInitializedError } from "./errors.js"; +import type { ProjectSettings, RawProjectSettings } from "../types.js"; export interface PullOptions { all?: boolean; @@ -73,6 +74,12 @@ interface PullResourcesOptions { skipDeprecated?: boolean; } +export interface PullSettingsResult { + projectName: string; + settings: ProjectSettings; + rawSettings: RawProjectSettings; +} + async function createPullInstance(): Promise { const projectClient = await sdkForProject(); const consoleClient = await sdkForConsole(); @@ -240,13 +247,15 @@ export class Pull { /** * Pull project settings */ - public async pullSettings(projectId: string): Promise { + public async pullSettings(projectId: string): Promise { const projectsService = new Projects(this.consoleClient); const response = await projectsService.get(projectId); + const rawSettings = response as RawProjectSettings; return { projectName: response.name, - settings: createSettingsObject(response), + settings: createSettingsObject(rawSettings), + rawSettings, }; } @@ -627,10 +636,11 @@ const pullSettings = async (): Promise => { const projectId = localConfig.getProject().projectId; const settings = await pullInstance.pullSettings(projectId); - localConfig.setProject(projectId, settings.projectName, { - name: settings.projectName, - ...settings.settings, - }); + localConfig.setProject( + projectId, + settings.projectName, + settings.rawSettings, + ); success(`Successfully pulled ${chalk.bold("all")} project settings.`); } catch (e) { From d512ac99768527ff44c188b7f28b365b2b54b482 Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Tue, 6 Jan 2026 12:09:37 +0530 Subject: [PATCH 07/41] try catch errors while downloading code --- lib/commands/pull.ts | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/lib/commands/pull.ts b/lib/commands/pull.ts index f6db100c..9aa8dd27 100644 --- a/lib/commands/pull.ts +++ b/lib/commands/pull.ts @@ -150,7 +150,14 @@ export class Pull { "arrayBuffer", ); - fs.writeFileSync(compressedFileName, Buffer.from(downloadBuffer as any)); + try { + fs.writeFileSync(compressedFileName, Buffer.from(downloadBuffer as any)); + } catch (err) { + const message = err instanceof Error ? 
err.message : String(err); + throw new Error( + `Failed to write deployment archive to "${compressedFileName}": ${message}`, + ); + } tar.extract({ sync: true, From 47ce78dfdefce09316945b436ad729cab1063fb1 Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Tue, 6 Jan 2026 12:15:29 +0530 Subject: [PATCH 08/41] non deprecated methods --- lib/commands/pull.ts | 42 +++++++++++++++++++++--------------------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/lib/commands/pull.ts b/lib/commands/pull.ts index 9aa8dd27..1c8d7280 100644 --- a/lib/commands/pull.ts +++ b/lib/commands/pull.ts @@ -476,9 +476,9 @@ export class Pull { }> { const tablesDBService = new TablesDB(this.projectClient); - const fetchResponse = await tablesDBService.list([ - JSON.stringify({ method: "limit", values: [1] }), - ]); + const fetchResponse = await tablesDBService.list({ + queries: [JSON.stringify({ method: "limit", values: [1] })], + }); if (fetchResponse["databases"].length <= 0) { return { databases: [], tables: [] }; @@ -525,9 +525,9 @@ export class Pull { public async pullBuckets(): Promise { const storageService = new Storage(this.projectClient); - const fetchResponse = await storageService.listBuckets([ - JSON.stringify({ method: "limit", values: [1] }), - ]); + const fetchResponse = await storageService.listBuckets({ + queries: [JSON.stringify({ method: "limit", values: [1] })], + }); if (fetchResponse["buckets"].length <= 0) { return []; @@ -549,9 +549,9 @@ export class Pull { public async pullTeams(): Promise { const teamsService = new Teams(this.projectClient); - const fetchResponse = await teamsService.list([ - JSON.stringify({ method: "limit", values: [1] }), - ]); + const fetchResponse = await teamsService.list({ + queries: [JSON.stringify({ method: "limit", values: [1] })], + }); if (fetchResponse["teams"].length <= 0) { return []; @@ -573,9 +573,9 @@ export class Pull { public async pullMessagingTopics(): Promise { const messagingService = new Messaging(this.projectClient); - const fetchResponse = await messagingService.listTopics([ - JSON.stringify({ method: "limit", values: [1] }), - ]); + const fetchResponse = await messagingService.listTopics({ + queries: [JSON.stringify({ method: "limit", values: [1] })], + }); if (fetchResponse["topics"].length <= 0) { return []; @@ -719,9 +719,9 @@ const pullSites = async ({ log("Fetching sites ..."); const sitesService = await getSitesService(); - const fetchResponse = await sitesService.list([ - JSON.stringify({ method: "limit", values: [1] }), - ]); + const fetchResponse = await sitesService.list({ + queries: [JSON.stringify({ method: "limit", values: [1] })], + }); if (fetchResponse["sites"].length <= 0) { log("No sites found."); success(`Successfully pulled ${chalk.bold(0)} sites.`); @@ -776,9 +776,9 @@ const pullCollection = async (): Promise => { log("Fetching collections ..."); const databasesService = await getDatabasesService(); - const fetchResponse = await databasesService.list([ - JSON.stringify({ method: "limit", values: [1] }), - ]); + const fetchResponse = await databasesService.list({ + queries: [JSON.stringify({ method: "limit", values: [1] })], + }); if (fetchResponse["databases"].length <= 0) { log("No collections found."); success( @@ -810,9 +810,9 @@ const pullTable = async (): Promise => { log("Fetching tables ..."); const tablesDBService = await getTablesDBService(); - const fetchResponse = await tablesDBService.list([ - JSON.stringify({ method: "limit", values: [1] }), - ]); + const fetchResponse = await 
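  // The limit(1) query is only a cheap existence probe before the full fetch;
  // an empty result short-circuits with "No tables found." below.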
tablesDBService.list({ + queries: [JSON.stringify({ method: "limit", values: [1] })], + }); if (fetchResponse["databases"].length <= 0) { log("No tables found."); success( From b08b610b6c8766118079cc5080240e64be063553 Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Tue, 6 Jan 2026 13:27:58 +0530 Subject: [PATCH 09/41] fix filtering --- lib/commands/pull.ts | 89 +++++++++++++++++++++++++++++--------------- 1 file changed, 58 insertions(+), 31 deletions(-) diff --git a/lib/commands/pull.ts b/lib/commands/pull.ts index 1c8d7280..10039a87 100644 --- a/lib/commands/pull.ts +++ b/lib/commands/pull.ts @@ -13,6 +13,9 @@ import { TablesDB, Teams, Client, + AppwriteException, + Query, + Models, } from "@appwrite.io/console"; import { getFunctionsService, @@ -133,7 +136,11 @@ export class Pull { if (deployments["total"] > 0) { deploymentId = deployments["deployments"][0]["$id"]; } - } catch {} + } catch (e: unknown) { + if (e instanceof AppwriteException) { + error(e.message); + } + } if (deploymentId === null) { return; @@ -277,30 +284,38 @@ export class Pull { try { const functionsService = new Functions(this.projectClient); + let functions: Models.Function[]; + + if (options.functionIds && options.functionIds.length > 0) { + functions = await Promise.all( + options.functionIds.map((id) => + functionsService.get({ + functionId: id, + }), + ), + ); + } else { + const fetchResponse = await functionsService.list({ + queries: [Query.limit(1)], + }); - const fetchResponse = await functionsService.list([ - JSON.stringify({ method: "limit", values: [1] }), - ]); + if (fetchResponse["functions"].length <= 0) { + return []; + } - if (fetchResponse["functions"].length <= 0) { - return []; + const { functions: allFunctions } = await paginate( + async () => new Functions(this.projectClient).list(), + {}, + 100, + "functions", + ); + functions = allFunctions; } - const { functions: allFunctions } = await paginate( - async () => new Functions(this.projectClient).list(), - {}, - 100, - "functions", - ); - - const functions = options.functionIds - ? allFunctions.filter((f) => options.functionIds!.includes(f.$id)) - : allFunctions; - const result: any[] = []; for (const func of functions) { - const funcPath = func.path || `functions/${func.name}`; + const funcPath = `functions/${func.name}`; func["path"] = funcPath; const holdingVars = func["vars"] || []; @@ -350,22 +365,34 @@ export class Pull { try { const sitesService = new Sites(this.projectClient); + let allSites: Models.Site[]; + + if (options.siteIds && options.siteIds.length > 0) { + allSites = await Promise.all( + options.siteIds.map((id) => + sitesService.get({ + siteId: id, + }), + ), + ); + } else { + const fetchResponse = await sitesService.list({ + queries: [Query.limit(1)], + }); - const fetchResponse = await sitesService.list([ - JSON.stringify({ method: "limit", values: [1] }), - ]); + if (fetchResponse["sites"].length <= 0) { + return []; + } - if (fetchResponse["sites"].length <= 0) { - return []; + const { sites: fetchedSites } = await paginate( + async () => new Sites(this.projectClient).list(), + {}, + 100, + "sites", + ); + allSites = fetchedSites; } - const { sites: allSites } = await paginate( - async () => new Sites(this.projectClient).list(), - {}, - 100, - "sites", - ); - const sites = options.siteIds ? 
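      // Keep only the sites whose IDs were explicitly requested; when no IDs
      // were passed, every fetched site is pulled.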
allSites.filter((s) => options.siteIds!.includes(s.$id)) : allSites; @@ -373,7 +400,7 @@ export class Pull { const result: any[] = []; for (const site of sites) { - const sitePath = site.path || `sites/${site.name}`; + const sitePath = `sites/${site.name}`; site["path"] = sitePath; const holdingVars = site["vars"] || []; From a593586eee915ec884e7219cfdaf421685477d59 Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Tue, 6 Jan 2026 13:29:00 +0530 Subject: [PATCH 10/41] grammer --- lib/commands/pull.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/commands/pull.ts b/lib/commands/pull.ts index 10039a87..c768fefe 100644 --- a/lib/commands/pull.ts +++ b/lib/commands/pull.ts @@ -933,7 +933,7 @@ export const pull = new Command("pull") pull .command("all") - .description("Pull all resource.") + .description("Pull all resources") .action( actionRunner(() => { cliConfig.all = true; From e3ff5c9d9b4f7498b8613f984b1c8bbb1abdc8b9 Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Tue, 6 Jan 2026 13:32:14 +0530 Subject: [PATCH 11/41] use query class --- lib/commands/pull.ts | 32 +++++++++++--------------------- 1 file changed, 11 insertions(+), 21 deletions(-) diff --git a/lib/commands/pull.ts b/lib/commands/pull.ts index c768fefe..3bf99a15 100644 --- a/lib/commands/pull.ts +++ b/lib/commands/pull.ts @@ -336,10 +336,7 @@ export class Pull { listDeployments: () => functionsService.listDeployments({ functionId: func["$id"], - queries: [ - JSON.stringify({ method: "limit", values: [1] }), - JSON.stringify({ method: "orderDesc", values: ["$id"] }), - ], + queries: [Query.limit(1), Query.orderDesc("$id")], }), getDownloadUrl: (deploymentId) => functionsService.getDeploymentDownload({ @@ -421,10 +418,7 @@ export class Pull { listDeployments: () => sitesService.listDeployments({ siteId: site["$id"], - queries: [ - JSON.stringify({ method: "limit", values: [1] }), - JSON.stringify({ method: "orderDesc", values: ["$id"] }), - ], + queries: [Query.limit(1), Query.orderDesc("$id")], }), getDownloadUrl: (deploymentId) => sitesService.getDeploymentDownload({ @@ -450,9 +444,7 @@ export class Pull { }> { const databasesService = new Databases(this.projectClient); - const fetchResponse = await databasesService.list([ - JSON.stringify({ method: "limit", values: [1] }), - ]); + const fetchResponse = await databasesService.list([Query.limit(1)]); if (fetchResponse["databases"].length <= 0) { return { databases: [], collections: [] }; @@ -504,7 +496,7 @@ export class Pull { const tablesDBService = new TablesDB(this.projectClient); const fetchResponse = await tablesDBService.list({ - queries: [JSON.stringify({ method: "limit", values: [1] })], + queries: [Query.limit(1)], }); if (fetchResponse["databases"].length <= 0) { @@ -553,7 +545,7 @@ export class Pull { const storageService = new Storage(this.projectClient); const fetchResponse = await storageService.listBuckets({ - queries: [JSON.stringify({ method: "limit", values: [1] })], + queries: [Query.limit(1)], }); if (fetchResponse["buckets"].length <= 0) { @@ -577,7 +569,7 @@ export class Pull { const teamsService = new Teams(this.projectClient); const fetchResponse = await teamsService.list({ - queries: [JSON.stringify({ method: "limit", values: [1] })], + queries: [Query.limit(1)], }); if (fetchResponse["teams"].length <= 0) { @@ -601,7 +593,7 @@ export class Pull { const messagingService = new Messaging(this.projectClient); const fetchResponse = await messagingService.listTopics({ - queries: [JSON.stringify({ method: "limit", values: [1] 
})], + queries: [Query.limit(1)], }); if (fetchResponse["topics"].length <= 0) { @@ -689,9 +681,7 @@ const pullFunctions = async ({ log("Fetching functions ..."); const functionsService = await getFunctionsService(); - const fetchResponse = await functionsService.list([ - JSON.stringify({ method: "limit", values: [1] }), - ]); + const fetchResponse = await functionsService.list([Query.limit(1)]); if (fetchResponse["functions"].length <= 0) { log("No functions found."); success(`Successfully pulled ${chalk.bold(0)} functions.`); @@ -747,7 +737,7 @@ const pullSites = async ({ const sitesService = await getSitesService(); const fetchResponse = await sitesService.list({ - queries: [JSON.stringify({ method: "limit", values: [1] })], + queries: [Query.limit(1)], }); if (fetchResponse["sites"].length <= 0) { log("No sites found."); @@ -804,7 +794,7 @@ const pullCollection = async (): Promise => { const databasesService = await getDatabasesService(); const fetchResponse = await databasesService.list({ - queries: [JSON.stringify({ method: "limit", values: [1] })], + queries: [Query.limit(1)], }); if (fetchResponse["databases"].length <= 0) { log("No collections found."); @@ -838,7 +828,7 @@ const pullTable = async (): Promise => { const tablesDBService = await getTablesDBService(); const fetchResponse = await tablesDBService.list({ - queries: [JSON.stringify({ method: "limit", values: [1] })], + queries: [Query.limit(1)], }); if (fetchResponse["databases"].length <= 0) { log("No tables found."); From 707571b1792cf694095008892c1e2ea6953eec51 Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Tue, 6 Jan 2026 13:41:19 +0530 Subject: [PATCH 12/41] typing --- lib/commands/pull.ts | 69 +++++++++++++++++++++++++++++++++----------- 1 file changed, 52 insertions(+), 17 deletions(-) diff --git a/lib/commands/pull.ts b/lib/commands/pull.ts index 3bf99a15..8e5443c7 100644 --- a/lib/commands/pull.ts +++ b/lib/commands/pull.ts @@ -45,7 +45,12 @@ import { import type { ConfigType } from "./config.js"; import { createSettingsObject } from "./config.js"; import { ProjectNotInitializedError } from "./errors.js"; -import type { ProjectSettings, RawProjectSettings } from "../types.js"; +import type { + ProjectSettings, + RawProjectSettings, + FunctionConfig, + SiteConfig, +} from "../types.js"; export interface PullOptions { all?: boolean; @@ -278,7 +283,7 @@ export class Pull { */ public async pullFunctions( options: PullFunctionsOptions = {}, - ): Promise { + ): Promise { const originalCwd = process.cwd(); process.chdir(this.configDirectoryPath); @@ -312,16 +317,30 @@ export class Pull { functions = allFunctions; } - const result: any[] = []; + const result: FunctionConfig[] = []; for (const func of functions) { const funcPath = `functions/${func.name}`; - func["path"] = funcPath; - - const holdingVars = func["vars"] || []; - delete func["vars"]; - - result.push(func); + const holdingVars = func.vars || []; + + const functionConfig: FunctionConfig = { + $id: func.$id, + name: func.name, + runtime: func.runtime, + path: funcPath, + entrypoint: func.entrypoint, + execute: func.execute, + enabled: func.enabled, + logging: func.logging, + events: func.events, + schedule: func.schedule, + timeout: func.timeout, + commands: func.commands, + scopes: func.scopes, + specification: func.specification, + }; + + result.push(functionConfig); if (!fs.existsSync(funcPath)) { fs.mkdirSync(funcPath, { recursive: true }); @@ -356,7 +375,9 @@ export class Pull { /** * Pull sites from the project */ - public async pullSites(options: 
PullSitesOptions = {}): Promise { + public async pullSites( + options: PullSitesOptions = {}, + ): Promise { const originalCwd = process.cwd(); process.chdir(this.configDirectoryPath); @@ -394,16 +415,30 @@ export class Pull { ? allSites.filter((s) => options.siteIds!.includes(s.$id)) : allSites; - const result: any[] = []; + const result: SiteConfig[] = []; for (const site of sites) { const sitePath = `sites/${site.name}`; - site["path"] = sitePath; - - const holdingVars = site["vars"] || []; - delete site["vars"]; - - result.push(site); + const holdingVars = site.vars || []; + + const siteConfig: SiteConfig = { + $id: site.$id, + name: site.name, + path: sitePath, + framework: site.framework, + enabled: site.enabled, + logging: site.logging, + timeout: site.timeout, + buildRuntime: site.buildRuntime, + adapter: site.adapter, + installCommand: site.installCommand, + buildCommand: site.buildCommand, + outputDirectory: site.outputDirectory, + fallbackFile: site.fallbackFile, + specification: site.specification, + }; + + result.push(siteConfig); if (!fs.existsSync(sitePath)) { fs.mkdirSync(sitePath, { recursive: true }); From 8c0819f1e5471472cbee8928c7b8eb6fa5baa30e Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Tue, 6 Jan 2026 14:03:16 +0530 Subject: [PATCH 13/41] improve zod config --- lib/commands/config.ts | 111 ++++++++++++++++++++++++++++++++++++++--- 1 file changed, 103 insertions(+), 8 deletions(-) diff --git a/lib/commands/config.ts b/lib/commands/config.ts index a21483b2..e321d827 100644 --- a/lib/commands/config.ts +++ b/lib/commands/config.ts @@ -84,6 +84,14 @@ const FunctionSchema = z }) .strict(); +const DatabaseSchema = z + .object({ + $id: z.string(), + name: z.string(), + enabled: z.boolean().optional(), + }) + .strict(); + const int64Schema = z.preprocess( (val) => { if (typeof val === "bigint") { @@ -186,6 +194,7 @@ const AttributeSchema = AttributeSchemaBase.refine( path: ["default"], }, ); +const ColumnSchema = AttributeSchema; const IndexSchema = z .object({ @@ -197,6 +206,16 @@ const IndexSchema = z }) .strict(); +const IndexTableSchema = z + .object({ + key: z.string(), + type: z.string(), + status: z.string().optional(), + columns: z.array(z.string()), + orders: z.array(z.string()).optional(), + }) + .strict(); + const CollectionSchema = z .object({ $id: z.string(), @@ -247,13 +266,55 @@ const CollectionSchema = z } }); -const DatabaseSchema = z +const TablesDBSchema = z .object({ $id: z.string(), + $permissions: z.array(z.string()).optional(), + databaseId: z.string(), name: z.string(), enabled: z.boolean().optional(), + rowSecurity: z.boolean().default(true), + columns: z.array(ColumnSchema).optional(), + indexes: z.array(IndexTableSchema).optional(), }) - .strict(); + .strict() + .superRefine((data, ctx) => { + if (data.columns && data.columns.length > 0) { + const seenKeys = new Set(); + const duplicateKeys = new Set(); + + data.columns.forEach((col, index) => { + if (seenKeys.has(col.key)) { + duplicateKeys.add(col.key); + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: `Column with the key '${col.key}' already exists. 
Column keys must be unique, try again with a different key.`, + path: ["columns", index, "key"], + }); + } else { + seenKeys.add(col.key); + } + }); + } + + if (data.indexes && data.indexes.length > 0) { + const seenKeys = new Set(); + const duplicateKeys = new Set(); + + data.indexes.forEach((index, indexPos) => { + if (seenKeys.has(index.key)) { + duplicateKeys.add(index.key); + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: `Index with the key '${index.key}' already exists. Index keys must be unique, try again with a different key.`, + path: ["indexes", indexPos, "key"], + }); + } else { + seenKeys.add(index.key); + } + }); + } + }); const BucketSchema = z .object({ @@ -367,6 +428,7 @@ const configSchema = z sites: z.array(SiteSchema).optional(), databases: z.array(DatabaseSchema).optional(), collections: z.array(CollectionSchema).optional(), + tablesDB: z.array(TablesDBSchema).optional(), topics: z.array(TopicSchema).optional(), teams: z.array(TeamSchema).optional(), buckets: z.array(BucketSchema).optional(), @@ -375,19 +437,52 @@ const configSchema = z .strict(); export type ConfigType = z.infer; +export type SettingsType = z.infer; +export type FunctionType = z.infer; +export type SiteType = z.infer; +export type DatabaseType = z.infer; +export type CollectionType = z.infer; +export type TablesDBType = z.infer; +export type TopicType = z.infer; +export type TeamType = z.infer; +export type MessageType = z.infer; +export type BucketType = z.infer; + export { configSchema, + + /** Project Settings */ + SettingsSchema, + + /** Functions and Sites */ SiteSchema, FunctionSchema, - CollectionSchema, + + /** Databases */ DatabaseSchema, - BucketSchema, + + /** Collections (legacy) */ + CollectionSchema, + AttributeSchema, + IndexSchema, + + /** Tables */ + TablesDBSchema, + ColumnSchema, + IndexTableSchema, + + /** Topics */ TopicSchema, + + /** Teams */ TeamSchema, + + /** Messages */ MessageSchema, - SettingsSchema, - AttributeSchema, - AttributeSchemaBase, - IndexSchema, + + /** Buckets */ + BucketSchema, + + /** Helper functions */ createSettingsObject, }; From c4b91b022bde722c80c894b8bf9a0ec236b03ffa Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Tue, 6 Jan 2026 14:05:20 +0530 Subject: [PATCH 14/41] deprecation --- lib/commands/config.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/commands/config.ts b/lib/commands/config.ts index e321d827..fa872eea 100644 --- a/lib/commands/config.ts +++ b/lib/commands/config.ts @@ -136,7 +136,7 @@ const int64Schema = z.preprocess( if (val < INT64_MIN || val > INT64_MAX) { ctx.addIssue({ - code: z.ZodIssueCode.custom, + code: "custom", message: `must be between ${INT64_MIN} and ${INT64_MAX} (64-bit signed integer range)`, }); } From a550d6a95aa166740b4d921b1c8a9b582d432d38 Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Tue, 6 Jan 2026 14:21:39 +0530 Subject: [PATCH 15/41] chore shift attribute creation to seperate file --- lib/commands/push.ts | 1062 +----------------------------- lib/commands/update.ts | 12 +- lib/commands/utils/attributes.ts | 733 +++++++++++++++++++++ lib/commands/utils/pools.ts | 354 ++++++++++ 4 files changed, 1114 insertions(+), 1047 deletions(-) create mode 100644 lib/commands/utils/attributes.ts create mode 100644 lib/commands/utils/pools.ts diff --git a/lib/commands/push.ts b/lib/commands/push.ts index b1ae3a60..a5c84b76 100644 --- a/lib/commands/push.ts +++ b/lib/commands/push.ts @@ -7,7 +7,6 @@ import ID from "../id.js"; import { localConfig, globalConfig, - KeysAttributes, 
KeysFunction, KeysSite, whitelistKeys, @@ -57,23 +56,17 @@ import { } from "../services.js"; import { ApiService, AuthMethod } from "@appwrite.io/console"; import { checkDeployConditions } from "../utils.js"; +import { Pools } from "./utils/pools.js"; +import { Attributes, Collection } from "./utils/attributes.js"; -const STEP_SIZE = 100; // Resources const POLL_DEBOUNCE = 2000; // Milliseconds const POLL_DEFAULT_VALUE = 30; let pollMaxDebounces = POLL_DEFAULT_VALUE; -const changeableKeys = [ - "status", - "required", - "xdefault", - "elements", - "min", - "max", - "default", - "error", -]; +// Shared instances +const pools = new Pools(pollMaxDebounces); +const attributes = new Attributes(pools); interface ObjectChange { group: string; @@ -84,13 +77,6 @@ interface ObjectChange { type ComparableValue = boolean | number | string | any[] | undefined; -interface AttributeChange { - key: string; - attribute: any; - reason: string; - action: string; -} - interface PushResourcesOptions { skipDeprecated?: boolean; } @@ -118,383 +104,6 @@ interface PushTableOptions { attempts?: number; } -interface AwaitPools { - wipeAttributes: ( - databaseId: string, - collectionId: string, - iteration?: number, - ) => Promise; - - wipeIndexes: ( - databaseId: string, - collectionId: string, - iteration?: number, - ) => Promise; - - deleteAttributes: ( - databaseId: string, - collectionId: string, - attributeKeys: any[], - iteration?: number, - ) => Promise; - - expectAttributes: ( - databaseId: string, - collectionId: string, - attributeKeys: string[], - iteration?: number, - ) => Promise; - - deleteIndexes: ( - databaseId: string, - collectionId: string, - indexesKeys: any[], - iteration?: number, - ) => Promise; - - expectIndexes: ( - databaseId: string, - collectionId: string, - indexKeys: string[], - iteration?: number, - ) => Promise; -} - -const awaitPools: AwaitPools = { - wipeAttributes: async ( - databaseId: string, - collectionId: string, - iteration: number = 1, - ): Promise => { - if (iteration > pollMaxDebounces) { - return false; - } - - const databasesService = await getDatabasesService(); - const response = await databasesService.listAttributes( - databaseId, - collectionId, - [JSON.stringify({ method: "limit", values: [1] })], - ); - const { total } = response; - - if (total === 0) { - return true; - } - - if (pollMaxDebounces === POLL_DEFAULT_VALUE) { - let steps = Math.max(1, Math.ceil(total / STEP_SIZE)); - if (steps > 1 && iteration === 1) { - pollMaxDebounces *= steps; - - log( - "Found a large number of attributes, increasing timeout to " + - (pollMaxDebounces * POLL_DEBOUNCE) / 1000 / 60 + - " minutes", - ); - } - } - - await new Promise((resolve) => setTimeout(resolve, POLL_DEBOUNCE)); - - return await awaitPools.wipeAttributes( - databaseId, - collectionId, - iteration + 1, - ); - }, - wipeIndexes: async ( - databaseId: string, - collectionId: string, - iteration: number = 1, - ): Promise => { - if (iteration > pollMaxDebounces) { - return false; - } - - const databasesService = await getDatabasesService(); - const response = await databasesService.listIndexes( - databaseId, - collectionId, - [JSON.stringify({ method: "limit", values: [1] })], - ); - const { total } = response; - - if (total === 0) { - return true; - } - - if (pollMaxDebounces === POLL_DEFAULT_VALUE) { - let steps = Math.max(1, Math.ceil(total / STEP_SIZE)); - if (steps > 1 && iteration === 1) { - pollMaxDebounces *= steps; - - log( - "Found a large number of indexes, increasing timeout to " + - (pollMaxDebounces * 
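            // pollMaxDebounces * POLL_DEBOUNCE is the total wait budget in
            // milliseconds; dividing by 1000 and 60 reports it as minutes.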
POLL_DEBOUNCE) / 1000 / 60 + - " minutes", - ); - } - } - - await new Promise((resolve) => setTimeout(resolve, POLL_DEBOUNCE)); - - return await awaitPools.wipeIndexes( - databaseId, - collectionId, - iteration + 1, - ); - }, - deleteAttributes: async ( - databaseId: string, - collectionId: string, - attributeKeys: any[], - iteration: number = 1, - ): Promise => { - if (iteration > pollMaxDebounces) { - return false; - } - - if (pollMaxDebounces === POLL_DEFAULT_VALUE) { - let steps = Math.max(1, Math.ceil(attributeKeys.length / STEP_SIZE)); - if (steps > 1 && iteration === 1) { - pollMaxDebounces *= steps; - - log( - "Found a large number of attributes to be deleted. Increasing timeout to " + - (pollMaxDebounces * POLL_DEBOUNCE) / 1000 / 60 + - " minutes", - ); - } - } - - const { attributes } = await paginate( - async (args: any) => { - const databasesService = await getDatabasesService(); - return await databasesService.listAttributes( - args.databaseId, - args.collectionId, - args.queries || [], - ); - }, - { - databaseId, - collectionId, - }, - 100, - "attributes", - ); - - const ready = attributeKeys.filter((attribute: any) => - attributes.includes(attribute.key), - ); - - if (ready.length === 0) { - return true; - } - - await new Promise((resolve) => setTimeout(resolve, POLL_DEBOUNCE)); - - return await awaitPools.expectAttributes( - databaseId, - collectionId, - attributeKeys, - iteration + 1, - ); - }, - expectAttributes: async ( - databaseId: string, - collectionId: string, - attributeKeys: string[], - iteration: number = 1, - ): Promise => { - if (iteration > pollMaxDebounces) { - return false; - } - - if (pollMaxDebounces === POLL_DEFAULT_VALUE) { - let steps = Math.max(1, Math.ceil(attributeKeys.length / STEP_SIZE)); - if (steps > 1 && iteration === 1) { - pollMaxDebounces *= steps; - - log( - "Creating a large number of attributes, increasing timeout to " + - (pollMaxDebounces * POLL_DEBOUNCE) / 1000 / 60 + - " minutes", - ); - } - } - - const { attributes } = await paginate( - async (args: any) => { - const databasesService = await getDatabasesService(); - return await databasesService.listAttributes( - args.databaseId, - args.collectionId, - args.queries || [], - ); - }, - { - databaseId, - collectionId, - }, - 100, - "attributes", - ); - - const ready = attributes - .filter((attribute: any) => { - if (attributeKeys.includes(attribute.key)) { - if (["stuck", "failed"].includes(attribute.status)) { - throw new Error(`Attribute '${attribute.key}' failed!`); - } - - return attribute.status === "available"; - } - - return false; - }) - .map((attribute: any) => attribute.key); - - if (ready.length === attributeKeys.length) { - return true; - } - - await new Promise((resolve) => setTimeout(resolve, POLL_DEBOUNCE)); - - return await awaitPools.expectAttributes( - databaseId, - collectionId, - attributeKeys, - iteration + 1, - ); - }, - deleteIndexes: async ( - databaseId: string, - collectionId: string, - indexesKeys: any[], - iteration: number = 1, - ): Promise => { - if (iteration > pollMaxDebounces) { - return false; - } - - if (pollMaxDebounces === POLL_DEFAULT_VALUE) { - let steps = Math.max(1, Math.ceil(indexesKeys.length / STEP_SIZE)); - if (steps > 1 && iteration === 1) { - pollMaxDebounces *= steps; - - log( - "Found a large number of indexes to be deleted. 
Increasing timeout to " + - (pollMaxDebounces * POLL_DEBOUNCE) / 1000 / 60 + - " minutes", - ); - } - } - - const { indexes } = await paginate( - async (args: any) => { - const databasesService = await getDatabasesService(); - return await databasesService.listIndexes( - args.databaseId, - args.collectionId, - args.queries || [], - ); - }, - { - databaseId, - collectionId, - }, - 100, - "indexes", - ); - - const ready = indexesKeys.filter((index: any) => - indexes.includes(index.key), - ); - - if (ready.length === 0) { - return true; - } - - await new Promise((resolve) => setTimeout(resolve, POLL_DEBOUNCE)); - - return await awaitPools.expectIndexes( - databaseId, - collectionId, - indexesKeys, - iteration + 1, - ); - }, - expectIndexes: async ( - databaseId: string, - collectionId: string, - indexKeys: string[], - iteration: number = 1, - ): Promise => { - if (iteration > pollMaxDebounces) { - return false; - } - - if (pollMaxDebounces === POLL_DEFAULT_VALUE) { - let steps = Math.max(1, Math.ceil(indexKeys.length / STEP_SIZE)); - if (steps > 1 && iteration === 1) { - pollMaxDebounces *= steps; - - log( - "Creating a large number of indexes, increasing timeout to " + - (pollMaxDebounces * POLL_DEBOUNCE) / 1000 / 60 + - " minutes", - ); - } - } - - const { indexes } = await paginate( - async (args: any) => { - const databasesService = await getDatabasesService(); - return await databasesService.listIndexes( - args.databaseId, - args.collectionId, - args.queries || [], - ); - }, - { - databaseId, - collectionId, - }, - 100, - "indexes", - ); - - const ready = indexes - .filter((index: any) => { - if (indexKeys.includes(index.key)) { - if (["stuck", "failed"].includes(index.status)) { - throw new Error(`Index '${index.key}' failed!`); - } - - return index.status === "available"; - } - - return false; - }) - .map((index: any) => index.key); - - if (ready.length >= indexKeys.length) { - return true; - } - - await new Promise((resolve) => setTimeout(resolve, POLL_DEBOUNCE)); - - return await awaitPools.expectIndexes( - databaseId, - collectionId, - indexKeys, - iteration + 1, - ); - }, -}; - const getConfirmation = async (): Promise => { if (cliConfig.force) { return true; @@ -522,6 +131,7 @@ const getConfirmation = async (): Promise => { warn("Skipping push action. 
Changes were not applied."); return false; }; + const isEmpty = (value: any): boolean => value === null || value === undefined || @@ -650,631 +260,6 @@ const getObjectChanges = >( return changes; }; -const createAttribute = async ( - databaseId: string, - collectionId: string, - attribute: any, -): Promise => { - const databasesService = await getDatabasesService(); - switch (attribute.type) { - case "string": - switch (attribute.format) { - case "email": - return databasesService.createEmailAttribute({ - databaseId, - collectionId, - key: attribute.key, - required: attribute.required, - xdefault: attribute.default, - array: attribute.array, - }); - case "url": - return databasesService.createUrlAttribute({ - databaseId, - collectionId, - key: attribute.key, - required: attribute.required, - xdefault: attribute.default, - array: attribute.array, - }); - case "ip": - return databasesService.createIpAttribute({ - databaseId, - collectionId, - key: attribute.key, - required: attribute.required, - xdefault: attribute.default, - array: attribute.array, - }); - case "enum": - return databasesService.createEnumAttribute({ - databaseId, - collectionId, - key: attribute.key, - elements: attribute.elements, - required: attribute.required, - xdefault: attribute.default, - array: attribute.array, - }); - default: - return databasesService.createStringAttribute({ - databaseId, - collectionId, - key: attribute.key, - size: attribute.size, - required: attribute.required, - xdefault: attribute.default, - array: attribute.array, - encrypt: attribute.encrypt, - }); - } - case "integer": - return databasesService.createIntegerAttribute({ - databaseId, - collectionId, - key: attribute.key, - required: attribute.required, - min: attribute.min, - max: attribute.max, - xdefault: attribute.default, - array: attribute.array, - }); - case "double": - return databasesService.createFloatAttribute({ - databaseId, - collectionId, - key: attribute.key, - required: attribute.required, - min: attribute.min, - max: attribute.max, - xdefault: attribute.default, - array: attribute.array, - }); - case "boolean": - return databasesService.createBooleanAttribute({ - databaseId, - collectionId, - key: attribute.key, - required: attribute.required, - xdefault: attribute.default, - array: attribute.array, - }); - case "datetime": - return databasesService.createDatetimeAttribute({ - databaseId, - collectionId, - key: attribute.key, - required: attribute.required, - xdefault: attribute.default, - array: attribute.array, - }); - case "relationship": - return databasesService.createRelationshipAttribute({ - databaseId, - collectionId, - relatedCollectionId: - attribute.relatedTable ?? 
attribute.relatedCollection, - type: attribute.relationType, - twoWay: attribute.twoWay, - key: attribute.key, - twoWayKey: attribute.twoWayKey, - onDelete: attribute.onDelete, - }); - case "point": - return databasesService.createPointAttribute({ - databaseId, - collectionId, - key: attribute.key, - required: attribute.required, - xdefault: attribute.default, - }); - case "linestring": - return databasesService.createLineAttribute({ - databaseId, - collectionId, - key: attribute.key, - required: attribute.required, - xdefault: attribute.default, - }); - case "polygon": - return databasesService.createPolygonAttribute({ - databaseId, - collectionId, - key: attribute.key, - required: attribute.required, - xdefault: attribute.default, - }); - default: - throw new Error(`Unsupported attribute type: ${attribute.type}`); - } -}; - -const updateAttribute = async ( - databaseId: string, - collectionId: string, - attribute: any, -): Promise => { - const databasesService = await getDatabasesService(); - switch (attribute.type) { - case "string": - switch (attribute.format) { - case "email": - return databasesService.updateEmailAttribute({ - databaseId, - collectionId, - key: attribute.key, - required: attribute.required, - xdefault: attribute.default, - }); - case "url": - return databasesService.updateUrlAttribute({ - databaseId, - collectionId, - key: attribute.key, - required: attribute.required, - xdefault: attribute.default, - }); - case "ip": - return databasesService.updateIpAttribute({ - databaseId, - collectionId, - key: attribute.key, - required: attribute.required, - xdefault: attribute.default, - }); - case "enum": - return databasesService.updateEnumAttribute({ - databaseId, - collectionId, - key: attribute.key, - elements: attribute.elements, - required: attribute.required, - xdefault: attribute.default, - }); - default: - return databasesService.updateStringAttribute({ - databaseId, - collectionId, - key: attribute.key, - required: attribute.required, - xdefault: attribute.default, - }); - } - case "integer": - return databasesService.updateIntegerAttribute({ - databaseId, - collectionId, - key: attribute.key, - required: attribute.required, - min: attribute.min, - max: attribute.max, - xdefault: attribute.default, - }); - case "double": - return databasesService.updateFloatAttribute({ - databaseId, - collectionId, - key: attribute.key, - required: attribute.required, - min: attribute.min, - max: attribute.max, - xdefault: attribute.default, - }); - case "boolean": - return databasesService.updateBooleanAttribute({ - databaseId, - collectionId, - key: attribute.key, - required: attribute.required, - xdefault: attribute.default, - }); - case "datetime": - return databasesService.updateDatetimeAttribute({ - databaseId, - collectionId, - key: attribute.key, - required: attribute.required, - xdefault: attribute.default, - }); - case "relationship": - return databasesService.updateRelationshipAttribute({ - databaseId, - collectionId, - key: attribute.key, - onDelete: attribute.onDelete, - }); - case "point": - return databasesService.updatePointAttribute({ - databaseId, - collectionId, - key: attribute.key, - required: attribute.required, - xdefault: attribute.default, - }); - case "linestring": - return databasesService.updateLineAttribute({ - databaseId, - collectionId, - key: attribute.key, - required: attribute.required, - xdefault: attribute.default, - }); - case "polygon": - return databasesService.updatePolygonAttribute({ - databaseId, - collectionId, - key: attribute.key, - 
required: attribute.required, - xdefault: attribute.default, - }); - default: - throw new Error(`Unsupported attribute type: ${attribute.type}`); - } -}; -const deleteAttribute = async ( - collection: any, - attribute: any, - isIndex: boolean = false, -): Promise => { - log( - `Deleting ${isIndex ? "index" : "attribute"} ${attribute.key} of ${collection.name} ( ${collection["$id"]} )`, - ); - - const databasesService = await getDatabasesService(); - if (isIndex) { - await databasesService.deleteIndex( - collection["databaseId"], - collection["$id"], - attribute.key, - ); - return; - } - - await databasesService.deleteAttribute( - collection["databaseId"], - collection["$id"], - attribute.key, - ); -}; - -const isEqual = (a: any, b: any): boolean => { - if (a === b) return true; - - if (a && b && typeof a === "object" && typeof b === "object") { - if ( - a.constructor && - a.constructor.name === "BigNumber" && - b.constructor && - b.constructor.name === "BigNumber" - ) { - return a.eq(b); - } - - if (typeof a.equals === "function") { - return a.equals(b); - } - - if (typeof a.eq === "function") { - return a.eq(b); - } - } - - if (typeof a === "number" && typeof b === "number") { - if (isNaN(a) && isNaN(b)) return true; - if (!isFinite(a) && !isFinite(b)) return a === b; - return Math.abs(a - b) < Number.EPSILON; - } - - return false; -}; - -const compareAttribute = ( - remote: any, - local: any, - reason: string, - key: string, -): string => { - if (isEmpty(remote) && isEmpty(local)) { - return reason; - } - - if (Array.isArray(remote) && Array.isArray(local)) { - if (JSON.stringify(remote) !== JSON.stringify(local)) { - const bol = reason === "" ? "" : "\n"; - reason += `${bol}${key} changed from ${chalk.red(remote)} to ${chalk.green(local)}`; - } - } else if (!isEqual(remote, local)) { - const bol = reason === "" ? "" : "\n"; - reason += `${bol}${key} changed from ${chalk.red(remote)} to ${chalk.green(local)}`; - } - - return reason; -}; - -/** - * Check if attribute non-changeable fields has been changed - * If so return the differences as an object. - */ -const checkAttributeChanges = ( - remote: any, - local: any, - collection: any, - recreating: boolean = true, -): AttributeChange | undefined => { - if (local === undefined) { - return undefined; - } - - const keyName = `${chalk.yellow(local.key)} in ${collection.name} (${collection["$id"]})`; - const action = chalk.cyan(recreating ? "recreating" : "changing"); - let reason = ""; - let attribute = recreating ? remote : local; - - for (let key of Object.keys(remote)) { - if (!KeysAttributes.has(key)) { - continue; - } - - if (changeableKeys.includes(key)) { - if (!recreating) { - reason = compareAttribute(remote[key], local[key], reason, key); - } - continue; - } - - if (!recreating) { - continue; - } - - reason = compareAttribute(remote[key], local[key], reason, key); - } - - return reason === "" - ? undefined - : { key: keyName, attribute, reason, action }; -}; - -/** - * Check if attributes contain the given attribute - */ -const attributesContains = (attribute: any, attributes: any[]): any => - attributes.find((attr) => attr.key === attribute.key); - -const generateChangesObject = ( - attribute: any, - collection: any, - isAdding: boolean, -): AttributeChange => { - return { - key: `${chalk.yellow(attribute.key)} in ${collection.name} (${collection["$id"]})`, - attribute: attribute, - reason: isAdding - ? "Field isn't present on the remote server" - : "Field isn't present on the appwrite.config.json file", - action: isAdding ? 
chalk.green("adding") : chalk.red("deleting"), - }; -}; - -/** - * Filter deleted and recreated attributes, - * return list of attributes to create - */ -const attributesToCreate = async ( - remoteAttributes: any[], - localAttributes: any[], - collection: any, - isIndex: boolean = false, -): Promise => { - const deleting = remoteAttributes - .filter((attribute) => !attributesContains(attribute, localAttributes)) - .map((attr) => generateChangesObject(attr, collection, false)); - const adding = localAttributes - .filter((attribute) => !attributesContains(attribute, remoteAttributes)) - .map((attr) => generateChangesObject(attr, collection, true)); - const conflicts = remoteAttributes - .map((attribute) => - checkAttributeChanges( - attribute, - attributesContains(attribute, localAttributes), - collection, - ), - ) - .filter((attribute) => attribute !== undefined) as AttributeChange[]; - const changes = remoteAttributes - .map((attribute) => - checkAttributeChanges( - attribute, - attributesContains(attribute, localAttributes), - collection, - false, - ), - ) - .filter((attribute) => attribute !== undefined) - .filter( - (attribute) => - conflicts.filter((attr) => attribute!.key === attr.key).length !== 1, - ) as AttributeChange[]; - - let changedAttributes: any[] = []; - const changing = [...deleting, ...adding, ...conflicts, ...changes]; - if (changing.length === 0) { - return changedAttributes; - } - - log( - !cliConfig.force - ? "There are pending changes in your collection deployment" - : "List of applied changes", - ); - - drawTable( - changing.map((change) => { - return { Key: change.key, Action: change.action, Reason: change.reason }; - }), - ); - - if (!cliConfig.force) { - if (deleting.length > 0 && !isIndex) { - console.log( - `${chalk.red("------------------------------------------------------")}`, - ); - console.log( - `${chalk.red("| WARNING: Attribute deletion may cause loss of data |")}`, - ); - console.log( - `${chalk.red("------------------------------------------------------")}`, - ); - console.log(); - } - if (conflicts.length > 0 && !isIndex) { - console.log( - `${chalk.red("--------------------------------------------------------")}`, - ); - console.log( - `${chalk.red("| WARNING: Attribute recreation may cause loss of data |")}`, - ); - console.log( - `${chalk.red("--------------------------------------------------------")}`, - ); - console.log(); - } - - if ((await getConfirmation()) !== true) { - return changedAttributes; - } - } - - if (conflicts.length > 0) { - changedAttributes = conflicts.map((change) => change.attribute); - await Promise.all( - changedAttributes.map((changed) => - deleteAttribute(collection, changed, isIndex), - ), - ); - remoteAttributes = remoteAttributes.filter( - (attribute) => !attributesContains(attribute, changedAttributes), - ); - } - - if (changes.length > 0) { - changedAttributes = changes.map((change) => change.attribute); - await Promise.all( - changedAttributes.map((changed) => - updateAttribute(collection["databaseId"], collection["$id"], changed), - ), - ); - } - - const deletingAttributes = deleting.map((change) => change.attribute); - await Promise.all( - deletingAttributes.map((attribute) => - deleteAttribute(collection, attribute, isIndex), - ), - ); - const attributeKeys = [ - ...remoteAttributes.map((attribute: any) => attribute.key), - ...deletingAttributes.map((attribute: any) => attribute.key), - ]; - - if (attributeKeys.length) { - const deleteAttributesPoolStatus = await awaitPools.deleteAttributes( - 
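    // Wait until the deleted keys stop appearing on the remote collection;
    // a false result below is reported as a deletion timeout.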
collection["databaseId"], - collection["$id"], - attributeKeys, - ); - - if (!deleteAttributesPoolStatus) { - throw new Error("Attribute deletion timed out."); - } - } - - return localAttributes.filter( - (attribute) => !attributesContains(attribute, remoteAttributes), - ); -}; - -const createIndexes = async ( - indexes: any[], - collection: any, -): Promise => { - log(`Creating indexes ...`); - - const databasesService = await getDatabasesService(); - for (let index of indexes) { - await databasesService.createIndex( - collection["databaseId"], - collection["$id"], - index.key, - index.type, - index.columns ?? index.attributes, - index.orders, - ); - } - - const result = await awaitPools.expectIndexes( - collection["databaseId"], - collection["$id"], - indexes.map((index: any) => index.key), - ); - - if (!result) { - throw new Error("Index creation timed out."); - } - - success(`Created ${indexes.length} indexes`); -}; - -const createAttributes = async ( - attributes: any[], - collection: any, -): Promise => { - for (let attribute of attributes) { - if (attribute.side !== "child") { - await createAttribute( - collection["databaseId"], - collection["$id"], - attribute, - ); - } - } - - const result = await awaitPools.expectAttributes( - collection["databaseId"], - collection["$id"], - collection.attributes - .filter((attribute: any) => attribute.side !== "child") - .map((attribute: any) => attribute.key), - ); - - if (!result) { - throw new Error(`Attribute creation timed out.`); - } - - success(`Created ${attributes.length} attributes`); -}; - -const createColumns = async (columns: any[], table: any): Promise => { - for (let column of columns) { - if (column.side !== "child") { - await createAttribute(table["databaseId"], table["$id"], column); - } - } - - const result = await awaitPools.expectAttributes( - table["databaseId"], - table["$id"], - table.columns - .filter((column: any) => column.side !== "child") - .map((column: any) => column.key), - ); - - if (!result) { - throw new Error(`Column creation timed out.`); - } - - success(`Created ${columns.length} columns`); -}; - const pushResources = async ({ skipDeprecated = false, }: PushResourcesOptions = {}): Promise => { @@ -2402,6 +1387,7 @@ const pushTable = async ({ if (attempts) { pollMaxDebounces = attempts; + pools.setPollMaxDebounces(attempts); } const { applied: tablesDBApplied, resyncNeeded } = @@ -2619,15 +1605,15 @@ const pushTable = async ({ let indexes = table.indexes; if (table.isExisted) { - columns = await attributesToCreate( + columns = await attributes.attributesToCreate( table.remoteVersion.columns, table.columns, - table, + table as Collection, ); - indexes = await attributesToCreate( + indexes = await attributes.attributesToCreate( table.remoteVersion.indexes, table.indexes, - table, + table as Collection, true, ); @@ -2646,13 +1632,13 @@ const pushTable = async ({ ); try { - await createColumns(columns, table); + await attributes.createColumns(columns, table as Collection); } catch (e) { throw e; } try { - await createIndexes(indexes, table); + await attributes.createIndexes(indexes, table as Collection); } catch (e) { throw e; } @@ -2671,6 +1657,7 @@ const pushCollection = async ({ attempts }): Promise => { if (attempts) { pollMaxDebounces = attempts; + pools.setPollMaxDebounces(attempts); } if (cliConfig.all) { @@ -2794,25 +1781,25 @@ const pushCollection = async ({ attempts }): Promise => { let numberOfCollections = 0; // Serialize attribute actions for (let collection of collections) { - let attributes = 
collection.attributes; + let collectionAttributes = collection.attributes; let indexes = collection.indexes; if (collection.isExisted) { - attributes = await attributesToCreate( + collectionAttributes = await attributes.attributesToCreate( collection.remoteVersion.attributes, collection.attributes, - collection, + collection as Collection, ); - indexes = await attributesToCreate( + indexes = await attributes.attributesToCreate( collection.remoteVersion.indexes, collection.indexes, - collection, + collection as Collection, true, ); if ( - Array.isArray(attributes) && - attributes.length <= 0 && + Array.isArray(collectionAttributes) && + collectionAttributes.length <= 0 && Array.isArray(indexes) && indexes.length <= 0 ) { @@ -2825,13 +1812,16 @@ const pushCollection = async ({ attempts }): Promise => { ); try { - await createAttributes(attributes, collection); + await attributes.createAttributes( + collectionAttributes, + collection as Collection, + ); } catch (e) { throw e; } try { - await createIndexes(indexes, collection); + await attributes.createIndexes(indexes, collection as Collection); } catch (e) { throw e; } diff --git a/lib/commands/update.ts b/lib/commands/update.ts index f4445b90..af2f8816 100644 --- a/lib/commands/update.ts +++ b/lib/commands/update.ts @@ -1,18 +1,8 @@ -import fs from "fs"; -import path from "path"; import { spawn } from "child_process"; import { Command } from "commander"; import chalk from "chalk"; import inquirer from "inquirer"; -import { - success, - log, - warn, - error, - hint, - actionRunner, - commandDescriptions, -} from "../parser.js"; +import { success, log, warn, error, hint, actionRunner } from "../parser.js"; import { getLatestVersion, compareVersions } from "../utils.js"; import packageJson from "../../package.json" with { type: "json" }; const { version } = packageJson; diff --git a/lib/commands/utils/attributes.ts b/lib/commands/utils/attributes.ts new file mode 100644 index 00000000..a9226455 --- /dev/null +++ b/lib/commands/utils/attributes.ts @@ -0,0 +1,733 @@ +import chalk from "chalk"; +import { getDatabasesService } from "../../services.js"; +import { KeysAttributes } from "../../config.js"; +import { log, success, cliConfig, drawTable } from "../../parser.js"; +import { Pools } from "./pools.js"; +import inquirer from "inquirer"; + +const changeableKeys = [ + "status", + "required", + "xdefault", + "elements", + "min", + "max", + "default", + "error", +]; + +export interface AttributeChange { + key: string; + attribute: any; + reason: string; + action: string; +} + +export interface Collection { + $id: string; + databaseId: string; + name: string; + attributes?: any[]; + indexes?: any[]; + columns?: any[]; + [key: string]: any; +} + +const questionPushChanges = [ + { + type: "input", + name: "changes", + message: 'Type "YES" to confirm or "NO" to cancel:', + }, +]; + +const questionPushChangesConfirmation = [ + { + type: "input", + name: "changes", + message: + 'Incorrect answer. 
Please type "YES" to confirm or "NO" to cancel:', + }, +]; + +export class Attributes { + private pools: Pools; + + constructor(pools?: Pools) { + this.pools = pools || new Pools(); + } + + private getConfirmation = async (): Promise => { + if (cliConfig.force) { + return true; + } + + async function fixConfirmation(): Promise { + const answers = await inquirer.prompt(questionPushChangesConfirmation); + if (answers.changes !== "YES" && answers.changes !== "NO") { + return await fixConfirmation(); + } + + return answers.changes; + } + + let answers = await inquirer.prompt(questionPushChanges); + + if (answers.changes !== "YES" && answers.changes !== "NO") { + answers.changes = await fixConfirmation(); + } + + if (answers.changes === "YES") { + return true; + } + + return false; + }; + + private isEmpty = (value: any): boolean => + value === null || + value === undefined || + (typeof value === "string" && value.trim().length === 0) || + (Array.isArray(value) && value.length === 0); + + private isEqual = (a: any, b: any): boolean => { + if (a === b) return true; + + if (a && b && typeof a === "object" && typeof b === "object") { + if ( + a.constructor && + a.constructor.name === "BigNumber" && + b.constructor && + b.constructor.name === "BigNumber" + ) { + return a.eq(b); + } + + if (typeof a.equals === "function") { + return a.equals(b); + } + + if (typeof a.eq === "function") { + return a.eq(b); + } + } + + if (typeof a === "number" && typeof b === "number") { + if (isNaN(a) && isNaN(b)) return true; + if (!isFinite(a) && !isFinite(b)) return a === b; + return Math.abs(a - b) < Number.EPSILON; + } + + return false; + }; + + private compareAttribute = ( + remote: any, + local: any, + reason: string, + key: string, + ): string => { + if (this.isEmpty(remote) && this.isEmpty(local)) { + return reason; + } + + if (Array.isArray(remote) && Array.isArray(local)) { + if (JSON.stringify(remote) !== JSON.stringify(local)) { + const bol = reason === "" ? "" : "\n"; + reason += `${bol}${key} changed from ${chalk.red(remote)} to ${chalk.green(local)}`; + } + } else if (!this.isEqual(remote, local)) { + const bol = reason === "" ? "" : "\n"; + reason += `${bol}${key} changed from ${chalk.red(remote)} to ${chalk.green(local)}`; + } + + return reason; + }; + + /** + * Check if attribute non-changeable fields has been changed + * If so return the differences as an object. + */ + private checkAttributeChanges = ( + remote: any, + local: any, + collection: Collection, + recreating: boolean = true, + ): AttributeChange | undefined => { + if (local === undefined) { + return undefined; + } + + const keyName = `${chalk.yellow(local.key)} in ${collection.name} (${collection["$id"]})`; + const action = chalk.cyan(recreating ? "recreating" : "changing"); + let reason = ""; + let attribute = recreating ? remote : local; + + for (let key of Object.keys(remote)) { + if (!KeysAttributes.has(key)) { + continue; + } + + if (changeableKeys.includes(key)) { + if (!recreating) { + reason = this.compareAttribute(remote[key], local[key], reason, key); + } + continue; + } + + if (!recreating) { + continue; + } + + reason = this.compareAttribute(remote[key], local[key], reason, key); + } + + return reason === "" + ? 
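    // An empty reason means no tracked key differed, so this attribute
    // produces no change entry.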
undefined + : { key: keyName, attribute, reason, action }; + }; + + /** + * Check if attributes contain the given attribute + */ + private attributesContains = (attribute: any, attributes: any[]): any => + attributes.find((attr) => attr.key === attribute.key); + + private generateChangesObject = ( + attribute: any, + collection: Collection, + isAdding: boolean, + ): AttributeChange => { + return { + key: `${chalk.yellow(attribute.key)} in ${collection.name} (${collection["$id"]})`, + attribute: attribute, + reason: isAdding + ? "Field isn't present on the remote server" + : "Field isn't present on the appwrite.config.json file", + action: isAdding ? chalk.green("adding") : chalk.red("deleting"), + }; + }; + + public createAttribute = async ( + databaseId: string, + collectionId: string, + attribute: any, + ): Promise => { + const databasesService = await getDatabasesService(); + switch (attribute.type) { + case "string": + switch (attribute.format) { + case "email": + return databasesService.createEmailAttribute({ + databaseId, + collectionId, + key: attribute.key, + required: attribute.required, + xdefault: attribute.default, + array: attribute.array, + }); + case "url": + return databasesService.createUrlAttribute({ + databaseId, + collectionId, + key: attribute.key, + required: attribute.required, + xdefault: attribute.default, + array: attribute.array, + }); + case "ip": + return databasesService.createIpAttribute({ + databaseId, + collectionId, + key: attribute.key, + required: attribute.required, + xdefault: attribute.default, + array: attribute.array, + }); + case "enum": + return databasesService.createEnumAttribute({ + databaseId, + collectionId, + key: attribute.key, + elements: attribute.elements, + required: attribute.required, + xdefault: attribute.default, + array: attribute.array, + }); + default: + return databasesService.createStringAttribute({ + databaseId, + collectionId, + key: attribute.key, + size: attribute.size, + required: attribute.required, + xdefault: attribute.default, + array: attribute.array, + encrypt: attribute.encrypt, + }); + } + case "integer": + return databasesService.createIntegerAttribute({ + databaseId, + collectionId, + key: attribute.key, + required: attribute.required, + min: attribute.min, + max: attribute.max, + xdefault: attribute.default, + array: attribute.array, + }); + case "double": + return databasesService.createFloatAttribute({ + databaseId, + collectionId, + key: attribute.key, + required: attribute.required, + min: attribute.min, + max: attribute.max, + xdefault: attribute.default, + array: attribute.array, + }); + case "boolean": + return databasesService.createBooleanAttribute({ + databaseId, + collectionId, + key: attribute.key, + required: attribute.required, + xdefault: attribute.default, + array: attribute.array, + }); + case "datetime": + return databasesService.createDatetimeAttribute({ + databaseId, + collectionId, + key: attribute.key, + required: attribute.required, + xdefault: attribute.default, + array: attribute.array, + }); + case "relationship": + return databasesService.createRelationshipAttribute({ + databaseId, + collectionId, + relatedCollectionId: + attribute.relatedTable ?? 
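            // Prefer `relatedTable` when the config provides it, falling back to
            // the `relatedCollection` key used by collection configs.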
attribute.relatedCollection, + type: attribute.relationType, + twoWay: attribute.twoWay, + key: attribute.key, + twoWayKey: attribute.twoWayKey, + onDelete: attribute.onDelete, + }); + case "point": + return databasesService.createPointAttribute({ + databaseId, + collectionId, + key: attribute.key, + required: attribute.required, + xdefault: attribute.default, + }); + case "linestring": + return databasesService.createLineAttribute({ + databaseId, + collectionId, + key: attribute.key, + required: attribute.required, + xdefault: attribute.default, + }); + case "polygon": + return databasesService.createPolygonAttribute({ + databaseId, + collectionId, + key: attribute.key, + required: attribute.required, + xdefault: attribute.default, + }); + default: + throw new Error(`Unsupported attribute type: ${attribute.type}`); + } + }; + + public updateAttribute = async ( + databaseId: string, + collectionId: string, + attribute: any, + ): Promise => { + const databasesService = await getDatabasesService(); + switch (attribute.type) { + case "string": + switch (attribute.format) { + case "email": + return databasesService.updateEmailAttribute({ + databaseId, + collectionId, + key: attribute.key, + required: attribute.required, + xdefault: attribute.default, + }); + case "url": + return databasesService.updateUrlAttribute({ + databaseId, + collectionId, + key: attribute.key, + required: attribute.required, + xdefault: attribute.default, + }); + case "ip": + return databasesService.updateIpAttribute({ + databaseId, + collectionId, + key: attribute.key, + required: attribute.required, + xdefault: attribute.default, + }); + case "enum": + return databasesService.updateEnumAttribute({ + databaseId, + collectionId, + key: attribute.key, + elements: attribute.elements, + required: attribute.required, + xdefault: attribute.default, + }); + default: + return databasesService.updateStringAttribute({ + databaseId, + collectionId, + key: attribute.key, + required: attribute.required, + xdefault: attribute.default, + }); + } + case "integer": + return databasesService.updateIntegerAttribute({ + databaseId, + collectionId, + key: attribute.key, + required: attribute.required, + min: attribute.min, + max: attribute.max, + xdefault: attribute.default, + }); + case "double": + return databasesService.updateFloatAttribute({ + databaseId, + collectionId, + key: attribute.key, + required: attribute.required, + min: attribute.min, + max: attribute.max, + xdefault: attribute.default, + }); + case "boolean": + return databasesService.updateBooleanAttribute({ + databaseId, + collectionId, + key: attribute.key, + required: attribute.required, + xdefault: attribute.default, + }); + case "datetime": + return databasesService.updateDatetimeAttribute({ + databaseId, + collectionId, + key: attribute.key, + required: attribute.required, + xdefault: attribute.default, + }); + case "relationship": + return databasesService.updateRelationshipAttribute({ + databaseId, + collectionId, + key: attribute.key, + onDelete: attribute.onDelete, + }); + case "point": + return databasesService.updatePointAttribute({ + databaseId, + collectionId, + key: attribute.key, + required: attribute.required, + xdefault: attribute.default, + }); + case "linestring": + return databasesService.updateLineAttribute({ + databaseId, + collectionId, + key: attribute.key, + required: attribute.required, + xdefault: attribute.default, + }); + case "polygon": + return databasesService.updatePolygonAttribute({ + databaseId, + collectionId, + key: attribute.key, + 
required: attribute.required, + xdefault: attribute.default, + }); + default: + throw new Error(`Unsupported attribute type: ${attribute.type}`); + } + }; + + public deleteAttribute = async ( + collection: Collection, + attribute: any, + isIndex: boolean = false, + ): Promise => { + log( + `Deleting ${isIndex ? "index" : "attribute"} ${attribute.key} of ${collection.name} ( ${collection["$id"]} )`, + ); + + const databasesService = await getDatabasesService(); + if (isIndex) { + await databasesService.deleteIndex( + collection["databaseId"], + collection["$id"], + attribute.key, + ); + return; + } + + await databasesService.deleteAttribute( + collection["databaseId"], + collection["$id"], + attribute.key, + ); + }; + + /** + * Filter deleted and recreated attributes, + * return list of attributes to create + */ + public attributesToCreate = async ( + remoteAttributes: any[], + localAttributes: any[], + collection: Collection, + isIndex: boolean = false, + ): Promise => { + const deleting = remoteAttributes + .filter( + (attribute) => !this.attributesContains(attribute, localAttributes), + ) + .map((attr) => this.generateChangesObject(attr, collection, false)); + const adding = localAttributes + .filter( + (attribute) => !this.attributesContains(attribute, remoteAttributes), + ) + .map((attr) => this.generateChangesObject(attr, collection, true)); + const conflicts = remoteAttributes + .map((attribute) => + this.checkAttributeChanges( + attribute, + this.attributesContains(attribute, localAttributes), + collection, + ), + ) + .filter((attribute) => attribute !== undefined) as AttributeChange[]; + const changes = remoteAttributes + .map((attribute) => + this.checkAttributeChanges( + attribute, + this.attributesContains(attribute, localAttributes), + collection, + false, + ), + ) + .filter((attribute) => attribute !== undefined) + .filter( + (attribute) => + conflicts.filter((attr) => attribute!.key === attr.key).length !== 1, + ) as AttributeChange[]; + + let changedAttributes: any[] = []; + const changing = [...deleting, ...adding, ...conflicts, ...changes]; + if (changing.length === 0) { + return changedAttributes; + } + + log( + !cliConfig.force + ? 
"There are pending changes in your collection deployment" + : "List of applied changes", + ); + + drawTable( + changing.map((change) => { + return { + Key: change.key, + Action: change.action, + Reason: change.reason, + }; + }), + ); + + if (!cliConfig.force) { + if (deleting.length > 0 && !isIndex) { + console.log( + `${chalk.red("------------------------------------------------------")}`, + ); + console.log( + `${chalk.red("| WARNING: Attribute deletion may cause loss of data |")}`, + ); + console.log( + `${chalk.red("------------------------------------------------------")}`, + ); + console.log(); + } + if (conflicts.length > 0 && !isIndex) { + console.log( + `${chalk.red("--------------------------------------------------------")}`, + ); + console.log( + `${chalk.red("| WARNING: Attribute recreation may cause loss of data |")}`, + ); + console.log( + `${chalk.red("--------------------------------------------------------")}`, + ); + console.log(); + } + + if ((await this.getConfirmation()) !== true) { + return changedAttributes; + } + } + + if (conflicts.length > 0) { + changedAttributes = conflicts.map((change) => change.attribute); + await Promise.all( + changedAttributes.map((changed) => + this.deleteAttribute(collection, changed, isIndex), + ), + ); + remoteAttributes = remoteAttributes.filter( + (attribute) => !this.attributesContains(attribute, changedAttributes), + ); + } + + if (changes.length > 0) { + changedAttributes = changes.map((change) => change.attribute); + await Promise.all( + changedAttributes.map((changed) => + this.updateAttribute( + collection["databaseId"], + collection["$id"], + changed, + ), + ), + ); + } + + const deletingAttributes = deleting.map((change) => change.attribute); + await Promise.all( + deletingAttributes.map((attribute) => + this.deleteAttribute(collection, attribute, isIndex), + ), + ); + const attributeKeys = [ + ...remoteAttributes.map((attribute: any) => attribute.key), + ...deletingAttributes.map((attribute: any) => attribute.key), + ]; + + if (attributeKeys.length) { + const deleteAttributesPoolStatus = await this.pools.deleteAttributes( + collection["databaseId"], + collection["$id"], + attributeKeys, + ); + + if (!deleteAttributesPoolStatus) { + throw new Error("Attribute deletion timed out."); + } + } + + return localAttributes.filter( + (attribute) => !this.attributesContains(attribute, remoteAttributes), + ); + }; + + public createIndexes = async ( + indexes: any[], + collection: Collection, + ): Promise => { + log(`Creating indexes ...`); + + const databasesService = await getDatabasesService(); + for (let index of indexes) { + await databasesService.createIndex( + collection["databaseId"], + collection["$id"], + index.key, + index.type, + index.columns ?? 
index.attributes, + index.orders, + ); + } + + const result = await this.pools.expectIndexes( + collection["databaseId"], + collection["$id"], + indexes.map((index: any) => index.key), + ); + + if (!result) { + throw new Error("Index creation timed out."); + } + + success(`Created ${indexes.length} indexes`); + }; + + public createAttributes = async ( + attributes: any[], + collection: Collection, + ): Promise => { + for (let attribute of attributes) { + if (attribute.side !== "child") { + await this.createAttribute( + collection["databaseId"], + collection["$id"], + attribute, + ); + } + } + + const result = await this.pools.expectAttributes( + collection["databaseId"], + collection["$id"], + (collection.attributes || []) + .filter((attribute: any) => attribute.side !== "child") + .map((attribute: any) => attribute.key), + ); + + if (!result) { + throw new Error(`Attribute creation timed out.`); + } + + success(`Created ${attributes.length} attributes`); + }; + + public createColumns = async ( + columns: any[], + table: Collection, + ): Promise => { + for (let column of columns) { + if (column.side !== "child") { + await this.createAttribute(table["databaseId"], table["$id"], column); + } + } + + const result = await this.pools.expectAttributes( + table["databaseId"], + table["$id"], + (table.columns || []) + .filter((column: any) => column.side !== "child") + .map((column: any) => column.key), + ); + + if (!result) { + throw new Error(`Column creation timed out.`); + } + + success(`Created ${columns.length} columns`); + }; +} diff --git a/lib/commands/utils/pools.ts b/lib/commands/utils/pools.ts new file mode 100644 index 00000000..84dabb60 --- /dev/null +++ b/lib/commands/utils/pools.ts @@ -0,0 +1,354 @@ +import { getDatabasesService, getTablesDBService } from "../../services.js"; +import { paginate } from "../../paginate.js"; +import { log } from "../../parser.js"; + +export class Pools { + private STEP_SIZE = 100; // Resources + private POLL_DEBOUNCE = 2000; // Milliseconds + private pollMaxDebounces = 30; + private POLL_DEFAULT_VALUE = 30; + + constructor(pollMaxDebounces?: number) { + if (pollMaxDebounces) { + this.pollMaxDebounces = pollMaxDebounces; + } + } + + public setPollMaxDebounces(value: number): void { + this.pollMaxDebounces = value; + } + + public getPollMaxDebounces(): number { + return this.pollMaxDebounces; + } + + public wipeAttributes = async ( + databaseId: string, + collectionId: string, + iteration: number = 1, + ): Promise => { + if (iteration > this.pollMaxDebounces) { + return false; + } + + const databasesService = await getDatabasesService(); + const response = await databasesService.listAttributes( + databaseId, + collectionId, + [JSON.stringify({ method: "limit", values: [1] })], + ); + const { total } = response; + + if (total === 0) { + return true; + } + + if (this.pollMaxDebounces === this.POLL_DEFAULT_VALUE) { + let steps = Math.max(1, Math.ceil(total / this.STEP_SIZE)); + if (steps > 1 && iteration === 1) { + this.pollMaxDebounces *= steps; + + log( + "Found a large number of attributes, increasing timeout to " + + (this.pollMaxDebounces * this.POLL_DEBOUNCE) / 1000 / 60 + + " minutes", + ); + } + } + + await new Promise((resolve) => setTimeout(resolve, this.POLL_DEBOUNCE)); + + return await this.wipeAttributes(databaseId, collectionId, iteration + 1); + }; + + public wipeIndexes = async ( + databaseId: string, + collectionId: string, + iteration: number = 1, + ): Promise => { + if (iteration > this.pollMaxDebounces) { + return false; + } + + const 
databasesService = await getDatabasesService(); + const response = await databasesService.listIndexes( + databaseId, + collectionId, + [JSON.stringify({ method: "limit", values: [1] })], + ); + const { total } = response; + + if (total === 0) { + return true; + } + + if (this.pollMaxDebounces === this.POLL_DEFAULT_VALUE) { + let steps = Math.max(1, Math.ceil(total / this.STEP_SIZE)); + if (steps > 1 && iteration === 1) { + this.pollMaxDebounces *= steps; + + log( + "Found a large number of indexes, increasing timeout to " + + (this.pollMaxDebounces * this.POLL_DEBOUNCE) / 1000 / 60 + + " minutes", + ); + } + } + + await new Promise((resolve) => setTimeout(resolve, this.POLL_DEBOUNCE)); + + return await this.wipeIndexes(databaseId, collectionId, iteration + 1); + }; + + public deleteAttributes = async ( + databaseId: string, + collectionId: string, + attributeKeys: any[], + iteration: number = 1, + ): Promise => { + if (iteration > this.pollMaxDebounces) { + return false; + } + + if (this.pollMaxDebounces === this.POLL_DEFAULT_VALUE) { + let steps = Math.max(1, Math.ceil(attributeKeys.length / this.STEP_SIZE)); + if (steps > 1 && iteration === 1) { + this.pollMaxDebounces *= steps; + + log( + "Found a large number of attributes to be deleted. Increasing timeout to " + + (this.pollMaxDebounces * this.POLL_DEBOUNCE) / 1000 / 60 + + " minutes", + ); + } + } + + const { attributes } = await paginate( + async (args: any) => { + const databasesService = await getDatabasesService(); + return await databasesService.listAttributes( + args.databaseId, + args.collectionId, + args.queries || [], + ); + }, + { + databaseId, + collectionId, + }, + 100, + "attributes", + ); + + const ready = attributeKeys.filter((attribute: any) => + attributes.includes(attribute.key), + ); + + if (ready.length === 0) { + return true; + } + + await new Promise((resolve) => setTimeout(resolve, this.POLL_DEBOUNCE)); + + return await this.expectAttributes( + databaseId, + collectionId, + attributeKeys, + iteration + 1, + ); + }; + + public expectAttributes = async ( + databaseId: string, + collectionId: string, + attributeKeys: string[], + iteration: number = 1, + ): Promise => { + if (iteration > this.pollMaxDebounces) { + return false; + } + + if (this.pollMaxDebounces === this.POLL_DEFAULT_VALUE) { + let steps = Math.max(1, Math.ceil(attributeKeys.length / this.STEP_SIZE)); + if (steps > 1 && iteration === 1) { + this.pollMaxDebounces *= steps; + + log( + "Creating a large number of attributes, increasing timeout to " + + (this.pollMaxDebounces * this.POLL_DEBOUNCE) / 1000 / 60 + + " minutes", + ); + } + } + + const { attributes } = await paginate( + async (args: any) => { + const databasesService = await getDatabasesService(); + return await databasesService.listAttributes( + args.databaseId, + args.collectionId, + args.queries || [], + ); + }, + { + databaseId, + collectionId, + }, + 100, + "attributes", + ); + + const ready = attributes + .filter((attribute: any) => { + if (attributeKeys.includes(attribute.key)) { + if (["stuck", "failed"].includes(attribute.status)) { + throw new Error(`Attribute '${attribute.key}' failed!`); + } + + return attribute.status === "available"; + } + + return false; + }) + .map((attribute: any) => attribute.key); + + if (ready.length === attributeKeys.length) { + return true; + } + + await new Promise((resolve) => setTimeout(resolve, this.POLL_DEBOUNCE)); + + return await this.expectAttributes( + databaseId, + collectionId, + attributeKeys, + iteration + 1, + ); + }; + + public 
deleteIndexes = async ( + databaseId: string, + collectionId: string, + indexesKeys: any[], + iteration: number = 1, + ): Promise => { + if (iteration > this.pollMaxDebounces) { + return false; + } + + if (this.pollMaxDebounces === this.POLL_DEFAULT_VALUE) { + let steps = Math.max(1, Math.ceil(indexesKeys.length / this.STEP_SIZE)); + if (steps > 1 && iteration === 1) { + this.pollMaxDebounces *= steps; + + log( + "Found a large number of indexes to be deleted. Increasing timeout to " + + (this.pollMaxDebounces * this.POLL_DEBOUNCE) / 1000 / 60 + + " minutes", + ); + } + } + + const { indexes } = await paginate( + async (args: any) => { + const databasesService = await getDatabasesService(); + return await databasesService.listIndexes( + args.databaseId, + args.collectionId, + args.queries || [], + ); + }, + { + databaseId, + collectionId, + }, + 100, + "indexes", + ); + + const ready = indexesKeys.filter((index: any) => + indexes.includes(index.key), + ); + + if (ready.length === 0) { + return true; + } + + await new Promise((resolve) => setTimeout(resolve, this.POLL_DEBOUNCE)); + + return await this.expectIndexes( + databaseId, + collectionId, + indexesKeys, + iteration + 1, + ); + }; + + public expectIndexes = async ( + databaseId: string, + collectionId: string, + indexKeys: string[], + iteration: number = 1, + ): Promise => { + if (iteration > this.pollMaxDebounces) { + return false; + } + + if (this.pollMaxDebounces === this.POLL_DEFAULT_VALUE) { + let steps = Math.max(1, Math.ceil(indexKeys.length / this.STEP_SIZE)); + if (steps > 1 && iteration === 1) { + this.pollMaxDebounces *= steps; + + log( + "Creating a large number of indexes, increasing timeout to " + + (this.pollMaxDebounces * this.POLL_DEBOUNCE) / 1000 / 60 + + " minutes", + ); + } + } + + const { indexes } = await paginate( + async (args: any) => { + const databasesService = await getDatabasesService(); + return await databasesService.listIndexes( + args.databaseId, + args.collectionId, + args.queries || [], + ); + }, + { + databaseId, + collectionId, + }, + 100, + "indexes", + ); + + const ready = indexes + .filter((index: any) => { + if (indexKeys.includes(index.key)) { + if (["stuck", "failed"].includes(index.status)) { + throw new Error(`Index '${index.key}' failed!`); + } + + return index.status === "available"; + } + + return false; + }) + .map((index: any) => index.key); + + if (ready.length >= indexKeys.length) { + return true; + } + + await new Promise((resolve) => setTimeout(resolve, this.POLL_DEBOUNCE)); + + return await this.expectIndexes( + databaseId, + collectionId, + indexKeys, + iteration + 1, + ); + }; +} From e8d9543ebe984bfb1d24fc494805b286b713dacd Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Tue, 6 Jan 2026 14:25:24 +0530 Subject: [PATCH 16/41] simplify --- lib/commands/pull.ts | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/lib/commands/pull.ts b/lib/commands/pull.ts index 8e5443c7..9222f4d9 100644 --- a/lib/commands/pull.ts +++ b/lib/commands/pull.ts @@ -383,10 +383,10 @@ export class Pull { try { const sitesService = new Sites(this.projectClient); - let allSites: Models.Site[]; + let sites: Models.Site[]; if (options.siteIds && options.siteIds.length > 0) { - allSites = await Promise.all( + sites = await Promise.all( options.siteIds.map((id) => sitesService.get({ siteId: id, @@ -408,13 +408,9 @@ export class Pull { 100, "sites", ); - allSites = fetchedSites; + sites = fetchedSites; } - const sites = options.siteIds - ? 
allSites.filter((s) => options.siteIds!.includes(s.$id)) - : allSites; - const result: SiteConfig[] = []; for (const site of sites) { From 7863279e61f11348982004f191c9db1c1504d19a Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Tue, 6 Jan 2026 16:25:52 +0530 Subject: [PATCH 17/41] fix attempts --- lib/commands/push.ts | 27 ++++++++++----------------- 1 file changed, 10 insertions(+), 17 deletions(-) diff --git a/lib/commands/push.ts b/lib/commands/push.ts index a5c84b76..b6609d92 100644 --- a/lib/commands/push.ts +++ b/lib/commands/push.ts @@ -62,12 +62,6 @@ import { Attributes, Collection } from "./utils/attributes.js"; const POLL_DEBOUNCE = 2000; // Milliseconds const POLL_DEFAULT_VALUE = 30; -let pollMaxDebounces = POLL_DEFAULT_VALUE; - -// Shared instances -const pools = new Pools(pollMaxDebounces); -const attributes = new Attributes(pools); - interface ObjectChange { group: string; setting: string; @@ -1385,13 +1379,11 @@ const pushTable = async ({ }: PushTableOptions = {}): Promise => { const tables: any[] = []; - if (attempts) { - pollMaxDebounces = attempts; - pools.setPollMaxDebounces(attempts); - } + const pollMaxDebounces = attempts ?? POLL_DEFAULT_VALUE; + const pools = new Pools(pollMaxDebounces); + const attributes = new Attributes(pools); - const { applied: tablesDBApplied, resyncNeeded } = - await checkAndApplyTablesDBChanges(); + const { resyncNeeded } = await checkAndApplyTablesDBChanges(); if (resyncNeeded) { log("Resyncing configuration due to tablesDB deletions ..."); @@ -1649,16 +1641,17 @@ const pushTable = async ({ success(`Successfully pushed ${tablesChanged.size} tables`); }; -const pushCollection = async ({ attempts }): Promise => { +const pushCollection = async ({ + attempts, +}: PushTableOptions = {}): Promise => { warn( "appwrite push collection has been deprecated. Please consider using 'appwrite push tables' instead", ); const collections: any[] = []; - if (attempts) { - pollMaxDebounces = attempts; - pools.setPollMaxDebounces(attempts); - } + // Create fresh instances per operation to avoid shared state issues + const pools = new Pools(attempts ?? 
POLL_DEFAULT_VALUE);
+  const attributes = new Attributes(pools);
 
   if (cliConfig.all) {
     checkDeployConditions(localConfig);
 
From a4b663dacaf3deeae16e60d3fc95390824e7f01b Mon Sep 17 00:00:00 2001
From: Chirag Aggarwal
Date: Tue, 6 Jan 2026 16:26:26 +0530
Subject: [PATCH 18/41] grammar

---
 lib/commands/config.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lib/commands/config.ts b/lib/commands/config.ts
index fa872eea..25709b47 100644
--- a/lib/commands/config.ts
+++ b/lib/commands/config.ts
@@ -137,7 +137,7 @@ const int64Schema = z.preprocess(
       if (val < INT64_MIN || val > INT64_MAX) {
         ctx.addIssue({
           code: "custom",
-          message: `must be between ${INT64_MIN} and ${INT64_MAX} (64-bit signed integer range)`,
+          message: `Value must be between ${INT64_MIN} and ${INT64_MAX} (64-bit signed integer range)`,
         });
       }
     }),
From b414fce08d7b1ac8e4f6693f059132ed734b6f8d Mon Sep 17 00:00:00 2001
From: Chirag Aggarwal
Date: Tue, 6 Jan 2026 17:21:37 +0530
Subject: [PATCH 19/41] more fixes

---
 lib/commands/pull.ts             |  2 +-
 lib/commands/utils/attributes.ts | 11 ++++++-----
 lib/commands/utils/pools.ts      | 14 +++++++-------
 3 files changed, 14 insertions(+), 13 deletions(-)

diff --git a/lib/commands/pull.ts b/lib/commands/pull.ts
index 9222f4d9..6035bec5 100644
--- a/lib/commands/pull.ts
+++ b/lib/commands/pull.ts
@@ -236,7 +236,7 @@ export class Pull {
     if (shouldPullAll || options.tables) {
       const { databases, tables } = await this.pullTables();
       updatedConfig.databases = databases;
-      updatedConfig.collections = tables;
+      updatedConfig.tablesDB = tables;
     }
 
     if (options.collections) {
diff --git a/lib/commands/utils/attributes.ts b/lib/commands/utils/attributes.ts
index a9226455..589d84f3 100644
--- a/lib/commands/utils/attributes.ts
+++ b/lib/commands/utils/attributes.ts
@@ -630,11 +630,12 @@ export class Attributes {
     ];
 
     if (attributeKeys.length) {
-      const deleteAttributesPoolStatus = await this.pools.deleteAttributes(
-        collection["databaseId"],
-        collection["$id"],
-        attributeKeys,
-      );
+      const deleteAttributesPoolStatus =
+        await this.pools.waitForAttributeDeletion(
+          collection["databaseId"],
+          collection["$id"],
+          attributeKeys,
+        );
 
       if (!deleteAttributesPoolStatus) {
         throw new Error("Attribute deletion timed out.");
diff --git a/lib/commands/utils/pools.ts b/lib/commands/utils/pools.ts
index 84dabb60..1866da44 100644
--- a/lib/commands/utils/pools.ts
+++ b/lib/commands/utils/pools.ts
@@ -1,4 +1,4 @@
-import { getDatabasesService, getTablesDBService } from "../../services.js";
+import { getDatabasesService } from "../../services.js";
 import { paginate } from "../../paginate.js";
 import { log } from "../../parser.js";
 
@@ -100,7 +100,7 @@ export class Pools {
     return await this.wipeIndexes(databaseId, collectionId, iteration + 1);
   };
 
-  public deleteAttributes = async (
+  public waitForAttributeDeletion = async (
     databaseId: string,
     collectionId: string,
     attributeKeys: any[],
@@ -126,11 +126,11 @@ export class Pools {
     const { attributes } = await paginate(
       async (args: any) => {
         const databasesService = await getDatabasesService();
-        return await databasesService.listAttributes(
-          args.databaseId,
-          args.collectionId,
-          args.queries || [],
-        );
+        return await databasesService.listAttributes({
+          databaseId: args.databaseId,
+          collectionId: args.collectionId,
+          queries: args.queries || [],
+        });
       },
       {
         databaseId,
From 9ace35c4118bf072b6ba44fdd3e4af85ff5a0ce0 Mon Sep 17 00:00:00 2001
From: Chirag Aggarwal
Date: Tue, 6 Jan 2026 17:48:48 +0530
Subject: [PATCH 20/41] util methods from push

---
 
lib/commands/push.ts | 2407 +++++++++++++------------ lib/commands/utils/change-approval.ts | 186 ++ lib/commands/utils/database-sync.ts | 180 ++ 3 files changed, 1662 insertions(+), 1111 deletions(-) create mode 100644 lib/commands/utils/change-approval.ts create mode 100644 lib/commands/utils/database-sync.ts diff --git a/lib/commands/push.ts b/lib/commands/push.ts index b6609d92..730f093b 100644 --- a/lib/commands/push.ts +++ b/lib/commands/push.ts @@ -9,14 +9,13 @@ import { globalConfig, KeysFunction, KeysSite, - whitelistKeys, KeysTopics, KeysStorage, KeysTeams, KeysCollection, KeysTable, } from "../config.js"; -import { Spinner, SPINNER_ARC, SPINNER_DOTS } from "../spinner.js"; +import { Spinner, SPINNER_DOTS } from "../spinner.js"; import { paginate } from "../paginate.js"; import { questionsPushBuckets, @@ -26,8 +25,6 @@ import { questionsGetEntrypoint, questionsPushCollections, questionsPushTables, - questionPushChanges, - questionPushChangesConfirmation, questionsPushMessagingTopics, questionsPushResources, } from "../questions.js"; @@ -58,19 +55,17 @@ import { ApiService, AuthMethod } from "@appwrite.io/console"; import { checkDeployConditions } from "../utils.js"; import { Pools } from "./utils/pools.js"; import { Attributes, Collection } from "./utils/attributes.js"; +import { + getConfirmation, + approveChanges, + getObjectChanges, +} from "./utils/change-approval.js"; +import { checkAndApplyTablesDBChanges } from "./utils/database-sync.js"; +import type { ConfigType } from "./config.js"; const POLL_DEBOUNCE = 2000; // Milliseconds const POLL_DEFAULT_VALUE = 30; -interface ObjectChange { - group: string; - setting: string; - remote: string; - local: string; -} - -type ComparableValue = boolean | number | string | any[] | undefined; - interface PushResourcesOptions { skipDeprecated?: boolean; } @@ -89,262 +84,24 @@ interface PushFunctionOptions { withVariables?: boolean; } -interface TablesDBChangesResult { - applied: boolean; - resyncNeeded: boolean; -} - interface PushTableOptions { attempts?: number; } -const getConfirmation = async (): Promise => { - if (cliConfig.force) { - return true; - } - - async function fixConfirmation(): Promise { - const answers = await inquirer.prompt(questionPushChangesConfirmation); - if (answers.changes !== "YES" && answers.changes !== "NO") { - return await fixConfirmation(); - } - - return answers.changes; - } - - let answers = await inquirer.prompt(questionPushChanges); - - if (answers.changes !== "YES" && answers.changes !== "NO") { - answers.changes = await fixConfirmation(); - } - - if (answers.changes === "YES") { - return true; - } - - warn("Skipping push action. 
Changes were not applied."); - return false; -}; - -const isEmpty = (value: any): boolean => - value === null || - value === undefined || - (typeof value === "string" && value.trim().length === 0) || - (Array.isArray(value) && value.length === 0); - -const approveChanges = async ( - resource: any[], - resourceGetFunction: Function, - keys: Set, - resourceName: string, - resourcePlural: string, - skipKeys: string[] = [], - secondId: string = "", - secondResourceName: string = "", -): Promise => { - log("Checking for changes ..."); - const changes: any[] = []; - - await Promise.all( - resource.map(async (localResource) => { - try { - const options: Record = { - [resourceName]: localResource["$id"], - }; - - if (secondId !== "" && secondResourceName !== "") { - options[secondResourceName] = localResource[secondId]; - } - - const remoteResource = await resourceGetFunction(options); - - for (let [key, value] of Object.entries( - whitelistKeys(remoteResource, keys), - )) { - if (skipKeys.includes(key)) { - continue; - } - - if (isEmpty(value) && isEmpty(localResource[key])) { - continue; - } - - if (Array.isArray(value) && Array.isArray(localResource[key])) { - if (JSON.stringify(value) !== JSON.stringify(localResource[key])) { - changes.push({ - id: localResource["$id"], - key, - remote: chalk.red((value as string[]).join("\n")), - local: chalk.green(localResource[key].join("\n")), - }); - } - } else if (value !== localResource[key]) { - changes.push({ - id: localResource["$id"], - key, - remote: chalk.red(value), - local: chalk.green(localResource[key]), - }); - } - } - } catch (e: any) { - if (Number(e.code) !== 404) { - throw e; - } - } - }), - ); - - if (changes.length === 0) { - return true; - } - - drawTable(changes); - if ((await getConfirmation()) === true) { - return true; - } - - success(`Successfully pushed 0 ${resourcePlural}.`); - return false; -}; - -const getObjectChanges = >( - remote: T, - local: T, - index: keyof T, - what: string, -): ObjectChange[] => { - const changes: ObjectChange[] = []; - - const remoteNested = remote[index]; - const localNested = local[index]; - - if ( - remoteNested && - localNested && - typeof remoteNested === "object" && - !Array.isArray(remoteNested) && - typeof localNested === "object" && - !Array.isArray(localNested) - ) { - const remoteObj = remoteNested as Record; - const localObj = localNested as Record; - - for (const [service, status] of Object.entries(remoteObj)) { - const localValue = localObj[service]; - let valuesEqual = false; - - if (Array.isArray(status) && Array.isArray(localValue)) { - valuesEqual = JSON.stringify(status) === JSON.stringify(localValue); - } else { - valuesEqual = status === localValue; - } - - if (!valuesEqual) { - changes.push({ - group: what, - setting: service, - remote: chalk.red(String(status ?? "")), - local: chalk.green(String(localValue ?? 
"")), - }); - } - } - } - - return changes; -}; - -const pushResources = async ({ - skipDeprecated = false, -}: PushResourcesOptions = {}): Promise => { - const actions: Record Promise> = { - settings: pushSettings, - functions: pushFunction, - sites: pushSite, - collections: pushCollection, - tables: pushTable, - buckets: pushBucket, - teams: pushTeam, - messages: pushMessagingTopic, - }; - - if (skipDeprecated) { - delete actions.collections; - } - - if (cliConfig.all) { - for (let action of Object.values(actions)) { - await action(); - } - } else { - const answers = await inquirer.prompt(questionsPushResources); - - const action = actions[answers.resource]; - if (action !== undefined) { - await action(); - } - } -}; - -const pushSettings = async (): Promise => { - checkDeployConditions(localConfig); - - try { - const projectsService = await getProjectsService(); - let response = await projectsService.get( - localConfig.getProject().projectId, - ); - - const remoteSettings = localConfig.createSettingsObject(response ?? {}); - const localSettings = localConfig.getProject().projectSettings ?? {}; - - log("Checking for changes ..."); - const changes: any[] = []; - - changes.push( - ...getObjectChanges(remoteSettings, localSettings, "services", "Service"), - ); - changes.push( - ...getObjectChanges( - remoteSettings["auth"] ?? {}, - localSettings["auth"] ?? {}, - "methods", - "Auth method", - ), - ); - changes.push( - ...getObjectChanges( - remoteSettings["auth"] ?? {}, - localSettings["auth"] ?? {}, - "security", - "Auth security", - ), - ); - - if (changes.length > 0) { - drawTable(changes); - if ((await getConfirmation()) !== true) { - success(`Successfully pushed 0 project settings.`); - return; - } - } - } catch (e) {} - - try { - log("Pushing project settings ..."); +export class Push { + constructor() {} + public async pushSettings(config: ConfigType): Promise { const projectsService = await getProjectsService(); - const projectId = localConfig.getProject().projectId; - const projectName = localConfig.getProject().projectName; - const settings = localConfig.getProject().projectSettings ?? {}; + const projectId = config.projectId; + const projectName = config.projectName; + const settings = config.settings ?? 
{}; if (projectName) { - log("Applying project name ..."); await projectsService.update(projectId, projectName); } if (settings.services) { - log("Applying service statuses ..."); for (let [service, status] of Object.entries(settings.services)) { await projectsService.updateServiceStatus( projectId, @@ -356,7 +113,6 @@ const pushSettings = async (): Promise => { if (settings.auth) { if (settings.auth.security) { - log("Applying auth security settings ..."); await projectsService.updateAuthDuration( projectId, settings.auth.security.duration, @@ -392,8 +148,6 @@ const pushSettings = async (): Promise => { } if (settings.auth.methods) { - log("Applying auth methods statuses ..."); - for (let [method, status] of Object.entries(settings.auth.methods)) { await projectsService.updateAuthStatus( projectId, @@ -403,158 +157,454 @@ const pushSettings = async (): Promise => { } } } - - success(`Successfully pushed ${chalk.bold("all")} project settings.`); - } catch (e) { - throw e; } -}; - -const pushSite = async ({ - siteId, - async: asyncDeploy, - code, - withVariables, -}: PushSiteOptions = {}): Promise => { - process.chdir(localConfig.configDirectoryPath); - - const siteIds: string[] = []; - if (siteId) { - siteIds.push(siteId); - } else if (cliConfig.all) { - checkDeployConditions(localConfig); - const sites = localConfig.getSites(); - siteIds.push( - ...sites.map((site: any) => { - return site.$id; - }), - ); - } + public async pushBucket(bucket: any): Promise { + const storageService = await getStorageService(); - if (siteIds.length <= 0) { - const answers = await inquirer.prompt(questionsPushSites); - if (answers.sites) { - siteIds.push(...answers.sites); + try { + await storageService.getBucket(bucket["$id"]); + await storageService.updateBucket({ + bucketId: bucket["$id"], + name: bucket.name, + permissions: bucket["$permissions"], + fileSecurity: bucket.fileSecurity, + enabled: bucket.enabled, + maximumFileSize: bucket.maximumFileSize, + allowedFileExtensions: bucket.allowedFileExtensions, + encryption: bucket.encryption, + antivirus: bucket.antivirus, + compression: bucket.compression, + }); + } catch (e: any) { + if (Number(e.code) === 404) { + await storageService.createBucket({ + bucketId: bucket["$id"], + name: bucket.name, + permissions: bucket["$permissions"], + fileSecurity: bucket.fileSecurity, + enabled: bucket.enabled, + maximumFileSize: bucket.maximumFileSize, + allowedFileExtensions: bucket.allowedFileExtensions, + compression: bucket.compression, + encryption: bucket.encryption, + antivirus: bucket.antivirus, + }); + } else { + throw e; + } } } - if (siteIds.length === 0) { - log("No sites found."); - hint( - "Use 'appwrite pull sites' to synchronize existing one, or use 'appwrite init site' to create a new one.", - ); - return; - } - - let sites = siteIds.map((id: string) => { - const sites = localConfig.getSites(); - const site = sites.find((s: any) => s.$id === id); + public async pushTeam(team: any): Promise { + const teamsService = await getTeamsService(); - if (!site) { - throw new Error("Site '" + id + "' not found."); + try { + await teamsService.get(team["$id"]); + await teamsService.updateName(team["$id"], team.name); + } catch (e: any) { + if (Number(e.code) === 404) { + await teamsService.create(team["$id"], team.name); + } else { + throw e; + } } + } - return site; - }); + public async pushMessagingTopic(topic: any): Promise { + const messagingService = await getMessagingService(); - log("Validating sites ..."); - // Validation is done BEFORE pushing so the 
deployment process can be run in async with progress update - for (let site of sites) { - if (!site.buildCommand) { - log(`Site ${site.name} is missing build command.`); - const answers = await inquirer.prompt(questionsGetEntrypoint); - site.buildCommand = answers.entrypoint; - localConfig.addSite(site); + try { + await messagingService.getTopic(topic["$id"]); + await messagingService.updateTopic( + topic["$id"], + topic.name, + topic.subscribe, + ); + } catch (e: any) { + if (Number(e.code) === 404) { + await messagingService.createTopic( + topic["$id"], + topic.name, + topic.subscribe, + ); + } else { + throw e; + } } } - if ( - !(await approveChanges( - sites, - async (args: any) => { - const sitesService = await getSitesService(); - return await sitesService.get({ siteId: args.siteId }); - }, - KeysSite, - "siteId", - "sites", - ["vars"], - )) - ) { - return; - } - - log("Pushing sites ..."); - - Spinner.start(false); - let successfullyPushed = 0; - let successfullyDeployed = 0; - const failedDeployments: any[] = []; - const errors: any[] = []; + public async pushFunction( + functions: any[], + options: { + async?: boolean; + code?: boolean; + withVariables?: boolean; + } = {}, + ): Promise<{ + successfullyPushed: number; + successfullyDeployed: number; + failedDeployments: any[]; + errors: any[]; + }> { + const { async: asyncDeploy, code, withVariables } = options; + + Spinner.start(false); + let successfullyPushed = 0; + let successfullyDeployed = 0; + const failedDeployments: any[] = []; + const errors: any[] = []; + + await Promise.all( + functions.map(async (func: any) => { + let response: any = {}; + + const ignore = func.ignore ? "appwrite.config.json" : ".gitignore"; + let functionExists = false; + let deploymentCreated = false; + + const updaterRow = new Spinner({ + status: "", + resource: func.name, + id: func["$id"], + end: `Ignoring using: ${ignore}`, + }); - await Promise.all( - sites.map(async (site: any) => { - let response: any = {}; - - const ignore = site.ignore ? "appwrite.config.json" : ".gitignore"; - let siteExists = false; - let deploymentCreated = false; - - const updaterRow = new Spinner({ - status: "", - resource: site.name, - id: site["$id"], - end: `Ignoring using: ${ignore}`, - }); + updaterRow.update({ status: "Getting" }).startSpinner(SPINNER_DOTS); + const functionsService = await getFunctionsService(); + try { + response = await functionsService.get({ functionId: func["$id"] }); + functionExists = true; + if (response.runtime !== func.runtime) { + updaterRow.fail({ + errorMessage: `Runtime mismatch! (local=${func.runtime},remote=${response.runtime}) Please delete remote function or update your appwrite.config.json`, + }); + return; + } - updaterRow.update({ status: "Getting" }).startSpinner(SPINNER_DOTS); + updaterRow + .update({ status: "Updating" }) + .replaceSpinner(SPINNER_DOTS); - const sitesService = await getSitesService(); - try { - response = await sitesService.get({ siteId: site["$id"] }); - siteExists = true; - if (response.framework !== site.framework) { - updaterRow.fail({ - errorMessage: `Framework mismatch! 
(local=${site.framework},remote=${response.framework}) Please delete remote site or update your appwrite.config.json`, + response = await functionsService.update({ + functionId: func["$id"], + name: func.name, + runtime: func.runtime, + execute: func.execute, + events: func.events, + schedule: func.schedule, + timeout: func.timeout, + enabled: func.enabled, + logging: func.logging, + entrypoint: func.entrypoint, + commands: func.commands, + scopes: func.scopes, + specification: func.specification, }); + } catch (e: any) { + if (Number(e.code) === 404) { + functionExists = false; + } else { + errors.push(e); + updaterRow.fail({ + errorMessage: + e.message ?? "General error occurs please try again", + }); + return; + } + } + + if (!functionExists) { + updaterRow + .update({ status: "Creating" }) + .replaceSpinner(SPINNER_DOTS); + + try { + response = await functionsService.create({ + functionId: func.$id, + name: func.name, + runtime: func.runtime, + execute: func.execute, + events: func.events, + schedule: func.schedule, + timeout: func.timeout, + enabled: func.enabled, + logging: func.logging, + entrypoint: func.entrypoint, + commands: func.commands, + scopes: func.scopes, + specification: func.specification, + }); + + let domain = ""; + try { + const consoleService = await getConsoleService(); + const variables = await consoleService.variables(); + domain = ID.unique() + "." + variables["_APP_DOMAIN_FUNCTIONS"]; + } catch (error) { + console.error("Error fetching console variables."); + throw error; + } + + try { + const proxyService = await getProxyService(); + await proxyService.createFunctionRule(domain, func.$id); + } catch (error) { + console.error("Error creating function rule."); + throw error; + } + + updaterRow.update({ status: "Created" }); + } catch (e: any) { + errors.push(e); + updaterRow.fail({ + errorMessage: + e.message ?? 
"General error occurs please try again", + }); + return; + } + } + + if (withVariables) { + updaterRow + .update({ status: "Updating variables" }) + .replaceSpinner(SPINNER_DOTS); + + const functionsService = await getFunctionsService(); + const { variables } = await paginate( + async (args: any) => { + return await functionsService.listVariables({ + functionId: args.functionId, + }); + }, + { + functionId: func["$id"], + }, + 100, + "variables", + ); + + await Promise.all( + variables.map(async (variable: any) => { + const functionsService = await getFunctionsService(); + await functionsService.deleteVariable({ + functionId: func["$id"], + variableId: variable["$id"], + }); + }), + ); + + const envFileLocation = `${func["path"]}/.env`; + let envVariables: Array<{ key: string; value: string }> = []; + try { + if (fs.existsSync(envFileLocation)) { + const envObject = parseDotenv( + fs.readFileSync(envFileLocation, "utf8"), + ); + envVariables = Object.entries(envObject || {}).map( + ([key, value]) => ({ key, value }), + ); + } + } catch (error) { + envVariables = []; + } + await Promise.all( + envVariables.map(async (variable) => { + const functionsService = await getFunctionsService(); + await functionsService.createVariable({ + functionId: func["$id"], + key: variable.key, + value: variable.value, + secret: false, + }); + }), + ); + } + + if (code === false) { + successfullyPushed++; + successfullyDeployed++; + updaterRow.update({ status: "Pushed" }); + updaterRow.stopSpinner(); return; } - updaterRow.update({ status: "Updating" }).replaceSpinner(SPINNER_ARC); + try { + updaterRow.update({ status: "Pushing" }).replaceSpinner(SPINNER_DOTS); + const functionsService = await getFunctionsService(); + response = await functionsService.createDeployment({ + functionId: func["$id"], + entrypoint: func.entrypoint, + commands: func.commands, + code: func.path, + activate: true, + }); - response = await sitesService.update({ - siteId: site["$id"], - name: site.name, - framework: site.framework, - enabled: site.enabled, - logging: site.logging, - timeout: site.timeout, - installCommand: site.installCommand, - buildCommand: site.buildCommand, - outputDirectory: site.outputDirectory, - buildRuntime: site.buildRuntime, - adapter: site.adapter, - specification: site.specification, - }); - } catch (e: any) { - if (Number(e.code) === 404) { - siteExists = false; - } else { + updaterRow.update({ status: "Pushed" }); + deploymentCreated = true; + successfullyPushed++; + } catch (e: any) { errors.push(e); - updaterRow.fail({ - errorMessage: e.message ?? "General error occurs please try again", - }); - return; + + switch (e.code) { + case "ENOENT": + updaterRow.fail({ + errorMessage: "Not found in the current directory. Skipping...", + }); + break; + default: + updaterRow.fail({ + errorMessage: + e.message ?? "An unknown error occurred. 
Please try again.", + }); + } } - } - if (!siteExists) { - updaterRow.update({ status: "Creating" }).replaceSpinner(SPINNER_DOTS); + if (deploymentCreated && !asyncDeploy) { + try { + const deploymentId = response["$id"]; + updaterRow.update({ + status: "Deploying", + end: "Checking deployment status...", + }); + + while (true) { + const functionsService = await getFunctionsService(); + response = await functionsService.getDeployment({ + functionId: func["$id"], + deploymentId: deploymentId, + }); + + const status = response["status"]; + if (status === "ready") { + successfullyDeployed++; + + let url = ""; + const proxyService = await getProxyService(); + const res = await proxyService.listRules([ + JSON.stringify({ method: "limit", values: [1] }), + JSON.stringify({ + method: "equal", + attribute: "deploymentResourceType", + values: ["function"], + }), + JSON.stringify({ + method: "equal", + attribute: "deploymentResourceId", + values: [func["$id"]], + }), + JSON.stringify({ + method: "equal", + attribute: "trigger", + values: ["manual"], + }), + ]); + + if (Number(res.total) === 1) { + url = res.rules[0].domain; + } + + updaterRow.update({ status: "Deployed", end: url }); + + break; + } else if (status === "failed") { + failedDeployments.push({ + name: func["name"], + $id: func["$id"], + deployment: response["$id"], + }); + updaterRow.fail({ errorMessage: `Failed to deploy` }); + + break; + } else { + updaterRow.update({ + status: "Deploying", + end: `Current status: ${status}`, + }); + } + + await new Promise((resolve) => + setTimeout(resolve, POLL_DEBOUNCE * 1.5), + ); + } + } catch (e: any) { + errors.push(e); + updaterRow.fail({ + errorMessage: + e.message ?? "Unknown error occurred. Please try again", + }); + } + } + + updaterRow.stopSpinner(); + }), + ); + + Spinner.stop(); + + return { + successfullyPushed, + successfullyDeployed, + failedDeployments, + errors, + }; + } + + public async pushSite( + sites: any[], + options: { + async?: boolean; + code?: boolean; + withVariables?: boolean; + } = {}, + ): Promise<{ + successfullyPushed: number; + successfullyDeployed: number; + failedDeployments: any[]; + errors: any[]; + }> { + const { async: asyncDeploy, code, withVariables } = options; + + Spinner.start(false); + let successfullyPushed = 0; + let successfullyDeployed = 0; + const failedDeployments: any[] = []; + const errors: any[] = []; + + await Promise.all( + sites.map(async (site: any) => { + let response: any = {}; + + const ignore = site.ignore ? "appwrite.config.json" : ".gitignore"; + let siteExists = false; + let deploymentCreated = false; + + const updaterRow = new Spinner({ + status: "", + resource: site.name, + id: site["$id"], + end: `Ignoring using: ${ignore}`, + }); + + updaterRow.update({ status: "Getting" }).startSpinner(SPINNER_DOTS); + const sitesService = await getSitesService(); try { - response = await sitesService.create({ - siteId: site.$id, + response = await sitesService.get({ siteId: site["$id"] }); + siteExists = true; + if (response.framework !== site.framework) { + updaterRow.fail({ + errorMessage: `Framework mismatch! 
(local=${site.framework},remote=${response.framework}) Please delete remote site or update your appwrite.config.json`, + }); + return; + } + + updaterRow + .update({ status: "Updating" }) + .replaceSpinner(SPINNER_DOTS); + + response = await sitesService.update({ + siteId: site["$id"], name: site.name, framework: site.framework, enabled: site.enabled, @@ -567,63 +617,363 @@ const pushSite = async ({ adapter: site.adapter, specification: site.specification, }); + } catch (e: any) { + if (Number(e.code) === 404) { + siteExists = false; + } else { + errors.push(e); + updaterRow.fail({ + errorMessage: + e.message ?? "General error occurs please try again", + }); + return; + } + } + + if (!siteExists) { + updaterRow + .update({ status: "Creating" }) + .replaceSpinner(SPINNER_DOTS); - let domain = ""; try { - const consoleService = await getConsoleService(); - const variables = await consoleService.variables(); - domain = ID.unique() + "." + variables["_APP_DOMAIN_SITES"]; - } catch (error) { - console.error("Error fetching console variables."); - throw error; + response = await sitesService.create({ + siteId: site.$id, + name: site.name, + framework: site.framework, + enabled: site.enabled, + logging: site.logging, + timeout: site.timeout, + installCommand: site.installCommand, + buildCommand: site.buildCommand, + outputDirectory: site.outputDirectory, + buildRuntime: site.buildRuntime, + adapter: site.adapter, + specification: site.specification, + }); + + let domain = ""; + try { + const consoleService = await getConsoleService(); + const variables = await consoleService.variables(); + domain = ID.unique() + "." + variables["_APP_DOMAIN_SITES"]; + } catch (error) { + console.error("Error fetching console variables."); + throw error; + } + + try { + const proxyService = await getProxyService(); + await proxyService.createSiteRule(domain, site.$id); + } catch (error) { + console.error("Error creating site rule."); + throw error; + } + + updaterRow.update({ status: "Created" }); + } catch (e: any) { + errors.push(e); + updaterRow.fail({ + errorMessage: + e.message ?? 
"General error occurs please try again", + }); + return; } + } + if (withVariables) { + updaterRow + .update({ status: "Creating variables" }) + .replaceSpinner(SPINNER_DOTS); + + const sitesService = await getSitesService(); + const { variables } = await paginate( + async (args: any) => { + return await sitesService.listVariables({ siteId: args.siteId }); + }, + { + siteId: site["$id"], + }, + 100, + "variables", + ); + + await Promise.all( + variables.map(async (variable: any) => { + const sitesService = await getSitesService(); + await sitesService.deleteVariable({ + siteId: site["$id"], + variableId: variable["$id"], + }); + }), + ); + + const envFileLocation = `${site["path"]}/.env`; + let envVariables: Array<{ key: string; value: string }> = []; try { - const proxyService = await getProxyService(); - const rule = await proxyService.createSiteRule(domain, site.$id); + if (fs.existsSync(envFileLocation)) { + const envObject = parseDotenv( + fs.readFileSync(envFileLocation, "utf8"), + ); + envVariables = Object.entries(envObject || {}).map( + ([key, value]) => ({ key, value }), + ); + } } catch (error) { - console.error("Error creating site rule."); - throw error; + envVariables = []; } + await Promise.all( + envVariables.map(async (variable) => { + const sitesService = await getSitesService(); + await sitesService.createVariable({ + siteId: site["$id"], + key: variable.key, + value: variable.value, + secret: false, + }); + }), + ); + } + + if (code === false) { + successfullyPushed++; + successfullyDeployed++; + updaterRow.update({ status: "Pushed" }); + updaterRow.stopSpinner(); + return; + } + + try { + updaterRow.update({ status: "Pushing" }).replaceSpinner(SPINNER_DOTS); + const sitesService = await getSitesService(); + response = await sitesService.createDeployment({ + siteId: site["$id"], + installCommand: site.installCommand, + buildCommand: site.buildCommand, + outputDirectory: site.outputDirectory, + code: site.path, + activate: true, + }); - updaterRow.update({ status: "Created" }); + updaterRow.update({ status: "Pushed" }); + deploymentCreated = true; + successfullyPushed++; } catch (e: any) { errors.push(e); - updaterRow.fail({ - errorMessage: e.message ?? "General error occurs please try again", - }); - return; + + switch (e.code) { + case "ENOENT": + updaterRow.fail({ + errorMessage: "Not found in the current directory. Skipping...", + }); + break; + default: + updaterRow.fail({ + errorMessage: + e.message ?? "An unknown error occurred. 
Please try again.", + }); + } + } + + if (deploymentCreated && !asyncDeploy) { + try { + const deploymentId = response["$id"]; + updaterRow.update({ + status: "Deploying", + end: "Checking deployment status...", + }); + + while (true) { + const sitesService = await getSitesService(); + response = await sitesService.getDeployment({ + siteId: site["$id"], + deploymentId: deploymentId, + }); + + const status = response["status"]; + if (status === "ready") { + successfullyDeployed++; + + let url = ""; + const proxyService = await getProxyService(); + const res = await proxyService.listRules([ + JSON.stringify({ method: "limit", values: [1] }), + JSON.stringify({ + method: "equal", + attribute: "deploymentResourceType", + values: ["site"], + }), + JSON.stringify({ + method: "equal", + attribute: "deploymentResourceId", + values: [site["$id"]], + }), + JSON.stringify({ + method: "equal", + attribute: "trigger", + values: ["manual"], + }), + ]); + + if (Number(res.total) === 1) { + url = res.rules[0].domain; + } + + updaterRow.update({ status: "Deployed", end: url }); + + break; + } else if (status === "failed") { + failedDeployments.push({ + name: site["name"], + $id: site["$id"], + deployment: response["$id"], + }); + updaterRow.fail({ errorMessage: `Failed to deploy` }); + + break; + } else { + updaterRow.update({ + status: "Deploying", + end: `Current status: ${status}`, + }); + } + + await new Promise((resolve) => + setTimeout(resolve, POLL_DEBOUNCE * 1.5), + ); + } + } catch (e: any) { + errors.push(e); + updaterRow.fail({ + errorMessage: + e.message ?? "Unknown error occurred. Please try again", + }); + } } + + updaterRow.stopSpinner(); + }), + ); + + Spinner.stop(); + + return { + successfullyPushed, + successfullyDeployed, + failedDeployments, + errors, + }; + } + + public async pushSingleFunction( + func: any, + options: { + withVariables?: boolean; + code?: boolean; + } = {}, + ): Promise<{ + success: boolean; + deploymentId?: string; + error?: string; + errorCode?: string; + }> { + const functionsService = await getFunctionsService(); + let functionExists = false; + + try { + const response = await functionsService.get({ functionId: func["$id"] }); + functionExists = true; + + if (response.runtime !== func.runtime) { + return { + success: false, + error: `Runtime mismatch! 
(local=${func.runtime},remote=${response.runtime}) Please delete remote function or update your appwrite.config.json`, + }; + } + + await functionsService.update({ + functionId: func["$id"], + name: func.name, + runtime: func.runtime, + execute: func.execute, + events: func.events, + schedule: func.schedule, + timeout: func.timeout, + enabled: func.enabled, + logging: func.logging, + entrypoint: func.entrypoint, + commands: func.commands, + scopes: func.scopes, + specification: func.specification, + }); + } catch (e: any) { + if (Number(e.code) === 404) { + functionExists = false; + } else { + return { success: false, error: e.message }; } + } + + if (!functionExists) { + try { + await functionsService.create({ + functionId: func.$id, + name: func.name, + runtime: func.runtime, + execute: func.execute, + events: func.events, + schedule: func.schedule, + timeout: func.timeout, + enabled: func.enabled, + logging: func.logging, + entrypoint: func.entrypoint, + commands: func.commands, + scopes: func.scopes, + specification: func.specification, + }); - if (withVariables) { - updaterRow - .update({ status: "Creating variables" }) - .replaceSpinner(SPINNER_ARC); + let domain = ""; + try { + const consoleService = await getConsoleService(); + const variables = await consoleService.variables(); + domain = ID.unique() + "." + variables["_APP_DOMAIN_FUNCTIONS"]; + } catch (error) { + return { success: false, error: "Error fetching console variables." }; + } - const sitesService = await getSitesService(); + try { + const proxyService = await getProxyService(); + await proxyService.createFunctionRule(domain, func.$id); + } catch (error) { + return { success: false, error: "Error creating function rule." }; + } + } catch (e: any) { + return { success: false, error: e.message }; + } + } + + if (options.withVariables) { + try { const { variables } = await paginate( async (args: any) => { - return await sitesService.listVariables({ siteId: args.siteId }); - }, - { - siteId: site["$id"], + return await functionsService.listVariables({ + functionId: args.functionId, + }); }, + { functionId: func["$id"] }, 100, "variables", ); await Promise.all( variables.map(async (variable: any) => { - const sitesService = await getSitesService(); - await sitesService.deleteVariable({ - siteId: site["$id"], + await functionsService.deleteVariable({ + functionId: func["$id"], variableId: variable["$id"], }); }), ); - const envFileLocation = `${site["path"]}/.env`; + const envFileLocation = `${func["path"]}/.env`; let envVariables: Array<{ key: string; value: string }> = []; try { if (fs.existsSync(envFileLocation)) { @@ -635,396 +985,156 @@ const pushSite = async ({ ); } } catch (error) { - // Handle parsing errors gracefully envVariables = []; } + await Promise.all( envVariables.map(async (variable) => { - const sitesService = await getSitesService(); - await sitesService.createVariable({ - siteId: site["$id"], + await functionsService.createVariable({ + functionId: func["$id"], key: variable.key, value: variable.value, secret: false, }); }), ); + } catch (e: any) { + return { + success: false, + error: `Failed to update variables: ${e.message}`, + }; } + } - if (code === false) { - successfullyPushed++; - successfullyDeployed++; - updaterRow.update({ status: "Pushed" }); - updaterRow.stopSpinner(); - return; - } + if (options.code === false) { + return { success: true }; + } - try { - updaterRow.update({ status: "Pushing" }).replaceSpinner(SPINNER_ARC); - const sitesService = await getSitesService(); - response = await 
sitesService.createDeployment({ - siteId: site["$id"], - installCommand: site.installCommand, - buildCommand: site.buildCommand, - outputDirectory: site.outputDirectory, - code: site.path, - activate: true, - }); + try { + const response = await functionsService.createDeployment({ + functionId: func["$id"], + entrypoint: func.entrypoint, + commands: func.commands, + code: func.path, + activate: true, + }); - updaterRow.update({ status: "Pushed" }); - deploymentCreated = true; - successfullyPushed++; - } catch (e: any) { - errors.push(e); - - switch (e.code) { - case "ENOENT": - updaterRow.fail({ - errorMessage: "Not found in the current directory. Skipping...", - }); - break; - default: - updaterRow.fail({ - errorMessage: - e.message ?? "An unknown error occurred. Please try again.", - }); - } - } - - if (deploymentCreated && !asyncDeploy) { - try { - const deploymentId = response["$id"]; - updaterRow.update({ - status: "Deploying", - end: "Checking deployment status...", - }); - let pollChecks = 0; - - while (true) { - const sitesService = await getSitesService(); - response = await sitesService.getDeployment({ - siteId: site["$id"], - deploymentId: deploymentId, - }); - - const status = response["status"]; - if (status === "ready") { - successfullyDeployed++; - - let url = ""; - const proxyService = await getProxyService(); - const res = await proxyService.listRules([ - JSON.stringify({ method: "limit", values: [1] }), - JSON.stringify({ - method: "equal", - attribute: "deploymentResourceType", - values: ["site"], - }), - JSON.stringify({ - method: "equal", - attribute: "deploymentResourceId", - values: [site["$id"]], - }), - JSON.stringify({ - method: "equal", - attribute: "trigger", - values: ["manual"], - }), - ]); - - if (Number(res.total) === 1) { - url = res.rules[0].domain; - } - - updaterRow.update({ status: "Deployed", end: url }); - - break; - } else if (status === "failed") { - failedDeployments.push({ - name: site["name"], - $id: site["$id"], - deployment: response["$id"], - }); - updaterRow.fail({ errorMessage: `Failed to deploy` }); - - break; - } else { - updaterRow.update({ - status: "Deploying", - end: `Current status: ${status}`, - }); - } - - pollChecks++; - await new Promise((resolve) => - setTimeout(resolve, POLL_DEBOUNCE * 1.5), - ); - } - } catch (e: any) { - errors.push(e); - updaterRow.fail({ - errorMessage: - e.message ?? "Unknown error occurred. Please try again", - }); - } - } - - updaterRow.stopSpinner(); - }), - ); - - Spinner.stop(); - - failedDeployments.forEach((failed) => { - const { name, deployment, $id } = failed; - const failUrl = `${globalConfig.getEndpoint().slice(0, -3)}/console/project-${localConfig.getProject().projectId}/sites/site-${$id}/deployments/deployment-${deployment}`; - - error( - `Deployment of ${name} has failed. 
Check at ${failUrl} for more details\n`, - ); - }); - - if (!asyncDeploy) { - if (successfullyPushed === 0) { - error("No sites were pushed."); - } else if (successfullyDeployed !== successfullyPushed) { - warn( - `Successfully pushed ${successfullyDeployed} of ${successfullyPushed} sites`, - ); - } else { - success(`Successfully pushed ${successfullyPushed} sites.`); - } - } else { - success(`Successfully pushed ${successfullyPushed} sites.`); - } - - if (cliConfig.verbose) { - errors.forEach((e) => { - console.error(e); - }); - } -}; - -const pushFunction = async ({ - functionId, - async: asyncDeploy, - code, - withVariables, -}: PushFunctionOptions = {}): Promise => { - process.chdir(localConfig.configDirectoryPath); - - const functionIds: string[] = []; - - if (functionId) { - functionIds.push(functionId); - } else if (cliConfig.all) { - checkDeployConditions(localConfig); - const functions = localConfig.getFunctions(); - functionIds.push( - ...functions.map((func: any) => { - return func.$id; - }), - ); - } - - if (functionIds.length <= 0) { - const answers = await inquirer.prompt(questionsPushFunctions); - if (answers.functions) { - functionIds.push(...answers.functions); - } - } - - if (functionIds.length === 0) { - log("No functions found."); - hint( - "Use 'appwrite pull functions' to synchronize existing one, or use 'appwrite init function' to create a new one.", - ); - return; - } - - let functions = functionIds.map((id: string) => { - const functions = localConfig.getFunctions(); - const func = functions.find((f: any) => f.$id === id); - - if (!func) { - throw new Error("Function '" + id + "' not found."); - } - - return func; - }); - - log("Validating functions ..."); - // Validation is done BEFORE pushing so the deployment process can be run in async with progress update - for (let func of functions) { - if (!func.entrypoint) { - log(`Function ${func.name} is missing an entrypoint.`); - const answers = await inquirer.prompt(questionsGetEntrypoint); - func.entrypoint = answers.entrypoint; - localConfig.addFunction(func); + return { success: true, deploymentId: response["$id"] }; + } catch (e: any) { + return { + success: false, + error: e.message ?? "An unknown error occurred. Please try again.", + errorCode: e.code, + }; } } - if ( - !(await approveChanges( - functions, - async (args: any) => { - const functionsService = await getFunctionsService(); - return await functionsService.get({ functionId: args.functionId }); - }, - KeysFunction, - "functionId", - "functions", - ["vars"], - )) - ) { - return; - } + public async pushSingleSite( + site: any, + options: { + withVariables?: boolean; + code?: boolean; + } = {}, + ): Promise<{ + success: boolean; + deploymentId?: string; + error?: string; + errorCode?: string; + }> { + const sitesService = await getSitesService(); + let siteExists = false; - log("Pushing functions ..."); + try { + const response = await sitesService.get({ siteId: site["$id"] }); + siteExists = true; - Spinner.start(false); - let successfullyPushed = 0; - let successfullyDeployed = 0; - const failedDeployments: any[] = []; - const errors: any[] = []; + if (response.framework !== site.framework) { + return { + success: false, + error: `Framework mismatch! (local=${site.framework},remote=${response.framework}) Please delete remote site or update your appwrite.config.json`, + }; + } - await Promise.all( - functions.map(async (func: any) => { - let response: any = {}; - - const ignore = func.ignore ? 
"appwrite.config.json" : ".gitignore"; - let functionExists = false; - let deploymentCreated = false; - - const updaterRow = new Spinner({ - status: "", - resource: func.name, - id: func["$id"], - end: `Ignoring using: ${ignore}`, + await sitesService.update({ + siteId: site["$id"], + name: site.name, + framework: site.framework, + enabled: site.enabled, + logging: site.logging, + timeout: site.timeout, + installCommand: site.installCommand, + buildCommand: site.buildCommand, + outputDirectory: site.outputDirectory, + buildRuntime: site.buildRuntime, + adapter: site.adapter, + specification: site.specification, }); + } catch (e: any) { + if (Number(e.code) === 404) { + siteExists = false; + } else { + return { success: false, error: e.message }; + } + } - updaterRow.update({ status: "Getting" }).startSpinner(SPINNER_DOTS); - const functionsService = await getFunctionsService(); + if (!siteExists) { try { - response = await functionsService.get({ functionId: func["$id"] }); - functionExists = true; - if (response.runtime !== func.runtime) { - updaterRow.fail({ - errorMessage: `Runtime mismatch! (local=${func.runtime},remote=${response.runtime}) Please delete remote function or update your appwrite.config.json`, - }); - return; - } - - updaterRow.update({ status: "Updating" }).replaceSpinner(SPINNER_ARC); - - response = await functionsService.update({ - functionId: func["$id"], - name: func.name, - runtime: func.runtime, - execute: func.execute, - events: func.events, - schedule: func.schedule, - timeout: func.timeout, - enabled: func.enabled, - logging: func.logging, - entrypoint: func.entrypoint, - commands: func.commands, - scopes: func.scopes, - specification: func.specification, + await sitesService.create({ + siteId: site.$id, + name: site.name, + framework: site.framework, + enabled: site.enabled, + logging: site.logging, + timeout: site.timeout, + installCommand: site.installCommand, + buildCommand: site.buildCommand, + outputDirectory: site.outputDirectory, + buildRuntime: site.buildRuntime, + adapter: site.adapter, + specification: site.specification, }); - } catch (e: any) { - if (Number(e.code) === 404) { - functionExists = false; - } else { - errors.push(e); - updaterRow.fail({ - errorMessage: e.message ?? "General error occurs please try again", - }); - return; - } - } - - if (!functionExists) { - updaterRow.update({ status: "Creating" }).replaceSpinner(SPINNER_DOTS); + let domain = ""; try { - response = await functionsService.create({ - functionId: func.$id, - name: func.name, - runtime: func.runtime, - execute: func.execute, - events: func.events, - schedule: func.schedule, - timeout: func.timeout, - enabled: func.enabled, - logging: func.logging, - entrypoint: func.entrypoint, - commands: func.commands, - scopes: func.scopes, - specification: func.specification, - }); - - let domain = ""; - try { - const consoleService = await getConsoleService(); - const variables = await consoleService.variables(); - domain = ID.unique() + "." + variables["_APP_DOMAIN_FUNCTIONS"]; - } catch (error) { - console.error("Error fetching console variables."); - throw error; - } - - try { - const proxyService = await getProxyService(); - const rule = await proxyService.createFunctionRule( - domain, - func.$id, - ); - } catch (error) { - console.error("Error creating function rule."); - throw error; - } + const consoleService = await getConsoleService(); + const variables = await consoleService.variables(); + domain = ID.unique() + "." 
+ variables["_APP_DOMAIN_SITES"]; + } catch (error) { + return { success: false, error: "Error fetching console variables." }; + } - updaterRow.update({ status: "Created" }); - } catch (e: any) { - errors.push(e); - updaterRow.fail({ - errorMessage: e.message ?? "General error occurs please try again", - }); - return; + try { + const proxyService = await getProxyService(); + await proxyService.createSiteRule(domain, site.$id); + } catch (error) { + return { success: false, error: "Error creating site rule." }; } + } catch (e: any) { + return { success: false, error: e.message }; } + } - if (withVariables) { - updaterRow - .update({ status: "Updating variables" }) - .replaceSpinner(SPINNER_ARC); - - const functionsService = await getFunctionsService(); + if (options.withVariables) { + try { const { variables } = await paginate( async (args: any) => { - return await functionsService.listVariables({ - functionId: args.functionId, - }); - }, - { - functionId: func["$id"], + return await sitesService.listVariables({ siteId: args.siteId }); }, + { siteId: site["$id"] }, 100, "variables", ); await Promise.all( variables.map(async (variable: any) => { - const functionsService = await getFunctionsService(); - await functionsService.deleteVariable({ - functionId: func["$id"], + await sitesService.deleteVariable({ + siteId: site["$id"], variableId: variable["$id"], }); }), ); - const envFileLocation = `${func["path"]}/.env`; + const envFileLocation = `${site["path"]}/.env`; let envVariables: Array<{ key: string; value: string }> = []; try { if (fs.existsSync(envFileLocation)) { @@ -1036,148 +1146,348 @@ const pushFunction = async ({ ); } } catch (error) { - // Handle parsing errors gracefully envVariables = []; } + await Promise.all( envVariables.map(async (variable) => { - const functionsService = await getFunctionsService(); - await functionsService.createVariable({ - functionId: func["$id"], + await sitesService.createVariable({ + siteId: site["$id"], key: variable.key, value: variable.value, secret: false, }); }), ); + } catch (e: any) { + return { + success: false, + error: `Failed to update variables: ${e.message}`, + }; } + } - if (code === false) { - successfullyPushed++; - successfullyDeployed++; - updaterRow.update({ status: "Pushed" }); - updaterRow.stopSpinner(); - return; + if (options.code === false) { + return { success: true }; + } + + try { + const response = await sitesService.createDeployment({ + siteId: site["$id"], + installCommand: site.installCommand, + buildCommand: site.buildCommand, + outputDirectory: site.outputDirectory, + code: site.path, + activate: true, + }); + + return { success: true, deploymentId: response["$id"] }; + } catch (e: any) { + return { + success: false, + error: e.message ?? "An unknown error occurred. 
Please try again.", + errorCode: e.code, + }; + } + } + + public async getDeploymentStatus( + resourceId: string, + deploymentId: string, + resourceType: "function" | "site", + ): Promise<{ + status: string; + url?: string; + }> { + if (resourceType === "function") { + const functionsService = await getFunctionsService(); + const response = await functionsService.getDeployment({ + functionId: resourceId, + deploymentId: deploymentId, + }); + + const status = response["status"]; + let url = ""; + + if (status === "ready") { + const proxyService = await getProxyService(); + const res = await proxyService.listRules([ + JSON.stringify({ method: "limit", values: [1] }), + JSON.stringify({ + method: "equal", + attribute: "deploymentResourceType", + values: ["function"], + }), + JSON.stringify({ + method: "equal", + attribute: "deploymentResourceId", + values: [resourceId], + }), + JSON.stringify({ + method: "equal", + attribute: "trigger", + values: ["manual"], + }), + ]); + + if (Number(res.total) === 1) { + url = res.rules[0].domain; + } } - try { - updaterRow.update({ status: "Pushing" }).replaceSpinner(SPINNER_ARC); - const functionsService = await getFunctionsService(); - response = await functionsService.createDeployment({ - functionId: func["$id"], - entrypoint: func.entrypoint, - commands: func.commands, - code: func.path, - activate: true, - }); + return { status, url }; + } else { + const sitesService = await getSitesService(); + const response = await sitesService.getDeployment({ + siteId: resourceId, + deploymentId: deploymentId, + }); - updaterRow.update({ status: "Pushed" }); - deploymentCreated = true; - successfullyPushed++; - } catch (e: any) { - errors.push(e); + const status = response["status"]; + let url = ""; + + if (status === "ready") { + const proxyService = await getProxyService(); + const res = await proxyService.listRules([ + JSON.stringify({ method: "limit", values: [1] }), + JSON.stringify({ + method: "equal", + attribute: "deploymentResourceType", + values: ["site"], + }), + JSON.stringify({ + method: "equal", + attribute: "deploymentResourceId", + values: [resourceId], + }), + JSON.stringify({ + method: "equal", + attribute: "trigger", + values: ["manual"], + }), + ]); - switch (e.code) { - case "ENOENT": - updaterRow.fail({ - errorMessage: "Not found in the current directory. Skipping...", - }); - break; - default: - updaterRow.fail({ - errorMessage: - e.message ?? "An unknown error occurred. 
Please try again.", - }); + if (Number(res.total) === 1) { + url = res.rules[0].domain; } } - if (deploymentCreated && !asyncDeploy) { - try { - const deploymentId = response["$id"]; - updaterRow.update({ - status: "Deploying", - end: "Checking deployment status...", - }); - let pollChecks = 0; + return { status, url }; + } + } +} - while (true) { - const functionsService = await getFunctionsService(); - response = await functionsService.getDeployment({ - functionId: func["$id"], - deploymentId: deploymentId, - }); +async function createPushInstance(): Promise { + return new Push(); +} + +const pushResources = async ({ + skipDeprecated = false, +}: PushResourcesOptions = {}): Promise => { + const actions: Record Promise> = { + settings: pushSettings, + functions: pushFunction, + sites: pushSite, + collections: pushCollection, + tables: pushTable, + buckets: pushBucket, + teams: pushTeam, + messages: pushMessagingTopic, + }; + + if (skipDeprecated) { + delete actions.collections; + } + + if (cliConfig.all) { + for (let action of Object.values(actions)) { + await action(); + } + } else { + const answers = await inquirer.prompt(questionsPushResources); + + const action = actions[answers.resource]; + if (action !== undefined) { + await action(); + } + } +}; + +const pushSettings = async (): Promise => { + checkDeployConditions(localConfig); + + try { + const projectsService = await getProjectsService(); + let response = await projectsService.get( + localConfig.getProject().projectId, + ); + + const remoteSettings = localConfig.createSettingsObject(response ?? {}); + const localSettings = localConfig.getProject().projectSettings ?? {}; + + log("Checking for changes ..."); + const changes: any[] = []; + + changes.push( + ...getObjectChanges(remoteSettings, localSettings, "services", "Service"), + ); + changes.push( + ...getObjectChanges( + remoteSettings["auth"] ?? {}, + localSettings["auth"] ?? {}, + "methods", + "Auth method", + ), + ); + changes.push( + ...getObjectChanges( + remoteSettings["auth"] ?? {}, + localSettings["auth"] ?? {}, + "security", + "Auth security", + ), + ); + + if (changes.length > 0) { + drawTable(changes); + if ((await getConfirmation()) !== true) { + success(`Successfully pushed 0 project settings.`); + return; + } + } + } catch (e) {} + + try { + log("Pushing project settings ..."); + + const pushInstance = await createPushInstance(); + const config = localConfig.getProject(); + const settings = config.projectSettings ?? 
{}; + + if (config.projectName) { + log("Applying project name ..."); + } + + if (settings.services) { + log("Applying service statuses ..."); + } + + if (settings.auth) { + if (settings.auth.security) { + log("Applying auth security settings ..."); + } + + if (settings.auth.methods) { + log("Applying auth methods statuses ..."); + } + } + + await pushInstance.pushSettings({ + projectId: config.projectId, + projectName: config.projectName, + settings: config.projectSettings, + }); + + success(`Successfully pushed ${chalk.bold("all")} project settings.`); + } catch (e) { + throw e; + } +}; + +const pushSite = async ({ + siteId, + async: asyncDeploy, + code, + withVariables, +}: PushSiteOptions = {}): Promise => { + process.chdir(localConfig.configDirectoryPath); + + const siteIds: string[] = []; + + if (siteId) { + siteIds.push(siteId); + } else if (cliConfig.all) { + checkDeployConditions(localConfig); + const sites = localConfig.getSites(); + siteIds.push( + ...sites.map((site: any) => { + return site.$id; + }), + ); + } + + if (siteIds.length <= 0) { + const answers = await inquirer.prompt(questionsPushSites); + if (answers.sites) { + siteIds.push(...answers.sites); + } + } - const status = response["status"]; - if (status === "ready") { - successfullyDeployed++; + if (siteIds.length === 0) { + log("No sites found."); + hint( + "Use 'appwrite pull sites' to synchronize existing one, or use 'appwrite init site' to create a new one.", + ); + return; + } - let url = ""; - const proxyService = await getProxyService(); - const res = await proxyService.listRules([ - JSON.stringify({ method: "limit", values: [1] }), - JSON.stringify({ - method: "equal", - attribute: "deploymentResourceType", - values: ["function"], - }), - JSON.stringify({ - method: "equal", - attribute: "deploymentResourceId", - values: [func["$id"]], - }), - JSON.stringify({ - method: "equal", - attribute: "trigger", - values: ["manual"], - }), - ]); - - if (Number(res.total) === 1) { - url = res.rules[0].domain; - } + let sites = siteIds.map((id: string) => { + const sites = localConfig.getSites(); + const site = sites.find((s: any) => s.$id === id); - updaterRow.update({ status: "Deployed", end: url }); + if (!site) { + throw new Error("Site '" + id + "' not found."); + } - break; - } else if (status === "failed") { - failedDeployments.push({ - name: func["name"], - $id: func["$id"], - deployment: response["$id"], - }); - updaterRow.fail({ errorMessage: `Failed to deploy` }); + return site; + }); - break; - } else { - updaterRow.update({ - status: "Deploying", - end: `Current status: ${status}`, - }); - } + log("Validating sites ..."); + // Validation is done BEFORE pushing so the deployment process can be run in async with progress update + for (let site of sites) { + if (!site.buildCommand) { + log(`Site ${site.name} is missing build command.`); + const answers = await inquirer.prompt(questionsGetEntrypoint); + site.buildCommand = answers.entrypoint; + localConfig.addSite(site); + } + } - pollChecks++; - await new Promise((resolve) => - setTimeout(resolve, POLL_DEBOUNCE * 1.5), - ); - } - } catch (e: any) { - errors.push(e); - updaterRow.fail({ - errorMessage: - e.message ?? "Unknown error occurred. 
Please try again", - }); - } - } + if ( + !(await approveChanges( + sites, + async (args: any) => { + const sitesService = await getSitesService(); + return await sitesService.get({ siteId: args.siteId }); + }, + KeysSite, + "siteId", + "sites", + ["vars"], + )) + ) { + return; + } - updaterRow.stopSpinner(); - }), - ); + log("Pushing sites ..."); + + const pushInstance = await createPushInstance(); + const result = await pushInstance.pushSite(sites, { + async: asyncDeploy, + code, + withVariables, + }); - Spinner.stop(); + const { + successfullyPushed, + successfullyDeployed, + failedDeployments, + errors, + } = result; failedDeployments.forEach((failed) => { const { name, deployment, $id } = failed; - const failUrl = `${globalConfig.getEndpoint().slice(0, -3)}/console/project-${localConfig.getProject().projectId}/functions/function-${$id}/deployment-${deployment}`; + const failUrl = `${globalConfig.getEndpoint().slice(0, -3)}/console/project-${localConfig.getProject().projectId}/sites/site-${$id}/deployments/deployment-${deployment}`; error( `Deployment of ${name} has failed. Check at ${failUrl} for more details\n`, @@ -1186,16 +1496,16 @@ const pushFunction = async ({ if (!asyncDeploy) { if (successfullyPushed === 0) { - error("No functions were pushed."); + error("No sites were pushed."); } else if (successfullyDeployed !== successfullyPushed) { warn( - `Successfully pushed ${successfullyDeployed} of ${successfullyPushed} functions`, + `Successfully pushed ${successfullyDeployed} of ${successfullyPushed} sites`, ); } else { - success(`Successfully pushed ${successfullyPushed} functions.`); + success(`Successfully pushed ${successfullyPushed} sites.`); } } else { - success(`Successfully pushed ${successfullyPushed} functions.`); + success(`Successfully pushed ${successfullyPushed} sites.`); } if (cliConfig.verbose) { @@ -1205,174 +1515,125 @@ const pushFunction = async ({ } }; -const checkAndApplyTablesDBChanges = - async (): Promise => { - log("Checking for tablesDB changes ..."); +const pushFunction = async ({ + functionId, + async: asyncDeploy, + code, + withVariables, +}: PushFunctionOptions = {}): Promise => { + process.chdir(localConfig.configDirectoryPath); - const localTablesDBs = localConfig.getTablesDBs(); - const { databases: remoteTablesDBs } = await paginate( - async (args: any) => { - const tablesDBService = await getTablesDBService(); - return await tablesDBService.list(args.queries || []); - }, - {}, - 100, - "databases", - ); + const functionIds: string[] = []; - if (localTablesDBs.length === 0 && remoteTablesDBs.length === 0) { - return { applied: false, resyncNeeded: false }; - } + if (functionId) { + functionIds.push(functionId); + } else if (cliConfig.all) { + checkDeployConditions(localConfig); + const functions = localConfig.getFunctions(); + functionIds.push( + ...functions.map((func: any) => { + return func.$id; + }), + ); + } - const changes: any[] = []; - const toCreate: any[] = []; - const toUpdate: any[] = []; - const toDelete: any[] = []; - - // Check for deletions - remote DBs that aren't in local config - for (const remoteDB of remoteTablesDBs) { - const localDB = localTablesDBs.find((db: any) => db.$id === remoteDB.$id); - if (!localDB) { - toDelete.push(remoteDB); - changes.push({ - id: remoteDB.$id, - action: chalk.red("deleting"), - key: "Database", - remote: remoteDB.name, - local: "(deleted locally)", - }); - } + if (functionIds.length <= 0) { + const answers = await inquirer.prompt(questionsPushFunctions); + if (answers.functions) { + 
functionIds.push(...answers.functions); } + } - // Check for additions and updates - for (const localDB of localTablesDBs) { - const remoteDB = remoteTablesDBs.find( - (db: any) => db.$id === localDB.$id, - ); - - if (!remoteDB) { - toCreate.push(localDB); - changes.push({ - id: localDB.$id, - action: chalk.green("creating"), - key: "Database", - remote: "(does not exist)", - local: localDB.name, - }); - } else { - let hasChanges = false; - - if (remoteDB.name !== localDB.name) { - hasChanges = true; - changes.push({ - id: localDB.$id, - action: chalk.yellow("updating"), - key: "Name", - remote: remoteDB.name, - local: localDB.name, - }); - } + if (functionIds.length === 0) { + log("No functions found."); + hint( + "Use 'appwrite pull functions' to synchronize existing one, or use 'appwrite init function' to create a new one.", + ); + return; + } - if (remoteDB.enabled !== localDB.enabled) { - hasChanges = true; - changes.push({ - id: localDB.$id, - action: chalk.yellow("updating"), - key: "Enabled", - remote: remoteDB.enabled, - local: localDB.enabled, - }); - } + let functions = functionIds.map((id: string) => { + const functions = localConfig.getFunctions(); + const func = functions.find((f: any) => f.$id === id); - if (hasChanges) { - toUpdate.push(localDB); - } - } + if (!func) { + throw new Error("Function '" + id + "' not found."); } - if (changes.length === 0) { - return { applied: false, resyncNeeded: false }; + return func; + }); + + log("Validating functions ..."); + for (let func of functions) { + if (!func.entrypoint) { + log(`Function ${func.name} is missing an entrypoint.`); + const answers = await inquirer.prompt(questionsGetEntrypoint); + func.entrypoint = answers.entrypoint; + localConfig.addFunction(func); } + } - log("Found changes in tablesDB resource:"); - drawTable(changes); + if ( + !(await approveChanges( + functions, + async (args: any) => { + const functionsService = await getFunctionsService(); + return await functionsService.get({ functionId: args.functionId }); + }, + KeysFunction, + "functionId", + "functions", + ["vars"], + )) + ) { + return; + } - if (toDelete.length > 0) { - console.log( - `${chalk.red("------------------------------------------------------------------")}`, - ); - console.log( - `${chalk.red("| WARNING: Database deletion will also delete all related tables |")}`, - ); - console.log( - `${chalk.red("------------------------------------------------------------------")}`, - ); - console.log(); - } + log("Pushing functions ..."); - if ((await getConfirmation()) !== true) { - return { applied: false, resyncNeeded: false }; - } + const pushInstance = await createPushInstance(); + const result = await pushInstance.pushFunction(functions, { + async: asyncDeploy, + code, + withVariables, + }); - // Apply deletions first - let needsResync = false; - for (const db of toDelete) { - try { - log(`Deleting database ${db.name} ( ${db.$id} ) ...`); - const tablesDBService = await getTablesDBService(); - await tablesDBService.delete(db.$id); - success(`Deleted ${db.name} ( ${db.$id} )`); - needsResync = true; - } catch (e: any) { - error( - `Failed to delete database ${db.name} ( ${db.$id} ): ${e.message}`, - ); - throw new Error( - `Database sync failed during deletion of ${db.$id}. 
Some changes may have been applied.`, - ); - } - } + const { + successfullyPushed, + successfullyDeployed, + failedDeployments, + errors, + } = result; - // Apply creations - for (const db of toCreate) { - try { - log(`Creating database ${db.name} ( ${db.$id} ) ...`); - const tablesDBService = await getTablesDBService(); - await tablesDBService.create(db.$id, db.name, db.enabled); - success(`Created ${db.name} ( ${db.$id} )`); - } catch (e: any) { - error( - `Failed to create database ${db.name} ( ${db.$id} ): ${e.message}`, - ); - throw new Error( - `Database sync failed during creation of ${db.$id}. Some changes may have been applied.`, - ); - } - } + failedDeployments.forEach((failed) => { + const { name, deployment, $id } = failed; + const failUrl = `${globalConfig.getEndpoint().slice(0, -3)}/console/project-${localConfig.getProject().projectId}/functions/function-${$id}/deployment-${deployment}`; - // Apply updates - for (const db of toUpdate) { - try { - log(`Updating database ${db.name} ( ${db.$id} ) ...`); - const tablesDBService = await getTablesDBService(); - await tablesDBService.update(db.$id, db.name, db.enabled); - success(`Updated ${db.name} ( ${db.$id} )`); - } catch (e: any) { - error( - `Failed to update database ${db.name} ( ${db.$id} ): ${e.message}`, - ); - throw new Error( - `Database sync failed during update of ${db.$id}. Some changes may have been applied.`, - ); - } - } + error( + `Deployment of ${name} has failed. Check at ${failUrl} for more details\n`, + ); + }); - if (toDelete.length === 0) { - console.log(); + if (!asyncDeploy) { + if (successfullyPushed === 0) { + error("No functions were pushed."); + } else if (successfullyDeployed !== successfullyPushed) { + warn( + `Successfully pushed ${successfullyDeployed} of ${successfullyPushed} functions`, + ); + } else { + success(`Successfully pushed ${successfullyPushed} functions.`); } + } else { + success(`Successfully pushed ${successfullyPushed} functions.`); + } - return { applied: true, resyncNeeded: needsResync }; - }; + if (cliConfig.verbose) { + errors.forEach((e) => { + console.error(e); + }); + } +}; const pushTable = async ({ attempts, @@ -1826,8 +2087,6 @@ const pushCollection = async ({ }; const pushBucket = async (): Promise => { - let response: any = {}; - let bucketIds: string[] = []; const configBuckets = localConfig.getBuckets(); @@ -1875,55 +2134,17 @@ const pushBucket = async (): Promise => { log("Pushing buckets ..."); + const pushInstance = await createPushInstance(); + for (let bucket of buckets) { log(`Pushing bucket ${chalk.bold(bucket["name"])} ...`); - - const storageService = await getStorageService(); - try { - response = await storageService.getBucket(bucket["$id"]); - - await storageService.updateBucket( - bucket["$id"], - bucket.name, - bucket["$permissions"], - bucket.fileSecurity, - bucket.enabled, - bucket.maximumFileSize, - bucket.allowedFileExtensions, - bucket.encryption, - bucket.antivirus, - bucket.compression, - ); - } catch (e: any) { - if (Number(e.code) === 404) { - log( - `Bucket ${bucket.name} does not exist in the project. Creating ... 
`, - ); - - response = await storageService.createBucket( - bucket["$id"], - bucket.name, - bucket["$permissions"], - bucket.fileSecurity, - bucket.enabled, - bucket.maximumFileSize, - bucket.allowedFileExtensions, - bucket.compression, - bucket.encryption, - bucket.antivirus, - ); - } else { - throw e; - } - } + await pushInstance.pushBucket(bucket); } success(`Successfully pushed ${buckets.length} buckets.`); }; const pushTeam = async (): Promise => { - let response: any = {}; - let teamIds: string[] = []; const configTeams = localConfig.getTeams(); @@ -1971,31 +2192,17 @@ const pushTeam = async (): Promise => { log("Pushing teams ..."); + const pushInstance = await createPushInstance(); + for (let team of teams) { log(`Pushing team ${chalk.bold(team["name"])} ...`); - - const teamsService = await getTeamsService(); - try { - response = await teamsService.get(team["$id"]); - - await teamsService.updateName(team["$id"], team.name); - } catch (e: any) { - if (Number(e.code) === 404) { - log(`Team ${team.name} does not exist in the project. Creating ... `); - - response = await teamsService.create(team["$id"], team.name); - } else { - throw e; - } - } + await pushInstance.pushTeam(team); } success(`Successfully pushed ${teams.length} teams.`); }; const pushMessagingTopic = async (): Promise => { - let response: any = {}; - let topicsIds: string[] = []; const configTopics = localConfig.getMessagingTopics(); @@ -2043,34 +2250,12 @@ const pushMessagingTopic = async (): Promise => { log("Pushing topics ..."); + const pushInstance = await createPushInstance(); + for (let topic of topics) { log(`Pushing topic ${chalk.bold(topic["name"])} ...`); - - const messagingService = await getMessagingService(); - try { - response = await messagingService.getTopic(topic["$id"]); - log(`Topic ${topic.name} ( ${topic["$id"]} ) already exists.`); - - await messagingService.updateTopic( - topic["$id"], - topic.name, - topic.subscribe, - ); - } catch (e: any) { - if (Number(e.code) === 404) { - log(`Topic ${topic.name} does not exist in the project. Creating ... 
`); - - response = await messagingService.createTopic( - topic["$id"], - topic.name, - topic.subscribe, - ); - - success(`Created ${topic.name} ( ${topic["$id"]} )`); - } else { - throw e; - } - } + await pushInstance.pushMessagingTopic(topic); + success(`Created ${topic.name} ( ${topic["$id"]} )`); } success(`Successfully pushed ${topics.length} topics.`); diff --git a/lib/commands/utils/change-approval.ts b/lib/commands/utils/change-approval.ts new file mode 100644 index 00000000..cda6d1e6 --- /dev/null +++ b/lib/commands/utils/change-approval.ts @@ -0,0 +1,186 @@ +import chalk from "chalk"; +import inquirer from "inquirer"; +import { cliConfig, success, warn, log, drawTable } from "../../parser.js"; +import { whitelistKeys } from "../../config.js"; +import { + questionPushChanges, + questionPushChangesConfirmation, +} from "../../questions.js"; + +/** + * Check if a value is considered empty + */ +export const isEmpty = (value: any): boolean => + value === null || + value === undefined || + (typeof value === "string" && value.trim().length === 0) || + (Array.isArray(value) && value.length === 0); + +/** + * Prompt user for confirmation to proceed with push + */ +export const getConfirmation = async (): Promise => { + if (cliConfig.force) { + return true; + } + + async function fixConfirmation(): Promise { + const answers = await inquirer.prompt(questionPushChangesConfirmation); + if (answers.changes !== "YES" && answers.changes !== "NO") { + return await fixConfirmation(); + } + + return answers.changes; + } + + let answers = await inquirer.prompt(questionPushChanges); + + if (answers.changes !== "YES" && answers.changes !== "NO") { + answers.changes = await fixConfirmation(); + } + + if (answers.changes === "YES") { + return true; + } + + warn("Skipping push action. Changes were not applied."); + return false; +}; + +/** + * Compare two objects and return their differences + */ +interface ObjectChange { + group: string; + setting: string; + remote: string; + local: string; +} + +type ComparableValue = boolean | number | string | any[] | undefined; + +export const getObjectChanges = >( + remote: T, + local: T, + index: keyof T, + what: string, +): ObjectChange[] => { + const changes: ObjectChange[] = []; + + const remoteNested = remote[index]; + const localNested = local[index]; + + if ( + remoteNested && + localNested && + typeof remoteNested === "object" && + !Array.isArray(remoteNested) && + typeof localNested === "object" && + !Array.isArray(localNested) + ) { + const remoteObj = remoteNested as Record; + const localObj = localNested as Record; + + for (const [service, status] of Object.entries(remoteObj)) { + const localValue = localObj[service]; + let valuesEqual = false; + + if (Array.isArray(status) && Array.isArray(localValue)) { + valuesEqual = JSON.stringify(status) === JSON.stringify(localValue); + } else { + valuesEqual = status === localValue; + } + + if (!valuesEqual) { + changes.push({ + group: what, + setting: service, + remote: chalk.red(String(status ?? "")), + local: chalk.green(String(localValue ?? 
"")), + }); + } + } + } + + return changes; +}; + +/** + * Approve changes before pushing resources + * Compares local resources with remote resources and prompts user for confirmation + */ +export const approveChanges = async ( + resource: any[], + resourceGetFunction: Function, + keys: Set, + resourceName: string, + resourcePlural: string, + skipKeys: string[] = [], + secondId: string = "", + secondResourceName: string = "", +): Promise => { + log("Checking for changes ..."); + const changes: any[] = []; + + await Promise.all( + resource.map(async (localResource) => { + try { + const options: Record = { + [resourceName]: localResource["$id"], + }; + + if (secondId !== "" && secondResourceName !== "") { + options[secondResourceName] = localResource[secondId]; + } + + const remoteResource = await resourceGetFunction(options); + + for (let [key, value] of Object.entries( + whitelistKeys(remoteResource, keys), + )) { + if (skipKeys.includes(key)) { + continue; + } + + if (isEmpty(value) && isEmpty(localResource[key])) { + continue; + } + + if (Array.isArray(value) && Array.isArray(localResource[key])) { + if (JSON.stringify(value) !== JSON.stringify(localResource[key])) { + changes.push({ + id: localResource["$id"], + key, + remote: chalk.red((value as string[]).join("\n")), + local: chalk.green(localResource[key].join("\n")), + }); + } + } else if (value !== localResource[key]) { + changes.push({ + id: localResource["$id"], + key, + remote: chalk.red(value), + local: chalk.green(localResource[key]), + }); + } + } + } catch (e: any) { + if (Number(e.code) !== 404) { + throw e; + } + } + }), + ); + + if (changes.length === 0) { + return true; + } + + drawTable(changes); + if ((await getConfirmation()) === true) { + return true; + } + + success(`Successfully pushed 0 ${resourcePlural}.`); + return false; +}; diff --git a/lib/commands/utils/database-sync.ts b/lib/commands/utils/database-sync.ts new file mode 100644 index 00000000..008e9ae1 --- /dev/null +++ b/lib/commands/utils/database-sync.ts @@ -0,0 +1,180 @@ +import chalk from "chalk"; +import { localConfig } from "../../config.js"; +import { log, success, error, drawTable } from "../../parser.js"; +import { paginate } from "../../paginate.js"; +import { getTablesDBService } from "../../services.js"; +import { getConfirmation } from "./change-approval.js"; + +export interface TablesDBChangesResult { + applied: boolean; + resyncNeeded: boolean; +} + +/** + * Check for and apply changes to tablesDB (databases) + * Handles creation, update, and deletion of databases + */ +export const checkAndApplyTablesDBChanges = + async (): Promise => { + log("Checking for tablesDB changes ..."); + + const localTablesDBs = localConfig.getTablesDBs(); + const { databases: remoteTablesDBs } = await paginate( + async (args: any) => { + const tablesDBService = await getTablesDBService(); + return await tablesDBService.list(args.queries || []); + }, + {}, + 100, + "databases", + ); + + if (localTablesDBs.length === 0 && remoteTablesDBs.length === 0) { + return { applied: false, resyncNeeded: false }; + } + + const changes: any[] = []; + const toCreate: any[] = []; + const toUpdate: any[] = []; + const toDelete: any[] = []; + + // Check for deletions - remote DBs that aren't in local config + for (const remoteDB of remoteTablesDBs) { + const localDB = localTablesDBs.find((db: any) => db.$id === remoteDB.$id); + if (!localDB) { + toDelete.push(remoteDB); + changes.push({ + id: remoteDB.$id, + action: chalk.red("deleting"), + key: "Database", + remote: 
remoteDB.name, + local: "(deleted locally)", + }); + } + } + + // Check for additions and updates + for (const localDB of localTablesDBs) { + const remoteDB = remoteTablesDBs.find( + (db: any) => db.$id === localDB.$id, + ); + + if (!remoteDB) { + toCreate.push(localDB); + changes.push({ + id: localDB.$id, + action: chalk.green("creating"), + key: "Database", + remote: "(does not exist)", + local: localDB.name, + }); + } else { + let hasChanges = false; + + if (remoteDB.name !== localDB.name) { + hasChanges = true; + changes.push({ + id: localDB.$id, + action: chalk.yellow("updating"), + key: "Name", + remote: remoteDB.name, + local: localDB.name, + }); + } + + if (remoteDB.enabled !== localDB.enabled) { + hasChanges = true; + changes.push({ + id: localDB.$id, + action: chalk.yellow("updating"), + key: "Enabled", + remote: remoteDB.enabled, + local: localDB.enabled, + }); + } + + if (hasChanges) { + toUpdate.push(localDB); + } + } + } + + if (changes.length === 0) { + return { applied: false, resyncNeeded: false }; + } + + log("Found changes in tablesDB resource:"); + drawTable(changes); + + if (toDelete.length > 0) { + console.log( + `${chalk.red("------------------------------------------------------------------")}`, + ); + console.log( + `${chalk.red("| WARNING: Database deletion will also delete all related tables |")}`, + ); + console.log( + `${chalk.red("------------------------------------------------------------------")}`, + ); + console.log(); + } + + if ((await getConfirmation()) !== true) { + return { applied: false, resyncNeeded: false }; + } + + // Apply deletions first + let needsResync = false; + for (const db of toDelete) { + try { + log(`Deleting database ${db.name} ( ${db.$id} ) ...`); + const tablesDBService = await getTablesDBService(); + await tablesDBService.delete(db.$id); + success(`Deleted ${db.name} ( ${db.$id} )`); + needsResync = true; + } catch (e: any) { + error( + `Failed to delete database ${db.name} ( ${db.$id} ): ${e.message}`, + ); + throw new Error( + `Database sync failed during deletion of ${db.$id}. Some changes may have been applied.`, + ); + } + } + + // Apply creations + for (const db of toCreate) { + try { + log(`Creating database ${db.name} ( ${db.$id} ) ...`); + const tablesDBService = await getTablesDBService(); + await tablesDBService.create(db.$id, db.name, db.enabled); + success(`Created ${db.name} ( ${db.$id} )`); + } catch (e: any) { + error( + `Failed to create database ${db.name} ( ${db.$id} ): ${e.message}`, + ); + throw new Error( + `Database sync failed during creation of ${db.$id}. Some changes may have been applied.`, + ); + } + } + + // Apply updates + for (const db of toUpdate) { + try { + log(`Updating database ${db.name} ( ${db.$id} ) ...`); + const tablesDBService = await getTablesDBService(); + await tablesDBService.update(db.$id, db.name, db.enabled); + success(`Updated ${db.name} ( ${db.$id} )`); + } catch (e: any) { + error( + `Failed to update database ${db.name} ( ${db.$id} ): ${e.message}`, + ); + throw new Error( + `Database sync failed during update of ${db.$id}. 
Some changes may have been applied.`, + ); + } + } + + return { applied: true, resyncNeeded: needsResync }; + }; From b887c5c779b0c19c03ba13fa64b7bb2ae904ba5f Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Tue, 6 Jan 2026 17:49:19 +0530 Subject: [PATCH 21/41] remove methods --- lib/commands/push.ts | 327 ------------------------------------------- 1 file changed, 327 deletions(-) diff --git a/lib/commands/push.ts b/lib/commands/push.ts index 730f093b..481c52a5 100644 --- a/lib/commands/push.ts +++ b/lib/commands/push.ts @@ -864,333 +864,6 @@ export class Push { }; } - public async pushSingleFunction( - func: any, - options: { - withVariables?: boolean; - code?: boolean; - } = {}, - ): Promise<{ - success: boolean; - deploymentId?: string; - error?: string; - errorCode?: string; - }> { - const functionsService = await getFunctionsService(); - let functionExists = false; - - try { - const response = await functionsService.get({ functionId: func["$id"] }); - functionExists = true; - - if (response.runtime !== func.runtime) { - return { - success: false, - error: `Runtime mismatch! (local=${func.runtime},remote=${response.runtime}) Please delete remote function or update your appwrite.config.json`, - }; - } - - await functionsService.update({ - functionId: func["$id"], - name: func.name, - runtime: func.runtime, - execute: func.execute, - events: func.events, - schedule: func.schedule, - timeout: func.timeout, - enabled: func.enabled, - logging: func.logging, - entrypoint: func.entrypoint, - commands: func.commands, - scopes: func.scopes, - specification: func.specification, - }); - } catch (e: any) { - if (Number(e.code) === 404) { - functionExists = false; - } else { - return { success: false, error: e.message }; - } - } - - if (!functionExists) { - try { - await functionsService.create({ - functionId: func.$id, - name: func.name, - runtime: func.runtime, - execute: func.execute, - events: func.events, - schedule: func.schedule, - timeout: func.timeout, - enabled: func.enabled, - logging: func.logging, - entrypoint: func.entrypoint, - commands: func.commands, - scopes: func.scopes, - specification: func.specification, - }); - - let domain = ""; - try { - const consoleService = await getConsoleService(); - const variables = await consoleService.variables(); - domain = ID.unique() + "." + variables["_APP_DOMAIN_FUNCTIONS"]; - } catch (error) { - return { success: false, error: "Error fetching console variables." }; - } - - try { - const proxyService = await getProxyService(); - await proxyService.createFunctionRule(domain, func.$id); - } catch (error) { - return { success: false, error: "Error creating function rule." 
}; - } - } catch (e: any) { - return { success: false, error: e.message }; - } - } - - if (options.withVariables) { - try { - const { variables } = await paginate( - async (args: any) => { - return await functionsService.listVariables({ - functionId: args.functionId, - }); - }, - { functionId: func["$id"] }, - 100, - "variables", - ); - - await Promise.all( - variables.map(async (variable: any) => { - await functionsService.deleteVariable({ - functionId: func["$id"], - variableId: variable["$id"], - }); - }), - ); - - const envFileLocation = `${func["path"]}/.env`; - let envVariables: Array<{ key: string; value: string }> = []; - try { - if (fs.existsSync(envFileLocation)) { - const envObject = parseDotenv( - fs.readFileSync(envFileLocation, "utf8"), - ); - envVariables = Object.entries(envObject || {}).map( - ([key, value]) => ({ key, value }), - ); - } - } catch (error) { - envVariables = []; - } - - await Promise.all( - envVariables.map(async (variable) => { - await functionsService.createVariable({ - functionId: func["$id"], - key: variable.key, - value: variable.value, - secret: false, - }); - }), - ); - } catch (e: any) { - return { - success: false, - error: `Failed to update variables: ${e.message}`, - }; - } - } - - if (options.code === false) { - return { success: true }; - } - - try { - const response = await functionsService.createDeployment({ - functionId: func["$id"], - entrypoint: func.entrypoint, - commands: func.commands, - code: func.path, - activate: true, - }); - - return { success: true, deploymentId: response["$id"] }; - } catch (e: any) { - return { - success: false, - error: e.message ?? "An unknown error occurred. Please try again.", - errorCode: e.code, - }; - } - } - - public async pushSingleSite( - site: any, - options: { - withVariables?: boolean; - code?: boolean; - } = {}, - ): Promise<{ - success: boolean; - deploymentId?: string; - error?: string; - errorCode?: string; - }> { - const sitesService = await getSitesService(); - let siteExists = false; - - try { - const response = await sitesService.get({ siteId: site["$id"] }); - siteExists = true; - - if (response.framework !== site.framework) { - return { - success: false, - error: `Framework mismatch! (local=${site.framework},remote=${response.framework}) Please delete remote site or update your appwrite.config.json`, - }; - } - - await sitesService.update({ - siteId: site["$id"], - name: site.name, - framework: site.framework, - enabled: site.enabled, - logging: site.logging, - timeout: site.timeout, - installCommand: site.installCommand, - buildCommand: site.buildCommand, - outputDirectory: site.outputDirectory, - buildRuntime: site.buildRuntime, - adapter: site.adapter, - specification: site.specification, - }); - } catch (e: any) { - if (Number(e.code) === 404) { - siteExists = false; - } else { - return { success: false, error: e.message }; - } - } - - if (!siteExists) { - try { - await sitesService.create({ - siteId: site.$id, - name: site.name, - framework: site.framework, - enabled: site.enabled, - logging: site.logging, - timeout: site.timeout, - installCommand: site.installCommand, - buildCommand: site.buildCommand, - outputDirectory: site.outputDirectory, - buildRuntime: site.buildRuntime, - adapter: site.adapter, - specification: site.specification, - }); - - let domain = ""; - try { - const consoleService = await getConsoleService(); - const variables = await consoleService.variables(); - domain = ID.unique() + "." 
+ variables["_APP_DOMAIN_SITES"]; - } catch (error) { - return { success: false, error: "Error fetching console variables." }; - } - - try { - const proxyService = await getProxyService(); - await proxyService.createSiteRule(domain, site.$id); - } catch (error) { - return { success: false, error: "Error creating site rule." }; - } - } catch (e: any) { - return { success: false, error: e.message }; - } - } - - if (options.withVariables) { - try { - const { variables } = await paginate( - async (args: any) => { - return await sitesService.listVariables({ siteId: args.siteId }); - }, - { siteId: site["$id"] }, - 100, - "variables", - ); - - await Promise.all( - variables.map(async (variable: any) => { - await sitesService.deleteVariable({ - siteId: site["$id"], - variableId: variable["$id"], - }); - }), - ); - - const envFileLocation = `${site["path"]}/.env`; - let envVariables: Array<{ key: string; value: string }> = []; - try { - if (fs.existsSync(envFileLocation)) { - const envObject = parseDotenv( - fs.readFileSync(envFileLocation, "utf8"), - ); - envVariables = Object.entries(envObject || {}).map( - ([key, value]) => ({ key, value }), - ); - } - } catch (error) { - envVariables = []; - } - - await Promise.all( - envVariables.map(async (variable) => { - await sitesService.createVariable({ - siteId: site["$id"], - key: variable.key, - value: variable.value, - secret: false, - }); - }), - ); - } catch (e: any) { - return { - success: false, - error: `Failed to update variables: ${e.message}`, - }; - } - } - - if (options.code === false) { - return { success: true }; - } - - try { - const response = await sitesService.createDeployment({ - siteId: site["$id"], - installCommand: site.installCommand, - buildCommand: site.buildCommand, - outputDirectory: site.outputDirectory, - code: site.path, - activate: true, - }); - - return { success: true, deploymentId: response["$id"] }; - } catch (e: any) { - return { - success: false, - error: e.message ?? "An unknown error occurred. Please try again.", - errorCode: e.code, - }; - } - } - public async getDeploymentStatus( resourceId: string, deploymentId: string, From 093a79362c0918fef79afedbf39e5b3e5ad5f53c Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Tue, 6 Jan 2026 17:53:15 +0530 Subject: [PATCH 22/41] improvements --- lib/commands/push.ts | 119 ++++++++++++++++++++++++------------------- 1 file changed, 66 insertions(+), 53 deletions(-) diff --git a/lib/commands/push.ts b/lib/commands/push.ts index 481c52a5..8ca729e2 100644 --- a/lib/commands/push.ts +++ b/lib/commands/push.ts @@ -51,7 +51,11 @@ import { getTeamsService, getProjectsService, } from "../services.js"; -import { ApiService, AuthMethod } from "@appwrite.io/console"; +import { + ApiService, + AuthMethod, + AppwriteException, +} from "@appwrite.io/console"; import { checkDeployConditions } from "../utils.js"; import { Pools } from "./utils/pools.js"; import { Attributes, Collection } from "./utils/attributes.js"; @@ -98,62 +102,65 @@ export class Push { const settings = config.settings ?? 
{}; if (projectName) { - await projectsService.update(projectId, projectName); + await projectsService.update({ + projectId: projectId, + name: projectName, + }); } if (settings.services) { for (let [service, status] of Object.entries(settings.services)) { - await projectsService.updateServiceStatus( - projectId, - service as ApiService, - status, - ); + await projectsService.updateServiceStatus({ + projectId: projectId, + service: service as ApiService, + status: status, + }); } } if (settings.auth) { if (settings.auth.security) { - await projectsService.updateAuthDuration( + await projectsService.updateAuthDuration({ projectId, - settings.auth.security.duration, - ); - await projectsService.updateAuthLimit( + duration: settings.auth.security.duration, + }); + await projectsService.updateAuthLimit({ projectId, - settings.auth.security.limit, - ); - await projectsService.updateAuthSessionsLimit( + limit: settings.auth.security.limit, + }); + await projectsService.updateAuthSessionsLimit({ projectId, - settings.auth.security.sessionsLimit, - ); - await projectsService.updateAuthPasswordDictionary( + limit: settings.auth.security.sessionsLimit, + }); + await projectsService.updateAuthPasswordDictionary({ projectId, - settings.auth.security.passwordDictionary, - ); - await projectsService.updateAuthPasswordHistory( + enabled: settings.auth.security.passwordDictionary, + }); + await projectsService.updateAuthPasswordHistory({ projectId, - settings.auth.security.passwordHistory, - ); - await projectsService.updatePersonalDataCheck( + limit: settings.auth.security.passwordHistory, + }); + await projectsService.updatePersonalDataCheck({ projectId, - settings.auth.security.personalDataCheck, - ); - await projectsService.updateSessionAlerts( + enabled: settings.auth.security.personalDataCheck, + }); + await projectsService.updateSessionAlerts({ projectId, - settings.auth.security.sessionAlerts, - ); - await projectsService.updateMockNumbers( + alerts: settings.auth.security.sessionAlerts, + }); + await projectsService.updateMockNumbers({ projectId, - settings.auth.security.mockNumbers, - ); + numbers: settings.auth.security.mockNumbers, + }); } if (settings.auth.methods) { for (let [method, status] of Object.entries(settings.auth.methods)) { - await projectsService.updateAuthStatus( + await projectsService.updateAuthStatus({ projectId, - method as AuthMethod, - status, - ); + method: method as AuthMethod, + status: status, + }); } } } @@ -176,8 +183,8 @@ export class Push { antivirus: bucket.antivirus, compression: bucket.compression, }); - } catch (e: any) { - if (Number(e.code) === 404) { + } catch (e: unknown) { + if (e instanceof AppwriteException && Number(e.code) === 404) { await storageService.createBucket({ bucketId: bucket["$id"], name: bucket.name, @@ -201,10 +208,16 @@ export class Push { try { await teamsService.get(team["$id"]); - await teamsService.updateName(team["$id"], team.name); - } catch (e: any) { - if (Number(e.code) === 404) { - await teamsService.create(team["$id"], team.name); + await teamsService.updateName({ + teamId: team["$id"], + name: team.name, + }); + } catch (e: unknown) { + if (e instanceof AppwriteException && Number(e.code) === 404) { + await teamsService.create({ + teamId: team["$id"], + name: team.name, + }); } else { throw e; } @@ -216,18 +229,18 @@ export class Push { try { await messagingService.getTopic(topic["$id"]); - await messagingService.updateTopic( - topic["$id"], - topic.name, - topic.subscribe, - ); - } catch (e: any) { - if (Number(e.code) === 
404) { - await messagingService.createTopic( - topic["$id"], - topic.name, - topic.subscribe, - ); + await messagingService.updateTopic({ + topicId: topic["$id"], + name: topic.name, + subscribe: topic.subscribe, + }); + } catch (e: unknown) { + if (e instanceof AppwriteException && Number(e.code) === 404) { + await messagingService.createTopic({ + topicId: topic["$id"], + name: topic.name, + subscribe: topic.subscribe, + }); } else { throw e; } From 0342f1ac99f26d5521a7970a6bfe400f372d04e8 Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Tue, 6 Jan 2026 18:14:45 +0530 Subject: [PATCH 23/41] fix constructor --- lib/commands/push.ts | 230 +++++++++++++++++++++++++++++++++---------- 1 file changed, 177 insertions(+), 53 deletions(-) diff --git a/lib/commands/push.ts b/lib/commands/push.ts index 8ca729e2..fb1077ca 100644 --- a/lib/commands/push.ts +++ b/lib/commands/push.ts @@ -51,10 +51,12 @@ import { getTeamsService, getProjectsService, } from "../services.js"; +import { sdkForProject, sdkForConsole } from "../sdks.js"; import { ApiService, AuthMethod, AppwriteException, + Client, } from "@appwrite.io/console"; import { checkDeployConditions } from "../utils.js"; import { Pools } from "./utils/pools.js"; @@ -93,10 +95,16 @@ interface PushTableOptions { } export class Push { - constructor() {} + private projectClient: Client; + private consoleClient: Client; + + constructor(projectClient: Client, consoleClient: Client) { + this.projectClient = projectClient; + this.consoleClient = consoleClient; + } public async pushSettings(config: ConfigType): Promise { - const projectsService = await getProjectsService(); + const projectsService = await getProjectsService(this.consoleClient); const projectId = config.projectId; const projectName = config.projectName; const settings = config.settings ?? 
{}; @@ -167,7 +175,7 @@ export class Push { } public async pushBucket(bucket: any): Promise { - const storageService = await getStorageService(); + const storageService = await getStorageService(this.projectClient); try { await storageService.getBucket(bucket["$id"]); @@ -204,7 +212,7 @@ export class Push { } public async pushTeam(team: any): Promise { - const teamsService = await getTeamsService(); + const teamsService = await getTeamsService(this.projectClient); try { await teamsService.get(team["$id"]); @@ -225,7 +233,7 @@ export class Push { } public async pushMessagingTopic(topic: any): Promise { - const messagingService = await getMessagingService(); + const messagingService = await getMessagingService(this.projectClient); try { await messagingService.getTopic(topic["$id"]); @@ -247,6 +255,79 @@ export class Push { } } + public async pushBuckets(buckets: any[]): Promise<{ + successfullyPushed: number; + errors: any[]; + }> { + let successfullyPushed = 0; + const errors: any[] = []; + + for (const bucket of buckets) { + try { + log(`Pushing bucket ${chalk.bold(bucket["name"])} ...`); + await this.pushBucket(bucket); + successfullyPushed++; + } catch (e: any) { + errors.push(e); + error(`Failed to push bucket ${bucket["name"]}: ${e.message}`); + } + } + + return { + successfullyPushed, + errors, + }; + } + + public async pushTeams(teams: any[]): Promise<{ + successfullyPushed: number; + errors: any[]; + }> { + let successfullyPushed = 0; + const errors: any[] = []; + + for (const team of teams) { + try { + log(`Pushing team ${chalk.bold(team["name"])} ...`); + await this.pushTeam(team); + successfullyPushed++; + } catch (e: any) { + errors.push(e); + error(`Failed to push team ${team["name"]}: ${e.message}`); + } + } + + return { + successfullyPushed, + errors, + }; + } + + public async pushMessagingTopics(topics: any[]): Promise<{ + successfullyPushed: number; + errors: any[]; + }> { + let successfullyPushed = 0; + const errors: any[] = []; + + for (const topic of topics) { + try { + log(`Pushing topic ${chalk.bold(topic["name"])} ...`); + await this.pushMessagingTopic(topic); + success(`Created ${topic.name} ( ${topic["$id"]} )`); + successfullyPushed++; + } catch (e: any) { + errors.push(e); + error(`Failed to push topic ${topic["name"]}: ${e.message}`); + } + } + + return { + successfullyPushed, + errors, + }; + } + public async pushFunction( functions: any[], options: { @@ -284,7 +365,7 @@ export class Push { }); updaterRow.update({ status: "Getting" }).startSpinner(SPINNER_DOTS); - const functionsService = await getFunctionsService(); + const functionsService = await getFunctionsService(this.projectClient); try { response = await functionsService.get({ functionId: func["$id"] }); functionExists = true; @@ -351,7 +432,9 @@ export class Push { let domain = ""; try { - const consoleService = await getConsoleService(); + const consoleService = await getConsoleService( + this.projectClient, + ); const variables = await consoleService.variables(); domain = ID.unique() + "." 
+ variables["_APP_DOMAIN_FUNCTIONS"]; } catch (error) { @@ -360,7 +443,7 @@ export class Push { } try { - const proxyService = await getProxyService(); + const proxyService = await getProxyService(this.projectClient); await proxyService.createFunctionRule(domain, func.$id); } catch (error) { console.error("Error creating function rule."); @@ -383,10 +466,12 @@ export class Push { .update({ status: "Updating variables" }) .replaceSpinner(SPINNER_DOTS); - const functionsService = await getFunctionsService(); + const functionsServiceForVars = await getFunctionsService( + this.projectClient, + ); const { variables } = await paginate( async (args: any) => { - return await functionsService.listVariables({ + return await functionsServiceForVars.listVariables({ functionId: args.functionId, }); }, @@ -399,8 +484,10 @@ export class Push { await Promise.all( variables.map(async (variable: any) => { - const functionsService = await getFunctionsService(); - await functionsService.deleteVariable({ + const functionsServiceDel = await getFunctionsService( + this.projectClient, + ); + await functionsServiceDel.deleteVariable({ functionId: func["$id"], variableId: variable["$id"], }); @@ -423,8 +510,10 @@ export class Push { } await Promise.all( envVariables.map(async (variable) => { - const functionsService = await getFunctionsService(); - await functionsService.createVariable({ + const functionsServiceCreate = await getFunctionsService( + this.projectClient, + ); + await functionsServiceCreate.createVariable({ functionId: func["$id"], key: variable.key, value: variable.value, @@ -444,8 +533,10 @@ export class Push { try { updaterRow.update({ status: "Pushing" }).replaceSpinner(SPINNER_DOTS); - const functionsService = await getFunctionsService(); - response = await functionsService.createDeployment({ + const functionsServiceDeploy = await getFunctionsService( + this.projectClient, + ); + response = await functionsServiceDeploy.createDeployment({ functionId: func["$id"], entrypoint: func.entrypoint, commands: func.commands, @@ -482,8 +573,10 @@ export class Push { }); while (true) { - const functionsService = await getFunctionsService(); - response = await functionsService.getDeployment({ + const functionsServicePoll = await getFunctionsService( + this.projectClient, + ); + response = await functionsServicePoll.getDeployment({ functionId: func["$id"], deploymentId: deploymentId, }); @@ -493,8 +586,10 @@ export class Push { successfullyDeployed++; let url = ""; - const proxyService = await getProxyService(); - const res = await proxyService.listRules([ + const proxyServiceUrl = await getProxyService( + this.projectClient, + ); + const res = await proxyServiceUrl.listRules([ JSON.stringify({ method: "limit", values: [1] }), JSON.stringify({ method: "equal", @@ -601,7 +696,7 @@ export class Push { updaterRow.update({ status: "Getting" }).startSpinner(SPINNER_DOTS); - const sitesService = await getSitesService(); + const sitesService = await getSitesService(this.projectClient); try { response = await sitesService.get({ siteId: site["$id"] }); siteExists = true; @@ -666,7 +761,9 @@ export class Push { let domain = ""; try { - const consoleService = await getConsoleService(); + const consoleService = await getConsoleService( + this.projectClient, + ); const variables = await consoleService.variables(); domain = ID.unique() + "." 
+ variables["_APP_DOMAIN_SITES"]; } catch (error) { @@ -675,7 +772,7 @@ export class Push { } try { - const proxyService = await getProxyService(); + const proxyService = await getProxyService(this.projectClient); await proxyService.createSiteRule(domain, site.$id); } catch (error) { console.error("Error creating site rule."); @@ -698,10 +795,12 @@ export class Push { .update({ status: "Creating variables" }) .replaceSpinner(SPINNER_DOTS); - const sitesService = await getSitesService(); + const sitesServiceForVars = await getSitesService(this.projectClient); const { variables } = await paginate( async (args: any) => { - return await sitesService.listVariables({ siteId: args.siteId }); + return await sitesServiceForVars.listVariables({ + siteId: args.siteId, + }); }, { siteId: site["$id"], @@ -712,8 +811,8 @@ export class Push { await Promise.all( variables.map(async (variable: any) => { - const sitesService = await getSitesService(); - await sitesService.deleteVariable({ + const sitesServiceDel = await getSitesService(this.projectClient); + await sitesServiceDel.deleteVariable({ siteId: site["$id"], variableId: variable["$id"], }); @@ -736,8 +835,10 @@ export class Push { } await Promise.all( envVariables.map(async (variable) => { - const sitesService = await getSitesService(); - await sitesService.createVariable({ + const sitesServiceCreate = await getSitesService( + this.projectClient, + ); + await sitesServiceCreate.createVariable({ siteId: site["$id"], key: variable.key, value: variable.value, @@ -757,8 +858,8 @@ export class Push { try { updaterRow.update({ status: "Pushing" }).replaceSpinner(SPINNER_DOTS); - const sitesService = await getSitesService(); - response = await sitesService.createDeployment({ + const sitesServiceDeploy = await getSitesService(this.projectClient); + response = await sitesServiceDeploy.createDeployment({ siteId: site["$id"], installCommand: site.installCommand, buildCommand: site.buildCommand, @@ -796,8 +897,10 @@ export class Push { }); while (true) { - const sitesService = await getSitesService(); - response = await sitesService.getDeployment({ + const sitesServicePoll = await getSitesService( + this.projectClient, + ); + response = await sitesServicePoll.getDeployment({ siteId: site["$id"], deploymentId: deploymentId, }); @@ -807,8 +910,10 @@ export class Push { successfullyDeployed++; let url = ""; - const proxyService = await getProxyService(); - const res = await proxyService.listRules([ + const proxyServiceUrl = await getProxyService( + this.projectClient, + ); + const res = await proxyServiceUrl.listRules([ JSON.stringify({ method: "limit", values: [1] }), JSON.stringify({ method: "equal", @@ -886,7 +991,7 @@ export class Push { url?: string; }> { if (resourceType === "function") { - const functionsService = await getFunctionsService(); + const functionsService = await getFunctionsService(this.projectClient); const response = await functionsService.getDeployment({ functionId: resourceId, deploymentId: deploymentId, @@ -896,7 +1001,7 @@ export class Push { let url = ""; if (status === "ready") { - const proxyService = await getProxyService(); + const proxyService = await getProxyService(this.projectClient); const res = await proxyService.listRules([ JSON.stringify({ method: "limit", values: [1] }), JSON.stringify({ @@ -923,7 +1028,7 @@ export class Push { return { status, url }; } else { - const sitesService = await getSitesService(); + const sitesService = await getSitesService(this.projectClient); const response = await sitesService.getDeployment({ 
siteId: resourceId, deploymentId: deploymentId, @@ -933,7 +1038,7 @@ export class Push { let url = ""; if (status === "ready") { - const proxyService = await getProxyService(); + const proxyService = await getProxyService(this.projectClient); const res = await proxyService.listRules([ JSON.stringify({ method: "limit", values: [1] }), JSON.stringify({ @@ -964,7 +1069,9 @@ export class Push { } async function createPushInstance(): Promise { - return new Push(); + const projectClient = await sdkForProject(); + const consoleClient = await sdkForConsole(); + return new Push(projectClient, consoleClient); } const pushResources = async ({ @@ -1821,13 +1928,19 @@ const pushBucket = async (): Promise => { log("Pushing buckets ..."); const pushInstance = await createPushInstance(); + const result = await pushInstance.pushBuckets(buckets); + + const { successfullyPushed, errors } = result; - for (let bucket of buckets) { - log(`Pushing bucket ${chalk.bold(bucket["name"])} ...`); - await pushInstance.pushBucket(bucket); + if (successfullyPushed === 0) { + error("No buckets were pushed."); + } else { + success(`Successfully pushed ${successfullyPushed} buckets.`); } - success(`Successfully pushed ${buckets.length} buckets.`); + if (cliConfig.verbose) { + errors.forEach((e) => console.error(e)); + } }; const pushTeam = async (): Promise => { @@ -1879,13 +1992,19 @@ const pushTeam = async (): Promise => { log("Pushing teams ..."); const pushInstance = await createPushInstance(); + const result = await pushInstance.pushTeams(teams); + + const { successfullyPushed, errors } = result; - for (let team of teams) { - log(`Pushing team ${chalk.bold(team["name"])} ...`); - await pushInstance.pushTeam(team); + if (successfullyPushed === 0) { + error("No teams were pushed."); + } else { + success(`Successfully pushed ${successfullyPushed} teams.`); } - success(`Successfully pushed ${teams.length} teams.`); + if (cliConfig.verbose) { + errors.forEach((e) => console.error(e)); + } }; const pushMessagingTopic = async (): Promise => { @@ -1937,14 +2056,19 @@ const pushMessagingTopic = async (): Promise => { log("Pushing topics ..."); const pushInstance = await createPushInstance(); + const result = await pushInstance.pushMessagingTopics(topics); - for (let topic of topics) { - log(`Pushing topic ${chalk.bold(topic["name"])} ...`); - await pushInstance.pushMessagingTopic(topic); - success(`Created ${topic.name} ( ${topic["$id"]} )`); + const { successfullyPushed, errors } = result; + + if (successfullyPushed === 0) { + error("No topics were pushed."); + } else { + success(`Successfully pushed ${successfullyPushed} topics.`); } - success(`Successfully pushed ${topics.length} topics.`); + if (cliConfig.verbose) { + errors.forEach((e) => console.error(e)); + } }; export const push = new Command("push") From 2395d93a802b564906a8a32b913413bbbbeb6b31 Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Wed, 7 Jan 2026 08:39:05 +0530 Subject: [PATCH 24/41] more refactor --- lib/commands/push.ts | 992 +++++++++++++++++++++++++------------------ 1 file changed, 581 insertions(+), 411 deletions(-) diff --git a/lib/commands/push.ts b/lib/commands/push.ts index fb1077ca..d7ae078c 100644 --- a/lib/commands/push.ts +++ b/lib/commands/push.ts @@ -15,6 +15,8 @@ import { KeysCollection, KeysTable, } from "../config.js"; +import type { SettingsType } from "./config.js"; +import type { ConfigType } from "./config.js"; import { Spinner, SPINNER_DOTS } from "../spinner.js"; import { paginate } from "../paginate.js"; import { @@ -67,7 +69,6 
@@ import { getObjectChanges, } from "./utils/change-approval.js"; import { checkAndApplyTablesDBChanges } from "./utils/database-sync.js"; -import type { ConfigType } from "./config.js"; const POLL_DEBOUNCE = 2000; // Milliseconds const POLL_DEFAULT_VALUE = 30; @@ -103,7 +104,180 @@ export class Push { this.consoleClient = consoleClient; } - public async pushSettings(config: ConfigType): Promise { + public async pushResources( + config: ConfigType, + options: { + skipDeprecated?: boolean; + functionOptions?: { + async?: boolean; + code?: boolean; + withVariables?: boolean; + }; + siteOptions?: { + async?: boolean; + code?: boolean; + withVariables?: boolean; + }; + attempts?: number; + } = { skipDeprecated: true }, + ): Promise<{ + results: Record; + errors: any[]; + }> { + const { skipDeprecated = true } = options; + const results: Record = {}; + const allErrors: any[] = []; + + // Push settings + if (config.projectName || config.settings) { + try { + log("Pushing settings ..."); + await this.pushSettings({ + projectId: config.projectId, + projectName: config.projectName, + settings: config.settings, + }); + results.settings = { success: true }; + } catch (e: any) { + allErrors.push(e); + results.settings = { success: false, error: e.message }; + } + } + + // Push buckets + if (config.buckets && config.buckets.length > 0) { + try { + log("Pushing buckets ..."); + const result = await this.pushBuckets(config.buckets); + results.buckets = result; + allErrors.push(...result.errors); + } catch (e: any) { + allErrors.push(e); + results.buckets = { successfullyPushed: 0, errors: [e] }; + } + } + + // Push teams + if (config.teams && config.teams.length > 0) { + try { + log("Pushing teams ..."); + const result = await this.pushTeams(config.teams); + results.teams = result; + allErrors.push(...result.errors); + } catch (e: any) { + allErrors.push(e); + results.teams = { successfullyPushed: 0, errors: [e] }; + } + } + + // Push messaging topics + if (config.topics && config.topics.length > 0) { + try { + log("Pushing topics ..."); + const result = await this.pushMessagingTopics(config.topics); + results.topics = result; + allErrors.push(...result.errors); + } catch (e: any) { + allErrors.push(e); + results.topics = { successfullyPushed: 0, errors: [e] }; + } + } + + // Push functions + if (config.functions && config.functions.length > 0) { + try { + log("Pushing functions ..."); + const result = await this.pushFunctions( + config.functions, + options.functionOptions, + ); + results.functions = result; + allErrors.push(...result.errors); + } catch (e: any) { + allErrors.push(e); + results.functions = { + successfullyPushed: 0, + successfullyDeployed: 0, + failedDeployments: [], + errors: [e], + }; + } + } + + // Push sites + if (config.sites && config.sites.length > 0) { + try { + log("Pushing sites ..."); + const result = await this.pushSites(config.sites, options.siteOptions); + results.sites = result; + allErrors.push(...result.errors); + } catch (e: any) { + allErrors.push(e); + results.sites = { + successfullyPushed: 0, + successfullyDeployed: 0, + failedDeployments: [], + errors: [e], + }; + } + } + + // Push tables + if (config.tablesDB && config.tablesDB.length > 0) { + try { + log("Pushing tables ..."); + const result = await this.pushTables(config.tablesDB, options.attempts); + results.tables = result; + allErrors.push(...result.errors); + } catch (e: any) { + allErrors.push(e); + results.tables = { successfullyPushed: 0, errors: [e] }; + } + } + + // Push collections (unless 
skipDeprecated is true) + if ( + !skipDeprecated && + config.collections && + config.collections.length > 0 + ) { + try { + log("Pushing collections ..."); + // Add database names to collections + const collectionsWithDbNames = config.collections.map( + (collection: any) => { + const database = config.databases?.find( + (db: any) => db.$id === collection.databaseId, + ); + return { + ...collection, + databaseName: database?.name ?? collection.databaseId, + }; + }, + ); + const result = await this.pushCollections( + collectionsWithDbNames, + options.attempts, + ); + results.collections = result; + allErrors.push(...result.errors); + } catch (e: any) { + allErrors.push(e); + results.collections = { successfullyPushed: 0, errors: [e] }; + } + } + + return { + results, + errors: allErrors, + }; + } + + public async pushSettings(config: { + projectId: string; + projectName?: string; + settings?: SettingsType; + }): Promise { const projectsService = await getProjectsService(this.consoleClient); const projectId = config.projectId; const projectName = config.projectName; @@ -174,87 +348,6 @@ export class Push { } } - public async pushBucket(bucket: any): Promise { - const storageService = await getStorageService(this.projectClient); - - try { - await storageService.getBucket(bucket["$id"]); - await storageService.updateBucket({ - bucketId: bucket["$id"], - name: bucket.name, - permissions: bucket["$permissions"], - fileSecurity: bucket.fileSecurity, - enabled: bucket.enabled, - maximumFileSize: bucket.maximumFileSize, - allowedFileExtensions: bucket.allowedFileExtensions, - encryption: bucket.encryption, - antivirus: bucket.antivirus, - compression: bucket.compression, - }); - } catch (e: unknown) { - if (e instanceof AppwriteException && Number(e.code) === 404) { - await storageService.createBucket({ - bucketId: bucket["$id"], - name: bucket.name, - permissions: bucket["$permissions"], - fileSecurity: bucket.fileSecurity, - enabled: bucket.enabled, - maximumFileSize: bucket.maximumFileSize, - allowedFileExtensions: bucket.allowedFileExtensions, - compression: bucket.compression, - encryption: bucket.encryption, - antivirus: bucket.antivirus, - }); - } else { - throw e; - } - } - } - - public async pushTeam(team: any): Promise { - const teamsService = await getTeamsService(this.projectClient); - - try { - await teamsService.get(team["$id"]); - await teamsService.updateName({ - teamId: team["$id"], - name: team.name, - }); - } catch (e: unknown) { - if (e instanceof AppwriteException && Number(e.code) === 404) { - await teamsService.create({ - teamId: team["$id"], - name: team.name, - }); - } else { - throw e; - } - } - } - - public async pushMessagingTopic(topic: any): Promise { - const messagingService = await getMessagingService(this.projectClient); - - try { - await messagingService.getTopic(topic["$id"]); - await messagingService.updateTopic({ - topicId: topic["$id"], - name: topic.name, - subscribe: topic.subscribe, - }); - } catch (e: unknown) { - if (e instanceof AppwriteException && Number(e.code) === 404) { - await messagingService.createTopic({ - topicId: topic["$id"], - name: topic.name, - subscribe: topic.subscribe, - }); - } else { - throw e; - } - } - } - public async pushBuckets(buckets: any[]): Promise<{ successfullyPushed: number; errors: any[]; @@ -265,7 +358,41 @@ export class Push { for (const bucket of buckets) { try { log(`Pushing bucket ${chalk.bold(bucket["name"])} ...`); - await this.pushBucket(bucket); + const storageService = await getStorageService(this.projectClient); 
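[Editor's note, not part of the patch: a minimal sketch of how a caller could drive the new Push.pushResources() orchestrator introduced above. The sdkForProject/sdkForConsole factories exist in this codebase but their import path is an assumption here, as is the helper's name.]

import { Push } from "./push.js";
import type { ConfigType } from "./config.js";
import { localConfig } from "../config.js";
import { sdkForProject, sdkForConsole } from "../sdks.js"; // assumed location

// Hypothetical caller: push every configured resource type in one call and
// let each sub-push report its own success count and errors.
async function pushAllResources(): Promise<void> {
  const push = new Push(await sdkForProject(), await sdkForConsole());
  const config = localConfig.getProject() as ConfigType;

  const { results, errors } = await push.pushResources(config, {
    skipDeprecated: true,
    functionOptions: { code: true, withVariables: false },
    siteOptions: { code: true, withVariables: false },
  });

  for (const [resource, result] of Object.entries(results)) {
    console.log(resource, result);
  }
  if (errors.length > 0) {
    process.exitCode = 1;
  }
}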
+ + try { + await storageService.getBucket(bucket["$id"]); + await storageService.updateBucket({ + bucketId: bucket["$id"], + name: bucket.name, + permissions: bucket["$permissions"], + fileSecurity: bucket.fileSecurity, + enabled: bucket.enabled, + maximumFileSize: bucket.maximumFileSize, + allowedFileExtensions: bucket.allowedFileExtensions, + encryption: bucket.encryption, + antivirus: bucket.antivirus, + compression: bucket.compression, + }); + } catch (e: unknown) { + if (e instanceof AppwriteException && Number(e.code) === 404) { + await storageService.createBucket({ + bucketId: bucket["$id"], + name: bucket.name, + permissions: bucket["$permissions"], + fileSecurity: bucket.fileSecurity, + enabled: bucket.enabled, + maximumFileSize: bucket.maximumFileSize, + allowedFileExtensions: bucket.allowedFileExtensions, + compression: bucket.compression, + encryption: bucket.encryption, + antivirus: bucket.antivirus, + }); + } else { + throw e; + } + } + successfullyPushed++; } catch (e: any) { errors.push(e); @@ -289,7 +416,25 @@ export class Push { for (const team of teams) { try { log(`Pushing team ${chalk.bold(team["name"])} ...`); - await this.pushTeam(team); + const teamsService = await getTeamsService(this.projectClient); + + try { + await teamsService.get(team["$id"]); + await teamsService.updateName({ + teamId: team["$id"], + name: team.name, + }); + } catch (e: unknown) { + if (e instanceof AppwriteException && Number(e.code) === 404) { + await teamsService.create({ + teamId: team["$id"], + name: team.name, + }); + } else { + throw e; + } + } + successfullyPushed++; } catch (e: any) { errors.push(e); @@ -313,7 +458,27 @@ export class Push { for (const topic of topics) { try { log(`Pushing topic ${chalk.bold(topic["name"])} ...`); - await this.pushMessagingTopic(topic); + const messagingService = await getMessagingService(this.projectClient); + + try { + await messagingService.getTopic(topic["$id"]); + await messagingService.updateTopic({ + topicId: topic["$id"], + name: topic.name, + subscribe: topic.subscribe, + }); + } catch (e: unknown) { + if (e instanceof AppwriteException && Number(e.code) === 404) { + await messagingService.createTopic({ + topicId: topic["$id"], + name: topic.name, + subscribe: topic.subscribe, + }); + } else { + throw e; + } + } + success(`Created ${topic.name} ( ${topic["$id"]} )`); successfullyPushed++; } catch (e: any) { @@ -328,7 +493,7 @@ export class Push { }; } - public async pushFunction( + public async pushFunctions( functions: any[], options: { async?: boolean; @@ -658,7 +823,7 @@ export class Push { }; } - public async pushSite( + public async pushSites( sites: any[], options: { async?: boolean; @@ -982,89 +1147,291 @@ export class Push { }; } - public async getDeploymentStatus( - resourceId: string, - deploymentId: string, - resourceType: "function" | "site", + public async pushTables( + tables: any[], + attempts?: number, ): Promise<{ - status: string; - url?: string; + successfullyPushed: number; + errors: any[]; }> { - if (resourceType === "function") { - const functionsService = await getFunctionsService(this.projectClient); - const response = await functionsService.getDeployment({ - functionId: resourceId, - deploymentId: deploymentId, - }); + const pollMaxDebounces = attempts ?? 
POLL_DEFAULT_VALUE; + const pools = new Pools(pollMaxDebounces); + const attributes = new Attributes(pools); + + let tablesChanged = new Set(); + const errors: any[] = []; + + // Parallel tables actions + await Promise.all( + tables.map(async (table: any) => { + try { + const tablesDBService = await getTablesDBService(this.projectClient); + const remoteTable = await tablesDBService.getTable( + table["databaseId"], + table["$id"], + ); + + const changes: string[] = []; + if (remoteTable.name !== table.name) changes.push("name"); + if (remoteTable.rowSecurity !== table.rowSecurity) + changes.push("rowSecurity"); + if (remoteTable.enabled !== table.enabled) changes.push("enabled"); + if ( + JSON.stringify(remoteTable["$permissions"]) !== + JSON.stringify(table["$permissions"]) + ) + changes.push("permissions"); + + if (changes.length > 0) { + await tablesDBService.updateTable( + table["databaseId"], + table["$id"], + table.name, + table.rowSecurity, + table["$permissions"], + ); + + success( + `Updated ${table.name} ( ${table["$id"]} ) - ${changes.join(", ")}`, + ); + tablesChanged.add(table["$id"]); + } + table.remoteVersion = remoteTable; + + table.isExisted = true; + } catch (e: any) { + if (Number(e.code) === 404) { + log( + `Table ${table.name} does not exist in the project. Creating ... `, + ); + const tablesDBService = await getTablesDBService( + this.projectClient, + ); + await tablesDBService.createTable( + table["databaseId"], + table["$id"], + table.name, + table.rowSecurity, + table["$permissions"], + ); + + success(`Created ${table.name} ( ${table["$id"]} )`); + tablesChanged.add(table["$id"]); + } else { + errors.push(e); + throw e; + } + } + }), + ); + + // Serialize attribute actions + for (let table of tables) { + let columns = table.columns; + let indexes = table.indexes; - const status = response["status"]; - let url = ""; - - if (status === "ready") { - const proxyService = await getProxyService(this.projectClient); - const res = await proxyService.listRules([ - JSON.stringify({ method: "limit", values: [1] }), - JSON.stringify({ - method: "equal", - attribute: "deploymentResourceType", - values: ["function"], - }), - JSON.stringify({ - method: "equal", - attribute: "deploymentResourceId", - values: [resourceId], - }), - JSON.stringify({ - method: "equal", - attribute: "trigger", - values: ["manual"], - }), - ]); - - if (Number(res.total) === 1) { - url = res.rules[0].domain; + if (table.isExisted) { + columns = await attributes.attributesToCreate( + table.remoteVersion.columns, + table.columns, + table as Collection, + ); + indexes = await attributes.attributesToCreate( + table.remoteVersion.indexes, + table.indexes, + table as Collection, + true, + ); + + if ( + Array.isArray(columns) && + columns.length <= 0 && + Array.isArray(indexes) && + indexes.length <= 0 + ) { + continue; } } - return { status, url }; - } else { - const sitesService = await getSitesService(this.projectClient); - const response = await sitesService.getDeployment({ - siteId: resourceId, - deploymentId: deploymentId, - }); + log( + `Pushing table ${table.name} ( ${table["databaseId"]} - ${table["$id"]} ) attributes`, + ); + + try { + await attributes.createColumns(columns, table as Collection); + } catch (e) { + errors.push(e); + throw e; + } + + try { + await attributes.createIndexes(indexes, table as Collection); + } catch (e) { + errors.push(e); + throw e; + } + tablesChanged.add(table["$id"]); + success(`Successfully pushed ${table.name} ( ${table["$id"]} )`); + } + + return { + 
successfullyPushed: tablesChanged.size, + errors, + }; + } + + public async pushCollections( + collections: any[], + attempts?: number, + ): Promise<{ + successfullyPushed: number; + errors: any[]; + }> { + const pools = new Pools(attempts ?? POLL_DEFAULT_VALUE); + const attributes = new Attributes(pools); + + const errors: any[] = []; + + const databases = Array.from( + new Set(collections.map((collection: any) => collection["databaseId"])), + ); + + // Parallel db actions + await Promise.all( + databases.map(async (databaseId: any) => { + const databasesService = await getDatabasesService(this.projectClient); + try { + const database = await databasesService.get(databaseId); - const status = response["status"]; - let url = ""; - - if (status === "ready") { - const proxyService = await getProxyService(this.projectClient); - const res = await proxyService.listRules([ - JSON.stringify({ method: "limit", values: [1] }), - JSON.stringify({ - method: "equal", - attribute: "deploymentResourceType", - values: ["site"], - }), - JSON.stringify({ - method: "equal", - attribute: "deploymentResourceId", - values: [resourceId], - }), - JSON.stringify({ - method: "equal", - attribute: "trigger", - values: ["manual"], - }), - ]); - - if (Number(res.total) === 1) { - url = res.rules[0].domain; + // Note: We can't get the local database name here since we don't have access to localConfig + // This will need to be handled by the caller if needed + const localDatabaseName = + collections.find((c: any) => c.databaseId === databaseId) + ?.databaseName ?? databaseId; + + if (database.name !== localDatabaseName) { + await databasesService.update(databaseId, localDatabaseName); + + success(`Updated ${localDatabaseName} ( ${databaseId} ) name`); + } + } catch (err) { + log(`Database ${databaseId} not found. Creating it now ...`); + + const localDatabaseName = + collections.find((c: any) => c.databaseId === databaseId) + ?.databaseName ?? databaseId; + + await databasesService.create(databaseId, localDatabaseName); } + }), + ); + + // Parallel collection actions + await Promise.all( + collections.map(async (collection: any) => { + try { + const databasesService = await getDatabasesService( + this.projectClient, + ); + const remoteCollection = await databasesService.getCollection( + collection["databaseId"], + collection["$id"], + ); + + if (remoteCollection.name !== collection.name) { + await databasesService.updateCollection( + collection["databaseId"], + collection["$id"], + collection.name, + ); + + success(`Updated ${collection.name} ( ${collection["$id"]} ) name`); + } + collection.remoteVersion = remoteCollection; + + collection.isExisted = true; + } catch (e: any) { + if (Number(e.code) === 404) { + log( + `Collection ${collection.name} does not exist in the project. Creating ... 
`, + ); + const databasesService = await getDatabasesService( + this.projectClient, + ); + await databasesService.createCollection( + collection["databaseId"], + collection["$id"], + collection.name, + collection.documentSecurity, + collection["$permissions"], + ); + } else { + errors.push(e); + throw e; + } + } + }), + ); + + let numberOfCollections = 0; + // Serialize attribute actions + for (let collection of collections) { + let collectionAttributes = collection.attributes; + let indexes = collection.indexes; + + if (collection.isExisted) { + collectionAttributes = await attributes.attributesToCreate( + collection.remoteVersion.attributes, + collection.attributes, + collection as Collection, + ); + indexes = await attributes.attributesToCreate( + collection.remoteVersion.indexes, + collection.indexes, + collection as Collection, + true, + ); + + if ( + Array.isArray(collectionAttributes) && + collectionAttributes.length <= 0 && + Array.isArray(indexes) && + indexes.length <= 0 + ) { + continue; + } + } + + log( + `Pushing collection ${collection.name} ( ${collection["databaseId"]} - ${collection["$id"]} ) attributes`, + ); + + try { + await attributes.createAttributes( + collectionAttributes, + collection as Collection, + ); + } catch (e) { + errors.push(e); + throw e; } - return { status, url }; + try { + await attributes.createIndexes(indexes, collection as Collection); + } catch (e) { + errors.push(e); + throw e; + } + numberOfCollections++; + success( + `Successfully pushed ${collection.name} ( ${collection["$id"]} )`, + ); } + + return { + successfullyPushed: numberOfCollections, + errors, + }; } } @@ -1077,26 +1444,33 @@ async function createPushInstance(): Promise { const pushResources = async ({ skipDeprecated = false, }: PushResourcesOptions = {}): Promise => { - const actions: Record Promise> = { - settings: pushSettings, - functions: pushFunction, - sites: pushSite, - collections: pushCollection, - tables: pushTable, - buckets: pushBucket, - teams: pushTeam, - messages: pushMessagingTopic, - }; - - if (skipDeprecated) { - delete actions.collections; - } - if (cliConfig.all) { - for (let action of Object.values(actions)) { - await action(); - } + checkDeployConditions(localConfig); + + const pushInstance = await createPushInstance(); + const config = localConfig.getProject() as ConfigType; + + await pushInstance.pushResources(config, { + skipDeprecated, + functionOptions: { code: true, withVariables: false }, + siteOptions: { code: true, withVariables: false }, + }); } else { + const actions: Record Promise> = { + settings: pushSettings, + functions: pushFunction, + sites: pushSite, + collections: pushCollection, + tables: pushTable, + buckets: pushBucket, + teams: pushTeam, + messages: pushMessagingTopic, + }; + + if (skipDeprecated) { + delete actions.collections; + } + const answers = await inquirer.prompt(questionsPushResources); const action = actions[answers.resource]; @@ -1265,7 +1639,7 @@ const pushSite = async ({ log("Pushing sites ..."); const pushInstance = await createPushInstance(); - const result = await pushInstance.pushSite(sites, { + const result = await pushInstance.pushSites(sites, { async: asyncDeploy, code, withVariables, @@ -1385,7 +1759,7 @@ const pushFunction = async ({ log("Pushing functions ..."); const pushInstance = await createPushInstance(); - const result = await pushInstance.pushFunction(functions, { + const result = await pushInstance.pushFunctions(functions, { async: asyncDeploy, code, withVariables, @@ -1433,10 +1807,6 @@ const pushTable = 
async ({ }: PushTableOptions = {}): Promise => { const tables: any[] = []; - const pollMaxDebounces = attempts ?? POLL_DEFAULT_VALUE; - const pools = new Pools(pollMaxDebounces); - const attributes = new Attributes(pools); - const { resyncNeeded } = await checkAndApplyTablesDBChanges(); if (resyncNeeded) { log("Resyncing configuration due to tablesDB deletions ..."); @@ -1582,117 +1952,23 @@ const pushTable = async ({ ) { return; } - let tablesChanged = new Set(); - - // Parallel tables actions - await Promise.all( - tables.map(async (table: any) => { - try { - const tablesDBService = await getTablesDBService(); - const remoteTable = await tablesDBService.getTable( - table["databaseId"], - table["$id"], - ); - - const changes: string[] = []; - if (remoteTable.name !== table.name) changes.push("name"); - if (remoteTable.rowSecurity !== table.rowSecurity) - changes.push("rowSecurity"); - if (remoteTable.enabled !== table.enabled) changes.push("enabled"); - if ( - JSON.stringify(remoteTable["$permissions"]) !== - JSON.stringify(table["$permissions"]) - ) - changes.push("permissions"); - - if (changes.length > 0) { - await tablesDBService.updateTable( - table["databaseId"], - table["$id"], - table.name, - table.rowSecurity, - table["$permissions"], - ); - - success( - `Updated ${table.name} ( ${table["$id"]} ) - ${changes.join(", ")}`, - ); - tablesChanged.add(table["$id"]); - } - table.remoteVersion = remoteTable; - - table.isExisted = true; - } catch (e: any) { - if (Number(e.code) === 404) { - log( - `Table ${table.name} does not exist in the project. Creating ... `, - ); - const tablesDBService = await getTablesDBService(); - await tablesDBService.createTable( - table["databaseId"], - table["$id"], - table.name, - table.rowSecurity, - table["$permissions"], - ); - success(`Created ${table.name} ( ${table["$id"]} )`); - tablesChanged.add(table["$id"]); - } else { - throw e; - } - } - }), - ); - - // Serialize attribute actions - for (let table of tables) { - let columns = table.columns; - let indexes = table.indexes; - - if (table.isExisted) { - columns = await attributes.attributesToCreate( - table.remoteVersion.columns, - table.columns, - table as Collection, - ); - indexes = await attributes.attributesToCreate( - table.remoteVersion.indexes, - table.indexes, - table as Collection, - true, - ); + log("Pushing tables ..."); - if ( - Array.isArray(columns) && - columns.length <= 0 && - Array.isArray(indexes) && - indexes.length <= 0 - ) { - continue; - } - } - - log( - `Pushing table ${table.name} ( ${table["databaseId"]} - ${table["$id"]} ) attributes`, - ); + const pushInstance = await createPushInstance(); + const result = await pushInstance.pushTables(tables, attempts); - try { - await attributes.createColumns(columns, table as Collection); - } catch (e) { - throw e; - } + const { successfullyPushed, errors } = result; - try { - await attributes.createIndexes(indexes, table as Collection); - } catch (e) { - throw e; - } - tablesChanged.add(table["$id"]); - success(`Successfully pushed ${table.name} ( ${table["$id"]} )`); + if (successfullyPushed === 0) { + error("No tables were pushed."); + } else { + success(`Successfully pushed ${successfullyPushed} tables.`); } - success(`Successfully pushed ${tablesChanged.size} tables`); + if (cliConfig.verbose) { + errors.forEach((e) => console.error(e)); + } }; const pushCollection = async ({ @@ -1703,10 +1979,6 @@ const pushCollection = async ({ ); const collections: any[] = []; - // Create fresh instances per operation to avoid shared state 
issues - const pools = new Pools(attempts ?? POLL_DEFAULT_VALUE); - const attributes = new Attributes(pools); - if (cliConfig.all) { checkDeployConditions(localConfig); collections.push(...localConfig.getCollections()); @@ -1732,37 +2004,11 @@ const pushCollection = async ({ return; } - const databases = Array.from( - new Set(collections.map((collection: any) => collection["databaseId"])), - ); - - // Parallel db actions - await Promise.all( - databases.map(async (databaseId: any) => { - const localDatabase = localConfig.getDatabase(databaseId); - - const databasesService = await getDatabasesService(); - try { - const database = await databasesService.get(databaseId); - - if (database.name !== (localDatabase.name ?? databaseId)) { - await databasesService.update( - databaseId, - localDatabase.name ?? databaseId, - ); - - success(`Updated ${localDatabase.name} ( ${databaseId} ) name`); - } - } catch (err) { - log(`Database ${databaseId} not found. Creating it now ...`); - - await databasesService.create( - databaseId, - localDatabase.name ?? databaseId, - ); - } - }), - ); + // Add database names to collections for the class method + collections.forEach((collection: any) => { + const localDatabase = localConfig.getDatabase(collection.databaseId); + collection.databaseName = localDatabase.name ?? collection.databaseId; + }); if ( !(await approveChanges( @@ -1784,99 +2030,23 @@ const pushCollection = async ({ ) { return; } - // Parallel collection actions - await Promise.all( - collections.map(async (collection: any) => { - try { - const databasesService = await getDatabasesService(); - const remoteCollection = await databasesService.getCollection( - collection["databaseId"], - collection["$id"], - ); - if (remoteCollection.name !== collection.name) { - await databasesService.updateCollection( - collection["databaseId"], - collection["$id"], - collection.name, - ); + log("Pushing collections ..."); - success(`Updated ${collection.name} ( ${collection["$id"]} ) name`); - } - collection.remoteVersion = remoteCollection; - - collection.isExisted = true; - } catch (e: any) { - if (Number(e.code) === 404) { - log( - `Collection ${collection.name} does not exist in the project. Creating ... 
`, - ); - const databasesService = await getDatabasesService(); - await databasesService.createCollection( - collection["databaseId"], - collection["$id"], - collection.name, - collection.documentSecurity, - collection["$permissions"], - ); - } else { - throw e; - } - } - }), - ); - let numberOfCollections = 0; - // Serialize attribute actions - for (let collection of collections) { - let collectionAttributes = collection.attributes; - let indexes = collection.indexes; - - if (collection.isExisted) { - collectionAttributes = await attributes.attributesToCreate( - collection.remoteVersion.attributes, - collection.attributes, - collection as Collection, - ); - indexes = await attributes.attributesToCreate( - collection.remoteVersion.indexes, - collection.indexes, - collection as Collection, - true, - ); - - if ( - Array.isArray(collectionAttributes) && - collectionAttributes.length <= 0 && - Array.isArray(indexes) && - indexes.length <= 0 - ) { - continue; - } - } - - log( - `Pushing collection ${collection.name} ( ${collection["databaseId"]} - ${collection["$id"]} ) attributes`, - ); + const pushInstance = await createPushInstance(); + const result = await pushInstance.pushCollections(collections, attempts); - try { - await attributes.createAttributes( - collectionAttributes, - collection as Collection, - ); - } catch (e) { - throw e; - } + const { successfullyPushed, errors } = result; - try { - await attributes.createIndexes(indexes, collection as Collection); - } catch (e) { - throw e; - } - numberOfCollections++; - success(`Successfully pushed ${collection.name} ( ${collection["$id"]} )`); + if (successfullyPushed === 0) { + error("No collections were pushed."); + } else { + success(`Successfully pushed ${successfullyPushed} collections.`); } - success(`Successfully pushed ${numberOfCollections} collections`); + if (cliConfig.verbose) { + errors.forEach((e) => console.error(e)); + } }; const pushBucket = async (): Promise => { From 1e3f49d4ba58dd9aa6cd77d80912dace81c5f049 Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Wed, 7 Jan 2026 09:23:07 +0530 Subject: [PATCH 25/41] fix package deployment --- lib/commands/pull.ts | 237 ++++++++++++++++++++++--------------------- lib/commands/push.ts | 11 +- lib/utils.ts | 24 +++++ 3 files changed, 156 insertions(+), 116 deletions(-) diff --git a/lib/commands/pull.ts b/lib/commands/pull.ts index 6035bec5..94325772 100644 --- a/lib/commands/pull.ts +++ b/lib/commands/pull.ts @@ -101,11 +101,13 @@ export class Pull { private projectClient: Client; private consoleClient: Client; private configDirectoryPath: string; + private silent: boolean; - constructor(projectClient: Client, consoleClient: Client) { + constructor(projectClient: Client, consoleClient: Client, silent = false) { this.projectClient = projectClient; this.consoleClient = consoleClient; this.configDirectoryPath = process.cwd(); + this.silent = silent; } /** @@ -115,6 +117,33 @@ export class Pull { this.configDirectoryPath = path; } + /** + * Log a message (respects silent mode) + */ + private log(message: string): void { + if (!this.silent) { + log(message); + } + } + + /** + * Log a success message (respects silent mode) + */ + private success(message: string): void { + if (!this.silent) { + success(message); + } + } + + /** + * Log a warning message (respects silent mode) + */ + private warn(message: string): void { + if (!this.silent) { + warn(message); + } + } + /** * Download and extract deployment code for a resource */ @@ -267,12 +296,15 @@ export class Pull { * Pull project 
settings */ public async pullSettings(projectId: string): Promise { + this.log("Pulling project settings ..."); + const projectsService = new Projects(this.consoleClient); - const response = await projectsService.get(projectId); - const rawSettings = response as RawProjectSettings; + const rawSettings = await projectsService.get({ projectId: projectId }); + + this.success(`Successfully pulled ${chalk.bold("all")} project settings.`); return { - projectName: response.name, + projectName: rawSettings.name, settings: createSettingsObject(rawSettings), rawSettings, }; @@ -284,6 +316,8 @@ export class Pull { public async pullFunctions( options: PullFunctionsOptions = {}, ): Promise { + this.log("Fetching functions ..."); + const originalCwd = process.cwd(); process.chdir(this.configDirectoryPath); @@ -305,6 +339,8 @@ export class Pull { }); if (fetchResponse["functions"].length <= 0) { + this.log("No functions found."); + this.success(`Successfully pulled ${chalk.bold(0)} functions.`); return []; } @@ -320,6 +356,8 @@ export class Pull { const result: FunctionConfig[] = []; for (const func of functions) { + this.log(`Pulling function ${chalk.bold(func.name)} ...`); + const funcPath = `functions/${func.name}`; const holdingVars = func.vars || []; @@ -366,6 +404,13 @@ export class Pull { } } + if (options.code === false) { + this.warn("Source code download skipped."); + } + + this.success( + `Successfully pulled ${chalk.bold(result.length)} functions.`, + ); return result; } finally { process.chdir(originalCwd); @@ -378,6 +423,8 @@ export class Pull { public async pullSites( options: PullSitesOptions = {}, ): Promise { + this.log("Fetching sites ..."); + const originalCwd = process.cwd(); process.chdir(this.configDirectoryPath); @@ -399,6 +446,8 @@ export class Pull { }); if (fetchResponse["sites"].length <= 0) { + this.log("No sites found."); + this.success(`Successfully pulled ${chalk.bold(0)} sites.`); return []; } @@ -414,6 +463,8 @@ export class Pull { const result: SiteConfig[] = []; for (const site of sites) { + this.log(`Pulling site ${chalk.bold(site.name)} ...`); + const sitePath = `sites/${site.name}`; const holdingVars = site.vars || []; @@ -460,6 +511,11 @@ export class Pull { } } + if (options.code === false) { + this.warn("Source code download skipped."); + } + + this.success(`Successfully pulled ${chalk.bold(result.length)} sites.`); return result; } finally { process.chdir(originalCwd); @@ -473,11 +529,20 @@ export class Pull { databases: any[]; collections: any[]; }> { + this.warn( + "appwrite pull collection has been deprecated. 
Please consider using 'appwrite pull tables' instead", + ); + this.log("Fetching collections ..."); + const databasesService = new Databases(this.projectClient); const fetchResponse = await databasesService.list([Query.limit(1)]); if (fetchResponse["databases"].length <= 0) { + this.log("No collections found."); + this.success( + `Successfully pulled ${chalk.bold(0)} collections from ${chalk.bold(0)} databases.`, + ); return { databases: [], collections: [] }; } @@ -492,6 +557,9 @@ export class Pull { const allCollections: any[] = []; for (const database of databases) { + this.log( + `Pulling all collections from ${chalk.bold(database.name)} database ...`, + ); allDatabases.push(database); const { collections } = await paginate( @@ -511,6 +579,10 @@ export class Pull { } } + this.success( + `Successfully pulled ${chalk.bold(allCollections.length)} collections from ${chalk.bold(allDatabases.length)} databases.`, + ); + return { databases: allDatabases, collections: allCollections, @@ -524,6 +596,8 @@ export class Pull { databases: any[]; tables: any[]; }> { + this.log("Fetching tables ..."); + const tablesDBService = new TablesDB(this.projectClient); const fetchResponse = await tablesDBService.list({ @@ -531,6 +605,10 @@ export class Pull { }); if (fetchResponse["databases"].length <= 0) { + this.log("No tables found."); + this.success( + `Successfully pulled ${chalk.bold(0)} tables from ${chalk.bold(0)} tableDBs.`, + ); return { databases: [], tables: [] }; } @@ -545,6 +623,9 @@ export class Pull { const allTables: any[] = []; for (const database of databases) { + this.log( + `Pulling all tables from ${chalk.bold(database.name)} database ...`, + ); allDatabases.push(database); const { tables } = await paginate( @@ -563,6 +644,10 @@ export class Pull { } } + this.success( + `Successfully pulled ${chalk.bold(allTables.length)} tables from ${chalk.bold(allDatabases.length)} tableDBs.`, + ); + return { databases: allDatabases, tables: allTables, @@ -573,6 +658,8 @@ export class Pull { * Pull storage buckets from the project */ public async pullBuckets(): Promise { + this.log("Fetching buckets ..."); + const storageService = new Storage(this.projectClient); const fetchResponse = await storageService.listBuckets({ @@ -580,6 +667,8 @@ export class Pull { }); if (fetchResponse["buckets"].length <= 0) { + this.log("No buckets found."); + this.success(`Successfully pulled ${chalk.bold(0)} buckets.`); return []; } @@ -590,6 +679,12 @@ export class Pull { "buckets", ); + for (const bucket of buckets) { + this.log(`Pulling bucket ${chalk.bold(bucket.name)} ...`); + } + + this.success(`Successfully pulled ${chalk.bold(buckets.length)} buckets.`); + return buckets; } @@ -597,6 +692,8 @@ export class Pull { * Pull teams from the project */ public async pullTeams(): Promise { + this.log("Fetching teams ..."); + const teamsService = new Teams(this.projectClient); const fetchResponse = await teamsService.list({ @@ -604,6 +701,8 @@ export class Pull { }); if (fetchResponse["teams"].length <= 0) { + this.log("No teams found."); + this.success(`Successfully pulled ${chalk.bold(0)} teams.`); return []; } @@ -614,6 +713,12 @@ export class Pull { "teams", ); + for (const team of teams) { + this.log(`Pulling team ${chalk.bold(team.name)} ...`); + } + + this.success(`Successfully pulled ${chalk.bold(teams.length)} teams.`); + return teams; } @@ -621,6 +726,8 @@ export class Pull { * Pull messaging topics from the project */ public async pullMessagingTopics(): Promise { + this.log("Fetching topics ..."); + const 
messagingService = new Messaging(this.projectClient); const fetchResponse = await messagingService.listTopics({ @@ -628,6 +735,8 @@ export class Pull { }); if (fetchResponse["topics"].length <= 0) { + this.log("No topics found."); + this.success(`Successfully pulled ${chalk.bold(0)} topics.`); return []; } @@ -638,6 +747,12 @@ export class Pull { "topics", ); + for (const topic of topics) { + this.log(`Pulling topic ${chalk.bold(topic.name)} ...`); + } + + this.success(`Successfully pulled ${chalk.bold(topics.length)} topics.`); + return topics; } } @@ -686,31 +801,17 @@ export const pullResources = async ({ }; const pullSettings = async (): Promise => { - log("Pulling project settings ..."); - - try { - const pullInstance = await createPullInstance(); - const projectId = localConfig.getProject().projectId; - const settings = await pullInstance.pullSettings(projectId); - - localConfig.setProject( - projectId, - settings.projectName, - settings.rawSettings, - ); + const pullInstance = await createPullInstance(); + const projectId = localConfig.getProject().projectId; + const settings = await pullInstance.pullSettings(projectId); - success(`Successfully pulled ${chalk.bold("all")} project settings.`); - } catch (e) { - throw e; - } + localConfig.setProject(projectId, settings.projectName, settings.rawSettings); }; const pullFunctions = async ({ code, withVariables, }: PullFunctionsOptions = {}): Promise => { - log("Fetching functions ..."); - const functionsService = await getFunctionsService(); const fetchResponse = await functionsService.list([Query.limit(1)]); if (fetchResponse["functions"].length <= 0) { @@ -747,25 +848,16 @@ const pullFunctions = async ({ }); for (const func of functions) { - log(`Pulling function ${chalk.bold(func["name"])} ...`); const localFunction = localConfig.getFunction(func.$id); func["path"] = localFunction["path"] || func["path"]; localConfig.addFunction(func); } - - if (!shouldPullCode) { - warn("Source code download skipped."); - } - - success(`Successfully pulled ${chalk.bold(functions.length)} functions.`); }; const pullSites = async ({ code, withVariables, }: PullSitesOptions = {}): Promise => { - log("Fetching sites ..."); - const sitesService = await getSitesService(); const fetchResponse = await sitesService.list({ queries: [Query.limit(1)], @@ -804,146 +896,63 @@ const pullSites = async ({ }); for (const site of sites) { - log(`Pulling site ${chalk.bold(site["name"])} ...`); const localSite = localConfig.getSite(site.$id); site["path"] = localSite["path"] || site["path"]; localConfig.addSite(site); } - - if (!shouldPullCode) { - warn("Source code download skipped."); - } - - success(`Successfully pulled ${chalk.bold(sites.length)} sites.`); }; const pullCollection = async (): Promise => { - warn( - "appwrite pull collection has been deprecated. 
Please consider using 'appwrite pull tables' instead", - ); - log("Fetching collections ..."); - - const databasesService = await getDatabasesService(); - const fetchResponse = await databasesService.list({ - queries: [Query.limit(1)], - }); - if (fetchResponse["databases"].length <= 0) { - log("No collections found."); - success( - `Successfully pulled ${chalk.bold(0)} collections from ${chalk.bold(0)} databases.`, - ); - return; - } - const pullInstance = await createPullInstance(); const { databases, collections } = await pullInstance.pullCollections(); for (const database of databases) { - log( - `Pulling all collections from ${chalk.bold(database["name"])} database ...`, - ); localConfig.addDatabase(database); } for (const collection of collections) { localConfig.addCollection(collection); } - - success( - `Successfully pulled ${chalk.bold(collections.length)} collections from ${chalk.bold(databases.length)} databases.`, - ); }; const pullTable = async (): Promise => { - log("Fetching tables ..."); - - const tablesDBService = await getTablesDBService(); - const fetchResponse = await tablesDBService.list({ - queries: [Query.limit(1)], - }); - if (fetchResponse["databases"].length <= 0) { - log("No tables found."); - success( - `Successfully pulled ${chalk.bold(0)} tables from ${chalk.bold(0)} tableDBs.`, - ); - return; - } - const pullInstance = await createPullInstance(); const { databases, tables } = await pullInstance.pullTables(); for (const database of databases) { - log(`Pulling all tables from ${chalk.bold(database["name"])} database ...`); localConfig.addTablesDB(database); } for (const table of tables) { localConfig.addTable(table); } - - success( - `Successfully pulled ${chalk.bold(tables.length)} tables from ${chalk.bold(databases.length)} tableDBs.`, - ); }; const pullBucket = async (): Promise => { - log("Fetching buckets ..."); - const pullInstance = await createPullInstance(); const buckets = await pullInstance.pullBuckets(); - if (buckets.length === 0) { - log("No buckets found."); - success(`Successfully pulled ${chalk.bold(0)} buckets.`); - return; - } - for (const bucket of buckets) { - log(`Pulling bucket ${chalk.bold(bucket["name"])} ...`); localConfig.addBucket(bucket); } - - success(`Successfully pulled ${chalk.bold(buckets.length)} buckets.`); }; const pullTeam = async (): Promise => { - log("Fetching teams ..."); - const pullInstance = await createPullInstance(); const teams = await pullInstance.pullTeams(); - if (teams.length === 0) { - log("No teams found."); - success(`Successfully pulled ${chalk.bold(0)} teams.`); - return; - } - for (const team of teams) { - log(`Pulling team ${chalk.bold(team["name"])} ...`); localConfig.addTeam(team); } - - success(`Successfully pulled ${chalk.bold(teams.length)} teams.`); }; const pullMessagingTopic = async (): Promise => { - log("Fetching topics ..."); - const pullInstance = await createPullInstance(); const topics = await pullInstance.pullMessagingTopics(); - if (topics.length === 0) { - log("No topics found."); - success(`Successfully pulled ${chalk.bold(0)} topics.`); - return; - } - for (const topic of topics) { - log(`Pulling topic ${chalk.bold(topic["name"])} ...`); localConfig.addMessagingTopic(topic); } - - success(`Successfully pulled ${chalk.bold(topics.length)} topics.`); }; /** Commander.js exports */ diff --git a/lib/commands/push.ts b/lib/commands/push.ts index d7ae078c..940d03f3 100644 --- a/lib/commands/push.ts +++ b/lib/commands/push.ts @@ -1,4 +1,6 @@ import fs from "fs"; +import path from "path"; 
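[Editor's note, not part of the patch: with the pull logic above now returning data instead of writing to localConfig directly, the Pull class can be used headlessly; the third constructor argument enables the silent mode added in this patch. The sdk factory import path is an assumption.]

import { Pull } from "./pull.js";
import { sdkForProject, sdkForConsole } from "../sdks.js"; // assumed location

// Hypothetical headless consumer: pull a read-only snapshot of a project
// without touching appwrite.config.json or printing CLI logs.
async function snapshotProject(projectId: string) {
  const pull = new Pull(await sdkForProject(), await sdkForConsole(), true); // silent

  const [settings, buckets, teams, topics] = await Promise.all([
    pull.pullSettings(projectId),
    pull.pullBuckets(),
    pull.pullTeams(),
    pull.pullMessagingTopics(),
  ]);

  // The caller decides what to do with the data (persist it, diff it, etc.).
  return { settings, buckets, teams, topics };
}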
+import tar from "tar"; import { parse as parseDotenv } from "dotenv"; import chalk from "chalk"; import inquirer from "inquirer"; @@ -19,6 +21,7 @@ import type { SettingsType } from "./config.js"; import type { ConfigType } from "./config.js"; import { Spinner, SPINNER_DOTS } from "../spinner.js"; import { paginate } from "../paginate.js"; +import { packageDirectory } from "../utils.js"; import { questionsPushBuckets, questionsPushTeams, @@ -701,11 +704,13 @@ export class Push { const functionsServiceDeploy = await getFunctionsService( this.projectClient, ); + + const codeFile = await packageDirectory(func.path); response = await functionsServiceDeploy.createDeployment({ functionId: func["$id"], entrypoint: func.entrypoint, commands: func.commands, - code: func.path, + code: codeFile, activate: true, }); @@ -1024,12 +1029,14 @@ export class Push { try { updaterRow.update({ status: "Pushing" }).replaceSpinner(SPINNER_DOTS); const sitesServiceDeploy = await getSitesService(this.projectClient); + + const codeFile = await packageDirectory(site.path); response = await sitesServiceDeploy.createDeployment({ siteId: site["$id"], installCommand: site.installCommand, buildCommand: site.buildCommand, outputDirectory: site.outputDirectory, - code: site.path, + code: codeFile, activate: true, }); diff --git a/lib/utils.ts b/lib/utils.ts index 734c30ef..608443f5 100644 --- a/lib/utils.ts +++ b/lib/utils.ts @@ -5,6 +5,7 @@ import childProcess from "child_process"; import chalk from "chalk"; import { fetch } from "undici"; import { localConfig, globalConfig } from "./config.js"; +import tar from "tar"; /** * Get the latest version from npm registry @@ -350,3 +351,26 @@ export function isCloud(): boolean { const hostname = new URL(endpoint).hostname; return hostname.endsWith("appwrite.io"); } + +/** + * Package a directory into a tar.gz File object for deployment + */ +export async function packageDirectory(dirPath: string): Promise { + const tempFile = `${dirPath.replace(/[^a-zA-Z0-9]/g, "_")}-${Date.now()}.tar.gz`; + + await tar.create( + { + gzip: true, + file: tempFile, + cwd: dirPath, + }, + ["."], + ); + + const buffer = fs.readFileSync(tempFile); + fs.unlinkSync(tempFile); + + return new File([buffer], path.basename(tempFile), { + type: "application/gzip", + }); +} From 0f8f8333e6ee21041ae9c3fdd7cde97b04ad3e48 Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Wed, 7 Jan 2026 09:38:16 +0530 Subject: [PATCH 26/41] refactor deployment code --- lib/commands/pull.ts | 85 +-------------- lib/commands/push.ts | 49 +++++---- lib/commands/utils/deployment.ts | 177 +++++++++++++++++++++++++++++++ lib/utils.ts | 24 ----- 4 files changed, 213 insertions(+), 122 deletions(-) create mode 100644 lib/commands/utils/deployment.ts diff --git a/lib/commands/pull.ts b/lib/commands/pull.ts index 94325772..feb55ed8 100644 --- a/lib/commands/pull.ts +++ b/lib/commands/pull.ts @@ -51,6 +51,7 @@ import type { FunctionConfig, SiteConfig, } from "../types.js"; +import { downloadDeploymentCode } from "./utils/deployment.js"; export interface PullOptions { all?: boolean; @@ -144,84 +145,6 @@ export class Pull { } } - /** - * Download and extract deployment code for a resource - */ - private async downloadDeploymentCode(params: { - resourceId: string; - resourcePath: string; - holdingVars: { key: string; value: string }[]; - withVariables?: boolean; - listDeployments: () => Promise; - getDownloadUrl: (deploymentId: string) => string; - }): Promise { - const { - resourceId, - resourcePath, - holdingVars, - withVariables, - 
listDeployments, - getDownloadUrl, - } = params; - - let deploymentId: string | null = null; - try { - const deployments = await listDeployments(); - if (deployments["total"] > 0) { - deploymentId = deployments["deployments"][0]["$id"]; - } - } catch (e: unknown) { - if (e instanceof AppwriteException) { - error(e.message); - } - } - - if (deploymentId === null) { - return; - } - - const compressedFileName = `${resourceId}-${+new Date()}.tar.gz`; - const downloadUrl = getDownloadUrl(deploymentId); - - const downloadBuffer = await this.projectClient.call( - "get", - new URL(downloadUrl), - {}, - {}, - "arrayBuffer", - ); - - try { - fs.writeFileSync(compressedFileName, Buffer.from(downloadBuffer as any)); - } catch (err) { - const message = err instanceof Error ? err.message : String(err); - throw new Error( - `Failed to write deployment archive to "${compressedFileName}": ${message}`, - ); - } - - tar.extract({ - sync: true, - cwd: resourcePath, - file: compressedFileName, - strict: false, - }); - - fs.rmSync(compressedFileName); - - if (withVariables) { - const envFileLocation = `${resourcePath}/.env`; - try { - fs.rmSync(envFileLocation); - } catch {} - - fs.writeFileSync( - envFileLocation, - holdingVars.map((r) => `${r.key}=${r.value}\n`).join(""), - ); - } - } - /** * Pull resources from Appwrite project and return updated config * @@ -385,7 +308,7 @@ export class Pull { } if (options.code !== false) { - await this.downloadDeploymentCode({ + await downloadDeploymentCode({ resourceId: func["$id"], resourcePath: funcPath, holdingVars, @@ -400,6 +323,7 @@ export class Pull { functionId: func["$id"], deploymentId, }), + projectClient: this.projectClient, }); } } @@ -492,7 +416,7 @@ export class Pull { } if (options.code !== false) { - await this.downloadDeploymentCode({ + await downloadDeploymentCode({ resourceId: site["$id"], resourcePath: sitePath, holdingVars, @@ -507,6 +431,7 @@ export class Pull { siteId: site["$id"], deploymentId, }), + projectClient: this.projectClient, }); } } diff --git a/lib/commands/push.ts b/lib/commands/push.ts index 940d03f3..d9ea45cc 100644 --- a/lib/commands/push.ts +++ b/lib/commands/push.ts @@ -21,7 +21,7 @@ import type { SettingsType } from "./config.js"; import type { ConfigType } from "./config.js"; import { Spinner, SPINNER_DOTS } from "../spinner.js"; import { paginate } from "../paginate.js"; -import { packageDirectory } from "../utils.js"; +import { pushDeployment } from "./utils/deployment.js"; import { questionsPushBuckets, questionsPushTeams, @@ -705,16 +705,23 @@ export class Push { this.projectClient, ); - const codeFile = await packageDirectory(func.path); - response = await functionsServiceDeploy.createDeployment({ - functionId: func["$id"], - entrypoint: func.entrypoint, - commands: func.commands, - code: codeFile, - activate: true, + const result = await pushDeployment({ + resourcePath: func.path, + createDeployment: async (codeFile) => { + return await functionsServiceDeploy.createDeployment({ + functionId: func["$id"], + entrypoint: func.entrypoint, + commands: func.commands, + code: codeFile, + activate: true, + }); + }, + pollForStatus: false, }); + response = result.deployment; updaterRow.update({ status: "Pushed" }); + deploymentCreated = true; successfullyPushed++; } catch (e: any) { @@ -779,7 +786,7 @@ export class Push { ]); if (Number(res.total) === 1) { - url = res.rules[0].domain; + url = `https://${res.rules[0].domain}`; } updaterRow.update({ status: "Deployed", end: url }); @@ -1030,16 +1037,22 @@ export class Push { 
updaterRow.update({ status: "Pushing" }).replaceSpinner(SPINNER_DOTS); const sitesServiceDeploy = await getSitesService(this.projectClient); - const codeFile = await packageDirectory(site.path); - response = await sitesServiceDeploy.createDeployment({ - siteId: site["$id"], - installCommand: site.installCommand, - buildCommand: site.buildCommand, - outputDirectory: site.outputDirectory, - code: codeFile, - activate: true, + const result = await pushDeployment({ + resourcePath: site.path, + createDeployment: async (codeFile) => { + return await sitesServiceDeploy.createDeployment({ + siteId: site["$id"], + installCommand: site.installCommand, + buildCommand: site.buildCommand, + outputDirectory: site.outputDirectory, + code: codeFile, + activate: true, + }); + }, + pollForStatus: false, }); + response = result.deployment; updaterRow.update({ status: "Pushed" }); deploymentCreated = true; successfullyPushed++; @@ -1105,7 +1118,7 @@ export class Push { ]); if (Number(res.total) === 1) { - url = res.rules[0].domain; + url = `https://${res.rules[0].domain}`; } updaterRow.update({ status: "Deployed", end: url }); diff --git a/lib/commands/utils/deployment.ts b/lib/commands/utils/deployment.ts new file mode 100644 index 00000000..780c3232 --- /dev/null +++ b/lib/commands/utils/deployment.ts @@ -0,0 +1,177 @@ +import fs from "fs"; +import path from "path"; +import tar from "tar"; +import { Client, AppwriteException } from "@appwrite.io/console"; + +const POLL_DEBOUNCE = 2000; // Milliseconds + +/** + * Package a directory into a tar.gz File object for deployment + * @private - Only used internally by pushDeployment + */ +async function packageDirectory(dirPath: string): Promise { + const tempFile = `${dirPath.replace(/[^a-zA-Z0-9]/g, "_")}-${Date.now()}.tar.gz`; + + await tar.create( + { + gzip: true, + file: tempFile, + cwd: dirPath, + }, + ["."], + ); + + const buffer = fs.readFileSync(tempFile); + fs.unlinkSync(tempFile); + + return new File([buffer], path.basename(tempFile), { + type: "application/gzip", + }); +} + +/** + * Download and extract deployment code for a resource + */ +export async function downloadDeploymentCode(params: { + resourceId: string; + resourcePath: string; + holdingVars: { key: string; value: string }[]; + withVariables?: boolean; + listDeployments: () => Promise; + getDownloadUrl: (deploymentId: string) => string; + projectClient: Client; +}): Promise { + const { + resourceId, + resourcePath, + holdingVars, + withVariables, + listDeployments, + getDownloadUrl, + projectClient, + } = params; + + let deploymentId: string | null = null; + try { + const deployments = await listDeployments(); + if (deployments["total"] > 0) { + deploymentId = deployments["deployments"][0]["$id"]; + } + } catch (e: unknown) { + if (e instanceof AppwriteException) { + throw e; + } + } + + if (deploymentId === null) { + return; + } + + const compressedFileName = `${resourceId}-${+new Date()}.tar.gz`; + const downloadUrl = getDownloadUrl(deploymentId); + + const downloadBuffer = await projectClient.call( + "get", + new URL(downloadUrl), + {}, + {}, + "arrayBuffer", + ); + + try { + fs.writeFileSync(compressedFileName, Buffer.from(downloadBuffer as any)); + } catch (err) { + const message = err instanceof Error ? 
err.message : String(err); + throw new Error( + `Failed to write deployment archive to "${compressedFileName}": ${message}`, + ); + } + + tar.extract({ + sync: true, + cwd: resourcePath, + file: compressedFileName, + strict: false, + }); + + fs.rmSync(compressedFileName); + + if (withVariables) { + const envFileLocation = `${resourcePath}/.env`; + try { + fs.rmSync(envFileLocation); + } catch {} + + fs.writeFileSync( + envFileLocation, + holdingVars.map((r) => `${r.key}=${r.value}\n`).join(""), + ); + } +} + +export interface PushDeploymentParams { + resourcePath: string; + createDeployment: (codeFile: File) => Promise; + getDeployment?: (deploymentId: string) => Promise; + pollForStatus?: boolean; + onStatusUpdate?: (status: string) => void; +} + +export interface PushDeploymentResult { + deployment: any; + wasPolled: boolean; + finalStatus?: string; +} + +/** + * Push a deployment for a resource (function or site) + * Handles packaging, creating the deployment, and optionally polling for status + */ +export async function pushDeployment( + params: PushDeploymentParams, +): Promise { + const { + resourcePath, + createDeployment, + getDeployment, + pollForStatus = false, + onStatusUpdate, + } = params; + + // Package the directory + const codeFile = await packageDirectory(resourcePath); + + // Create the deployment + let deployment = await createDeployment(codeFile); + + // Poll for deployment status if requested + let finalStatus: string | undefined; + let wasPolled = false; + + if (pollForStatus && getDeployment) { + wasPolled = true; + const deploymentId = deployment["$id"]; + + while (true) { + deployment = await getDeployment(deploymentId); + const status = deployment["status"]; + + if (onStatusUpdate) { + onStatusUpdate(status); + } + + if (status === "ready" || status === "failed") { + finalStatus = status; + break; + } + + await new Promise((resolve) => setTimeout(resolve, POLL_DEBOUNCE * 1.5)); + } + } + + return { + deployment, + wasPolled, + finalStatus, + }; +} diff --git a/lib/utils.ts b/lib/utils.ts index 608443f5..734c30ef 100644 --- a/lib/utils.ts +++ b/lib/utils.ts @@ -5,7 +5,6 @@ import childProcess from "child_process"; import chalk from "chalk"; import { fetch } from "undici"; import { localConfig, globalConfig } from "./config.js"; -import tar from "tar"; /** * Get the latest version from npm registry @@ -351,26 +350,3 @@ export function isCloud(): boolean { const hostname = new URL(endpoint).hostname; return hostname.endsWith("appwrite.io"); } - -/** - * Package a directory into a tar.gz File object for deployment - */ -export async function packageDirectory(dirPath: string): Promise { - const tempFile = `${dirPath.replace(/[^a-zA-Z0-9]/g, "_")}-${Date.now()}.tar.gz`; - - await tar.create( - { - gzip: true, - file: tempFile, - cwd: dirPath, - }, - ["."], - ); - - const buffer = fs.readFileSync(tempFile); - fs.unlinkSync(tempFile); - - return new File([buffer], path.basename(tempFile), { - type: "application/gzip", - }); -} From 9e3e9edfc410d8abe8af54a2b4914d61726fbc5a Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Wed, 7 Jan 2026 09:39:21 +0530 Subject: [PATCH 27/41] use query clas --- lib/commands/push.ts | 55 ++++++++++++++------------------------------ 1 file changed, 17 insertions(+), 38 deletions(-) diff --git a/lib/commands/push.ts b/lib/commands/push.ts index d9ea45cc..ada8fce7 100644 --- a/lib/commands/push.ts +++ b/lib/commands/push.ts @@ -62,6 +62,7 @@ import { AuthMethod, AppwriteException, Client, + Query, } from "@appwrite.io/console"; import { 
checkDeployConditions } from "../utils.js"; import { Pools } from "./utils/pools.js"; @@ -766,24 +767,14 @@ export class Push { const proxyServiceUrl = await getProxyService( this.projectClient, ); - const res = await proxyServiceUrl.listRules([ - JSON.stringify({ method: "limit", values: [1] }), - JSON.stringify({ - method: "equal", - attribute: "deploymentResourceType", - values: ["function"], - }), - JSON.stringify({ - method: "equal", - attribute: "deploymentResourceId", - values: [func["$id"]], - }), - JSON.stringify({ - method: "equal", - attribute: "trigger", - values: ["manual"], - }), - ]); + const res = await proxyServiceUrl.listRules({ + queries: [ + Query.limit(1), + Query.equal("deploymentResourceType", "function"), + Query.equal("deploymentResourceId", func["$id"]), + Query.equal("trigger", "manual"), + ], + }); if (Number(res.total) === 1) { url = `https://${res.rules[0].domain}`; @@ -1099,22 +1090,10 @@ export class Push { this.projectClient, ); const res = await proxyServiceUrl.listRules([ - JSON.stringify({ method: "limit", values: [1] }), - JSON.stringify({ - method: "equal", - attribute: "deploymentResourceType", - values: ["site"], - }), - JSON.stringify({ - method: "equal", - attribute: "deploymentResourceId", - values: [site["$id"]], - }), - JSON.stringify({ - method: "equal", - attribute: "trigger", - values: ["manual"], - }), + Query.limit(1), + Query.equal("deploymentResourceType", "site"), + Query.equal("deploymentResourceId", site["$id"]), + Query.equal("trigger", "manual"), ]); if (Number(res.total) === 1) { @@ -1186,10 +1165,10 @@ export class Push { tables.map(async (table: any) => { try { const tablesDBService = await getTablesDBService(this.projectClient); - const remoteTable = await tablesDBService.getTable( - table["databaseId"], - table["$id"], - ); + const remoteTable = await tablesDBService.getTable({ + databaseId: table["databaseId"], + tableId: table["$id"], + }); const changes: string[] = []; if (remoteTable.name !== table.name) changes.push("name"); From 9cd53ef39bbedfce3403315f0fc8deac34df013f Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Wed, 7 Jan 2026 09:44:00 +0530 Subject: [PATCH 28/41] unused import --- lib/commands/push.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/lib/commands/push.ts b/lib/commands/push.ts index ada8fce7..59d4f9d4 100644 --- a/lib/commands/push.ts +++ b/lib/commands/push.ts @@ -1,6 +1,4 @@ import fs from "fs"; -import path from "path"; -import tar from "tar"; import { parse as parseDotenv } from "dotenv"; import chalk from "chalk"; import inquirer from "inquirer"; From c9b92e704db7dc2e8e65d6eb6f99a7f0851f8ccf Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Wed, 7 Jan 2026 11:35:36 +0530 Subject: [PATCH 29/41] throw all errors --- lib/commands/utils/deployment.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/lib/commands/utils/deployment.ts b/lib/commands/utils/deployment.ts index 780c3232..e5746c7d 100644 --- a/lib/commands/utils/deployment.ts +++ b/lib/commands/utils/deployment.ts @@ -59,6 +59,9 @@ export async function downloadDeploymentCode(params: { } } catch (e: unknown) { if (e instanceof AppwriteException) { + this.error(e.message); + return; + } else { throw e; } } From 7ab03f190d07930a8b2284e95f1f8eec8c16ba42 Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Wed, 7 Jan 2026 15:29:31 +0530 Subject: [PATCH 30/41] improve the definitive zod type --- lib/commands/config.ts | 411 ++++++++++++++++++++-------------------- lib/commands/pull.ts | 39 ++-- lib/commands/push.ts | 42 ++-- 
lib/config.ts | 115 ++++------- lib/emulation/docker.ts | 8 +- lib/types.ts | 248 ++---------------------- lib/utils.ts | 42 ++++ 7 files changed, 347 insertions(+), 558 deletions(-) diff --git a/lib/commands/config.ts b/lib/commands/config.ts index 25709b47..b05a148b 100644 --- a/lib/commands/config.ts +++ b/lib/commands/config.ts @@ -1,97 +1,12 @@ import { z } from "zod"; -import type { ProjectSettings, RawProjectSettings } from "../types.js"; + +// ============================================================================ +// Internal Helpers (not exported) +// ============================================================================ const INT64_MIN = BigInt("-9223372036854775808"); const INT64_MAX = BigInt("9223372036854775807"); -const createSettingsObject = ( - settings: RawProjectSettings, -): ProjectSettings => { - return { - services: { - account: settings.serviceStatusForAccount, - avatars: settings.serviceStatusForAvatars, - databases: settings.serviceStatusForDatabases, - locale: settings.serviceStatusForLocale, - health: settings.serviceStatusForHealth, - storage: settings.serviceStatusForStorage, - teams: settings.serviceStatusForTeams, - users: settings.serviceStatusForUsers, - sites: settings.serviceStatusForSites, - functions: settings.serviceStatusForFunctions, - graphql: settings.serviceStatusForGraphql, - messaging: settings.serviceStatusForMessaging, - }, - auth: { - methods: { - jwt: settings.authJWT, - phone: settings.authPhone, - invites: settings.authInvites, - anonymous: settings.authAnonymous, - "email-otp": settings.authEmailOtp, - "magic-url": settings.authUsersAuthMagicURL, - "email-password": settings.authEmailPassword, - }, - security: { - duration: settings.authDuration, - limit: settings.authLimit, - sessionsLimit: settings.authSessionsLimit, - passwordHistory: settings.authPasswordHistory, - passwordDictionary: settings.authPasswordDictionary, - personalDataCheck: settings.authPersonalDataCheck, - sessionAlerts: settings.authSessionAlerts, - mockNumbers: settings.authMockNumbers, - }, - }, - }; -}; - -const SiteSchema = z - .object({ - path: z.string().optional(), - $id: z.string(), - name: z.string(), - enabled: z.boolean().optional(), - logging: z.boolean().optional(), - timeout: z.number().optional(), - framework: z.string().optional(), - buildRuntime: z.string().optional(), - adapter: z.string().optional(), - installCommand: z.string().optional(), - buildCommand: z.string().optional(), - outputDirectory: z.string().optional(), - fallbackFile: z.string().optional(), - specification: z.string().optional(), - }) - .strict(); - -const FunctionSchema = z - .object({ - path: z.string().optional(), - $id: z.string(), - execute: z.array(z.string()).optional(), - name: z.string(), - enabled: z.boolean().optional(), - logging: z.boolean().optional(), - runtime: z.string().optional(), - specification: z.string().optional(), - scopes: z.array(z.string()).optional(), - events: z.array(z.string()).optional(), - schedule: z.string().optional(), - timeout: z.number().optional(), - entrypoint: z.string().optional(), - commands: z.string().optional(), - }) - .strict(); - -const DatabaseSchema = z - .object({ - $id: z.string(), - name: z.string(), - enabled: z.boolean().optional(), - }) - .strict(); - const int64Schema = z.preprocess( (val) => { if (typeof val === "bigint") { @@ -143,6 +58,153 @@ const int64Schema = z.preprocess( }), ); +const MockNumberSchema = z + .object({ + phone: z.string(), + otp: z.string(), + }) + .strict(); + +// 
============================================================================ +// Config Schema +// ============================================================================ + +const ConfigSchema = z + .object({ + projectId: z.string(), + projectName: z.string().optional(), + endpoint: z.string().optional(), + settings: z.lazy(() => SettingsSchema).optional(), + functions: z.array(z.lazy(() => FunctionSchema)).optional(), + sites: z.array(z.lazy(() => SiteSchema)).optional(), + databases: z.array(z.lazy(() => DatabaseSchema)).optional(), + collections: z.array(z.lazy(() => CollectionSchema)).optional(), + tables: z.array(z.lazy(() => TablesDBSchema)).optional(), + topics: z.array(z.lazy(() => TopicSchema)).optional(), + teams: z.array(z.lazy(() => TeamSchema)).optional(), + buckets: z.array(z.lazy(() => BucketSchema)).optional(), + messages: z.array(z.lazy(() => MessageSchema)).optional(), + }) + .strict(); + +// ============================================================================ +// Project Settings +// ============================================================================ + +const SettingsSchema = z + .object({ + services: z + .object({ + account: z.boolean().optional(), + avatars: z.boolean().optional(), + databases: z.boolean().optional(), + locale: z.boolean().optional(), + health: z.boolean().optional(), + storage: z.boolean().optional(), + teams: z.boolean().optional(), + users: z.boolean().optional(), + sites: z.boolean().optional(), + functions: z.boolean().optional(), + graphql: z.boolean().optional(), + messaging: z.boolean().optional(), + }) + .strict() + .optional(), + auth: z + .object({ + methods: z + .object({ + jwt: z.boolean().optional(), + phone: z.boolean().optional(), + invites: z.boolean().optional(), + anonymous: z.boolean().optional(), + "email-otp": z.boolean().optional(), + "magic-url": z.boolean().optional(), + "email-password": z.boolean().optional(), + }) + .strict() + .optional(), + security: z + .object({ + duration: z.number().optional(), + limit: z.number().optional(), + sessionsLimit: z.number().optional(), + passwordHistory: z.number().optional(), + passwordDictionary: z.boolean().optional(), + personalDataCheck: z.boolean().optional(), + sessionAlerts: z.boolean().optional(), + mockNumbers: z.array(MockNumberSchema).optional(), + }) + .strict() + .optional(), + }) + .strict() + .optional(), + }) + .strict(); + +// ============================================================================ +// Functions and Sites +// ============================================================================ + +const SiteSchema = z + .object({ + path: z.string().optional(), + $id: z.string(), + name: z.string(), + enabled: z.boolean().optional(), + logging: z.boolean().optional(), + timeout: z.number().optional(), + framework: z.string().optional(), + buildRuntime: z.string().optional(), + adapter: z.string().optional(), + installCommand: z.string().optional(), + buildCommand: z.string().optional(), + outputDirectory: z.string().optional(), + fallbackFile: z.string().optional(), + specification: z.string().optional(), + vars: z.record(z.string(), z.string()).optional(), + ignore: z.string().optional(), + }) + .strict(); + +const FunctionSchema = z + .object({ + path: z.string().optional(), + $id: z.string(), + execute: z.array(z.string()).optional(), + name: z.string(), + enabled: z.boolean().optional(), + logging: z.boolean().optional(), + runtime: z.string().optional(), + specification: z.string().optional(), + scopes: z.array(z.string()).optional(), 
+ events: z.array(z.string()).optional(), + schedule: z.string().optional(), + timeout: z.number().optional(), + entrypoint: z.string().optional(), + commands: z.string().optional(), + vars: z.record(z.string(), z.string()).optional(), + ignore: z.string().optional(), + }) + .strict(); + +// ============================================================================ +// Databases +// ============================================================================ + +const DatabaseSchema = z + .object({ + $id: z.string(), + name: z.string(), + enabled: z.boolean().optional(), + }) + .strict(); + +// ============================================================================ +// Collections (legacy) +// ============================================================================ + const AttributeSchemaBase = z .object({ key: z.string(), @@ -194,7 +256,6 @@ const AttributeSchema = AttributeSchemaBase.refine( path: ["default"], }, ); -const ColumnSchema = AttributeSchema; const IndexSchema = z .object({ @@ -206,16 +267,6 @@ const IndexSchema = z }) .strict(); -const IndexTableSchema = z - .object({ - key: z.string(), - type: z.string(), - status: z.string().optional(), - columns: z.array(z.string()), - orders: z.array(z.string()).optional(), - }) - .strict(); - const CollectionSchema = z .object({ $id: z.string(), @@ -231,11 +282,9 @@ const CollectionSchema = z .superRefine((data, ctx) => { if (data.attributes && data.attributes.length > 0) { const seenKeys = new Set(); - const duplicateKeys = new Set(); data.attributes.forEach((attr, index) => { if (seenKeys.has(attr.key)) { - duplicateKeys.add(attr.key); ctx.addIssue({ code: z.ZodIssueCode.custom, message: `Attribute with the key '${attr.key}' already exists. Attribute keys must be unique, try again with a different key.`, @@ -249,11 +298,9 @@ const CollectionSchema = z if (data.indexes && data.indexes.length > 0) { const seenKeys = new Set(); - const duplicateKeys = new Set(); data.indexes.forEach((index, indexPos) => { if (seenKeys.has(index.key)) { - duplicateKeys.add(index.key); ctx.addIssue({ code: z.ZodIssueCode.custom, message: `Index with the key '${index.key}' already exists. Index keys must be unique, try again with a different key.`, @@ -266,6 +313,22 @@ const CollectionSchema = z } }); +// ============================================================================ +// Tables +// ============================================================================ + +const ColumnSchema = AttributeSchema; + +const IndexTableSchema = z + .object({ + key: z.string(), + type: z.string(), + status: z.string().optional(), + columns: z.array(z.string()), + orders: z.array(z.string()).optional(), + }) + .strict(); + const TablesDBSchema = z .object({ $id: z.string(), @@ -281,11 +344,9 @@ const TablesDBSchema = z .superRefine((data, ctx) => { if (data.columns && data.columns.length > 0) { const seenKeys = new Set(); - const duplicateKeys = new Set(); data.columns.forEach((col, index) => { if (seenKeys.has(col.key)) { - duplicateKeys.add(col.key); ctx.addIssue({ code: z.ZodIssueCode.custom, message: `Column with the key '${col.key}' already exists. 
Column keys must be unique, try again with a different key.`, @@ -299,11 +360,9 @@ const TablesDBSchema = z if (data.indexes && data.indexes.length > 0) { const seenKeys = new Set(); - const duplicateKeys = new Set(); data.indexes.forEach((index, indexPos) => { if (seenKeys.has(index.key)) { - duplicateKeys.add(index.key); ctx.addIssue({ code: z.ZodIssueCode.custom, message: `Index with the key '${index.key}' already exists. Index keys must be unique, try again with a different key.`, @@ -316,20 +375,9 @@ const TablesDBSchema = z } }); -const BucketSchema = z - .object({ - $id: z.string(), - $permissions: z.array(z.string()).optional(), - fileSecurity: z.boolean().optional(), - name: z.string(), - enabled: z.boolean().optional(), - maximumFileSize: z.number().optional(), - allowedFileExtensions: z.array(z.string()).optional(), - compression: z.string().optional(), - encryption: z.boolean().optional(), - antivirus: z.boolean().optional(), - }) - .strict(); +// ============================================================================ +// Topics +// ============================================================================ const TopicSchema = z .object({ @@ -339,6 +387,10 @@ const TopicSchema = z }) .strict(); +// ============================================================================ +// Teams +// ============================================================================ + const TeamSchema = z .object({ $id: z.string(), @@ -346,6 +398,10 @@ const TeamSchema = z }) .strict(); +// ============================================================================ +// Messages +// ============================================================================ + const MessageSchema = z .object({ $id: z.string(), @@ -357,99 +413,51 @@ const MessageSchema = z }) .strict(); -const SettingsSchema = z - .object({ - services: z - .object({ - account: z.boolean().optional(), - avatars: z.boolean().optional(), - databases: z.boolean().optional(), - locale: z.boolean().optional(), - health: z.boolean().optional(), - storage: z.boolean().optional(), - teams: z.boolean().optional(), - users: z.boolean().optional(), - sites: z.boolean().optional(), - functions: z.boolean().optional(), - graphql: z.boolean().optional(), - messaging: z.boolean().optional(), - }) - .strict() - .optional(), - auth: z - .object({ - methods: z - .object({ - jwt: z.boolean().optional(), - phone: z.boolean().optional(), - invites: z.boolean().optional(), - anonymous: z.boolean().optional(), - "email-otp": z.boolean().optional(), - "magic-url": z.boolean().optional(), - "email-password": z.boolean().optional(), - }) - .strict() - .optional(), - security: z - .object({ - duration: z.number().optional(), - limit: z.number().optional(), - sessionsLimit: z.number().optional(), - passwordHistory: z.number().optional(), - passwordDictionary: z.boolean().optional(), - personalDataCheck: z.boolean().optional(), - sessionAlerts: z.boolean().optional(), - mockNumbers: z - .array( - z - .object({ - phone: z.string(), - otp: z.string(), - }) - .strict(), - ) - .optional(), - }) - .strict() - .optional(), - }) - .strict() - .optional(), - }) - .strict(); +// ============================================================================ +// Buckets +// ============================================================================ -const configSchema = z +const BucketSchema = z .object({ - projectId: z.string(), - projectName: z.string().optional(), - endpoint: z.string().optional(), - settings: SettingsSchema.optional(), - functions: 
z.array(FunctionSchema).optional(), - sites: z.array(SiteSchema).optional(), - databases: z.array(DatabaseSchema).optional(), - collections: z.array(CollectionSchema).optional(), - tablesDB: z.array(TablesDBSchema).optional(), - topics: z.array(TopicSchema).optional(), - teams: z.array(TeamSchema).optional(), - buckets: z.array(BucketSchema).optional(), - messages: z.array(MessageSchema).optional(), + $id: z.string(), + $permissions: z.array(z.string()).optional(), + fileSecurity: z.boolean().optional(), + name: z.string(), + enabled: z.boolean().optional(), + maximumFileSize: z.number().optional(), + allowedFileExtensions: z.array(z.string()).optional(), + compression: z.string().optional(), + encryption: z.boolean().optional(), + antivirus: z.boolean().optional(), }) .strict(); -export type ConfigType = z.infer; +// ============================================================================ +// Type Exports (inferred from Zod schemas - single source of truth) +// ============================================================================ + +export type ConfigType = z.infer; export type SettingsType = z.infer; -export type FunctionType = z.infer; export type SiteType = z.infer; +export type FunctionType = z.infer; export type DatabaseType = z.infer; export type CollectionType = z.infer; -export type TablesDBType = z.infer; +export type AttributeType = z.infer; +export type IndexType = z.infer; +export type TableType = z.infer; +export type ColumnType = z.infer; +export type TableIndexType = z.infer; export type TopicType = z.infer; export type TeamType = z.infer; export type MessageType = z.infer; export type BucketType = z.infer; +// ============================================================================ +// Schema Exports +// ============================================================================ + export { - configSchema, + ConfigSchema, /** Project Settings */ SettingsSchema, @@ -482,7 +490,4 @@ export { /** Buckets */ BucketSchema, - - /** Helper functions */ - createSettingsObject, }; diff --git a/lib/commands/pull.ts b/lib/commands/pull.ts index feb55ed8..8b234ad5 100644 --- a/lib/commands/pull.ts +++ b/lib/commands/pull.ts @@ -43,14 +43,9 @@ import { commandDescriptions, } from "../parser.js"; import type { ConfigType } from "./config.js"; -import { createSettingsObject } from "./config.js"; +import { createSettingsObject } from "../utils.js"; import { ProjectNotInitializedError } from "./errors.js"; -import type { - ProjectSettings, - RawProjectSettings, - FunctionConfig, - SiteConfig, -} from "../types.js"; +import type { SettingsType, FunctionType, SiteType } from "../types.js"; import { downloadDeploymentCode } from "./utils/deployment.js"; export interface PullOptions { @@ -85,8 +80,8 @@ interface PullResourcesOptions { export interface PullSettingsResult { projectName: string; - settings: ProjectSettings; - rawSettings: RawProjectSettings; + settings: SettingsType; + project: Models.Project; } async function createPullInstance(): Promise { @@ -188,7 +183,7 @@ export class Pull { if (shouldPullAll || options.tables) { const { databases, tables } = await this.pullTables(); updatedConfig.databases = databases; - updatedConfig.tablesDB = tables; + updatedConfig.tables = tables; } if (options.collections) { @@ -222,14 +217,14 @@ export class Pull { this.log("Pulling project settings ..."); const projectsService = new Projects(this.consoleClient); - const rawSettings = await projectsService.get({ projectId: projectId }); + const project = await projectsService.get({ 
projectId: projectId }); this.success(`Successfully pulled ${chalk.bold("all")} project settings.`); return { - projectName: rawSettings.name, - settings: createSettingsObject(rawSettings), - rawSettings, + projectName: project.name, + settings: createSettingsObject(project), + project, }; } @@ -238,7 +233,7 @@ export class Pull { */ public async pullFunctions( options: PullFunctionsOptions = {}, - ): Promise { + ): Promise { this.log("Fetching functions ..."); const originalCwd = process.cwd(); @@ -276,7 +271,7 @@ export class Pull { functions = allFunctions; } - const result: FunctionConfig[] = []; + const result: FunctionType[] = []; for (const func of functions) { this.log(`Pulling function ${chalk.bold(func.name)} ...`); @@ -284,7 +279,7 @@ export class Pull { const funcPath = `functions/${func.name}`; const holdingVars = func.vars || []; - const functionConfig: FunctionConfig = { + const functionConfig: FunctionType = { $id: func.$id, name: func.name, runtime: func.runtime, @@ -344,9 +339,7 @@ export class Pull { /** * Pull sites from the project */ - public async pullSites( - options: PullSitesOptions = {}, - ): Promise { + public async pullSites(options: PullSitesOptions = {}): Promise { this.log("Fetching sites ..."); const originalCwd = process.cwd(); @@ -384,7 +377,7 @@ export class Pull { sites = fetchedSites; } - const result: SiteConfig[] = []; + const result: SiteType[] = []; for (const site of sites) { this.log(`Pulling site ${chalk.bold(site.name)} ...`); @@ -392,7 +385,7 @@ export class Pull { const sitePath = `sites/${site.name}`; const holdingVars = site.vars || []; - const siteConfig: SiteConfig = { + const siteConfig: SiteType = { $id: site.$id, name: site.name, path: sitePath, @@ -730,7 +723,7 @@ const pullSettings = async (): Promise => { const projectId = localConfig.getProject().projectId; const settings = await pullInstance.pullSettings(projectId); - localConfig.setProject(projectId, settings.projectName, settings.rawSettings); + localConfig.setProject(projectId, settings.projectName, settings.project); }; const pullFunctions = async ({ diff --git a/lib/commands/push.ts b/lib/commands/push.ts index 59d4f9d4..ec8a6b78 100644 --- a/lib/commands/push.ts +++ b/lib/commands/push.ts @@ -15,8 +15,8 @@ import { KeysCollection, KeysTable, } from "../config.js"; -import type { SettingsType } from "./config.js"; -import type { ConfigType } from "./config.js"; +import type { SettingsType, ConfigType } from "./config.js"; +import { createSettingsObject } from "../utils.js"; import { Spinner, SPINNER_DOTS } from "../spinner.js"; import { paginate } from "../paginate.js"; import { pushDeployment } from "./utils/deployment.js"; @@ -225,10 +225,10 @@ export class Push { } // Push tables - if (config.tablesDB && config.tablesDB.length > 0) { + if (config.tables && config.tables.length > 0) { try { log("Pushing tables ..."); - const result = await this.pushTables(config.tablesDB, options.attempts); + const result = await this.pushTables(config.tables, options.attempts); results.tables = result; allErrors.push(...result.errors); } catch (e: any) { @@ -1162,8 +1162,8 @@ export class Push { await Promise.all( tables.map(async (table: any) => { try { - const tablesDBService = await getTablesDBService(this.projectClient); - const remoteTable = await tablesDBService.getTable({ + const tablesService = await getTablesDBService(this.projectClient); + const remoteTable = await tablesService.getTable({ databaseId: table["databaseId"], tableId: table["$id"], }); @@ -1180,7 +1180,7 @@ export 
class Push { changes.push("permissions"); if (changes.length > 0) { - await tablesDBService.updateTable( + await tablesService.updateTable( table["databaseId"], table["$id"], table.name, @@ -1201,10 +1201,8 @@ export class Push { log( `Table ${table.name} does not exist in the project. Creating ... `, ); - const tablesDBService = await getTablesDBService( - this.projectClient, - ); - await tablesDBService.createTable( + const tablesService = await getTablesDBService(this.projectClient); + await tablesService.createTable( table["databaseId"], table["$id"], table.name, @@ -1486,7 +1484,7 @@ const pushSettings = async (): Promise => { localConfig.getProject().projectId, ); - const remoteSettings = localConfig.createSettingsObject(response ?? {}); + const remoteSettings = createSettingsObject(response); const localSettings = localConfig.getProject().projectSettings ?? {}; log("Checking for changes ..."); @@ -1806,13 +1804,13 @@ const pushTable = async ({ const { resyncNeeded } = await checkAndApplyTablesDBChanges(); if (resyncNeeded) { - log("Resyncing configuration due to tablesDB deletions ..."); + log("Resyncing configuration due to tables deletions ..."); const remoteTablesDBs = ( await paginate( async (args: any) => { - const tablesDBService = await getTablesDBService(); - return await tablesDBService.list(args.queries || []); + const tablesService = await getTablesDBService(); + return await tablesService.list(args.queries || []); }, {}, 100, @@ -1832,7 +1830,7 @@ const pushTable = async ({ const validTablesDBs = localTablesDBs.filter((db: any) => remoteDatabaseIds.has(db.$id), ); - localConfig.set("tablesDB", validTablesDBs); + localConfig.set("tables", validTablesDBs); success("Configuration resynced successfully."); console.log(); @@ -1847,8 +1845,8 @@ const pushTable = async ({ try { const { tables: remoteTables } = await paginate( async (args: any) => { - const tablesDBService = await getTablesDBService(); - return await tablesDBService.listTables( + const tablesService = await getTablesDBService(); + return await tablesService.listTables( args.databaseId, args.queries || [], ); @@ -1895,8 +1893,8 @@ const pushTable = async ({ log( `Deleting table ${table.name} ( ${table.$id} ) from database ${table.databaseName} ...`, ); - const tablesDBService = await getTablesDBService(); - await tablesDBService.deleteTable(table.databaseId, table.$id); + const tablesService = await getTablesDBService(); + await tablesService.deleteTable(table.databaseId, table.$id); success(`Deleted ${table.name} ( ${table.$id} )`); } catch (e: any) { error( @@ -1936,8 +1934,8 @@ const pushTable = async ({ !(await approveChanges( tables, async (args: any) => { - const tablesDBService = await getTablesDBService(); - return await tablesDBService.getTable(args.databaseId, args.tableId); + const tablesService = await getTablesDBService(); + return await tablesService.getTable(args.databaseId, args.tableId); }, KeysTable, "tableId", diff --git a/lib/config.ts b/lib/config.ts index b2bf54b7..026f24d4 100644 --- a/lib/config.ts +++ b/lib/config.ts @@ -3,22 +3,23 @@ import fs from "fs"; import _path from "path"; import process from "process"; import JSONbig from "json-bigint"; +import type { Models } from "@appwrite.io/console"; import type { - BucketConfig, - CollectionConfig, + BucketType, + CollectionType, ConfigData, Entity, - FunctionConfig, + FunctionType, GlobalConfigData, - ProjectConfigData, - ProjectSettings, - RawProjectSettings, + ConfigType, + SettingsType, SessionData, - SiteConfig, - TableConfig, - 
TeamConfig, - TopicConfig, + SiteType, + TableType, + TeamType, + TopicType, } from "./types.js"; +import { createSettingsObject } from "./utils.js"; const JSONBigInt = JSONbig({ storeAsString: false }); @@ -302,7 +303,7 @@ class Config { } } -class Local extends Config { +class Local extends Config { static CONFIG_FILE_PATH = "appwrite.config.json"; static CONFIG_FILE_PATH_LEGACY = "appwrite.json"; configDirectoryPath = ""; @@ -347,21 +348,21 @@ class Local extends Config { } getEndpoint(): string { - return (this.get("endpoint" as keyof ProjectConfigData) as string) || ""; + return (this.get("endpoint" as keyof ConfigType) as string) || ""; } setEndpoint(endpoint: string): void { this.set("endpoint" as any, endpoint); } - getSites(): SiteConfig[] { + getSites(): SiteType[] { if (!this.has("sites")) { return []; } return this.get("sites") ?? []; } - getSite($id: string): SiteConfig | Record { + getSite($id: string): SiteType | Record { if (!this.has("sites")) { return {}; } @@ -376,7 +377,7 @@ class Local extends Config { return {}; } - addSite(props: SiteConfig): void { + addSite(props: SiteType): void { props = whitelistKeys(props, KeysSite, { vars: KeysVars, }); @@ -401,14 +402,14 @@ class Local extends Config { this.set("sites", sites); } - getFunctions(): FunctionConfig[] { + getFunctions(): FunctionType[] { if (!this.has("functions")) { return []; } return this.get("functions") ?? []; } - getFunction($id: string): FunctionConfig | Record { + getFunction($id: string): FunctionType | Record { if (!this.has("functions")) { return {}; } @@ -423,7 +424,7 @@ class Local extends Config { return {}; } - addFunction(props: FunctionConfig): void { + addFunction(props: FunctionType): void { props = whitelistKeys(props, KeysFunction, { vars: KeysVars, }); @@ -448,14 +449,14 @@ class Local extends Config { this.set("functions", functions); } - getCollections(): CollectionConfig[] { + getCollections(): CollectionType[] { if (!this.has("collections")) { return []; } return this.get("collections") ?? []; } - getCollection($id: string): CollectionConfig | Record { + getCollection($id: string): CollectionType | Record { if (!this.has("collections")) { return {}; } @@ -470,7 +471,7 @@ class Local extends Config { return {}; } - addCollection(props: CollectionConfig): void { + addCollection(props: CollectionType): void { props = whitelistKeys(props, KeysCollection, { attributes: KeysAttributes, indexes: KeyIndexes, @@ -495,14 +496,14 @@ class Local extends Config { this.set("collections", collections); } - getTables(): TableConfig[] { + getTables(): TableType[] { if (!this.has("tables")) { return []; } return this.get("tables") ?? []; } - getTable($id: string): TableConfig | Record { + getTable($id: string): TableType | Record { if (!this.has("tables")) { return {}; } @@ -517,7 +518,7 @@ class Local extends Config { return {}; } - addTable(props: TableConfig): void { + addTable(props: TableType): void { props = whitelistKeys(props, KeysTable, { columns: KeysColumns, indexes: KeyIndexesColumns, @@ -542,14 +543,14 @@ class Local extends Config { this.set("tables", tables); } - getBuckets(): BucketConfig[] { + getBuckets(): BucketType[] { if (!this.has("buckets")) { return []; } return this.get("buckets") ?? 
[]; } - getBucket($id: string): BucketConfig | Record { + getBucket($id: string): BucketType | Record { if (!this.has("buckets")) { return {}; } @@ -564,7 +565,7 @@ class Local extends Config { return {}; } - addBucket(props: BucketConfig): void { + addBucket(props: BucketType): void { props = whitelistKeys(props, KeysStorage); if (!this.has("buckets")) { @@ -583,14 +584,14 @@ class Local extends Config { this.set("buckets", buckets); } - getMessagingTopics(): TopicConfig[] { + getMessagingTopics(): TopicType[] { if (!this.has("topics")) { return []; } return this.get("topics") ?? []; } - getMessagingTopic($id: string): TopicConfig | Record { + getMessagingTopic($id: string): TopicType | Record { if (!this.has("topics")) { return {}; } @@ -605,7 +606,7 @@ class Local extends Config { return {}; } - addMessagingTopic(props: TopicConfig): void { + addMessagingTopic(props: TopicType): void { props = whitelistKeys(props, KeysTopics); if (!this.has("topics")) { @@ -648,14 +649,14 @@ class Local extends Config { this._addDBEntity("databases", props, KeysDatabase); } - getTeams(): TeamConfig[] { + getTeams(): TeamType[] { if (!this.has("teams")) { return []; } return this.get("teams") ?? []; } - getTeam($id: string): TeamConfig | Record { + getTeam($id: string): TeamType | Record { if (!this.has("teams")) { return {}; } @@ -670,7 +671,7 @@ class Local extends Config { return {}; } - addTeam(props: TeamConfig): void { + addTeam(props: TeamType): void { props = whitelistKeys(props, KeysTeams); if (!this.has("teams")) { this.set("teams", []); @@ -691,7 +692,7 @@ class Local extends Config { getProject(): { projectId?: string; projectName?: string; - projectSettings?: ProjectSettings; + projectSettings?: SettingsType; } { if (!this.has("projectId")) { return {}; @@ -707,7 +708,7 @@ class Local extends Config { setProject( projectId: string, projectName: string = "", - projectSettings?: RawProjectSettings, + project?: Models.Project, ): void { this.set("projectId", projectId); @@ -715,51 +716,11 @@ class Local extends Config { this.set("projectName", projectName); } - if (projectSettings === undefined) { + if (project === undefined) { return; } - this.set("settings", this.createSettingsObject(projectSettings)); - } - - createSettingsObject(projectSettings: RawProjectSettings): ProjectSettings { - return { - services: { - account: projectSettings.serviceStatusForAccount, - avatars: projectSettings.serviceStatusForAvatars, - databases: projectSettings.serviceStatusForDatabases, - locale: projectSettings.serviceStatusForLocale, - health: projectSettings.serviceStatusForHealth, - storage: projectSettings.serviceStatusForStorage, - teams: projectSettings.serviceStatusForTeams, - users: projectSettings.serviceStatusForUsers, - sites: projectSettings.serviceStatusForSites, - functions: projectSettings.serviceStatusForFunctions, - graphql: projectSettings.serviceStatusForGraphql, - messaging: projectSettings.serviceStatusForMessaging, - }, - auth: { - methods: { - jwt: projectSettings.authJWT, - phone: projectSettings.authPhone, - invites: projectSettings.authInvites, - anonymous: projectSettings.authAnonymous, - "email-otp": projectSettings.authEmailOtp, - "magic-url": projectSettings.authUsersAuthMagicURL, - "email-password": projectSettings.authEmailPassword, - }, - security: { - duration: projectSettings.authDuration, - limit: projectSettings.authLimit, - sessionsLimit: projectSettings.authSessionsLimit, - passwordHistory: projectSettings.authPasswordHistory, - passwordDictionary: 
projectSettings.authPasswordDictionary, - personalDataCheck: projectSettings.authPersonalDataCheck, - sessionAlerts: projectSettings.authSessionAlerts, - mockNumbers: projectSettings.authMockNumbers, - }, - }, - }; + this.set("settings", createSettingsObject(project)); } } diff --git a/lib/emulation/docker.ts b/lib/emulation/docker.ts index cb6cf296..6a5bab66 100644 --- a/lib/emulation/docker.ts +++ b/lib/emulation/docker.ts @@ -10,7 +10,7 @@ import fs from "fs"; import { log, error, success } from "../parser.js"; import { openRuntimesVersion, systemTools, Queue } from "./utils.js"; import { getAllFiles } from "../utils.js"; -import type { FunctionConfig } from "../types.js"; +import type { FunctionType } from "../types.js"; export async function dockerStop(id: string): Promise { const stopProcess = childProcess.spawn("docker", ["rm", "--force", id], { @@ -26,7 +26,7 @@ export async function dockerStop(id: string): Promise { }); } -export async function dockerPull(func: FunctionConfig): Promise { +export async function dockerPull(func: FunctionType): Promise { const runtimeChunks = func.runtime.split("-"); const runtimeVersion = runtimeChunks.pop(); const runtimeName = runtimeChunks.join("-"); @@ -48,7 +48,7 @@ export async function dockerPull(func: FunctionConfig): Promise { } export async function dockerBuild( - func: FunctionConfig, + func: FunctionType, variables: Record, ): Promise { const runtimeChunks = func.runtime.split("-"); @@ -182,7 +182,7 @@ export async function dockerBuild( } export async function dockerStart( - func: FunctionConfig, + func: FunctionType, variables: Record, port: number, ): Promise { diff --git a/lib/types.ts b/lib/types.ts index 4ef751a5..a133a549 100644 --- a/lib/types.ts +++ b/lib/types.ts @@ -1,6 +1,24 @@ import type { File } from "undici"; import type { ReadableStream } from "node:stream/web"; -import type { Models } from "@appwrite.io/console"; + +// Re-export config types from the Zod schema source of truth +export type { + ConfigType, + SettingsType, + FunctionType, + SiteType, + DatabaseType, + CollectionType, + TableType, + TopicType, + TeamType, + MessageType, + BucketType, + AttributeType, + IndexType, + ColumnType, + TableIndexType, +} from "./commands/config.js"; export type ResponseType = "json" | "arraybuffer"; @@ -74,231 +92,3 @@ export interface GlobalConfigData extends ConfigData { current: string; cookie?: string; } - -export interface ProjectSettings { - services?: { - account?: boolean; - avatars?: boolean; - databases?: boolean; - locale?: boolean; - health?: boolean; - storage?: boolean; - teams?: boolean; - users?: boolean; - sites?: boolean; - functions?: boolean; - graphql?: boolean; - messaging?: boolean; - }; - auth?: { - methods?: { - jwt?: boolean; - phone?: boolean; - invites?: boolean; - anonymous?: boolean; - "email-otp"?: boolean; - "magic-url"?: boolean; - "email-password"?: boolean; - }; - security?: { - duration?: number; - limit?: number; - sessionsLimit?: number; - passwordHistory?: number; - passwordDictionary?: boolean; - personalDataCheck?: boolean; - sessionAlerts?: boolean; - mockNumbers?: Models.MockNumber[]; - }; - }; -} - -export interface RawProjectSettings { - serviceStatusForAccount?: boolean; - serviceStatusForAvatars?: boolean; - serviceStatusForDatabases?: boolean; - serviceStatusForLocale?: boolean; - serviceStatusForHealth?: boolean; - serviceStatusForStorage?: boolean; - serviceStatusForTeams?: boolean; - serviceStatusForUsers?: boolean; - serviceStatusForSites?: boolean; - serviceStatusForFunctions?: 
boolean; - serviceStatusForGraphql?: boolean; - serviceStatusForMessaging?: boolean; - authJWT?: boolean; - authPhone?: boolean; - authInvites?: boolean; - authAnonymous?: boolean; - authEmailOtp?: boolean; - authUsersAuthMagicURL?: boolean; - authEmailPassword?: boolean; - authDuration?: number; - authLimit?: number; - authSessionsLimit?: number; - authPasswordHistory?: number; - authPasswordDictionary?: boolean; - authPersonalDataCheck?: boolean; - authSessionAlerts?: boolean; - authMockNumbers?: Models.MockNumber[]; -} - -export interface DatabaseConfig { - $id: string; - name: string; - enabled?: boolean; -} - -export interface AttributeConfig { - key: string; - type: string; - required?: boolean; - array?: boolean; - size?: number; - default?: unknown; - min?: number; - max?: number; - format?: string; - elements?: string[]; - relatedCollection?: string; - relationType?: string; - twoWay?: boolean; - twoWayKey?: string; - onDelete?: string; - side?: string; - encrypt?: boolean; -} - -export interface IndexConfig { - key: string; - type: string; - status?: string; - attributes?: string[]; - orders?: string[]; -} - -export interface CollectionConfig { - $id: string; - $permissions?: string[]; - databaseId: string; - name: string; - enabled?: boolean; - documentSecurity?: boolean; - attributes?: AttributeConfig[]; - indexes?: IndexConfig[]; -} - -export interface ColumnConfig { - key: string; - type: string; - required?: boolean; - array?: boolean; - size?: number; - default?: unknown; - min?: number; - max?: number; - format?: string; - elements?: string[]; - relatedTable?: string; - relationType?: string; - twoWay?: boolean; - twoWayKey?: string; - onDelete?: string; - side?: string; - encrypt?: boolean; -} - -export interface TableIndexConfig { - key: string; - type: string; - status?: string; - columns?: string[]; - orders?: string[]; -} - -export interface TableConfig { - $id: string; - $permissions?: string[]; - databaseId: string; - name: string; - enabled?: boolean; - rowSecurity?: boolean; - columns?: ColumnConfig[]; - indexes?: TableIndexConfig[]; -} - -export interface BucketConfig { - $id: string; - $permissions?: string[]; - name: string; - enabled?: boolean; - fileSecurity?: boolean; - maximumFileSize?: number; - allowedFileExtensions?: string[]; - compression?: string; - encryption?: boolean; - antivirus?: boolean; -} - -export interface FunctionConfig { - $id: string; - name: string; - runtime: string; - path: string; - entrypoint: string; - execute?: string[]; - enabled?: boolean; - logging?: boolean; - events?: string[]; - schedule?: string; - timeout?: number; - vars?: Record; - commands?: string; - scopes?: string[]; - specification?: string; - ignore?: string; -} - -export interface SiteConfig { - $id: string; - name: string; - path: string; - enabled?: boolean; - logging?: boolean; - timeout?: number; - framework: string; - buildRuntime?: string; - adapter?: string; - installCommand?: string; - buildCommand?: string; - outputDirectory?: string; - fallbackFile?: string; - specification?: string; - vars?: Record; - ignore?: string; -} - -export interface TeamConfig { - $id: string; - name: string; -} - -export interface TopicConfig { - $id: string; - name: string; - subscribe?: string[]; -} - -export interface ProjectConfigData extends ConfigData { - projectId?: string; - projectName?: string; - settings?: ProjectSettings; - functions?: FunctionConfig[]; - collections?: CollectionConfig[]; - databases?: DatabaseConfig[]; - buckets?: BucketConfig[]; - teams?: 
TeamConfig[]; - topics?: TopicConfig[]; - sites?: SiteConfig[]; - tables?: TableConfig[]; -} diff --git a/lib/utils.ts b/lib/utils.ts index 734c30ef..670e58c1 100644 --- a/lib/utils.ts +++ b/lib/utils.ts @@ -4,7 +4,49 @@ import net from "net"; import childProcess from "child_process"; import chalk from "chalk"; import { fetch } from "undici"; +import type { Models } from "@appwrite.io/console"; import { localConfig, globalConfig } from "./config.js"; +import type { SettingsType } from "./commands/config.js"; + +export const createSettingsObject = (project: Models.Project): SettingsType => { + return { + services: { + account: project.serviceStatusForAccount, + avatars: project.serviceStatusForAvatars, + databases: project.serviceStatusForDatabases, + locale: project.serviceStatusForLocale, + health: project.serviceStatusForHealth, + storage: project.serviceStatusForStorage, + teams: project.serviceStatusForTeams, + users: project.serviceStatusForUsers, + sites: project.serviceStatusForSites, + functions: project.serviceStatusForFunctions, + graphql: project.serviceStatusForGraphql, + messaging: project.serviceStatusForMessaging, + }, + auth: { + methods: { + jwt: project.authJWT, + phone: project.authPhone, + invites: project.authInvites, + anonymous: project.authAnonymous, + "email-otp": project.authEmailOtp, + "magic-url": project.authUsersAuthMagicURL, + "email-password": project.authEmailPassword, + }, + security: { + duration: project.authDuration, + limit: project.authLimit, + sessionsLimit: project.authSessionsLimit, + passwordHistory: project.authPasswordHistory, + passwordDictionary: project.authPasswordDictionary, + personalDataCheck: project.authPersonalDataCheck, + sessionAlerts: project.authSessionAlerts, + mockNumbers: project.authMockNumbers, + }, + }, + }; +}; /** * Get the latest version from npm registry From 23164ae1de4b131187de423c0e10776a455ae621 Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Wed, 7 Jan 2026 15:30:35 +0530 Subject: [PATCH 31/41] fix exports --- lib/types.ts | 37 ++++++++++++++++++------------------- 1 file changed, 18 insertions(+), 19 deletions(-) diff --git a/lib/types.ts b/lib/types.ts index a133a549..802be639 100644 --- a/lib/types.ts +++ b/lib/types.ts @@ -1,25 +1,6 @@ import type { File } from "undici"; import type { ReadableStream } from "node:stream/web"; -// Re-export config types from the Zod schema source of truth -export type { - ConfigType, - SettingsType, - FunctionType, - SiteType, - DatabaseType, - CollectionType, - TableType, - TopicType, - TeamType, - MessageType, - BucketType, - AttributeType, - IndexType, - ColumnType, - TableIndexType, -} from "./commands/config.js"; - export type ResponseType = "json" | "arraybuffer"; export interface Headers { @@ -92,3 +73,21 @@ export interface GlobalConfigData extends ConfigData { current: string; cookie?: string; } + +export type { + ConfigType, + SettingsType, + FunctionType, + SiteType, + DatabaseType, + CollectionType, + TableType, + TopicType, + TeamType, + MessageType, + BucketType, + AttributeType, + IndexType, + ColumnType, + TableIndexType, +} from "./commands/config.js"; \ No newline at end of file From cd12942d26d0b3154fd91dad010417d23da31a2f Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Wed, 7 Jan 2026 15:31:55 +0530 Subject: [PATCH 32/41] formatting --- lib/types.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/types.ts b/lib/types.ts index 802be639..7d654bce 100644 --- a/lib/types.ts +++ b/lib/types.ts @@ -90,4 +90,4 @@ export type { IndexType, 
ColumnType, TableIndexType, -} from "./commands/config.js"; \ No newline at end of file +} from "./commands/config.js"; From 957d233c19c2e6abbee81918199e0c8dc629efa6 Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Wed, 7 Jan 2026 15:39:48 +0530 Subject: [PATCH 33/41] fix exports --- lib/commands/config.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/commands/config.ts b/lib/commands/config.ts index b05a148b..1249130b 100644 --- a/lib/commands/config.ts +++ b/lib/commands/config.ts @@ -442,10 +442,10 @@ export type SiteType = z.infer; export type FunctionType = z.infer; export type DatabaseType = z.infer; export type CollectionType = z.infer; -export type AttributeType = z.infer; +export type AttributeType = z.infer; export type IndexType = z.infer; export type TableType = z.infer; -export type ColumnType = z.infer; +export type ColumnType = z.infer; export type TableIndexType = z.infer; export type TopicType = z.infer; export type TeamType = z.infer; From 1ae4c27133cb83e049dfe0837ced39a4e35281db Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Wed, 7 Jan 2026 16:54:49 +0530 Subject: [PATCH 34/41] schema validation --- index.ts | 5 + lib/commands/push.ts | 51 ++-- lib/commands/schema.ts | 100 ++++++ lib/commands/utils/error-formatter.ts | 417 ++++++++++++++++++++++++++ 4 files changed, 543 insertions(+), 30 deletions(-) create mode 100644 lib/commands/schema.ts create mode 100644 lib/commands/utils/error-formatter.ts diff --git a/index.ts b/index.ts index 108957b8..09dcbf4a 100644 --- a/index.ts +++ b/index.ts @@ -46,6 +46,9 @@ import { tokens } from "./lib/commands/services/tokens.js"; import { users } from "./lib/commands/services/users.js"; import { vcs } from "./lib/commands/services/vcs.js"; import searchList from "inquirer-search-list"; +import { Push } from "./lib/commands/push.js"; +import { Pull } from "./lib/commands/pull.js"; +import { Schema } from "./lib/commands/schema.js"; inquirer.registerPrompt("search-list", searchList); @@ -166,3 +169,5 @@ if (process.argv.includes("-v") || process.argv.includes("--version")) { process.stdout.columns = oldWidth; } + +export { Schema, Push, Pull }; diff --git a/lib/commands/push.ts b/lib/commands/push.ts index ec8a6b78..1e080e34 100644 --- a/lib/commands/push.ts +++ b/lib/commands/push.ts @@ -75,8 +75,18 @@ import { checkAndApplyTablesDBChanges } from "./utils/database-sync.js"; const POLL_DEBOUNCE = 2000; // Milliseconds const POLL_DEFAULT_VALUE = 30; -interface PushResourcesOptions { +export interface PushOptions { skipDeprecated?: boolean; + functionOptions?: { + async?: boolean; + code?: boolean; + withVariables?: boolean; + }; + siteOptions?: { + async?: boolean; + code?: boolean; + withVariables?: boolean; + }; } interface PushSiteOptions { @@ -108,20 +118,7 @@ export class Push { public async pushResources( config: ConfigType, - options: { - skipDeprecated?: boolean; - functionOptions?: { - async?: boolean; - code?: boolean; - withVariables?: boolean; - }; - siteOptions?: { - async?: boolean; - code?: boolean; - withVariables?: boolean; - }; - attempts?: number; - } = { skipDeprecated: true }, + options: PushOptions = { skipDeprecated: true }, ): Promise<{ results: Record; errors: any[]; @@ -228,7 +225,7 @@ export class Push { if (config.tables && config.tables.length > 0) { try { log("Pushing tables ..."); - const result = await this.pushTables(config.tables, options.attempts); + const result = await this.pushTables(config.tables); results.tables = result; allErrors.push(...result.errors); } 
catch (e: any) { @@ -257,10 +254,7 @@ export class Push { }; }, ); - const result = await this.pushCollections( - collectionsWithDbNames, - options.attempts, - ); + const result = await this.pushCollections(collectionsWithDbNames); results.collections = result; allErrors.push(...result.errors); } catch (e: any) { @@ -1275,14 +1269,11 @@ export class Push { }; } - public async pushCollections( - collections: any[], - attempts?: number, - ): Promise<{ + public async pushCollections(collections: any[]): Promise<{ successfullyPushed: number; errors: any[]; }> { - const pools = new Pools(attempts ?? POLL_DEFAULT_VALUE); + const pools = new Pools(POLL_DEFAULT_VALUE); const attributes = new Attributes(pools); const errors: any[] = []; @@ -1438,7 +1429,9 @@ async function createPushInstance(): Promise { const pushResources = async ({ skipDeprecated = false, -}: PushResourcesOptions = {}): Promise => { +}: { + skipDeprecated?: boolean; +} = {}): Promise => { if (cliConfig.all) { checkDeployConditions(localConfig); @@ -1966,9 +1959,7 @@ const pushTable = async ({ } }; -const pushCollection = async ({ - attempts, -}: PushTableOptions = {}): Promise => { +const pushCollection = async ({}: PushTableOptions = {}): Promise => { warn( "appwrite push collection has been deprecated. Please consider using 'appwrite push tables' instead", ); @@ -2029,7 +2020,7 @@ const pushCollection = async ({ log("Pushing collections ..."); const pushInstance = await createPushInstance(); - const result = await pushInstance.pushCollections(collections, attempts); + const result = await pushInstance.pushCollections(collections); const { successfullyPushed, errors } = result; diff --git a/lib/commands/schema.ts b/lib/commands/schema.ts new file mode 100644 index 00000000..3a4adfc7 --- /dev/null +++ b/lib/commands/schema.ts @@ -0,0 +1,100 @@ +import { Client } from "@appwrite.io/console"; +import type { ConfigType } from "./config.js"; +import { ConfigSchema } from "./config.js"; +import { Pull, PullOptions } from "./pull.js"; +import { Push, PushOptions } from "./push.js"; +import { parseWithBetterErrors } from "./utils/error-formatter.js"; +import JSONbig from "json-bigint"; +import * as fs from "fs"; + +const JSONBig = JSONbig({ storeAsString: false }); + +export class Schema { + private pullCommand: Pull; + private pushCommand: Push; + + constructor({ + projectClient, + consoleClient, + }: { + projectClient: Client; + consoleClient: Client; + }) { + this.pullCommand = new Pull(projectClient, consoleClient); + this.pushCommand = new Push(projectClient, consoleClient); + } + + /** + * Validates the provided configuration object against the schema. + * + * @param config - The configuration object to validate. + * @returns The validated and possibly transformed configuration object. + * @throws If the configuration does not match the schema. + */ + public validate(config: ConfigType): ConfigType { + return parseWithBetterErrors( + ConfigSchema, + config, + "Configuration schema validation", + config, + ); + } + + /** + * Pulls the current schema and resources from the remote Appwrite project. + * + * @param config - The local configuration object. + * @param options - Optional settings for the pull operation. + * @returns A Promise that resolves to the updated configuration object reflecting the remote state. 
+ */ + public async pull( + config: ConfigType, + options: PullOptions = { all: true }, + ): Promise { + return await this.pullCommand.pullResources(config, options); + } + + /** + * Pushes the local configuration and schema to the remote Appwrite project. + * Optionally syncs the config file by pulling the updated state from the server after push. + * + * @param config - The local configuration object to push. + * @param options - Optional settings for the push operation. Use `force: true` to allow destructive changes. + * @param configPath - Optional path to the config file. If provided, the config will be synced after push. + * @returns A Promise that resolves when the push operation is complete. + * @throws {DestructiveChangeError} When destructive changes are detected and force is not enabled. + */ + public async push( + config: ConfigType, + options: PushOptions, + configPath?: string, + ): Promise { + await this.pushCommand.pushResources(config, options); + + if (configPath) { + const updatedConfig = await this.pullCommand.pullResources(config); + this.write(updatedConfig, configPath); + } + } + + /** + * Reads the configuration object from a file. + * + * @param path - The path to the file to read. + * @returns The configuration object. + */ + public read(path: string): ConfigType { + return JSONBig.parse(fs.readFileSync(path, "utf8")) as ConfigType; + } + + /** + * Writes the configuration object to a file. + * + * @param config - The configuration object to write. + * @param path - The path to the file to write. + * @returns void + */ + public write(config: ConfigType, path: string): void { + fs.writeFileSync(path, JSONBig.stringify(config, null, 4)); + } +} diff --git a/lib/commands/utils/error-formatter.ts b/lib/commands/utils/error-formatter.ts new file mode 100644 index 00000000..fae21ec4 --- /dev/null +++ b/lib/commands/utils/error-formatter.ts @@ -0,0 +1,417 @@ +import { ZodError, ZodIssue, z } from "zod"; +import type { + $ZodIssueInvalidType, + $ZodIssueTooBig, + $ZodIssueTooSmall, + $ZodIssueUnrecognizedKeys, + $ZodIssueNotMultipleOf, + $ZodIssueInvalidStringFormat, +} from "zod/v4/core"; + +/** + * Formats a Zod validation error into a human-readable message + */ +export class ZodErrorFormatter { + static formatError(error: ZodError, contextData?: any): string { + const issues = error.issues; + + if (issues.length === 1) { + return this.formatIssue(issues[0], contextData); + } + + const messages = issues.map( + (issue) => `• ${this.formatIssue(issue, contextData)}`, + ); + return `Found ${issues.length} validation errors:\n\n${messages.join("\n\n")}`; + } + + private static formatIssue(issue: ZodIssue, contextData?: any): string { + const location = this.formatPath(issue.path, contextData); + const locationText = location ? ` at ${location}` : ""; + + switch (issue.code) { + case "unrecognized_keys": { + const unrecognizedIssue = issue as $ZodIssueUnrecognizedKeys; + const keys = unrecognizedIssue.keys.map((key) => `"${key}"`).join(", "); + const propertyWord = + unrecognizedIssue.keys.length === 1 ? 
"property" : "properties"; + return `Unexpected ${propertyWord} ${keys}${locationText}`; + } + + case "invalid_type": { + const invalidTypeIssue = issue as $ZodIssueInvalidType; + return `Expected ${invalidTypeIssue.expected}, got ${this.formatValue(invalidTypeIssue.input)}${locationText}`; + } + + case "too_small": { + const tooSmallIssue = issue as $ZodIssueTooSmall; + const minimum = tooSmallIssue.minimum; + const origin = tooSmallIssue.origin; + + if (origin === "array") { + const itemWord = minimum === 1 ? "item" : "items"; + return `Array${locationText} must have at least ${minimum} ${itemWord}`; + } else if (origin === "string") { + const charWord = minimum === 1 ? "character" : "characters"; + return `String${locationText} must be at least ${minimum} ${charWord}`; + } else if (origin === "number") { + return `Number${locationText} must be at least ${minimum}`; + } + return `Value${locationText} must be at least ${minimum}`; + } + + case "too_big": { + const tooBigIssue = issue as $ZodIssueTooBig; + const maximum = tooBigIssue.maximum; + const origin = tooBigIssue.origin; + + if (origin === "array") { + const itemWord = maximum === 1 ? "item" : "items"; + return `Array${locationText} must have at most ${maximum} ${itemWord}`; + } else if (origin === "string") { + const charWord = maximum === 1 ? "character" : "characters"; + return `String${locationText} must be at most ${maximum} ${charWord}`; + } else if (origin === "number") { + return `Number${locationText} must be at most ${maximum}`; + } + return `Value${locationText} must be at most ${maximum}`; + } + + case "invalid_format": { + const formatIssue = issue as $ZodIssueInvalidStringFormat; + return `Invalid ${formatIssue.format} format${locationText}`; + } + + case "invalid_union": { + // Check if this is an enum validation error by examining the issue context + const unionIssue = issue as any; + if (unionIssue.unionErrors && unionIssue.unionErrors.length > 0) { + // Look for enum-like validation errors + const enumError = unionIssue.unionErrors.find( + (err: any) => + err.issues && + err.issues.some( + (subIssue: any) => + subIssue.code === "invalid_literal" || + subIssue.code === "invalid_enum_value", + ), + ); + + if (enumError) { + // Extract allowed values from the enum error + const enumIssues = enumError.issues.filter( + (subIssue: any) => + subIssue.code === "invalid_literal" || + subIssue.code === "invalid_enum_value", + ); + + if (enumIssues.length > 0) { + // Try to extract the allowed values + const allowedValues = this.extractAllowedEnumValues( + unionIssue.unionErrors, + ); + if (allowedValues.length > 0) { + const valuesList = allowedValues + .map((v) => `"${v}"`) + .join(", "); + return `Invalid value${locationText}. Expected one of: ${valuesList}`; + } + } + } + } + return `Invalid value${locationText}. 
None of the allowed types matched`; + } + + case "custom": + return `${issue.message || "Custom validation failed"}${locationText}`; + + case "not_multiple_of": { + const multipleOfIssue = issue as $ZodIssueNotMultipleOf; + return `Number${locationText} must be a multiple of ${multipleOfIssue.divisor}`; + } + + case "invalid_key": + return `Invalid key${locationText}`; + + case "invalid_element": + return `Invalid element${locationText}`; + + case "invalid_value": { + const invalidValueIssue = issue as any; + if ( + invalidValueIssue.values && + Array.isArray(invalidValueIssue.values) + ) { + const allowedValues = invalidValueIssue.values + .map((v: any) => `"${v}"`) + .join(", "); + return `Invalid value${locationText}. Expected one of: ${allowedValues}`; + } + return `Invalid value${locationText}`; + } + + default: + return `${(issue as ZodIssue).message || "Validation failed"}${locationText}`; + } + } + + private static formatValue(value: unknown): string { + if (value === null) { + return "null"; + } + if (value === undefined) { + return "undefined"; + } + if (typeof value === "string") { + return `"${value}"`; + } + return String(value); + } + + private static formatPath(path: PropertyKey[], contextData?: any): string { + if (path.length === 0) return ""; + + const formatted: string[] = []; + + for (let i = 0; i < path.length; i++) { + const segment = path[i]; + + if (typeof segment === "number") { + formatted.push(`[${segment}]`); + } else if (typeof segment === "string") { + if (i === 0) { + formatted.push(segment); + } else { + formatted.push(`.${segment}`); + } + } else { + // Handle symbol keys by converting to string + formatted.push(`.${String(segment)}`); + } + } + + return this.humanizePath(formatted.join(""), contextData); + } + + private static humanizePath(path: string, contextData?: any): string { + // Try to resolve names from context data first + if (contextData) { + const resolvedPath = this.resolvePathWithNames(path, contextData); + if (resolvedPath !== path) { + return resolvedPath; + } + } + + const patterns = [ + { + pattern: /^collections\[(\d+)\]\.attributes\[(\d+)\]$/, + replacement: "Collections $1 → attributes $2", + }, + { + pattern: /^collections\[(\d+)\]\.indexes\[(\d+)\]$/, + replacement: "Collections $1 → indexes $2", + }, + { + pattern: /^collections\[(\d+)\]$/, + replacement: "Collections $1", + }, + { pattern: /^databases\[(\d+)\]$/, replacement: "Databases $1" }, + { pattern: /^functions\[(\d+)\]$/, replacement: "Functions $1" }, + { pattern: /^sites\[(\d+)\]$/, replacement: "Sites $1" }, + { pattern: /^buckets\[(\d+)\]$/, replacement: "Buckets $1" }, + { pattern: /^teams\[(\d+)\]$/, replacement: "Teams $1" }, + { pattern: /^topics\[(\d+)\]$/, replacement: "Topics $1" }, + { + pattern: /^settings\.auth\.methods$/, + replacement: "auth.methods", + }, + { + pattern: /^settings\.auth\.security$/, + replacement: "auth.security", + }, + { pattern: /^settings\.services$/, replacement: "services" }, + ]; + + for (const { pattern, replacement } of patterns) { + if (pattern.test(path)) { + return path.replace(pattern, replacement); + } + } + + return path + .replace(/\[(\d+)\]/g, " $1") + .replace(/\./g, " → ") + .replace(/^(\w)/, (match) => match.toUpperCase()); + } + + private static resolvePathWithNames(path: string, contextData: any): string { + // Handle collections and their attributes/indexes + const collectionAttributeMatch = path.match( + /^collections\[(\d+)\]\.attributes\[(\d+)\](.*)$/, + ); + if (collectionAttributeMatch) { + const [, 
collectionIndex, attributeIndex, remainder] =
+        collectionAttributeMatch;
+      const collection = contextData.collections?.[parseInt(collectionIndex)];
+      const attribute = collection?.attributes?.[parseInt(attributeIndex)];
+
+      if (collection && attribute) {
+        const collectionName = collection.name || collection.$id;
+        const attributeName = attribute.key;
+        return `Collections "${collectionName}" → attributes "${attributeName}"${remainder}`;
+      }
+    }
+
+    const collectionIndexMatch = path.match(
+      /^collections\[(\d+)\]\.indexes\[(\d+)\](.*)$/,
+    );
+    if (collectionIndexMatch) {
+      const [, collectionIndex, indexIndex, remainder] = collectionIndexMatch;
+      const collection = contextData.collections?.[parseInt(collectionIndex)];
+      const index = collection?.indexes?.[parseInt(indexIndex)];
+
+      if (collection && index) {
+        const collectionName = collection.name || collection.$id;
+        const indexName = index.key;
+        return `Collections "${collectionName}" → indexes "${indexName}"${remainder}`;
+      }
+    }
+
+    const collectionMatch = path.match(/^collections\[(\d+)\](.*)$/);
+    if (collectionMatch) {
+      const [, collectionIndex, remainder] = collectionMatch;
+      const collection = contextData.collections?.[parseInt(collectionIndex)];
+
+      if (collection) {
+        const collectionName = collection.name || collection.$id;
+        return `Collections "${collectionName}"${remainder}`;
+      }
+    }
+
+    // Handle databases
+    const databaseMatch = path.match(/^databases\[(\d+)\](.*)$/);
+    if (databaseMatch) {
+      const [, databaseIndex, remainder] = databaseMatch;
+      const database = contextData.databases?.[parseInt(databaseIndex)];
+
+      if (database) {
+        const databaseName = database.name || database.$id;
+        return `Databases "${databaseName}"${remainder}`;
+      }
+    }
+
+    // Handle functions
+    const functionMatch = path.match(/^functions\[(\d+)\](.*)$/);
+    if (functionMatch) {
+      const [, functionIndex, remainder] = functionMatch;
+      const func = contextData.functions?.[parseInt(functionIndex)];
+
+      if (func) {
+        const functionName = func.name || func.$id;
+        return `Functions "${functionName}"${remainder}`;
+      }
+    }
+
+    // Handle sites
+    const siteMatch = path.match(/^sites\[(\d+)\](.*)$/);
+    if (siteMatch) {
+      const [, siteIndex, remainder] = siteMatch;
+      const site = contextData.sites?.[parseInt(siteIndex)];
+
+      if (site) {
+        const siteName = site.name || site.$id;
+        return `Sites "${siteName}"${remainder}`;
+      }
+    }
+
+    // Handle buckets
+    const bucketMatch = path.match(/^buckets\[(\d+)\](.*)$/);
+    if (bucketMatch) {
+      const [, bucketIndex, remainder] = bucketMatch;
+      const bucket = contextData.buckets?.[parseInt(bucketIndex)];
+
+      if (bucket) {
+        const bucketName = bucket.name || bucket.$id;
+        return `Buckets "${bucketName}"${remainder}`;
+      }
+    }
+
+    // Handle teams
+    const teamMatch = path.match(/^teams\[(\d+)\](.*)$/);
+    if (teamMatch) {
+      const [, teamIndex, remainder] = teamMatch;
+      const team = contextData.teams?.[parseInt(teamIndex)];
+
+      if (team) {
+        const teamName = team.name || team.$id;
+        return `Teams "${teamName}"${remainder}`;
+      }
+    }
+
+    // Handle topics
+    const topicMatch = path.match(/^topics\[(\d+)\](.*)$/);
+    if (topicMatch) {
+      const [, topicIndex, remainder] = topicMatch;
+      const topic = contextData.topics?.[parseInt(topicIndex)];
+
+      if (topic) {
+        const topicName = topic.name || topic.$id;
+        return `Topics "${topicName}"${remainder}`;
+      }
+    }
+
+    return path;
+  }
+
+  private static extractAllowedEnumValues(unionErrors: any[]): string[] {
+    const allowedValues = new Set<string>();
+
+    for (const error of unionErrors) {
+      if
(error.issues) {
+        for (const issue of error.issues) {
+          if (
+            issue.code === "invalid_literal" &&
+            issue.expected !== undefined
+          ) {
+            allowedValues.add(String(issue.expected));
+          } else if (issue.code === "invalid_enum_value" && issue.options) {
+            issue.options.forEach((option: any) =>
+              allowedValues.add(String(option)),
+            );
+          }
+        }
+      }
+    }
+
+    return Array.from(allowedValues).sort();
+  }
+}
+
+/**
+ * Helper function to wrap Zod parse calls with better error formatting
+ * This function outputs the error directly to console and exits the process
+ */
+export function parseWithBetterErrors<T = any>(
+  schema: z.ZodTypeAny,
+  data: unknown,
+  context?: string,
+  contextData?: any,
+): T {
+  try {
+    return schema.parse(data) as T;
+  } catch (error) {
+    if (error instanceof ZodError) {
+      const formattedMessage = ZodErrorFormatter.formatError(
+        error,
+        contextData,
+      );
+      const errorMessage = context
+        ? `❌ ${context}: ${formattedMessage}`
+        : `❌ ${formattedMessage}`;
+
+      console.error(errorMessage);
+      process.exit(1);
+    }
+    throw error;
+  }
+}

From 6d2841b02c2cfa2613a9b17e4790ab3571644349 Mon Sep 17 00:00:00 2001
From: Chirag Aggarwal
Date: Wed, 7 Jan 2026 16:56:36 +0530
Subject: [PATCH 35/41] db

---
 CHANGELOG.md               |   4 +-
 README.md                  |   4 +-
 install.ps1                |   4 +-
 install.sh                 |   2 +-
 lib/client.ts              |   4 +-
 lib/commands/db.ts         | 324 +++++++++++++++++++++++++++++++++++++
 lib/commands/schema.ts     |   3 +
 lib/parser.ts              |   2 +-
 lib/sdks.ts                |   8 +-
 package.json               |   2 +-
 scoop/appwrite.config.json |   6 +-
 11 files changed, 345 insertions(+), 18 deletions(-)
 create mode 100644 lib/commands/db.ts

diff --git a/CHANGELOG.md b/CHANGELOG.md
index e9a30d38..f22b67e1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,10 +1,10 @@
 # Change Log
 
-## 13.0.0-rc.2
+## 13.0.0-rc.3
 
 - Fixes a lot of typescript errors throughout the codebase
 
-## 13.0.0-rc.2
+## 13.0.0-rc.3
 
 - Migrates codebase from JavaScript to TypeScript
 
diff --git a/README.md b/README.md
index 608fb2ca..1ccd2686 100644
--- a/README.md
+++ b/README.md
@@ -30,7 +30,7 @@ Once the installation is complete, you can verify the install using
 
 ```sh
 $ appwrite -v
-13.0.0-rc.2
+13.0.0-rc.3
 ```
 
 ### Install using prebuilt binaries
@@ -69,7 +69,7 @@ Once the installation completes, you can verify your install using
 
 ```
 $ appwrite -v
-13.0.0-rc.2
+13.0.0-rc.3
 ```
 
 ## Getting Started
diff --git a/install.ps1 b/install.ps1
index 86ca00bb..4d6b07fd 100644
--- a/install.ps1
+++ b/install.ps1
@@ -13,8 +13,8 @@
 # You can use "View source" of this page to see the full script.
 
 # REPO
-$GITHUB_x64_URL = "https://github.com/appwrite/sdk-for-cli/releases/download/13.0.0-rc.2/appwrite-cli-win-x64.exe"
-$GITHUB_arm64_URL = "https://github.com/appwrite/sdk-for-cli/releases/download/13.0.0-rc.2/appwrite-cli-win-arm64.exe"
+$GITHUB_x64_URL = "https://github.com/appwrite/sdk-for-cli/releases/download/13.0.0-rc.3/appwrite-cli-win-x64.exe"
+$GITHUB_arm64_URL = "https://github.com/appwrite/sdk-for-cli/releases/download/13.0.0-rc.3/appwrite-cli-win-arm64.exe"
 
 $APPWRITE_BINARY_NAME = "appwrite.exe"
 
diff --git a/install.sh b/install.sh
index faa9c248..2d0b089c 100644
--- a/install.sh
+++ b/install.sh
@@ -96,7 +96,7 @@ printSuccess() {
 
 downloadBinary() {
    echo "[2/4] Downloading executable for $OS ($ARCH) ..."
-    GITHUB_LATEST_VERSION="13.0.0-rc.2"
+    GITHUB_LATEST_VERSION="13.0.0-rc.3"
     GITHUB_FILE="appwrite-cli-${OS}-${ARCH}"
     GITHUB_URL="https://github.com/$GITHUB_REPOSITORY_NAME/releases/download/$GITHUB_LATEST_VERSION/$GITHUB_FILE"
 
diff --git a/lib/client.ts b/lib/client.ts
index 566b6aeb..5f043e14 100644
--- a/lib/client.ts
+++ b/lib/client.ts
@@ -26,8 +26,8 @@ class Client {
       "x-sdk-name": "Command Line",
       "x-sdk-platform": "console",
       "x-sdk-language": "cli",
-      "x-sdk-version": "13.0.0-rc.2",
-      "user-agent": `AppwriteCLI/13.0.0-rc.2 (${os.type()} ${os.version()}; ${os.arch()})`,
+      "x-sdk-version": "13.0.0-rc.3",
+      "user-agent": `AppwriteCLI/13.0.0-rc.3 (${os.type()} ${os.version()}; ${os.arch()})`,
       "X-Appwrite-Response-Format": "1.8.1",
     };
   }
diff --git a/lib/commands/db.ts b/lib/commands/db.ts
new file mode 100644
index 00000000..1149bee4
--- /dev/null
+++ b/lib/commands/db.ts
@@ -0,0 +1,324 @@
+import { ConfigType, AttributeSchema } from "./config.js";
+import * as fs from "fs";
+import * as path from "path";
+import { z } from "zod";
+
+export interface GenerateOptions {
+  strict?: boolean;
+}
+
+export interface GenerateResult {
+  dbContent: string;
+  typesContent: string;
+}
+
+export class Db {
+  private getType(
+    attribute: z.infer<typeof AttributeSchema>,
+    collections: NonNullable<ConfigType["collections"]>,
+  ): string {
+    let type = "";
+
+    switch (attribute.type) {
+      case "string":
+      case "datetime":
+        type = "string";
+        if (attribute.format === "enum") {
+          type = this.toPascalCase(attribute.key);
+        }
+        break;
+      case "integer":
+        type = "number";
+        break;
+      case "double":
+        type = "number";
+        break;
+      case "boolean":
+        type = "boolean";
+        break;
+      case "relationship":
+        const relatedCollection = collections.find(
+          (c) => c.$id === attribute.relatedCollection,
+        );
+        if (!relatedCollection) {
+          throw new Error(
+            `Related collection with ID '${attribute.relatedCollection}' not found.`,
+          );
+        }
+        type = this.toPascalCase(relatedCollection.name);
+        if (
+          (attribute.relationType === "oneToMany" &&
+            attribute.side === "parent") ||
+          (attribute.relationType === "manyToOne" &&
+            attribute.side === "child") ||
+          attribute.relationType === "manyToMany"
+        ) {
+          type = `${type}[]`;
+        }
+        break;
+      default:
+        throw new Error(`Unknown attribute type: ${attribute.type}`);
+    }
+
+    if (attribute.array) {
+      type += "[]";
+    }
+
+    if (!attribute.required && attribute.default === null) {
+      type += " | null";
+    }
+
+    return type;
+  }
+
+  private toPascalCase(str: string): string {
+    return str
+      .replace(/[-_\s]+(.)?/g, (_, char) => (char ? char.toUpperCase() : ""))
+      .replace(/^(.)/, (char) => char.toUpperCase());
+  }
+
+  private toCamelCase(str: string): string {
+    return str
+      .replace(/[-_\s]+(.)?/g, (_, char) => (char ? char.toUpperCase() : ""))
+      .replace(/^(.)/, (char) => char.toLowerCase());
+  }
+
+  private toUpperSnakeCase(str: string): string {
+    return str
+      .replace(/([a-z])([A-Z])/g, "$1_$2")
+      .replace(/[-\s]+/g, "_")
+      .toUpperCase();
+  }
+
+  private generateCollectionType(
+    collection: NonNullable<ConfigType["collections"]>[number],
+    collections: NonNullable<ConfigType["collections"]>,
+    options: GenerateOptions = {},
+  ): string {
+    if (!collection.attributes) {
+      return "";
+    }
+
+    const { strict = false } = options;
+    const typeName = this.toPascalCase(collection.name);
+    const attributes = collection.attributes
+      .map((attr: z.infer<typeof AttributeSchema>) => {
+        const key = strict ?
this.toCamelCase(attr.key) : attr.key;
+        return `  ${key}: ${this.getType(attr, collections)};`;
+      })
+      .join("\n");
+
+    return `export type ${typeName} = Models.Row & {\n${attributes}\n}`;
+  }
+
+  private generateEnums(
+    collections: NonNullable<ConfigType["collections"]>,
+  ): string {
+    const enumTypes: string[] = [];
+
+    for (const collection of collections) {
+      if (!collection.attributes) continue;
+
+      for (const attribute of collection.attributes) {
+        if (attribute.format === "enum" && attribute.elements) {
+          const enumName = this.toPascalCase(attribute.key);
+          const enumValues = attribute.elements
+            .map((element, index) => {
+              const key = this.toUpperSnakeCase(element);
+              const isLast = index === attribute.elements!.length - 1;
+              return `  ${key} = "${element}"${isLast ? "" : ","}`;
+            })
+            .join("\n");
+
+          enumTypes.push(`export enum ${enumName} {\n${enumValues}\n}`);
+        }
+      }
+    }
+
+    return enumTypes.join("\n\n");
+  }
+
+  private generateTypesFile(
+    config: ConfigType,
+    options: GenerateOptions = {},
+  ): string {
+    if (!config.collections || config.collections.length === 0) {
+      return "// No collections found in configuration\n";
+    }
+
+    const appwriteDep = this.getAppwriteDependency();
+    const enums = this.generateEnums(config.collections);
+    const types = config.collections
+      .map((collection) =>
+        this.generateCollectionType(collection, config.collections!, options),
+      )
+      .join("\n\n");
+
+    const parts = [`import { type Models } from '${appwriteDep}';`, ""];
+
+    if (enums) {
+      parts.push(enums);
+      parts.push("");
+    }
+
+    parts.push(types);
+    parts.push("");
+
+    return parts.join("\n");
+  }
+
+  private getAppwriteDependency(): string {
+    const cwd = process.cwd();
+
+    if (fs.existsSync(path.resolve(cwd, "package.json"))) {
+      try {
+        const packageJsonRaw = fs.readFileSync(
+          path.resolve(cwd, "package.json"),
+          "utf-8",
+        );
+        const packageJson = JSON.parse(packageJsonRaw);
+        return packageJson.dependencies?.["appwrite"]
+          ? "appwrite"
+          : "node-appwrite";
+      } catch {
+        // Fallback if package.json is invalid
+      }
+    }
+
+    if (fs.existsSync(path.resolve(cwd, "deno.json"))) {
+      return "https://deno.land/x/appwrite/mod.ts";
+    }
+
+    return "appwrite";
+  }
+
+  private generateDbFile(
+    config: ConfigType,
+    options: GenerateOptions = {},
+  ): string {
+    const { strict = false } = options;
+    const typesFileName = "appwrite.types.ts";
+
+    if (!config.collections || config.collections.length === 0) {
+      return "// No collections found in configuration\n";
+    }
+
+    const typeNames = config.collections.map((c) => this.toPascalCase(c.name));
+    const importPath = typesFileName
+      .replace(/\.d\.ts$/, "")
+      .replace(/\.ts$/, "");
+    const appwriteDep = this.getAppwriteDependency();
+
+    const collectionsCode = config.collections
+      .map((collection) => {
+        const collectionName = strict
+          ? this.toCamelCase(collection.name)
+          : collection.name;
+        const typeName = this.toPascalCase(collection.name);
+
+        return `  ${collectionName}: {
+    create: (data: Omit<${typeName}, keyof Models.Row>, options?: { rowId?: string; permissions?: string[] }) =>
+      tablesDB.createRow<${typeName}>({
+        databaseId: process.env.APPWRITE_DB_ID!,
+        tableId: '${collection.$id}',
+        rowId: options?.rowId ??
ID.unique(),
+        data,
+        permissions: [
+          Permission.write(Role.user(data.createdBy)),
+          Permission.read(Role.user(data.createdBy)),
+          Permission.update(Role.user(data.createdBy)),
+          Permission.delete(Role.user(data.createdBy))
+        ]
+      }),
+    get: (id: string) =>
+      tablesDB.getRow<${typeName}>({
+        databaseId: process.env.APPWRITE_DB_ID!,
+        tableId: '${collection.$id}',
+        rowId: id,
+      }),
+    update: (id: string, data: Partial<Omit<${typeName}, keyof Models.Row>>, options?: { permissions?: string[] }) =>
+      tablesDB.updateRow<${typeName}>({
+        databaseId: process.env.APPWRITE_DB_ID!,
+        tableId: '${collection.$id}',
+        rowId: id,
+        data,
+        ...(options?.permissions ? { permissions: options.permissions } : {}),
+      }),
+    delete: (id: string) =>
+      tablesDB.deleteRow({
+        databaseId: process.env.APPWRITE_DB_ID!,
+        tableId: '${collection.$id}',
+        rowId: id,
+      }),
+    list: (queries?: string[]) =>
+      tablesDB.listRows<${typeName}>({
+        databaseId: process.env.APPWRITE_DB_ID!,
+        tableId: '${collection.$id}',
+        queries,
+      }),
+  }`;
+      })
+      .join(",\n");
+
+    return `import { Client, TablesDB, ID, type Models, Permission, Role } from '${appwriteDep}';
+import type { ${typeNames.join(", ")} } from './${importPath}';
+
+const client = new Client()
+  .setEndpoint(process.env.APPWRITE_ENDPOINT!)
+  .setProject(process.env.APPWRITE_PROJECT_ID!)
+  .setKey(process.env.APPWRITE_API_KEY!);
+
+const tablesDB = new TablesDB(client);
+
+
+export const db = {
+${collectionsCode}
+};
+`;
+  }
+
+  /**
+   * Generates TypeScript code for Appwrite database collections and types based on the provided configuration.
+   *
+   * This method returns generated content as strings:
+   * 1. A types string containing TypeScript interfaces for each collection.
+   * 2. A database client string with helper methods for CRUD operations on each collection.
+   *
+   * @param config - The Appwrite project configuration, including collections and project details.
+   * @param options - Optional settings for code generation:
+   *   - strict: Whether to use strict naming conventions for collections (default: false).
+   * @returns A Promise that resolves with an object containing dbContent and typesContent strings.
+   * @throws If the configuration is missing a projectId or contains no collections.
+   */
+  public async generate(
+    config: ConfigType,
+    options: GenerateOptions = {},
+  ): Promise<GenerateResult> {
+    const { strict = false } = options;
+
+    if (!config.projectId) {
+      throw new Error("Project ID is required in configuration");
+    }
+
+    if (!config.collections || config.collections.length === 0) {
+      console.log(
+        "No collections found in configuration.
Skipping database generation.", + ); + return { + dbContent: "// No collections found in configuration\n", + typesContent: "// No collections found in configuration\n", + }; + } + + // Generate types content + const typesContent = this.generateTypesFile(config, { strict }); + + // Generate database client content + const dbContent = this.generateDbFile(config, { strict }); + + return { + dbContent, + typesContent, + }; + } +} diff --git a/lib/commands/schema.ts b/lib/commands/schema.ts index 3a4adfc7..7b7a21a8 100644 --- a/lib/commands/schema.ts +++ b/lib/commands/schema.ts @@ -6,12 +6,14 @@ import { Push, PushOptions } from "./push.js"; import { parseWithBetterErrors } from "./utils/error-formatter.js"; import JSONbig from "json-bigint"; import * as fs from "fs"; +import { Db } from "./db.js"; const JSONBig = JSONbig({ storeAsString: false }); export class Schema { private pullCommand: Pull; private pushCommand: Push; + public db: Db; constructor({ projectClient, @@ -22,6 +24,7 @@ export class Schema { }) { this.pullCommand = new Pull(projectClient, consoleClient); this.pushCommand = new Push(projectClient, consoleClient); + this.db = new Db(); } /** diff --git a/lib/parser.ts b/lib/parser.ts index 2e4717b0..d7b05bfb 100644 --- a/lib/parser.ts +++ b/lib/parser.ts @@ -128,7 +128,7 @@ export const parseError = (err: Error): void => { // Silently fail } - const version = "13.0.0-rc.2"; + const version = "13.0.0-rc.3"; const stepsToReproduce = `Running \`appwrite ${(cliConfig.reportData as any).data.args.join(" ")}\``; const yourEnvironment = `CLI version: ${version}\nOperation System: ${os.type()}\nAppwrite version: ${appwriteVersion}\nIs Cloud: ${isCloud()}`; diff --git a/lib/sdks.ts b/lib/sdks.ts index acc60223..781aaf70 100644 --- a/lib/sdks.ts +++ b/lib/sdks.ts @@ -20,8 +20,8 @@ export const sdkForConsole = async ( "x-sdk-name": "Command Line", "x-sdk-platform": "console", "x-sdk-language": "cli", - "x-sdk-version": "13.0.0-rc.2", - "user-agent": `AppwriteCLI/13.0.0-rc.2 (${os.type()} ${os.version()}; ${os.arch()})`, + "x-sdk-version": "13.0.0-rc.3", + "user-agent": `AppwriteCLI/13.0.0-rc.3 (${os.type()} ${os.version()}; ${os.arch()})`, }; client @@ -60,8 +60,8 @@ export const sdkForProject = async (): Promise => { "x-sdk-name": "Command Line", "x-sdk-platform": "console", "x-sdk-language": "cli", - "x-sdk-version": "13.0.0-rc.2", - "user-agent": `AppwriteCLI/13.0.0-rc.2 (${os.type()} ${os.version()}; ${os.arch()})`, + "x-sdk-version": "13.0.0-rc.3", + "user-agent": `AppwriteCLI/13.0.0-rc.3 (${os.type()} ${os.version()}; ${os.arch()})`, }; client diff --git a/package.json b/package.json index 301a58ad..4f1c14ef 100644 --- a/package.json +++ b/package.json @@ -3,7 +3,7 @@ "type": "module", "homepage": "https://appwrite.io/support", "description": "Appwrite is an open-source self-hosted backend server that abstract and simplify complex and repetitive development tasks behind a very simple REST API", - "version": "13.0.0-rc.2", + "version": "13.0.0-rc.3", "license": "BSD-3-Clause", "main": "dist/index.js", "types": "dist/index.d.ts", diff --git a/scoop/appwrite.config.json b/scoop/appwrite.config.json index f5ae51f8..3f10a64a 100644 --- a/scoop/appwrite.config.json +++ b/scoop/appwrite.config.json @@ -1,16 +1,16 @@ { "$schema": "https://raw.githubusercontent.com/ScoopInstaller/Scoop/master/schema.json", - "version": "13.0.0-rc.2", + "version": "13.0.0-rc.3", "description": "The Appwrite CLI is a command-line application that allows you to interact with Appwrite and perform server-side tasks 
using your terminal.", "homepage": "https://github.com/appwrite/sdk-for-cli", "license": "BSD-3-Clause", "architecture": { "64bit": { - "url": "https://github.com/appwrite/sdk-for-cli/releases/download/13.0.0-rc.2/appwrite-cli-win-x64.exe", + "url": "https://github.com/appwrite/sdk-for-cli/releases/download/13.0.0-rc.3/appwrite-cli-win-x64.exe", "bin": [["appwrite-cli-win-x64.exe", "appwrite"]] }, "arm64": { - "url": "https://github.com/appwrite/sdk-for-cli/releases/download/13.0.0-rc.2/appwrite-cli-win-arm64.exe", + "url": "https://github.com/appwrite/sdk-for-cli/releases/download/13.0.0-rc.3/appwrite-cli-win-arm64.exe", "bin": [["appwrite-cli-win-arm64.exe", "appwrite"]] } }, From 8aa05033eadaefee957936dd0308cd7ab8a25608 Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Wed, 7 Jan 2026 17:02:26 +0530 Subject: [PATCH 36/41] fix options --- lib/commands/pull.ts | 24 ++++++++------------ lib/commands/push.ts | 54 +++++++++++++++++++++++++++++++++++++------- 2 files changed, 55 insertions(+), 23 deletions(-) diff --git a/lib/commands/pull.ts b/lib/commands/pull.ts index 8b234ad5..e04df025 100644 --- a/lib/commands/pull.ts +++ b/lib/commands/pull.ts @@ -13,16 +13,10 @@ import { TablesDB, Teams, Client, - AppwriteException, Query, Models, } from "@appwrite.io/console"; -import { - getFunctionsService, - getSitesService, - getDatabasesService, - getTablesDBService, -} from "../services.js"; +import { getFunctionsService, getSitesService } from "../services.js"; import { sdkForProject, sdkForConsole } from "../sdks.js"; import { localConfig } from "../config.js"; import { paginate } from "../paginate.js"; @@ -58,6 +52,7 @@ export interface PullOptions { buckets?: boolean; teams?: boolean; topics?: boolean; + skipDeprecated?: boolean; withVariables?: boolean; noCode?: boolean; } @@ -74,10 +69,6 @@ interface PullSitesOptions { siteIds?: string[]; } -interface PullResourcesOptions { - skipDeprecated?: boolean; -} - export interface PullSettingsResult { projectName: string; settings: SettingsType; @@ -149,8 +140,9 @@ export class Pull { */ public async pullResources( config: ConfigType, - options: PullOptions = { all: true }, + options: PullOptions = { all: true, skipDeprecated: true }, ): Promise { + const { skipDeprecated = true } = options; if (!config.projectId) { throw new ProjectNotInitializedError(); } @@ -186,7 +178,7 @@ export class Pull { updatedConfig.tables = tables; } - if (options.collections) { + if (!skipDeprecated && (shouldPullAll || options.collections)) { const { databases, collections } = await this.pullCollections(); updatedConfig.databases = databases; updatedConfig.collections = collections; @@ -678,8 +670,10 @@ export class Pull { /** Helper methods for CLI commands */ export const pullResources = async ({ - skipDeprecated = false, -}: PullResourcesOptions = {}): Promise => { + skipDeprecated = true, +}: { + skipDeprecated?: boolean; +} = {}): Promise => { const project = localConfig.getProject(); if (!project.projectId) { error( diff --git a/lib/commands/push.ts b/lib/commands/push.ts index 1e080e34..123c0d63 100644 --- a/lib/commands/push.ts +++ b/lib/commands/push.ts @@ -76,6 +76,15 @@ const POLL_DEBOUNCE = 2000; // Milliseconds const POLL_DEFAULT_VALUE = 30; export interface PushOptions { + all?: boolean; + settings?: boolean; + functions?: boolean; + sites?: boolean; + collections?: boolean; + tables?: boolean; + buckets?: boolean; + teams?: boolean; + topics?: boolean; skipDeprecated?: boolean; functionOptions?: { async?: boolean; @@ -118,7 +127,7 @@ export class 
Push {
 
   public async pushResources(
     config: ConfigType,
-    options: PushOptions = { skipDeprecated: true },
+    options: PushOptions = { all: true, skipDeprecated: true },
   ): Promise<{
     results: Record<string, any>;
     errors: any[];
@@ -126,9 +135,13 @@ export class Push {
     const { skipDeprecated = true } = options;
     const results: Record<string, any> = {};
     const allErrors: any[] = [];
+    const shouldPushAll = options.all === true;
 
     // Push settings
-    if (config.projectName || config.settings) {
+    if (
+      (shouldPushAll || options.settings) &&
+      (config.projectName || config.settings)
+    ) {
       try {
         log("Pushing settings ...");
         await this.pushSettings({
@@ -144,7 +157,11 @@ export class Push {
     }
 
     // Push buckets
-    if (config.buckets && config.buckets.length > 0) {
+    if (
+      (shouldPushAll || options.buckets) &&
+      config.buckets &&
+      config.buckets.length > 0
+    ) {
       try {
         log("Pushing buckets ...");
         const result = await this.pushBuckets(config.buckets);
@@ -157,7 +174,11 @@ export class Push {
     }
 
     // Push teams
-    if (config.teams && config.teams.length > 0) {
+    if (
+      (shouldPushAll || options.teams) &&
+      config.teams &&
+      config.teams.length > 0
+    ) {
       try {
         log("Pushing teams ...");
         const result = await this.pushTeams(config.teams);
@@ -170,7 +191,11 @@ export class Push {
     }
 
     // Push messaging topics
-    if (config.topics && config.topics.length > 0) {
+    if (
+      (shouldPushAll || options.topics) &&
+      config.topics &&
+      config.topics.length > 0
+    ) {
       try {
         log("Pushing topics ...");
         const result = await this.pushMessagingTopics(config.topics);
@@ -183,7 +208,11 @@ export class Push {
     }
 
     // Push functions
-    if (config.functions && config.functions.length > 0) {
+    if (
+      (shouldPushAll || options.functions) &&
+      config.functions &&
+      config.functions.length > 0
+    ) {
       try {
         log("Pushing functions ...");
         const result = await this.pushFunctions(
@@ -204,7 +233,11 @@ export class Push {
     }
 
     // Push sites
-    if (config.sites && config.sites.length > 0) {
+    if (
+      (shouldPushAll || options.sites) &&
+      config.sites &&
+      config.sites.length > 0
+    ) {
       try {
         log("Pushing sites ...");
         const result = await this.pushSites(config.sites, options.siteOptions);
@@ -222,7 +255,11 @@ export class Push {
     }
 
     // Push tables
-    if (config.tables && config.tables.length > 0) {
+    if (
+      (shouldPushAll || options.tables) &&
+      config.tables &&
+      config.tables.length > 0
+    ) {
       try {
         log("Pushing tables ...");
         const result = await this.pushTables(config.tables);
@@ -237,6 +274,7 @@ export class Push {
     // Push collections (unless skipDeprecated is true)
     if (
       !skipDeprecated &&
+      (shouldPushAll || options.collections) &&
       config.collections &&
       config.collections.length > 0
     ) {

From 6e4fb9f4155d0ebeb4cc7e844acaa6133275d5f4 Mon Sep 17 00:00:00 2001
From: Chirag Aggarwal
Date: Wed, 7 Jan 2026 17:02:46 +0530
Subject: [PATCH 37/41] unused import

---
 lib/commands/pull.ts | 1 -
 1 file changed, 1 deletion(-)

diff --git a/lib/commands/pull.ts b/lib/commands/pull.ts
index e04df025..7f6dbabf 100644
--- a/lib/commands/pull.ts
+++ b/lib/commands/pull.ts
@@ -1,6 +1,5 @@
 import fs from "fs";
 import chalk from "chalk";
-import tar from "tar";
 import { Command } from "commander";
 import inquirer from "inquirer";
 import {

From 1729b78889f2cbee76f2255eef97717dc2132082 Mon Sep 17 00:00:00 2001
From: Chirag Aggarwal
Date: Wed, 7 Jan 2026 17:26:44 +0530
Subject: [PATCH 38/41] fix exports

---
 index.ts     | 17 +++++++++++++++++
 lib/types.ts | 18 ------------------
 2 files changed, 17 insertions(+), 18 deletions(-)

diff --git a/index.ts b/index.ts
index 09dcbf4a..2923a0e6 100644
--- a/index.ts
+++
b/index.ts @@ -171,3 +171,20 @@ if (process.argv.includes("-v") || process.argv.includes("--version")) { } export { Schema, Push, Pull }; +export type { + ConfigType, + SettingsType, + FunctionType, + SiteType, + DatabaseType, + CollectionType, + TableType, + TopicType, + TeamType, + MessageType, + BucketType, + AttributeType, + IndexType, + ColumnType, + TableIndexType, +} from "./lib/commands/config.js"; diff --git a/lib/types.ts b/lib/types.ts index 7d654bce..928b0ef8 100644 --- a/lib/types.ts +++ b/lib/types.ts @@ -73,21 +73,3 @@ export interface GlobalConfigData extends ConfigData { current: string; cookie?: string; } - -export type { - ConfigType, - SettingsType, - FunctionType, - SiteType, - DatabaseType, - CollectionType, - TableType, - TopicType, - TeamType, - MessageType, - BucketType, - AttributeType, - IndexType, - ColumnType, - TableIndexType, -} from "./commands/config.js"; From a15c154c69cf52a3787d71d7a8426ac711aa3b7e Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Wed, 7 Jan 2026 17:28:05 +0530 Subject: [PATCH 39/41] fix exports --- lib/commands/pull.ts | 2 +- lib/config.ts | 14 ++++++++------ lib/emulation/docker.ts | 2 +- 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/lib/commands/pull.ts b/lib/commands/pull.ts index 7f6dbabf..b01bf9ea 100644 --- a/lib/commands/pull.ts +++ b/lib/commands/pull.ts @@ -38,7 +38,7 @@ import { import type { ConfigType } from "./config.js"; import { createSettingsObject } from "../utils.js"; import { ProjectNotInitializedError } from "./errors.js"; -import type { SettingsType, FunctionType, SiteType } from "../types.js"; +import type { SettingsType, FunctionType, SiteType } from "./config.js"; import { downloadDeploymentCode } from "./utils/deployment.js"; export interface PullOptions { diff --git a/lib/config.ts b/lib/config.ts index 026f24d4..6ebd5a67 100644 --- a/lib/config.ts +++ b/lib/config.ts @@ -7,17 +7,19 @@ import type { Models } from "@appwrite.io/console"; import type { BucketType, CollectionType, - ConfigData, - Entity, FunctionType, - GlobalConfigData, ConfigType, SettingsType, - SessionData, SiteType, TableType, TeamType, TopicType, +} from "./commands/config.js"; +import type { + SessionData, + ConfigData, + Entity, + GlobalConfigData, } from "./types.js"; import { createSettingsObject } from "./utils.js"; @@ -348,11 +350,11 @@ class Local extends Config { } getEndpoint(): string { - return (this.get("endpoint" as keyof ConfigType) as string) || ""; + return this.get("endpoint") || ""; } setEndpoint(endpoint: string): void { - this.set("endpoint" as any, endpoint); + this.set("endpoint", endpoint); } getSites(): SiteType[] { diff --git a/lib/emulation/docker.ts b/lib/emulation/docker.ts index 6a5bab66..18047ae6 100644 --- a/lib/emulation/docker.ts +++ b/lib/emulation/docker.ts @@ -10,7 +10,7 @@ import fs from "fs"; import { log, error, success } from "../parser.js"; import { openRuntimesVersion, systemTools, Queue } from "./utils.js"; import { getAllFiles } from "../utils.js"; -import type { FunctionType } from "../types.js"; +import type { FunctionType } from "../commands/config.js"; export async function dockerStop(id: string): Promise { const stopProcess = childProcess.spawn("docker", ["rm", "--force", id], { From c3cf582fe0996adc3b3ab79b681627e7a3f7252e Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Wed, 7 Jan 2026 17:29:10 +0530 Subject: [PATCH 40/41] fix optionals --- CHANGELOG.md | 3 ++- lib/commands/config.ts | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md 
b/CHANGELOG.md index f22b67e1..a2d779ad 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,9 +2,10 @@ ## 13.0.0-rc.3 +- Push, Pull and Schema classes are now exported as part of the package - Fixes a lot of typescript errors throughout the codebase -## 13.0.0-rc.3 +## 13.0.0-rc.2 - Migrates codebase from JavaScript to TypeScript diff --git a/lib/commands/config.ts b/lib/commands/config.ts index 1249130b..6e3b3a21 100644 --- a/lib/commands/config.ts +++ b/lib/commands/config.ts @@ -176,7 +176,7 @@ const FunctionSchema = z name: z.string(), enabled: z.boolean().optional(), logging: z.boolean().optional(), - runtime: z.string().optional(), + runtime: z.string(), specification: z.string().optional(), scopes: z.array(z.string()).optional(), events: z.array(z.string()).optional(), From 0a26d6b9b2164f7bb954fe562e0a608d956f76ad Mon Sep 17 00:00:00 2001 From: Chirag Aggarwal Date: Wed, 7 Jan 2026 18:20:36 +0530 Subject: [PATCH 41/41] fix package --- cli.ts | 168 ++++++++++++++++++++++++++++++ index.ts | 172 +------------------------------ lib/commands/utils/deployment.ts | 3 +- package.json | 16 +-- 4 files changed, 183 insertions(+), 176 deletions(-) create mode 100644 cli.ts diff --git a/cli.ts b/cli.ts new file mode 100644 index 00000000..108957b8 --- /dev/null +++ b/cli.ts @@ -0,0 +1,168 @@ +#! /usr/bin/env node + +/** Required to set max width of the help commands */ +const oldWidth = process.stdout.columns; +process.stdout.columns = 100; +/** ---------------------------------------------- */ + +import { program } from "commander"; +import chalk from "chalk"; +import packageJson from "./package.json" with { type: "json" }; +const { version } = packageJson; +import { commandDescriptions, cliConfig } from "./lib/parser.js"; +import { client } from "./lib/commands/generic.js"; +import { getLatestVersion, compareVersions } from "./lib/utils.js"; +import inquirer from "inquirer"; +import { + login, + logout, + whoami, + migrate, + register, +} from "./lib/commands/generic.js"; +import { init } from "./lib/commands/init.js"; +import { types } from "./lib/commands/types.js"; +import { pull } from "./lib/commands/pull.js"; +import { run } from "./lib/commands/run.js"; +import { push, deploy } from "./lib/commands/push.js"; +import { update } from "./lib/commands/update.js"; +import { account } from "./lib/commands/services/account.js"; +import { console } from "./lib/commands/services/console.js"; +import { databases } from "./lib/commands/services/databases.js"; +import { functions } from "./lib/commands/services/functions.js"; +import { graphql } from "./lib/commands/services/graphql.js"; +import { health } from "./lib/commands/services/health.js"; +import { locale } from "./lib/commands/services/locale.js"; +import { messaging } from "./lib/commands/services/messaging.js"; +import { migrations } from "./lib/commands/services/migrations.js"; +import { project } from "./lib/commands/services/project.js"; +import { projects } from "./lib/commands/services/projects.js"; +import { proxy } from "./lib/commands/services/proxy.js"; +import { sites } from "./lib/commands/services/sites.js"; +import { storage } from "./lib/commands/services/storage.js"; +import { tablesdb } from "./lib/commands/services/tablesdb.js"; +import { teams } from "./lib/commands/services/teams.js"; +import { tokens } from "./lib/commands/services/tokens.js"; +import { users } from "./lib/commands/services/users.js"; +import { vcs } from "./lib/commands/services/vcs.js"; +import searchList from "inquirer-search-list"; + 
+inquirer.registerPrompt("search-list", searchList); + +/** + * Check for updates and show version information + */ +async function checkVersion(): Promise { + process.stdout.write(chalk.bold(`appwrite version ${version}`) + "\n"); + + try { + const latestVersion = await getLatestVersion(); + const comparison = compareVersions(version, latestVersion); + + if (comparison > 0) { + // Current version is older than latest + process.stdout.write( + chalk.yellow( + `\n⚠️ A newer version is available: ${chalk.bold(latestVersion)}`, + ) + "\n", + ); + process.stdout.write( + chalk.cyan( + `💡 Run '${chalk.bold("appwrite update")}' to update to the latest version.`, + ) + "\n", + ); + } else if (comparison === 0) { + process.stdout.write( + chalk.green("\n✅ You are running the latest version!") + "\n", + ); + } else { + // Current version is newer than latest (pre-release/dev) + process.stdout.write( + chalk.blue( + "\n🚀 You are running a pre-release or development version.", + ) + "\n", + ); + } + } catch (error) { + // Silently fail version check, just show current version + process.stdout.write(chalk.gray("\n(Unable to check for updates)") + "\n"); + } +} + +// Intercept version flag before Commander.js processes it +if (process.argv.includes("-v") || process.argv.includes("--version")) { + (async () => { + await checkVersion(); + process.exit(0); + })(); +} else { + program + .description(commandDescriptions["main"]) + .configureHelp({ + helpWidth: process.stdout.columns || 80, + sortSubcommands: true, + }) + .helpOption("-h, --help", "Display help for command") + .version(version, "-v, --version", "Output the version number") + .option("-V, --verbose", "Show complete error log") + .option("-j, --json", "Output in JSON format") + .hook("preAction", migrate) + .option("-f,--force", "Flag to confirm all warnings") + .option("-a,--all", "Flag to push all resources") + .option("--id [id...]", "Flag to pass a list of ids for a given action") + .option("--report", "Enable reporting in case of CLI errors") + .on("option:json", () => { + cliConfig.json = true; + }) + .on("option:verbose", () => { + cliConfig.verbose = true; + }) + .on("option:report", function () { + cliConfig.report = true; + cliConfig.reportData = { data: this }; + }) + .on("option:force", () => { + cliConfig.force = true; + }) + .on("option:all", () => { + cliConfig.all = true; + }) + .on("option:id", function () { + cliConfig.ids = (this as any).opts().id; + }) + .showSuggestionAfterError() + .addCommand(whoami) + .addCommand(register) + .addCommand(login) + .addCommand(init) + .addCommand(pull) + .addCommand(push) + .addCommand(types) + .addCommand(deploy) + .addCommand(run) + .addCommand(update) + .addCommand(logout) + .addCommand(account) + .addCommand(console) + .addCommand(databases) + .addCommand(functions) + .addCommand(graphql) + .addCommand(health) + .addCommand(locale) + .addCommand(messaging) + .addCommand(migrations) + .addCommand(project) + .addCommand(projects) + .addCommand(proxy) + .addCommand(sites) + .addCommand(storage) + .addCommand(tablesdb) + .addCommand(teams) + .addCommand(tokens) + .addCommand(users) + .addCommand(vcs) + .addCommand(client) + .parse(process.argv); + + process.stdout.columns = oldWidth; +} diff --git a/index.ts b/index.ts index 2923a0e6..1a9ecce5 100644 --- a/index.ts +++ b/index.ts @@ -1,175 +1,13 @@ -#! 
/usr/bin/env node - -/** Required to set max width of the help commands */ -const oldWidth = process.stdout.columns; -process.stdout.columns = 100; -/** ---------------------------------------------- */ +/** + * Library exports for programmatic use of the Appwrite CLI + * + * For CLI usage, run the 'appwrite' command directly. + */ -import { program } from "commander"; -import chalk from "chalk"; -import packageJson from "./package.json" with { type: "json" }; -const { version } = packageJson; -import { commandDescriptions, cliConfig } from "./lib/parser.js"; -import { client } from "./lib/commands/generic.js"; -import { getLatestVersion, compareVersions } from "./lib/utils.js"; -import inquirer from "inquirer"; -import { - login, - logout, - whoami, - migrate, - register, -} from "./lib/commands/generic.js"; -import { init } from "./lib/commands/init.js"; -import { types } from "./lib/commands/types.js"; -import { pull } from "./lib/commands/pull.js"; -import { run } from "./lib/commands/run.js"; -import { push, deploy } from "./lib/commands/push.js"; -import { update } from "./lib/commands/update.js"; -import { account } from "./lib/commands/services/account.js"; -import { console } from "./lib/commands/services/console.js"; -import { databases } from "./lib/commands/services/databases.js"; -import { functions } from "./lib/commands/services/functions.js"; -import { graphql } from "./lib/commands/services/graphql.js"; -import { health } from "./lib/commands/services/health.js"; -import { locale } from "./lib/commands/services/locale.js"; -import { messaging } from "./lib/commands/services/messaging.js"; -import { migrations } from "./lib/commands/services/migrations.js"; -import { project } from "./lib/commands/services/project.js"; -import { projects } from "./lib/commands/services/projects.js"; -import { proxy } from "./lib/commands/services/proxy.js"; -import { sites } from "./lib/commands/services/sites.js"; -import { storage } from "./lib/commands/services/storage.js"; -import { tablesdb } from "./lib/commands/services/tablesdb.js"; -import { teams } from "./lib/commands/services/teams.js"; -import { tokens } from "./lib/commands/services/tokens.js"; -import { users } from "./lib/commands/services/users.js"; -import { vcs } from "./lib/commands/services/vcs.js"; -import searchList from "inquirer-search-list"; import { Push } from "./lib/commands/push.js"; import { Pull } from "./lib/commands/pull.js"; import { Schema } from "./lib/commands/schema.js"; -inquirer.registerPrompt("search-list", searchList); - -/** - * Check for updates and show version information - */ -async function checkVersion(): Promise { - process.stdout.write(chalk.bold(`appwrite version ${version}`) + "\n"); - - try { - const latestVersion = await getLatestVersion(); - const comparison = compareVersions(version, latestVersion); - - if (comparison > 0) { - // Current version is older than latest - process.stdout.write( - chalk.yellow( - `\n⚠️ A newer version is available: ${chalk.bold(latestVersion)}`, - ) + "\n", - ); - process.stdout.write( - chalk.cyan( - `💡 Run '${chalk.bold("appwrite update")}' to update to the latest version.`, - ) + "\n", - ); - } else if (comparison === 0) { - process.stdout.write( - chalk.green("\n✅ You are running the latest version!") + "\n", - ); - } else { - // Current version is newer than latest (pre-release/dev) - process.stdout.write( - chalk.blue( - "\n🚀 You are running a pre-release or development version.", - ) + "\n", - ); - } - } catch (error) { - // Silently fail version 
check, just show current version - process.stdout.write(chalk.gray("\n(Unable to check for updates)") + "\n"); - } -} - -// Intercept version flag before Commander.js processes it -if (process.argv.includes("-v") || process.argv.includes("--version")) { - (async () => { - await checkVersion(); - process.exit(0); - })(); -} else { - program - .description(commandDescriptions["main"]) - .configureHelp({ - helpWidth: process.stdout.columns || 80, - sortSubcommands: true, - }) - .helpOption("-h, --help", "Display help for command") - .version(version, "-v, --version", "Output the version number") - .option("-V, --verbose", "Show complete error log") - .option("-j, --json", "Output in JSON format") - .hook("preAction", migrate) - .option("-f,--force", "Flag to confirm all warnings") - .option("-a,--all", "Flag to push all resources") - .option("--id [id...]", "Flag to pass a list of ids for a given action") - .option("--report", "Enable reporting in case of CLI errors") - .on("option:json", () => { - cliConfig.json = true; - }) - .on("option:verbose", () => { - cliConfig.verbose = true; - }) - .on("option:report", function () { - cliConfig.report = true; - cliConfig.reportData = { data: this }; - }) - .on("option:force", () => { - cliConfig.force = true; - }) - .on("option:all", () => { - cliConfig.all = true; - }) - .on("option:id", function () { - cliConfig.ids = (this as any).opts().id; - }) - .showSuggestionAfterError() - .addCommand(whoami) - .addCommand(register) - .addCommand(login) - .addCommand(init) - .addCommand(pull) - .addCommand(push) - .addCommand(types) - .addCommand(deploy) - .addCommand(run) - .addCommand(update) - .addCommand(logout) - .addCommand(account) - .addCommand(console) - .addCommand(databases) - .addCommand(functions) - .addCommand(graphql) - .addCommand(health) - .addCommand(locale) - .addCommand(messaging) - .addCommand(migrations) - .addCommand(project) - .addCommand(projects) - .addCommand(proxy) - .addCommand(sites) - .addCommand(storage) - .addCommand(tablesdb) - .addCommand(teams) - .addCommand(tokens) - .addCommand(users) - .addCommand(vcs) - .addCommand(client) - .parse(process.argv); - - process.stdout.columns = oldWidth; -} - export { Schema, Push, Pull }; export type { ConfigType, diff --git a/lib/commands/utils/deployment.ts b/lib/commands/utils/deployment.ts index e5746c7d..fff7161b 100644 --- a/lib/commands/utils/deployment.ts +++ b/lib/commands/utils/deployment.ts @@ -2,6 +2,7 @@ import fs from "fs"; import path from "path"; import tar from "tar"; import { Client, AppwriteException } from "@appwrite.io/console"; +import { error } from "../../parser.js"; const POLL_DEBOUNCE = 2000; // Milliseconds @@ -59,7 +60,7 @@ export async function downloadDeploymentCode(params: { } } catch (e: unknown) { if (e instanceof AppwriteException) { - this.error(e.message); + error(e.message); return; } else { throw e; diff --git a/package.json b/package.json index 4f1c14ef..5fd1c039 100644 --- a/package.json +++ b/package.json @@ -8,7 +8,7 @@ "main": "dist/index.js", "types": "dist/index.d.ts", "bin": { - "appwrite": "dist/index.js" + "appwrite": "dist/cli.js" }, "repository": { "type": "git", @@ -20,12 +20,12 @@ "generate": "tsx scripts/generate-commands.ts", "prepublishOnly": "npm run build", "test": "echo \"Error: no test specified\" && exit 1", - "linux-x64": "bun run build && bun build ./dist/index.js --compile --sourcemap=inline --target=bun-linux-x64 --outfile build/appwrite-cli-linux-x64", - "linux-arm64": "bun run build && bun build ./dist/index.js --compile 
--sourcemap=inline --target=bun-linux-arm64 --outfile build/appwrite-cli-linux-arm64", - "mac-x64": "bun run build && bun build ./dist/index.js --compile --sourcemap=inline --target=bun-darwin-x64 --outfile build/appwrite-cli-darwin-x64", - "mac-arm64": "bun run build && bun build ./dist/index.js --compile --sourcemap=inline --target=bun-darwin-arm64 --outfile build/appwrite-cli-darwin-arm64", - "windows-x64": "bun run build && bun build ./dist/index.js --compile --sourcemap=inline --target=bun-windows-x64 --outfile build/appwrite-cli-win-x64.exe", - "windows-arm64": "bun run build && esbuild dist/index.js --bundle --platform=node --format=cjs --outfile=dist/bundle.cjs --external:@appwrite.io/console --external:fsevents && pkg dist/bundle.cjs -t node18-win-arm64 -o build/appwrite-cli-win-arm64.exe" + "linux-x64": "bun run build && bun build ./dist/cli.js --compile --sourcemap=inline --target=bun-linux-x64 --outfile build/appwrite-cli-linux-x64", + "linux-arm64": "bun run build && bun build ./dist/cli.js --compile --sourcemap=inline --target=bun-linux-arm64 --outfile build/appwrite-cli-linux-arm64", + "mac-x64": "bun run build && bun build ./dist/cli.js --compile --sourcemap=inline --target=bun-darwin-x64 --outfile build/appwrite-cli-darwin-x64", + "mac-arm64": "bun run build && bun build ./dist/cli.js --compile --sourcemap=inline --target=bun-darwin-arm64 --outfile build/appwrite-cli-darwin-arm64", + "windows-x64": "bun run build && bun build ./dist/cli.js --compile --sourcemap=inline --target=bun-windows-x64 --outfile build/appwrite-cli-win-x64.exe", + "windows-arm64": "bun run build && esbuild dist/cli.js --bundle --platform=node --format=cjs --outfile=dist/bundle.cjs --external:@appwrite.io/console --external:fsevents && pkg dist/bundle.cjs -t node18-win-arm64 -o build/appwrite-cli-win-arm64.exe" }, "dependencies": { "@appwrite.io/console": "^2.1.0", @@ -61,7 +61,7 @@ }, "pkg": { "scripts": [ - "dist/index.js", + "dist/cli.js", "dist/lib/**/*.js" ] }