From 7675af3221d1fd86fecd68be6c5e42369e751ce5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 24 Sep 2025 00:05:00 +0200 Subject: [PATCH 01/83] feat: initial db pull implementation --- packages/cli/src/actions/action-utils.ts | 14 + packages/cli/src/actions/db.ts | 70 ++++- packages/cli/src/actions/pull/index.ts | 238 +++++++++++++++++ .../cli/src/actions/pull/provider/index.ts | 9 + .../src/actions/pull/provider/postgresql.ts | 242 ++++++++++++++++++ .../cli/src/actions/pull/provider/provider.ts | 44 ++++ .../cli/src/actions/pull/provider/sqlite.ts | 191 ++++++++++++++ packages/cli/src/actions/pull/utils.ts | 67 +++++ packages/language/src/document.ts | 3 +- pnpm-lock.yaml | 24 ++ 10 files changed, 897 insertions(+), 5 deletions(-) create mode 100644 packages/cli/src/actions/pull/index.ts create mode 100644 packages/cli/src/actions/pull/provider/index.ts create mode 100644 packages/cli/src/actions/pull/provider/postgresql.ts create mode 100644 packages/cli/src/actions/pull/provider/provider.ts create mode 100644 packages/cli/src/actions/pull/provider/sqlite.ts create mode 100644 packages/cli/src/actions/pull/utils.ts diff --git a/packages/cli/src/actions/action-utils.ts b/packages/cli/src/actions/action-utils.ts index d2e0ca2e9..78e4cb38b 100644 --- a/packages/cli/src/actions/action-utils.ts +++ b/packages/cli/src/actions/action-utils.ts @@ -55,6 +55,20 @@ export async function loadSchemaDocument(schemaFile: string) { return loadResult.model; } +export async function loadSchemaDocumentWithServices(schemaFile: string) { + const loadResult = await loadDocument(schemaFile); + if (!loadResult.success) { + loadResult.errors.forEach((err) => { + console.error(colors.red(err)); + }); + throw new CliError('Schema contains errors. 
See above for details.'); + } + loadResult.warnings.forEach((warn) => { + console.warn(colors.yellow(warn)); + }); + return { services: loadResult.services, model: loadResult.model }; +} + export function handleSubProcessError(err: unknown) { if (err instanceof Error && 'status' in err && typeof err.status === 'number') { process.exit(err.status); diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 3d0108374..6c39a3529 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,25 +1,36 @@ +import { ZModelCodeGenerator } from '@zenstackhq/sdk'; import fs from 'node:fs'; import { execPrisma } from '../utils/exec-utils'; -import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, requireDataSourceUrl } from './action-utils'; +import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, requireDataSourceUrl, loadSchemaDocumentWithServices } from './action-utils'; +import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; +import { providers } from './pull/provider'; +import { getDatasource, getDbName } from './pull/utils'; -type Options = { +type PushOptions = { schema?: string; acceptDataLoss?: boolean; forceReset?: boolean; }; +type PullOptions = { + schema?: string; +}; + /** * CLI action for db related commands */ -export async function run(command: string, options: Options) { +export async function run(command: string, options: PushOptions) { switch (command) { case 'push': await runPush(options); break; + case 'pull': + await runPull(options); + break; } } -async function runPush(options: Options) { +async function runPush(options: PushOptions) { const schemaFile = getSchemaFile(options.schema); // validate datasource url exists @@ -49,3 +60,54 @@ async function runPush(options: Options) { } } } + +async function runPull(options: PullOptions) { + const schemaFile = getSchemaFile(options.schema); + const { model, services } = await 
loadSchemaDocumentWithServices(schemaFile); + + const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql'] + const datasource = getDatasource(model) + + if (!datasource) { + throw new Error('No datasource found in the schema.') + } + + if (!SUPPORTED_PROVIDERS.includes(datasource.provider)) { + throw new Error(`Unsupported datasource provider: ${datasource.provider}`) + } + + const provider = providers[datasource.provider]; + + if (!provider) { + throw new Error( + `No introspection provider found for: ${datasource.provider}` + ) + } + + const { enums, tables } = await provider.introspect(datasource.url) + + syncEnums(enums, model) + + const resolveRelations: Relation[] = [] + for (const table of tables) { + const relations = syncTable({ table, model, provider }) + resolveRelations.push(...relations) + } + + for (const rel of resolveRelations) { + syncRelation(model, rel, services); + } + + for (const d of model.declarations) { + if (d.$type !== 'DataModel') continue + const found = tables.find((t) => getDbName(d) === t.name) + if (!found) { + delete (d.$container as any)[d.$containerProperty!][d.$containerIndex!] 
+ } + } + + model.declarations = model.declarations.filter((d) => d !== undefined) + + const zmpdelSchema = await new ZModelCodeGenerator().generate(model) + fs.writeFileSync(schemaFile, zmpdelSchema) +} diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts new file mode 100644 index 000000000..4651225e4 --- /dev/null +++ b/packages/cli/src/actions/pull/index.ts @@ -0,0 +1,238 @@ +import type { ZModelServices } from '@zenstackhq/language' +import type { + Attribute, + AttributeArg, + DataField, + DataFieldAttribute, + DataFieldType, + DataModel, + Enum, + EnumField, + Model, + UnsupportedFieldType +} from '@zenstackhq/language/ast' +import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider' +import { getAttributeRef, getDbName } from './utils' + +export function syncEnums(dbEnums: IntrospectedEnum[], model: Model) { + for (const dbEnum of dbEnums) { + let schemaEnum = model.declarations.find( + (d) => d.$type === 'Enum' && getDbName(d) === dbEnum.enum_type + ) as Enum | undefined + + if (!schemaEnum) { + schemaEnum = { + $type: 'Enum' as const, + $container: model, + name: dbEnum.enum_type, + attributes: [], + comments: [], + fields: [], + } + model.declarations.push(schemaEnum) + } + schemaEnum.fields = dbEnum.values.map((v) => { + const existingValue = schemaEnum.fields.find((f) => getDbName(f) === v) + if (!existingValue) { + const enumField: EnumField = { + $type: 'EnumField' as const, + $container: schemaEnum, + name: v, + attributes: [], + comments: [], + } + return enumField + } + return existingValue + }) + } +} + +export type Relation = { + schema: string + table: string + column: string + type: 'one' | 'many' + fk_name: string + nullable: boolean + references: { + schema: string | null + table: string | null + column: string | null + } +} + +export function syncTable({ + model, + provider, + table, +}: { + table: IntrospectedTable + model: Model + provider: IntrospectionProvider +}) 
{ + const relations: Relation[] = [] + let modelTable = model.declarations.find( + (d) => d.$type === 'DataModel' && getDbName(d) === table.name + ) as DataModel | undefined + + if (!modelTable) { + modelTable = { + $type: 'DataModel' as const, + $container: model, + name: table.name, + fields: [], + attributes: [], + comments: [], + isView: false, + mixins: [], + } + model.declarations.push(modelTable) + } + + modelTable.fields = table.columns.map((col) => { + if (col.foreign_key_table) { + relations.push({ + schema: table.schema, + table: table.name, + column: col.name, + type: col.unique ? 'one' : 'many', + fk_name: col.foreign_key_name!, + nullable: col.nullable, + references: { + schema: col.foreign_key_schema, + table: col.foreign_key_table, + column: col.foreign_key_column, + }, + }) + } + + const fieldPrefix = /[0-9]/g.test(col.name.charAt(0)) ? '_' : '' + const fieldName = `${fieldPrefix}${col.name}` + + const existingField = modelTable!.fields.find( + (f) => getDbName(f) === fieldName + ) + if (!existingField) { + const builtinType = provider.getBuiltinType(col.datatype) + const unsupported: UnsupportedFieldType = { + get $container() { + return type + }, + $type: 'UnsupportedFieldType' as const, + value: { + get $container() { + return unsupported + }, + $type: 'StringLiteral', + value: col.datatype, + }, + } + + const type: DataFieldType = { + get $container() { + return field + }, + $type: 'DataFieldType' as const, + type: builtinType.type === 'Unsupported' ? undefined : builtinType.type, + array: builtinType.isArray, + unsupported: + builtinType.type === 'Unsupported' ? unsupported : undefined, + optional: col.nullable, + reference: col.options.length + ? 
{ + $refText: col.datatype, + ref: model.declarations.find( + (d) => d.$type === 'Enum' && getDbName(d) === col.datatype + ) as Enum | undefined, + } + : undefined, + } + + const field: DataField = { + $type: 'DataField' as const, + type, + $container: modelTable!, + name: fieldName, + get attributes() { + if (fieldPrefix !== '') return [] + + const attr: DataFieldAttribute = { + $type: 'DataFieldAttribute' as const, + get $container() { + return field + }, + decl: { + $refText: '@map', + ref: model.$document?.references.find( + (r) => + //@ts-ignore + r.ref.$type === 'Attribute' && r.ref.name === '@map' + )?.ref as Attribute, + }, + get args() { + const arg: AttributeArg = { + $type: 'AttributeArg' as const, + get $container() { + return attr + }, + name: 'name', + $resolvedParam: { + name: 'name', + }, + get value() { + return { + $type: 'StringLiteral' as const, + $container: arg, + value: col.name, + } + }, + } + + return [arg] + }, + } + + return [attr] + }, + comments: [], + } + return field + } + return existingField + }) + + return relations +} + +export function syncRelation(model: Model, relation: Relation, services: ZModelServices) { + const idAttribute = getAttributeRef('@id', services) + const uniqueAttribute = getAttributeRef('@unique', services) + const relationAttribute = getAttributeRef('@relation', services) + + if (!idAttribute || !uniqueAttribute || !relationAttribute) { + throw new Error('Cannot find required attributes in the model.') + } + + const sourceModel = model.declarations.find( + (d) => d.$type === 'DataModel' && getDbName(d) === relation.table + ) as DataModel | undefined + if (!sourceModel) return + + const sourceField = sourceModel.fields.find( + (f) => getDbName(f) === relation.column + ) as DataField | undefined + if (!sourceField) return + + const targetModel = model.declarations.find( + (d) => d.$type === 'DataModel' && getDbName(d) === relation.references.table + ) as DataModel | undefined + if (!targetModel) return + + const 
targetField = targetModel.fields.find( + (f) => getDbName(f) === relation.references.column + ) + if (!targetField) return + + //TODO: Finish relation sync +} \ No newline at end of file diff --git a/packages/cli/src/actions/pull/provider/index.ts b/packages/cli/src/actions/pull/provider/index.ts new file mode 100644 index 000000000..82ee2ac38 --- /dev/null +++ b/packages/cli/src/actions/pull/provider/index.ts @@ -0,0 +1,9 @@ +export * from './provider' + +import { postgresql } from "./postgresql"; +import { sqlite } from "./sqlite"; + +export const providers = { + postgresql, + sqlite +}; \ No newline at end of file diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts new file mode 100644 index 000000000..10a9642a3 --- /dev/null +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -0,0 +1,242 @@ +import { Client } from 'pg' +import type { + IntrospectedEnum, + IntrospectedSchema, + IntrospectedTable, + IntrospectionProvider, +} from './provider' + +export const postgresql: IntrospectionProvider = { + getBuiltinType(type) { + const t = (type || '').toLowerCase() + + const isArray = t.startsWith('_') + + switch (t.replace(/^_/, '')) { + // integers + case 'int2': + case 'smallint': + case 'int4': + case 'integer': + return { type: 'Int', isArray } + case 'int8': + case 'bigint': + return { type: 'BigInt', isArray } + + // decimals and floats + case 'numeric': + case 'decimal': + return { type: 'Decimal', isArray } + case 'float4': + case 'real': + case 'float8': + case 'double precision': + return { type: 'Float', isArray } + + // boolean + case 'bool': + case 'boolean': + return { type: 'Boolean', isArray } + + // strings + case 'text': + case 'varchar': + case 'bpchar': + case 'character varying': + case 'character': + return { type: 'String', isArray } + + // uuid + case 'uuid': + return { type: 'String', isArray } + + // dates/times + case 'date': + case 'timestamp': + case 
'timestamptz': + return { type: 'DateTime', isArray } + + // binary + case 'bytea': + return { type: 'Bytes', isArray } + + // json + case 'json': + case 'jsonb': + return { type: 'Json', isArray } + + // unsupported or postgres-specific + case 'time': + case 'timetz': + case 'interval': + case 'money': + case 'xml': + case 'bit': + case 'varbit': + case 'cidr': + case 'inet': + case 'macaddr': + case 'macaddr8': + case 'point': + case 'line': + case 'lseg': + case 'box': + case 'path': + case 'polygon': + case 'circle': + case 'tsvector': + case 'tsquery': + case 'jsonpath': + case 'hstore': + case 'oid': + case 'name': + case 'regclass': + case 'regproc': + case 'regprocedure': + case 'regoper': + case 'regoperator': + case 'regtype': + case 'regconfig': + case 'regdictionary': + case 'pg_lsn': + case 'txid_snapshot': + case 'int4range': + case 'int8range': + case 'numrange': + case 'tsrange': + case 'tstzrange': + case 'daterange': + default: + return { type: 'Unsupported' as const, isArray } + } + }, + async introspect(connectionString: string): Promise { + const client = new Client({ connectionString }) + await client.connect() + + const { rows: tables } = await client.query( + tableIntrospectionQuery + ) + const { rows: enums } = await client.query( + enumIntrospectionQuery + ) + + return { + enums, + tables, + } + }, +} + +const enumIntrospectionQuery = ` +SELECT + n.nspname AS schema_name, + t.typname AS enum_type, + coalesce(json_agg(e.enumlabel ORDER BY e.enumsortorder), '[]') AS values +FROM pg_type t +JOIN pg_enum e ON t.oid = e.enumtypid +JOIN pg_namespace n ON n.oid = t.typnamespace +GROUP BY schema_name, enum_type +ORDER BY schema_name, enum_type;` + +const tableIntrospectionQuery = ` +SELECT +"ns"."nspname" AS "schema", +"cls"."relname" AS "name", +CASE "cls"."relkind" + WHEN 'r' THEN 'table' + WHEN 'v' THEN 'view' + ELSE NULL +END AS "type", +( +SELECT +coalesce(json_agg(agg), '[]') +FROM +( + SELECT + "att"."attname" AS "name", + "typ"."typname" 
AS "datatype", + "tns"."nspname" AS "datatype_schema", + "fk_ns"."nspname" AS "foreign_key_schema", + "fk_cls"."relname" AS "foreign_key_table", + "fk_att"."attname" AS "foreign_key_column", + "fk_con"."conname" AS "foreign_key_name", + CASE "fk_con"."confupdtype" + WHEN 'a' THEN 'NO ACTION' + WHEN 'r' THEN 'RESTRICT' + WHEN 'c' THEN 'CASCADE' + WHEN 'n' THEN 'SET NULL' + WHEN 'd' THEN 'SET DEFAULT' + ELSE NULL + END AS "foreign_key_on_update", + CASE "fk_con"."confdeltype" + WHEN 'a' THEN 'NO ACTION' + WHEN 'r' THEN 'RESTRICT' + WHEN 'c' THEN 'CASCADE' + WHEN 'n' THEN 'SET NULL' + WHEN 'd' THEN 'SET DEFAULT' + ELSE NULL + END AS "foreign_key_on_delete", + "pk_con"."conkey" IS NOT NULL AS "pk", + ( + EXISTS ( + SELECT 1 + FROM "pg_catalog"."pg_constraint" AS "u_con" + WHERE "u_con"."contype" = 'u' + AND "u_con"."conrelid" = "cls"."oid" + AND array_length("u_con"."conkey", 1) = 1 + AND "att"."attnum" = ANY ("u_con"."conkey") + ) + OR EXISTS ( + SELECT 1 + FROM "pg_catalog"."pg_index" AS "u_idx" + WHERE "u_idx"."indrelid" = "cls"."oid" + AND "u_idx"."indisunique" = TRUE + AND "u_idx"."indnkeyatts" = 1 + AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[]) + ) + ) AS "unique", + "att"."attgenerated" != '' AS "computed", + "att"."attnotnull" != TRUE AS "nullable", + coalesce( + ( + SELECT + json_agg("enm"."enumlabel") AS "o" + FROM + "pg_catalog"."pg_enum" AS "enm" + WHERE + "enm"."enumtypid" = "typ"."oid" + ), + '[]' + ) AS "options" + FROM + "pg_catalog"."pg_attribute" AS "att" + INNER JOIN "pg_catalog"."pg_type" AS "typ" ON "typ"."oid" = "att"."atttypid" + INNER JOIN "pg_catalog"."pg_namespace" AS "tns" ON "tns"."oid" = "typ"."typnamespace" + LEFT JOIN "pg_catalog"."pg_constraint" AS "pk_con" ON "pk_con"."contype" = 'p' + AND "pk_con"."conrelid" = "cls"."oid" + AND "att"."attnum" = ANY ("pk_con"."conkey") + LEFT JOIN "pg_catalog"."pg_constraint" AS "fk_con" ON "fk_con"."contype" = 'f' + AND "fk_con"."conrelid" = "cls"."oid" + AND "att"."attnum" = ANY 
("fk_con"."conkey") + LEFT JOIN "pg_catalog"."pg_class" AS "fk_cls" ON "fk_cls"."oid" = "fk_con"."confrelid" + LEFT JOIN "pg_catalog"."pg_namespace" AS "fk_ns" ON "fk_ns"."oid" = "fk_cls"."relnamespace" + LEFT JOIN "pg_catalog"."pg_attribute" AS "fk_att" ON "fk_att"."attrelid" = "fk_cls"."oid" + AND "fk_att"."attnum" = ANY ("fk_con"."confkey") + WHERE + "att"."attrelid" = "cls"."oid" + AND "att"."attnum" >= 0 + AND "att"."attisdropped" != TRUE + ORDER BY "att"."attnum" +) AS agg +) AS "columns" +FROM +"pg_catalog"."pg_class" AS "cls" +INNER JOIN "pg_catalog"."pg_namespace" AS "ns" ON "cls"."relnamespace" = "ns"."oid" +WHERE +"ns"."nspname" !~ '^pg_' +AND "ns"."nspname" != 'information_schema' +AND "cls"."relkind" IN ('r', 'v') +AND "cls"."relname" !~ '^pg_' +AND "cls"."relname" !~ '_prisma_migrations' +` diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts new file mode 100644 index 000000000..d8bd09288 --- /dev/null +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -0,0 +1,44 @@ +import type { BuiltinType } from '@zenstackhq/language/ast' + +export type Cascade = "NO ACTION" | "RESTRICT"| "CASCADE" | "SET NULL" | "SET DEFAULT" | null; + +export interface IntrospectedTable { + schema: string + name: string + type: 'table' | 'view' + columns: { + name: string + datatype: string + datatype_schema: string + foreign_key_schema: string | null + foreign_key_table: string | null + foreign_key_column: string | null + foreign_key_name: string | null + foreign_key_on_update: Cascade + foreign_key_on_delete: Cascade + pk: boolean + computed: boolean + nullable: boolean + options: string[] + unique: boolean + }[] +} + +export type IntrospectedEnum = { + schema_name: string + enum_type: string + values: string[] +} + +export type IntrospectedSchema = { + tables: IntrospectedTable[] + enums: IntrospectedEnum[] +} + +export interface IntrospectionProvider { + introspect(connectionString: string): Promise + 
getBuiltinType(type: string): { + type: BuiltinType | 'Unsupported' + isArray: boolean + } +} diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts new file mode 100644 index 000000000..61883ef90 --- /dev/null +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -0,0 +1,191 @@ +import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider' + +// Note: We dynamically import better-sqlite3 inside the async function to avoid +// requiring it at module load time for environments that don't use SQLite. + +export const sqlite: IntrospectionProvider = { + getBuiltinType(type) { + const t = (type || '').toLowerCase().trim() + + // SQLite has no array types + const isArray = false + + switch (t) { + // integers + case 'int': + case 'integer': + case 'tinyint': + case 'smallint': + case 'mediumint': + return { type: 'Int', isArray } + case 'bigint': + return { type: 'BigInt', isArray } + + // decimals and floats + case 'numeric': + case 'decimal': + return { type: 'Decimal', isArray } + case 'real': + case 'double': + case 'double precision': + case 'float': + return { type: 'Float', isArray } + + // boolean (SQLite stores as integer 0/1, but commonly typed as BOOLEAN) + case 'bool': + case 'boolean': + return { type: 'Boolean', isArray } + + // strings + case 'text': + case 'varchar': + case 'character varying': + case 'char': + case 'character': + case 'clob': + case 'uuid': // often stored as TEXT + return { type: 'String', isArray } + + // dates/times (stored as TEXT/REAL/INTEGER, but commonly typed as DATE/DATETIME) + case 'date': + case 'datetime': + return { type: 'DateTime', isArray } + + // binary + case 'blob': + return { type: 'Bytes', isArray } + + // json (not a native type, but commonly used) + case 'json': + return { type: 'Json', isArray } + + default: { + // Fallbacks based on SQLite type affinity rules + if (t.includes('int')) return { type: 
'Int', isArray } + if (t.includes('char') || t.includes('clob') || t.includes('text')) return { type: 'String', isArray } + if (t.includes('blob')) return { type: 'Bytes', isArray } + if (t.includes('real') || t.includes('floa') || t.includes('doub')) return { type: 'Float', isArray } + if (t.includes('dec') || t.includes('num')) return { type: 'Decimal', isArray } + return { type: 'Unsupported' as const, isArray } + } + } + }, + + async introspect(connectionString: string): Promise { + const SQLite = (await import('better-sqlite3')).default + const db = new SQLite(connectionString, { readonly: true }) + + try { + const all = (sql: string): T[] => { + const stmt: any = db.prepare(sql) + return stmt.all() as T[] + } + + // List user tables and views (exclude internal sqlite_*) + const tablesRaw = all<{ name: string; type: 'table' | 'view' }>( + "SELECT name, type FROM sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name" + ) + + const tables: IntrospectedTable[] = [] + + for (const t of tablesRaw) { + const tableName = t.name + const schema = 'main' + + // Columns with extended info; filter out hidden=1 (internal/rowid), mark computed if hidden=2 (generated) + const columnsInfo = all<{ + cid: number + name: string + type: string + notnull: number + dflt_value: string | null + pk: number + hidden?: number + }>(`PRAGMA table_xinfo('${tableName.replace(/'/g, "''")}')`) + + // Unique columns detection via unique indexes with single column + const uniqueIndexRows = all<{ name: string; unique: number }>( + `PRAGMA index_list('${tableName.replace(/'/g, "''")}')` + ).filter((r) => r.unique === 1) + + const uniqueSingleColumn = new Set() + for (const idx of uniqueIndexRows) { + const idxCols = all<{ name: string }>( + `PRAGMA index_info('${idx.name.replace(/'/g, "''")}')` + ) + if (idxCols.length === 1 && idxCols[0]?.name) { + uniqueSingleColumn.add(idxCols[0].name) + } + } + + // Foreign keys mapping by column name + const fkRows = all<{ + 
id: number + seq: number + table: string + from: string + to: string | null + on_update: any + on_delete: any + }>(`PRAGMA foreign_key_list('${tableName.replace(/'/g, "''")}')`) + + const fkByColumn = new Map< + string, + { + foreign_key_schema: string | null + foreign_key_table: string | null + foreign_key_column: string | null + foreign_key_name: string | null + foreign_key_on_update: IntrospectedTable['columns'][number]['foreign_key_on_update'] + foreign_key_on_delete: IntrospectedTable['columns'][number]['foreign_key_on_delete'] + } + >() + + for (const fk of fkRows) { + fkByColumn.set(fk.from, { + foreign_key_schema: 'main', + foreign_key_table: fk.table || null, + foreign_key_column: fk.to || null, + foreign_key_name: null, + foreign_key_on_update: (fk.on_update as any) ?? null, + foreign_key_on_delete: (fk.on_delete as any) ?? null, + }) + } + + const columns: IntrospectedTable['columns'] = [] + for (const c of columnsInfo) { + // hidden: 1 (hidden/internal) -> skip; 2 (generated) -> mark computed + const hidden = c.hidden ?? 0 + if (hidden === 1) continue + + const fk = fkByColumn.get(c.name) + + columns.push({ + name: c.name, + datatype: c.type || '', + datatype_schema: schema, + foreign_key_schema: fk?.foreign_key_schema ?? null, + foreign_key_table: fk?.foreign_key_table ?? null, + foreign_key_column: fk?.foreign_key_column ?? null, + foreign_key_name: fk?.foreign_key_name ?? null, + foreign_key_on_update: fk?.foreign_key_on_update ?? null, + foreign_key_on_delete: fk?.foreign_key_on_delete ?? 
null, + pk: !!c.pk, + computed: hidden === 2, + nullable: c.notnull !== 1, + options: [], + unique: uniqueSingleColumn.has(c.name), + }) + } + + tables.push({ schema, name: tableName, columns, type: t.type }) + } + + const enums: IntrospectedEnum[] = [] // SQLite doesn't support enums + + return { tables, enums } + } finally { + db.close() + } + }, +} diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts new file mode 100644 index 000000000..b611fbca6 --- /dev/null +++ b/packages/cli/src/actions/pull/utils.ts @@ -0,0 +1,67 @@ +import type { ZModelServices } from '@zenstackhq/language' +import { + DataField, + EnumField, + isInvocationExpr, + type AbstractDeclaration, + type Attribute, + type Model, +} from '@zenstackhq/language/ast' +import { getStringLiteral } from '@zenstackhq/language/utils' +import type { + DataSourceProviderType +} from '@zenstackhq/sdk/schema' +import type { Reference } from 'langium' + +export function getAttribute(model: Model, attrName: string) { + const references = model.$document! + .references as Reference[] + return references.find( + (a) => a.ref!.$type === 'Attribute' && a.ref!.name === attrName + )?.ref as Attribute | undefined +} + +export function getDatasource(model: Model) { + const datasource = model.declarations.find((d) => d.$type === 'DataSource') + if (!datasource) { + throw new Error('No datasource declaration found in the schema.') + } + + const urlField = datasource.fields.find((f) => f.name === 'url')! + let url = getStringLiteral(urlField.value) + + if (!url && isInvocationExpr(urlField.value)) { + url = process.env[getStringLiteral(urlField.value.args[0]) as string]! 
+ } + + if (!url) { + throw new Error('The url field must be a string literal or an env().') + } + + return { + name: datasource.name, + provider: getStringLiteral( + datasource.fields.find((f) => f.name === 'provider')?.value + ) as DataSourceProviderType, + url, + } +} + +export function getDbName( + decl: AbstractDeclaration | DataField | EnumField +): string { + if (!('attributes' in decl)) return decl.name + const nameAttr = decl.attributes.find( + (a) => a.decl.ref?.name === '@@map' || a.decl.ref?.name === '@map' + ) + if (!nameAttr) return decl.name + const attrValue = nameAttr.args[0]?.value + + if (attrValue?.$type !== 'StringLiteral') return decl.name + + return attrValue.value +} + +export function getAttributeRef(name: string, services: ZModelServices) { + return services.shared.workspace.IndexManager.allElements("Attribute").find(a => a.name === name) as Attribute | undefined +} \ No newline at end of file diff --git a/packages/language/src/document.ts b/packages/language/src/document.ts index 9642e61d5..569e04202 100644 --- a/packages/language/src/document.ts +++ b/packages/language/src/document.ts @@ -33,7 +33,7 @@ export async function loadDocument( fileName: string, additionalModelFiles: string[] = [], ): Promise< - { success: true; model: Model; warnings: string[] } | { success: false; errors: string[]; warnings: string[] } + { success: true; model: Model; warnings: string[], services: ZModelServices } | { success: false; errors: string[]; warnings: string[] } > { const { ZModelLanguage: services } = createZModelServices(false); const extensions = services.LanguageMetaData.fileExtensions; @@ -143,6 +143,7 @@ export async function loadDocument( return { success: true, model: document.parseResult.value as Model, + services, warnings, }; } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 3f519e5c3..52c28cb6d 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -6915,6 +6915,9 @@ packages: pg-connection-string@2.9.1: resolution: {integrity: 
sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w==} + pg-connection-string@2.9.1: + resolution: {integrity: sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w==} + pg-int8@1.0.1: resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} engines: {node: '>=4.0.0'} @@ -6948,6 +6951,15 @@ packages: pg-native: optional: true + pg@8.16.3: + resolution: {integrity: sha512-enxc1h0jA/aq5oSDMvqyW3q89ra6XIIDZgCX9vkMrnz5DFTw/Ny3Li2lFQ+pt3L6MCgm/5o2o8HW9hiJji+xvw==} + engines: {node: '>= 16.0.0'} + peerDependencies: + pg-native: '>=3.0.1' + peerDependenciesMeta: + pg-native: + optional: true + pgpass@1.0.5: resolution: {integrity: sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==} @@ -15058,6 +15070,8 @@ snapshots: pg-connection-string@2.9.1: {} + pg-connection-string@2.9.1: {} + pg-int8@1.0.1: {} pg-numeric@1.0.2: {} @@ -15096,6 +15110,16 @@ snapshots: optionalDependencies: pg-cloudflare: 1.2.7 + pg@8.16.3: + dependencies: + pg-connection-string: 2.9.1 + pg-pool: 3.10.1(pg@8.16.3) + pg-protocol: 1.10.3 + pg-types: 2.2.0 + pgpass: 1.0.5 + optionalDependencies: + pg-cloudflare: 1.2.7 + pgpass@1.0.5: dependencies: split2: 4.2.0 From 5652448ead750e631901cbeb78e41a450f70ea03 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 25 Sep 2025 00:49:11 +0200 Subject: [PATCH 02/83] fix: generate imports and attributes for zmodel-code-generator --- packages/language/src/zmodel-code-generator.ts | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/language/src/zmodel-code-generator.ts b/packages/language/src/zmodel-code-generator.ts index 1e0366ede..21bb5cad1 100644 --- a/packages/language/src/zmodel-code-generator.ts +++ b/packages/language/src/zmodel-code-generator.ts @@ -103,10 +103,18 @@ ${ast.fields.map((x) => this.indent + 
this.generate(x)).join('\n')} }`; } + @gen(ModelImport) + private _generateModelImport(ast: ModelImport) { + return `import '${ast.path}'`; + } + @gen(Enum) private _generateEnum(ast: Enum) { return `enum ${ast.name} { -${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')} +${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')}${ast.attributes.length > 0 + ? '\n\n' + ast.attributes.map((x) => this.indent + this.generate(x)).join('\n') + : '' + } }`; } From 6b165641007246b6ca08fc00224a231fc87915d5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 25 Sep 2025 00:50:08 +0200 Subject: [PATCH 03/83] fix: add option to not exclude imports in loadDocument --- packages/language/src/document.ts | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/packages/language/src/document.ts b/packages/language/src/document.ts index 569e04202..2fdce233d 100644 --- a/packages/language/src/document.ts +++ b/packages/language/src/document.ts @@ -32,8 +32,10 @@ import type { ZModelFormatter } from './zmodel-formatter'; export async function loadDocument( fileName: string, additionalModelFiles: string[] = [], + keepImports: boolean = false, ): Promise< - { success: true; model: Model; warnings: string[], services: ZModelServices } | { success: false; errors: string[]; warnings: string[] } + | { success: true; model: Model; warnings: string[]; services: ZModelServices } + | { success: false; errors: string[]; warnings: string[] } > { const { ZModelLanguage: services } = createZModelServices(false); const extensions = services.LanguageMetaData.fileExtensions; @@ -121,14 +123,16 @@ export async function loadDocument( const model = document.parseResult.value as Model; - // merge all declarations into the main document - const imported = mergeImportsDeclarations(langiumDocuments, model); + if (keepImports === false) { + // merge all declarations into the main document + const imported = 
mergeImportsDeclarations(langiumDocuments, model); - // remove imported documents - imported.forEach((model) => { - langiumDocuments.deleteDocument(model.$document!.uri); - services.shared.workspace.IndexManager.remove(model.$document!.uri); - }); + // remove imported documents + imported.forEach((model) => { + langiumDocuments.deleteDocument(model.$document!.uri); + services.shared.workspace.IndexManager.remove(model.$document!.uri); + }); + } // extra validation after merging imported declarations const additionalErrors = validationAfterImportMerge(model); From b13e15f815c7f7520df74c29ccf059ce29384784 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 25 Sep 2025 00:53:50 +0200 Subject: [PATCH 04/83] fix: continue work on db pull --- packages/cli/package.json | 1 + packages/cli/src/actions/action-utils.ts | 2 +- packages/cli/src/actions/db.ts | 24 ++-- packages/cli/src/actions/pull/index.ts | 132 +++++++++++---------- packages/cli/src/actions/pull/utils.ts | 33 +++++- packages/cli/src/index.ts | 8 ++ pnpm-lock.yaml | 141 +++++++++++++++++++---- 7 files changed, 241 insertions(+), 100 deletions(-) diff --git a/packages/cli/package.json b/packages/cli/package.json index 750de5c68..5f7f2985c 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -36,6 +36,7 @@ "./package.json": "./package.json" }, "dependencies": { + "@dotenvx/dotenvx": "^1.51.0", "@zenstackhq/common-helpers": "workspace:*", "@zenstackhq/language": "workspace:*", "@zenstackhq/orm": "workspace:*", diff --git a/packages/cli/src/actions/action-utils.ts b/packages/cli/src/actions/action-utils.ts index 78e4cb38b..033cbdd48 100644 --- a/packages/cli/src/actions/action-utils.ts +++ b/packages/cli/src/actions/action-utils.ts @@ -56,7 +56,7 @@ export async function loadSchemaDocument(schemaFile: string) { } export async function loadSchemaDocumentWithServices(schemaFile: string) { - const loadResult = await loadDocument(schemaFile); + const loadResult = await 
loadDocument(schemaFile, [], true); if (!loadResult.success) { loadResult.errors.forEach((err) => { console.error(colors.red(err)); diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 6c39a3529..e79073e33 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,5 +1,6 @@ import { ZModelCodeGenerator } from '@zenstackhq/sdk'; import fs from 'node:fs'; +import path from 'node:path'; import { execPrisma } from '../utils/exec-utils'; import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, requireDataSourceUrl, loadSchemaDocumentWithServices } from './action-utils'; import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; @@ -14,6 +15,7 @@ type PushOptions = { type PullOptions = { schema?: string; + out?: string; }; /** @@ -64,7 +66,7 @@ async function runPush(options: PushOptions) { async function runPull(options: PullOptions) { const schemaFile = getSchemaFile(options.schema); const { model, services } = await loadSchemaDocumentWithServices(schemaFile); - + await import("@dotenvx/dotenvx/config") const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql'] const datasource = getDatasource(model) @@ -86,16 +88,16 @@ async function runPull(options: PullOptions) { const { enums, tables } = await provider.introspect(datasource.url) - syncEnums(enums, model) + syncEnums({ dbEnums: enums, model, services }) const resolveRelations: Relation[] = [] for (const table of tables) { - const relations = syncTable({ table, model, provider }) + const relations = syncTable({ table, model, provider, services }) resolveRelations.push(...relations) } - for (const rel of resolveRelations) { - syncRelation(model, rel, services); + for (const relation of resolveRelations) { + syncRelation({ model, relation, services }); } for (const d of model.declarations) { @@ -108,6 +110,14 @@ async function runPull(options: PullOptions) { model.declarations = model.declarations.filter((d) => d !== undefined) - const 
zmpdelSchema = await new ZModelCodeGenerator().generate(model) - fs.writeFileSync(schemaFile, zmpdelSchema) + const generator = await new ZModelCodeGenerator(); + + const zmodelSchema = await generator.generate(model) + + console.log(options.out ? `Writing to ${options.out}` : schemaFile); + + const outPath = options.out ? path.resolve(options.out) : schemaFile; + console.log(outPath); + + fs.writeFileSync(outPath, zmodelSchema) } diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 4651225e4..6a7e2ba23 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,5 +1,6 @@ import type { ZModelServices } from '@zenstackhq/language' import type { + ArrayExpr, Attribute, AttributeArg, DataField, @@ -9,18 +10,20 @@ import type { Enum, EnumField, Model, + ReferenceExpr, + StringLiteral, UnsupportedFieldType } from '@zenstackhq/language/ast' +import { getStringLiteral } from '@zenstackhq/language/utils' import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider' -import { getAttributeRef, getDbName } from './utils' +import { getAttributeRef, getDbName, getEnumRef, getModelRef } from './utils' -export function syncEnums(dbEnums: IntrospectedEnum[], model: Model) { +export function syncEnums({ dbEnums, model, services }: { dbEnums: IntrospectedEnum[], model: Model, services: ZModelServices }) { for (const dbEnum of dbEnums) { - let schemaEnum = model.declarations.find( - (d) => d.$type === 'Enum' && getDbName(d) === dbEnum.enum_type - ) as Enum | undefined + let schemaEnum = getEnumRef(dbEnum.enum_type, services); if (!schemaEnum) { + console.log(`Adding enum for type ${dbEnum.enum_type}`); schemaEnum = { $type: 'Enum' as const, $container: model, @@ -66,17 +69,29 @@ export function syncTable({ model, provider, table, + services }: { table: IntrospectedTable model: Model provider: IntrospectionProvider + services: ZModelServices }) { + const idAttribute 
= getAttributeRef('@id', services) + const uniqueAttribute = getAttributeRef('@unique', services) + const relationAttribute = getAttributeRef('@relation', services) + const fieldMapAttribute = getAttributeRef('@map', services) + const tableMapAttribute = getAttributeRef('@@map', services) + + if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) { + throw new Error('Cannot find required attributes in the model.') + } + const relations: Relation[] = [] - let modelTable = model.declarations.find( - (d) => d.$type === 'DataModel' && getDbName(d) === table.name - ) as DataModel | undefined + let modelTable = getModelRef(table.name, services) if (!modelTable) { + console.log(`Adding model for table ${table.name}`); + modelTable = { $type: 'DataModel' as const, $container: model, @@ -96,7 +111,7 @@ export function syncTable({ schema: table.schema, table: table.name, column: col.name, - type: col.unique ? 'one' : 'many', + type: 'one', fk_name: col.foreign_key_name!, nullable: col.nullable, references: { @@ -115,67 +130,54 @@ export function syncTable({ ) if (!existingField) { const builtinType = provider.getBuiltinType(col.datatype) - const unsupported: UnsupportedFieldType = { - get $container() { - return type - }, - $type: 'UnsupportedFieldType' as const, - value: { - get $container() { - return unsupported - }, - $type: 'StringLiteral', - value: col.datatype, - }, - } - - const type: DataFieldType = { - get $container() { - return field - }, - $type: 'DataFieldType' as const, - type: builtinType.type === 'Unsupported' ? undefined : builtinType.type, - array: builtinType.isArray, - unsupported: - builtinType.type === 'Unsupported' ? unsupported : undefined, - optional: col.nullable, - reference: col.options.length - ? { + const field: DataField = { + $type: 'DataField' as const, + get type() { + return { + $container: this, + $type: 'DataFieldType' as const, + type: builtinType.type === 'Unsupported' ? 
undefined : builtinType.type, + array: builtinType.isArray, + get unsupported() { + return builtinType.type === 'Unsupported' ? { + $container: this, + $type: 'UnsupportedFieldType' as const, + get value() { + return { + $container: this, + $type: 'StringLiteral', + value: col.datatype, + } satisfies StringLiteral + }, + } satisfies UnsupportedFieldType : undefined + }, + optional: col.nullable, + reference: col.options.length + ? { $refText: col.datatype, ref: model.declarations.find( (d) => d.$type === 'Enum' && getDbName(d) === col.datatype - ) as Enum | undefined, - } - : undefined, - } - - const field: DataField = { - $type: 'DataField' as const, - type, + ) as Enum | undefined, + } + : undefined, + } satisfies DataFieldType + }, $container: modelTable!, name: fieldName, get attributes() { if (fieldPrefix !== '') return [] - const attr: DataFieldAttribute = { + return [{ $type: 'DataFieldAttribute' as const, - get $container() { - return field - }, + $container: this, decl: { $refText: '@map', - ref: model.$document?.references.find( - (r) => - //@ts-ignore - r.ref.$type === 'Attribute' && r.ref.name === '@map' - )?.ref as Attribute, + ref: fieldMapAttribute, }, get args() { - const arg: AttributeArg = { + return [{ $type: 'AttributeArg' as const, - get $container() { - return attr - }, + $container: this, name: 'name', $resolvedParam: { name: 'name', @@ -183,17 +185,13 @@ export function syncTable({ get value() { return { $type: 'StringLiteral' as const, - $container: arg, + $container: this, value: col.name, } }, - } - - return [arg] + }] satisfies AttributeArg[] }, - } - - return [attr] + }] satisfies DataFieldAttribute[] }, comments: [], } @@ -205,10 +203,16 @@ export function syncTable({ return relations } -export function syncRelation(model: Model, relation: Relation, services: ZModelServices) { +export function syncRelation({ model, relation, services }: { model: Model, relation: Relation, services: ZModelServices }) { const idAttribute = 
getAttributeRef('@id', services) const uniqueAttribute = getAttributeRef('@unique', services) const relationAttribute = getAttributeRef('@relation', services) + const fieldMapAttribute = getAttributeRef('@map', services) + const tableMapAttribute = getAttributeRef('@@map', services) + + if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) { + throw new Error('Cannot find required attributes in the model.') + } if (!idAttribute || !uniqueAttribute || !relationAttribute) { throw new Error('Cannot find required attributes in the model.') diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index b611fbca6..defd0f307 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -1,11 +1,13 @@ import type { ZModelServices } from '@zenstackhq/language' import { + AbstractDeclaration, DataField, + DataModel, + Enum, EnumField, isInvocationExpr, - type AbstractDeclaration, type Attribute, - type Model, + type Model } from '@zenstackhq/language/ast' import { getStringLiteral } from '@zenstackhq/language/utils' import type { @@ -28,10 +30,20 @@ export function getDatasource(model: Model) { } const urlField = datasource.fields.find((f) => f.name === 'url')! + let url = getStringLiteral(urlField.value) if (!url && isInvocationExpr(urlField.value)) { - url = process.env[getStringLiteral(urlField.value.args[0]) as string]! 
+ const envName = getStringLiteral(urlField.value.args[0]?.value) + if (!envName) { + throw new Error('The url field must be a string literal or an env().') + } + if (!process.env[envName]) { + throw new Error( + `Environment variable ${envName} is not set, please set it to the database connection string.` + ) + } + url = process.env[envName] } if (!url) { @@ -62,6 +74,19 @@ export function getDbName( return attrValue.value } + +export function getDeclarationRef(type: T["$type"], name: string, services: ZModelServices) { + return services.shared.workspace.IndexManager.allElements(type).find((m) => m.node && getDbName(m.node as T) === name)?.node as T | undefined +} + +export function getEnumRef(name: string, services: ZModelServices) { + return getDeclarationRef('Enum', name, services); +} + +export function getModelRef(name: string, services: ZModelServices) { + return getDeclarationRef('DataModel', name, services); +} + export function getAttributeRef(name: string, services: ZModelServices) { - return services.shared.workspace.IndexManager.allElements("Attribute").find(a => a.name === name) as Attribute | undefined + return getDeclarationRef('Attribute', name, services); } \ No newline at end of file diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index 4efc86fd9..2444b10b5 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -143,6 +143,14 @@ function createProgram() { .addOption(new Option('--force-reset', 'force a reset of the database before push')) .action((options) => dbAction('push', options)); + dbCommand + .command('pull') + .description('Introspect your database.') + .addOption(schemaOption) + .addOption(noVersionCheckOption) + .addOption(new Option('--out ', 'add custom output path for the introspected schema')) + .action((options) => dbAction('pull', options)); + dbCommand .command('seed') .description('Seed the database') diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 52c28cb6d..8abc943c3 100644 --- 
a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -192,6 +192,9 @@ importers: packages/cli: dependencies: + '@dotenvx/dotenvx': + specifier: ^1.51.0 + version: 1.51.4 '@zenstackhq/common-helpers': specifier: workspace:* version: link:../common-helpers @@ -1558,12 +1561,22 @@ packages: resolution: {integrity: sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==} engines: {node: '>=18'} + '@dotenvx/dotenvx@1.51.4': + resolution: {integrity: sha512-AoziS8lRQ3ew/lY5J4JSlzYSN9Fo0oiyMBY37L3Bwq4mOQJT5GSrdZYLFPt6pH1LApDI3ZJceNyx+rHRACZSeQ==} + hasBin: true + '@dxup/nuxt@0.2.2': resolution: {integrity: sha512-RNpJjDZs9+JcT9N87AnOuHsNM75DEd58itADNd/s1LIF6BZbTLZV0xxilJZb55lntn4TYvscTaXLCBX2fq9CXg==} '@dxup/unimport@0.1.2': resolution: {integrity: sha512-/B8YJGPzaYq1NbsQmwgP8EZqg40NpTw4ZB3suuI0TplbxKHeK94jeaawLmVhCv+YwUnOpiWEz9U6SeThku/8JQ==} + '@ecies/ciphers@0.2.5': + resolution: {integrity: sha512-GalEZH4JgOMHYYcYmVqnFirFsjZHeoGMDt9IxEnM9F7GRUUyUksJ7Ou53L83WHJq3RWKD3AcBpo0iQh0oMpf8A==} + engines: {bun: '>=1', deno: '>=2', node: '>=16'} + peerDependencies: + '@noble/ciphers': ^1.0.0 + '@edge-runtime/primitives@6.0.0': resolution: {integrity: sha512-FqoxaBT+prPBHBwE1WXS1ocnu/VLTQyZ6NMUBAdbP7N2hsFTTxMC/jMu2D/8GAlMQfxeuppcPuCUk/HO3fpIvA==} engines: {node: '>=18'} @@ -2383,14 +2396,26 @@ packages: cpu: [x64] os: [win32] + '@noble/ciphers@1.3.0': + resolution: {integrity: sha512-2I0gnIVPtfnMw9ee9h1dJG7tp81+8Ob3OJb3Mv37rx5L40/b0i7djjCVvGOVqc9AEIQyvyu1i6ypKdFw8R8gQw==} + engines: {node: ^14.21.3 || >=16} + '@noble/ciphers@2.0.1': resolution: {integrity: sha512-xHK3XHPUW8DTAobU+G0XT+/w+JLM7/8k1UFdB5xg/zTFPnFCobhftzw8wl4Lw2aq/Rvir5pxfZV5fEazmeCJ2g==} engines: {node: '>= 20.19.0'} + '@noble/curves@1.9.7': + resolution: {integrity: sha512-gbKGcRUYIjA3/zCCNaWDciTMFI0dCkvou3TL8Zmy5Nc7sJ47a0jtOeZoTaMxkuqRo9cRhjOdZJXegxYE5FN/xw==} + engines: {node: ^14.21.3 || >=16} + '@noble/hashes@1.7.1': resolution: {integrity: 
sha512-B8XBPsn4vT/KJAGqDzbwztd+6Yte3P4V7iafm24bxgDe/mlRuK6xmWPuCNrKt2vDafZ8MfJLlchDG/vYafQEjQ==} engines: {node: ^14.21.3 || >=16} + '@noble/hashes@1.8.0': + resolution: {integrity: sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==} + engines: {node: ^14.21.3 || >=16} + '@noble/hashes@2.0.1': resolution: {integrity: sha512-XlOlEbQcE9fmuXxrVTXCTlG2nlRXa9Rj3rr5Ue/+tX+nmkgbX720YHh0VR3hBF9xDvwnb8D2shVGOwNx+ulArw==} engines: {node: '>= 20.19.0'} @@ -5087,6 +5112,10 @@ packages: eastasianwidth@0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} + eciesjs@0.4.16: + resolution: {integrity: sha512-dS5cbA9rA2VR4Ybuvhg6jvdmp46ubLn3E+px8cG/35aEDNclrqoCjg6mt0HYZ/M+OoESS3jSkCrqk1kWAEhWAw==} + engines: {bun: '>=1', deno: '>=2', node: '>=16'} + ee-first@1.1.1: resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} @@ -5376,6 +5405,10 @@ packages: '@sinclair/typebox': optional: true + execa@5.1.1: + resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} + engines: {node: '>=10'} + execa@8.0.1: resolution: {integrity: sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==} engines: {node: '>=16.17'} @@ -5593,6 +5626,10 @@ packages: resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==} engines: {node: '>= 0.4'} + get-stream@6.0.1: + resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} + engines: {node: '>=10'} + get-stream@8.0.1: resolution: {integrity: sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==} engines: {node: '>=16'} @@ -5756,6 +5793,10 @@ packages: httpxy@0.1.7: resolution: {integrity: 
sha512-pXNx8gnANKAndgga5ahefxc++tJvNL87CXoRwxn1cJE2ZkWEojF3tNfQIEhZX/vfpt+wzeAzpUI4qkediX1MLQ==} + human-signals@2.1.0: + resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} + engines: {node: '>=10.17.0'} + human-signals@5.0.0: resolution: {integrity: sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==} engines: {node: '>=16.17.0'} @@ -6677,6 +6718,10 @@ packages: engines: {node: '>= 4'} hasBin: true + npm-run-path@4.0.1: + resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} + engines: {node: '>=8'} + npm-run-path@5.3.0: resolution: {integrity: sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} @@ -6718,6 +6763,10 @@ packages: resolution: {integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==} engines: {node: '>= 0.4'} + object-treeify@1.1.33: + resolution: {integrity: sha512-EFVjAYfzWqWsBMRHPMAXLCDIJnpMhdWAqR7xG6M6a2cs6PMFpl/+Z20w9zDW4vkxOFfddegBKq9Rehd0bxWE7A==} + engines: {node: '>= 10'} + object.assign@4.1.7: resolution: {integrity: sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==} engines: {node: '>= 0.4'} @@ -6915,9 +6964,6 @@ packages: pg-connection-string@2.9.1: resolution: {integrity: sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w==} - pg-connection-string@2.9.1: - resolution: {integrity: sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w==} - pg-int8@1.0.1: resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} engines: {node: '>=4.0.0'} @@ -6951,15 +6997,6 @@ packages: pg-native: optional: true - pg@8.16.3: - resolution: 
{integrity: sha512-enxc1h0jA/aq5oSDMvqyW3q89ra6XIIDZgCX9vkMrnz5DFTw/Ny3Li2lFQ+pt3L6MCgm/5o2o8HW9hiJji+xvw==} - engines: {node: '>= 16.0.0'} - peerDependencies: - pg-native: '>=3.0.1' - peerDependenciesMeta: - pg-native: - optional: true - pgpass@1.0.5: resolution: {integrity: sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==} @@ -7886,6 +7923,10 @@ packages: resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==} engines: {node: '>=4'} + strip-final-newline@2.0.0: + resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} + engines: {node: '>=6'} + strip-final-newline@3.0.0: resolution: {integrity: sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==} engines: {node: '>=12'} @@ -8725,6 +8766,11 @@ packages: engines: {node: '>= 8'} hasBin: true + which@4.0.0: + resolution: {integrity: sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==} + engines: {node: ^16.13.0 || >=18.0.0} + hasBin: true + which@5.0.0: resolution: {integrity: sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==} engines: {node: ^18.17.0 || >=20.5.0} @@ -9352,6 +9398,18 @@ snapshots: '@csstools/css-tokenizer@3.0.4': optional: true + '@dotenvx/dotenvx@1.51.4': + dependencies: + commander: 11.1.0 + dotenv: 17.2.3 + eciesjs: 0.4.16 + execa: 5.1.1 + fdir: 6.5.0(picomatch@4.0.3) + ignore: 5.3.2 + object-treeify: 1.1.33 + picomatch: 4.0.3 + which: 4.0.0 + '@dxup/nuxt@0.2.2(magicast@0.5.1)': dependencies: '@dxup/unimport': 0.1.2 @@ -9364,6 +9422,10 @@ snapshots: '@dxup/unimport@0.1.2': {} + '@ecies/ciphers@0.2.5(@noble/ciphers@1.3.0)': + dependencies: + '@noble/ciphers': 1.3.0 + '@edge-runtime/primitives@6.0.0': {} '@edge-runtime/vm@5.0.0': @@ -9935,10 +9997,18 @@ snapshots: 
'@next/swc-win32-x64-msvc@16.0.10': optional: true + '@noble/ciphers@1.3.0': {} + '@noble/ciphers@2.0.1': {} + '@noble/curves@1.9.7': + dependencies: + '@noble/hashes': 1.8.0 + '@noble/hashes@1.7.1': {} + '@noble/hashes@1.8.0': {} + '@noble/hashes@2.0.1': {} '@nodelib/fs.scandir@2.1.5': @@ -12732,6 +12802,13 @@ snapshots: eastasianwidth@0.2.0: {} + eciesjs@0.4.16: + dependencies: + '@ecies/ciphers': 0.2.5(@noble/ciphers@1.3.0) + '@noble/ciphers': 1.3.0 + '@noble/curves': 1.9.7 + '@noble/hashes': 1.8.0 + ee-first@1.1.1: {} effect@3.18.4: @@ -13218,6 +13295,18 @@ snapshots: optionalDependencies: '@sinclair/typebox': 0.34.41 + execa@5.1.1: + dependencies: + cross-spawn: 7.0.6 + get-stream: 6.0.1 + human-signals: 2.1.0 + is-stream: 2.0.1 + merge-stream: 2.0.0 + npm-run-path: 4.0.1 + onetime: 5.1.2 + signal-exit: 3.0.7 + strip-final-newline: 2.0.0 + execa@8.0.1: dependencies: cross-spawn: 7.0.6 @@ -13511,6 +13600,8 @@ snapshots: dunder-proto: 1.0.1 es-object-atoms: 1.1.1 + get-stream@6.0.1: {} + get-stream@8.0.1: {} get-stream@9.0.1: @@ -13701,6 +13792,8 @@ snapshots: httpxy@0.1.7: {} + human-signals@2.1.0: {} + human-signals@5.0.0: {} human-signals@8.0.1: {} @@ -14665,6 +14758,10 @@ snapshots: shell-quote: 1.8.3 string.prototype.padend: 3.1.6 + npm-run-path@4.0.1: + dependencies: + path-key: 3.1.1 + npm-run-path@5.3.0: dependencies: path-key: 4.0.0 @@ -14815,6 +14912,8 @@ snapshots: object-keys@1.1.1: {} + object-treeify@1.1.33: {} + object.assign@4.1.7: dependencies: call-bind: 1.0.8 @@ -15070,8 +15169,6 @@ snapshots: pg-connection-string@2.9.1: {} - pg-connection-string@2.9.1: {} - pg-int8@1.0.1: {} pg-numeric@1.0.2: {} @@ -15110,16 +15207,6 @@ snapshots: optionalDependencies: pg-cloudflare: 1.2.7 - pg@8.16.3: - dependencies: - pg-connection-string: 2.9.1 - pg-pool: 3.10.1(pg@8.16.3) - pg-protocol: 1.10.3 - pg-types: 2.2.0 - pgpass: 1.0.5 - optionalDependencies: - pg-cloudflare: 1.2.7 - pgpass@1.0.5: dependencies: split2: 4.2.0 @@ -16087,6 +16174,8 @@ snapshots: 
strip-bom@3.0.0: {} + strip-final-newline@2.0.0: {} + strip-final-newline@3.0.0: {} strip-final-newline@4.0.0: {} @@ -17070,6 +17159,10 @@ snapshots: dependencies: isexe: 2.0.0 + which@4.0.0: + dependencies: + isexe: 3.1.1 + which@5.0.0: dependencies: isexe: 3.1.1 From 585b5cd82022a5afa8cce26a52bd82aaa32791df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 25 Sep 2025 00:59:00 +0200 Subject: [PATCH 05/83] fix: missing import --- packages/language/src/zmodel-code-generator.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/language/src/zmodel-code-generator.ts b/packages/language/src/zmodel-code-generator.ts index 21bb5cad1..5730fc5b7 100644 --- a/packages/language/src/zmodel-code-generator.ts +++ b/packages/language/src/zmodel-code-generator.ts @@ -28,6 +28,7 @@ import { LiteralExpr, MemberAccessExpr, Model, + ModelImport, NullExpr, NumberLiteral, ObjectExpr, From 3c928946023bd4399e7fbd72a20e03b9b09708a3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 26 Sep 2025 02:57:44 +0200 Subject: [PATCH 06/83] fix: rewrite model generation generate model from ground up and diff later --- packages/cli/src/actions/db.ts | 39 +- packages/cli/src/actions/pull/index.ts | 423 ++++++++++++++---- .../src/actions/pull/provider/postgresql.ts | 295 ++++++++---- .../cli/src/actions/pull/provider/provider.ts | 9 +- .../cli/src/actions/pull/provider/sqlite.ts | 11 +- 5 files changed, 570 insertions(+), 207 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index e79073e33..61e05956d 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,3 +1,4 @@ +import type { Model } from '@zenstackhq/language/ast'; import { ZModelCodeGenerator } from '@zenstackhq/sdk'; import fs from 'node:fs'; import path from 'node:path'; @@ -5,7 +6,7 @@ import { execPrisma } from '../utils/exec-utils'; import { generateTempPrismaSchema, getSchemaFile, 
handleSubProcessError, requireDataSourceUrl, loadSchemaDocumentWithServices } from './action-utils'; import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; -import { getDatasource, getDbName } from './pull/utils'; +import { getDatasource } from './pull/utils'; type PushOptions = { schema?: string; @@ -88,31 +89,35 @@ async function runPull(options: PullOptions) { const { enums, tables } = await provider.introspect(datasource.url) - syncEnums({ dbEnums: enums, model, services }) + const newModel: Model = { + $type: 'Model', + $container: undefined, + $containerProperty: undefined, + $containerIndex: undefined, + declarations: [...model.declarations.filter(d => ["DataSource"].includes(d.$type))], + imports: [], + }; + + + syncEnums({ dbEnums: enums, model: newModel, services }) - const resolveRelations: Relation[] = [] - for (const table of tables) { - const relations = syncTable({ table, model, provider, services }) - resolveRelations.push(...relations) - } - for (const relation of resolveRelations) { - syncRelation({ model, relation, services }); + + const resolvedRelations: Relation[] = [] + for (const table of tables) { + const relations = syncTable({ table, model: newModel, provider, services }) + resolvedRelations.push(...relations) } - for (const d of model.declarations) { - if (d.$type !== 'DataModel') continue - const found = tables.find((t) => getDbName(d) === t.name) - if (!found) { - delete (d.$container as any)[d.$containerProperty!][d.$containerIndex!] - } + for (const relation of resolvedRelations) { + syncRelation({ model: newModel, relation, services }); } - model.declarations = model.declarations.filter((d) => d !== undefined) + //TODO: diff models and apply changes only const generator = await new ZModelCodeGenerator(); - const zmodelSchema = await generator.generate(model) + const zmodelSchema = await generator.generate(newModel) console.log(options.out ? 
`Writing to ${options.out}` : schemaFile); diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 6a7e2ba23..75225c956 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,7 +1,6 @@ import type { ZModelServices } from '@zenstackhq/language' import type { ArrayExpr, - Attribute, AttributeArg, DataField, DataFieldAttribute, @@ -14,40 +13,28 @@ import type { StringLiteral, UnsupportedFieldType } from '@zenstackhq/language/ast' -import { getStringLiteral } from '@zenstackhq/language/utils' import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider' -import { getAttributeRef, getDbName, getEnumRef, getModelRef } from './utils' +import { getAttributeRef, getDbName } from './utils' -export function syncEnums({ dbEnums, model, services }: { dbEnums: IntrospectedEnum[], model: Model, services: ZModelServices }) { +export function syncEnums({ dbEnums, model }: { dbEnums: IntrospectedEnum[], model: Model, services: ZModelServices }) { for (const dbEnum of dbEnums) { - let schemaEnum = getEnumRef(dbEnum.enum_type, services); - - if (!schemaEnum) { - console.log(`Adding enum for type ${dbEnum.enum_type}`); - schemaEnum = { - $type: 'Enum' as const, - $container: model, - name: dbEnum.enum_type, - attributes: [], - comments: [], - fields: [], - } - model.declarations.push(schemaEnum) - } - schemaEnum.fields = dbEnum.values.map((v) => { - const existingValue = schemaEnum.fields.find((f) => getDbName(f) === v) - if (!existingValue) { - const enumField: EnumField = { + const schemaEnum = { + $type: 'Enum' as const, + $container: model, + name: dbEnum.enum_type, + attributes: [], + comments: [], + get fields() { + return dbEnum.values.map((v): EnumField => ({ $type: 'EnumField' as const, $container: schemaEnum, name: v, attributes: [], comments: [], - } - return enumField + })); } - return existingValue - }) + } + model.declarations.push(schemaEnum) } } @@ 
-62,6 +49,7 @@ export type Relation = { schema: string | null table: string | null column: string | null + type: 'one' | 'many' } } @@ -78,6 +66,7 @@ export function syncTable({ }) { const idAttribute = getAttributeRef('@id', services) const uniqueAttribute = getAttributeRef('@unique', services) + const modelUniqueAttribute = getAttributeRef('@@unique', services) const relationAttribute = getAttributeRef('@relation', services) const fieldMapAttribute = getAttributeRef('@map', services) const tableMapAttribute = getAttributeRef('@@map', services) @@ -87,25 +76,21 @@ export function syncTable({ } const relations: Relation[] = [] - let modelTable = getModelRef(table.name, services) - - if (!modelTable) { - console.log(`Adding model for table ${table.name}`); - - modelTable = { - $type: 'DataModel' as const, - $container: model, - name: table.name, - fields: [], - attributes: [], - comments: [], - isView: false, - mixins: [], - } - model.declarations.push(modelTable) + const modelTable: DataModel = { + $type: 'DataModel' as const, + $container: model, + name: table.name, + fields: [], + attributes: [], + comments: [], + isView: false, + mixins: [], } + model.declarations.push(modelTable) modelTable.fields = table.columns.map((col) => { + if (col.default) console.log(`${table.name}.${col.name} -> ${col.default}`); + if (col.foreign_key_table) { relations.push({ schema: table.schema, @@ -118,6 +103,7 @@ export function syncTable({ schema: col.foreign_key_schema, table: col.foreign_key_table, column: col.foreign_key_column, + type: col.unique ? 'one' : 'many', }, }) } @@ -125,49 +111,100 @@ export function syncTable({ const fieldPrefix = /[0-9]/g.test(col.name.charAt(0)) ? 
'_' : '' const fieldName = `${fieldPrefix}${col.name}` - const existingField = modelTable!.fields.find( - (f) => getDbName(f) === fieldName - ) - if (!existingField) { - const builtinType = provider.getBuiltinType(col.datatype) - const field: DataField = { - $type: 'DataField' as const, - get type() { - return { + const builtinType = provider.getBuiltinType(col.datatype) + const field: DataField = { + $type: 'DataField' as const, + get type() { + return { + $container: this, + $type: 'DataFieldType' as const, + type: builtinType.type === 'Unsupported' ? undefined : builtinType.type, + array: builtinType.isArray, + get unsupported() { + return builtinType.type === 'Unsupported' ? { + $container: this, + $type: 'UnsupportedFieldType' as const, + get value() { + return { + $container: this, + $type: 'StringLiteral', + value: col.datatype, + } satisfies StringLiteral + }, + } satisfies UnsupportedFieldType : undefined + }, + optional: col.nullable, + reference: col.options.length + ? { + $refText: col.datatype, + ref: model.declarations.find( + (d) => d.$type === 'Enum' && getDbName(d) === col.datatype + ) as Enum | undefined, + } + : undefined, + } satisfies DataFieldType + }, + $container: modelTable!, + name: fieldName, + get attributes() { + if (fieldPrefix !== '') return [] + + const getDefaultAttrs = () => { + if (!col.default) return []; + + const defaultValue = col.default && provider.getDefaultValue({ + fieldName: col.name, + defaultValue: col.default, + container: this, + services, + enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], + }) + + if (!defaultValue) return []; + + if (Array.isArray(defaultValue)) { + return defaultValue; + } + + if (defaultValue?.$type === 'DataFieldAttribute') { + return [defaultValue]; + } + + return [{ + $type: 'DataFieldAttribute' as const, $container: this, - $type: 'DataFieldType' as const, - type: builtinType.type === 'Unsupported' ? 
undefined : builtinType.type, - array: builtinType.isArray, - get unsupported() { - return builtinType.type === 'Unsupported' ? { + decl: { + $refText: 'default', + ref: getAttributeRef('@default', services) + }, + get args() { + return [{ + $type: 'AttributeArg' as const, $container: this, - $type: 'UnsupportedFieldType' as const, + name: '', + $resolvedParam: { + name: '', + }, get value() { - return { - $container: this, - $type: 'StringLiteral', - value: col.datatype, - } satisfies StringLiteral + return { ...defaultValue, $container: this } }, - } satisfies UnsupportedFieldType : undefined + }] satisfies AttributeArg[] }, - optional: col.nullable, - reference: col.options.length - ? { - $refText: col.datatype, - ref: model.declarations.find( - (d) => d.$type === 'Enum' && getDbName(d) === col.datatype - ) as Enum | undefined, - } - : undefined, - } satisfies DataFieldType - }, - $container: modelTable!, - name: fieldName, - get attributes() { - if (fieldPrefix !== '') return [] + } satisfies DataFieldAttribute]; + } - return [{ + return [ + ...(col.pk ? 
[{ + $type: 'DataFieldAttribute' as const, + $container: this, + args: [], + decl: { + $refText: '@id', + ref: idAttribute, + }, + }] : []) satisfies DataFieldAttribute[], + ...getDefaultAttrs(), + { $type: 'DataFieldAttribute' as const, $container: this, decl: { @@ -178,9 +215,9 @@ export function syncTable({ return [{ $type: 'AttributeArg' as const, $container: this, - name: 'name', + name: '', $resolvedParam: { - name: 'name', + name: '', }, get value() { return { @@ -189,17 +226,58 @@ export function syncTable({ value: col.name, } }, - }] satisfies AttributeArg[] + } + ] satisfies AttributeArg[] }, - }] satisfies DataFieldAttribute[] - }, - comments: [], - } - return field + } + ] satisfies DataFieldAttribute[] + }, + comments: [], } - return existingField + return field }) + const uniqieColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name) + if (uniqieColumns.length > 0) { + modelTable.attributes.push({ + $type: 'DataModelAttribute' as const, + $container: modelTable, + decl: { + $refText: '@unique', + ref: modelUniqueAttribute, + }, + get args() { + return uniqieColumns.map((c) => ({ + $type: 'AttributeArg' as const, + $container: this, + name: '', + $resolvedParam: { + name: '', + }, + get value() { + return { + $type: 'ArrayExpr' as const, + $container: this, + get items() { + return [{ + $container: this, + $type: 'ReferenceExpr' as const, + target: { + $refText: c, + ref: modelTable.fields.find((f) => f.name === c), + }, + args: [], + }] satisfies ReferenceExpr[] + } + } as ArrayExpr + }, + })) satisfies AttributeArg[] + }, + }) + + return relations + } + return relations } @@ -214,10 +292,6 @@ export function syncRelation({ model, relation, services }: { model: Model, rela throw new Error('Cannot find required attributes in the model.') } - if (!idAttribute || !uniqueAttribute || !relationAttribute) { - throw new Error('Cannot find required attributes in the model.') - } - const sourceModel = model.declarations.find( (d) => d.$type 
=== 'DataModel' && getDbName(d) === relation.table ) as DataModel | undefined @@ -239,4 +313,169 @@ export function syncRelation({ model, relation, services }: { model: Model, rela if (!targetField) return //TODO: Finish relation sync + + const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? '_' : '' + + sourceModel.fields.push({ + $type: 'DataField' as const, + $container: sourceModel, + name: `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`, + comments: [], + get type() { + return { + $container: this, + $type: 'DataFieldType' as const, + reference: { + ref: targetModel, + $refText: targetModel.name, + }, + optional: relation.nullable, + //TODO + array: relation.type === 'many', + } satisfies DataFieldType + }, + get attributes() { + return [{ + $type: 'DataFieldAttribute' as const, + $container: this, + decl: { + $refText: '@relation', + ref: relationAttribute, + }, + get args() { + return [{ + $type: 'AttributeArg' as const, + $container: this, + name: '', + $resolvedParam: { + name: '', + }, + get value() { + return { + $type: 'StringLiteral' as const, + $container: this, + value: relation.fk_name, + } satisfies StringLiteral + }, + }, + { + $type: 'AttributeArg' as const, + $container: this, + name: 'fields', + $resolvedParam: { + name: 'fields', + }, + get value() { + return { + $type: 'ArrayExpr' as const, + $container: this, + get items() { + return [{ + $container: this, + $type: 'ReferenceExpr' as const, + target: { + ref: sourceField, + $refText: sourceField.name, + }, + args: [], + }] satisfies ReferenceExpr[] + }, + } satisfies ArrayExpr + }, + }, { + $type: 'AttributeArg' as const, + $container: this, + name: 'references', + $resolvedParam: { + name: 'references', + }, + get value() { + return { + $type: 'ArrayExpr' as const, + $container: this, + get items() { + return [{ + $container: this, + $type: 'ReferenceExpr' as const, + target: { + ref: targetField, + $refText: 
targetField.name, + }, + args: [], + }] satisfies ReferenceExpr[] + }, + } satisfies ArrayExpr + }, + }, { + $type: 'AttributeArg' as const, + $container: this, + name: 'map', + $resolvedParam: { + name: 'map', + }, + get value() { + return { + $type: 'StringLiteral' as const, + $container: this, + value: relation.fk_name, + } satisfies StringLiteral + }, + }] satisfies AttributeArg[] + }, + }] satisfies DataFieldAttribute[] + }, + }) + + const oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? '_' : '' + const oppositeFieldName = relation.type === 'one' + ? `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}s` + : `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` + + targetModel.fields.push({ + $type: 'DataField' as const, + $container: targetModel, + name: oppositeFieldName, + get type() { + return { + $container: this, + $type: 'DataFieldType' as const, + reference: { + ref: sourceModel, + $refText: sourceModel.name, + }, + optional: relation.references.type === 'one' && relation.nullable, + array: relation.references.type === 'many', + } satisfies DataFieldType + }, + get attributes() { + return [ + { + $type: 'DataFieldAttribute' as const, + $container: this, + decl: { + $refText: '@relation', + ref: relationAttribute, + }, + get args() { + return [{ + $type: 'AttributeArg' as const, + $container: this, + name: '', + $resolvedParam: { + name: '', + }, + get value() { + return { + $type: 'StringLiteral' as const, + $container: this, + value: relation.fk_name, + } satisfies StringLiteral + }, + }] satisfies AttributeArg[] + } + } + ] satisfies DataFieldAttribute[] + }, + comments: [], + }) } \ No newline at end of file diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 10a9642a3..be882be6e 100644 --- 
a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -1,4 +1,6 @@ +import { AttributeArg, DataFieldAttribute, Expression, FunctionDecl, InvocationExpr } from '@zenstackhq/language/ast' import { Client } from 'pg' +import { getAttributeRef, getDbName } from '../utils' import type { IntrospectedEnum, IntrospectedSchema, @@ -126,6 +128,114 @@ export const postgresql: IntrospectionProvider = { tables, } }, + getDefaultValue({ defaultValue, container: $container, fieldName, services, enums }) { + // Handle common cases + console.log(defaultValue); + + const val = defaultValue.trim() + + if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { + const attrs: DataFieldAttribute[] = []; + + attrs.push({ + $type: "DataFieldAttribute" as const, + $container: $container as any, + decl: { + $refText: '@default', + ref: getAttributeRef('@default', services) + }, + get args(): AttributeArg[] { + return [{ + $type: 'AttributeArg' as const, + $container: this as any, + get value(): Expression { + return { + $type: 'InvocationExpr' as const, + $container: this, + function: { + $refText: 'now', + ref: services.shared.workspace.IndexManager.allElements(FunctionDecl).find((f) => (f.node as FunctionDecl)?.name === 'now')?.node as FunctionDecl + }, + args: [], + } satisfies InvocationExpr + } + }] + } + }); + + if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') { + // for updatedAt, use @updatedAt attribute + attrs.push({ + $type: "DataFieldAttribute" as const, + $container: $container as any, + decl: { + $refText: 'updatedAt', + ref: getAttributeRef('@updatedAt', services) + }, + args: [], + }); + } + + return attrs.length === 1 ? 
attrs[0] : attrs; + } + + if (val.includes('::')) { + const [enumValue, enumName] = val.replace(/'|"/g, '').split('::').map((s) => s.trim()) as [string, string] + const enumDef = enums.find((e) => getDbName(e) === enumName) + if (!enumDef) { + throw new Error(`Enum type ${enumName} not found for default value ${defaultValue}`) + } + const enumField = enumDef.fields.find((v) => getDbName(v) === enumValue) + if (!enumField) { + throw new Error(`Enum value ${enumValue} not found in enum ${enumName} for default value ${defaultValue}`) + } + + return { + $type: 'ReferenceExpr' as const, + $container: $container as any, + target: { + $refText: enumField!.name, + ref: enumField, + }, + args: [], + } + } + + if (val === 'true' || val === 'false') { + return { + $type: 'BooleanLiteral' as const, + $container: $container as any, + value: val === 'true', + } + } + + if (/^\d+$/.test(val)) { + return { + $container: $container as any, + $type: 'NumberLiteral' as const, + value: val, + } + } + + if (/^-?\d+(\.\d+)?$/.test(val)) { + // float + return { + $container: $container as any, + $type: 'NumberLiteral' as const, + value: val, + } + } + + if (val.startsWith("'") && val.endsWith("'")) { + // string + return { + $container: $container as any, + $type: 'StringLiteral' as const, + value: val.slice(1, -1).replace(/''/g, "'"), + } + } + return undefined + }, } const enumIntrospectionQuery = ` @@ -141,102 +251,101 @@ ORDER BY schema_name, enum_type;` const tableIntrospectionQuery = ` SELECT -"ns"."nspname" AS "schema", -"cls"."relname" AS "name", -CASE "cls"."relkind" - WHEN 'r' THEN 'table' - WHEN 'v' THEN 'view' - ELSE NULL -END AS "type", -( -SELECT -coalesce(json_agg(agg), '[]') -FROM -( - SELECT - "att"."attname" AS "name", - "typ"."typname" AS "datatype", - "tns"."nspname" AS "datatype_schema", - "fk_ns"."nspname" AS "foreign_key_schema", - "fk_cls"."relname" AS "foreign_key_table", - "fk_att"."attname" AS "foreign_key_column", - "fk_con"."conname" AS "foreign_key_name", - 
CASE "fk_con"."confupdtype" - WHEN 'a' THEN 'NO ACTION' - WHEN 'r' THEN 'RESTRICT' - WHEN 'c' THEN 'CASCADE' - WHEN 'n' THEN 'SET NULL' - WHEN 'd' THEN 'SET DEFAULT' + "ns"."nspname" AS "schema", + "cls"."relname" AS "name", + CASE "cls"."relkind" + WHEN 'r' THEN 'table' + WHEN 'v' THEN 'view' ELSE NULL - END AS "foreign_key_on_update", - CASE "fk_con"."confdeltype" - WHEN 'a' THEN 'NO ACTION' - WHEN 'r' THEN 'RESTRICT' - WHEN 'c' THEN 'CASCADE' - WHEN 'n' THEN 'SET NULL' - WHEN 'd' THEN 'SET DEFAULT' + END AS "type", + CASE + WHEN "cls"."relkind" = 'v' THEN pg_get_viewdef("cls"."oid", true) ELSE NULL - END AS "foreign_key_on_delete", - "pk_con"."conkey" IS NOT NULL AS "pk", - ( - EXISTS ( - SELECT 1 - FROM "pg_catalog"."pg_constraint" AS "u_con" - WHERE "u_con"."contype" = 'u' - AND "u_con"."conrelid" = "cls"."oid" - AND array_length("u_con"."conkey", 1) = 1 - AND "att"."attnum" = ANY ("u_con"."conkey") - ) - OR EXISTS ( - SELECT 1 - FROM "pg_catalog"."pg_index" AS "u_idx" - WHERE "u_idx"."indrelid" = "cls"."oid" - AND "u_idx"."indisunique" = TRUE - AND "u_idx"."indnkeyatts" = 1 - AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[]) - ) - ) AS "unique", - "att"."attgenerated" != '' AS "computed", - "att"."attnotnull" != TRUE AS "nullable", - coalesce( + END AS "definition", ( - SELECT - json_agg("enm"."enumlabel") AS "o" - FROM - "pg_catalog"."pg_enum" AS "enm" - WHERE - "enm"."enumtypid" = "typ"."oid" - ), - '[]' - ) AS "options" - FROM - "pg_catalog"."pg_attribute" AS "att" - INNER JOIN "pg_catalog"."pg_type" AS "typ" ON "typ"."oid" = "att"."atttypid" - INNER JOIN "pg_catalog"."pg_namespace" AS "tns" ON "tns"."oid" = "typ"."typnamespace" - LEFT JOIN "pg_catalog"."pg_constraint" AS "pk_con" ON "pk_con"."contype" = 'p' - AND "pk_con"."conrelid" = "cls"."oid" - AND "att"."attnum" = ANY ("pk_con"."conkey") - LEFT JOIN "pg_catalog"."pg_constraint" AS "fk_con" ON "fk_con"."contype" = 'f' - AND "fk_con"."conrelid" = "cls"."oid" - AND "att"."attnum" = ANY 
("fk_con"."conkey") - LEFT JOIN "pg_catalog"."pg_class" AS "fk_cls" ON "fk_cls"."oid" = "fk_con"."confrelid" - LEFT JOIN "pg_catalog"."pg_namespace" AS "fk_ns" ON "fk_ns"."oid" = "fk_cls"."relnamespace" - LEFT JOIN "pg_catalog"."pg_attribute" AS "fk_att" ON "fk_att"."attrelid" = "fk_cls"."oid" - AND "fk_att"."attnum" = ANY ("fk_con"."confkey") - WHERE - "att"."attrelid" = "cls"."oid" - AND "att"."attnum" >= 0 - AND "att"."attisdropped" != TRUE - ORDER BY "att"."attnum" -) AS agg -) AS "columns" -FROM -"pg_catalog"."pg_class" AS "cls" + SELECT coalesce(json_agg(agg), '[]') + FROM ( + SELECT + "att"."attname" AS "name", + "typ"."typname" AS "datatype", + "tns"."nspname" AS "datatype_schema", + "fk_ns"."nspname" AS "foreign_key_schema", + "fk_cls"."relname" AS "foreign_key_table", + "fk_att"."attname" AS "foreign_key_column", + "fk_con"."conname" AS "foreign_key_name", + CASE "fk_con"."confupdtype" + WHEN 'a' THEN 'NO ACTION' + WHEN 'r' THEN 'RESTRICT' + WHEN 'c' THEN 'CASCADE' + WHEN 'n' THEN 'SET NULL' + WHEN 'd' THEN 'SET DEFAULT' + ELSE NULL + END AS "foreign_key_on_update", + CASE "fk_con"."confdeltype" + WHEN 'a' THEN 'NO ACTION' + WHEN 'r' THEN 'RESTRICT' + WHEN 'c' THEN 'CASCADE' + WHEN 'n' THEN 'SET NULL' + WHEN 'd' THEN 'SET DEFAULT' + ELSE NULL + END AS "foreign_key_on_delete", + "pk_con"."conkey" IS NOT NULL AS "pk", + ( + EXISTS ( + SELECT 1 + FROM "pg_catalog"."pg_constraint" AS "u_con" + WHERE "u_con"."contype" = 'u' + AND "u_con"."conrelid" = "cls"."oid" + AND array_length("u_con"."conkey", 1) = 1 + AND "att"."attnum" = ANY ("u_con"."conkey") + ) + OR EXISTS ( + SELECT 1 + FROM "pg_catalog"."pg_index" AS "u_idx" + WHERE "u_idx"."indrelid" = "cls"."oid" + AND "u_idx"."indisunique" = TRUE + AND "u_idx"."indnkeyatts" = 1 + AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[]) + ) + ) AS "unique", + "att"."attgenerated" != '' AS "computed", + pg_get_expr("def"."adbin", "def"."adrelid") AS "default", + "att"."attnotnull" != TRUE AS "nullable", + coalesce( + ( 
+ SELECT json_agg("enm"."enumlabel") AS "o" + FROM "pg_catalog"."pg_enum" AS "enm" + WHERE "enm"."enumtypid" = "typ"."oid" + ), + '[]' + ) AS "options" + FROM "pg_catalog"."pg_attribute" AS "att" + INNER JOIN "pg_catalog"."pg_type" AS "typ" ON "typ"."oid" = "att"."atttypid" + INNER JOIN "pg_catalog"."pg_namespace" AS "tns" ON "tns"."oid" = "typ"."typnamespace" + LEFT JOIN "pg_catalog"."pg_constraint" AS "pk_con" ON "pk_con"."contype" = 'p' + AND "pk_con"."conrelid" = "cls"."oid" + AND "att"."attnum" = ANY ("pk_con"."conkey") + LEFT JOIN "pg_catalog"."pg_constraint" AS "fk_con" ON "fk_con"."contype" = 'f' + AND "fk_con"."conrelid" = "cls"."oid" + AND "att"."attnum" = ANY ("fk_con"."conkey") + LEFT JOIN "pg_catalog"."pg_class" AS "fk_cls" ON "fk_cls"."oid" = "fk_con"."confrelid" + LEFT JOIN "pg_catalog"."pg_namespace" AS "fk_ns" ON "fk_ns"."oid" = "fk_cls"."relnamespace" + LEFT JOIN "pg_catalog"."pg_attribute" AS "fk_att" ON "fk_att"."attrelid" = "fk_cls"."oid" + AND "fk_att"."attnum" = ANY ("fk_con"."confkey") + LEFT JOIN "pg_catalog"."pg_attrdef" AS "def" ON "def"."adrelid" = "cls"."oid" AND "def"."adnum" = "att"."attnum" + WHERE + "att"."attrelid" = "cls"."oid" + AND "att"."attnum" >= 0 + AND "att"."attisdropped" != TRUE + ORDER BY "att"."attnum" + ) AS agg + ) AS "columns" +FROM "pg_catalog"."pg_class" AS "cls" INNER JOIN "pg_catalog"."pg_namespace" AS "ns" ON "cls"."relnamespace" = "ns"."oid" WHERE -"ns"."nspname" !~ '^pg_' -AND "ns"."nspname" != 'information_schema' -AND "cls"."relkind" IN ('r', 'v') -AND "cls"."relname" !~ '^pg_' -AND "cls"."relname" !~ '_prisma_migrations' + "ns"."nspname" !~ '^pg_' + AND "ns"."nspname" != 'information_schema' + AND "cls"."relkind" IN ('r', 'v') + AND "cls"."relname" !~ '^pg_' + AND "cls"."relname" !~ '_prisma_migrations' ` diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index d8bd09288..b6f76b98e 100644 --- 
a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -1,11 +1,14 @@ -import type { BuiltinType } from '@zenstackhq/language/ast' +import type { BuiltinType, DataFieldAttribute, Enum, InvocationExpr, LiteralExpr, ReferenceExpr } from '@zenstackhq/language/ast' +import type { AstNode } from '../../../../../language/dist/ast.cjs'; +import type { ZModelServices } from '@zenstackhq/language'; -export type Cascade = "NO ACTION" | "RESTRICT"| "CASCADE" | "SET NULL" | "SET DEFAULT" | null; +export type Cascade = "NO ACTION" | "RESTRICT" | "CASCADE" | "SET NULL" | "SET DEFAULT" | null; export interface IntrospectedTable { schema: string name: string type: 'table' | 'view' + definition: string | null columns: { name: string datatype: string @@ -21,6 +24,7 @@ export interface IntrospectedTable { nullable: boolean options: string[] unique: boolean + default: string | null }[] } @@ -41,4 +45,5 @@ export interface IntrospectionProvider { type: BuiltinType | 'Unsupported' isArray: boolean } + getDefaultValue(args: { fieldName: string, defaultValue: string, container: T, services: ZModelServices, enums: Enum[] }): LiteralExpr | InvocationExpr | DataFieldAttribute | DataFieldAttribute[] | ReferenceExpr | undefined } diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index 61883ef90..3feaa5abc 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -82,8 +82,8 @@ export const sqlite: IntrospectionProvider = { } // List user tables and views (exclude internal sqlite_*) - const tablesRaw = all<{ name: string; type: 'table' | 'view' }>( - "SELECT name, type FROM sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name" + const tablesRaw = all<{ name: string; type: 'table' | 'view'; definition: string | null }>( + "SELECT name, type, sql AS definition FROM sqlite_schema 
WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name" ) const tables: IntrospectedTable[] = [] @@ -173,12 +173,13 @@ export const sqlite: IntrospectionProvider = { pk: !!c.pk, computed: hidden === 2, nullable: c.notnull !== 1, + default: c.dflt_value, options: [], unique: uniqueSingleColumn.has(c.name), }) } - tables.push({ schema, name: tableName, columns, type: t.type }) + tables.push({ schema, name: tableName, columns, type: t.type, definition: t.definition }) } const enums: IntrospectedEnum[] = [] // SQLite doesn't support enums @@ -188,4 +189,8 @@ export const sqlite: IntrospectionProvider = { db.close() } }, + + getDefaultValue(_args) { + throw new Error('Not implemented yet for SQLite') + } } From eb8b68285dd4bcc88aa6348852ccf8d34cef27c4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Mon, 6 Oct 2025 00:56:27 +0200 Subject: [PATCH 07/83] feat: add ast factory --- packages/cli/src/actions/db.ts | 42 +- packages/cli/src/actions/pull/index.ts | 754 ++++++++---------- .../cli/src/actions/pull/provider/index.ts | 10 +- .../src/actions/pull/provider/postgresql.ts | 428 +++++----- .../cli/src/actions/pull/provider/provider.ts | 96 ++- .../cli/src/actions/pull/provider/sqlite.ts | 173 ++-- packages/cli/src/actions/pull/utils.ts | 133 +-- packages/language/package.json | 10 + packages/language/src/factory/attribute.ts | 275 +++++++ packages/language/src/factory/declaration.ts | 363 +++++++++ packages/language/src/factory/expression.ts | 303 +++++++ packages/language/src/factory/index.ts | 61 ++ packages/language/src/factory/primitives.ts | 61 ++ packages/language/tsup.config.ts | 1 + 14 files changed, 1858 insertions(+), 852 deletions(-) create mode 100644 packages/language/src/factory/attribute.ts create mode 100644 packages/language/src/factory/declaration.ts create mode 100644 packages/language/src/factory/expression.ts create mode 100644 packages/language/src/factory/index.ts create mode 100644 
packages/language/src/factory/primitives.ts diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 61e05956d..8dea5cd90 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -7,6 +7,7 @@ import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, require import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; import { getDatasource } from './pull/utils'; +import { config } from '@dotenvx/dotenvx'; type PushOptions = { schema?: string; @@ -14,9 +15,11 @@ type PushOptions = { forceReset?: boolean; }; -type PullOptions = { +export type PullOptions = { schema?: string; out?: string; + naming?: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; + alwaysMap?: boolean; }; /** @@ -67,62 +70,57 @@ async function runPush(options: PushOptions) { async function runPull(options: PullOptions) { const schemaFile = getSchemaFile(options.schema); const { model, services } = await loadSchemaDocumentWithServices(schemaFile); - await import("@dotenvx/dotenvx/config") - const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql'] - const datasource = getDatasource(model) + config(); + const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql']; + const datasource = getDatasource(model); if (!datasource) { - throw new Error('No datasource found in the schema.') + throw new Error('No datasource found in the schema.'); } if (!SUPPORTED_PROVIDERS.includes(datasource.provider)) { - throw new Error(`Unsupported datasource provider: ${datasource.provider}`) + throw new Error(`Unsupported datasource provider: ${datasource.provider}`); } const provider = providers[datasource.provider]; if (!provider) { - throw new Error( - `No introspection provider found for: ${datasource.provider}` - ) + throw new Error(`No introspection provider found for: ${datasource.provider}`); } - const { enums, tables } = await provider.introspect(datasource.url) + const { enums, tables } = await 
provider.introspect(datasource.url); const newModel: Model = { $type: 'Model', $container: undefined, $containerProperty: undefined, $containerIndex: undefined, - declarations: [...model.declarations.filter(d => ["DataSource"].includes(d.$type))], + declarations: [...model.declarations.filter((d) => ['DataSource'].includes(d.$type))], imports: [], }; + syncEnums({ dbEnums: enums, model: newModel, services, options }); - syncEnums({ dbEnums: enums, model: newModel, services }) - - - - const resolvedRelations: Relation[] = [] + const resolvedRelations: Relation[] = []; for (const table of tables) { - const relations = syncTable({ table, model: newModel, provider, services }) - resolvedRelations.push(...relations) + const relations = syncTable({ table, model: newModel, provider, services, options }); + resolvedRelations.push(...relations); } for (const relation of resolvedRelations) { - syncRelation({ model: newModel, relation, services }); + syncRelation({ model: newModel, relation, services, options }); } //TODO: diff models and apply changes only - const generator = await new ZModelCodeGenerator(); + const generator = new ZModelCodeGenerator(); - const zmodelSchema = await generator.generate(newModel) + const zmodelSchema = generator.generate(newModel); console.log(options.out ? `Writing to ${options.out}` : schemaFile); const outPath = options.out ? 
path.resolve(options.out) : schemaFile; console.log(outPath); - fs.writeFileSync(outPath, zmodelSchema) + fs.writeFileSync(outPath, zmodelSchema); } diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 75225c956..708244a35 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,481 +1,371 @@ -import type { ZModelServices } from '@zenstackhq/language' -import type { - ArrayExpr, - AttributeArg, - DataField, - DataFieldAttribute, - DataFieldType, - DataModel, - Enum, - EnumField, - Model, - ReferenceExpr, - StringLiteral, - UnsupportedFieldType -} from '@zenstackhq/language/ast' -import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider' -import { getAttributeRef, getDbName } from './utils' - -export function syncEnums({ dbEnums, model }: { dbEnums: IntrospectedEnum[], model: Model, services: ZModelServices }) { +import type { ZModelServices } from '@zenstackhq/language'; +import { isEnum, type DataField, type DataModel, type Enum, type Model } from '@zenstackhq/language/ast'; +import { DataFieldFactory, DataModelFactory, EnumFactory } from '@zenstackhq/language/factory'; +import type { PullOptions } from '../db'; +import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider'; +import { getAttributeRef, getDbName } from './utils'; + +export function syncEnums({ + dbEnums, + model, + options: options, + services, +}: { + dbEnums: IntrospectedEnum[]; + model: Model; + services: ZModelServices; + options: PullOptions; +}) { for (const dbEnum of dbEnums) { - const schemaEnum = { - $type: 'Enum' as const, - $container: model, - name: dbEnum.enum_type, - attributes: [], - comments: [], - get fields() { - return dbEnum.values.map((v): EnumField => ({ - $type: 'EnumField' as const, - $container: schemaEnum, - name: v, - attributes: [], - comments: [], - })); - } - } - model.declarations.push(schemaEnum) + const { 
modified, name } = resolveNameCasing(options, dbEnum.enum_type); + if (modified) console.log(`Mapping enum ${dbEnum.enum_type} to ${name}`); + const factory = new EnumFactory().setName(name); + if (modified) + factory.addAttribute((builder) => + builder + .setDecl(getAttributeRef('@@map', services)!) + .addArg((argBuilder) => argBuilder.StringLiteral.setValue(dbEnum.enum_type)), + ); + + dbEnum.values.map((v) => { + const { name, modified } = resolveNameCasing(options, v); + factory.addField((builder) => { + builder.setName(name); + if (modified) + builder.addAttribute((builder) => + builder + .setDecl(getAttributeRef('@map', services)!) + .addArg((argBuilder) => argBuilder.StringLiteral.setValue(v)), + ); + + return builder; + }); + }); + model.declarations.push(factory.get({ $container: model })); } } -export type Relation = { - schema: string - table: string - column: string - type: 'one' | 'many' - fk_name: string - nullable: boolean - references: { - schema: string | null - table: string | null - column: string | null - type: 'one' | 'many' +function resolveNameCasing(options: PullOptions, originalName: string) { + let name: string; + + switch (options.naming) { + case 'pascal': + name = toPascalCase(originalName); + break; + case 'camel': + name = toCamelCase(originalName); + break; + case 'snake': + name = toSnakeCase(originalName); + break; + case 'kebab': + name = toKebabCase(originalName); + break; + case 'none': + default: + name = originalName; + break; } + + return { + modified: options.alwaysMap ? 
true : name !== originalName, + name, + }; } +function toPascalCase(str: string): string { + return str.replace(/[_\- ]+(\w)/g, (_, c) => c.toUpperCase()).replace(/^\w/, (c) => c.toUpperCase()); +} + +function toCamelCase(str: string): string { + return str.replace(/[_\- ]+(\w)/g, (_, c) => c.toUpperCase()).replace(/^\w/, (c) => c.toLowerCase()); +} + +function toSnakeCase(str: string): string { + return str + .replace(/[- ]+/g, '_') + .replace(/([a-z0-9])([A-Z])/g, '$1_$2') + .toLowerCase(); +} + +function toKebabCase(str: string): string { + return str + .replace(/[_ ]+/g, '-') + .replace(/([a-z0-9])([A-Z])/g, '$1-$2') + .toLowerCase(); +} + +export type Relation = { + schema: string; + table: string; + column: string; + type: 'one' | 'many'; + fk_name: string; + nullable: boolean; + references: { + schema: string | null; + table: string | null; + column: string | null; + type: 'one' | 'many'; + }; +}; + export function syncTable({ model, provider, table, - services + services, + options, }: { - table: IntrospectedTable - model: Model - provider: IntrospectionProvider - services: ZModelServices + table: IntrospectedTable; + model: Model; + provider: IntrospectionProvider; + services: ZModelServices; + options: PullOptions; }) { - const idAttribute = getAttributeRef('@id', services) - const uniqueAttribute = getAttributeRef('@unique', services) - const modelUniqueAttribute = getAttributeRef('@@unique', services) - const relationAttribute = getAttributeRef('@relation', services) - const fieldMapAttribute = getAttributeRef('@map', services) - const tableMapAttribute = getAttributeRef('@@map', services) + const idAttribute = getAttributeRef('@id', services); + const modelIdAttribute = getAttributeRef('@@id', services); + const uniqueAttribute = getAttributeRef('@unique', services); + const modelUniqueAttribute = getAttributeRef('@@unique', services); + const relationAttribute = getAttributeRef('@relation', services); + const fieldMapAttribute = 
getAttributeRef('@map', services); + const tableMapAttribute = getAttributeRef('@@map', services); + const modelindexAttribute = getAttributeRef('@@index', services); - if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) { - throw new Error('Cannot find required attributes in the model.') + if ( + !idAttribute || + !uniqueAttribute || + !relationAttribute || + !fieldMapAttribute || + !tableMapAttribute || + !modelIdAttribute || + !modelUniqueAttribute || + !modelindexAttribute + ) { + throw new Error('Cannot find required attributes in the model.'); } - const relations: Relation[] = [] - const modelTable: DataModel = { - $type: 'DataModel' as const, - $container: model, - name: table.name, - fields: [], - attributes: [], - comments: [], - isView: false, - mixins: [], + const relations: Relation[] = []; + const { name, modified } = resolveNameCasing({ ...options, naming: 'pascal' }, table.name); + const multiPk = table.columns.filter((c) => c.pk).length > 1; + + const modelFactory = new DataModelFactory().setName(name).setIsView(table.type === 'view'); + modelFactory.setContainer(model); + if (modified) { + modelFactory.addAttribute((builder) => + builder.setDecl(tableMapAttribute).addArg((argBuilder) => argBuilder.StringLiteral.setValue(table.name)), + ); } - model.declarations.push(modelTable) - modelTable.fields = table.columns.map((col) => { - if (col.default) console.log(`${table.name}.${col.name} -> ${col.default}`); + if (multiPk) { + const pkColumns = table.columns.filter((c) => c.pk).map((c) => c.name); + modelFactory.addAttribute((builder) => + builder.setDecl(modelIdAttribute).addArg((argBuilder) => { + const arrayExpr = argBuilder.ArrayExpr; + pkColumns.map((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c)!; + arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); + }); + return arrayExpr; + }), + ); + } - if (col.foreign_key_table) { + 
table.columns.forEach((column) => { + if (column.foreign_key_table) { relations.push({ schema: table.schema, table: table.name, - column: col.name, + column: column.name, type: 'one', - fk_name: col.foreign_key_name!, - nullable: col.nullable, + fk_name: column.foreign_key_name!, + nullable: column.nullable, references: { - schema: col.foreign_key_schema, - table: col.foreign_key_table, - column: col.foreign_key_column, - type: col.unique ? 'one' : 'many', + schema: column.foreign_key_schema, + table: column.foreign_key_table, + column: column.foreign_key_column, + type: column.unique ? 'one' : 'many', }, - }) + }); } - const fieldPrefix = /[0-9]/g.test(col.name.charAt(0)) ? '_' : '' - const fieldName = `${fieldPrefix}${col.name}` - - const builtinType = provider.getBuiltinType(col.datatype) - const field: DataField = { - $type: 'DataField' as const, - get type() { - return { - $container: this, - $type: 'DataFieldType' as const, - type: builtinType.type === 'Unsupported' ? undefined : builtinType.type, - array: builtinType.isArray, - get unsupported() { - return builtinType.type === 'Unsupported' ? { - $container: this, - $type: 'UnsupportedFieldType' as const, - get value() { - return { - $container: this, - $type: 'StringLiteral', - value: col.datatype, - } satisfies StringLiteral - }, - } satisfies UnsupportedFieldType : undefined - }, - optional: col.nullable, - reference: col.options.length - ? 
{ - $refText: col.datatype, - ref: model.declarations.find( - (d) => d.$type === 'Enum' && getDbName(d) === col.datatype - ) as Enum | undefined, - } - : undefined, - } satisfies DataFieldType - }, - $container: modelTable!, - name: fieldName, - get attributes() { - if (fieldPrefix !== '') return [] - - const getDefaultAttrs = () => { - if (!col.default) return []; - - const defaultValue = col.default && provider.getDefaultValue({ - fieldName: col.name, - defaultValue: col.default, - container: this, - services, - enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], - }) - - if (!defaultValue) return []; - - if (Array.isArray(defaultValue)) { - return defaultValue; - } + const fieldPrefix = /[0-9]/g.test(column.name.charAt(0)) ? '_' : ''; + const { name: _name, modified } = resolveNameCasing(options, column.name); + const name = `${fieldPrefix}${_name}`; - if (defaultValue?.$type === 'DataFieldAttribute') { - return [defaultValue]; - } + const builtinType = provider.getBuiltinType(column.datatype); + + modelFactory.addField((builder) => { + builder.setName(name); + builder.setType((typeBuilder) => { + typeBuilder.setArray(builtinType.isArray); + typeBuilder.setOptional(column.nullable); - return [{ - $type: 'DataFieldAttribute' as const, - $container: this, - decl: { - $refText: 'default', - ref: getAttributeRef('@default', services) - }, - get args() { - return [{ - $type: 'AttributeArg' as const, - $container: this, - name: '', - $resolvedParam: { - name: '', - }, - get value() { - return { ...defaultValue, $container: this } - }, - }] satisfies AttributeArg[] - }, - } satisfies DataFieldAttribute]; + if (builtinType.type != 'Unsupported') { + typeBuilder.setType(builtinType.type); + } else { + typeBuilder.setUnsupported((unsupportedBuilder) => + unsupportedBuilder.setValue((lt) => lt.StringLiteral.setValue(column.datatype)), + ); } - return [ - ...(col.pk ? 
[{ - $type: 'DataFieldAttribute' as const, - $container: this, - args: [], - decl: { - $refText: '@id', - ref: idAttribute, - }, - }] : []) satisfies DataFieldAttribute[], - ...getDefaultAttrs(), - { - $type: 'DataFieldAttribute' as const, - $container: this, - decl: { - $refText: '@map', - ref: fieldMapAttribute, - }, - get args() { - return [{ - $type: 'AttributeArg' as const, - $container: this, - name: '', - $resolvedParam: { - name: '', - }, - get value() { - return { - $type: 'StringLiteral' as const, - $container: this, - value: col.name, - } - }, - } - ] satisfies AttributeArg[] - }, + if (column.options.length > 0) { + const ref = model.declarations.find((d) => isEnum(d) && getDbName(d) === column.datatype) as + | Enum + | undefined; + + if (ref) { + typeBuilder.setReference(ref); } - ] satisfies DataFieldAttribute[] - }, - comments: [], - } - return field - }) + } + + return typeBuilder; + }); + + if (column.default) { + const defaultValuesAttrs = column.default + ? provider.getDefaultValue({ + fieldName: column.name, + defaultValue: column.default, + services, + enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], + }) + : []; + defaultValuesAttrs.forEach(builder.addAttribute); + } + + if (column.pk && !multiPk) { + builder.addAttribute((b) => b.setDecl(idAttribute)); + } - const uniqieColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name) + if (column.unique) + builder.addAttribute((b) => { + b.setDecl(uniqueAttribute); + if (column.unique_name) b.addArg((ab) => ab.StringLiteral.setValue(column.unique_name!), 'map'); + + return b; + }); + if (modified) + builder.addAttribute((ab) => + ab.setDecl(fieldMapAttribute).addArg((ab) => ab.StringLiteral.setValue(column.name), 'name'), + ); + + return builder; + }); + }); + + const uniqieColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name); if (uniqieColumns.length > 0) { - modelTable.attributes.push({ - $type: 'DataModelAttribute' as const, - 
$container: modelTable, - decl: { - $refText: '@unique', - ref: modelUniqueAttribute, - }, - get args() { - return uniqieColumns.map((c) => ({ - $type: 'AttributeArg' as const, - $container: this, - name: '', - $resolvedParam: { - name: '', - }, - get value() { - return { - $type: 'ArrayExpr' as const, - $container: this, - get items() { - return [{ - $container: this, - $type: 'ReferenceExpr' as const, - target: { - $refText: c, - ref: modelTable.fields.find((f) => f.name === c), - }, - args: [], - }] satisfies ReferenceExpr[] - } - } as ArrayExpr - }, - })) satisfies AttributeArg[] - }, - }) - - return relations + modelFactory.addAttribute((builder) => + builder.setDecl(modelUniqueAttribute).addArg((argBuilder) => { + const arrayExpr = argBuilder.ArrayExpr; + uniqieColumns.map((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c)!; + arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); + }); + return arrayExpr; + }), + ); } - return relations + model.declarations.push(modelFactory.node); + + table.indexes.forEach((index) => { + modelFactory.addAttribute((builder) => + builder.setDecl(modelindexAttribute).addArg((argBuilder) => { + const arrayExpr = argBuilder.ArrayExpr; + index.columns.map((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c.name)!; + arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); + }); + return arrayExpr; + }), + ); + }); + + return relations; } -export function syncRelation({ model, relation, services }: { model: Model, relation: Relation, services: ZModelServices }) { - const idAttribute = getAttributeRef('@id', services) - const uniqueAttribute = getAttributeRef('@unique', services) - const relationAttribute = getAttributeRef('@relation', services) - const fieldMapAttribute = getAttributeRef('@map', services) - const tableMapAttribute = getAttributeRef('@@map', services) +export function syncRelation({ + model, + relation, + services, 
+}: { + model: Model; + relation: Relation; + services: ZModelServices; + options: PullOptions; +}) { + const idAttribute = getAttributeRef('@id', services); + const uniqueAttribute = getAttributeRef('@unique', services); + const relationAttribute = getAttributeRef('@relation', services); + const fieldMapAttribute = getAttributeRef('@map', services); + const tableMapAttribute = getAttributeRef('@@map', services); if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) { - throw new Error('Cannot find required attributes in the model.') + throw new Error('Cannot find required attributes in the model.'); } - const sourceModel = model.declarations.find( - (d) => d.$type === 'DataModel' && getDbName(d) === relation.table - ) as DataModel | undefined - if (!sourceModel) return + const sourceModel = model.declarations.find((d) => d.$type === 'DataModel' && getDbName(d) === relation.table) as + | DataModel + | undefined; + if (!sourceModel) return; - const sourceField = sourceModel.fields.find( - (f) => getDbName(f) === relation.column - ) as DataField | undefined - if (!sourceField) return + const sourceField = sourceModel.fields.find((f) => getDbName(f) === relation.column) as DataField | undefined; + if (!sourceField) return; const targetModel = model.declarations.find( - (d) => d.$type === 'DataModel' && getDbName(d) === relation.references.table - ) as DataModel | undefined - if (!targetModel) return + (d) => d.$type === 'DataModel' && getDbName(d) === relation.references.table, + ) as DataModel | undefined; + if (!targetModel) return; - const targetField = targetModel.fields.find( - (f) => getDbName(f) === relation.references.column - ) - if (!targetField) return + const targetField = targetModel.fields.find((f) => getDbName(f) === relation.references.column); + if (!targetField) return; //TODO: Finish relation sync - const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? 
'_' : '' - - sourceModel.fields.push({ - $type: 'DataField' as const, - $container: sourceModel, - name: `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`, - comments: [], - get type() { - return { - $container: this, - $type: 'DataFieldType' as const, - reference: { - ref: targetModel, - $refText: targetModel.name, - }, - optional: relation.nullable, - //TODO - array: relation.type === 'many', - } satisfies DataFieldType - }, - get attributes() { - return [{ - $type: 'DataFieldAttribute' as const, - $container: this, - decl: { - $refText: '@relation', - ref: relationAttribute, - }, - get args() { - return [{ - $type: 'AttributeArg' as const, - $container: this, - name: '', - $resolvedParam: { - name: '', - }, - get value() { - return { - $type: 'StringLiteral' as const, - $container: this, - value: relation.fk_name, - } satisfies StringLiteral - }, - }, - { - $type: 'AttributeArg' as const, - $container: this, - name: 'fields', - $resolvedParam: { - name: 'fields', - }, - get value() { - return { - $type: 'ArrayExpr' as const, - $container: this, - get items() { - return [{ - $container: this, - $type: 'ReferenceExpr' as const, - target: { - ref: sourceField, - $refText: sourceField.name, - }, - args: [], - }] satisfies ReferenceExpr[] - }, - } satisfies ArrayExpr - }, - }, { - $type: 'AttributeArg' as const, - $container: this, - name: 'references', - $resolvedParam: { - name: 'references', - }, - get value() { - return { - $type: 'ArrayExpr' as const, - $container: this, - get items() { - return [{ - $container: this, - $type: 'ReferenceExpr' as const, - target: { - ref: targetField, - $refText: targetField.name, - }, - args: [], - }] satisfies ReferenceExpr[] - }, - } satisfies ArrayExpr - }, - }, { - $type: 'AttributeArg' as const, - $container: this, - name: 'map', - $resolvedParam: { - name: 'map', - }, - get value() { - return { - $type: 'StringLiteral' as const, - $container: this, - value: 
relation.fk_name, - } satisfies StringLiteral - }, - }] satisfies AttributeArg[] - }, - }] satisfies DataFieldAttribute[] - }, - }) - - const oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? '_' : '' - const oppositeFieldName = relation.type === 'one' - ? `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}s` - : `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` - - targetModel.fields.push({ - $type: 'DataField' as const, - $container: targetModel, - name: oppositeFieldName, - get type() { - return { - $container: this, - $type: 'DataFieldType' as const, - reference: { - ref: sourceModel, - $refText: sourceModel.name, - }, - optional: relation.references.type === 'one' && relation.nullable, - array: relation.references.type === 'many', - } satisfies DataFieldType - }, - get attributes() { - return [ - { - $type: 'DataFieldAttribute' as const, - $container: this, - decl: { - $refText: '@relation', - ref: relationAttribute, - }, - get args() { - return [{ - $type: 'AttributeArg' as const, - $container: this, - name: '', - $resolvedParam: { - name: '', - }, - get value() { - return { - $type: 'StringLiteral' as const, - $container: this, - value: relation.fk_name, - } satisfies StringLiteral - }, - }] satisfies AttributeArg[] - } - } - ] satisfies DataFieldAttribute[] - }, - comments: [], - }) -} \ No newline at end of file + const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? 
'_' : '';
+
+    const relationName = `${sourceModel.name}_${relation.column}To${targetModel.name}_${relation.references.column}`;
+
+    const sourceFieldFactory = new DataFieldFactory()
+        .setContainer(sourceModel)
+        .setName(
+            `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`,
+        )
+        .setType((tb) =>
+            tb
+                .setOptional(relation.nullable)
+                .setArray(relation.type === 'many')
+                .setReference(targetModel),
+        )
+        .addAttribute((ab) =>
+            ab
+                .setDecl(relationAttribute)
+                .addArg((ab) => ab.StringLiteral.setValue(relationName))
+                .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(sourceField)), 'fields')
+                .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(targetField)), 'references')
+                .addArg((ab) => ab.StringLiteral.setValue(relation.fk_name), 'map'),
+        );
+
+    sourceModel.fields.push(sourceFieldFactory.node);
+
+    const oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? '_' : '';
+    const oppositeFieldName =
+        relation.type === 'one'
+            ? 
`${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}s` + : `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`; + + const targetFieldFactory = new DataFieldFactory() + .setContainer(targetModel) + .setName(oppositeFieldName) + .setType((tb) => + tb + .setOptional(relation.references.type === 'one') + .setArray(relation.references.type === 'many') + .setReference(sourceModel), + ) + .addAttribute((ab) => ab.setDecl(relationAttribute).addArg((ab) => ab.StringLiteral.setValue(relationName))); + + targetModel.fields.push(targetFieldFactory.node); +} diff --git a/packages/cli/src/actions/pull/provider/index.ts b/packages/cli/src/actions/pull/provider/index.ts index 82ee2ac38..4c9a0fe8d 100644 --- a/packages/cli/src/actions/pull/provider/index.ts +++ b/packages/cli/src/actions/pull/provider/index.ts @@ -1,9 +1,9 @@ -export * from './provider' +export * from './provider'; -import { postgresql } from "./postgresql"; -import { sqlite } from "./sqlite"; +import { postgresql } from './postgresql'; +import { sqlite } from './sqlite'; export const providers = { postgresql, - sqlite -}; \ No newline at end of file + sqlite, +}; diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index be882be6e..07dcee913 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -1,242 +1,176 @@ -import { AttributeArg, DataFieldAttribute, Expression, FunctionDecl, InvocationExpr } from '@zenstackhq/language/ast' -import { Client } from 'pg' -import { getAttributeRef, getDbName } from '../utils' -import type { - IntrospectedEnum, - IntrospectedSchema, - IntrospectedTable, - IntrospectionProvider, -} from './provider' +import { Client } from 'pg'; +import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; +import type { 
IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; +import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; export const postgresql: IntrospectionProvider = { - getBuiltinType(type) { - const t = (type || '').toLowerCase() + getBuiltinType(type) { + const t = (type || '').toLowerCase(); - const isArray = t.startsWith('_') + const isArray = t.startsWith('_'); - switch (t.replace(/^_/, '')) { - // integers - case 'int2': - case 'smallint': - case 'int4': - case 'integer': - return { type: 'Int', isArray } - case 'int8': - case 'bigint': - return { type: 'BigInt', isArray } + switch (t.replace(/^_/, '')) { + // integers + case 'int2': + case 'smallint': + case 'int4': + case 'integer': + return { type: 'Int', isArray }; + case 'int8': + case 'bigint': + return { type: 'BigInt', isArray }; - // decimals and floats - case 'numeric': - case 'decimal': - return { type: 'Decimal', isArray } - case 'float4': - case 'real': - case 'float8': - case 'double precision': - return { type: 'Float', isArray } + // decimals and floats + case 'numeric': + case 'decimal': + return { type: 'Decimal', isArray }; + case 'float4': + case 'real': + case 'float8': + case 'double precision': + return { type: 'Float', isArray }; - // boolean - case 'bool': - case 'boolean': - return { type: 'Boolean', isArray } + // boolean + case 'bool': + case 'boolean': + return { type: 'Boolean', isArray }; - // strings - case 'text': - case 'varchar': - case 'bpchar': - case 'character varying': - case 'character': - return { type: 'String', isArray } + // strings + case 'text': + case 'varchar': + case 'bpchar': + case 'character varying': + case 'character': + return { type: 'String', isArray }; - // uuid - case 'uuid': - return { type: 'String', isArray } + // uuid + case 'uuid': + return { type: 'String', isArray }; - // dates/times - case 'date': - case 'timestamp': - case 'timestamptz': - return { type: 'DateTime', isArray } + // 
dates/times + case 'date': + case 'timestamp': + case 'timestamptz': + return { type: 'DateTime', isArray }; - // binary - case 'bytea': - return { type: 'Bytes', isArray } + // binary + case 'bytea': + return { type: 'Bytes', isArray }; - // json - case 'json': - case 'jsonb': - return { type: 'Json', isArray } + // json + case 'json': + case 'jsonb': + return { type: 'Json', isArray }; - // unsupported or postgres-specific - case 'time': - case 'timetz': - case 'interval': - case 'money': - case 'xml': - case 'bit': - case 'varbit': - case 'cidr': - case 'inet': - case 'macaddr': - case 'macaddr8': - case 'point': - case 'line': - case 'lseg': - case 'box': - case 'path': - case 'polygon': - case 'circle': - case 'tsvector': - case 'tsquery': - case 'jsonpath': - case 'hstore': - case 'oid': - case 'name': - case 'regclass': - case 'regproc': - case 'regprocedure': - case 'regoper': - case 'regoperator': - case 'regtype': - case 'regconfig': - case 'regdictionary': - case 'pg_lsn': - case 'txid_snapshot': - case 'int4range': - case 'int8range': - case 'numrange': - case 'tsrange': - case 'tstzrange': - case 'daterange': - default: - return { type: 'Unsupported' as const, isArray } - } - }, - async introspect(connectionString: string): Promise { - const client = new Client({ connectionString }) - await client.connect() + // unsupported or postgres-specific + case 'time': + case 'timetz': + case 'interval': + case 'money': + case 'xml': + case 'bit': + case 'varbit': + case 'cidr': + case 'inet': + case 'macaddr': + case 'macaddr8': + case 'point': + case 'line': + case 'lseg': + case 'box': + case 'path': + case 'polygon': + case 'circle': + case 'tsvector': + case 'tsquery': + case 'jsonpath': + case 'hstore': + case 'oid': + case 'name': + case 'regclass': + case 'regproc': + case 'regprocedure': + case 'regoper': + case 'regoperator': + case 'regtype': + case 'regconfig': + case 'regdictionary': + case 'pg_lsn': + case 'txid_snapshot': + case 'int4range': + 
case 'int8range': + case 'numrange': + case 'tsrange': + case 'tstzrange': + case 'daterange': + default: + return { type: 'Unsupported' as const, isArray }; + } + }, + async introspect(connectionString: string): Promise { + const client = new Client({ connectionString }); + await client.connect(); - const { rows: tables } = await client.query( - tableIntrospectionQuery - ) - const { rows: enums } = await client.query( - enumIntrospectionQuery - ) + const { rows: tables } = await client.query(tableIntrospectionQuery); + const { rows: enums } = await client.query(enumIntrospectionQuery); - return { - enums, - tables, - } - }, - getDefaultValue({ defaultValue, container: $container, fieldName, services, enums }) { - // Handle common cases - console.log(defaultValue); + return { + enums, + tables, + }; + }, + getDefaultValue({ defaultValue, fieldName, services, enums }) { + const val = defaultValue.trim(); + const factories: DataFieldAttributeFactory[] = []; - const val = defaultValue.trim() + const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef('@default', services)!); - if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { - const attrs: DataFieldAttribute[] = []; + if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { + factories.push(defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)!))); - attrs.push({ - $type: "DataFieldAttribute" as const, - $container: $container as any, - decl: { - $refText: '@default', - ref: getAttributeRef('@default', services) - }, - get args(): AttributeArg[] { - return [{ - $type: 'AttributeArg' as const, - $container: this as any, - get value(): Expression { - return { - $type: 'InvocationExpr' as const, - $container: this, - function: { - $refText: 'now', - ref: services.shared.workspace.IndexManager.allElements(FunctionDecl).find((f) => (f.node as FunctionDecl)?.name === 'now')?.node as FunctionDecl - }, - args: [], - } satisfies InvocationExpr + if (fieldName.toLowerCase() 
=== 'updatedat' || fieldName.toLowerCase() === 'updated_at') { + factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services)!)); } - }] + return factories; } - }); - - if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') { - // for updatedAt, use @updatedAt attribute - attrs.push({ - $type: "DataFieldAttribute" as const, - $container: $container as any, - decl: { - $refText: 'updatedAt', - ref: getAttributeRef('@updatedAt', services) - }, - args: [], - }); - } - - return attrs.length === 1 ? attrs[0] : attrs; - } - if (val.includes('::')) { - const [enumValue, enumName] = val.replace(/'|"/g, '').split('::').map((s) => s.trim()) as [string, string] - const enumDef = enums.find((e) => getDbName(e) === enumName) - if (!enumDef) { - throw new Error(`Enum type ${enumName} not found for default value ${defaultValue}`) - } - const enumField = enumDef.fields.find((v) => getDbName(v) === enumValue) - if (!enumField) { - throw new Error(`Enum value ${enumValue} not found in enum ${enumName} for default value ${defaultValue}`) - } - - return { - $type: 'ReferenceExpr' as const, - $container: $container as any, - target: { - $refText: enumField!.name, - ref: enumField, - }, - args: [], - } - } + if (val.includes('::')) { + const [enumValue, enumName] = val + .replace(/'|"/g, '') + .split('::') + .map((s) => s.trim()) as [string, string]; + const enumDef = enums.find((e) => getDbName(e) === enumName); + if (!enumDef) { + return []; + } + const enumField = enumDef.fields.find((v) => getDbName(v) === enumValue); + if (!enumField) { + throw new Error( + `Enum value ${enumValue} not found in enum ${enumName} for default value ${defaultValue}`, + ); + } - if (val === 'true' || val === 'false') { - return { - $type: 'BooleanLiteral' as const, - $container: $container as any, - value: val === 'true', - } - } + factories.push(defaultAttr.addArg((ab) => ab.ReferenceExpr.setTarget(enumField))); + return factories; + } 
- if (/^\d+$/.test(val)) { - return { - $container: $container as any, - $type: 'NumberLiteral' as const, - value: val, - } - } + if (val === 'true' || val === 'false') { + factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(val === 'true'))); + return factories; + } - if (/^-?\d+(\.\d+)?$/.test(val)) { - // float - return { - $container: $container as any, - $type: 'NumberLiteral' as const, - value: val, - } - } + if (/^\d+$/.test(val) || /^-?\d+(\.\d+)?$/.test(val)) { + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); + return factories; + } - if (val.startsWith("'") && val.endsWith("'")) { - // string - return { - $container: $container as any, - $type: 'StringLiteral' as const, - value: val.slice(1, -1).replace(/''/g, "'"), - } - } - return undefined - }, -} + if (val.startsWith("'") && val.endsWith("'")) { + factories.push(defaultAttr.addArg((ab) => ab.StringLiteral.setValue(val.slice(1, -1).replace(/''/g, "'")))); + return factories; + } + return []; + }, +}; const enumIntrospectionQuery = ` SELECT @@ -247,7 +181,7 @@ FROM pg_type t JOIN pg_enum e ON t.oid = e.enumtypid JOIN pg_namespace n ON n.oid = t.typnamespace GROUP BY schema_name, enum_type -ORDER BY schema_name, enum_type;` +ORDER BY schema_name, enum_type;`; const tableIntrospectionQuery = ` SELECT @@ -308,6 +242,29 @@ SELECT AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[]) ) ) AS "unique", + ( + SELECT COALESCE( + ( + SELECT "u_con"."conname" + FROM "pg_catalog"."pg_constraint" AS "u_con" + WHERE "u_con"."contype" = 'u' + AND "u_con"."conrelid" = "cls"."oid" + AND array_length("u_con"."conkey", 1) = 1 + AND "att"."attnum" = ANY ("u_con"."conkey") + LIMIT 1 + ), + ( + SELECT "u_idx_cls"."relname" + FROM "pg_catalog"."pg_index" AS "u_idx" + JOIN "pg_catalog"."pg_class" AS "u_idx_cls" ON "u_idx"."indexrelid" = "u_idx_cls"."oid" + WHERE "u_idx"."indrelid" = "cls"."oid" + AND "u_idx"."indisunique" = TRUE + AND "u_idx"."indnkeyatts" = 1 + AND "att"."attnum" = 
ANY ("u_idx"."indkey"::int2[]) + LIMIT 1 + ) + ) + ) AS "unique_name", "att"."attgenerated" != '' AS "computed", pg_get_expr("def"."adbin", "def"."adrelid") AS "default", "att"."attnotnull" != TRUE AS "nullable", @@ -339,7 +296,41 @@ SELECT AND "att"."attisdropped" != TRUE ORDER BY "att"."attnum" ) AS agg - ) AS "columns" + ) AS "columns", + ( + SELECT coalesce(json_agg(agg), '[]') + FROM ( + SELECT + "idx_cls"."relname" AS "name", + "am"."amname" AS "method", + "idx"."indisunique" AS "unique", + "idx"."indisprimary" AS "primary", + "idx"."indisvalid" AS "valid", + "idx"."indisready" AS "ready", + ("idx"."indpred" IS NOT NULL) AS "partial", + pg_get_expr("idx"."indpred", "idx"."indrelid") AS "predicate", + ( + SELECT json_agg( + json_build_object( + 'name', COALESCE("att"."attname", pg_get_indexdef("idx"."indexrelid", "s"."i", true)), + 'expression', CASE WHEN "att"."attname" IS NULL THEN pg_get_indexdef("idx"."indexrelid", "s"."i", true) ELSE NULL END, + 'order', CASE ((( "idx"."indoption"::int2[] )["s"."i"] & 1)) WHEN 1 THEN 'DESC' ELSE 'ASC' END, + 'nulls', CASE (((( "idx"."indoption"::int2[] )["s"."i"] >> 1) & 1)) WHEN 1 THEN 'NULLS FIRST' ELSE 'NULLS LAST' END + ) + ORDER BY "s"."i" + ) + FROM generate_subscripts("idx"."indkey"::int2[], 1) AS "s"("i") + LEFT JOIN "pg_catalog"."pg_attribute" AS "att" + ON "att"."attrelid" = "cls"."oid" + AND "att"."attnum" = ("idx"."indkey"::int2[])["s"."i"] + ) AS "columns" + FROM "pg_catalog"."pg_index" AS "idx" + JOIN "pg_catalog"."pg_class" AS "idx_cls" ON "idx"."indexrelid" = "idx_cls"."oid" + JOIN "pg_catalog"."pg_am" AS "am" ON "idx_cls"."relam" = "am"."oid" + WHERE "idx"."indrelid" = "cls"."oid" + ORDER BY "idx_cls"."relname" + ) AS agg + ) AS "indexes" FROM "pg_catalog"."pg_class" AS "cls" INNER JOIN "pg_catalog"."pg_namespace" AS "ns" ON "cls"."relnamespace" = "ns"."oid" WHERE @@ -348,4 +339,5 @@ WHERE AND "cls"."relkind" IN ('r', 'v') AND "cls"."relname" !~ '^pg_' AND "cls"."relname" !~ '_prisma_migrations' -` + 
ORDER BY "ns"."nspname", "cls"."relname" ASC; +`; diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index b6f76b98e..c03c39fcd 100644 --- a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -1,49 +1,71 @@ -import type { BuiltinType, DataFieldAttribute, Enum, InvocationExpr, LiteralExpr, ReferenceExpr } from '@zenstackhq/language/ast' -import type { AstNode } from '../../../../../language/dist/ast.cjs'; import type { ZModelServices } from '@zenstackhq/language'; +import type { BuiltinType, Enum } from '@zenstackhq/language/ast'; +import type { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; -export type Cascade = "NO ACTION" | "RESTRICT" | "CASCADE" | "SET NULL" | "SET DEFAULT" | null; +export type Cascade = 'NO ACTION' | 'RESTRICT' | 'CASCADE' | 'SET NULL' | 'SET DEFAULT' | null; export interface IntrospectedTable { - schema: string - name: string - type: 'table' | 'view' - definition: string | null - columns: { - name: string - datatype: string - datatype_schema: string - foreign_key_schema: string | null - foreign_key_table: string | null - foreign_key_column: string | null - foreign_key_name: string | null - foreign_key_on_update: Cascade - foreign_key_on_delete: Cascade - pk: boolean - computed: boolean - nullable: boolean - options: string[] - unique: boolean - default: string | null - }[] + schema: string; + name: string; + type: 'table' | 'view'; + definition: string | null; + columns: { + name: string; + datatype: string; + datatype_schema: string; + foreign_key_schema: string | null; + foreign_key_table: string | null; + foreign_key_column: string | null; + foreign_key_name: string | null; + foreign_key_on_update: Cascade; + foreign_key_on_delete: Cascade; + pk: boolean; + computed: boolean; + nullable: boolean; + options: string[]; + unique: boolean; + unique_name: string | null; + default: string | null; + }[]; 
+ indexes: { + name: string; + method: string | null; + unique: boolean; + primary: boolean; + valid: boolean; + ready: boolean; + partial: boolean; + predicate: string | null; + columns: { + name: string; + expression: string | null; + order: 'ASC' | 'DESC' | null; + nulls: string | null; + }[]; + }[]; } export type IntrospectedEnum = { - schema_name: string - enum_type: string - values: string[] -} + schema_name: string; + enum_type: string; + values: string[]; +}; export type IntrospectedSchema = { - tables: IntrospectedTable[] - enums: IntrospectedEnum[] -} + tables: IntrospectedTable[]; + enums: IntrospectedEnum[]; +}; export interface IntrospectionProvider { - introspect(connectionString: string): Promise - getBuiltinType(type: string): { - type: BuiltinType | 'Unsupported' - isArray: boolean - } - getDefaultValue(args: { fieldName: string, defaultValue: string, container: T, services: ZModelServices, enums: Enum[] }): LiteralExpr | InvocationExpr | DataFieldAttribute | DataFieldAttribute[] | ReferenceExpr | undefined + introspect(connectionString: string): Promise; + getBuiltinType(type: string): { + type: BuiltinType | 'Unsupported'; + isArray: boolean; + }; + getDefaultValue(args: { + fieldName: string; + defaultValue: string; + services: ZModelServices; + enums: Enum[]; + }): DataFieldAttributeFactory[]; } diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index 3feaa5abc..160a3096e 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -1,14 +1,14 @@ -import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider' +import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; // Note: We dynamically import better-sqlite3 inside the async function to avoid // requiring it at module load time for environments that don't use SQLite. 
export const sqlite: IntrospectionProvider = { getBuiltinType(type) { - const t = (type || '').toLowerCase().trim() + const t = (type || '').toLowerCase().trim(); // SQLite has no array types - const isArray = false + const isArray = false; switch (t) { // integers @@ -17,24 +17,24 @@ export const sqlite: IntrospectionProvider = { case 'tinyint': case 'smallint': case 'mediumint': - return { type: 'Int', isArray } + return { type: 'Int', isArray }; case 'bigint': - return { type: 'BigInt', isArray } + return { type: 'BigInt', isArray }; // decimals and floats case 'numeric': case 'decimal': - return { type: 'Decimal', isArray } + return { type: 'Decimal', isArray }; case 'real': case 'double': case 'double precision': case 'float': - return { type: 'Float', isArray } + return { type: 'Float', isArray }; // boolean (SQLite stores as integer 0/1, but commonly typed as BOOLEAN) case 'bool': case 'boolean': - return { type: 'Boolean', isArray } + return { type: 'Boolean', isArray }; // strings case 'text': @@ -44,102 +44,128 @@ export const sqlite: IntrospectionProvider = { case 'character': case 'clob': case 'uuid': // often stored as TEXT - return { type: 'String', isArray } + return { type: 'String', isArray }; // dates/times (stored as TEXT/REAL/INTEGER, but commonly typed as DATE/DATETIME) case 'date': case 'datetime': - return { type: 'DateTime', isArray } + return { type: 'DateTime', isArray }; // binary case 'blob': - return { type: 'Bytes', isArray } + return { type: 'Bytes', isArray }; // json (not a native type, but commonly used) case 'json': - return { type: 'Json', isArray } + return { type: 'Json', isArray }; default: { // Fallbacks based on SQLite type affinity rules - if (t.includes('int')) return { type: 'Int', isArray } - if (t.includes('char') || t.includes('clob') || t.includes('text')) return { type: 'String', isArray } - if (t.includes('blob')) return { type: 'Bytes', isArray } - if (t.includes('real') || t.includes('floa') || t.includes('doub')) 
return { type: 'Float', isArray } - if (t.includes('dec') || t.includes('num')) return { type: 'Decimal', isArray } - return { type: 'Unsupported' as const, isArray } + if (t.includes('int')) return { type: 'Int', isArray }; + if (t.includes('char') || t.includes('clob') || t.includes('text')) return { type: 'String', isArray }; + if (t.includes('blob')) return { type: 'Bytes', isArray }; + if (t.includes('real') || t.includes('floa') || t.includes('doub')) return { type: 'Float', isArray }; + if (t.includes('dec') || t.includes('num')) return { type: 'Decimal', isArray }; + return { type: 'Unsupported' as const, isArray }; } } }, async introspect(connectionString: string): Promise { - const SQLite = (await import('better-sqlite3')).default - const db = new SQLite(connectionString, { readonly: true }) + const SQLite = (await import('better-sqlite3')).default; + const db = new SQLite(connectionString, { readonly: true }); try { const all = (sql: string): T[] => { - const stmt: any = db.prepare(sql) - return stmt.all() as T[] - } + const stmt: any = db.prepare(sql); + return stmt.all() as T[]; + }; // List user tables and views (exclude internal sqlite_*) const tablesRaw = all<{ name: string; type: 'table' | 'view'; definition: string | null }>( - "SELECT name, type, sql AS definition FROM sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name" - ) + "SELECT name, type, sql AS definition FROM sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name", + ); - const tables: IntrospectedTable[] = [] + const tables: IntrospectedTable[] = []; for (const t of tablesRaw) { - const tableName = t.name - const schema = 'main' + const tableName = t.name; + const schema = 'main'; // Columns with extended info; filter out hidden=1 (internal/rowid), mark computed if hidden=2 (generated) const columnsInfo = all<{ - cid: number - name: string - type: string - notnull: number - dflt_value: string | null - pk: number - 
hidden?: number - }>(`PRAGMA table_xinfo('${tableName.replace(/'/g, "''")}')`) + cid: number; + name: string; + type: string; + notnull: number; + dflt_value: string | null; + pk: number; + hidden?: number; + }>(`PRAGMA table_xinfo('${tableName.replace(/'/g, "''")}')`); + + // Index list (used for both unique inference and index collection) + const tableNameEsc = tableName.replace(/'/g, "''"); + const idxList = all<{ + seq: number; + name: string; + unique: number; + origin: string; + partial: number; + }>(`PRAGMA index_list('${tableNameEsc}')`); // Unique columns detection via unique indexes with single column - const uniqueIndexRows = all<{ name: string; unique: number }>( - `PRAGMA index_list('${tableName.replace(/'/g, "''")}')` - ).filter((r) => r.unique === 1) - - const uniqueSingleColumn = new Set() + const uniqueSingleColumn = new Set(); + const uniqueIndexRows = idxList.filter((r) => r.unique === 1); for (const idx of uniqueIndexRows) { - const idxCols = all<{ name: string }>( - `PRAGMA index_info('${idx.name.replace(/'/g, "''")}')` - ) + const idxCols = all<{ name: string }>(`PRAGMA index_info('${idx.name.replace(/'/g, "''")}')`); if (idxCols.length === 1 && idxCols[0]?.name) { - uniqueSingleColumn.add(idxCols[0].name) + uniqueSingleColumn.add(idxCols[0].name); } } + // Indexes details + const indexes: IntrospectedTable['indexes'] = idxList.map((idx) => { + const idxCols = all<{ name: string }>(`PRAGMA index_info('${idx.name.replace(/'/g, "''")}')`); + return { + name: idx.name, + method: null, // SQLite does not expose index method + unique: idx.unique === 1, + primary: false, // SQLite does not expose this directly; handled via pk in columns + valid: true, // SQLite does not expose index validity + ready: true, // SQLite does not expose index readiness + partial: idx.partial === 1, + predicate: null, // SQLite does not expose index predicate + columns: idxCols.map((col) => ({ + name: col.name, + expression: null, + order: null, + nulls: null, + })), + }; 
+ }); + // Foreign keys mapping by column name const fkRows = all<{ - id: number - seq: number - table: string - from: string - to: string | null - on_update: any - on_delete: any - }>(`PRAGMA foreign_key_list('${tableName.replace(/'/g, "''")}')`) + id: number; + seq: number; + table: string; + from: string; + to: string | null; + on_update: any; + on_delete: any; + }>(`PRAGMA foreign_key_list('${tableName.replace(/'/g, "''")}')`); const fkByColumn = new Map< string, { - foreign_key_schema: string | null - foreign_key_table: string | null - foreign_key_column: string | null - foreign_key_name: string | null - foreign_key_on_update: IntrospectedTable['columns'][number]['foreign_key_on_update'] - foreign_key_on_delete: IntrospectedTable['columns'][number]['foreign_key_on_delete'] + foreign_key_schema: string | null; + foreign_key_table: string | null; + foreign_key_column: string | null; + foreign_key_name: string | null; + foreign_key_on_update: IntrospectedTable['columns'][number]['foreign_key_on_update']; + foreign_key_on_delete: IntrospectedTable['columns'][number]['foreign_key_on_delete']; } - >() + >(); for (const fk of fkRows) { fkByColumn.set(fk.from, { @@ -149,16 +175,16 @@ export const sqlite: IntrospectionProvider = { foreign_key_name: null, foreign_key_on_update: (fk.on_update as any) ?? null, foreign_key_on_delete: (fk.on_delete as any) ?? null, - }) + }); } - const columns: IntrospectedTable['columns'] = [] + const columns: IntrospectedTable['columns'] = []; for (const c of columnsInfo) { // hidden: 1 (hidden/internal) -> skip; 2 (generated) -> mark computed - const hidden = c.hidden ?? 0 - if (hidden === 1) continue + const hidden = c.hidden ?? 
0; + if (hidden === 1) continue; - const fk = fkByColumn.get(c.name) + const fk = fkByColumn.get(c.name); columns.push({ name: c.name, @@ -176,21 +202,22 @@ export const sqlite: IntrospectionProvider = { default: c.dflt_value, options: [], unique: uniqueSingleColumn.has(c.name), - }) + unique_name: uniqueSingleColumn.has(c.name) ? `${tableName}_${c.name}_unique` : null, + }); } - tables.push({ schema, name: tableName, columns, type: t.type, definition: t.definition }) + tables.push({ schema, name: tableName, columns, type: t.type, definition: t.definition, indexes }); } - const enums: IntrospectedEnum[] = [] // SQLite doesn't support enums + const enums: IntrospectedEnum[] = []; // SQLite doesn't support enums - return { tables, enums } + return { tables, enums }; } finally { - db.close() + db.close(); } }, getDefaultValue(_args) { - throw new Error('Not implemented yet for SQLite') - } -} + throw new Error('Not implemented yet for SQLite'); + }, +}; diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index defd0f307..234629740 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -1,92 +1,95 @@ -import type { ZModelServices } from '@zenstackhq/language' +import type { ZModelServices } from '@zenstackhq/language'; import { - AbstractDeclaration, - DataField, - DataModel, - Enum, - EnumField, - isInvocationExpr, - type Attribute, - type Model -} from '@zenstackhq/language/ast' -import { getStringLiteral } from '@zenstackhq/language/utils' -import type { - DataSourceProviderType -} from '@zenstackhq/sdk/schema' -import type { Reference } from 'langium' + AbstractDeclaration, + DataField, + DataModel, + Enum, + EnumField, + FunctionDecl, + isInvocationExpr, + type Attribute, + type Model, +} from '@zenstackhq/language/ast'; +import { getStringLiteral } from '@zenstackhq/language/utils'; +import type { DataSourceProviderType } from '@zenstackhq/sdk/schema'; +import type { Reference } 
from 'langium'; export function getAttribute(model: Model, attrName: string) { - const references = model.$document! - .references as Reference[] - return references.find( - (a) => a.ref!.$type === 'Attribute' && a.ref!.name === attrName - )?.ref as Attribute | undefined + const references = model.$document!.references as Reference[]; + return references.find((a) => a.ref!.$type === 'Attribute' && a.ref!.name === attrName)?.ref as + | Attribute + | undefined; } export function getDatasource(model: Model) { - const datasource = model.declarations.find((d) => d.$type === 'DataSource') - if (!datasource) { - throw new Error('No datasource declaration found in the schema.') - } + const datasource = model.declarations.find((d) => d.$type === 'DataSource'); + if (!datasource) { + throw new Error('No datasource declaration found in the schema.'); + } - const urlField = datasource.fields.find((f) => f.name === 'url')! + const urlField = datasource.fields.find((f) => f.name === 'url')!; - let url = getStringLiteral(urlField.value) + let url = getStringLiteral(urlField.value); - if (!url && isInvocationExpr(urlField.value)) { - const envName = getStringLiteral(urlField.value.args[0]?.value) - if (!envName) { - throw new Error('The url field must be a string literal or an env().') - } - if (!process.env[envName]) { - throw new Error( - `Environment variable ${envName} is not set, please set it to the database connection string.` - ) + if (!url && isInvocationExpr(urlField.value)) { + const envName = getStringLiteral(urlField.value.args[0]?.value); + if (!envName) { + throw new Error('The url field must be a string literal or an env().'); + } + if (!process.env[envName]) { + throw new Error( + `Environment variable ${envName} is not set, please set it to the database connection string.`, + ); + } + url = process.env[envName]; } - url = process.env[envName] - } - if (!url) { - throw new Error('The url field must be a string literal or an env().') - } + if (!url) { + throw new 
Error('The url field must be a string literal or an env().'); + } - return { - name: datasource.name, - provider: getStringLiteral( - datasource.fields.find((f) => f.name === 'provider')?.value - ) as DataSourceProviderType, - url, - } + return { + name: datasource.name, + provider: getStringLiteral( + datasource.fields.find((f) => f.name === 'provider')?.value, + ) as DataSourceProviderType, + url, + }; } -export function getDbName( - decl: AbstractDeclaration | DataField | EnumField -): string { - if (!('attributes' in decl)) return decl.name - const nameAttr = decl.attributes.find( - (a) => a.decl.ref?.name === '@@map' || a.decl.ref?.name === '@map' - ) - if (!nameAttr) return decl.name - const attrValue = nameAttr.args[0]?.value +export function getDbName(decl: AbstractDeclaration | DataField | EnumField): string { + if (!('attributes' in decl)) return decl.name; + const nameAttr = decl.attributes.find((a) => a.decl.ref?.name === '@@map' || a.decl.ref?.name === '@map'); + if (!nameAttr) return decl.name; + const attrValue = nameAttr.args[0]?.value; - if (attrValue?.$type !== 'StringLiteral') return decl.name + if (attrValue?.$type !== 'StringLiteral') return decl.name; - return attrValue.value + return attrValue.value; } - -export function getDeclarationRef(type: T["$type"], name: string, services: ZModelServices) { - return services.shared.workspace.IndexManager.allElements(type).find((m) => m.node && getDbName(m.node as T) === name)?.node as T | undefined +export function getDeclarationRef( + type: T['$type'], + name: string, + services: ZModelServices, +) { + return services.shared.workspace.IndexManager.allElements(type).find( + (m) => m.node && getDbName(m.node as T) === name, + )?.node as T | undefined; } export function getEnumRef(name: string, services: ZModelServices) { - return getDeclarationRef('Enum', name, services); + return getDeclarationRef('Enum', name, services); } export function getModelRef(name: string, services: ZModelServices) { - return 
getDeclarationRef('DataModel', name, services); + return getDeclarationRef('DataModel', name, services); } export function getAttributeRef(name: string, services: ZModelServices) { - return getDeclarationRef('Attribute', name, services); -} \ No newline at end of file + return getDeclarationRef('Attribute', name, services); +} + +export function getFunctionRef(name: string, services: ZModelServices) { + return getDeclarationRef('FunctionDecl', name, services); +} diff --git a/packages/language/package.json b/packages/language/package.json index f5bef4ac3..5d809e7a1 100644 --- a/packages/language/package.json +++ b/packages/language/package.json @@ -49,6 +49,16 @@ "default": "./dist/utils.cjs" } }, + "./factory": { + "import": { + "types": "./dist/factory.d.ts", + "default": "./dist/factory.js" + }, + "require": { + "types": "./dist/factory.d.cts", + "default": "./dist/factory.cjs" + } + }, "./package.json": { "import": "./package.json", "require": "./package.json" diff --git a/packages/language/src/factory/attribute.ts b/packages/language/src/factory/attribute.ts new file mode 100644 index 000000000..a42c5e50e --- /dev/null +++ b/packages/language/src/factory/attribute.ts @@ -0,0 +1,275 @@ +import { AstFactory } from '.'; +import { + Attribute, + AttributeArg, + AttributeParam, + AttributeParamType, + DataFieldAttribute, + DataModelAttribute, + Expression, + InternalAttribute, + TypeDeclaration, + type Reference, + type RegularID, +} from '../ast'; +import { ExpressionBuilder } from './expression'; + +export class DataFieldAttributeFactory extends AstFactory { + args: AttributeArgFactory[] = []; + decl?: Reference; + constructor() { + super({ type: DataFieldAttribute }); + } + setDecl(decl: Attribute) { + this.decl = { + $refText: decl?.name ?? 
'', + ref: decl!, + }; + this.update({ + decl: this.decl, + }); + return this; + } + addArg(builder: (b: ExpressionBuilder) => AstFactory, name?: string) { + const factory = new AttributeArgFactory().setValue(builder); + if (name) { + factory.setName(name); + } + this.args.push(factory); + this.update({ + args: this.args, + }); + return this; + } +} + +export class DataModelAttributeFactory extends AstFactory { + args: AttributeArgFactory[] = []; + decl?: Reference; + constructor() { + super({ type: DataModelAttribute }); + } + setDecl(decl: Attribute) { + this.decl = { + $refText: decl?.name ?? '', + ref: decl!, + }; + this.update({ + decl: this.decl, + }); + return this; + } + addArg(builder: (b: ExpressionBuilder) => AstFactory, name?: string) { + const factory = new AttributeArgFactory().setValue(builder); + if (name) { + factory.setName(name); + } + this.args.push(factory); + this.update({ + args: this.args, + }); + return this; + } +} + +export class AttributeArgFactory extends AstFactory { + name?: RegularID = ''; + value?: AstFactory; + + constructor() { + super({ type: AttributeArg }); + } + + setName(name: RegularID) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } + + setValue(builder: (b: ExpressionBuilder) => AstFactory) { + this.value = builder(ExpressionBuilder()); + this.update({ + value: this.value, + }); + return this; + } +} + +export class InternalAttributeFactory extends AstFactory { + decl?: Reference; + args: AttributeArgFactory[] = []; + + constructor() { + super({ type: InternalAttribute }); + } + + setDecl(decl: Attribute) { + this.decl = { + $refText: decl.name, + ref: decl, + }; + this.update({ + decl: this.decl, + }); + return this; + } + + addArg(builder: (b: ExpressionBuilder) => AstFactory, name?: string) { + const factory = new AttributeArgFactory().setValue(builder); + if (name) { + factory.setName(name); + } + this.args.push(factory); + this.update({ + args: this.args, + }); + return this; + } +} + 
+export class AttributeParamFactory extends AstFactory { + attributes: InternalAttributeFactory[] = []; + comments: string[] = []; + default?: boolean; + name?: RegularID; + type?: AttributeParamTypeFactory; + + constructor() { + super({ + type: AttributeParam, + node: { + comments: [], + attributes: [], + }, + }); + } + + addAttribute(builder: (b: InternalAttributeFactory) => InternalAttributeFactory) { + this.attributes.push(builder(new InternalAttributeFactory())); + this.update({ + attributes: this.attributes, + }); + return this; + } + + setComments(comments: string[]) { + this.comments = comments; + this.update({ + comments: this.comments, + }); + return this; + } + + setDefault(defaultValue: boolean) { + this.default = defaultValue; + this.update({ + default: this.default, + }); + return this; + } + + setName(name: string) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } + + setType(builder: (b: AttributeParamTypeFactory) => AttributeParamTypeFactory) { + this.type = builder(new AttributeParamTypeFactory()); + this.update({ + type: this.type, + }); + return this; + } +} + +export class AttributeParamTypeFactory extends AstFactory { + array?: boolean; + optional?: boolean; + reference?: Reference; + type?: AttributeParamType['type']; + constructor() { + super({ type: AttributeParamType }); + } + setArray(array: boolean) { + this.array = array; + this.update({ + array: this.array, + }); + return this; + } + + setOptional(optional: boolean) { + this.optional = optional; + this.update({ + optional: this.optional, + }); + return this; + } + + setReference(reference: TypeDeclaration) { + this.reference = { + $refText: reference.name, + ref: reference, + }; + this.update({ + reference: this.reference, + }); + return this; + } + + setType(type: AttributeParamType['type']) { + this.type = type; + this.update({ + type: this.type, + }); + return this; + } +} + +export class AttributeFactory extends AstFactory { + name?: string; + 
comments: string[] = []; + attributes: InternalAttributeFactory[] = []; + params: AttributeParamFactory[] = []; + + constructor() { + super({ type: Attribute, node: { comments: [], attributes: [], params: [] } }); + } + + setName(name: string) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } + + setComments(comments: string[]) { + this.comments = comments; + this.update({ + comments: this.comments, + }); + return this; + } + + addAttribute(builder: (b: InternalAttributeFactory) => InternalAttributeFactory) { + this.attributes.push(builder(new InternalAttributeFactory())); + this.update({ + attributes: this.attributes, + }); + return this; + } + + addParam(builder: (b: AttributeParamFactory) => AttributeParamFactory) { + this.params.push(builder(new AttributeParamFactory())); + this.update({ + params: this.params, + }); + return this; + } +} diff --git a/packages/language/src/factory/declaration.ts b/packages/language/src/factory/declaration.ts new file mode 100644 index 000000000..1f514982b --- /dev/null +++ b/packages/language/src/factory/declaration.ts @@ -0,0 +1,363 @@ +import { AstFactory } from '.'; +import { AbstractDeclaration, type Reference } from '../ast'; +import { + type BuiltinType, + DataField, + DataFieldType, + DataModel, + Enum, + EnumField, + LiteralExpr, + Model, + ModelImport, + type RegularID, + type RegularIDWithTypeNames, + TypeDeclaration, + type TypeDef, + UnsupportedFieldType, +} from '../generated/ast'; +import { AttributeFactory, DataFieldAttributeFactory, DataModelAttributeFactory } from './attribute'; +import { ExpressionBuilder } from './expression'; +export const DeclarationBuilder = () => + ({ + get Attribute() { + return new AttributeFactory(); + }, + get DataModel() { + return new DataModelFactory(); + }, + get DataSource(): any { + throw new Error('DataSource is not implemented'); + }, + get Enum() { + return new EnumFactory(); + }, + get FunctionDecl(): any { + throw new Error('FunctionDecl is 
not implemented'); + }, + get GeneratorDecl(): any { + throw new Error('GeneratorDecl is not implemented'); + }, + get Plugin(): any { + throw new Error('Plugin is not implemented'); + }, + get Procedure(): any { + throw new Error('Procedure is not implemented'); + }, + get TypeDef(): any { + throw new Error('TypeDef is not implemented'); + }, + }) satisfies DeclarationBuilderType; +type DeclarationBuilderType = { + [K in T['$type']]: AstFactory>; +}; +type DeclarationBuilderMap = ReturnType; + +export type DeclarationBuilder = Pick< + DeclarationBuilderMap, + Extract +>; + +export class DataModelFactory extends AstFactory { + attributes: DataModelAttributeFactory[] = []; + baseModel?: Reference; + comments: string[] = []; + fields: DataFieldFactory[] = []; + isView?: boolean; + mixins: Reference[] = []; + name?: RegularID; + + constructor() { + super({ + type: DataModel, + node: { + attributes: [], + comments: [], + fields: [], + mixins: [], + }, + }); + } + + addAttribute(builder: (attr: DataModelAttributeFactory) => DataModelAttributeFactory) { + this.attributes.push(builder(new DataModelAttributeFactory())); + this.update({ + attributes: this.attributes, + }); + return this; + } + + setBaseModel(model: Reference) { + this.baseModel = model; + this.update({ + baseModel: this.baseModel, + }); + return this; + } + + setComments(comments: string[]) { + this.comments = comments; + this.update({ + comments: this.comments, + }); + return this; + } + + addField(builder: (field: DataFieldFactory) => DataFieldFactory) { + this.fields.push(builder(new DataFieldFactory())); + this.update({ + fields: this.fields, + }); + return this; + } + + setIsView(isView: boolean) { + this.isView = isView; + this.update({ + isView: this.isView, + }); + return this; + } + + addMixin(mixin: Reference) { + this.mixins.push(mixin); + this.update({ + mixins: this.mixins, + }); + return this; + } + + setName(name: string) { + this.name = name; + this.update({ + name: this.name, + }); + return 
this; + } +} + +export class DataFieldFactory extends AstFactory { + attributes: DataFieldAttributeFactory[] = []; + comments: string[] = []; + name?: string; + type?: DataFieldTypeFactory; + + constructor() { + super({ type: DataField, node: { attributes: [], comments: [] } }); + } + + addAttribute( + builder: ((attr: DataFieldAttributeFactory) => DataFieldAttributeFactory) | DataFieldAttributeFactory, + ) { + if (builder instanceof DataFieldAttributeFactory) { + builder.setContainer(this.node); + this.attributes.push(builder); + } else { + this.attributes.push(builder(new DataFieldAttributeFactory())); + } + this.update({ + attributes: this.attributes, + }); + return this; + } + + setComments(comments: string[]) { + this.comments = comments; + this.update({ + comments: this.comments, + }); + return this; + } + + setName(name: string) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } + + setType(builder: (type: DataFieldTypeFactory) => DataFieldTypeFactory) { + this.type = builder(new DataFieldTypeFactory()); + this.update({ + type: this.type, + }); + return this; + } +} + +export class DataFieldTypeFactory extends AstFactory { + array?: boolean; + optional?: boolean; + reference?: Reference; + type?: BuiltinType; + unsupported?: UnsupportedFieldTypeFactory; + + constructor() { + super({ type: DataFieldType }); + } + + setArray(array: boolean) { + this.array = array; + this.update({ + array: this.array, + }); + return this; + } + + setOptional(optional: boolean) { + this.optional = optional; + this.update({ + optional: this.optional, + }); + return this; + } + + setReference(reference: TypeDeclaration) { + this.reference = { + $refText: reference.name, + ref: reference, + }; + this.update({ + reference: this.reference, + }); + return this; + } + + setType(type: BuiltinType) { + this.type = type; + this.update({ + type: this.type, + }); + return this; + } + + setUnsupported(builder: (a: UnsupportedFieldTypeFactory) => 
UnsupportedFieldTypeFactory) { + this.unsupported = builder(new UnsupportedFieldTypeFactory()); + this.update({ + unsupported: this.unsupported, + }); + return this; + } +} + +export class UnsupportedFieldTypeFactory extends AstFactory { + value?: AstFactory; + constructor() { + super({ type: UnsupportedFieldType }); + } + setValue(builder: (value: ExpressionBuilder) => AstFactory) { + this.value = builder(ExpressionBuilder()); + this.update({ + value: this.value!, + }); + return this; + } +} + +export class ModelFactory extends AstFactory { + declarations: AstFactory[] = []; + imports: ModelImportFactory[] = []; + constructor() { + super({ type: Model, node: { declarations: [], imports: [] } }); + } + addImport(builder: (b: ModelImportFactory) => ModelImportFactory) { + this.imports.push(builder(new ModelImportFactory())); + this.update({ + imports: this.imports, + }); + return this; + } + addDeclaration(builder: (b: DeclarationBuilder) => AstFactory) { + this.declarations.push(builder(DeclarationBuilder())); + this.update({ + declarations: this.declarations, + }); + return this; + } +} + +export class ModelImportFactory extends AstFactory { + path?: string | undefined; + + constructor() { + super({ type: ModelImport }); + } + + setPath(path: string) { + this.path = path; + this.update({ + path: this.path, + }); + return this; + } +} + +export class EnumFactory extends AstFactory { + name?: string; + comments: string[] = []; + fields: EnumFieldFactory[] = []; + attributes: DataModelAttributeFactory[] = []; + + constructor() { + super({ type: Enum, node: { comments: [], fields: [], attributes: [] } }); + } + + addField(builder: (b: EnumFieldFactory) => EnumFieldFactory) { + this.fields.push(builder(new EnumFieldFactory())); + this.update({ + fields: this.fields, + }); + return this; + } + + addAttribute(builder: (b: DataModelAttributeFactory) => DataModelAttributeFactory) { + this.attributes.push(builder(new DataModelAttributeFactory())); + this.update({ + 
attributes: this.attributes, + }); + return this; + } + + setName(name: string) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } +} + +export class EnumFieldFactory extends AstFactory { + name?: RegularIDWithTypeNames; + comments: string[] = []; + attributes: DataFieldAttributeFactory[] = []; + + constructor() { + super({ type: EnumField, node: { comments: [], attributes: [] } }); + } + + setName(name: RegularIDWithTypeNames) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } + + addAttribute(builder: (b: DataFieldAttributeFactory) => DataFieldAttributeFactory) { + this.attributes.push(builder(new DataFieldAttributeFactory())); + this.update({ + attributes: this.attributes, + }); + return this; + } + + addComment(comment: string) { + this.comments.push(comment); + this.update({ + comments: this.comments, + }); + return this; + } +} diff --git a/packages/language/src/factory/expression.ts b/packages/language/src/factory/expression.ts new file mode 100644 index 000000000..ea8e984d8 --- /dev/null +++ b/packages/language/src/factory/expression.ts @@ -0,0 +1,303 @@ +import type { Reference } from 'langium'; +import { AstFactory } from '.'; +import { + Argument, + ArrayExpr, + BinaryExpr, + FieldInitializer, + FunctionDecl, + InvocationExpr, + MemberAccessExpr, + MemberAccessTarget, + ObjectExpr, + ReferenceArg, + ReferenceExpr, + ReferenceTarget, + UnaryExpr, + type Expression, + type RegularID, +} from '../ast'; +import { + BooleanLiteralFactory, + NullExprFactory, + NumberLiteralFactory, + StringLiteralFactory, + ThisExprFactory, +} from './primitives'; + +export const ExpressionBuilder = () => + ({ + get ArrayExpr() { + return new ArrayExprFactory(); + }, + get BinaryExpr() { + return new BinaryExprFactory(); + }, + get BooleanLiteral() { + return new BooleanLiteralFactory(); + }, + get InvocationExpr() { + return new InvocationExprFactory(); + }, + get MemberAccessExpr() { + return new 
MemberAccessExprFactory(); + }, + get NullExpr() { + return new NullExprFactory(); + }, + get NumberLiteral() { + return new NumberLiteralFactory(); + }, + get ObjectExpr() { + return new ObjectExprFactory(); + }, + get ReferenceExpr() { + return new ReferenceExprFactory(); + }, + get StringLiteral() { + return new StringLiteralFactory(); + }, + get ThisExpr() { + return new ThisExprFactory(); + }, + get UnaryExpr() { + return new UnaryExprFactory(); + }, + }) satisfies ExpressionBuilderType; +type ExpressionBuilderType = { + [K in T['$type']]: AstFactory>; +}; + +type ExpressionFactoryMap = ReturnType; + +export type ExpressionBuilder = Pick< + ExpressionFactoryMap, + Extract +>; + +export class UnaryExprFactory extends AstFactory { + operand?: AstFactory; + + constructor() { + super({ type: UnaryExpr, node: { operator: '!' } }); + } + + setOperand(builder: (a: ExpressionBuilder) => AstFactory) { + this.operand = builder(ExpressionBuilder()); + this.update({ + operand: this.operand, + }); + return this; + } +} + +export class ReferenceExprFactory extends AstFactory { + target?: Reference; + args: ReferenceArgFactory[] = []; + + constructor() { + super({ type: ReferenceExpr, node: { args: [] } }); + } + + setTarget(target: ReferenceTarget) { + this.target = { + $refText: target.name, + ref: target, + }; + this.update({ + target: this.target, + }); + return this; + } + + addArg(builder: (a: ReferenceArgFactory) => ReferenceArgFactory) { + this.args.push(builder(new ReferenceArgFactory())); + this.update({ + args: this.args, + }); + return this; + } +} + +export class ReferenceArgFactory extends AstFactory { + name?: string; + value?: AstFactory; + + constructor() { + super({ type: ReferenceArg }); + } + + setName(name: string) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } + + setValue(builder: (a: ExpressionBuilder) => AstFactory) { + this.value = builder(ExpressionBuilder()); + this.update({ + value: this.value, + }); + return 
this; + } +} + +export class MemberAccessExprFactory extends AstFactory { + member?: Reference; + operand?: AstFactory; + + constructor() { + super({ type: MemberAccessExpr }); + } + + setMember(target: Reference) { + this.member = target; + this.update({ + member: this.member, + }); + return this; + } + + setOperand(builder: (b: ExpressionBuilder) => AstFactory) { + this.operand = builder(ExpressionBuilder()); + this.update({ + operand: this.operand, + }); + return this; + } +} + +export class ObjectExprFactory extends AstFactory { + fields: FieldInitializerFactory[] = []; + + constructor() { + super({ type: ObjectExpr, node: { fields: [] } }); + } + + addField(builder: (b: FieldInitializerFactory) => FieldInitializerFactory) { + this.fields.push(builder(new FieldInitializerFactory())); + this.update({ + fields: this.fields, + }); + return this; + } +} + +export class FieldInitializerFactory extends AstFactory { + name?: RegularID; + value?: AstFactory; + + constructor() { + super({ type: FieldInitializer }); + } + + setName(name: RegularID) { + this.name = name; + this.update({ + name: this.name!, + }); + return this; + } + + setValue(builder: (a: ExpressionBuilder) => AstFactory) { + this.value = builder(ExpressionBuilder()); + this.update({ + value: this.value!, + }); + return this; + } +} + +export class InvocationExprFactory extends AstFactory { + args: ArgumentFactory[] = []; + function?: Reference; + + constructor() { + super({ type: InvocationExpr, node: { args: [] } }); + } + + addArg(builder: (arg: ArgumentFactory) => ArgumentFactory) { + this.args.push(builder(new ArgumentFactory())); + this.update({ + args: this.args, + }); + return this; + } + + setFunction(value: FunctionDecl) { + this.function = { + $refText: value.name, + ref: value, + }; + this.update({ + function: this.function!, + }); + return this; + } +} + +export class ArgumentFactory extends AstFactory { + value?: AstFactory; + + constructor() { + super({ type: Argument }); + } + + 
setValue(builder: (a: ExpressionBuilder) => AstFactory) { + this.value = builder(ExpressionBuilder()); + this.update({ + value: this.value!, + }); + return this; + } +} + +export class ArrayExprFactory extends AstFactory { + items: AstFactory[] = []; + + constructor() { + super({ type: ArrayExpr, node: { items: [] } }); + } + + addItem(builder: (a: ExpressionBuilder) => AstFactory) { + this.items.push(builder(ExpressionBuilder())); + this.update({ + items: this.items, + }); + return this; + } +} + +export class BinaryExprFactory extends AstFactory { + operator?: BinaryExpr['operator']; + right?: AstFactory; + left?: AstFactory; + + constructor() { + super({ type: BinaryExpr }); + } + + setOperator(operator: BinaryExpr['operator']) { + this.operator = operator; + this.update({ + operator: this.operator!, + }); + return this; + } + setRight(builder: (arg: ExpressionBuilder) => AstFactory) { + this.right = builder(ExpressionBuilder()); + this.update({ + right: this.right!, + }); + return this; + } + setLeft(builder: (arg: ExpressionBuilder) => AstFactory) { + this.left = builder(ExpressionBuilder()); + this.update({ + left: this.left!, + }); + return this; + } +} diff --git a/packages/language/src/factory/index.ts b/packages/language/src/factory/index.ts new file mode 100644 index 000000000..e05891ab6 --- /dev/null +++ b/packages/language/src/factory/index.ts @@ -0,0 +1,61 @@ +import { type AstNode } from '../ast'; + +export type ContainerProps = { + $container: T; + $containerProperty?: string; + $containerIndex?: number; +}; + +type NodeFactoriesFor = { + [K in keyof N as {} extends Pick ? never : K]: N[K] extends (infer U)[] + ? (AstFactory | U)[] + : AstFactory | N[K]; +} & { + [K in keyof N as {} extends Pick ? K : never]?: N[K] extends (infer U)[] + ? 
(AstFactory | U)[] + : AstFactory | N[K]; +}; + +export abstract class AstFactory { + node = {} as T; + constructor({ type, node }: { type: T['$type']; node?: Partial }) { + (this.node as any).$type = type; + if (node) { + this.update(node); + } + } + setContainer(container: T['$container']) { + (this.node as any).$container = container; + return this; + } + + get(params?: ContainerProps): T { + if (params) this.update(params as any); + return this.node; + } + update(nodeArg: Partial>): T { + const keys = Object.keys(nodeArg as object); + keys.forEach((key) => { + const child = (nodeArg as any)[key]; + if (child instanceof AstFactory) { + (this.node as any)[key] = child.get({ $container: this.node as any }); + } else if (Array.isArray(child)) { + (this.node as any)[key] = child.map((item: any) => + item instanceof AstFactory ? item.get({ $container: this.node as any }) : item, + ); + } else { + (this.node as any)[key] = child; + } + }); + return this.node; + } + + resolveChilds(nodeArg: T | NodeFactoriesFor): T { + return this.update(nodeArg); + } +} + +export * from './primitives'; +export * from './expression'; +export * from './declaration'; +export * from './attribute'; diff --git a/packages/language/src/factory/primitives.ts b/packages/language/src/factory/primitives.ts new file mode 100644 index 000000000..1db7e0515 --- /dev/null +++ b/packages/language/src/factory/primitives.ts @@ -0,0 +1,61 @@ +import { AstFactory } from '.'; +import { BooleanLiteral, NullExpr, NumberLiteral, StringLiteral, ThisExpr } from '../ast'; + +export class ThisExprFactory extends AstFactory { + constructor() { + super({ type: ThisExpr, node: { value: 'this' } }); + } +} + +export class NullExprFactory extends AstFactory { + constructor() { + super({ type: NullExpr, node: { value: 'null' } }); + } +} + +export class NumberLiteralFactory extends AstFactory { + value?: number | string; + + constructor() { + super({ type: NumberLiteral }); + } + + setValue(value: number | string) { + 
this.value = value; + this.update({ + value: this.value.toString(), + }); + return this; + } +} + +export class StringLiteralFactory extends AstFactory { + value?: string; + + constructor() { + super({ type: StringLiteral }); + } + + setValue(value: string) { + this.value = value; + this.update({ + value: this.value, + }); + return this; + } +} +export class BooleanLiteralFactory extends AstFactory { + value?: boolean; + + constructor() { + super({ type: BooleanLiteral }); + } + + setValue(value: boolean) { + this.value = value; + this.update({ + value: this.value, + }); + return this; + } +} diff --git a/packages/language/tsup.config.ts b/packages/language/tsup.config.ts index 0d5d2b6c4..48282a08c 100644 --- a/packages/language/tsup.config.ts +++ b/packages/language/tsup.config.ts @@ -5,6 +5,7 @@ export default defineConfig({ index: 'src/index.ts', ast: 'src/ast.ts', utils: 'src/utils.ts', + factory: 'src/factory/index.ts', }, outDir: 'dist', splitting: false, From 126bdcf9440c8aae7698ccc6279e9172779505d9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Mon, 6 Oct 2025 01:33:28 +0200 Subject: [PATCH 08/83] fix: ast factory import order --- packages/language/src/factory/ast-factory.ts | 56 +++++++++++++++++++ packages/language/src/factory/index.ts | 58 +------------------- 2 files changed, 57 insertions(+), 57 deletions(-) create mode 100644 packages/language/src/factory/ast-factory.ts diff --git a/packages/language/src/factory/ast-factory.ts b/packages/language/src/factory/ast-factory.ts new file mode 100644 index 000000000..e01dd7ced --- /dev/null +++ b/packages/language/src/factory/ast-factory.ts @@ -0,0 +1,56 @@ +import { type AstNode } from '../ast'; + +export type ContainerProps = { + $container: T; + $containerProperty?: string; + $containerIndex?: number; +}; + +type NodeFactoriesFor = { + [K in keyof N as {} extends Pick ? never : K]: N[K] extends (infer U)[] + ? 
(AstFactory | U)[] + : AstFactory | N[K]; +} & { + [K in keyof N as {} extends Pick ? K : never]?: N[K] extends (infer U)[] + ? (AstFactory | U)[] + : AstFactory | N[K]; +}; + +export abstract class AstFactory { + node = {} as T; + constructor({ type, node }: { type: T['$type']; node?: Partial }) { + (this.node as any).$type = type; + if (node) { + this.update(node); + } + } + setContainer(container: T['$container']) { + (this.node as any).$container = container; + return this; + } + + get(params?: ContainerProps): T { + if (params) this.update(params as any); + return this.node; + } + update(nodeArg: Partial>): T { + const keys = Object.keys(nodeArg as object); + keys.forEach((key) => { + const child = (nodeArg as any)[key]; + if (child instanceof AstFactory) { + (this.node as any)[key] = child.get({ $container: this.node as any }); + } else if (Array.isArray(child)) { + (this.node as any)[key] = child.map((item: any) => + item instanceof AstFactory ? item.get({ $container: this.node as any }) : item, + ); + } else { + (this.node as any)[key] = child; + } + }); + return this.node; + } + + resolveChilds(nodeArg: T | NodeFactoriesFor): T { + return this.update(nodeArg); + } +} diff --git a/packages/language/src/factory/index.ts b/packages/language/src/factory/index.ts index e05891ab6..1ea2a286b 100644 --- a/packages/language/src/factory/index.ts +++ b/packages/language/src/factory/index.ts @@ -1,60 +1,4 @@ -import { type AstNode } from '../ast'; - -export type ContainerProps = { - $container: T; - $containerProperty?: string; - $containerIndex?: number; -}; - -type NodeFactoriesFor = { - [K in keyof N as {} extends Pick ? never : K]: N[K] extends (infer U)[] - ? (AstFactory | U)[] - : AstFactory | N[K]; -} & { - [K in keyof N as {} extends Pick ? K : never]?: N[K] extends (infer U)[] - ? 
(AstFactory | U)[] - : AstFactory | N[K]; -}; - -export abstract class AstFactory { - node = {} as T; - constructor({ type, node }: { type: T['$type']; node?: Partial }) { - (this.node as any).$type = type; - if (node) { - this.update(node); - } - } - setContainer(container: T['$container']) { - (this.node as any).$container = container; - return this; - } - - get(params?: ContainerProps): T { - if (params) this.update(params as any); - return this.node; - } - update(nodeArg: Partial>): T { - const keys = Object.keys(nodeArg as object); - keys.forEach((key) => { - const child = (nodeArg as any)[key]; - if (child instanceof AstFactory) { - (this.node as any)[key] = child.get({ $container: this.node as any }); - } else if (Array.isArray(child)) { - (this.node as any)[key] = child.map((item: any) => - item instanceof AstFactory ? item.get({ $container: this.node as any }) : item, - ); - } else { - (this.node as any)[key] = child; - } - }); - return this.node; - } - - resolveChilds(nodeArg: T | NodeFactoriesFor): T { - return this.update(nodeArg); - } -} - +export * from './ast-factory'; export * from './primitives'; export * from './expression'; export * from './declaration'; From 4914bae34f13ab07a715a1254ba754a3c389fc74 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Mon, 6 Oct 2025 03:05:23 +0200 Subject: [PATCH 09/83] fix: some runtime bugs --- packages/cli/src/actions/db.ts | 87 ++++++++++--------- packages/cli/src/actions/pull/index.ts | 92 ++++++++++++++------- packages/language/src/factory/attribute.ts | 6 +- packages/language/src/factory/expression.ts | 8 +- 4 files changed, 118 insertions(+), 75 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 8dea5cd90..c9a3ef4cf 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -68,59 +68,64 @@ async function runPush(options: PushOptions) { } async function runPull(options: PullOptions) { - const schemaFile = 
getSchemaFile(options.schema); - const { model, services } = await loadSchemaDocumentWithServices(schemaFile); - config(); - const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql']; - const datasource = getDatasource(model); - - if (!datasource) { - throw new Error('No datasource found in the schema.'); - } + try { + const schemaFile = getSchemaFile(options.schema); + const { model, services } = await loadSchemaDocumentWithServices(schemaFile); + config(); + const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql']; + const datasource = getDatasource(model); + + if (!datasource) { + throw new Error('No datasource found in the schema.'); + } - if (!SUPPORTED_PROVIDERS.includes(datasource.provider)) { - throw new Error(`Unsupported datasource provider: ${datasource.provider}`); - } + if (!SUPPORTED_PROVIDERS.includes(datasource.provider)) { + throw new Error(`Unsupported datasource provider: ${datasource.provider}`); + } - const provider = providers[datasource.provider]; + const provider = providers[datasource.provider]; - if (!provider) { - throw new Error(`No introspection provider found for: ${datasource.provider}`); - } + if (!provider) { + throw new Error(`No introspection provider found for: ${datasource.provider}`); + } - const { enums, tables } = await provider.introspect(datasource.url); + const { enums, tables } = await provider.introspect(datasource.url); - const newModel: Model = { - $type: 'Model', - $container: undefined, - $containerProperty: undefined, - $containerIndex: undefined, - declarations: [...model.declarations.filter((d) => ['DataSource'].includes(d.$type))], - imports: [], - }; + const newModel: Model = { + $type: 'Model', + $container: undefined, + $containerProperty: undefined, + $containerIndex: undefined, + declarations: [...model.declarations.filter((d) => ['DataSource'].includes(d.$type))], + imports: [], + }; - syncEnums({ dbEnums: enums, model: newModel, services, options }); + syncEnums({ dbEnums: enums, model: newModel, services, options 
}); - const resolvedRelations: Relation[] = []; - for (const table of tables) { - const relations = syncTable({ table, model: newModel, provider, services, options }); - resolvedRelations.push(...relations); - } + const resolvedRelations: Relation[] = []; + for (const table of tables) { + const relations = syncTable({ table, model: newModel, provider, services, options }); + resolvedRelations.push(...relations); + } - for (const relation of resolvedRelations) { - syncRelation({ model: newModel, relation, services, options }); - } + for (const relation of resolvedRelations) { + syncRelation({ model: newModel, relation, services, options }); + } - //TODO: diff models and apply changes only + //TODO: diff models and apply changes only - const generator = new ZModelCodeGenerator(); + const generator = new ZModelCodeGenerator(); - const zmodelSchema = generator.generate(newModel); + const zmodelSchema = generator.generate(newModel); - console.log(options.out ? `Writing to ${options.out}` : schemaFile); + console.log(options.out ? `Writing to ${options.out}` : schemaFile); - const outPath = options.out ? path.resolve(options.out) : schemaFile; - console.log(outPath); + const outPath = options.out ? 
path.resolve(options.out) : schemaFile; + console.log(outPath); - fs.writeFileSync(outPath, zmodelSchema); + fs.writeFileSync(outPath, zmodelSchema); + } catch (error) { + console.log(error); + throw error; + } } diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 708244a35..de06ca60b 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -151,26 +151,13 @@ export function syncTable({ const modelFactory = new DataModelFactory().setName(name).setIsView(table.type === 'view'); modelFactory.setContainer(model); + if (modified) { modelFactory.addAttribute((builder) => builder.setDecl(tableMapAttribute).addArg((argBuilder) => argBuilder.StringLiteral.setValue(table.name)), ); } - if (multiPk) { - const pkColumns = table.columns.filter((c) => c.pk).map((c) => c.name); - modelFactory.addAttribute((builder) => - builder.setDecl(modelIdAttribute).addArg((argBuilder) => { - const arrayExpr = argBuilder.ArrayExpr; - pkColumns.map((c) => { - const ref = modelFactory.node.fields.find((f) => getDbName(f) === c)!; - arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); - }); - return arrayExpr; - }), - ); - } - table.columns.forEach((column) => { if (column.foreign_key_table) { relations.push({ @@ -231,7 +218,7 @@ export function syncTable({ enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], }) : []; - defaultValuesAttrs.forEach(builder.addAttribute); + defaultValuesAttrs.forEach(builder.addAttribute.bind(builder)); } if (column.pk && !multiPk) { @@ -254,12 +241,12 @@ export function syncTable({ }); }); - const uniqieColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name); - if (uniqieColumns.length > 0) { + const pkColumns = table.columns.filter((c) => c.pk).map((c) => c.name); + if (multiPk) { modelFactory.addAttribute((builder) => - builder.setDecl(modelUniqueAttribute).addArg((argBuilder) => { + 
builder.setDecl(modelIdAttribute).addArg((argBuilder) => { const arrayExpr = argBuilder.ArrayExpr; - uniqieColumns.map((c) => { + pkColumns.map((c) => { const ref = modelFactory.node.fields.find((f) => getDbName(f) === c)!; arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); }); @@ -268,21 +255,65 @@ export function syncTable({ ); } - model.declarations.push(modelFactory.node); - - table.indexes.forEach((index) => { + const uniqieColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name); + if (uniqieColumns.length > 0) { modelFactory.addAttribute((builder) => - builder.setDecl(modelindexAttribute).addArg((argBuilder) => { + builder.setDecl(modelUniqueAttribute).addArg((argBuilder) => { const arrayExpr = argBuilder.ArrayExpr; - index.columns.map((c) => { - const ref = modelFactory.node.fields.find((f) => getDbName(f) === c.name)!; + uniqieColumns.map((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c)!; arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); }); return arrayExpr; }), ); + } + + table.indexes.forEach((index) => { + if (index.predicate) { + //These constraints are not supported by Zenstack, because Zenstack currently does not fully support check constraints. Read more: https://pris.ly/d/check-constraints + console.log( + 'These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints', + `- Model: "${table.name}", constraint: "${index.name}"`, + ); + return; + } + if (index.columns.find((c) => c.expression)) { + console.log( + 'These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints', + `- Model: "${table.name}", constraint: "${index.name}"`, + ); + return; + } + + if (index.columns.length === 1 && index.columns.find((c) => pkColumns.includes(c.name))) { + //skip primary key + return; + } + + modelFactory.addAttribute((builder) => + builder + .setDecl(index.unique ? 
modelUniqueAttribute : modelindexAttribute) + .addArg((argBuilder) => { + const arrayExpr = argBuilder.ArrayExpr; + index.columns.map((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c.name)!; + if (!ref) console.log(c, table.name); + arrayExpr.addItem((itemBuilder) => { + const refExpr = itemBuilder.ReferenceExpr.setTarget(ref); + if (c.order !== 'ASC') refExpr.addArg((ab) => ab.StringLiteral.setValue('DESC'), 'sort'); + + return refExpr; + }); + }); + return arrayExpr; + }) + .addArg((argBuilder) => argBuilder.StringLiteral.setValue(index.name), 'map'), + ); }); + model.declarations.push(modelFactory.node); + return relations; } @@ -327,12 +358,15 @@ export function syncRelation({ const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? '_' : ''; const relationName = `${sourceModel.name}_${relation.column}To${targetModel.name}_${relation.references.column}`; + let sourceFieldName = `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`; + + if (sourceModel.fields.find((f) => f.name === sourceFieldName)) { + sourceFieldName = `${sourceFieldName}To${targetModel.name.charAt(0).toLowerCase()}${targetModel.name.slice(1)}_${relation.references.column}`; + } const sourceFieldFactory = new DataFieldFactory() .setContainer(sourceModel) - .setName( - `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`, - ) + .setName(sourceFieldName) .setType((tb) => tb .setOptional(relation.nullable) @@ -345,7 +379,7 @@ export function syncRelation({ .addArg((ab) => ab.StringLiteral.setValue(relationName)) .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(sourceField)), 'fields') .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(targetField)), 'references') - .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.StringLiteral.setValue(relation.fk_name)), 'map'), + .addArg((ab) => 
ab.StringLiteral.setValue(relation.fk_name), 'map'), ); sourceModel.fields.push(sourceFieldFactory.node); diff --git a/packages/language/src/factory/attribute.ts b/packages/language/src/factory/attribute.ts index a42c5e50e..52aeebc7c 100644 --- a/packages/language/src/factory/attribute.ts +++ b/packages/language/src/factory/attribute.ts @@ -18,7 +18,7 @@ export class DataFieldAttributeFactory extends AstFactory { args: AttributeArgFactory[] = []; decl?: Reference; constructor() { - super({ type: DataFieldAttribute }); + super({ type: DataFieldAttribute, node: { args: [] } }); } setDecl(decl: Attribute) { this.decl = { @@ -47,7 +47,7 @@ export class DataModelAttributeFactory extends AstFactory { args: AttributeArgFactory[] = []; decl?: Reference; constructor() { - super({ type: DataModelAttribute }); + super({ type: DataModelAttribute, node: { args: [] } }); } setDecl(decl: Attribute) { this.decl = { @@ -102,7 +102,7 @@ export class InternalAttributeFactory extends AstFactory { args: AttributeArgFactory[] = []; constructor() { - super({ type: InternalAttribute }); + super({ type: InternalAttribute, node: { args: [] } }); } setDecl(decl: Attribute) { diff --git a/packages/language/src/factory/expression.ts b/packages/language/src/factory/expression.ts index ea8e984d8..a0ba84001 100644 --- a/packages/language/src/factory/expression.ts +++ b/packages/language/src/factory/expression.ts @@ -110,8 +110,12 @@ export class ReferenceExprFactory extends AstFactory { return this; } - addArg(builder: (a: ReferenceArgFactory) => ReferenceArgFactory) { - this.args.push(builder(new ReferenceArgFactory())); + addArg(builder: (a: ExpressionBuilder) => AstFactory, name?: string) { + const arg = new ReferenceArgFactory().setValue(builder); + if (name) { + arg.setName(name); + } + this.args.push(arg); this.update({ args: this.args, }); From 3bd3336e9eff4eee932f4249b048a2b1ef3e7566 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Mon, 20 Oct 2025 21:58:40 
+0200 Subject: [PATCH 10/83] fix: lint fix --- packages/cli/src/actions/pull/index.ts | 38 ++++++++------- .../src/actions/pull/provider/postgresql.ts | 48 ++----------------- packages/cli/src/actions/pull/utils.ts | 28 ++++++----- 3 files changed, 41 insertions(+), 73 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index de06ca60b..14c9b477e 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -8,7 +8,7 @@ import { getAttributeRef, getDbName } from './utils'; export function syncEnums({ dbEnums, model, - options: options, + options, services, }: { dbEnums: IntrospectedEnum[]; @@ -23,18 +23,18 @@ export function syncEnums({ if (modified) factory.addAttribute((builder) => builder - .setDecl(getAttributeRef('@@map', services)!) + .setDecl(getAttributeRef('@@map', services)) .addArg((argBuilder) => argBuilder.StringLiteral.setValue(dbEnum.enum_type)), ); - dbEnum.values.map((v) => { + dbEnum.values.forEach((v) => { const { name, modified } = resolveNameCasing(options, v); factory.addField((builder) => { builder.setName(name); if (modified) builder.addAttribute((builder) => builder - .setDecl(getAttributeRef('@map', services)!) 
+ .setDecl(getAttributeRef('@map', services)) .addArg((argBuilder) => argBuilder.StringLiteral.setValue(v)), ); @@ -46,7 +46,7 @@ export function syncEnums({ } function resolveNameCasing(options: PullOptions, originalName: string) { - let name: string; + let name = originalName; switch (options.naming) { case 'pascal': @@ -61,10 +61,6 @@ function resolveNameCasing(options: PullOptions, originalName: string) { case 'kebab': name = toKebabCase(originalName); break; - case 'none': - default: - name = originalName; - break; } return { @@ -188,7 +184,7 @@ export function syncTable({ typeBuilder.setArray(builtinType.isArray); typeBuilder.setOptional(column.nullable); - if (builtinType.type != 'Unsupported') { + if (builtinType.type !== 'Unsupported') { typeBuilder.setType(builtinType.type); } else { typeBuilder.setUnsupported((unsupportedBuilder) => @@ -246,8 +242,11 @@ export function syncTable({ modelFactory.addAttribute((builder) => builder.setDecl(modelIdAttribute).addArg((argBuilder) => { const arrayExpr = argBuilder.ArrayExpr; - pkColumns.map((c) => { - const ref = modelFactory.node.fields.find((f) => getDbName(f) === c)!; + pkColumns.forEach((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c); + if (!ref) { + throw new Error(`Field ${c} not found`); + } arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); }); return arrayExpr; @@ -260,8 +259,11 @@ export function syncTable({ modelFactory.addAttribute((builder) => builder.setDecl(modelUniqueAttribute).addArg((argBuilder) => { const arrayExpr = argBuilder.ArrayExpr; - uniqieColumns.map((c) => { - const ref = modelFactory.node.fields.find((f) => getDbName(f) === c)!; + uniqieColumns.forEach((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c); + if (!ref) { + throw new Error(`Field ${c} not found`); + } arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); }); return arrayExpr; @@ -296,9 +298,11 @@ export function 
syncTable({ .setDecl(index.unique ? modelUniqueAttribute : modelindexAttribute) .addArg((argBuilder) => { const arrayExpr = argBuilder.ArrayExpr; - index.columns.map((c) => { - const ref = modelFactory.node.fields.find((f) => getDbName(f) === c.name)!; - if (!ref) console.log(c, table.name); + index.columns.forEach((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c.name); + if (!ref) { + throw new Error(`Column ${c.name} not found in model ${table.name}`); + } arrayExpr.addItem((itemBuilder) => { const refExpr = itemBuilder.ReferenceExpr.setTarget(ref); if (c.order !== 'ASC') refExpr.addArg((ab) => ab.StringLiteral.setValue('DESC'), 'sort'); diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 07dcee913..73428d37e 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -61,48 +61,6 @@ export const postgresql: IntrospectionProvider = { case 'json': case 'jsonb': return { type: 'Json', isArray }; - - // unsupported or postgres-specific - case 'time': - case 'timetz': - case 'interval': - case 'money': - case 'xml': - case 'bit': - case 'varbit': - case 'cidr': - case 'inet': - case 'macaddr': - case 'macaddr8': - case 'point': - case 'line': - case 'lseg': - case 'box': - case 'path': - case 'polygon': - case 'circle': - case 'tsvector': - case 'tsquery': - case 'jsonpath': - case 'hstore': - case 'oid': - case 'name': - case 'regclass': - case 'regproc': - case 'regprocedure': - case 'regoper': - case 'regoperator': - case 'regtype': - case 'regconfig': - case 'regdictionary': - case 'pg_lsn': - case 'txid_snapshot': - case 'int4range': - case 'int8range': - case 'numrange': - case 'tsrange': - case 'tstzrange': - case 'daterange': default: return { type: 'Unsupported' as const, isArray }; } @@ -123,13 +81,13 @@ export const postgresql: IntrospectionProvider = { const val = defaultValue.trim(); 
const factories: DataFieldAttributeFactory[] = []; - const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef('@default', services)!); + const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef('@default', services)); if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { - factories.push(defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)!))); + factories.push(defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)))); if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') { - factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services)!)); + factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services))); } return factories; } diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index 234629740..33a6ace30 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -1,11 +1,11 @@ import type { ZModelServices } from '@zenstackhq/language'; import { - AbstractDeclaration, - DataField, - DataModel, - Enum, - EnumField, - FunctionDecl, + type AbstractDeclaration, + type DataField, + type DataModel, + type Enum, + type EnumField, + type FunctionDecl, isInvocationExpr, type Attribute, type Model, @@ -15,8 +15,10 @@ import type { DataSourceProviderType } from '@zenstackhq/sdk/schema'; import type { Reference } from 'langium'; export function getAttribute(model: Model, attrName: string) { - const references = model.$document!.references as Reference[]; - return references.find((a) => a.ref!.$type === 'Attribute' && a.ref!.name === attrName)?.ref as + if (!model.$document) throw new Error('Model is not associated with a document.'); + + const references = model.$document.references as Reference[]; + return references.find((a) => a.ref?.$type === 'Attribute' && a.ref?.name === attrName)?.ref 
as | Attribute | undefined; } @@ -27,7 +29,9 @@ export function getDatasource(model: Model) { throw new Error('No datasource declaration found in the schema.'); } - const urlField = datasource.fields.find((f) => f.name === 'url')!; + const urlField = datasource.fields.find((f) => f.name === 'url'); + + if (!urlField) throw new Error(`No url field found in the datasource declaration.`); let url = getStringLiteral(urlField.value); @@ -73,9 +77,11 @@ export function getDeclarationRef( name: string, services: ZModelServices, ) { - return services.shared.workspace.IndexManager.allElements(type).find( + const node = services.shared.workspace.IndexManager.allElements(type).find( (m) => m.node && getDbName(m.node as T) === name, - )?.node as T | undefined; + )?.node; + if (!node) throw new Error(`Declaration not found: ${name}`); + return node as T; } export function getEnumRef(name: string, services: ZModelServices) { From b1633eb91ca891c864cbf8b68a453437ad2fa486 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Mon, 20 Oct 2025 21:59:50 +0200 Subject: [PATCH 11/83] fix: update zmodel code generator - include imports in output - fix indentaions - include comments in output --- .../language/src/zmodel-code-generator.ts | 46 ++++++++++++++----- 1 file changed, 34 insertions(+), 12 deletions(-) diff --git a/packages/language/src/zmodel-code-generator.ts b/packages/language/src/zmodel-code-generator.ts index 5730fc5b7..bdcad0150 100644 --- a/packages/language/src/zmodel-code-generator.ts +++ b/packages/language/src/zmodel-code-generator.ts @@ -71,7 +71,7 @@ function gen(name: string) { */ export class ZModelCodeGenerator { private readonly options: ZModelCodeOptions; - + private readonly quote: string; constructor(options?: Partial) { this.options = { binaryExprNumberOfSpaces: options?.binaryExprNumberOfSpaces ?? 1, @@ -79,6 +79,7 @@ export class ZModelCodeGenerator { indent: options?.indent ?? 4, quote: options?.quote ?? 
'single', }; + this.quote = this.options.quote === 'double' ? '"' : "'"; } /** @@ -92,9 +93,14 @@ export class ZModelCodeGenerator { return handler.value.call(this, ast); } + private quotedStr(val: string): string { + const trimmedVal = val.replace(new RegExp(`${this.quote}`, 'g'), `\\${this.quote}`); + return `${this.quote}${trimmedVal}${this.quote}`; + } + @gen(Model) private _generateModel(ast: Model) { - return ast.declarations.map((d) => this.generate(d)).join('\n\n'); + return `${ast.imports.map((d) => this.generate(d)).join('\n')}\n\n${ast.declarations.map((d) => this.generate(d)).join('\n\n')}`; } @gen(DataSource) @@ -106,16 +112,17 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')} @gen(ModelImport) private _generateModelImport(ast: ModelImport) { - return `import '${ast.path}'`; + return `import ${this.quotedStr(ast.path)}`; } @gen(Enum) private _generateEnum(ast: Enum) { return `enum ${ast.name} { -${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')}${ast.attributes.length > 0 +${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')}${ + ast.attributes.length > 0 ? 
'\n\n' + ast.attributes.map((x) => this.indent + this.generate(x)).join('\n') : '' - } + } }`; } @@ -135,7 +142,9 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')} @gen(ConfigField) private _generateConfigField(ast: ConfigField) { - return `${ast.name} = ${this.generate(ast.value)}`; + const longestName = Math.max(...ast.$container.fields.map((x) => x.name.length)); + const padding = ' '.repeat(longestName - ast.name.length + 1); + return `${ast.name}${padding}= ${this.generate(ast.value)}`; } @gen(ConfigArrayExpr) @@ -163,15 +172,24 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')} @gen(PluginField) private _generatePluginField(ast: PluginField) { - return `${ast.name} = ${this.generate(ast.value)}`; + const longestName = Math.max(...ast.$container.fields.map((x) => x.name.length)); + const padding = ' '.repeat(longestName - ast.name.length + 1); + return `${ast.name}${padding}= ${this.generate(ast.value)}`; } @gen(DataModel) private _generateDataModel(ast: DataModel) { - return `${ast.isView ? 'view' : 'model'} ${ast.name}${ + const comments = `${ast.comments.join('\n')}\n`; + + return `${ast.comments.length > 0 ? comments : ''}${ast.isView ? 'view' : 'model'} ${ast.name}${ ast.mixins.length > 0 ? ' mixes ' + ast.mixins.map((x) => x.$refText).join(', ') : '' } { -${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')}${ +${ast.fields + .map((x) => { + const comments = x.comments.map((c) => `${this.indent}${c}`).join('\n'); + return (x.comments.length ? `${comments}\n` : '') + this.indent + this.generate(x); + }) + .join('\n')}${ ast.attributes.length > 0 ? 
'\n\n' + ast.attributes.map((x) => this.indent + this.generate(x)).join('\n') : '' @@ -181,7 +199,11 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')}${ @gen(DataField) private _generateDataField(ast: DataField) { - return `${ast.name} ${this.fieldType(ast.type)}${ + const longestFieldName = Math.max(...ast.$container.fields.map((f) => f.name.length)); + const longestType = Math.max(...ast.$container.fields.map((f) => this.fieldType(f.type).length)); + const paddingLeft = longestFieldName - ast.name.length; + const paddingRight = ast.attributes.length > 0 ? longestType - this.fieldType(ast.type).length : 0; + return `${ast.name}${' '.repeat(paddingLeft)} ${this.fieldType(ast.type)}${' '.repeat(paddingRight)}${ ast.attributes.length > 0 ? ' ' + ast.attributes.map((x) => this.generate(x)).join(' ') : '' }`; } @@ -235,7 +257,7 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')}${ @gen(StringLiteral) private _generateLiteralExpr(ast: LiteralExpr) { - return this.options.quote === 'single' ? 
`'${ast.value}'` : `"${ast.value}"`; + return this.quotedStr(ast.value as string); } @gen(NumberLiteral) @@ -280,7 +302,7 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')}${ @gen(ReferenceArg) private _generateReferenceArg(ast: ReferenceArg) { - return `${ast.name}:${this.generate(ast.value)}`; + return `${ast.name}: ${this.generate(ast.value)}`; } @gen(MemberAccessExpr) From 3e398ba4856de69ecfb0b04e4abd67bc2d645a3f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Tue, 21 Oct 2025 00:31:16 +0200 Subject: [PATCH 12/83] feat: add exclude schemas option --- packages/cli/src/actions/db.ts | 7 +++++-- packages/cli/src/actions/pull/index.ts | 15 +++++++++++++++ packages/cli/src/index.ts | 3 ++- 3 files changed, 22 insertions(+), 3 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index c9a3ef4cf..6fe4351c4 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -20,12 +20,13 @@ export type PullOptions = { out?: string; naming?: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; alwaysMap?: boolean; + excludeSchemas: string[]; }; /** * CLI action for db related commands */ -export async function run(command: string, options: PushOptions) { +export async function run(command: string, options: any) { switch (command) { case 'push': await runPush(options); @@ -89,7 +90,9 @@ async function runPull(options: PullOptions) { throw new Error(`No introspection provider found for: ${datasource.provider}`); } - const { enums, tables } = await provider.introspect(datasource.url); + const { enums: allEnums, tables: allTables } = await provider.introspect(datasource.url); + const enums = allEnums.filter((e) => !options.excludeSchemas.includes(e.schema_name)); + const tables = allTables.filter((t) => !options.excludeSchemas.includes(t.schema)); const newModel: Model = { $type: 'Model', diff --git a/packages/cli/src/actions/pull/index.ts 
b/packages/cli/src/actions/pull/index.ts index 14c9b477e..4b070a47c 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -41,6 +41,15 @@ export function syncEnums({ return builder; }); }); + + if (dbEnum.schema_name && dbEnum.schema_name != '' && dbEnum.schema_name !== 'public') { + factory.addAttribute((b) => + b + .setDecl(getAttributeRef('@@schema', services)) + .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), + ); + } + model.declarations.push(factory.get({ $container: model })); } } @@ -316,6 +325,12 @@ export function syncTable({ ); }); + if (table.schema && table.schema != '' && table.schema !== 'public') { + modelFactory.addAttribute((b) => + b.setDecl(getAttributeRef('@@schema', services)).addArg((a) => a.StringLiteral.setValue(table.schema)), + ); + } + model.declarations.push(modelFactory.node); return relations; diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index 2444b10b5..7905676f9 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -148,7 +148,8 @@ function createProgram() { .description('Introspect your database.') .addOption(schemaOption) .addOption(noVersionCheckOption) - .addOption(new Option('--out ', 'add custom output path for the introspected schema')) + .addOption(new Option('-e, --exclude-schemas ', 'exclude specific schemas from introspection')) + .addOption(new Option('-o, --out ', 'add custom output path for the introspected schema')) .action((options) => dbAction('pull', options)); dbCommand From 877d3a57170aed3ada35bbac948f7cdfee9bb766 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Tue, 21 Oct 2025 00:40:40 +0200 Subject: [PATCH 13/83] feat: implement initial diff update --- packages/cli/src/actions/db.ts | 93 +++++++++++++++++++++++--- packages/cli/src/actions/pull/index.ts | 30 ++++++--- 2 files changed, 104 insertions(+), 19 deletions(-) diff --git a/packages/cli/src/actions/db.ts 
b/packages/cli/src/actions/db.ts index 6fe4351c4..682cfc534 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,4 +1,4 @@ -import type { Model } from '@zenstackhq/language/ast'; +import { Model, Enum, DataModel } from '@zenstackhq/language/ast'; import { ZModelCodeGenerator } from '@zenstackhq/sdk'; import fs from 'node:fs'; import path from 'node:path'; @@ -6,7 +6,7 @@ import { execPrisma } from '../utils/exec-utils'; import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, requireDataSourceUrl, loadSchemaDocumentWithServices } from './action-utils'; import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; -import { getDatasource } from './pull/utils'; +import { getDatasource, getDbName } from './pull/utils'; import { config } from '@dotenvx/dotenvx'; type PushOptions = { @@ -115,18 +115,93 @@ async function runPull(options: PullOptions) { syncRelation({ model: newModel, relation, services, options }); } - //TODO: diff models and apply changes only + const cwd = new URL(`file://${process.cwd()}`).pathname; + const docs = services.shared.workspace.LangiumDocuments.all + .filter(({ uri }) => uri.path.toLowerCase().startsWith(cwd.toLowerCase())) + .toArray(); + const docsSet = new Set(docs.map((d) => d.uri.toString())); + console.log(docsSet); + newModel.declarations + .filter((d) => [DataModel, Enum].includes(d.$type)) + .forEach((_declaration) => { + const declaration = _declaration as DataModel | Enum; + const declarations = services.shared.workspace.IndexManager.allElements(declaration.$type, docsSet); + const originalModel = declarations.find((d) => getDbName(d.node as any) === getDbName(declaration)) + ?.node as DataModel | Enum | undefined; + if (!originalModel) { + model.declarations.push(declaration); + (declaration as any).$container = model; + return; + } + + declaration.fields.forEach((f) => { + const originalField = 
originalModel.fields.find((d) => getDbName(d) === getDbName(f)); + + if (!originalField) { + console.log(`Added field ${f.name} to ${originalModel.name}`); + (f as any).$container = originalModel; + originalModel.fields.push(f as any); + return; + } + //TODO: update field + }); + originalModel.fields + .filter((f) => !declaration.fields.find((d) => getDbName(d) === getDbName(f))) + .forEach((f) => { + const model = f.$container; + const index = model.fields.findIndex((d) => d === f); + model.fields.splice(index, 1); + console.log(`Delete field ${f.name}`); + }); + }); + + services.shared.workspace.IndexManager.allElements('DataModel', docsSet) + .filter( + (declaration) => + !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)), + ) + .forEach((decl) => { + const model = decl.node!.$container as Model; + const index = model.declarations.findIndex((d) => d === decl.node); + model.declarations.splice(index, 1); + console.log(`Delete model ${decl.name}`); + }); + services.shared.workspace.IndexManager.allElements('Enum', docsSet) + .filter( + (declaration) => + !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)), + ) + .forEach((decl) => { + const model = decl.node!.$container as Model; + const index = model.declarations.findIndex((d) => d === decl.node); + model.declarations.splice(index, 1); + console.log(`Delete enum ${decl.name}`); + }); + + if (options.out && !fs.lstatSync(options.out).isFile()) { + throw new Error(`Output path ${options.out} is not a file`); + } - const generator = new ZModelCodeGenerator(); + const generator = new ZModelCodeGenerator({ + //TODO: make configurable + quote: 'double', + }); - const zmodelSchema = generator.generate(newModel); + if (options.out) { + const zmodelSchema = generator.generate(newModel); - console.log(options.out ? `Writing to ${options.out}` : schemaFile); + console.log(`Writing to ${options.out}`); - const outPath = options.out ? 
path.resolve(options.out) : schemaFile; - console.log(outPath); + const outPath = options.out ? path.resolve(options.out) : schemaFile; - fs.writeFileSync(outPath, zmodelSchema); + fs.writeFileSync(outPath, zmodelSchema); + } else { + docs.forEach(({ uri, parseResult: { value: model } }) => { + const zmodelSchema = generator.generate(model); + console.log(`Writing to ${uri.path}`); + fs.writeFileSync(uri.fsPath, zmodelSchema); + }); + } } catch (error) { console.log(error); throw error; diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 4b070a47c..2a6a18d88 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -42,12 +42,17 @@ export function syncEnums({ }); }); - if (dbEnum.schema_name && dbEnum.schema_name != '' && dbEnum.schema_name !== 'public') { - factory.addAttribute((b) => - b - .setDecl(getAttributeRef('@@schema', services)) - .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), - ); + try { + if (dbEnum.schema_name && dbEnum.schema_name !== '' && dbEnum.schema_name !== 'public') { + factory.addAttribute((b) => + b + .setDecl(getAttributeRef('@@schema', services)) + .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), + ); + } + } catch (_error: unknown) { + //Waiting to support multi-schema + //TODO: remove catch after multi-schema support is implemented } model.declarations.push(factory.get({ $container: model })); @@ -325,10 +330,15 @@ export function syncTable({ ); }); - if (table.schema && table.schema != '' && table.schema !== 'public') { - modelFactory.addAttribute((b) => - b.setDecl(getAttributeRef('@@schema', services)).addArg((a) => a.StringLiteral.setValue(table.schema)), - ); + try { + if (table.schema && table.schema !== '' && table.schema !== 'public') { + modelFactory.addAttribute((b) => + b.setDecl(getAttributeRef('@@schema', services)).addArg((a) => a.StringLiteral.setValue(table.schema)), + ); + } + } catch (_error: unknown) { + 
//Waiting to support multi-schema + //TODO: remove catch after multi-schema support is implemented } model.declarations.push(modelFactory.node); From 2f90d8f45f9615ffa325065b3853abf0ca648332 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Tue, 21 Oct 2025 00:40:56 +0200 Subject: [PATCH 14/83] fix: update format in zmodel code generator --- packages/language/src/zmodel-code-generator.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/language/src/zmodel-code-generator.ts b/packages/language/src/zmodel-code-generator.ts index bdcad0150..273a4b00e 100644 --- a/packages/language/src/zmodel-code-generator.ts +++ b/packages/language/src/zmodel-code-generator.ts @@ -100,7 +100,7 @@ export class ZModelCodeGenerator { @gen(Model) private _generateModel(ast: Model) { - return `${ast.imports.map((d) => this.generate(d)).join('\n')}\n\n${ast.declarations.map((d) => this.generate(d)).join('\n\n')}`; + return `${ast.imports.map((d) => this.generate(d)).join('\n')}${ast.imports.length > 0 ? 
'\n\n' : ''}${ast.declarations.map((d) => this.generate(d)).join('\n\n')}`; } @gen(DataSource) From ed94090921a79f0dc316b9405fcca9fbc266a81f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Tue, 21 Oct 2025 00:41:55 +0200 Subject: [PATCH 15/83] fix: typo --- packages/cli/src/actions/pull/index.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 2a6a18d88..56fc85452 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -268,12 +268,12 @@ export function syncTable({ ); } - const uniqieColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name); - if (uniqieColumns.length > 0) { + const uniqueColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name); + if (uniqueColumns.length > 0) { modelFactory.addAttribute((builder) => builder.setDecl(modelUniqueAttribute).addArg((argBuilder) => { const arrayExpr = argBuilder.ArrayExpr; - uniqieColumns.forEach((c) => { + uniqueColumns.forEach((c) => { const ref = modelFactory.node.fields.find((f) => getDbName(f) === c); if (!ref) { throw new Error(`Field ${c} not found`); From 04235d9cc6d8498b9b7e2ba262c7de604de66da7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 22 Oct 2025 00:59:38 +0200 Subject: [PATCH 16/83] feat: progress on database introspection and syncing --- packages/cli/src/actions/db.ts | 171 ++++++++++++++---- packages/cli/src/actions/pull/index.ts | 143 ++++++++++----- .../src/actions/pull/provider/postgresql.ts | 84 +++++++-- .../cli/src/actions/pull/provider/provider.ts | 2 + .../cli/src/actions/pull/provider/sqlite.ts | 2 + packages/cli/src/actions/pull/utils.ts | 33 +++- packages/language/res/stdlib.zmodel | 6 +- .../language/src/zmodel-code-generator.ts | 5 +- 8 files changed, 345 insertions(+), 101 deletions(-) diff --git a/packages/cli/src/actions/db.ts 
b/packages/cli/src/actions/db.ts index 682cfc534..8da323196 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,4 +1,4 @@ -import { Model, Enum, DataModel } from '@zenstackhq/language/ast'; +import { Model, Enum, DataModel, DataField } from '@zenstackhq/language/ast'; import { ZModelCodeGenerator } from '@zenstackhq/sdk'; import fs from 'node:fs'; import path from 'node:path'; @@ -6,7 +6,7 @@ import { execPrisma } from '../utils/exec-utils'; import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, requireDataSourceUrl, loadSchemaDocumentWithServices } from './action-utils'; import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; -import { getDatasource, getDbName } from './pull/utils'; +import { getDatasource, getDbName, getRelationFkName } from './pull/utils'; import { config } from '@dotenvx/dotenvx'; type PushOptions = { @@ -20,7 +20,7 @@ export type PullOptions = { out?: string; naming?: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; alwaysMap?: boolean; - excludeSchemas: string[]; + excludeSchemas?: string[]; }; /** @@ -91,8 +91,8 @@ async function runPull(options: PullOptions) { } const { enums: allEnums, tables: allTables } = await provider.introspect(datasource.url); - const enums = allEnums.filter((e) => !options.excludeSchemas.includes(e.schema_name)); - const tables = allTables.filter((t) => !options.excludeSchemas.includes(t.schema)); + const enums = allEnums.filter((e) => !options.excludeSchemas?.includes(e.schema_name)); + const tables = allTables.filter((t) => !options.excludeSchemas?.includes(t.schema)); const newModel: Model = { $type: 'Model', @@ -112,7 +112,28 @@ async function runPull(options: PullOptions) { } for (const relation of resolvedRelations) { - syncRelation({ model: newModel, relation, services, options }); + const simmilarRelations = resolvedRelations.filter((rr) => { + return ( + (rr.schema === relation.schema && + 
rr.table === relation.table && + rr.references.schema === relation.references.schema && + rr.references.table === relation.references.table) || + (rr.schema === relation.references.schema && + rr.column === relation.references.column && + rr.references.schema === relation.schema && + rr.references.table === relation.table) + ); + }).length; + const selfRelation = + relation.references.schema === relation.schema && relation.references.table === relation.table; + syncRelation({ + model: newModel, + relation, + services, + options, + selfRelation, + simmilarRelations, + }); } const cwd = new URL(`file://${process.cwd()}`).pathname; @@ -120,64 +141,141 @@ async function runPull(options: PullOptions) { .filter(({ uri }) => uri.path.toLowerCase().startsWith(cwd.toLowerCase())) .toArray(); const docsSet = new Set(docs.map((d) => d.uri.toString())); - console.log(docsSet); + + services.shared.workspace.IndexManager.allElements('DataModel', docsSet) + .filter( + (declaration) => + !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)), + ) + .forEach((decl) => { + const model = decl.node!.$container as Model; + const index = model.declarations.findIndex((d) => d === decl.node); + model.declarations.splice(index, 1); + console.log(`Delete model ${decl.name}`); + }); + services.shared.workspace.IndexManager.allElements('Enum', docsSet) + .filter( + (declaration) => + !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)), + ) + .forEach((decl) => { + const model = decl.node!.$container as Model; + const index = model.declarations.findIndex((d) => d === decl.node); + model.declarations.splice(index, 1); + console.log(`Delete enum ${decl.name}`); + }); + newModel.declarations .filter((d) => [DataModel, Enum].includes(d.$type)) .forEach((_declaration) => { const declaration = _declaration as DataModel | Enum; - const declarations = services.shared.workspace.IndexManager.allElements(declaration.$type, docsSet); + 
const declarations = services.shared.workspace.IndexManager.allElements( + declaration.$type, + docsSet, + ).toArray(); const originalModel = declarations.find((d) => getDbName(d.node as any) === getDbName(declaration)) ?.node as DataModel | Enum | undefined; if (!originalModel) { model.declarations.push(declaration); (declaration as any).$container = model; + declaration.fields.forEach((f) => { + if (f.$type === 'DataField' && f.type.reference?.ref) { + const ref = declarations.find( + (d) => getDbName(d.node as any) === getDbName(f.type.reference!.ref as any), + )?.node; + if (ref) (f.type.reference.ref as any) = ref; + } + }); return; } declaration.fields.forEach((f) => { - const originalField = originalModel.fields.find((d) => getDbName(d) === getDbName(f)); + const originalField = originalModel.fields.find( + (d) => + getDbName(d) === getDbName(f) || + (getRelationFkName(d as any) === getRelationFkName(f as any) && + !!getRelationFkName(d as any) && + !!getRelationFkName(f as any)), + ); if (!originalField) { - console.log(`Added field ${f.name} to ${originalModel.name}`); + //console.log(`Added field ${f.name} to ${originalModel.name}`); (f as any).$container = originalModel; originalModel.fields.push(f as any); + if (f.$type === 'DataField' && f.type.reference?.ref) { + const ref = declarations.find( + (d) => getDbName(d.node as any) === getDbName(f.type.reference!.ref as any), + )?.node as DataModel | undefined; + if (ref) { + (f.type.reference.$refText as any) = ref.name; + (f.type.reference.ref as any) = ref; + } + } return; } - //TODO: update field + + if (originalField.$type === 'DataField') { + const field = f as DataField; + originalField.type = field.type; + if (field.type.reference) { + const ref = declarations.find( + (d) => getDbName(d.node as any) === getDbName(field.type.reference!.ref as any), + )?.node as DataModel | undefined; + if (ref) { + (field.type.reference.$refText as any) = ref.name; + (field.type.reference.ref as any) = ref; + } + } 
+ + (originalField.type.$container as any) = originalField; + } + + f.attributes.forEach((attr) => { + const originalAttribute = originalField.attributes.find( + (d) => d.decl.$refText === attr.decl.$refText, + ); + + if (!originalAttribute) { + //console.log(`Added Attribute ${attr.decl.$refText} to ${f.name}`); + (f as any).$container = originalField; + originalField.attributes.push(attr as any); + return; + } + + originalAttribute.args = attr.args; + attr.args.forEach((a) => { + (a.$container as any) = originalAttribute; + }); + }); + + originalField.attributes + .filter((attr) => !f.attributes.find((d) => d.decl.$refText === attr.decl.$refText)) + .forEach((attr) => { + const field = attr.$container; + const index = field.attributes.findIndex((d) => d === attr); + field.attributes.splice(index, 1); + //console.log(`Delete attribute from field:${field.name} ${attr.decl.$refText}`); + }); }); originalModel.fields - .filter((f) => !declaration.fields.find((d) => getDbName(d) === getDbName(f))) + .filter( + (f) => + !declaration.fields.find( + (d) => + getDbName(d) === getDbName(f) || + (getRelationFkName(d as any) === getRelationFkName(f as any) && + !!getRelationFkName(d as any) && + !!getRelationFkName(f as any)), + ), + ) .forEach((f) => { const model = f.$container; const index = model.fields.findIndex((d) => d === f); model.fields.splice(index, 1); - console.log(`Delete field ${f.name}`); + //console.log(`Delete field ${f.name}`); }); }); - services.shared.workspace.IndexManager.allElements('DataModel', docsSet) - .filter( - (declaration) => - !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)), - ) - .forEach((decl) => { - const model = decl.node!.$container as Model; - const index = model.declarations.findIndex((d) => d === decl.node); - model.declarations.splice(index, 1); - console.log(`Delete model ${decl.name}`); - }); - services.shared.workspace.IndexManager.allElements('Enum', docsSet) - .filter( - (declaration) => 
- !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)), - ) - .forEach((decl) => { - const model = decl.node!.$container as Model; - const index = model.declarations.findIndex((d) => d === decl.node); - model.declarations.splice(index, 1); - console.log(`Delete enum ${decl.name}`); - }); - if (options.out && !fs.lstatSync(options.out).isFile()) { throw new Error(`Output path ${options.out} is not a file`); } @@ -185,6 +283,7 @@ async function runPull(options: PullOptions) { const generator = new ZModelCodeGenerator({ //TODO: make configurable quote: 'double', + indent: 2, }); if (options.out) { diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 56fc85452..c857fe16e 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,9 +1,15 @@ import type { ZModelServices } from '@zenstackhq/language'; -import { isEnum, type DataField, type DataModel, type Enum, type Model } from '@zenstackhq/language/ast'; -import { DataFieldFactory, DataModelFactory, EnumFactory } from '@zenstackhq/language/factory'; +import { isEnum, type DataField, type DataModel, type Enum, type Model, Attribute } from '@zenstackhq/language/ast'; +import { + DataFieldFactory, + DataModelFactory, + EnumFactory, + ModelFactory, + DataFieldAttributeFactory, +} from '@zenstackhq/language/factory'; import type { PullOptions } from '../db'; -import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider'; -import { getAttributeRef, getDbName } from './utils'; +import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider, Cascade } from './provider'; +import { getAttributeRef, getDbName, getEnumRef } from './utils'; export function syncEnums({ dbEnums, @@ -111,6 +117,8 @@ export type Relation = { column: string; type: 'one' | 'many'; fk_name: string; + foreign_key_on_update: Cascade; + foreign_key_on_delete: Cascade; nullable: boolean; 
references: { schema: string | null; @@ -176,6 +184,8 @@ export function syncTable({ column: column.name, type: 'one', fk_name: column.foreign_key_name!, + foreign_key_on_delete: column.foreign_key_on_delete, + foreign_key_on_update: column.foreign_key_on_update, nullable: column.nullable, references: { schema: column.foreign_key_schema, @@ -187,8 +197,9 @@ export function syncTable({ } const fieldPrefix = /[0-9]/g.test(column.name.charAt(0)) ? '_' : ''; - const { name: _name, modified } = resolveNameCasing(options, column.name); + const { name: _name, modified: _modified } = resolveNameCasing(options, column.name); const name = `${fieldPrefix}${_name}`; + const modified = fieldPrefix !== '' || _modified; const builtinType = provider.getBuiltinType(column.datatype); @@ -198,21 +209,22 @@ export function syncTable({ typeBuilder.setArray(builtinType.isArray); typeBuilder.setOptional(column.nullable); - if (builtinType.type !== 'Unsupported') { - typeBuilder.setType(builtinType.type); - } else { - typeBuilder.setUnsupported((unsupportedBuilder) => - unsupportedBuilder.setValue((lt) => lt.StringLiteral.setValue(column.datatype)), - ); - } - if (column.options.length > 0) { const ref = model.declarations.find((d) => isEnum(d) && getDbName(d) === column.datatype) as | Enum | undefined; - if (ref) { - typeBuilder.setReference(ref); + if (!ref) { + throw new Error(`Enum ${column.datatype} not found`); + } + typeBuilder.setReference(ref); + } else { + if (builtinType.type !== 'Unsupported') { + typeBuilder.setType(builtinType.type); + } else { + typeBuilder.setUnsupported((unsupportedBuilder) => + unsupportedBuilder.setValue((lt) => lt.StringLiteral.setValue(column.datatype)), + ); } } @@ -220,14 +232,12 @@ export function syncTable({ }); if (column.default) { - const defaultValuesAttrs = column.default - ? 
provider.getDefaultValue({ - fieldName: column.name, - defaultValue: column.default, - services, - enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], - }) - : []; + const defaultValuesAttrs = provider.getDefaultValue({ + fieldName: column.name, + defaultValue: column.default, + services, + enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], + }); defaultValuesAttrs.forEach(builder.addAttribute.bind(builder)); } @@ -235,17 +245,31 @@ export function syncTable({ builder.addAttribute((b) => b.setDecl(idAttribute)); } - if (column.unique) + if (column.unique && !column.pk) { builder.addAttribute((b) => { b.setDecl(uniqueAttribute); if (column.unique_name) b.addArg((ab) => ab.StringLiteral.setValue(column.unique_name!), 'map'); return b; }); - if (modified) + } + if (modified) { builder.addAttribute((ab) => - ab.setDecl(fieldMapAttribute).addArg((ab) => ab.StringLiteral.setValue(column.name), 'name'), + ab.setDecl(fieldMapAttribute).addArg((ab) => ab.StringLiteral.setValue(column.name)), ); + } + + const dbAttr = services.shared.workspace.IndexManager.allElements('Attribute').find( + (d) => d.name.toLowerCase() === `@db.${column.datatype.toLowerCase()}`, + )?.node as Attribute | undefined; + //TODO: exclude default types like text in postgres + //because Zenstack string = text in postgres so unnecessary to map to default types + if (dbAttr && !['text'].includes(column.datatype)) { + const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); + if (column.length || column.precision) + dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(column.length! 
|| column.precision!)); + builder.addAttribute(dbAttrFactory); + } return builder; }); @@ -283,6 +307,11 @@ export function syncTable({ return arrayExpr; }), ); + } else { + modelFactory.addAttribute((a) => a.setDecl(getAttributeRef('@@ignore', services))); + modelFactory.comments.push( + '/// The underlying table does not contain a valid unique identifier and can therefore currently not be handled by Zenstack Client.', + ); } table.indexes.forEach((index) => { @@ -337,8 +366,8 @@ export function syncTable({ ); } } catch (_error: unknown) { - //Waiting to support multi-schema - //TODO: remove catch after multi-schema support is implemented + //Waiting to support multi-schema + //TODO: remove catch after multi-schema support is implemented } model.declarations.push(modelFactory.node); @@ -350,11 +379,16 @@ export function syncRelation({ model, relation, services, + selfRelation, + simmilarRelations, }: { model: Model; relation: Relation; services: ZModelServices; options: PullOptions; + //self included + simmilarRelations: number; + selfRelation: boolean; }) { const idAttribute = getAttributeRef('@id', services); const uniqueAttribute = getAttributeRef('@unique', services); @@ -362,6 +396,8 @@ export function syncRelation({ const fieldMapAttribute = getAttributeRef('@map', services); const tableMapAttribute = getAttributeRef('@@map', services); + const includeRelationName = selfRelation || simmilarRelations > 1; + if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) { throw new Error('Cannot find required attributes in the model.'); } @@ -382,11 +418,9 @@ export function syncRelation({ const targetField = targetModel.fields.find((f) => getDbName(f) === relation.references.column); if (!targetField) return; - //TODO: Finish relation sync - const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? 
'_' : ''; - const relationName = `${sourceModel.name}_${relation.column}To${targetModel.name}_${relation.references.column}`; + const relationName = `${relation.table}${simmilarRelations > 1 ? `_${relation.column}` : ''}To${relation.references.table}`; let sourceFieldName = `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`; if (sourceModel.fields.find((f) => f.name === sourceFieldName)) { @@ -402,14 +436,38 @@ export function syncRelation({ .setArray(relation.type === 'many') .setReference(targetModel), ) - .addAttribute((ab) => - ab - .setDecl(relationAttribute) - .addArg((ab) => ab.StringLiteral.setValue(relationName)) - .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(sourceField)), 'fields') - .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(targetField)), 'references') - .addArg((ab) => ab.StringLiteral.setValue(relation.fk_name), 'map'), - ); + .addAttribute((ab) => { + ab.setDecl(relationAttribute); + if (includeRelationName) ab.addArg((ab) => ab.StringLiteral.setValue(relationName)); + ab.addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(sourceField)), 'fields').addArg( + (ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(targetField)), + 'references', + ); + + if (relation.foreign_key_on_delete && relation.foreign_key_on_delete !== 'SET NULL') { + const enumRef = getEnumRef('ReferentialAction', services); + if (!enumRef) throw new Error('ReferentialAction enum not found'); + const enumFieldRef = enumRef.fields.find( + (f) => f.name.toLowerCase() === relation.foreign_key_on_delete!.replace(/ /g, '').toLowerCase(), + ); + if (!enumFieldRef) throw new Error(`ReferentialAction ${relation.foreign_key_on_delete} not found`); + ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onDelete'); + } + + if (relation.foreign_key_on_update && relation.foreign_key_on_update !== 'SET NULL') { + const enumRef = 
getEnumRef('ReferentialAction', services); + if (!enumRef) throw new Error('ReferentialAction enum not found'); + const enumFieldRef = enumRef.fields.find( + (f) => f.name.toLowerCase() === relation.foreign_key_on_update!.replace(/ /g, '').toLowerCase(), + ); + if (!enumFieldRef) throw new Error(`ReferentialAction ${relation.foreign_key_on_update} not found`); + ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onUpdate'); + } + + ab.addArg((ab) => ab.StringLiteral.setValue(relation.fk_name), 'map'); + + return ab; + }); sourceModel.fields.push(sourceFieldFactory.node); @@ -427,8 +485,11 @@ export function syncRelation({ .setOptional(relation.references.type === 'one') .setArray(relation.references.type === 'many') .setReference(sourceModel), - ) - .addAttribute((ab) => ab.setDecl(relationAttribute).addArg((ab) => ab.StringLiteral.setValue(relationName))); + ); + if (includeRelationName) + targetFieldFactory.addAttribute((ab) => + ab.setDecl(relationAttribute).addArg((ab) => ab.StringLiteral.setValue(relationName)), + ); targetModel.fields.push(targetFieldFactory.node); } diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 73428d37e..30bb2602e 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -1,7 +1,7 @@ +import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { Client } from 'pg'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; -import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; export const postgresql: IntrospectionProvider = { getBuiltinType(type) { @@ -49,6 +49,7 @@ export const postgresql: IntrospectionProvider = { // dates/times case 'date': + case 'time': case 'timestamp': case 'timestamptz': return { type: 
'DateTime', isArray }; @@ -91,24 +92,63 @@ export const postgresql: IntrospectionProvider = { } return factories; } + if (val.startsWith('nextval(')) { + factories.push( + defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services))), + ); + return factories; + } + if (val.includes('(') && val.includes(')')) { + factories.push( + defaultAttr.addArg((a) => + a.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => + a.setValue((v) => v.StringLiteral.setValue(val)), + ), + ), + ); + return factories; + } if (val.includes('::')) { - const [enumValue, enumName] = val - .replace(/'|"/g, '') + const [value, type] = val + .replace(/'/g, '') .split('::') .map((s) => s.trim()) as [string, string]; - const enumDef = enums.find((e) => getDbName(e) === enumName); - if (!enumDef) { - return []; - } - const enumField = enumDef.fields.find((v) => getDbName(v) === enumValue); - if (!enumField) { - throw new Error( - `Enum value ${enumValue} not found in enum ${enumName} for default value ${defaultValue}`, - ); + switch (type) { + case 'character varying': + case 'uuid': + case 'json': + case 'jsonb': + if (value === 'NULL') return []; + factories.push(defaultAttr.addArg((a) => a.StringLiteral.setValue(value))); + break; + case 'real': + factories.push(defaultAttr.addArg((a) => a.NumberLiteral.setValue(value))); + break; + default: { + const enumDef = enums.find((e) => getDbName(e, true) === type); + if (!enumDef) { + factories.push( + defaultAttr.addArg((a) => + a.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => + a.setValue((v) => v.StringLiteral.setValue(val)), + ), + ), + ); + break; + } + const enumField = enumDef.fields.find((v) => getDbName(v) === value); + if (!enumField) { + throw new Error( + `Enum value ${value} not found in enum ${type} for default value ${defaultValue}`, + ); + } + + factories.push(defaultAttr.addArg((ab) => ab.ReferenceExpr.setTarget(enumField))); + 
break; + } } - factories.push(defaultAttr.addArg((ab) => ab.ReferenceExpr.setTarget(enumField))); return factories; } @@ -161,6 +201,8 @@ SELECT "att"."attname" AS "name", "typ"."typname" AS "datatype", "tns"."nspname" AS "datatype_schema", + "c"."character_maximum_length" AS "length", + COALESCE("c"."numeric_precision", "c"."datetime_precision") AS "precision", "fk_ns"."nspname" AS "foreign_key_schema", "fk_cls"."relname" AS "foreign_key_table", "fk_att"."attname" AS "foreign_key_column", @@ -234,10 +276,18 @@ SELECT ), '[]' ) AS "options" - FROM "pg_catalog"."pg_attribute" AS "att" - INNER JOIN "pg_catalog"."pg_type" AS "typ" ON "typ"."oid" = "att"."atttypid" - INNER JOIN "pg_catalog"."pg_namespace" AS "tns" ON "tns"."oid" = "typ"."typnamespace" - LEFT JOIN "pg_catalog"."pg_constraint" AS "pk_con" ON "pk_con"."contype" = 'p' + + FROM "pg_catalog"."pg_attribute" AS "att" + + INNER JOIN "pg_catalog"."pg_type" AS "typ" ON "typ"."oid" = "att"."atttypid" + + INNER JOIN "pg_catalog"."pg_namespace" AS "tns" ON "tns"."oid" = "typ"."typnamespace" + + LEFT JOIN "information_schema"."columns" AS "c" ON "c"."table_schema" = "ns"."nspname" + AND "c"."table_name" = "cls"."relname" + AND "c"."column_name" = "att"."attname" + LEFT JOIN "pg_catalog"."pg_constraint" AS "pk_con" ON "pk_con"."contype" = 'p' + AND "pk_con"."conrelid" = "cls"."oid" AND "att"."attnum" = ANY ("pk_con"."conkey") LEFT JOIN "pg_catalog"."pg_constraint" AS "fk_con" ON "fk_con"."contype" = 'f' diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index c03c39fcd..c04255d12 100644 --- a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -12,6 +12,8 @@ export interface IntrospectedTable { columns: { name: string; datatype: string; + length: number | null; + precision: number | null; datatype_schema: string; foreign_key_schema: string | null; foreign_key_table: string | null; diff --git 
a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index 160a3096e..87d6e0588 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -189,6 +189,8 @@ export const sqlite: IntrospectionProvider = { columns.push({ name: c.name, datatype: c.type || '', + length: null, + precision: null, datatype_schema: schema, foreign_key_schema: fk?.foreign_key_schema ?? null, foreign_key_table: fk?.foreign_key_table ?? null, diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index 33a6ace30..9f05219c0 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -9,6 +9,7 @@ import { isInvocationExpr, type Attribute, type Model, + StringLiteral, } from '@zenstackhq/language/ast'; import { getStringLiteral } from '@zenstackhq/language/utils'; import type { DataSourceProviderType } from '@zenstackhq/sdk/schema'; @@ -61,13 +62,39 @@ export function getDatasource(model: Model) { }; } -export function getDbName(decl: AbstractDeclaration | DataField | EnumField): string { +export function getDbName(decl: AbstractDeclaration | DataField | EnumField, includeSchema: boolean = false): string { if (!('attributes' in decl)) return decl.name; + + const schemaAttr = decl.attributes.find((a) => a.decl.ref?.name === '@@schema'); + const schemaAttrValue = schemaAttr?.args[0]?.value; + let schema: string; + if (schemaAttrValue?.$type !== 'StringLiteral') schema = 'public'; + if (!schemaAttr) schema = 'public'; + else schema = (schemaAttr.args[0]?.value as any)?.value as string; + + const formatName = (name: string) => `${schema && includeSchema ? 
`${schema}.` : ''}${name}`; + const nameAttr = decl.attributes.find((a) => a.decl.ref?.name === '@@map' || a.decl.ref?.name === '@map'); - if (!nameAttr) return decl.name; + if (!nameAttr) return formatName(decl.name); const attrValue = nameAttr.args[0]?.value; - if (attrValue?.$type !== 'StringLiteral') return decl.name; + if (attrValue?.$type !== 'StringLiteral') return formatName(decl.name); + + return formatName(attrValue.value); +} + +export function getRelationFkName(decl: DataField): string | undefined { + const relationAttr = decl?.attributes.find((a) => a.decl.ref?.name === '@relation'); + const schemaAttrValue = relationAttr?.args.find((a) => a.name === 'map')?.value as StringLiteral; + return schemaAttrValue?.value; +} + +export function getDbSchemaName(decl: DataModel | Enum): string { + const schemaAttr = decl.attributes.find((a) => a.decl.ref?.name === '@@schema'); + if (!schemaAttr) return 'public'; + const attrValue = schemaAttr.args[0]?.value; + + if (attrValue?.$type !== 'StringLiteral') return 'public'; return attrValue.value; } diff --git a/packages/language/res/stdlib.zmodel b/packages/language/res/stdlib.zmodel index 4f473ed78..cb604c74a 100644 --- a/packages/language/res/stdlib.zmodel +++ b/packages/language/res/stdlib.zmodel @@ -120,7 +120,7 @@ function dbgenerated(expr: String?): Any { /** * Checks if the field value contains the search string. By default, the search is case-sensitive, and * "LIKE" operator is used to match. If `caseInSensitive` is true, "ILIKE" operator is used if - * supported, otherwise it still falls back to "LIKE" and delivers whatever the database's + * supported, otherwise it still falls back to "LIKE" and delivers whatever the database's * behavior is. */ function contains(field: String, search: String, caseInSensitive: Boolean?): Boolean { @@ -135,7 +135,7 @@ function contains(field: String, search: String, caseInSensitive: Boolean?): Boo /** * Checks the field value starts with the search string. 
By default, the search is case-sensitive, and * "LIKE" operator is used to match. If `caseInSensitive` is true, "ILIKE" operator is used if - * supported, otherwise it still falls back to "LIKE" and delivers whatever the database's + * supported, otherwise it still falls back to "LIKE" and delivers whatever the database's * behavior is. */ function startsWith(field: String, search: String, caseInSensitive: Boolean?): Boolean { @@ -144,7 +144,7 @@ function startsWith(field: String, search: String, caseInSensitive: Boolean?): B /** * Checks if the field value ends with the search string. By default, the search is case-sensitive, and * "LIKE" operator is used to match. If `caseInSensitive` is true, "ILIKE" operator is used if - * supported, otherwise it still falls back to "LIKE" and delivers whatever the database's + * supported, otherwise it still falls back to "LIKE" and delivers whatever the database's * behavior is. */ function endsWith(field: String, search: String, caseInSensitive: Boolean?): Boolean { diff --git a/packages/language/src/zmodel-code-generator.ts b/packages/language/src/zmodel-code-generator.ts index 273a4b00e..c6059ebe6 100644 --- a/packages/language/src/zmodel-code-generator.ts +++ b/packages/language/src/zmodel-code-generator.ts @@ -100,7 +100,10 @@ export class ZModelCodeGenerator { @gen(Model) private _generateModel(ast: Model) { - return `${ast.imports.map((d) => this.generate(d)).join('\n')}${ast.imports.length > 0 ? '\n\n' : ''}${ast.declarations.map((d) => this.generate(d)).join('\n\n')}`; + return `${ast.imports.map((d) => this.generate(d)).join('\n')}${ast.imports.length > 0 ? '\n\n' : ''}${ast.declarations + .sort((d) => (d.$type === 'Enum' ? 
1 : 0)) + .map((d) => this.generate(d)) + .join('\n\n')}`; } @gen(DataSource) From 00a1d786dffdf22ef625735df5af9f5f5110fef6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 22 Oct 2025 01:00:50 +0200 Subject: [PATCH 17/83] fix: make ignore behave as it does in prisma with no index models --- packages/language/src/validators/datamodel-validator.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/language/src/validators/datamodel-validator.ts b/packages/language/src/validators/datamodel-validator.ts index 6c5d18ffd..d2fcd155d 100644 --- a/packages/language/src/validators/datamodel-validator.ts +++ b/packages/language/src/validators/datamodel-validator.ts @@ -44,13 +44,15 @@ export default class DataModelValidator implements AstValidator { const uniqueFields = allFields.filter((f) => f.attributes.find((attr) => attr.decl.ref?.name === '@unique')); const modelLevelIds = getModelIdFields(dm); const modelUniqueFields = getModelUniqueFields(dm); + const ignore = hasAttribute(dm, '@@ignore'); if ( !dm.isView && idFields.length === 0 && modelLevelIds.length === 0 && uniqueFields.length === 0 && - modelUniqueFields.length === 0 + modelUniqueFields.length === 0 && + !ignore ) { accept( 'error', From b4708fedcaf6d29ef964f8deb1260870d1ecbb83 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3%A1n=20Benj=C3=A1min?= Date: Wed, 22 Oct 2025 01:21:31 +0200 Subject: [PATCH 18/83] fix: lint fix --- packages/cli/src/actions/pull/index.ts | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index c857fe16e..1939b9fca 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,14 +1,13 @@ import type { ZModelServices } from '@zenstackhq/language'; -import { isEnum, type DataField, type DataModel, type Enum, type Model, Attribute } from '@zenstackhq/language/ast'; +import { Attribute, 
isEnum, type DataField, type DataModel, type Enum, type Model } from '@zenstackhq/language/ast'; import { - DataFieldFactory, - DataModelFactory, - EnumFactory, - ModelFactory, - DataFieldAttributeFactory, + DataFieldAttributeFactory, + DataFieldFactory, + DataModelFactory, + EnumFactory } from '@zenstackhq/language/factory'; import type { PullOptions } from '../db'; -import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider, Cascade } from './provider'; +import type { Cascade, IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider'; import { getAttributeRef, getDbName, getEnumRef } from './utils'; export function syncEnums({ From f946726d07b018ec89cb19358e24da37b3e8e330 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 22 Oct 2025 01:47:06 +0200 Subject: [PATCH 19/83] feat: make all format options configurable --- packages/cli/src/actions/db.ts | 14 +++++----- packages/cli/src/actions/pull/index.ts | 36 ++++++++++++-------------- packages/cli/src/actions/pull/utils.ts | 2 +- packages/cli/src/index.ts | 17 ++++++++++++ 4 files changed, 43 insertions(+), 26 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 8da323196..e48beb23f 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -17,10 +17,13 @@ type PushOptions = { export type PullOptions = { schema?: string; - out?: string; - naming?: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; - alwaysMap?: boolean; excludeSchemas?: string[]; + out?: string; + modelCasing: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; + fieldCasing: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; + alwaysMap: boolean; + quote: 'single' | 'double'; + indent: number; }; /** @@ -281,9 +284,8 @@ async function runPull(options: PullOptions) { } const generator = new ZModelCodeGenerator({ - //TODO: make configurable - quote: 'double', - indent: 2, + quote: options.quote, + indent: options.indent, }); 
if (options.out) { diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 1939b9fca..4bc7dd14d 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,10 +1,10 @@ import type { ZModelServices } from '@zenstackhq/language'; import { Attribute, isEnum, type DataField, type DataModel, type Enum, type Model } from '@zenstackhq/language/ast'; import { - DataFieldAttributeFactory, - DataFieldFactory, - DataModelFactory, - EnumFactory + DataFieldAttributeFactory, + DataFieldFactory, + DataModelFactory, + EnumFactory, } from '@zenstackhq/language/factory'; import type { PullOptions } from '../db'; import type { Cascade, IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider'; @@ -22,10 +22,10 @@ export function syncEnums({ options: PullOptions; }) { for (const dbEnum of dbEnums) { - const { modified, name } = resolveNameCasing(options, dbEnum.enum_type); + const { modified, name } = resolveNameCasing(options.modelCasing, dbEnum.enum_type); if (modified) console.log(`Mapping enum ${dbEnum.enum_type} to ${name}`); const factory = new EnumFactory().setName(name); - if (modified) + if (modified || options.alwaysMap) factory.addAttribute((builder) => builder .setDecl(getAttributeRef('@@map', services)) @@ -33,10 +33,10 @@ export function syncEnums({ ); dbEnum.values.forEach((v) => { - const { name, modified } = resolveNameCasing(options, v); + const { name, modified } = resolveNameCasing(options.fieldCasing, v); factory.addField((builder) => { builder.setName(name); - if (modified) + if (modified || options.alwaysMap) builder.addAttribute((builder) => builder .setDecl(getAttributeRef('@map', services)) @@ -64,10 +64,11 @@ export function syncEnums({ } } -function resolveNameCasing(options: PullOptions, originalName: string) { +function resolveNameCasing(casing: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none', originalName: string) { let name = originalName; + const 
fieldPrefix = /[0-9]/g.test(name.charAt(0)) ? '_' : ''; - switch (options.naming) { + switch (casing) { case 'pascal': name = toPascalCase(originalName); break; @@ -83,8 +84,8 @@ function resolveNameCasing(options: PullOptions, originalName: string) { } return { - modified: options.alwaysMap ? true : name !== originalName, - name, + modified: name !== originalName || fieldPrefix !== '', + name: `${fieldPrefix}${name}`, }; } @@ -163,13 +164,13 @@ export function syncTable({ } const relations: Relation[] = []; - const { name, modified } = resolveNameCasing({ ...options, naming: 'pascal' }, table.name); + const { name, modified } = resolveNameCasing(options.modelCasing, table.name); const multiPk = table.columns.filter((c) => c.pk).length > 1; const modelFactory = new DataModelFactory().setName(name).setIsView(table.type === 'view'); modelFactory.setContainer(model); - if (modified) { + if (modified || options.alwaysMap) { modelFactory.addAttribute((builder) => builder.setDecl(tableMapAttribute).addArg((argBuilder) => argBuilder.StringLiteral.setValue(table.name)), ); @@ -195,10 +196,7 @@ export function syncTable({ }); } - const fieldPrefix = /[0-9]/g.test(column.name.charAt(0)) ? 
'_' : ''; - const { name: _name, modified: _modified } = resolveNameCasing(options, column.name); - const name = `${fieldPrefix}${_name}`; - const modified = fieldPrefix !== '' || _modified; + const { name, modified } = resolveNameCasing(options.fieldCasing, column.name); const builtinType = provider.getBuiltinType(column.datatype); @@ -252,7 +250,7 @@ export function syncTable({ return b; }); } - if (modified) { + if (modified || options.alwaysMap) { builder.addAttribute((ab) => ab.setDecl(fieldMapAttribute).addArg((ab) => ab.StringLiteral.setValue(column.name)), ); diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index 9f05219c0..2b3b9b403 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -9,7 +9,7 @@ import { isInvocationExpr, type Attribute, type Model, - StringLiteral, + type StringLiteral, } from '@zenstackhq/language/ast'; import { getStringLiteral } from '@zenstackhq/language/utils'; import type { DataSourceProviderType } from '@zenstackhq/sdk/schema'; diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index 7905676f9..81d7b0d30 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -150,6 +150,23 @@ function createProgram() { .addOption(noVersionCheckOption) .addOption(new Option('-e, --exclude-schemas ', 'exclude specific schemas from introspection')) .addOption(new Option('-o, --out ', 'add custom output path for the introspected schema')) + .addOption( + new Option('--model-casing ', 'set the casing of generated models').default( + 'none', + ), + ) + .addOption( + new Option('--field-casing ', 'set the casing of generated fields').default( + 'none', + ), + ) + .addOption( + new Option('--always-map', 'always add @map and @@map attributes to models and fields').default(false), + ) + .addOption( + new Option('--quote ', 'set the quote style of generated schema files').default('double'), + ) + .addOption(new Option('--indent ', 
'set the indentation of the generated schema files').default(2)) .action((options) => dbAction('pull', options)); dbCommand From 75ce687a53ba81215fdbde5a09132e16c6f18233 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 22 Oct 2025 01:55:41 +0200 Subject: [PATCH 20/83] fix: lint fix --- packages/cli/src/actions/pull/index.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 4bc7dd14d..81a7e56ea 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -55,7 +55,8 @@ export function syncEnums({ .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), ); } - } catch (_error: unknown) { + } catch (error: any) { + if (error?.message !== `Declaration not found: @@schema`) throw error; //Waiting to support multi-schema //TODO: remove catch after multi-schema support is implemented } @@ -362,7 +363,8 @@ export function syncTable({ b.setDecl(getAttributeRef('@@schema', services)).addArg((a) => a.StringLiteral.setValue(table.schema)), ); } - } catch (_error: unknown) { + } catch (error: any) { + if (error?.message !== `Declaration not found: @@schema`) throw error; //Waiting to support multi-schema //TODO: remove catch after multi-schema support is implemented } From 9063c72746a596f85876bbc5adebc0f32dc48073 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 22 Oct 2025 02:53:37 +0200 Subject: [PATCH 21/83] feat: Handle the database type mapping --- packages/cli/src/actions/pull/index.ts | 23 ++++++++++++++--- .../src/actions/pull/provider/postgresql.ts | 25 ++++++++++++++++++- .../cli/src/actions/pull/provider/provider.ts | 1 + .../cli/src/actions/pull/provider/sqlite.ts | 24 ++++++++++++++++++ 4 files changed, 68 insertions(+), 5 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 81a7e56ea..75f54b449 
100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,5 +1,13 @@ import type { ZModelServices } from '@zenstackhq/language'; -import { Attribute, isEnum, type DataField, type DataModel, type Enum, type Model } from '@zenstackhq/language/ast'; +import { + Attribute, + isEnum, + type DataField, + type DataModel, + type Enum, + type Model, + type BuiltinType, +} from '@zenstackhq/language/ast'; import { DataFieldAttributeFactory, DataFieldFactory, @@ -260,9 +268,16 @@ export function syncTable({ const dbAttr = services.shared.workspace.IndexManager.allElements('Attribute').find( (d) => d.name.toLowerCase() === `@db.${column.datatype.toLowerCase()}`, )?.node as Attribute | undefined; - //TODO: exclude default types like text in postgres - //because Zenstack string = text in postgres so unnecessary to map to default types - if (dbAttr && !['text'].includes(column.datatype)) { + + const defaultDatabaseType = provider.getDefaultDatabaseType(builtinType.type as BuiltinType); + + if ( + dbAttr && + defaultDatabaseType && + (defaultDatabaseType.type !== column.datatype || + (defaultDatabaseType.precisition && + defaultDatabaseType.precisition !== (column.length || column.precision))) + ) { const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); if (column.length || column.precision) dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(column.length! 
|| column.precision!)); diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 30bb2602e..8e1457a3b 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -2,6 +2,7 @@ import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { Client } from 'pg'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; +import type { BuiltinType } from '@zenstackhq/language/ast'; export const postgresql: IntrospectionProvider = { getBuiltinType(type) { @@ -78,6 +79,28 @@ export const postgresql: IntrospectionProvider = { tables, }; }, + getDefaultDatabaseType(type: BuiltinType) { + switch (type) { + case 'String': + return { type: 'text' }; + case 'Boolean': + return { type: 'boolean' }; + case 'Int': + return { type: 'integer' }; + case 'BigInt': + return { type: 'bigint' }; + case 'Float': + return { type: 'double precision' }; + case 'Decimal': + return { type: 'decimal' }; + case 'DateTime': + return { type: 'timestamp', precisition: 3 }; + case 'Json': + return { type: 'jsonb' }; + case 'Bytes': + return { type: 'bytea' }; + } + }, getDefaultValue({ defaultValue, fieldName, services, enums }) { const val = defaultValue.trim(); const factories: DataFieldAttributeFactory[] = []; @@ -276,7 +299,7 @@ SELECT ), '[]' ) AS "options" - + FROM "pg_catalog"."pg_attribute" AS "att" INNER JOIN "pg_catalog"."pg_type" AS "typ" ON "typ"."oid" = "att"."atttypid" diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index c04255d12..0bdc2d671 100644 --- a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -64,6 +64,7 @@ export interface IntrospectionProvider { type: BuiltinType | 
'Unsupported'; isArray: boolean; }; + getDefaultDatabaseType(type: BuiltinType): { precisition?: number; type: string } | undefined; getDefaultValue(args: { fieldName: string; defaultValue: string; diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index 87d6e0588..5c3397600 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -1,3 +1,4 @@ +import type { BuiltinType } from '@zenstackhq/language/ast'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; // Note: We dynamically import better-sqlite3 inside the async function to avoid @@ -71,6 +72,29 @@ export const sqlite: IntrospectionProvider = { } }, + getDefaultDatabaseType(type: BuiltinType) { + switch (type) { + case 'String': + return { type: 'TEXT' }; + case 'Boolean': + return { type: 'INTEGER' }; + case 'Int': + return { type: 'INTEGER' }; + case 'BigInt': + return { type: 'INTEGER' }; + case 'Float': + return { type: 'REAL' }; + case 'Decimal': + return { type: 'DECIMAL' }; + case 'DateTime': + return { type: 'NUMERIC' }; + case 'Json': + return { type: 'JSONB' }; + case 'Bytes': + return { type: 'BLOB' }; + } + }, + async introspect(connectionString: string): Promise { const SQLite = (await import('better-sqlite3')).default; const db = new SQLite(connectionString, { readonly: true }); From e7290ce07a80f02f46625468dbb03eb8f6038620 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 12 Nov 2025 21:43:46 +0100 Subject: [PATCH 22/83] fix: catch up with feature updates - improve code styling - enable schema support for db pull --- packages/cli/package.json | 1 + packages/cli/src/actions/action-utils.ts | 39 ++++++++++--------- packages/cli/src/actions/db.ts | 24 +++++++----- packages/cli/src/actions/pull/index.ts | 38 +++++++----------- .../cli/src/actions/pull/provider/index.ts | 4 +- 
packages/cli/src/actions/pull/utils.ts | 18 ++++++++- packages/cli/src/index.ts | 1 - 7 files changed, 71 insertions(+), 54 deletions(-) diff --git a/packages/cli/package.json b/packages/cli/package.json index 5f7f2985c..a0992a523 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -38,6 +38,7 @@ "dependencies": { "@dotenvx/dotenvx": "^1.51.0", "@zenstackhq/common-helpers": "workspace:*", + "@zenstackhq/schema": "workspace:*", "@zenstackhq/language": "workspace:*", "@zenstackhq/orm": "workspace:*", "@zenstackhq/sdk": "workspace:*", diff --git a/packages/cli/src/actions/action-utils.ts b/packages/cli/src/actions/action-utils.ts index 033cbdd48..86d55baa6 100644 --- a/packages/cli/src/actions/action-utils.ts +++ b/packages/cli/src/actions/action-utils.ts @@ -1,5 +1,5 @@ -import { loadDocument } from '@zenstackhq/language'; -import { isDataSource } from '@zenstackhq/language/ast'; +import { type ZModelServices, loadDocument } from '@zenstackhq/language'; +import { type Model, isDataSource } from '@zenstackhq/language/ast'; import { PrismaSchemaGenerator } from '@zenstackhq/sdk'; import colors from 'colors'; import fs from 'node:fs'; @@ -41,8 +41,22 @@ export function getSchemaFile(file?: string) { } } -export async function loadSchemaDocument(schemaFile: string) { - const loadResult = await loadDocument(schemaFile); +export async function loadSchemaDocument( + schemaFile: string, + opts?: { keepImports?: boolean; returnServices?: false }, +): Promise; +export async function loadSchemaDocument( + schemaFile: string, + opts: { returnServices: true; keepImports?: boolean }, +): Promise<{ model: Model; services: ZModelServices }>; +export async function loadSchemaDocument( + schemaFile: string, + opts: { returnServices?: boolean; keepImports?: boolean } = {}, +) { + const returnServices = opts.returnServices || false; + const keepImports = opts.keepImports || false; + + const loadResult = await loadDocument(schemaFile, [], keepImports); if 
(!loadResult.success) { loadResult.errors.forEach((err) => { console.error(colors.red(err)); @@ -52,21 +66,10 @@ export async function loadSchemaDocument(schemaFile: string) { loadResult.warnings.forEach((warn) => { console.warn(colors.yellow(warn)); }); - return loadResult.model; -} -export async function loadSchemaDocumentWithServices(schemaFile: string) { - const loadResult = await loadDocument(schemaFile, [], true); - if (!loadResult.success) { - loadResult.errors.forEach((err) => { - console.error(colors.red(err)); - }); - throw new CliError('Schema contains errors. See above for details.'); - } - loadResult.warnings.forEach((warn) => { - console.warn(colors.yellow(warn)); - }); - return { services: loadResult.services, model: loadResult.model }; + if (returnServices) return { model: loadResult.model, services: loadResult.services }; + + return loadResult.model; } export function handleSubProcessError(err: unknown) { diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index e48beb23f..77f758ea0 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,5 +1,6 @@ -import { Model, Enum, DataModel, DataField } from '@zenstackhq/language/ast'; -import { ZModelCodeGenerator } from '@zenstackhq/sdk'; +import { config } from '@dotenvx/dotenvx'; +import { ZModelCodeGenerator } from '@zenstackhq/language'; +import { type DataField, DataModel, Enum, type Model } from '@zenstackhq/language/ast'; import fs from 'node:fs'; import path from 'node:path'; import { execPrisma } from '../utils/exec-utils'; @@ -7,7 +8,6 @@ import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, require import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; import { getDatasource, getDbName, getRelationFkName } from './pull/utils'; -import { config } from '@dotenvx/dotenvx'; type PushOptions = { schema?: string; @@ -17,7 +17,6 @@ type PushOptions = { export type 
PullOptions = { schema?: string; - excludeSchemas?: string[]; out?: string; modelCasing: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; fieldCasing: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; @@ -74,7 +73,7 @@ async function runPush(options: PushOptions) { async function runPull(options: PullOptions) { try { const schemaFile = getSchemaFile(options.schema); - const { model, services } = await loadSchemaDocumentWithServices(schemaFile); + const { model, services } = await loadSchemaDocument(schemaFile, { returnServices: true }); config(); const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql']; const datasource = getDatasource(model); @@ -94,8 +93,8 @@ async function runPull(options: PullOptions) { } const { enums: allEnums, tables: allTables } = await provider.introspect(datasource.url); - const enums = allEnums.filter((e) => !options.excludeSchemas?.includes(e.schema_name)); - const tables = allTables.filter((t) => !options.excludeSchemas?.includes(t.schema)); + const enums = allEnums.filter((e) => datasource.schemas.includes(e.schema_name)); + const tables = allTables.filter((t) => datasource.schemas.includes(t.schema)); const newModel: Model = { $type: 'Model', @@ -106,11 +105,18 @@ async function runPull(options: PullOptions) { imports: [], }; - syncEnums({ dbEnums: enums, model: newModel, services, options }); + syncEnums({ dbEnums: enums, model: newModel, services, options, defaultSchema: datasource.defaultSchema }); const resolvedRelations: Relation[] = []; for (const table of tables) { - const relations = syncTable({ table, model: newModel, provider, services, options }); + const relations = syncTable({ + table, + model: newModel, + provider, + services, + options, + defaultSchema: datasource.defaultSchema, + }); resolvedRelations.push(...relations); } diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 75f54b449..19d86a22b 100644 --- a/packages/cli/src/actions/pull/index.ts +++ 
b/packages/cli/src/actions/pull/index.ts @@ -1,7 +1,7 @@ import type { ZModelServices } from '@zenstackhq/language'; import { - Attribute, isEnum, + type Attribute, type DataField, type DataModel, type Enum, @@ -23,11 +23,13 @@ export function syncEnums({ model, options, services, + defaultSchema, }: { dbEnums: IntrospectedEnum[]; model: Model; services: ZModelServices; options: PullOptions; + defaultSchema: string; }) { for (const dbEnum of dbEnums) { const { modified, name } = resolveNameCasing(options.modelCasing, dbEnum.enum_type); @@ -55,18 +57,12 @@ export function syncEnums({ }); }); - try { - if (dbEnum.schema_name && dbEnum.schema_name !== '' && dbEnum.schema_name !== 'public') { - factory.addAttribute((b) => - b - .setDecl(getAttributeRef('@@schema', services)) - .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), - ); - } - } catch (error: any) { - if (error?.message !== `Declaration not found: @@schema`) throw error; - //Waiting to support multi-schema - //TODO: remove catch after multi-schema support is implemented + if (dbEnum.schema_name && dbEnum.schema_name !== '' && dbEnum.schema_name !== defaultSchema) { + factory.addAttribute((b) => + b + .setDecl(getAttributeRef('@@schema', services)) + .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), + ); } model.declarations.push(factory.get({ $container: model })); @@ -143,12 +139,14 @@ export function syncTable({ table, services, options, + defaultSchema, }: { table: IntrospectedTable; model: Model; provider: IntrospectionProvider; services: ZModelServices; options: PullOptions; + defaultSchema: string; }) { const idAttribute = getAttributeRef('@id', services); const modelIdAttribute = getAttributeRef('@@id', services); @@ -372,16 +370,10 @@ export function syncTable({ ); }); - try { - if (table.schema && table.schema !== '' && table.schema !== 'public') { - modelFactory.addAttribute((b) => - b.setDecl(getAttributeRef('@@schema', services)).addArg((a) => 
a.StringLiteral.setValue(table.schema)), - ); - } - } catch (error: any) { - if (error?.message !== `Declaration not found: @@schema`) throw error; - //Waiting to support multi-schema - //TODO: remove catch after multi-schema support is implemented + if (table.schema && table.schema !== '' && table.schema !== defaultSchema) { + modelFactory.addAttribute((b) => + b.setDecl(getAttributeRef('@@schema', services)).addArg((a) => a.StringLiteral.setValue(table.schema)), + ); } model.declarations.push(modelFactory.node); diff --git a/packages/cli/src/actions/pull/provider/index.ts b/packages/cli/src/actions/pull/provider/index.ts index 4c9a0fe8d..e712ac983 100644 --- a/packages/cli/src/actions/pull/provider/index.ts +++ b/packages/cli/src/actions/pull/provider/index.ts @@ -1,9 +1,11 @@ +import type { DataSourceProviderType } from '@zenstackhq/schema'; export * from './provider'; import { postgresql } from './postgresql'; +import type { IntrospectionProvider } from './provider'; import { sqlite } from './sqlite'; -export const providers = { +export const providers: Record = { postgresql, sqlite, }; diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index 2b3b9b403..05aa31acc 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -11,8 +11,8 @@ import { type Model, type StringLiteral, } from '@zenstackhq/language/ast'; -import { getStringLiteral } from '@zenstackhq/language/utils'; -import type { DataSourceProviderType } from '@zenstackhq/sdk/schema'; +import { getLiteralArray, getStringLiteral } from '@zenstackhq/language/utils'; +import type { DataSourceProviderType } from '@zenstackhq/schema'; import type { Reference } from 'langium'; export function getAttribute(model: Model, attrName: string) { @@ -53,12 +53,26 @@ export function getDatasource(model: Model) { throw new Error('The url field must be a string literal or an env().'); } + const defaultSchemaField = 
datasource.fields.find((f) => f.name === 'defaultSchema'); + const defaultSchema = (defaultSchemaField && getStringLiteral(defaultSchemaField.value)) || 'public'; + + const schemasField = datasource.fields.find((f) => f.name === 'schemas'); + const schemas = + (schemasField && + getLiteralArray(schemasField.value) + ?.map(getStringLiteral) + .filter((s) => s !== undefined)) || + []; + return { name: datasource.name, provider: getStringLiteral( datasource.fields.find((f) => f.name === 'provider')?.value, ) as DataSourceProviderType, url, + defaultSchema, + schemas, + allSchemas: [defaultSchema, ...schemas], }; } diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index 81d7b0d30..e454e2a2a 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -148,7 +148,6 @@ function createProgram() { .description('Introspect your database.') .addOption(schemaOption) .addOption(noVersionCheckOption) - .addOption(new Option('-e, --exclude-schemas ', 'exclude specific schemas from introspection')) .addOption(new Option('-o, --out ', 'add custom output path for the introspected schema')) .addOption( new Option('--model-casing ', 'set the casing of generated models').default( From 83c277613a753bd823c6e171b347da968647ccfd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 21 Nov 2025 01:11:08 +0100 Subject: [PATCH 23/83] fix: add sqlite e2e test and fix some bugs --- packages/cli/package.json | 1 + packages/cli/src/actions/db.ts | 178 +++++++++--------- packages/cli/src/actions/pull/index.ts | 159 +++++++++------- .../src/actions/pull/provider/postgresql.ts | 10 +- .../cli/src/actions/pull/provider/provider.ts | 3 + .../cli/src/actions/pull/provider/sqlite.ts | 137 +++++++------- packages/cli/src/actions/pull/utils.ts | 5 + packages/cli/src/index.ts | 2 +- packages/cli/src/test.ts | 9 + packages/cli/test/db/pull.test.ts | 96 ++++++++++ packages/cli/test/db/push.test.ts | 18 ++ pnpm-lock.yaml | 123 ++++++------ 12 files 
changed, 454 insertions(+), 287 deletions(-) create mode 100644 packages/cli/src/test.ts create mode 100644 packages/cli/test/db/pull.test.ts create mode 100644 packages/cli/test/db/push.test.ts diff --git a/packages/cli/package.json b/packages/cli/package.json index a0992a523..7a109f5e6 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -38,6 +38,7 @@ "dependencies": { "@dotenvx/dotenvx": "^1.51.0", "@zenstackhq/common-helpers": "workspace:*", + "@zenstackhq/language": "workspace:*", "@zenstackhq/schema": "workspace:*", "@zenstackhq/language": "workspace:*", "@zenstackhq/orm": "workspace:*", diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 77f758ea0..6ea1bc08f 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -74,10 +74,11 @@ async function runPull(options: PullOptions) { try { const schemaFile = getSchemaFile(options.schema); const { model, services } = await loadSchemaDocument(schemaFile, { returnServices: true }); - config(); + config({ + ignore: ['MISSING_ENV_FILE'], + }); const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql']; const datasource = getDatasource(model); - if (!datasource) { throw new Error('No datasource found in the schema.'); } @@ -91,10 +92,14 @@ async function runPull(options: PullOptions) { if (!provider) { throw new Error(`No introspection provider found for: ${datasource.provider}`); } - + console.log('Starting to introspect the database...'); const { enums: allEnums, tables: allTables } = await provider.introspect(datasource.url); - const enums = allEnums.filter((e) => datasource.schemas.includes(e.schema_name)); - const tables = allTables.filter((t) => datasource.schemas.includes(t.schema)); + const enums = provider.isSupportedFeature('Schema') + ? allEnums.filter((e) => datasource.schemas.includes(e.schema_name)) + : allEnums; + const tables = provider.isSupportedFeature('Schema') + ? 
allTables.filter((t) => datasource.schemas.includes(t.schema)) + : allTables; const newModel: Model = { $type: 'Model', @@ -104,8 +109,15 @@ async function runPull(options: PullOptions) { declarations: [...model.declarations.filter((d) => ['DataSource'].includes(d.$type))], imports: [], }; - - syncEnums({ dbEnums: enums, model: newModel, services, options, defaultSchema: datasource.defaultSchema }); + syncEnums({ + dbEnums: enums, + model: newModel, + services, + options, + defaultSchema: datasource.defaultSchema, + oldModel: model, + provider, + }); const resolvedRelations: Relation[] = []; for (const table of tables) { @@ -116,21 +128,23 @@ async function runPull(options: PullOptions) { services, options, defaultSchema: datasource.defaultSchema, + oldModel: model, }); resolvedRelations.push(...relations); } - + // sync relation fields for (const relation of resolvedRelations) { const simmilarRelations = resolvedRelations.filter((rr) => { return ( - (rr.schema === relation.schema && + rr !== relation && + ((rr.schema === relation.schema && rr.table === relation.table && rr.references.schema === relation.references.schema && rr.references.table === relation.references.table) || - (rr.schema === relation.references.schema && - rr.column === relation.references.column && - rr.references.schema === relation.schema && - rr.references.table === relation.table) + (rr.schema === relation.references.schema && + rr.column === relation.references.column && + rr.references.schema === relation.schema && + rr.references.table === relation.table)) ); }).length; const selfRelation = @@ -151,6 +165,7 @@ async function runPull(options: PullOptions) { .toArray(); const docsSet = new Set(docs.map((d) => d.uri.toString())); + //Delete models services.shared.workspace.IndexManager.allElements('DataModel', docsSet) .filter( (declaration) => @@ -162,32 +177,35 @@ async function runPull(options: PullOptions) { model.declarations.splice(index, 1); console.log(`Delete model ${decl.name}`); 
}); - services.shared.workspace.IndexManager.allElements('Enum', docsSet) - .filter( - (declaration) => - !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)), - ) - .forEach((decl) => { - const model = decl.node!.$container as Model; - const index = model.declarations.findIndex((d) => d === decl.node); - model.declarations.splice(index, 1); - console.log(`Delete enum ${decl.name}`); - }); + // Delete Enums + if (provider.isSupportedFeature('NativeEnum')) + services.shared.workspace.IndexManager.allElements('Enum', docsSet) + .filter( + (declaration) => + !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)), + ) + .forEach((decl) => { + const model = decl.node!.$container as Model; + const index = model.declarations.findIndex((d) => d === decl.node); + model.declarations.splice(index, 1); + console.log(`Delete enum ${decl.name}`); + }); + // newModel.declarations .filter((d) => [DataModel, Enum].includes(d.$type)) .forEach((_declaration) => { - const declaration = _declaration as DataModel | Enum; + const newDataModel = _declaration as DataModel | Enum; const declarations = services.shared.workspace.IndexManager.allElements( - declaration.$type, + newDataModel.$type, docsSet, ).toArray(); - const originalModel = declarations.find((d) => getDbName(d.node as any) === getDbName(declaration)) + const originalDataModel = declarations.find((d) => getDbName(d.node as any) === getDbName(newDataModel)) ?.node as DataModel | Enum | undefined; - if (!originalModel) { - model.declarations.push(declaration); - (declaration as any).$container = model; - declaration.fields.forEach((f) => { + if (!originalDataModel) { + model.declarations.push(newDataModel); + (newDataModel as any).$container = model; + newDataModel.fields.forEach((f) => { if (f.$type === 'DataField' && f.type.reference?.ref) { const ref = declarations.find( (d) => getDbName(d.node as any) === getDbName(f.type.reference!.ref as any), @@ -198,19 
+216,33 @@ async function runPull(options: PullOptions) { return; } - declaration.fields.forEach((f) => { - const originalField = originalModel.fields.find( - (d) => + newDataModel.fields.forEach((f) => { + const originalFields = originalDataModel.fields.filter((d) => { + return ( getDbName(d) === getDbName(f) || (getRelationFkName(d as any) === getRelationFkName(f as any) && !!getRelationFkName(d as any) && - !!getRelationFkName(f as any)), - ); + !!getRelationFkName(f as any)) || + (f.$type === 'DataField' && + d.$type === 'DataField' && + f.type.reference?.ref && + d.type.reference?.ref && + getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref)) + ); + }); + if (originalFields.length > 1) { + console.warn( + `Found more original fields, need to tweak the search algorithm. ${originalDataModel.name}->[${originalFields.map((of) => of.name).join(', ')}](${f.name})`, + ); + return; + } + const originalField = originalFields.at(0); + Object.freeze(originalField); if (!originalField) { - //console.log(`Added field ${f.name} to ${originalModel.name}`); - (f as any).$container = originalModel; - originalModel.fields.push(f as any); + console.log(`Added field ${f.name} to ${originalDataModel.name}`); + (f as any).$container = originalDataModel; + originalDataModel.fields.push(f as any); if (f.$type === 'DataField' && f.type.reference?.ref) { const ref = declarations.find( (d) => getDbName(d.node as any) === getDbName(f.type.reference!.ref as any), @@ -222,66 +254,42 @@ async function runPull(options: PullOptions) { } return; } - - if (originalField.$type === 'DataField') { - const field = f as DataField; - originalField.type = field.type; - if (field.type.reference) { - const ref = declarations.find( - (d) => getDbName(d.node as any) === getDbName(field.type.reference!.ref as any), - )?.node as DataModel | undefined; - if (ref) { - (field.type.reference.$refText as any) = ref.name; - (field.type.reference.ref as any) = ref; - } - } - - 
(originalField.type.$container as any) = originalField; - } - - f.attributes.forEach((attr) => { - const originalAttribute = originalField.attributes.find( - (d) => d.decl.$refText === attr.decl.$refText, - ); - - if (!originalAttribute) { - //console.log(`Added Attribute ${attr.decl.$refText} to ${f.name}`); - (f as any).$container = originalField; - originalField.attributes.push(attr as any); - return; - } - - originalAttribute.args = attr.args; - attr.args.forEach((a) => { - (a.$container as any) = originalAttribute; - }); - }); - + if (f.name === 'profiles') console.log(f.attributes.length); originalField.attributes - .filter((attr) => !f.attributes.find((d) => d.decl.$refText === attr.decl.$refText)) + .filter( + (attr) => + !f.attributes.find((d) => d.decl.$refText === attr.decl.$refText) && + !['@map', '@@map', '@default', '@updatedAt'].includes(attr.decl.$refText), + ) .forEach((attr) => { const field = attr.$container; const index = field.attributes.findIndex((d) => d === attr); field.attributes.splice(index, 1); - //console.log(`Delete attribute from field:${field.name} ${attr.decl.$refText}`); + console.log(`Delete attribute from field:${field.name} ${attr.decl.$refText}`); }); }); - originalModel.fields + originalDataModel.fields .filter( (f) => - !declaration.fields.find( - (d) => + !newDataModel.fields.find((d) => { + return ( getDbName(d) === getDbName(f) || (getRelationFkName(d as any) === getRelationFkName(f as any) && !!getRelationFkName(d as any) && - !!getRelationFkName(f as any)), - ), + !!getRelationFkName(f as any)) || + (f.$type === 'DataField' && + d.$type === 'DataField' && + f.type.reference?.ref && + d.type.reference?.ref && + getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref)) + ); + }), ) .forEach((f) => { - const model = f.$container; - const index = model.fields.findIndex((d) => d === f); - model.fields.splice(index, 1); - //console.log(`Delete field ${f.name}`); + const _model = f.$container; + const index = 
_model.fields.findIndex((d) => d === f); + _model.fields.splice(index, 1); + console.log(`Delete field ${f.name}`); }); }); diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 19d86a22b..60c192045 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -15,57 +15,73 @@ import { EnumFactory, } from '@zenstackhq/language/factory'; import type { PullOptions } from '../db'; -import type { Cascade, IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider'; +import { type Cascade, type IntrospectedEnum, type IntrospectedTable, type IntrospectionProvider } from './provider'; import { getAttributeRef, getDbName, getEnumRef } from './utils'; export function syncEnums({ dbEnums, model, + oldModel, + provider, options, services, defaultSchema, }: { dbEnums: IntrospectedEnum[]; model: Model; + oldModel: Model; + provider: IntrospectionProvider; services: ZModelServices; options: PullOptions; defaultSchema: string; }) { - for (const dbEnum of dbEnums) { - const { modified, name } = resolveNameCasing(options.modelCasing, dbEnum.enum_type); - if (modified) console.log(`Mapping enum ${dbEnum.enum_type} to ${name}`); - const factory = new EnumFactory().setName(name); - if (modified || options.alwaysMap) - factory.addAttribute((builder) => - builder - .setDecl(getAttributeRef('@@map', services)) - .addArg((argBuilder) => argBuilder.StringLiteral.setValue(dbEnum.enum_type)), - ); + if (provider.isSupportedFeature('NativeEnum')) { + for (const dbEnum of dbEnums) { + const { modified, name } = resolveNameCasing(options.modelCasing, dbEnum.enum_type); + if (modified) console.log(`Mapping enum ${dbEnum.enum_type} to ${name}`); + const factory = new EnumFactory().setName(name); + if (modified || options.alwaysMap) + factory.addAttribute((builder) => + builder + .setDecl(getAttributeRef('@@map', services)) + .addArg((argBuilder) => 
argBuilder.StringLiteral.setValue(dbEnum.enum_type)), + ); - dbEnum.values.forEach((v) => { - const { name, modified } = resolveNameCasing(options.fieldCasing, v); - factory.addField((builder) => { - builder.setName(name); - if (modified || options.alwaysMap) - builder.addAttribute((builder) => - builder - .setDecl(getAttributeRef('@map', services)) - .addArg((argBuilder) => argBuilder.StringLiteral.setValue(v)), - ); - - return builder; + dbEnum.values.forEach((v) => { + const { name, modified } = resolveNameCasing(options.fieldCasing, v); + factory.addField((builder) => { + builder.setName(name); + if (modified || options.alwaysMap) + builder.addAttribute((builder) => + builder + .setDecl(getAttributeRef('@map', services)) + .addArg((argBuilder) => argBuilder.StringLiteral.setValue(v)), + ); + + return builder; + }); }); - }); - if (dbEnum.schema_name && dbEnum.schema_name !== '' && dbEnum.schema_name !== defaultSchema) { - factory.addAttribute((b) => - b - .setDecl(getAttributeRef('@@schema', services)) - .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), - ); - } + if (dbEnum.schema_name && dbEnum.schema_name !== '' && dbEnum.schema_name !== defaultSchema) { + factory.addAttribute((b) => + b + .setDecl(getAttributeRef('@@schema', services)) + .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), + ); + } - model.declarations.push(factory.get({ $container: model })); + model.declarations.push(factory.get({ $container: model })); + } + } else { + oldModel.declarations + .filter((d) => isEnum(d)) + .forEach((d) => { + const factory = new EnumFactory().setName(d.name); + d.fields.forEach((v) => { + factory.addField((builder) => builder.setName(v.name)); + }); + model.declarations.push(factory.get({ $container: model })); + }); } } @@ -140,9 +156,11 @@ export function syncTable({ services, options, defaultSchema, + oldModel, }: { table: IntrospectedTable; model: Model; + oldModel: Model; provider: IntrospectionProvider; services: ZModelServices; 
options: PullOptions; @@ -182,7 +200,6 @@ export function syncTable({ builder.setDecl(tableMapAttribute).addArg((argBuilder) => argBuilder.StringLiteral.setValue(table.name)), ); } - table.columns.forEach((column) => { if (column.foreign_key_table) { relations.push({ @@ -369,7 +386,6 @@ export function syncTable({ .addArg((argBuilder) => argBuilder.StringLiteral.setValue(index.name), 'map'), ); }); - if (table.schema && table.schema !== '' && table.schema !== defaultSchema) { modelFactory.addAttribute((b) => b.setDecl(getAttributeRef('@@schema', services)).addArg((a) => a.StringLiteral.setValue(table.schema)), @@ -377,7 +393,6 @@ export function syncTable({ } model.declarations.push(modelFactory.node); - return relations; } @@ -427,7 +442,10 @@ export function syncRelation({ const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? '_' : ''; const relationName = `${relation.table}${simmilarRelations > 1 ? `_${relation.column}` : ''}To${relation.references.table}`; - let sourceFieldName = `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`; + let sourceFieldName = + simmilarRelations > 0 + ? 
`${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` + : targetModel.name; if (sourceModel.fields.find((f) => f.name === sourceFieldName)) { sourceFieldName = `${sourceFieldName}To${targetModel.name.charAt(0).toLowerCase()}${targetModel.name.slice(1)}_${relation.references.column}`; @@ -441,47 +459,47 @@ export function syncRelation({ .setOptional(relation.nullable) .setArray(relation.type === 'many') .setReference(targetModel), - ) - .addAttribute((ab) => { - ab.setDecl(relationAttribute); - if (includeRelationName) ab.addArg((ab) => ab.StringLiteral.setValue(relationName)); - ab.addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(sourceField)), 'fields').addArg( - (ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(targetField)), - 'references', - ); + ); + sourceFieldFactory.addAttribute((ab) => { + ab.setDecl(relationAttribute); + if (includeRelationName) ab.addArg((ab) => ab.StringLiteral.setValue(relationName)); + ab.addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(sourceField)), 'fields').addArg( + (ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(targetField)), + 'references', + ); - if (relation.foreign_key_on_delete && relation.foreign_key_on_delete !== 'SET NULL') { - const enumRef = getEnumRef('ReferentialAction', services); - if (!enumRef) throw new Error('ReferentialAction enum not found'); - const enumFieldRef = enumRef.fields.find( - (f) => f.name.toLowerCase() === relation.foreign_key_on_delete!.replace(/ /g, '').toLowerCase(), - ); - if (!enumFieldRef) throw new Error(`ReferentialAction ${relation.foreign_key_on_delete} not found`); - ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onDelete'); - } + if (relation.foreign_key_on_delete && relation.foreign_key_on_delete !== 'SET NULL') { + const enumRef = getEnumRef('ReferentialAction', services); + if (!enumRef) throw new Error('ReferentialAction enum not 
found'); + const enumFieldRef = enumRef.fields.find( + (f) => f.name.toLowerCase() === relation.foreign_key_on_delete!.replace(/ /g, '').toLowerCase(), + ); + if (!enumFieldRef) throw new Error(`ReferentialAction ${relation.foreign_key_on_delete} not found`); + ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onDelete'); + } - if (relation.foreign_key_on_update && relation.foreign_key_on_update !== 'SET NULL') { - const enumRef = getEnumRef('ReferentialAction', services); - if (!enumRef) throw new Error('ReferentialAction enum not found'); - const enumFieldRef = enumRef.fields.find( - (f) => f.name.toLowerCase() === relation.foreign_key_on_update!.replace(/ /g, '').toLowerCase(), - ); - if (!enumFieldRef) throw new Error(`ReferentialAction ${relation.foreign_key_on_update} not found`); - ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onUpdate'); - } + if (relation.foreign_key_on_update && relation.foreign_key_on_update !== 'SET NULL') { + const enumRef = getEnumRef('ReferentialAction', services); + if (!enumRef) throw new Error('ReferentialAction enum not found'); + const enumFieldRef = enumRef.fields.find( + (f) => f.name.toLowerCase() === relation.foreign_key_on_update!.replace(/ /g, '').toLowerCase(), + ); + if (!enumFieldRef) throw new Error(`ReferentialAction ${relation.foreign_key_on_update} not found`); + ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onUpdate'); + } - ab.addArg((ab) => ab.StringLiteral.setValue(relation.fk_name), 'map'); + if (relation.fk_name) ab.addArg((ab) => ab.StringLiteral.setValue(relation.fk_name), 'map'); - return ab; - }); + return ab; + }); sourceModel.fields.push(sourceFieldFactory.node); const oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? '_' : ''; const oppositeFieldName = - relation.type === 'one' - ? 
`${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}s` - : `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`; + simmilarRelations > 0 + ? `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` + : sourceModel.name; const targetFieldFactory = new DataFieldFactory() .setContainer(targetModel) @@ -498,4 +516,9 @@ export function syncRelation({ ); targetModel.fields.push(targetFieldFactory.node); + + targetModel.fields.sort((a, b) => { + if (a.type.reference && b.type.reference) return 0; + return a.name.localeCompare(b.name); + }); } diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 8e1457a3b..958b0930f 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -1,10 +1,18 @@ +import type { BuiltinType } from '@zenstackhq/language/ast'; import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { Client } from 'pg'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; -import type { BuiltinType } from '@zenstackhq/language/ast'; export const postgresql: IntrospectionProvider = { + isSupportedFeature(feature) { + switch (feature) { + case 'Schema': + return true; + default: + return false; + } + }, getBuiltinType(type) { const t = (type || '').toLowerCase(); diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index 0bdc2d671..252a8a300 100644 --- a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -58,6 +58,8 @@ export type IntrospectedSchema = { enums: 
IntrospectedEnum[]; }; +export type DatabaseFeature = 'Schema' | 'NativeEnum'; + export interface IntrospectionProvider { introspect(connectionString: string): Promise; getBuiltinType(type: string): { @@ -71,4 +73,5 @@ export interface IntrospectionProvider { services: ZModelServices; enums: Enum[]; }): DataFieldAttributeFactory[]; + isSupportedFeature(feature: DatabaseFeature): boolean; } diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index 5c3397600..e940b3595 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -1,98 +1,52 @@ import type { BuiltinType } from '@zenstackhq/language/ast'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; +import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; +import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; // Note: We dynamically import better-sqlite3 inside the async function to avoid // requiring it at module load time for environments that don't use SQLite. 
export const sqlite: IntrospectionProvider = { + isSupportedFeature(feature) { + switch (feature) { + case 'Schema': + case 'NativeEnum': + default: + return false; + } + }, getBuiltinType(type) { const t = (type || '').toLowerCase().trim(); - // SQLite has no array types const isArray = false; - switch (t) { - // integers - case 'int': case 'integer': - case 'tinyint': - case 'smallint': - case 'mediumint': return { type: 'Int', isArray }; + case 'text': + return { type: 'String', isArray }; case 'bigint': return { type: 'BigInt', isArray }; - - // decimals and floats + case 'blob': + return { type: 'Bytes', isArray }; + case 'real': + return { type: 'Float', isArray }; case 'numeric': case 'decimal': return { type: 'Decimal', isArray }; - case 'real': - case 'double': - case 'double precision': - case 'float': - return { type: 'Float', isArray }; - - // boolean (SQLite stores as integer 0/1, but commonly typed as BOOLEAN) - case 'bool': - case 'boolean': - return { type: 'Boolean', isArray }; - - // strings - case 'text': - case 'varchar': - case 'character varying': - case 'char': - case 'character': - case 'clob': - case 'uuid': // often stored as TEXT - return { type: 'String', isArray }; - - // dates/times (stored as TEXT/REAL/INTEGER, but commonly typed as DATE/DATETIME) - case 'date': case 'datetime': return { type: 'DateTime', isArray }; - - // binary - case 'blob': - return { type: 'Bytes', isArray }; - - // json (not a native type, but commonly used) - case 'json': + case 'jsonb': return { type: 'Json', isArray }; - + case 'boolean': + return { type: 'Boolean', isArray }; default: { - // Fallbacks based on SQLite type affinity rules - if (t.includes('int')) return { type: 'Int', isArray }; - if (t.includes('char') || t.includes('clob') || t.includes('text')) return { type: 'String', isArray }; - if (t.includes('blob')) return { type: 'Bytes', isArray }; - if (t.includes('real') || t.includes('floa') || t.includes('doub')) return { type: 'Float', isArray 
}; - if (t.includes('dec') || t.includes('num')) return { type: 'Decimal', isArray }; return { type: 'Unsupported' as const, isArray }; } } }, - getDefaultDatabaseType(type: BuiltinType) { - switch (type) { - case 'String': - return { type: 'TEXT' }; - case 'Boolean': - return { type: 'INTEGER' }; - case 'Int': - return { type: 'INTEGER' }; - case 'BigInt': - return { type: 'INTEGER' }; - case 'Float': - return { type: 'REAL' }; - case 'Decimal': - return { type: 'DECIMAL' }; - case 'DateTime': - return { type: 'NUMERIC' }; - case 'Json': - return { type: 'JSONB' }; - case 'Bytes': - return { type: 'BLOB' }; - } + getDefaultDatabaseType() { + return undefined; }, async introspect(connectionString: string): Promise { @@ -114,7 +68,7 @@ export const sqlite: IntrospectionProvider = { for (const t of tablesRaw) { const tableName = t.name; - const schema = 'main'; + const schema = ''; // Columns with extended info; filter out hidden=1 (internal/rowid), mark computed if hidden=2 (generated) const columnsInfo = all<{ @@ -193,7 +147,7 @@ export const sqlite: IntrospectionProvider = { for (const fk of fkRows) { fkByColumn.set(fk.from, { - foreign_key_schema: 'main', + foreign_key_schema: '', foreign_key_table: fk.table || null, foreign_key_column: fk.to || null, foreign_key_name: null, @@ -228,7 +182,7 @@ export const sqlite: IntrospectionProvider = { default: c.dflt_value, options: [], unique: uniqueSingleColumn.has(c.name), - unique_name: uniqueSingleColumn.has(c.name) ? 
`${tableName}_${c.name}_unique` : null, + unique_name: null, }); } @@ -243,7 +197,46 @@ export const sqlite: IntrospectionProvider = { } }, - getDefaultValue(_args) { - throw new Error('Not implemented yet for SQLite'); + getDefaultValue({ defaultValue, fieldName, services, enums }) { + const val = defaultValue.trim(); + const factories: DataFieldAttributeFactory[] = []; + + const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef('@default', services)); + + if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { + factories.push(defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)))); + + if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') { + factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services))); + } + return factories; + } + + if (val === 'true' || val === 'false') { + factories.push(defaultAttr.addArg((a) => a.BooleanLiteral.setValue(val === 'true'))); + return factories; + } + + if (!Number.isNaN(parseFloat(val)) || !Number.isNaN(parseInt(val))) { + factories.push(defaultAttr.addArg((a) => a.NumberLiteral.setValue(val))); + return factories; + } + + if (val.startsWith("'") && val.endsWith("'")) { + const strippedName = val.slice(1, -1); + const enumDef = enums.find((e) => e.fields.find((v) => getDbName(v) === strippedName)); + if (enumDef) { + const enumField = enumDef.fields.find((v) => getDbName(v) === strippedName); + if (enumField) factories.push(defaultAttr.addArg((ab) => ab.ReferenceExpr.setTarget(enumField))); + } else { + factories.push(defaultAttr.addArg((a) => a.StringLiteral.setValue(strippedName))); + } + return factories; + } + + //TODO: add more default value factories as needed + throw new Error( + `This default value type currently is not supported. Please open an issue on GitHub. 
Values: "${defaultValue}"`, + ); }, }; diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index 05aa31acc..e017bb9b4 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -53,6 +53,11 @@ export function getDatasource(model: Model) { throw new Error('The url field must be a string literal or an env().'); } + if (url.startsWith('file:')) { + url = new URL(url, `file:${model.$document!.uri.path}`).pathname; + if (process.platform === 'win32' && url[0] === '/') url = url.slice(1); + } + const defaultSchemaField = datasource.fields.find((f) => f.name === 'defaultSchema'); const defaultSchema = (defaultSchemaField && getStringLiteral(defaultSchemaField.value)) || 'public'; diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index e454e2a2a..a7bb403e0 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -165,7 +165,7 @@ function createProgram() { .addOption( new Option('--quote ', 'set the quote style of generated schema files').default('double'), ) - .addOption(new Option('--indent ', 'set the indentation of the generated schema files').default(2)) + .addOption(new Option('--indent ', 'set the indentation of the generated schema files').default(4)) .action((options) => dbAction('pull', options)); dbCommand diff --git a/packages/cli/src/test.ts b/packages/cli/src/test.ts new file mode 100644 index 000000000..b83716dfa --- /dev/null +++ b/packages/cli/src/test.ts @@ -0,0 +1,9 @@ +import { URI, Utils } from 'vscode-uri'; + +const base = URI.parse('file:/d/zenstack/'); +const relative = URI.parse('file:./c/asdasd.db'); +console.log(base); +console.log(relative); +console.log(Utils.resolvePath(base, relative.path)); +// console.log(URI.parse('file:/c/asdasd.db')); +// console.log(URI.parse('file:./c/asdasd.db')); diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts new file mode 100644 index 000000000..45cdde442 --- 
/dev/null +++ b/packages/cli/test/db/pull.test.ts @@ -0,0 +1,96 @@ +import fs from 'node:fs'; +import path from 'node:path'; +import { describe, expect, it } from 'vitest'; +import { createProject, runCli } from '../utils'; + +const getSchema = (workDir: string) => fs.readFileSync(path.join(workDir, 'zenstack/schema.zmodel')).toString(); + +describe('DB pull', () => { + it('sqlite schema', () => { + const workDir = createProject(` +model User { + id String @id @default(cuid()) + email String @unique @map("email_address") + name String? @default("Anonymous") + role Role @default(USER) + profile Profile? + shared_profile Profile? @relation("shared") + posts Post[] + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + jsonData Json? + balance Decimal @default(0.00) + isActive Boolean @default(true) + bigCounter BigInt @default(0) + bytes Bytes? + + @@index([role]) + @@map("users") +} + +model Profile { + id Int @id @default(autoincrement()) + user User @relation(fields: [userId], references: [id], onDelete: Cascade) + userId String @unique + user_shared User @relation("shared", fields: [shared_userId], references: [id], onDelete: Cascade) + shared_userId String @unique + bio String? + avatarUrl String? + + @@map("profiles") +} + +model Post { + id Int @id @default(autoincrement()) + author User @relation(fields: [authorId], references: [id], onDelete: Cascade) + authorId String + title String + content String? + published Boolean @default(false) + tags PostTag[] + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + slug String + score Float @default(0.0) + metadata Json? 
+ + @@unique([authorId, slug]) + @@index([authorId, published]) + @@map("posts") +} + +model Tag { + id Int @id @default(autoincrement()) + name String @unique + posts PostTag[] + createdAt DateTime @default(now()) + + @@index([name], name: "tag_name_idx") + @@map("tags") +} + +model PostTag { + post Post @relation(fields: [postId], references: [id], onDelete: Cascade) + postId Int + tag Tag @relation(fields: [tagId], references: [id], onDelete: Cascade) + tagId Int + assignedAt DateTime @default(now()) + note String? @default("initial") + + @@id([postId, tagId]) + @@map("post_tags") +} + +enum Role { + USER + ADMIN + MODERATOR +}`); + runCli('format', workDir); + runCli('db push', workDir); + + const originalSchema = getSchema(workDir); + runCli('db pull --indent 4', workDir); + expect(getSchema(workDir)).toEqual(originalSchema); + }); +}); diff --git a/packages/cli/test/db/push.test.ts b/packages/cli/test/db/push.test.ts new file mode 100644 index 000000000..78164aaea --- /dev/null +++ b/packages/cli/test/db/push.test.ts @@ -0,0 +1,18 @@ +import fs from 'node:fs'; +import path from 'node:path'; +import { describe, expect, it } from 'vitest'; +import { createProject, runCli } from '../utils'; + +const model = ` +model User { + id String @id @default(cuid()) +} +`; + +describe('CLI db commands test', () => { + it('should generate a database with db push', () => { + const workDir = createProject(model); + runCli('db push', workDir); + expect(fs.existsSync(path.join(workDir, 'zenstack/dev.db'))).toBe(true); + }); +}); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8abc943c3..8c635fc5e 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -109,7 +109,7 @@ importers: version: 20.19.24 '@vitest/coverage-v8': specifier: ^4.0.16 - version: 4.0.16(vitest@4.0.14(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0)) + version: 
4.0.16(vitest@4.0.15(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0)) eslint: specifier: ~9.29.0 version: 9.29.0(jiti@2.6.1) @@ -142,7 +142,7 @@ importers: version: 8.34.1(eslint@9.29.0(jiti@2.6.1))(typescript@5.9.3) vitest: specifier: ^4.0.14 - version: 4.0.14(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) + version: 4.0.15(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) yaml: specifier: ^2.8.0 version: 2.8.0 @@ -264,6 +264,9 @@ importers: ts-pattern: specifier: 'catalog:' version: 5.7.1 + vscode-uri: + specifier: ^3.1.0 + version: 3.1.0 devDependencies: '@types/better-sqlite3': specifier: 'catalog:' @@ -1171,13 +1174,13 @@ importers: devDependencies: '@types/pg': specifier: ^8.15.6 - version: 8.15.6 + version: 8.16.0 '@zenstackhq/typescript-config': specifier: workspace:* version: link:../../../packages/config/typescript-config bun-types: specifier: ^1.3.3 - version: 1.3.3 + version: 1.3.4 tests/runtimes/edge-runtime: dependencies: @@ -1205,7 +1208,7 @@ importers: devDependencies: '@types/pg': specifier: ^8.15.6 - version: 8.15.6 + version: 8.16.0 '@zenstackhq/typescript-config': specifier: workspace:* version: link:../../../packages/config/typescript-config @@ -3663,8 +3666,8 @@ packages: '@types/pg@8.11.11': resolution: {integrity: sha512-kGT1qKM8wJQ5qlawUrEkXgvMSXoV213KfMGXcwfDwUIfUHXqXYXOfS1nE1LINRJVVVx5wCm70XnFlMHaIcQAfw==} - '@types/pg@8.15.6': - resolution: {integrity: sha512-NoaMtzhxOrubeL/7UZuNTrejB4MPAJ0RpxZqXQf2qXuVlTPuG6Y8p4u9dKRaue4yjmC7ZhzVO2/Yyyn25znrPQ==} + '@types/pg@8.16.0': + resolution: {integrity: sha512-RmhMd/wD+CF8Dfo+cVIy3RR5cl8CyfXQ0tGgW6XBL8L4LM/UTEbNXYRbLwU6w+CgrKBNbrQWt4FUtTfaU5jSYQ==} '@types/pg@8.16.0': resolution: {integrity: 
sha512-RmhMd/wD+CF8Dfo+cVIy3RR5cl8CyfXQ0tGgW6XBL8L4LM/UTEbNXYRbLwU6w+CgrKBNbrQWt4FUtTfaU5jSYQ==} @@ -3968,11 +3971,11 @@ packages: '@vitest/browser': optional: true - '@vitest/expect@4.0.14': - resolution: {integrity: sha512-RHk63V3zvRiYOWAV0rGEBRO820ce17hz7cI2kDmEdfQsBjT2luEKB5tCOc91u1oSQoUOZkSv3ZyzkdkSLD7lKw==} + '@vitest/expect@4.0.15': + resolution: {integrity: sha512-Gfyva9/GxPAWXIWjyGDli9O+waHDC0Q0jaLdFP1qPAUUfo1FEXPXUfUkp3eZA0sSq340vPycSyOlYUeM15Ft1w==} - '@vitest/mocker@4.0.14': - resolution: {integrity: sha512-RzS5NujlCzeRPF1MK7MXLiEFpkIXeMdQ+rN3Kk3tDI9j0mtbr7Nmuq67tpkOJQpgyClbOltCXMjLZicJHsH5Cg==} + '@vitest/mocker@4.0.15': + resolution: {integrity: sha512-CZ28GLfOEIFkvCFngN8Sfx5h+Se0zN+h4B7yOsPVCcgtiO7t5jt9xQh2E1UkFep+eb9fjyMfuC5gBypwb07fvQ==} peerDependencies: msw: ^2.4.9 vite: ^6.0.0 || ^7.0.0-0 @@ -3982,23 +3985,23 @@ packages: vite: optional: true - '@vitest/pretty-format@4.0.14': - resolution: {integrity: sha512-SOYPgujB6TITcJxgd3wmsLl+wZv+fy3av2PpiPpsWPZ6J1ySUYfScfpIt2Yv56ShJXR2MOA6q2KjKHN4EpdyRQ==} + '@vitest/pretty-format@4.0.15': + resolution: {integrity: sha512-SWdqR8vEv83WtZcrfLNqlqeQXlQLh2iilO1Wk1gv4eiHKjEzvgHb2OVc3mIPyhZE6F+CtfYjNlDJwP5MN6Km7A==} '@vitest/pretty-format@4.0.16': resolution: {integrity: sha512-eNCYNsSty9xJKi/UdVD8Ou16alu7AYiS2fCPRs0b1OdhJiV89buAXQLpTbe+X8V9L6qrs9CqyvU7OaAopJYPsA==} - '@vitest/runner@4.0.14': - resolution: {integrity: sha512-BsAIk3FAqxICqREbX8SetIteT8PiaUL/tgJjmhxJhCsigmzzH8xeadtp7LRnTpCVzvf0ib9BgAfKJHuhNllKLw==} + '@vitest/runner@4.0.15': + resolution: {integrity: sha512-+A+yMY8dGixUhHmNdPUxOh0la6uVzun86vAbuMT3hIDxMrAOmn5ILBHm8ajrqHE0t8R9T1dGnde1A5DTnmi3qw==} - '@vitest/snapshot@4.0.14': - resolution: {integrity: sha512-aQVBfT1PMzDSA16Y3Fp45a0q8nKexx6N5Amw3MX55BeTeZpoC08fGqEZqVmPcqN0ueZsuUQ9rriPMhZ3Mu19Ag==} + '@vitest/snapshot@4.0.15': + resolution: {integrity: sha512-A7Ob8EdFZJIBjLjeO0DZF4lqR6U7Ydi5/5LIZ0xcI+23lYlsYJAfGn8PrIWTYdZQRNnSRlzhg0zyGu37mVdy5g==} - '@vitest/spy@4.0.14': - resolution: {integrity: 
sha512-JmAZT1UtZooO0tpY3GRyiC/8W7dCs05UOq9rfsUUgEZEdq+DuHLmWhPsrTt0TiW7WYeL/hXpaE07AZ2RCk44hg==} + '@vitest/spy@4.0.15': + resolution: {integrity: sha512-+EIjOJmnY6mIfdXtE/bnozKEvTC4Uczg19yeZ2vtCz5Yyb0QQ31QWVQ8hswJ3Ysx/K2EqaNsVanjr//2+P3FHw==} - '@vitest/utils@4.0.14': - resolution: {integrity: sha512-hLqXZKAWNg8pI+SQXyXxWCTOpA3MvsqcbVeNgSi8x/CSN2wi26dSzn1wrOhmCmFjEvN9p8/kLFRHa6PI8jHazw==} + '@vitest/utils@4.0.15': + resolution: {integrity: sha512-HXjPW2w5dxhTD0dLwtYHDnelK3j8sR8cWIaLxr22evTyY6q8pRCjZSmhRWVjBaOVXChQd6AwMzi9pucorXCPZA==} '@vitest/utils@4.0.16': resolution: {integrity: sha512-h8z9yYhV3e1LEfaQ3zdypIrnAg/9hguReGZoS7Gl0aBG5xgA410zBqECqmaF/+RkTggRsfnzc1XaAHA6bmUufA==} @@ -4468,8 +4471,8 @@ packages: buffer@6.0.3: resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==} - bun-types@1.3.3: - resolution: {integrity: sha512-z3Xwlg7j2l9JY27x5Qn3Wlyos8YAp0kKRlrePAOjgjMGS5IG6E7Jnlx736vH9UVI4wUICwwhC9anYL++XeOgTQ==} + bun-types@1.3.4: + resolution: {integrity: sha512-5ua817+BZPZOlNaRgGBpZJOSAQ9RQ17pkwPD0yR7CfJg+r8DgIILByFifDTa+IPDDxzf5VNhtNlcKqFzDgJvlQ==} bundle-name@4.1.0: resolution: {integrity: sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q==} @@ -5421,8 +5424,8 @@ packages: resolution: {integrity: sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==} engines: {node: '>=6'} - expect-type@1.2.2: - resolution: {integrity: sha512-JhFGDVJ7tmDJItKhYgJCGLOWjuK9vPxiXoUFLwLDc99NlmklilbiQJwoctZtt13+xMw91MCk/REan6MWHqDjyA==} + expect-type@1.3.0: + resolution: {integrity: sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==} engines: {node: '>=12.0.0'} express@5.1.0: @@ -8599,18 +8602,18 @@ packages: vite: optional: true - vitest@4.0.14: - resolution: {integrity: sha512-d9B2J9Cm9dN9+6nxMnnNJKJCtcyKfnHj15N6YNJfaFHRLua/d3sRKU9RuKmO9mB0XdFtUizlxfz/VPbd3OxGhw==} + vitest@4.0.15: + 
resolution: {integrity: sha512-n1RxDp8UJm6N0IbJLQo+yzLZ2sQCDyl1o0LeugbPWf8+8Fttp29GghsQBjYJVmWq3gBFfe9Hs1spR44vovn2wA==} engines: {node: ^20.0.0 || ^22.0.0 || >=24.0.0} hasBin: true peerDependencies: '@edge-runtime/vm': '*' '@opentelemetry/api': ^1.9.0 '@types/node': ^20.0.0 || ^22.0.0 || >=24.0.0 - '@vitest/browser-playwright': 4.0.14 - '@vitest/browser-preview': 4.0.14 - '@vitest/browser-webdriverio': 4.0.14 - '@vitest/ui': 4.0.14 + '@vitest/browser-playwright': 4.0.15 + '@vitest/browser-preview': 4.0.15 + '@vitest/browser-webdriverio': 4.0.15 + '@vitest/ui': 4.0.15 happy-dom: '*' jsdom: '*' peerDependenciesMeta: @@ -11263,7 +11266,7 @@ snapshots: pg-protocol: 1.10.3 pg-types: 4.0.2 - '@types/pg@8.15.6': + '@types/pg@8.16.0': dependencies: '@types/node': 20.19.24 pg-protocol: 1.10.3 @@ -11464,7 +11467,7 @@ snapshots: fast-glob: 3.3.3 is-glob: 4.0.3 minimatch: 9.0.5 - semver: 7.7.3 + semver: 7.7.2 ts-api-utils: 2.1.0(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: @@ -11620,7 +11623,7 @@ snapshots: vite: 7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.2) vue: 3.5.26(typescript@5.9.3) - '@vitest/coverage-v8@4.0.16(vitest@4.0.14(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0))': + '@vitest/coverage-v8@4.0.16(vitest@4.0.15(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0))': dependencies: '@bcoe/v8-coverage': 1.0.2 '@vitest/utils': 4.0.16 @@ -11633,22 +11636,22 @@ snapshots: obug: 2.1.1 std-env: 3.10.0 tinyrainbow: 3.0.3 - vitest: 4.0.14(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) + vitest: 
4.0.15(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) transitivePeerDependencies: - supports-color - '@vitest/expect@4.0.14': + '@vitest/expect@4.0.15': dependencies: '@standard-schema/spec': 1.0.0 '@types/chai': 5.2.2 - '@vitest/spy': 4.0.14 - '@vitest/utils': 4.0.14 + '@vitest/spy': 4.0.15 + '@vitest/utils': 4.0.15 chai: 6.2.1 tinyrainbow: 3.0.3 - '@vitest/mocker@4.0.14(vite@7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0))': + '@vitest/mocker@4.0.15(vite@7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0))': dependencies: - '@vitest/spy': 4.0.14 + '@vitest/spy': 4.0.15 estree-walker: 3.0.3 magic-string: 0.30.21 optionalDependencies: @@ -11671,22 +11674,22 @@ snapshots: dependencies: tinyrainbow: 3.0.3 - '@vitest/runner@4.0.14': + '@vitest/runner@4.0.15': dependencies: - '@vitest/utils': 4.0.14 + '@vitest/utils': 4.0.15 pathe: 2.0.3 - '@vitest/snapshot@4.0.14': + '@vitest/snapshot@4.0.15': dependencies: - '@vitest/pretty-format': 4.0.14 + '@vitest/pretty-format': 4.0.15 magic-string: 0.30.21 pathe: 2.0.3 - '@vitest/spy@4.0.14': {} + '@vitest/spy@4.0.15': {} - '@vitest/utils@4.0.14': + '@vitest/utils@4.0.15': dependencies: - '@vitest/pretty-format': 4.0.14 + '@vitest/pretty-format': 4.0.15 tinyrainbow: 3.0.3 '@vitest/utils@4.0.16': @@ -12248,7 +12251,7 @@ snapshots: base64-js: 1.5.1 ieee754: 1.2.1 - bun-types@1.3.3: + bun-types@1.3.4: dependencies: '@types/node': 20.19.24 @@ -13336,7 +13339,7 @@ snapshots: expand-template@2.0.3: {} - expect-type@1.2.2: {} + expect-type@1.3.0: {} express@5.1.0: dependencies: @@ -14200,7 +14203,7 @@ snapshots: kysely-bun-sqlite@0.4.0(kysely@0.28.8): dependencies: - bun-types: 1.3.3 + bun-types: 1.3.4 kysely: 0.28.8 kysely@0.28.8: {} @@ -16919,24 +16922,24 @@ snapshots: optionalDependencies: vite: 
7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.2) - vitest@4.0.14(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0): + vitest@4.0.15(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0): dependencies: - '@vitest/expect': 4.0.14 - '@vitest/mocker': 4.0.14(vite@7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0)) - '@vitest/pretty-format': 4.0.14 - '@vitest/runner': 4.0.14 - '@vitest/snapshot': 4.0.14 - '@vitest/spy': 4.0.14 - '@vitest/utils': 4.0.14 + '@vitest/expect': 4.0.15 + '@vitest/mocker': 4.0.15(vite@7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0)) + '@vitest/pretty-format': 4.0.15 + '@vitest/runner': 4.0.15 + '@vitest/snapshot': 4.0.15 + '@vitest/spy': 4.0.15 + '@vitest/utils': 4.0.15 es-module-lexer: 1.7.0 - expect-type: 1.2.2 + expect-type: 1.3.0 magic-string: 0.30.21 obug: 2.1.1 pathe: 2.0.3 picomatch: 4.0.3 std-env: 3.10.0 tinybench: 2.9.0 - tinyexec: 0.3.2 + tinyexec: 1.0.2 tinyglobby: 0.2.15 tinyrainbow: 3.0.3 vite: 7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) From a42019940e2e0bdea4f2d0b68876df0a452f841d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 21 Nov 2025 01:25:23 +0100 Subject: [PATCH 24/83] fix: lint fix --- packages/cli/src/actions/db.ts | 10 ++++++++-- packages/cli/src/actions/pull/index.ts | 5 ++--- packages/cli/src/actions/pull/provider/sqlite.ts | 3 +-- 3 files changed, 11 insertions(+), 7 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 6ea1bc08f..16fe6f196 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,10 +1,16 @@ import 
{ config } from '@dotenvx/dotenvx'; import { ZModelCodeGenerator } from '@zenstackhq/language'; -import { type DataField, DataModel, Enum, type Model } from '@zenstackhq/language/ast'; +import { DataModel, Enum, type Model } from '@zenstackhq/language/ast'; import fs from 'node:fs'; import path from 'node:path'; import { execPrisma } from '../utils/exec-utils'; -import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, requireDataSourceUrl, loadSchemaDocumentWithServices } from './action-utils'; +import { + generateTempPrismaSchema, + getSchemaFile, + handleSubProcessError, + requireDataSourceUrl, + loadSchemaDocument, +} from './action-utils'; import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; import { getDatasource, getDbName, getRelationFkName } from './pull/utils'; diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 60c192045..4a661afb9 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -2,11 +2,11 @@ import type { ZModelServices } from '@zenstackhq/language'; import { isEnum, type Attribute, + type BuiltinType, type DataField, type DataModel, type Enum, type Model, - type BuiltinType, } from '@zenstackhq/language/ast'; import { DataFieldAttributeFactory, @@ -15,7 +15,7 @@ import { EnumFactory, } from '@zenstackhq/language/factory'; import type { PullOptions } from '../db'; -import { type Cascade, type IntrospectedEnum, type IntrospectedTable, type IntrospectionProvider } from './provider'; +import type { Cascade, IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider'; import { getAttributeRef, getDbName, getEnumRef } from './utils'; export function syncEnums({ @@ -156,7 +156,6 @@ export function syncTable({ services, options, defaultSchema, - oldModel, }: { table: IntrospectedTable; model: Model; diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts 
b/packages/cli/src/actions/pull/provider/sqlite.ts index e940b3595..5825becde 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -1,7 +1,6 @@ -import type { BuiltinType } from '@zenstackhq/language/ast'; -import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; +import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; // Note: We dynamically import better-sqlite3 inside the async function to avoid // requiring it at module load time for environments that don't use SQLite. From 73d67b68e0a2692116c4446c33430a2a18ee6abb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 21 Nov 2025 21:19:35 +0100 Subject: [PATCH 25/83] fix: formatting for e2e test schemas --- packages/cli/test/utils.ts | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index 2fafb2074..011eb57ec 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -5,15 +5,14 @@ import path from 'node:path'; const ZMODEL_PRELUDE = `datasource db { provider = "sqlite" - url = "file:./dev.db" -} -`; + url = "file:./dev.db" +}`; export function createProject(zmodel: string, addPrelude = true) { const workDir = createTestProject(); fs.mkdirSync(path.join(workDir, 'zenstack'), { recursive: true }); const schemaPath = path.join(workDir, 'zenstack/schema.zmodel'); - fs.writeFileSync(schemaPath, addPrelude ? `${ZMODEL_PRELUDE}\n\n${zmodel}` : zmodel); + fs.writeFileSync(schemaPath, addPrelude ? 
`${ZMODEL_PRELUDE}\n${zmodel}` : zmodel); return workDir; } From d1aac0ff51a2b430f1a72a95de9a9c905129a61d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 21 Nov 2025 21:40:41 +0100 Subject: [PATCH 26/83] test: run db pull e2e test also for postgres --- packages/cli/test/db/pull.test.ts | 6 ++-- packages/cli/test/utils.ts | 48 +++++++++++++++++++++++++++---- 2 files changed, 46 insertions(+), 8 deletions(-) diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index 45cdde442..35378c2ea 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -6,9 +6,9 @@ import { createProject, runCli } from '../utils'; const getSchema = (workDir: string) => fs.readFileSync(path.join(workDir, 'zenstack/schema.zmodel')).toString(); describe('DB pull', () => { - it('sqlite schema', () => { - const workDir = createProject(` -model User { + it('simple schema', () => { + const workDir = createProject( +`model User { id String @id @default(cuid()) email String @unique @map("email_address") name String? @default("Anonymous") diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index 011eb57ec..5a93100eb 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -1,14 +1,52 @@ -import { createTestProject } from '@zenstackhq/testtools'; +import { createTestProject, getTestDbProvider } from '@zenstackhq/testtools'; +import { createHash } from 'node:crypto'; import { execSync } from 'node:child_process'; import fs from 'node:fs'; import path from 'node:path'; +import { expect } from 'vitest'; -const ZMODEL_PRELUDE = `datasource db { - provider = "sqlite" - url = "file:./dev.db" -}`; +const TEST_PG_CONFIG = { + host: process.env['TEST_PG_HOST'] ?? 'localhost', + port: process.env['TEST_PG_PORT'] ? parseInt(process.env['TEST_PG_PORT']) : 5432, + user: process.env['TEST_PG_USER'] ?? 'postgres', + password: process.env['TEST_PG_PASSWORD'] ?? 
'postgres', +}; + +function getTestDbName(provider: string) { + if (provider === 'sqlite') { + return './test.db'; + } + const testName = expect.getState().currentTestName ?? 'unnamed'; + const testPath = expect.getState().testPath ?? ''; + // digest test name + const digest = createHash('md5') + .update(testName + testPath) + .digest('hex'); + // compute a database name based on test name + return ( + 'test_' + + testName + .toLowerCase() + .replace(/[^a-z0-9_]/g, '_') + .replace(/_+/g, '_') + .substring(0, 30) + + digest.slice(0, 6) + ); +} export function createProject(zmodel: string, addPrelude = true) { + const provider = getTestDbProvider() ?? 'sqlite'; + const dbName = getTestDbName(provider); + const dbUrl = + provider === 'sqlite' + ? `file:${dbName}` + : `postgres://${TEST_PG_CONFIG.user}:${TEST_PG_CONFIG.password}@${TEST_PG_CONFIG.host}:${TEST_PG_CONFIG.port}/${dbName}`; + + const ZMODEL_PRELUDE = `datasource db { + provider = "${provider}" + url = "${dbUrl}" +} +`; const workDir = createTestProject(); fs.mkdirSync(path.join(workDir, 'zenstack'), { recursive: true }); const schemaPath = path.join(workDir, 'zenstack/schema.zmodel'); From 426feb02ff1b04b09a4a193d29f8e91609cd55dd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Sun, 23 Nov 2025 01:30:26 +0100 Subject: [PATCH 27/83] fix: postgres instorspection schema filter --- packages/cli/src/actions/db.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 16fe6f196..879353130 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -101,10 +101,10 @@ async function runPull(options: PullOptions) { console.log('Starging introspect the database...'); const { enums: allEnums, tables: allTables } = await provider.introspect(datasource.url); const enums = provider.isSupportedFeature('Schema') - ? allEnums.filter((e) => datasource.schemas.includes(e.schema_name)) + ? 
allEnums.filter((e) => datasource.allSchemas.includes(e.schema_name)) : allEnums; const tables = provider.isSupportedFeature('Schema') - ? allTables.filter((t) => datasource.schemas.includes(t.schema)) + ? allTables.filter((t) => datasource.allSchemas.includes(t.schema)) : allTables; const newModel: Model = { From 8651f68cc18f872f8ac057c85b7d1e377b1c33de Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Sun, 23 Nov 2025 01:32:49 +0100 Subject: [PATCH 28/83] test: update cli tests --- packages/cli/test/check.test.ts | 7 +------ packages/cli/test/db.test.ts | 10 +++++----- packages/cli/test/db/push.test.ts | 4 ++-- packages/cli/test/migrate.test.ts | 24 ++++++++++++------------ packages/cli/test/utils.ts | 9 ++++++--- 5 files changed, 26 insertions(+), 28 deletions(-) diff --git a/packages/cli/test/check.test.ts b/packages/cli/test/check.test.ts index 287bb6b80..60f80903e 100644 --- a/packages/cli/test/check.test.ts +++ b/packages/cli/test/check.test.ts @@ -83,17 +83,12 @@ describe('CLI validate command test', () => { it('should validate schema with syntax errors', () => { const modelWithSyntaxError = ` -datasource db { - provider = "sqlite" - url = "file:./dev.db" -} - model User { id String @id @default(cuid()) email String @unique // Missing closing brace - syntax error `; - const workDir = createProject(modelWithSyntaxError, false); + const workDir = createProject(modelWithSyntaxError); // Should throw an error due to syntax error expect(() => runCli('check', workDir)).toThrow(); diff --git a/packages/cli/test/db.test.ts b/packages/cli/test/db.test.ts index 636dcff8f..b17f92e5e 100644 --- a/packages/cli/test/db.test.ts +++ b/packages/cli/test/db.test.ts @@ -11,13 +11,13 @@ model User { describe('CLI db commands test', () => { it('should generate a database with db push', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('db push', workDir); - 
expect(fs.existsSync(path.join(workDir, 'zenstack/dev.db'))).toBe(true); + expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); }); it('should seed the database with db seed with seed script', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); const pkgJson = JSON.parse(fs.readFileSync(path.join(workDir, 'package.json'), 'utf8')); pkgJson.zenstack = { seed: 'node seed.js', @@ -36,7 +36,7 @@ fs.writeFileSync('seed.txt', 'success'); }); it('should seed the database after migrate reset', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); const pkgJson = JSON.parse(fs.readFileSync(path.join(workDir, 'package.json'), 'utf8')); pkgJson.zenstack = { seed: 'node seed.js', @@ -55,7 +55,7 @@ fs.writeFileSync('seed.txt', 'success'); }); it('should skip seeding the database without seed script', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('db seed', workDir); }); }); diff --git a/packages/cli/test/db/push.test.ts b/packages/cli/test/db/push.test.ts index 78164aaea..9c688df4d 100644 --- a/packages/cli/test/db/push.test.ts +++ b/packages/cli/test/db/push.test.ts @@ -11,8 +11,8 @@ model User { describe('CLI db commands test', () => { it('should generate a database with db push', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('db push', workDir); - expect(fs.existsSync(path.join(workDir, 'zenstack/dev.db'))).toBe(true); + expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); }); }); diff --git a/packages/cli/test/migrate.test.ts b/packages/cli/test/migrate.test.ts index 56a0fec83..86abc3576 100644 --- a/packages/cli/test/migrate.test.ts +++ b/packages/cli/test/migrate.test.ts @@ -11,36 +11,36 @@ model User { describe('CLI migrate commands test', () => { it('should generate a 
database with migrate dev', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('migrate dev --name init', workDir); - expect(fs.existsSync(path.join(workDir, 'zenstack/dev.db'))).toBe(true); + expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); expect(fs.existsSync(path.join(workDir, 'zenstack/migrations'))).toBe(true); }); it('should reset the database with migrate reset', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('db push', workDir); - expect(fs.existsSync(path.join(workDir, 'zenstack/dev.db'))).toBe(true); + expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); runCli('migrate reset --force', workDir); - expect(fs.existsSync(path.join(workDir, 'zenstack/dev.db'))).toBe(true); + expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); }); it('should reset the database with migrate deploy', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('migrate dev --name init', workDir); - fs.rmSync(path.join(workDir, 'zenstack/dev.db')); + fs.rmSync(path.join(workDir, 'zenstack/test.db')); runCli('migrate deploy', workDir); - expect(fs.existsSync(path.join(workDir, 'zenstack/dev.db'))).toBe(true); + expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); }); it('supports migrate status', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('migrate dev --name init', workDir); runCli('migrate status', workDir); }); it('supports migrate resolve', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('migrate dev --name init', workDir); // find the migration record "timestamp_init" @@ -51,7 +51,7 @@ describe('CLI migrate commands test', () => { 
fs.writeFileSync(path.join(workDir, 'zenstack/migrations', migration!, 'migration.sql'), 'invalid content'); // redeploy the migration, which will fail - fs.rmSync(path.join(workDir, 'zenstack/dev.db'), { force: true }); + fs.rmSync(path.join(workDir, 'zenstack/test.db'), { force: true }); try { runCli('migrate deploy', workDir); } catch { @@ -66,7 +66,7 @@ describe('CLI migrate commands test', () => { }); it('should throw error when neither applied nor rolled-back is provided', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); expect(() => runCli('migrate resolve', workDir)).toThrow(); }); }); diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index 5a93100eb..7017b622f 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -34,8 +34,11 @@ function getTestDbName(provider: string) { ); } -export function createProject(zmodel: string, addPrelude = true) { - const provider = getTestDbProvider() ?? 'sqlite'; +export function createProject( + zmodel: string, + options?: { customPrelude?: boolean; provider?: 'sqlite' | 'postgresql' }, +) { + const provider = (options?.provider || getTestDbProvider()) ?? 'sqlite'; const dbName = getTestDbName(provider); const dbUrl = provider === 'sqlite' @@ -50,7 +53,7 @@ export function createProject(zmodel: string, addPrelude = true) { const workDir = createTestProject(); fs.mkdirSync(path.join(workDir, 'zenstack'), { recursive: true }); const schemaPath = path.join(workDir, 'zenstack/schema.zmodel'); - fs.writeFileSync(schemaPath, addPrelude ? `${ZMODEL_PRELUDE}\n${zmodel}` : zmodel); + fs.writeFileSync(schemaPath, !options?.customPrelude ? 
`${ZMODEL_PRELUDE}\n${zmodel}` : zmodel); return workDir; } From 7e2f17dae3701401c870da03a50d99c7c41b3218 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Mon, 15 Dec 2025 22:22:22 +0100 Subject: [PATCH 29/83] feat(cli): Improves database introspection and syncing Enhances the `db pull` command with a spinner for better UX. Adds color-coded logging to highlight important steps. Provides more detailed output on schema changes, including deleted models, enums, added fields, and deleted attributes. Also includes minor improvements to enum mapping and constraint handling. --- packages/cli/src/actions/db.ts | 76 +++++++++++++++++++++----- packages/cli/src/actions/pull/index.ts | 17 +++--- 2 files changed, 73 insertions(+), 20 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 879353130..36e354b95 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,15 +1,17 @@ import { config } from '@dotenvx/dotenvx'; import { ZModelCodeGenerator } from '@zenstackhq/language'; import { DataModel, Enum, type Model } from '@zenstackhq/language/ast'; +import colors from 'colors'; import fs from 'node:fs'; import path from 'node:path'; +import ora from 'ora'; import { execPrisma } from '../utils/exec-utils'; import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, - requireDataSourceUrl, loadSchemaDocument, + requireDataSourceUrl, } from './action-utils'; import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; @@ -77,6 +79,7 @@ async function runPush(options: PushOptions) { } async function runPull(options: PullOptions) { + const spinner = ora(); try { const schemaFile = getSchemaFile(options.schema); const { model, services } = await loadSchemaDocument(schemaFile, { returnServices: true }); @@ -98,8 +101,11 @@ async function runPull(options: PullOptions) { if (!provider) { throw new Error(`No 
introspection provider found for: ${datasource.provider}`); } - console.log('Starging introspect the database...'); + + spinner.start('Introspecting database...'); const { enums: allEnums, tables: allTables } = await provider.introspect(datasource.url); + spinner.succeed('Database introspected'); + const enums = provider.isSupportedFeature('Schema') ? allEnums.filter((e) => datasource.allSchemas.includes(e.schema_name)) : allEnums; @@ -107,6 +113,8 @@ async function runPull(options: PullOptions) { ? allTables.filter((t) => datasource.allSchemas.includes(t.schema)) : allTables; + console.log(colors.blue('Syncing schema...')); + const newModel: Model = { $type: 'Model', $container: undefined, @@ -165,12 +173,22 @@ async function runPull(options: PullOptions) { }); } + console.log(colors.blue('Schema synced')); + const cwd = new URL(`file://${process.cwd()}`).pathname; const docs = services.shared.workspace.LangiumDocuments.all .filter(({ uri }) => uri.path.toLowerCase().startsWith(cwd.toLowerCase())) .toArray(); const docsSet = new Set(docs.map((d) => d.uri.toString())); + console.log(colors.bold('\nApplying changes to ZModel...')); + + const deletedModels: string[] = []; + const deletedEnums: string[] = []; + const addedFields: string[] = []; + const deletedAttributes: string[] = []; + const deletedFields: string[] = []; + //Delete models services.shared.workspace.IndexManager.allElements('DataModel', docsSet) .filter( @@ -181,7 +199,7 @@ async function runPull(options: PullOptions) { const model = decl.node!.$container as Model; const index = model.declarations.findIndex((d) => d === decl.node); model.declarations.splice(index, 1); - console.log(`Delete model ${decl.name}`); + deletedModels.push(colors.red(`- Model ${decl.name} deleted`)); }); // Delete Enums @@ -195,7 +213,7 @@ async function runPull(options: PullOptions) { const model = decl.node!.$container as Model; const index = model.declarations.findIndex((d) => d === decl.node); 
model.declarations.splice(index, 1); - console.log(`Delete enum ${decl.name}`); + deletedEnums.push(colors.red(`- Enum ${decl.name} deleted`)); }); // newModel.declarations @@ -239,14 +257,16 @@ async function runPull(options: PullOptions) { if (originalFields.length > 1) { console.warn( - `Found more original fields, need to tweak the search algorith. ${originalDataModel.name}->[${originalFields.map((of) => of.name).join(', ')}](${f.name})`, + colors.yellow( + `Found more original fields, need to tweak the search algorith. ${originalDataModel.name}->[${originalFields.map((of) => of.name).join(', ')}](${f.name})`, + ), ); return; } const originalField = originalFields.at(0); Object.freeze(originalField); if (!originalField) { - console.log(`Added field ${f.name} to ${originalDataModel.name}`); + addedFields.push(colors.green(`+ Field ${f.name} added to ${originalDataModel.name}`)); (f as any).$container = originalDataModel; originalDataModel.fields.push(f as any); if (f.$type === 'DataField' && f.type.reference?.ref) { @@ -260,7 +280,7 @@ async function runPull(options: PullOptions) { } return; } - if (f.name === 'profiles') console.log(f.attributes.length); + originalField.attributes .filter( (attr) => @@ -271,7 +291,9 @@ async function runPull(options: PullOptions) { const field = attr.$container; const index = field.attributes.findIndex((d) => d === attr); field.attributes.splice(index, 1); - console.log(`Delete attribute from field:${field.name} ${attr.decl.$refText}`); + deletedAttributes.push( + colors.yellow(`- Attribute ${attr.decl.$refText} deleted from field: ${field.name}`), + ); }); }); originalDataModel.fields @@ -295,10 +317,35 @@ async function runPull(options: PullOptions) { const _model = f.$container; const index = _model.fields.findIndex((d) => d === f); _model.fields.splice(index, 1); - console.log(`Delete field ${f.name}`); + deletedFields.push(colors.red(`- Field ${f.name} deleted from ${_model.name}`)); }); }); + if (deletedModels.length > 0) 
{ + console.log(colors.bold('\nDeleted Models:')); + deletedModels.forEach((msg) => console.log(msg)); + } + + if (deletedEnums.length > 0) { + console.log(colors.bold('\nDeleted Enums:')); + deletedEnums.forEach((msg) => console.log(msg)); + } + + if (addedFields.length > 0) { + console.log(colors.bold('\nAdded Fields:')); + addedFields.forEach((msg) => console.log(msg)); + } + + if (deletedAttributes.length > 0) { + console.log(colors.bold('\nDeleted Attributes:')); + deletedAttributes.forEach((msg) => console.log(msg)); + } + + if (deletedFields.length > 0) { + console.log(colors.bold('\nDeleted Fields:')); + deletedFields.forEach((msg) => console.log(msg)); + } + if (options.out && !fs.lstatSync(options.out).isFile()) { throw new Error(`Output path ${options.out} is not a file`); } @@ -311,7 +358,7 @@ async function runPull(options: PullOptions) { if (options.out) { const zmodelSchema = generator.generate(newModel); - console.log(`Writing to ${options.out}`); + console.log(colors.blue(`Writing to ${options.out}`)); const outPath = options.out ? 
path.resolve(options.out) : schemaFile; @@ -319,12 +366,15 @@ async function runPull(options: PullOptions) { } else { docs.forEach(({ uri, parseResult: { value: model } }) => { const zmodelSchema = generator.generate(model); - console.log(`Writing to ${uri.path}`); + console.log(colors.blue(`Writing to ${uri.path}`)); fs.writeFileSync(uri.fsPath, zmodelSchema); }); } + + console.log(colors.green.bold('\nPull completed successfully!')); } catch (error) { - console.log(error); + spinner.fail('Pull failed'); + console.error(error); throw error; } -} +} \ No newline at end of file diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 4a661afb9..685038298 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,4 +1,5 @@ import type { ZModelServices } from '@zenstackhq/language'; +import colors from 'colors'; import { isEnum, type Attribute, @@ -38,7 +39,7 @@ export function syncEnums({ if (provider.isSupportedFeature('NativeEnum')) { for (const dbEnum of dbEnums) { const { modified, name } = resolveNameCasing(options.modelCasing, dbEnum.enum_type); - if (modified) console.log(`Mapping enum ${dbEnum.enum_type} to ${name}`); + if (modified) console.log(colors.gray(`Mapping enum ${dbEnum.enum_type} to ${name}`)); const factory = new EnumFactory().setName(name); if (modified || options.alwaysMap) factory.addAttribute((builder) => @@ -344,16 +345,18 @@ export function syncTable({ table.indexes.forEach((index) => { if (index.predicate) { //These constraints are not supported by Zenstack, because Zenstack currently does not fully support check constraints. Read more: https://pris.ly/d/check-constraints - console.log( - 'These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints', - `- Model: "${table.name}", constraint: "${index.name}"`, + console.warn( + colors.yellow( + `These constraints are not supported by Zenstack. 
Read more: https://pris.ly/d/check-constraints\n- Model: "${table.name}", constraint: "${index.name}"`, + ), ); return; } if (index.columns.find((c) => c.expression)) { - console.log( - 'These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints', - `- Model: "${table.name}", constraint: "${index.name}"`, + console.warn( + colors.yellow( + `These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints\n- Model: "${table.name}", constraint: "${index.name}"`, + ), ); return; } From ea364d28d6a1f959993cb0e198a249d77b96add5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 9 Jan 2026 16:04:44 +0100 Subject: [PATCH 30/83] fix(cli): fixes field casing and sort issues --- packages/cli/src/actions/pull/index.ts | 24 +++--- packages/cli/test/db/pull.test.ts | 100 ++++++++++++++++++++++++- packages/cli/test/utils.ts | 20 +++-- 3 files changed, 123 insertions(+), 21 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 685038298..05a54e035 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -378,7 +378,8 @@ export function syncTable({ } arrayExpr.addItem((itemBuilder) => { const refExpr = itemBuilder.ReferenceExpr.setTarget(ref); - if (c.order !== 'ASC') refExpr.addArg((ab) => ab.StringLiteral.setValue('DESC'), 'sort'); + if (c.order && c.order !== 'ASC') + refExpr.addArg((ab) => ab.StringLiteral.setValue('DESC'), 'sort'); return refExpr; }); @@ -402,6 +403,7 @@ export function syncRelation({ model, relation, services, + options, selfRelation, simmilarRelations, }: { @@ -444,10 +446,12 @@ export function syncRelation({ const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? '_' : ''; const relationName = `${relation.table}${simmilarRelations > 1 ? 
`_${relation.column}` : ''}To${relation.references.table}`; - let sourceFieldName = + let { name: sourceFieldName } = resolveNameCasing( + options.fieldCasing, simmilarRelations > 0 ? `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` - : targetModel.name; + : targetModel.name, + ); if (sourceModel.fields.find((f) => f.name === sourceFieldName)) { sourceFieldName = `${sourceFieldName}To${targetModel.name.charAt(0).toLowerCase()}${targetModel.name.slice(1)}_${relation.references.column}`; @@ -498,10 +502,12 @@ export function syncRelation({ sourceModel.fields.push(sourceFieldFactory.node); const oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? '_' : ''; - const oppositeFieldName = + const { name: oppositeFieldName } = resolveNameCasing( + options.fieldCasing, simmilarRelations > 0 ? `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` - : sourceModel.name; + : sourceModel.name, + ); const targetFieldFactory = new DataFieldFactory() .setContainer(targetModel) @@ -519,8 +525,8 @@ export function syncRelation({ targetModel.fields.push(targetFieldFactory.node); - targetModel.fields.sort((a, b) => { - if (a.type.reference && b.type.reference) return 0; - return a.name.localeCompare(b.name); - }); + // targetModel.fields.sort((a, b) => { + // if (a.type.reference || b.type.reference) return a.name.localeCompare(b.name); + // return 0; + // }); } diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index 35378c2ea..34d79036a 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -1,14 +1,20 @@ import fs from 'node:fs'; import path from 'node:path'; import { describe, expect, it } from 'vitest'; -import { createProject, runCli } from '../utils'; +import { createProject, getDefaultPrelude, runCli } from '../utils'; +import { loadSchemaDocument } from 
'../../src/actions/action-utils'; +import { ZModelCodeGenerator } from '@zenstackhq/language'; const getSchema = (workDir: string) => fs.readFileSync(path.join(workDir, 'zenstack/schema.zmodel')).toString(); +const generator = new ZModelCodeGenerator({ + quote: 'double', + indent: 4, +}); describe('DB pull', () => { - it('simple schema', () => { + it("simple schema - pull shouldn't modify the schema", () => { const workDir = createProject( -`model User { + `model User { id String @id @default(cuid()) email String @unique @map("email_address") name String? @default("Anonymous") @@ -85,7 +91,8 @@ enum Role { USER ADMIN MODERATOR -}`); +}`, + ); runCli('format', workDir); runCli('db push', workDir); @@ -93,4 +100,89 @@ enum Role { runCli('db pull --indent 4', workDir); expect(getSchema(workDir)).toEqual(originalSchema); }); + + it('simple schema - pull shouldn recreate the schema.zmodel', async () => { + const workDir = createProject( + `model Post { + id Int @id @default(autoincrement()) + authorId String + title String + content String? + published Boolean @default(false) + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + slug String + score Float @default(0.0) + metadata Json? + author User @relation(fields: [authorId], references: [id], onDelete: Cascade) + PostTag PostTag[] + + @@unique([authorId, slug]) + @@index([authorId, published]) +} +model PostTag { + post Post @relation(fields: [postId], references: [id], onDelete: Cascade) + postId Int + tag Tag @relation(fields: [tagId], references: [id], onDelete: Cascade) + tagId Int + assignedAt DateTime @default(now()) + note String? @default("initial") + + @@id([postId, tagId]) +} +model User { + id String @id @default(cuid()) + email String @unique + name String? @default("Anonymous") + role Role @default(USER) + profile Profile? + shared_profile Profile? @relation("shared") + posts Post[] + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + jsonData Json? 
+ balance Decimal @default(0.00) + isActive Boolean @default(true) + bigCounter BigInt @default(0) + bytes Bytes? + + @@index([role]) +} + +model Profile { + id Int @id @default(autoincrement()) + user User @relation(fields: [userId], references: [id], onDelete: Cascade) + userId String @unique + user_shared User @relation("shared", fields: [shared_userId], references: [id], onDelete: Cascade) + shared_userId String @unique + bio String? + avatarUrl String? +} + +model Tag { + id Int @id @default(autoincrement()) + name String @unique + posts PostTag[] + createdAt DateTime @default(now()) + + @@index([name], name: "tag_name_idx") +} + +enum Role { + USER + ADMIN + MODERATOR +}`, + ); + console.log(workDir) + runCli('format', workDir); + runCli('db push', workDir); + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); + const originalSchema = generator.generate(model); + fs.writeFileSync(path.join(workDir, 'zenstack/schema.zmodel'), getDefaultPrelude()); + + runCli('db pull --indent 4 --field-casing=camel', workDir); + expect(getSchema(workDir)).toEqual(originalSchema); + }); }); diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index 7017b622f..310fea122 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -34,10 +34,7 @@ function getTestDbName(provider: string) { ); } -export function createProject( - zmodel: string, - options?: { customPrelude?: boolean; provider?: 'sqlite' | 'postgresql' }, -) { +export function getDefaultPrelude(options?: { provider?: 'sqlite' | 'postgresql' }) { const provider = (options?.provider || getTestDbProvider()) ?? 
'sqlite'; const dbName = getTestDbName(provider); const dbUrl = @@ -46,18 +43,25 @@ export function createProject( : `postgres://${TEST_PG_CONFIG.user}:${TEST_PG_CONFIG.password}@${TEST_PG_CONFIG.host}:${TEST_PG_CONFIG.port}/${dbName}`; const ZMODEL_PRELUDE = `datasource db { - provider = "${provider}" - url = "${dbUrl}" + provider = "${provider}" + url = "${dbUrl}" } `; + return ZMODEL_PRELUDE; +} + +export function createProject( + zmodel: string, + options?: { customPrelude?: boolean; provider?: 'sqlite' | 'postgresql' }, +) { const workDir = createTestProject(); fs.mkdirSync(path.join(workDir, 'zenstack'), { recursive: true }); const schemaPath = path.join(workDir, 'zenstack/schema.zmodel'); - fs.writeFileSync(schemaPath, !options?.customPrelude ? `${ZMODEL_PRELUDE}\n${zmodel}` : zmodel); + fs.writeFileSync(schemaPath, !options?.customPrelude ? `${getDefaultPrelude()}\n${zmodel}` : zmodel); return workDir; } export function runCli(command: string, cwd: string) { const cli = path.join(__dirname, '../dist/index.js'); - execSync(`node ${cli} ${command}`, { cwd }); + execSync(`node ${cli} ${command}`, { cwd, stdio: 'inherit' }); } From 5b07e8f87782fd2a78eb46961eb5c9a8cce329c3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Tue, 27 Jan 2026 20:55:40 +0100 Subject: [PATCH 31/83] chore(cli): remove temporary test script Deletes an unused script used for experimenting with URI path resolution. Cleans up the codebase by removing development-only artifacts. 
--- packages/cli/src/test.ts | 9 --------- 1 file changed, 9 deletions(-) delete mode 100644 packages/cli/src/test.ts diff --git a/packages/cli/src/test.ts b/packages/cli/src/test.ts deleted file mode 100644 index b83716dfa..000000000 --- a/packages/cli/src/test.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { URI, Utils } from 'vscode-uri'; - -const base = URI.parse('file:/d/zenstack/'); -const relative = URI.parse('file:./c/asdasd.db'); -console.log(base); -console.log(relative); -console.log(Utils.resolvePath(base, relative.path)); -// console.log(URI.parse('file:/c/asdasd.db')); -// console.log(URI.parse('file:./c/asdasd.db')); From be99621191a41d85d5e731cffd4ad211e6e3c4df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Tue, 27 Jan 2026 21:18:44 +0100 Subject: [PATCH 32/83] chore: update pnpm-lock.yaml --- pnpm-lock.yaml | 142 ++++++++++++++++++++++++------------------------- 1 file changed, 71 insertions(+), 71 deletions(-) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8c635fc5e..3412d6554 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -109,7 +109,7 @@ importers: version: 20.19.24 '@vitest/coverage-v8': specifier: ^4.0.16 - version: 4.0.16(vitest@4.0.15(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0)) + version: 4.0.16(vitest@4.0.14(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0)) eslint: specifier: ~9.29.0 version: 9.29.0(jiti@2.6.1) @@ -142,7 +142,7 @@ importers: version: 8.34.1(eslint@9.29.0(jiti@2.6.1))(typescript@5.9.3) vitest: specifier: ^4.0.14 - version: 4.0.15(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) + version: 
4.0.14(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) yaml: specifier: ^2.8.0 version: 2.8.0 @@ -194,7 +194,7 @@ importers: dependencies: '@dotenvx/dotenvx': specifier: ^1.51.0 - version: 1.51.4 + version: 1.52.0 '@zenstackhq/common-helpers': specifier: workspace:* version: link:../common-helpers @@ -204,6 +204,9 @@ importers: '@zenstackhq/orm': specifier: workspace:* version: link:../orm + '@zenstackhq/schema': + specifier: workspace:* + version: link:../schema '@zenstackhq/sdk': specifier: workspace:* version: link:../sdk @@ -264,9 +267,6 @@ importers: ts-pattern: specifier: 'catalog:' version: 5.7.1 - vscode-uri: - specifier: ^3.1.0 - version: 3.1.0 devDependencies: '@types/better-sqlite3': specifier: 'catalog:' @@ -1174,13 +1174,13 @@ importers: devDependencies: '@types/pg': specifier: ^8.15.6 - version: 8.16.0 + version: 8.15.6 '@zenstackhq/typescript-config': specifier: workspace:* version: link:../../../packages/config/typescript-config bun-types: specifier: ^1.3.3 - version: 1.3.4 + version: 1.3.3 tests/runtimes/edge-runtime: dependencies: @@ -1208,7 +1208,7 @@ importers: devDependencies: '@types/pg': specifier: ^8.15.6 - version: 8.16.0 + version: 8.15.6 '@zenstackhq/typescript-config': specifier: workspace:* version: link:../../../packages/config/typescript-config @@ -1564,8 +1564,8 @@ packages: resolution: {integrity: sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==} engines: {node: '>=18'} - '@dotenvx/dotenvx@1.51.4': - resolution: {integrity: sha512-AoziS8lRQ3ew/lY5J4JSlzYSN9Fo0oiyMBY37L3Bwq4mOQJT5GSrdZYLFPt6pH1LApDI3ZJceNyx+rHRACZSeQ==} + '@dotenvx/dotenvx@1.52.0': + resolution: {integrity: sha512-CaQcc8JvtzQhUSm9877b6V4Tb7HCotkcyud9X2YwdqtQKwgljkMRwU96fVYKnzN3V0Hj74oP7Es+vZ0mS+Aa1w==} hasBin: true '@dxup/nuxt@0.2.2': @@ -3666,8 +3666,8 @@ packages: '@types/pg@8.11.11': resolution: {integrity: 
sha512-kGT1qKM8wJQ5qlawUrEkXgvMSXoV213KfMGXcwfDwUIfUHXqXYXOfS1nE1LINRJVVVx5wCm70XnFlMHaIcQAfw==} - '@types/pg@8.16.0': - resolution: {integrity: sha512-RmhMd/wD+CF8Dfo+cVIy3RR5cl8CyfXQ0tGgW6XBL8L4LM/UTEbNXYRbLwU6w+CgrKBNbrQWt4FUtTfaU5jSYQ==} + '@types/pg@8.15.6': + resolution: {integrity: sha512-NoaMtzhxOrubeL/7UZuNTrejB4MPAJ0RpxZqXQf2qXuVlTPuG6Y8p4u9dKRaue4yjmC7ZhzVO2/Yyyn25znrPQ==} '@types/pg@8.16.0': resolution: {integrity: sha512-RmhMd/wD+CF8Dfo+cVIy3RR5cl8CyfXQ0tGgW6XBL8L4LM/UTEbNXYRbLwU6w+CgrKBNbrQWt4FUtTfaU5jSYQ==} @@ -3971,11 +3971,11 @@ packages: '@vitest/browser': optional: true - '@vitest/expect@4.0.15': - resolution: {integrity: sha512-Gfyva9/GxPAWXIWjyGDli9O+waHDC0Q0jaLdFP1qPAUUfo1FEXPXUfUkp3eZA0sSq340vPycSyOlYUeM15Ft1w==} + '@vitest/expect@4.0.14': + resolution: {integrity: sha512-RHk63V3zvRiYOWAV0rGEBRO820ce17hz7cI2kDmEdfQsBjT2luEKB5tCOc91u1oSQoUOZkSv3ZyzkdkSLD7lKw==} - '@vitest/mocker@4.0.15': - resolution: {integrity: sha512-CZ28GLfOEIFkvCFngN8Sfx5h+Se0zN+h4B7yOsPVCcgtiO7t5jt9xQh2E1UkFep+eb9fjyMfuC5gBypwb07fvQ==} + '@vitest/mocker@4.0.14': + resolution: {integrity: sha512-RzS5NujlCzeRPF1MK7MXLiEFpkIXeMdQ+rN3Kk3tDI9j0mtbr7Nmuq67tpkOJQpgyClbOltCXMjLZicJHsH5Cg==} peerDependencies: msw: ^2.4.9 vite: ^6.0.0 || ^7.0.0-0 @@ -3985,23 +3985,23 @@ packages: vite: optional: true - '@vitest/pretty-format@4.0.15': - resolution: {integrity: sha512-SWdqR8vEv83WtZcrfLNqlqeQXlQLh2iilO1Wk1gv4eiHKjEzvgHb2OVc3mIPyhZE6F+CtfYjNlDJwP5MN6Km7A==} + '@vitest/pretty-format@4.0.14': + resolution: {integrity: sha512-SOYPgujB6TITcJxgd3wmsLl+wZv+fy3av2PpiPpsWPZ6J1ySUYfScfpIt2Yv56ShJXR2MOA6q2KjKHN4EpdyRQ==} '@vitest/pretty-format@4.0.16': resolution: {integrity: sha512-eNCYNsSty9xJKi/UdVD8Ou16alu7AYiS2fCPRs0b1OdhJiV89buAXQLpTbe+X8V9L6qrs9CqyvU7OaAopJYPsA==} - '@vitest/runner@4.0.15': - resolution: {integrity: sha512-+A+yMY8dGixUhHmNdPUxOh0la6uVzun86vAbuMT3hIDxMrAOmn5ILBHm8ajrqHE0t8R9T1dGnde1A5DTnmi3qw==} + '@vitest/runner@4.0.14': + resolution: {integrity: 
sha512-BsAIk3FAqxICqREbX8SetIteT8PiaUL/tgJjmhxJhCsigmzzH8xeadtp7LRnTpCVzvf0ib9BgAfKJHuhNllKLw==} - '@vitest/snapshot@4.0.15': - resolution: {integrity: sha512-A7Ob8EdFZJIBjLjeO0DZF4lqR6U7Ydi5/5LIZ0xcI+23lYlsYJAfGn8PrIWTYdZQRNnSRlzhg0zyGu37mVdy5g==} + '@vitest/snapshot@4.0.14': + resolution: {integrity: sha512-aQVBfT1PMzDSA16Y3Fp45a0q8nKexx6N5Amw3MX55BeTeZpoC08fGqEZqVmPcqN0ueZsuUQ9rriPMhZ3Mu19Ag==} - '@vitest/spy@4.0.15': - resolution: {integrity: sha512-+EIjOJmnY6mIfdXtE/bnozKEvTC4Uczg19yeZ2vtCz5Yyb0QQ31QWVQ8hswJ3Ysx/K2EqaNsVanjr//2+P3FHw==} + '@vitest/spy@4.0.14': + resolution: {integrity: sha512-JmAZT1UtZooO0tpY3GRyiC/8W7dCs05UOq9rfsUUgEZEdq+DuHLmWhPsrTt0TiW7WYeL/hXpaE07AZ2RCk44hg==} - '@vitest/utils@4.0.15': - resolution: {integrity: sha512-HXjPW2w5dxhTD0dLwtYHDnelK3j8sR8cWIaLxr22evTyY6q8pRCjZSmhRWVjBaOVXChQd6AwMzi9pucorXCPZA==} + '@vitest/utils@4.0.14': + resolution: {integrity: sha512-hLqXZKAWNg8pI+SQXyXxWCTOpA3MvsqcbVeNgSi8x/CSN2wi26dSzn1wrOhmCmFjEvN9p8/kLFRHa6PI8jHazw==} '@vitest/utils@4.0.16': resolution: {integrity: sha512-h8z9yYhV3e1LEfaQ3zdypIrnAg/9hguReGZoS7Gl0aBG5xgA410zBqECqmaF/+RkTggRsfnzc1XaAHA6bmUufA==} @@ -4471,8 +4471,8 @@ packages: buffer@6.0.3: resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==} - bun-types@1.3.4: - resolution: {integrity: sha512-5ua817+BZPZOlNaRgGBpZJOSAQ9RQ17pkwPD0yR7CfJg+r8DgIILByFifDTa+IPDDxzf5VNhtNlcKqFzDgJvlQ==} + bun-types@1.3.3: + resolution: {integrity: sha512-z3Xwlg7j2l9JY27x5Qn3Wlyos8YAp0kKRlrePAOjgjMGS5IG6E7Jnlx736vH9UVI4wUICwwhC9anYL++XeOgTQ==} bundle-name@4.1.0: resolution: {integrity: sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q==} @@ -5115,8 +5115,8 @@ packages: eastasianwidth@0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} - eciesjs@0.4.16: - resolution: {integrity: 
sha512-dS5cbA9rA2VR4Ybuvhg6jvdmp46ubLn3E+px8cG/35aEDNclrqoCjg6mt0HYZ/M+OoESS3jSkCrqk1kWAEhWAw==} + eciesjs@0.4.17: + resolution: {integrity: sha512-TOOURki4G7sD1wDCjj7NfLaXZZ49dFOeEb5y39IXpb8p0hRzVvfvzZHOi5JcT+PpyAbi/Y+lxPb8eTag2WYH8w==} engines: {bun: '>=1', deno: '>=2', node: '>=16'} ee-first@1.1.1: @@ -5424,8 +5424,8 @@ packages: resolution: {integrity: sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==} engines: {node: '>=6'} - expect-type@1.3.0: - resolution: {integrity: sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==} + expect-type@1.2.2: + resolution: {integrity: sha512-JhFGDVJ7tmDJItKhYgJCGLOWjuK9vPxiXoUFLwLDc99NlmklilbiQJwoctZtt13+xMw91MCk/REan6MWHqDjyA==} engines: {node: '>=12.0.0'} express@5.1.0: @@ -8602,18 +8602,18 @@ packages: vite: optional: true - vitest@4.0.15: - resolution: {integrity: sha512-n1RxDp8UJm6N0IbJLQo+yzLZ2sQCDyl1o0LeugbPWf8+8Fttp29GghsQBjYJVmWq3gBFfe9Hs1spR44vovn2wA==} + vitest@4.0.14: + resolution: {integrity: sha512-d9B2J9Cm9dN9+6nxMnnNJKJCtcyKfnHj15N6YNJfaFHRLua/d3sRKU9RuKmO9mB0XdFtUizlxfz/VPbd3OxGhw==} engines: {node: ^20.0.0 || ^22.0.0 || >=24.0.0} hasBin: true peerDependencies: '@edge-runtime/vm': '*' '@opentelemetry/api': ^1.9.0 '@types/node': ^20.0.0 || ^22.0.0 || >=24.0.0 - '@vitest/browser-playwright': 4.0.15 - '@vitest/browser-preview': 4.0.15 - '@vitest/browser-webdriverio': 4.0.15 - '@vitest/ui': 4.0.15 + '@vitest/browser-playwright': 4.0.14 + '@vitest/browser-preview': 4.0.14 + '@vitest/browser-webdriverio': 4.0.14 + '@vitest/ui': 4.0.14 happy-dom: '*' jsdom: '*' peerDependenciesMeta: @@ -9401,11 +9401,11 @@ snapshots: '@csstools/css-tokenizer@3.0.4': optional: true - '@dotenvx/dotenvx@1.51.4': + '@dotenvx/dotenvx@1.52.0': dependencies: commander: 11.1.0 dotenv: 17.2.3 - eciesjs: 0.4.16 + eciesjs: 0.4.17 execa: 5.1.1 fdir: 6.5.0(picomatch@4.0.3) ignore: 5.3.2 @@ -11266,7 +11266,7 @@ snapshots: pg-protocol: 1.10.3 pg-types: 4.0.2 - 
'@types/pg@8.16.0': + '@types/pg@8.15.6': dependencies: '@types/node': 20.19.24 pg-protocol: 1.10.3 @@ -11467,7 +11467,7 @@ snapshots: fast-glob: 3.3.3 is-glob: 4.0.3 minimatch: 9.0.5 - semver: 7.7.2 + semver: 7.7.3 ts-api-utils: 2.1.0(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: @@ -11623,7 +11623,7 @@ snapshots: vite: 7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.2) vue: 3.5.26(typescript@5.9.3) - '@vitest/coverage-v8@4.0.16(vitest@4.0.15(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0))': + '@vitest/coverage-v8@4.0.16(vitest@4.0.14(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0))': dependencies: '@bcoe/v8-coverage': 1.0.2 '@vitest/utils': 4.0.16 @@ -11636,22 +11636,22 @@ snapshots: obug: 2.1.1 std-env: 3.10.0 tinyrainbow: 3.0.3 - vitest: 4.0.15(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) + vitest: 4.0.14(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) transitivePeerDependencies: - supports-color - '@vitest/expect@4.0.15': + '@vitest/expect@4.0.14': dependencies: '@standard-schema/spec': 1.0.0 '@types/chai': 5.2.2 - '@vitest/spy': 4.0.15 - '@vitest/utils': 4.0.15 + '@vitest/spy': 4.0.14 + '@vitest/utils': 4.0.14 chai: 6.2.1 tinyrainbow: 3.0.3 - '@vitest/mocker@4.0.15(vite@7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0))': + '@vitest/mocker@4.0.14(vite@7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0))': dependencies: - '@vitest/spy': 4.0.15 + '@vitest/spy': 4.0.14 estree-walker: 3.0.3 
magic-string: 0.30.21 optionalDependencies: @@ -11674,22 +11674,22 @@ snapshots: dependencies: tinyrainbow: 3.0.3 - '@vitest/runner@4.0.15': + '@vitest/runner@4.0.14': dependencies: - '@vitest/utils': 4.0.15 + '@vitest/utils': 4.0.14 pathe: 2.0.3 - '@vitest/snapshot@4.0.15': + '@vitest/snapshot@4.0.14': dependencies: - '@vitest/pretty-format': 4.0.15 + '@vitest/pretty-format': 4.0.14 magic-string: 0.30.21 pathe: 2.0.3 - '@vitest/spy@4.0.15': {} + '@vitest/spy@4.0.14': {} - '@vitest/utils@4.0.15': + '@vitest/utils@4.0.14': dependencies: - '@vitest/pretty-format': 4.0.15 + '@vitest/pretty-format': 4.0.14 tinyrainbow: 3.0.3 '@vitest/utils@4.0.16': @@ -12251,7 +12251,7 @@ snapshots: base64-js: 1.5.1 ieee754: 1.2.1 - bun-types@1.3.4: + bun-types@1.3.3: dependencies: '@types/node': 20.19.24 @@ -12805,7 +12805,7 @@ snapshots: eastasianwidth@0.2.0: {} - eciesjs@0.4.16: + eciesjs@0.4.17: dependencies: '@ecies/ciphers': 0.2.5(@noble/ciphers@1.3.0) '@noble/ciphers': 1.3.0 @@ -13339,7 +13339,7 @@ snapshots: expand-template@2.0.3: {} - expect-type@1.3.0: {} + expect-type@1.2.2: {} express@5.1.0: dependencies: @@ -14203,7 +14203,7 @@ snapshots: kysely-bun-sqlite@0.4.0(kysely@0.28.8): dependencies: - bun-types: 1.3.4 + bun-types: 1.3.3 kysely: 0.28.8 kysely@0.28.8: {} @@ -16922,24 +16922,24 @@ snapshots: optionalDependencies: vite: 7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.2) - vitest@4.0.15(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0): + vitest@4.0.14(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0): dependencies: - '@vitest/expect': 4.0.15 - '@vitest/mocker': 4.0.15(vite@7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0)) - '@vitest/pretty-format': 4.0.15 - 
'@vitest/runner': 4.0.15 - '@vitest/snapshot': 4.0.15 - '@vitest/spy': 4.0.15 - '@vitest/utils': 4.0.15 + '@vitest/expect': 4.0.14 + '@vitest/mocker': 4.0.14(vite@7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0)) + '@vitest/pretty-format': 4.0.14 + '@vitest/runner': 4.0.14 + '@vitest/snapshot': 4.0.14 + '@vitest/spy': 4.0.14 + '@vitest/utils': 4.0.14 es-module-lexer: 1.7.0 - expect-type: 1.3.0 + expect-type: 1.2.2 magic-string: 0.30.21 obug: 2.1.1 pathe: 2.0.3 picomatch: 4.0.3 std-env: 3.10.0 tinybench: 2.9.0 - tinyexec: 1.0.2 + tinyexec: 0.3.2 tinyglobby: 0.2.15 tinyrainbow: 3.0.3 vite: 7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) From 2e4c249d1b480a7bac1d8ded7745b088975773ca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Tue, 27 Jan 2026 22:22:31 +0100 Subject: [PATCH 33/83] feat(cli): add MySQL support for schema introspection Introduces a MySQL-specific introspection provider to support pulling existing database schemas into ZenStack. The implementation includes logic for mapping MySQL data types to ZenStack types, handling auto-incrementing fields, and parsing MySQL-specific enum definitions. It utilizes dynamic imports for database drivers to minimize the CLI footprint for users not targeting MySQL. 
--- .../cli/src/actions/pull/provider/index.ts | 2 + .../cli/src/actions/pull/provider/mysql.ts | 424 ++++++++++++++++++ 2 files changed, 426 insertions(+) create mode 100644 packages/cli/src/actions/pull/provider/mysql.ts diff --git a/packages/cli/src/actions/pull/provider/index.ts b/packages/cli/src/actions/pull/provider/index.ts index e712ac983..7c93746d4 100644 --- a/packages/cli/src/actions/pull/provider/index.ts +++ b/packages/cli/src/actions/pull/provider/index.ts @@ -1,11 +1,13 @@ import type { DataSourceProviderType } from '@zenstackhq/schema'; export * from './provider'; +import { mysql } from './mysql'; import { postgresql } from './postgresql'; import type { IntrospectionProvider } from './provider'; import { sqlite } from './sqlite'; export const providers: Record = { + mysql, postgresql, sqlite, }; diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts new file mode 100644 index 000000000..123463725 --- /dev/null +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -0,0 +1,424 @@ +import type { BuiltinType } from '@zenstackhq/language/ast'; +import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; +import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; +import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; + +// Note: We dynamically import mysql2 inside the async function to avoid +// requiring it at module load time for environments that don't use MySQL. 
+ +export const mysql: IntrospectionProvider = { + isSupportedFeature(feature) { + switch (feature) { + case 'NativeEnum': + return true; + case 'Schema': + default: + return false; + } + }, + getBuiltinType(type) { + const t = (type || '').toLowerCase().trim(); + + // MySQL doesn't have native array types + const isArray = false; + + switch (t) { + // integers + case 'tinyint': + case 'smallint': + case 'mediumint': + case 'int': + case 'integer': + return { type: 'Int', isArray }; + case 'bigint': + return { type: 'BigInt', isArray }; + + // decimals and floats + case 'decimal': + case 'numeric': + return { type: 'Decimal', isArray }; + case 'float': + case 'double': + case 'real': + return { type: 'Float', isArray }; + + // boolean (MySQL uses TINYINT(1) for boolean) + case 'boolean': + case 'bool': + return { type: 'Boolean', isArray }; + + // strings + case 'char': + case 'varchar': + case 'tinytext': + case 'text': + case 'mediumtext': + case 'longtext': + return { type: 'String', isArray }; + + // dates/times + case 'date': + case 'time': + case 'datetime': + case 'timestamp': + case 'year': + return { type: 'DateTime', isArray }; + + // binary + case 'binary': + case 'varbinary': + case 'tinyblob': + case 'blob': + case 'mediumblob': + case 'longblob': + return { type: 'Bytes', isArray }; + + // json + case 'json': + return { type: 'Json', isArray }; + + default: + // Handle ENUM type - MySQL returns enum values like "enum('val1','val2')" + if (t.startsWith('enum(')) { + return { type: 'String', isArray }; + } + // Handle SET type + if (t.startsWith('set(')) { + return { type: 'String', isArray }; + } + return { type: 'Unsupported' as const, isArray }; + } + }, + getDefaultDatabaseType(type: BuiltinType) { + switch (type) { + case 'String': + return { type: 'varchar', precisition: 191 }; + case 'Boolean': + return { type: 'tinyint', precisition: 1 }; + case 'Int': + return { type: 'int' }; + case 'BigInt': + return { type: 'bigint' }; + case 'Float': + 
return { type: 'double' }; + case 'Decimal': + return { type: 'decimal', precisition: 65 }; + case 'DateTime': + return { type: 'datetime', precisition: 3 }; + case 'Json': + return { type: 'json' }; + case 'Bytes': + return { type: 'longblob' }; + } + }, + async introspect(connectionString: string): Promise { + const mysql = await import('mysql2/promise'); + const connection = await mysql.createConnection(connectionString); + + try { + // Extract database name from connection string + const url = new URL(connectionString); + const databaseName = url.pathname.replace('/', ''); + + if (!databaseName) { + throw new Error('Database name not found in connection string'); + } + + // Introspect tables + const [tableRows] = (await connection.execute(getTableIntrospectionQuery(databaseName))) as [ + IntrospectedTable[], + unknown, + ]; + const tables: IntrospectedTable[] = []; + + for (const row of tableRows) { + const columns = typeof row.columns === 'string' ? JSON.parse(row.columns) : row.columns; + const indexes = typeof row.indexes === 'string' ? 
JSON.parse(row.indexes) : row.indexes; + + tables.push({ + schema: row.schema || '', + name: row.name, + type: row.type as 'table' | 'view', + definition: row.definition, + columns: columns || [], + indexes: indexes || [], + }); + } + + // Introspect enums (MySQL stores enum values in column definitions) + const [enumRows] = (await connection.execute(getEnumIntrospectionQuery(databaseName))) as [ + { table_name: string; column_name: string; column_type: string }[], + unknown, + ]; + + const enums: IntrospectedEnum[] = enumRows.map((row) => { + // Parse enum values from column_type like "enum('val1','val2','val3')" + const values = parseEnumValues(row.column_type); + return { + schema_name: databaseName, + // Create a unique enum type name based on table and column + enum_type: `${row.table_name}_${row.column_name}`, + values, + }; + }); + + return { tables, enums }; + } finally { + await connection.end(); + } + }, + getDefaultValue({ defaultValue, fieldName, services, enums }) { + const val = defaultValue.trim(); + const factories: DataFieldAttributeFactory[] = []; + + const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef('@default', services)); + + // Handle CURRENT_TIMESTAMP + if (val === 'CURRENT_TIMESTAMP' || val === 'current_timestamp()' || val === 'now()') { + factories.push(defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)))); + + if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') { + factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services))); + } + return factories; + } + + // Handle auto_increment + if (val === 'auto_increment') { + factories.push( + defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services))), + ); + return factories; + } + + // Handle NULL + if (val.toUpperCase() === 'NULL') { + return []; + } + + // Handle boolean values + if (val === 'true' || val === '1' || val 
=== "b'1'") { + factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(true))); + return factories; + } + if (val === 'false' || val === '0' || val === "b'0'") { + factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(false))); + return factories; + } + + // Handle numeric values + if (/^-?\d+$/.test(val) || /^-?\d+(\.\d+)?$/.test(val)) { + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); + return factories; + } + + // Handle string values (quoted with single quotes) + if (val.startsWith("'") && val.endsWith("'")) { + const strippedValue = val.slice(1, -1).replace(/''/g, "'"); + + // Check if it's an enum value + const enumDef = enums.find((e) => e.fields.find((v) => getDbName(v) === strippedValue)); + if (enumDef) { + const enumField = enumDef.fields.find((v) => getDbName(v) === strippedValue); + if (enumField) { + factories.push(defaultAttr.addArg((ab) => ab.ReferenceExpr.setTarget(enumField))); + return factories; + } + } + + factories.push(defaultAttr.addArg((ab) => ab.StringLiteral.setValue(strippedValue))); + return factories; + } + + // Handle function calls (e.g., uuid(), now()) + if (val.includes('(') && val.includes(')')) { + // Check for known functions + if (val.toLowerCase() === 'uuid()') { + factories.push( + defaultAttr.addArg((a) => a.InvocationExpr.setFunction(getFunctionRef('uuid', services))), + ); + return factories; + } + + // For other functions, use dbgenerated + factories.push( + defaultAttr.addArg((a) => + a.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => + a.setValue((v) => v.StringLiteral.setValue(val)), + ), + ), + ); + return factories; + } + + // For any other unhandled cases, use dbgenerated + factories.push( + defaultAttr.addArg((a) => + a.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => + a.setValue((v) => v.StringLiteral.setValue(val)), + ), + ), + ); + return factories; + }, +}; + +function 
getTableIntrospectionQuery(databaseName: string) { + return ` +SELECT + t.TABLE_SCHEMA AS \`schema\`, + t.TABLE_NAME AS \`name\`, + CASE t.TABLE_TYPE + WHEN 'BASE TABLE' THEN 'table' + WHEN 'VIEW' THEN 'view' + ELSE NULL + END AS \`type\`, + CASE + WHEN t.TABLE_TYPE = 'VIEW' THEN v.VIEW_DEFINITION + ELSE NULL + END AS \`definition\`, + ( + SELECT JSON_ARRAYAGG( + JSON_OBJECT( + 'name', c.COLUMN_NAME, + 'datatype', c.DATA_TYPE, + 'datatype_schema', c.TABLE_SCHEMA, + 'length', c.CHARACTER_MAXIMUM_LENGTH, + 'precision', COALESCE(c.NUMERIC_PRECISION, c.DATETIME_PRECISION), + 'nullable', c.IS_NULLABLE = 'YES', + 'default', c.COLUMN_DEFAULT, + 'pk', c.COLUMN_KEY = 'PRI', + 'unique', c.COLUMN_KEY = 'UNI', + 'unique_name', CASE WHEN c.COLUMN_KEY = 'UNI' THEN c.COLUMN_NAME ELSE NULL END, + 'computed', c.GENERATION_EXPRESSION IS NOT NULL AND c.GENERATION_EXPRESSION != '', + 'options', JSON_ARRAY(), + 'foreign_key_schema', kcu_fk.REFERENCED_TABLE_SCHEMA, + 'foreign_key_table', kcu_fk.REFERENCED_TABLE_NAME, + 'foreign_key_column', kcu_fk.REFERENCED_COLUMN_NAME, + 'foreign_key_name', kcu_fk.CONSTRAINT_NAME, + 'foreign_key_on_update', rc.UPDATE_RULE, + 'foreign_key_on_delete', rc.DELETE_RULE + ) + ) + FROM INFORMATION_SCHEMA.COLUMNS c + LEFT JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu_fk + ON c.TABLE_SCHEMA = kcu_fk.TABLE_SCHEMA + AND c.TABLE_NAME = kcu_fk.TABLE_NAME + AND c.COLUMN_NAME = kcu_fk.COLUMN_NAME + AND kcu_fk.REFERENCED_TABLE_NAME IS NOT NULL + LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS rc + ON kcu_fk.CONSTRAINT_SCHEMA = rc.CONSTRAINT_SCHEMA + AND kcu_fk.CONSTRAINT_NAME = rc.CONSTRAINT_NAME + WHERE c.TABLE_SCHEMA = t.TABLE_SCHEMA + AND c.TABLE_NAME = t.TABLE_NAME + ORDER BY c.ORDINAL_POSITION + ) AS \`columns\`, + ( + SELECT JSON_ARRAYAGG( + JSON_OBJECT( + 'name', s.INDEX_NAME, + 'method', s.INDEX_TYPE, + 'unique', s.NON_UNIQUE = 0, + 'primary', s.INDEX_NAME = 'PRIMARY', + 'valid', TRUE, + 'ready', TRUE, + 'partial', FALSE, + 'predicate', NULL, + 'columns', 
( + SELECT JSON_ARRAYAGG( + JSON_OBJECT( + 'name', s2.COLUMN_NAME, + 'expression', NULL, + 'order', CASE s2.COLLATION WHEN 'A' THEN 'ASC' WHEN 'D' THEN 'DESC' ELSE NULL END, + 'nulls', NULL + ) + ORDER BY s2.SEQ_IN_INDEX + ) + FROM INFORMATION_SCHEMA.STATISTICS s2 + WHERE s2.TABLE_SCHEMA = s.TABLE_SCHEMA + AND s2.TABLE_NAME = s.TABLE_NAME + AND s2.INDEX_NAME = s.INDEX_NAME + ) + ) + ) + FROM ( + SELECT DISTINCT INDEX_NAME, INDEX_TYPE, NON_UNIQUE, TABLE_SCHEMA, TABLE_NAME + FROM INFORMATION_SCHEMA.STATISTICS + WHERE TABLE_SCHEMA = t.TABLE_SCHEMA AND TABLE_NAME = t.TABLE_NAME + ) s + ) AS \`indexes\` +FROM INFORMATION_SCHEMA.TABLES t +LEFT JOIN INFORMATION_SCHEMA.VIEWS v + ON t.TABLE_SCHEMA = v.TABLE_SCHEMA AND t.TABLE_NAME = v.TABLE_NAME +WHERE t.TABLE_SCHEMA = '${databaseName}' + AND t.TABLE_TYPE IN ('BASE TABLE', 'VIEW') + AND t.TABLE_NAME NOT LIKE '_prisma_migrations' +ORDER BY t.TABLE_SCHEMA, t.TABLE_NAME; +`; +} + +function getEnumIntrospectionQuery(databaseName: string) { + return ` +SELECT + c.TABLE_NAME AS table_name, + c.COLUMN_NAME AS column_name, + c.COLUMN_TYPE AS column_type +FROM INFORMATION_SCHEMA.COLUMNS c +WHERE c.TABLE_SCHEMA = '${databaseName}' + AND c.DATA_TYPE = 'enum' +ORDER BY c.TABLE_NAME, c.COLUMN_NAME; +`; +} + +/** + * Parse enum values from MySQL COLUMN_TYPE string like "enum('val1','val2','val3')" + */ +function parseEnumValues(columnType: string): string[] { + // Match the content inside enum(...) 
+ const match = columnType.match(/^enum\((.+)\)$/i); + if (!match || !match[1]) return []; + + const valuesString = match[1]; + const values: string[] = []; + + // Parse quoted values, handling escaped quotes + let current = ''; + let inQuote = false; + let i = 0; + + while (i < valuesString.length) { + const char = valuesString[i]; + + if (char === "'" && !inQuote) { + inQuote = true; + i++; + continue; + } + + if (char === "'" && inQuote) { + // Check for escaped quote ('') + if (valuesString[i + 1] === "'") { + current += "'"; + i += 2; + continue; + } + // End of value + values.push(current); + current = ''; + inQuote = false; + i++; + // Skip comma and any whitespace + while (i < valuesString.length && (valuesString[i] === ',' || valuesString[i] === ' ')) { + i++; + } + continue; + } + + if (inQuote) { + current += char; + } + i++; + } + + return values; +} From 71717a57f1313d620afd27c8c92d1f4083144f33 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 28 Jan 2026 00:50:41 +0100 Subject: [PATCH 34/83] fix(cli): improve field matching logic during db pull --- packages/cli/src/actions/db.ts | 66 +++++++++++++++++++++------------- 1 file changed, 42 insertions(+), 24 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 36e354b95..4bf9ca47e 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -241,19 +241,30 @@ async function runPull(options: PullOptions) { } newDataModel.fields.forEach((f) => { - const originalFields = originalDataModel.fields.filter((d) => { - return ( - getDbName(d) === getDbName(f) || - (getRelationFkName(d as any) === getRelationFkName(f as any) && + // Prioritized matching: exact db name > relation FK name > type reference + let originalFields = originalDataModel.fields.filter((d) => getDbName(d) === getDbName(f)); + + if (originalFields.length === 0) { + // Try matching by relation FK name + originalFields = 
originalDataModel.fields.filter( + (d) => + getRelationFkName(d as any) === getRelationFkName(f as any) && !!getRelationFkName(d as any) && - !!getRelationFkName(f as any)) || - (f.$type === 'DataField' && + !!getRelationFkName(f as any), + ); + } + + if (originalFields.length === 0) { + // Try matching by type reference + originalFields = originalDataModel.fields.filter( + (d) => + f.$type === 'DataField' && d.$type === 'DataField' && f.type.reference?.ref && d.type.reference?.ref && - getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref)) + getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref), ); - }); + } if (originalFields.length > 1) { console.warn( @@ -297,22 +308,29 @@ async function runPull(options: PullOptions) { }); }); originalDataModel.fields - .filter( - (f) => - !newDataModel.fields.find((d) => { - return ( - getDbName(d) === getDbName(f) || - (getRelationFkName(d as any) === getRelationFkName(f as any) && - !!getRelationFkName(d as any) && - !!getRelationFkName(f as any)) || - (f.$type === 'DataField' && - d.$type === 'DataField' && - f.type.reference?.ref && - d.type.reference?.ref && - getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref)) - ); - }), - ) + .filter((f) => { + // Prioritized matching: exact db name > relation FK name > type reference + const matchByDbName = newDataModel.fields.find((d) => getDbName(d) === getDbName(f)); + if (matchByDbName) return false; + + const matchByFkName = newDataModel.fields.find( + (d) => + getRelationFkName(d as any) === getRelationFkName(f as any) && + !!getRelationFkName(d as any) && + !!getRelationFkName(f as any), + ); + if (matchByFkName) return false; + + const matchByTypeRef = newDataModel.fields.find( + (d) => + f.$type === 'DataField' && + d.$type === 'DataField' && + f.type.reference?.ref && + d.type.reference?.ref && + getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref), + ); + return !matchByTypeRef; + }) .forEach((f) => { const _model = 
f.$container; const index = _model.fields.findIndex((d) => d === f); From e5de3a0474a353899c4454f967962a1c4671ce87 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 28 Jan 2026 00:50:42 +0100 Subject: [PATCH 35/83] feat(cli): enhance SQLite introspection with autoincrement support --- .../cli/src/actions/pull/provider/sqlite.ts | 36 +++++++++++++++++-- 1 file changed, 33 insertions(+), 3 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index 5825becde..f891e2994 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -63,12 +63,27 @@ export const sqlite: IntrospectionProvider = { "SELECT name, type, sql AS definition FROM sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name", ); + // SQLite maintains sqlite_sequence table for tables with AUTOINCREMENT columns + // If a table has an entry here, its INTEGER PRIMARY KEY column is autoincrement + const autoIncrementTables = new Set(); + try { + const seqRows = all<{ name: string }>("SELECT name FROM sqlite_sequence"); + for (const row of seqRows) { + autoIncrementTables.add(row.name); + } + } catch { + // sqlite_sequence table doesn't exist if no AUTOINCREMENT was ever used + } + const tables: IntrospectedTable[] = []; for (const t of tablesRaw) { const tableName = t.name; const schema = ''; + // Check if this table has autoincrement (via sqlite_sequence) + const hasAutoIncrement = autoIncrementTables.has(tableName); + // Columns with extended info; filter out hidden=1 (internal/rowid), mark computed if hidden=2 (generated) const columnsInfo = all<{ cid: number; @@ -88,7 +103,7 @@ export const sqlite: IntrospectionProvider = { unique: number; origin: string; partial: number; - }>(`PRAGMA index_list('${tableNameEsc}')`); + }>(`PRAGMA index_list('${tableNameEsc}')`).filter((r) => 
!r.name.startsWith('sqlite_autoindex_')); // Unique columns detection via unique indexes with single column const uniqueSingleColumn = new Set(); @@ -163,6 +178,13 @@ export const sqlite: IntrospectionProvider = { const fk = fkByColumn.get(c.name); + // Determine default value - check for autoincrement + // AUTOINCREMENT in SQLite can only be on INTEGER PRIMARY KEY column + let defaultValue = c.dflt_value; + if (hasAutoIncrement && c.pk) { + defaultValue = 'autoincrement'; + } + columns.push({ name: c.name, datatype: c.type || '', @@ -178,7 +200,7 @@ export const sqlite: IntrospectionProvider = { pk: !!c.pk, computed: hidden === 2, nullable: c.notnull !== 1, - default: c.dflt_value, + default: defaultValue, options: [], unique: uniqueSingleColumn.has(c.name), unique_name: null, @@ -189,7 +211,7 @@ export const sqlite: IntrospectionProvider = { } const enums: IntrospectedEnum[] = []; // SQLite doesn't support enums - + return { tables, enums }; } finally { db.close(); @@ -211,6 +233,14 @@ export const sqlite: IntrospectionProvider = { return factories; } + // Handle autoincrement + if (val === 'autoincrement') { + factories.push( + defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services))), + ); + return factories; + } + if (val === 'true' || val === 'false') { factories.push(defaultAttr.addArg((a) => a.BooleanLiteral.setValue(val === 'true'))); return factories; From 52877c3c1e297306b25d8a08a4735758df16c97f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 28 Jan 2026 00:50:43 +0100 Subject: [PATCH 36/83] fix(cli): refine attribute generation in db pull --- packages/cli/src/actions/pull/index.ts | 47 ++++++++++++-------------- 1 file changed, 22 insertions(+), 25 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 05a54e035..9a704b1f9 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ 
-252,6 +252,10 @@ export function syncTable({ return typeBuilder; }); + if (column.pk && !multiPk) { + builder.addAttribute((b) => b.setDecl(idAttribute)); + } + if (column.default) { const defaultValuesAttrs = provider.getDefaultValue({ fieldName: column.name, @@ -262,10 +266,6 @@ export function syncTable({ defaultValuesAttrs.forEach(builder.addAttribute.bind(builder)); } - if (column.pk && !multiPk) { - builder.addAttribute((b) => b.setDecl(idAttribute)); - } - if (column.unique && !column.pk) { builder.addAttribute((b) => { b.setDecl(uniqueAttribute); @@ -320,22 +320,8 @@ export function syncTable({ ); } - const uniqueColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name); - if (uniqueColumns.length > 0) { - modelFactory.addAttribute((builder) => - builder.setDecl(modelUniqueAttribute).addArg((argBuilder) => { - const arrayExpr = argBuilder.ArrayExpr; - uniqueColumns.forEach((c) => { - const ref = modelFactory.node.fields.find((f) => getDbName(f) === c); - if (!ref) { - throw new Error(`Field ${c} not found`); - } - arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); - }); - return arrayExpr; - }), - ); - } else { + const uniqueColumns = table.columns.filter((c) => c.unique); + if(uniqueColumns.length === 0) { modelFactory.addAttribute((a) => a.setDecl(getAttributeRef('@@ignore', services))); modelFactory.comments.push( '/// The underlying table does not contain a valid unique identifier and can therefore currently not be handled by Zenstack Client.', @@ -361,13 +347,15 @@ export function syncTable({ return; } - if (index.columns.length === 1 && index.columns.find((c) => pkColumns.includes(c.name))) { - //skip primary key + if (index.columns.length === 1 && index.columns.find((c) => pkColumns.includes(c.name)) + || index.columns.length === 1 && index.unique) { + //skip primary key or unique constraints as they are already handled return; } modelFactory.addAttribute((builder) => - builder + { + const attr = builder 
.setDecl(index.unique ? modelUniqueAttribute : modelindexAttribute) .addArg((argBuilder) => { const arrayExpr = argBuilder.ArrayExpr; @@ -385,8 +373,17 @@ export function syncTable({ }); }); return arrayExpr; - }) - .addArg((argBuilder) => argBuilder.StringLiteral.setValue(index.name), 'map'), + }); + + const suffix = index.unique ? '_key' : '_idx'; + + if(index.name !== `${table.name}_${index.columns.map(c => c.name).join('_')}${suffix}`){ + attr.addArg((argBuilder) => argBuilder.StringLiteral.setValue(index.name), 'map'); + } + + return attr + } + ); }); if (table.schema && table.schema !== '' && table.schema !== defaultSchema) { From cf74d8bb4f5177bf2e74fea1947cff170de7bbb9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 28 Jan 2026 00:50:44 +0100 Subject: [PATCH 37/83] test(cli): update db pull tests for SQLite specific behavior --- packages/cli/test/db/pull.test.ts | 161 ++++++++++++++---------------- 1 file changed, 74 insertions(+), 87 deletions(-) diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index 34d79036a..84f34024a 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -11,43 +11,11 @@ const generator = new ZModelCodeGenerator({ indent: 4, }); -describe('DB pull', () => { +describe('DB pull - Sqlite specific', () => { it("simple schema - pull shouldn't modify the schema", () => { - const workDir = createProject( - `model User { - id String @id @default(cuid()) - email String @unique @map("email_address") - name String? @default("Anonymous") - role Role @default(USER) - profile Profile? - shared_profile Profile? @relation("shared") - posts Post[] - createdAt DateTime @default(now()) - updatedAt DateTime @updatedAt - jsonData Json? - balance Decimal @default(0.00) - isActive Boolean @default(true) - bigCounter BigInt @default(0) - bytes Bytes? 
- - @@index([role]) - @@map("users") -} - -model Profile { - id Int @id @default(autoincrement()) - user User @relation(fields: [userId], references: [id], onDelete: Cascade) - userId String @unique - user_shared User @relation("shared", fields: [shared_userId], references: [id], onDelete: Cascade) - shared_userId String @unique - bio String? - avatarUrl String? - - @@map("profiles") -} - + const workDir = createProject(` model Post { - id Int @id @default(autoincrement()) + id Int @id @default(1) author User @relation(fields: [authorId], references: [id], onDelete: Cascade) authorId String title String @@ -55,7 +23,6 @@ model Post { published Boolean @default(false) tags PostTag[] createdAt DateTime @default(now()) - updatedAt DateTime @updatedAt slug String score Float @default(0.0) metadata Json? @@ -65,16 +32,6 @@ model Post { @@map("posts") } -model Tag { - id Int @id @default(autoincrement()) - name String @unique - posts PostTag[] - createdAt DateTime @default(now()) - - @@index([name], name: "tag_name_idx") - @@map("tags") -} - model PostTag { post Post @relation(fields: [postId], references: [id], onDelete: Cascade) postId Int @@ -87,10 +44,45 @@ model PostTag { @@map("post_tags") } -enum Role { - USER - ADMIN - MODERATOR +model Profile { + id Int @id @default(1) + user User @relation(fields: [userId], references: [id], onDelete: Cascade) + userId String @unique + user_shared User @relation("shared", fields: [shared_userId], references: [id], onDelete: Cascade) + shared_userId String @unique + bio String? + avatarUrl String? + + @@map("profiles") +} + +model Tag { + id Int @id @default(1) + name String @unique + posts PostTag[] + createdAt DateTime @default(now()) + + @@index([name], name: "tag_name_idx") + @@map("tags") +} + +model User { + id String @id @default(cuid()) + email String @unique @map("email_address") + name String? @default("Anonymous") + role String @default("USER") + profile Profile? + shared_profile Profile? 
@relation("shared") + posts Post[] + createdAt DateTime @default(now()) + jsonData Json? + balance Decimal @default(0.00) + isActive Boolean @default(true) + bigCounter BigInt @default(0) + bytes Bytes? + + @@index([role]) + @@map("users") }`, ); runCli('format', workDir); @@ -104,74 +96,69 @@ enum Role { it('simple schema - pull shouldn recreate the schema.zmodel', async () => { const workDir = createProject( `model Post { - id Int @id @default(autoincrement()) + id Int @id @default(1) authorId String title String content String? published Boolean @default(false) createdAt DateTime @default(now()) - updatedAt DateTime @updatedAt slug String score Float @default(0.0) metadata Json? - author User @relation(fields: [authorId], references: [id], onDelete: Cascade) - PostTag PostTag[] + user User @relation(fields: [authorId], references: [id], onDelete: Cascade, onUpdate: Cascade) + postTag PostTag[] @@unique([authorId, slug]) @@index([authorId, published]) } model PostTag { - post Post @relation(fields: [postId], references: [id], onDelete: Cascade) postId Int - tag Tag @relation(fields: [tagId], references: [id], onDelete: Cascade) tagId Int assignedAt DateTime @default(now()) note String? @default("initial") + post Post @relation(fields: [postId], references: [id], onDelete: Cascade, onUpdate: Cascade) + tag Tag @relation(fields: [tagId], references: [id], onDelete: Cascade, onUpdate: Cascade) @@id([postId, tagId]) } + +model Profile { + id Int @id @default(1) + userId String @unique + sharedUserId String @unique @map("shared_userId") + bio String? + avatarUrl String? 
+ + profileUserId User @relation(fields: [userId], references: [id], onDelete: Cascade, onUpdate: Cascade) + profileSharedUserId User @relation("shared", fields: [sharedUserId], references: [id], onDelete: Cascade, onUpdate: Cascade) +} + +model Tag { + id Int @id @default(1) + name String @unique + createdAt DateTime @default(now()) + postTag PostTag[] + + @@index([name], map: "tag_name_idx") +} + model User { - id String @id @default(cuid()) + id String @id email String @unique name String? @default("Anonymous") - role Role @default(USER) - profile Profile? - shared_profile Profile? @relation("shared") - posts Post[] + role String @default("USER") createdAt DateTime @default(now()) - updatedAt DateTime @updatedAt jsonData Json? + balance Decimal @default(0.00) isActive Boolean @default(true) bigCounter BigInt @default(0) bytes Bytes? + post Post[] + profileUserId Profile? + profileSharedUserId Profile? @relation("shared") @@index([role]) -} - -model Profile { - id Int @id @default(autoincrement()) - user User @relation(fields: [userId], references: [id], onDelete: Cascade) - userId String @unique - user_shared User @relation("shared", fields: [shared_userId], references: [id], onDelete: Cascade) - shared_userId String @unique - bio String? - avatarUrl String? 
-} - -model Tag { - id Int @id @default(autoincrement()) - name String @unique - posts PostTag[] - createdAt DateTime @default(now()) - - @@index([name], name: "tag_name_idx") -} - -enum Role { - USER - ADMIN - MODERATOR }`, ); console.log(workDir) From fc9b3608c7dc674b4c00ffafb267a0005ff2a768 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 28 Jan 2026 00:50:45 +0100 Subject: [PATCH 38/83] refactor(language): export ZModelServices type --- packages/language/src/document.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/language/src/document.ts b/packages/language/src/document.ts index 2fdce233d..026d3d23e 100644 --- a/packages/language/src/document.ts +++ b/packages/language/src/document.ts @@ -13,7 +13,7 @@ import path from 'node:path'; import { fileURLToPath } from 'node:url'; import { isDataModel, isDataSource, type Model } from './ast'; import { DB_PROVIDERS_SUPPORTING_LIST_TYPE, STD_LIB_MODULE_NAME } from './constants'; -import { createZModelServices } from './module'; +import { createZModelServices, type ZModelServices } from './module'; import { getAllFields, getDataModelAndTypeDefs, From 459b9706fd3904a2240d5bae349ba111e78b4170 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 28 Jan 2026 23:19:18 +0100 Subject: [PATCH 39/83] fix(cli): improve sqlite introspection for autoincrement and fk names --- .../cli/src/actions/pull/provider/sqlite.ts | 40 ++++++++++++++----- 1 file changed, 31 insertions(+), 9 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index f891e2994..be4e6d6cf 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -1,6 +1,7 @@ import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; import type { IntrospectedEnum, 
IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; +import { writeFileSync } from 'node:fs'; // Note: We dynamically import better-sqlite3 inside the async function to avoid // requiring it at module load time for environments that don't use SQLite. @@ -63,16 +64,18 @@ export const sqlite: IntrospectionProvider = { "SELECT name, type, sql AS definition FROM sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name", ); - // SQLite maintains sqlite_sequence table for tables with AUTOINCREMENT columns - // If a table has an entry here, its INTEGER PRIMARY KEY column is autoincrement + // Detect AUTOINCREMENT by parsing the CREATE TABLE statement + // The sqlite_sequence table only has entries after rows are inserted, + // so we need to check the actual table definition instead const autoIncrementTables = new Set(); - try { - const seqRows = all<{ name: string }>("SELECT name FROM sqlite_sequence"); - for (const row of seqRows) { - autoIncrementTables.add(row.name); + for (const t of tablesRaw) { + if (t.type === 'table' && t.definition) { + // AUTOINCREMENT keyword appears in PRIMARY KEY definition + // e.g., PRIMARY KEY("id" AUTOINCREMENT) or PRIMARY KEY(id AUTOINCREMENT) + if (/\bAUTOINCREMENT\b/i.test(t.definition)) { + autoIncrementTables.add(t.name); + } } - } catch { - // sqlite_sequence table doesn't exist if no AUTOINCREMENT was ever used } const tables: IntrospectedTable[] = []; @@ -147,6 +150,25 @@ export const sqlite: IntrospectionProvider = { on_delete: any; }>(`PRAGMA foreign_key_list('${tableName.replace(/'/g, "''")}')`); + // Extract FK constraint names from CREATE TABLE statement + // Pattern: CONSTRAINT "name" FOREIGN KEY("column") or CONSTRAINT name FOREIGN KEY(column) + const fkConstraintNames = new Map(); + if (t.definition) { + // Match: CONSTRAINT "name" FOREIGN KEY("col") or CONSTRAINT name FOREIGN KEY(col) + // Use [^"'`]+ for quoted names to capture full identifier including 
underscores and other chars + const fkRegex = /CONSTRAINT\s+(?:["'`]([^"'`]+)["'`]|(\w+))\s+FOREIGN\s+KEY\s*\(\s*(?:["'`]([^"'`]+)["'`]|(\w+))\s*\)/gi; + let match; + while ((match = fkRegex.exec(t.definition)) !== null) { + // match[1] = quoted constraint name, match[2] = unquoted constraint name + // match[3] = quoted column name, match[4] = unquoted column name + const constraintName = match[1] || match[2]; + const columnName = match[3] || match[4]; + if (constraintName && columnName) { + fkConstraintNames.set(columnName, constraintName); + } + } + } + const fkByColumn = new Map< string, { @@ -164,7 +186,7 @@ export const sqlite: IntrospectionProvider = { foreign_key_schema: '', foreign_key_table: fk.table || null, foreign_key_column: fk.to || null, - foreign_key_name: null, + foreign_key_name: fkConstraintNames.get(fk.from) ?? null, foreign_key_on_update: (fk.on_update as any) ?? null, foreign_key_on_delete: (fk.on_delete as any) ?? null, }); From dbb1df21bbdbc83ecb5c93a3541d6ff4b84ab9a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 28 Jan 2026 23:19:18 +0100 Subject: [PATCH 40/83] feat(cli): enhance field matching logic during pull by using relation fields --- packages/cli/src/actions/db.ts | 43 ++++++++++++++++++++------ packages/cli/src/actions/pull/utils.ts | 23 ++++++++++++++ 2 files changed, 57 insertions(+), 9 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 4bf9ca47e..e97f76a4a 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -15,7 +15,7 @@ import { } from './action-utils'; import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; -import { getDatasource, getDbName, getRelationFkName } from './pull/utils'; +import { getDatasource, getDbName, getRelationFieldsKey, getRelationFkName } from './pull/utils'; type PushOptions = { schema?: string; @@ -241,11 +241,22 @@ async function 
runPull(options: PullOptions) { } newDataModel.fields.forEach((f) => { - // Prioritized matching: exact db name > relation FK name > type reference + // Prioritized matching: exact db name > relation fields key > relation FK name > type reference let originalFields = originalDataModel.fields.filter((d) => getDbName(d) === getDbName(f)); if (originalFields.length === 0) { - // Try matching by relation FK name + // Try matching by relation fields key (the `fields` attribute in @relation) + // This matches relation fields by their FK field references + const newFieldsKey = getRelationFieldsKey(f as any); + if (newFieldsKey) { + originalFields = originalDataModel.fields.filter( + (d) => getRelationFieldsKey(d as any) === newFieldsKey, + ); + } + } + + if (originalFields.length === 0) { + // Try matching by relation FK name (the `map` attribute in @relation) originalFields = originalDataModel.fields.filter( (d) => getRelationFkName(d as any) === getRelationFkName(f as any) && @@ -267,11 +278,16 @@ async function runPull(options: PullOptions) { } if (originalFields.length > 1) { - console.warn( - colors.yellow( - `Found more original fields, need to tweak the search algorith. ${originalDataModel.name}->[${originalFields.map((of) => of.name).join(', ')}](${f.name})`, - ), - ); + // If this is a back-reference relation field (no `fields` attribute), + // silently skip when there are multiple potential matches + const isBackReferenceField = !getRelationFieldsKey(f as any); + if (!isBackReferenceField) { + console.warn( + colors.yellow( + `Found more original fields, need to tweak the search algorithm. 
${originalDataModel.name}->[${originalFields.map((of) => of.name).join(', ')}](${f.name})`, + ), + ); + } return; } const originalField = originalFields.at(0); @@ -309,10 +325,19 @@ async function runPull(options: PullOptions) { }); originalDataModel.fields .filter((f) => { - // Prioritized matching: exact db name > relation FK name > type reference + // Prioritized matching: exact db name > relation fields key > relation FK name > type reference const matchByDbName = newDataModel.fields.find((d) => getDbName(d) === getDbName(f)); if (matchByDbName) return false; + // Try matching by relation fields key (the `fields` attribute in @relation) + const originalFieldsKey = getRelationFieldsKey(f as any); + if (originalFieldsKey) { + const matchByFieldsKey = newDataModel.fields.find( + (d) => getRelationFieldsKey(d as any) === originalFieldsKey, + ); + if (matchByFieldsKey) return false; + } + const matchByFkName = newDataModel.fields.find( (d) => getRelationFkName(d as any) === getRelationFkName(f as any) && diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index e017bb9b4..38a5f0e9c 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -9,6 +9,7 @@ import { isInvocationExpr, type Attribute, type Model, + type ReferenceExpr, type StringLiteral, } from '@zenstackhq/language/ast'; import { getLiteralArray, getStringLiteral } from '@zenstackhq/language/utils'; @@ -108,6 +109,28 @@ export function getRelationFkName(decl: DataField): string | undefined { return schemaAttrValue?.value; } +/** + * Gets the FK field names from the @relation attribute's `fields` argument. + * Returns a sorted, comma-separated string of field names for comparison. 
+ * e.g., @relation(fields: [userId], references: [id]) -> "userId" + * e.g., @relation(fields: [postId, tagId], references: [id, id]) -> "postId,tagId" + */ +export function getRelationFieldsKey(decl: DataField): string | undefined { + const relationAttr = decl?.attributes.find((a) => a.decl.ref?.name === '@relation'); + if (!relationAttr) return undefined; + + const fieldsArg = relationAttr.args.find((a) => a.name === 'fields')?.value; + if (!fieldsArg || fieldsArg.$type !== 'ArrayExpr') return undefined; + + const fieldNames = fieldsArg.items + .filter((item): item is ReferenceExpr => item.$type === 'ReferenceExpr') + .map((item) => item.target?.$refText || item.target?.ref?.name) + .filter((name): name is string => !!name) + .sort(); + + return fieldNames.length > 0 ? fieldNames.join(',') : undefined; +} + export function getDbSchemaName(decl: DataModel | Enum): string { const schemaAttr = decl.attributes.find((a) => a.decl.ref?.name === '@@schema'); if (!schemaAttr) return 'public'; From 47d0273f180a3e786a7dc18d43db99dc84a44d8c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 28 Jan 2026 23:19:19 +0100 Subject: [PATCH 41/83] refactor(cli): refine relation name generation and table syncing --- packages/cli/src/actions/pull/index.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 9a704b1f9..bd64289fb 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -320,7 +320,7 @@ export function syncTable({ ); } - const uniqueColumns = table.columns.filter((c) => c.unique); + const uniqueColumns = table.columns.filter((c) => c.unique || c.pk); if(uniqueColumns.length === 0) { modelFactory.addAttribute((a) => a.setDecl(getAttributeRef('@@ignore', services))); modelFactory.comments.push( @@ -418,7 +418,7 @@ export function syncRelation({ const fieldMapAttribute = getAttributeRef('@map', 
services); const tableMapAttribute = getAttributeRef('@@map', services); - const includeRelationName = selfRelation || simmilarRelations > 1; + const includeRelationName = selfRelation || simmilarRelations > 0; if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) { throw new Error('Cannot find required attributes in the model.'); @@ -442,7 +442,7 @@ export function syncRelation({ const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? '_' : ''; - const relationName = `${relation.table}${simmilarRelations > 1 ? `_${relation.column}` : ''}To${relation.references.table}`; + const relationName = `${relation.table}${simmilarRelations > 0 ? `_${relation.column}` : ''}To${relation.references.table}`; let { name: sourceFieldName } = resolveNameCasing( options.fieldCasing, simmilarRelations > 0 @@ -491,7 +491,7 @@ export function syncRelation({ ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onUpdate'); } - if (relation.fk_name) ab.addArg((ab) => ab.StringLiteral.setValue(relation.fk_name), 'map'); + if (relation.fk_name && relation.fk_name !== `${relation.table}_${relation.column}_fkey`) ab.addArg((ab) => ab.StringLiteral.setValue(relation.fk_name), 'map'); return ab; }); From e318ff41d578d4d2719810981f5ff926d353f67e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 28 Jan 2026 23:19:19 +0100 Subject: [PATCH 42/83] test(cli): update pull tests to reflect improved schema generation --- .../cli/src/actions/pull/provider/sqlite.ts | 5 + packages/cli/test/db/pull.test.ts | 146 +++++++++--------- 2 files changed, 81 insertions(+), 70 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index be4e6d6cf..ee562f145 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -234,6 +234,11 @@ export const sqlite: IntrospectionProvider = { const enums: 
IntrospectedEnum[] = []; // SQLite doesn't support enums + writeFileSync( + 'D:/Projects/GitHub/zenstack-v3/packages/cli/sqlite-introspected.json', + JSON.stringify({ tables, enums }, null, 4), + ); + return { tables, enums }; } finally { db.close(); diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index 84f34024a..1754b7b4e 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -13,9 +13,40 @@ const generator = new ZModelCodeGenerator({ describe('DB pull - Sqlite specific', () => { it("simple schema - pull shouldn't modify the schema", () => { - const workDir = createProject(` + const workDir = createProject(`model User { + id String @id @default(cuid()) + email String @unique @map("email_address") + name String? @default("Anonymous") + role Role @default(USER) + profile Profile? + shared_profile Profile? @relation("shared") + posts Post[] + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + jsonData Json? + balance Decimal @default(0.00) + isActive Boolean @default(true) + bigCounter BigInt @default(0) + bytes Bytes? + + @@index([role]) + @@map("users") +} + +model Profile { + id Int @id @default(autoincrement()) + user User @relation(fields: [userId], references: [id], onDelete: Cascade) + userId String @unique + user_shared User @relation("shared", fields: [shared_userId], references: [id], onDelete: Cascade) + shared_userId String @unique + bio String? + avatarUrl String? + + @@map("profiles") +} + model Post { - id Int @id @default(1) + id Int @id @default(autoincrement()) author User @relation(fields: [authorId], references: [id], onDelete: Cascade) authorId String title String @@ -23,6 +54,7 @@ model Post { published Boolean @default(false) tags PostTag[] createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt slug String score Float @default(0.0) metadata Json? 
@@ -32,6 +64,16 @@ model Post { @@map("posts") } +model Tag { + id Int @id @default(autoincrement()) + name String @unique + posts PostTag[] + createdAt DateTime @default(now()) + + @@index([name], name: "tag_name_idx") + @@map("tags") +} + model PostTag { post Post @relation(fields: [postId], references: [id], onDelete: Cascade) postId Int @@ -44,45 +86,10 @@ model PostTag { @@map("post_tags") } -model Profile { - id Int @id @default(1) - user User @relation(fields: [userId], references: [id], onDelete: Cascade) - userId String @unique - user_shared User @relation("shared", fields: [shared_userId], references: [id], onDelete: Cascade) - shared_userId String @unique - bio String? - avatarUrl String? - - @@map("profiles") -} - -model Tag { - id Int @id @default(1) - name String @unique - posts PostTag[] - createdAt DateTime @default(now()) - - @@index([name], name: "tag_name_idx") - @@map("tags") -} - -model User { - id String @id @default(cuid()) - email String @unique @map("email_address") - name String? @default("Anonymous") - role String @default("USER") - profile Profile? - shared_profile Profile? @relation("shared") - posts Post[] - createdAt DateTime @default(now()) - jsonData Json? - balance Decimal @default(0.00) - isActive Boolean @default(true) - bigCounter BigInt @default(0) - bytes Bytes? - - @@index([role]) - @@map("users") +enum Role { + USER + ADMIN + MODERATOR }`, ); runCli('format', workDir); @@ -96,7 +103,7 @@ model User { it('simple schema - pull shouldn recreate the schema.zmodel', async () => { const workDir = createProject( `model Post { - id Int @id @default(1) + id Int @id @default(autoincrement()) authorId String title String content String? @@ -111,6 +118,7 @@ model User { @@unique([authorId, slug]) @@index([authorId, published]) } + model PostTag { postId Int tagId Int @@ -123,40 +131,38 @@ model PostTag { } model Profile { - id Int @id @default(1) - userId String @unique - sharedUserId String @unique @map("shared_userId") - bio String? 
- avatarUrl String? - - profileUserId User @relation(fields: [userId], references: [id], onDelete: Cascade, onUpdate: Cascade) - profileSharedUserId User @relation("shared", fields: [sharedUserId], references: [id], onDelete: Cascade, onUpdate: Cascade) + id Int @id @default(autoincrement()) + userId String @unique + sharedUserId String @unique @map("shared_userId") + bio String? + avatarUrl String? + profileUserId User @relation("Profile_userIdToUser", fields: [userId], references: [id], onDelete: Cascade, onUpdate: Cascade) + profileSharedUserId User @relation("Profile_shared_userIdToUser", fields: [sharedUserId], references: [id], onDelete: Cascade, onUpdate: Cascade) } model Tag { - id Int @id @default(1) - name String @unique - createdAt DateTime @default(now()) - postTag PostTag[] - + id Int @id @default(autoincrement()) + name String @unique + createdAt DateTime @default(now()) + postTag PostTag[] + @@index([name], map: "tag_name_idx") } - -model User { - id String @id - email String @unique - name String? @default("Anonymous") - role String @default("USER") - createdAt DateTime @default(now()) - jsonData Json? - balance Decimal @default(0.00) - isActive Boolean @default(true) - bigCounter BigInt @default(0) - bytes Bytes? - post Post[] - profileUserId Profile? - profileSharedUserId Profile? @relation("shared") +model User { + id String @id + email String @unique + name String? @default("Anonymous") + role String @default("USER") + createdAt DateTime @default(now()) + jsonData Json? + balance Decimal @default(0.00) + isActive Boolean @default(true) + bigCounter BigInt @default(0) + bytes Bytes? + post Post[] + profileUserId Profile? @relation("Profile_userIdToUser") + profileSharedUserId Profile? 
@relation("Profile_shared_userIdToUser") @@index([role]) }`, From c5d87f0b7c7928bace425a88262e06a28017aef7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 28 Jan 2026 23:44:31 +0100 Subject: [PATCH 43/83] test(cli): add MySQL support to test utility helpers Extends the testing infrastructure to support MySQL databases. Adds MySQL configuration defaults and environment variable overrides. Updates the prelude generation logic to handle MySQL connection strings and provider types, enabling broader database integration testing across the CLI. --- packages/cli/test/utils.ts | 30 ++++++++++++++++++++++++------ 1 file changed, 24 insertions(+), 6 deletions(-) diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index 310fea122..29777d186 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -12,6 +12,13 @@ const TEST_PG_CONFIG = { password: process.env['TEST_PG_PASSWORD'] ?? 'postgres', }; +const TEST_MYSQL_CONFIG = { + host: process.env['TEST_MYSQL_HOST'] ?? 'localhost', + port: process.env['TEST_MYSQL_PORT'] ? parseInt(process.env['TEST_MYSQL_PORT']) : 3306, + user: process.env['TEST_MYSQL_USER'] ?? 'root', + password: process.env['TEST_MYSQL_PASSWORD'] ?? 'mysql', +}; + function getTestDbName(provider: string) { if (provider === 'sqlite') { return './test.db'; @@ -34,13 +41,24 @@ function getTestDbName(provider: string) { ); } -export function getDefaultPrelude(options?: { provider?: 'sqlite' | 'postgresql' }) { +export function getDefaultPrelude(options?: { provider?: 'sqlite' | 'postgresql' | 'mysql' }) { const provider = (options?.provider || getTestDbProvider()) ?? 'sqlite'; const dbName = getTestDbName(provider); - const dbUrl = - provider === 'sqlite' - ? 
`file:${dbName}` - : `postgres://${TEST_PG_CONFIG.user}:${TEST_PG_CONFIG.password}@${TEST_PG_CONFIG.host}:${TEST_PG_CONFIG.port}/${dbName}`; + let dbUrl: string; + + switch (provider) { + case 'sqlite': + dbUrl = `file:${dbName}`; + break; + case 'postgresql': + dbUrl = `postgres://${TEST_PG_CONFIG.user}:${TEST_PG_CONFIG.password}@${TEST_PG_CONFIG.host}:${TEST_PG_CONFIG.port}/${dbName}`; + break; + case 'mysql': + dbUrl = `mysql://${TEST_MYSQL_CONFIG.user}:${TEST_MYSQL_CONFIG.password}@${TEST_MYSQL_CONFIG.host}:${TEST_MYSQL_CONFIG.port}/${dbName}`; + break; + default: + throw new Error(`Unsupported provider: ${provider}`); + } const ZMODEL_PRELUDE = `datasource db { provider = "${provider}" @@ -52,7 +70,7 @@ export function getDefaultPrelude(options?: { provider?: 'sqlite' | 'postgresql' export function createProject( zmodel: string, - options?: { customPrelude?: boolean; provider?: 'sqlite' | 'postgresql' }, + options?: { customPrelude?: boolean; provider?: 'sqlite' | 'postgresql' | 'mysql' }, ) { const workDir = createTestProject(); fs.mkdirSync(path.join(workDir, 'zenstack'), { recursive: true }); From f0cfebb81e48bc6df80dc9ad23f6d2b734c29a69 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 29 Jan 2026 00:06:11 +0100 Subject: [PATCH 44/83] fix(cli): omit default constraint names in table sync Avoids explicitly declaring unique constraint names when they match the default database naming convention. This results in cleaner generated schema code by removing redundant mapping arguments. 
--- packages/cli/src/actions/pull/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index bd64289fb..8a16d2788 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -269,7 +269,7 @@ export function syncTable({ if (column.unique && !column.pk) { builder.addAttribute((b) => { b.setDecl(uniqueAttribute); - if (column.unique_name) b.addArg((ab) => ab.StringLiteral.setValue(column.unique_name!), 'map'); + if (column.unique_name && column.unique_name != `${table.name}_${column.name}_key`) b.addArg((ab) => ab.StringLiteral.setValue(column.unique_name!), 'map'); return b; }); From f0d408b2e1ca03ab928108ef0e70a88742747634 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 29 Jan 2026 00:24:13 +0100 Subject: [PATCH 45/83] fix: correctly handle default values for 'text' type in PostgreSQL --- packages/cli/src/actions/pull/provider/postgresql.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 958b0930f..ebe64ef3d 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -150,6 +150,7 @@ export const postgresql: IntrospectionProvider = { case 'uuid': case 'json': case 'jsonb': + case 'text': if (value === 'NULL') return []; factories.push(defaultAttr.addArg((a) => a.StringLiteral.setValue(value))); break; From b9e2840254895960b8b256963d57c1040cbb32b1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 29 Jan 2026 00:24:14 +0100 Subject: [PATCH 46/83] fix: sort table indexes to ensure stable schema generation --- packages/cli/src/actions/pull/index.ts | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/cli/src/actions/pull/index.ts 
b/packages/cli/src/actions/pull/index.ts index 8a16d2788..332b6bd5f 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -328,7 +328,14 @@ export function syncTable({ ); } - table.indexes.forEach((index) => { + // Sort indexes: unique indexes first, then other indexes + const sortedIndexes = table.indexes.sort((a, b) => { + if (a.unique && !b.unique) return -1; + if (!a.unique && b.unique) return 1; + return 0; + }); + + sortedIndexes.forEach((index) => { if (index.predicate) { //These constraints are not supported by Zenstack, because Zenstack currently does not fully support check constraints. Read more: https://pris.ly/d/check-constraints console.warn( From 66c0afbf735b9020e2cd7f5e136084fe939f7106 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 29 Jan 2026 00:24:14 +0100 Subject: [PATCH 47/83] refactor: dynamically determine supported db providers in CLI --- packages/cli/src/actions/db.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index e97f76a4a..6a8135650 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -16,6 +16,7 @@ import { import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; import { getDatasource, getDbName, getRelationFieldsKey, getRelationFkName } from './pull/utils'; +import type { DataSourceProviderType } from '@zenstackhq/schema'; type PushOptions = { schema?: string; @@ -86,7 +87,7 @@ async function runPull(options: PullOptions) { config({ ignore: ['MISSING_ENV_FILE'], }); - const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql']; + const SUPPORTED_PROVIDERS = Object.keys(providers) as DataSourceProviderType[]; const datasource = getDatasource(model); if (!datasource) { throw new Error('No datasource found in the schema.'); From 057407c9d17b00bccdf9a57e8c8be03100299882 Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 29 Jan 2026 00:24:14 +0100 Subject: [PATCH 48/83] test: fix typo in pull test description --- packages/cli/test/db/pull.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index 1754b7b4e..444431c1f 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -100,7 +100,7 @@ enum Role { expect(getSchema(workDir)).toEqual(originalSchema); }); - it('simple schema - pull shouldn recreate the schema.zmodel', async () => { + it('simple schema - pull should recreate the schema.zmodel', async () => { const workDir = createProject( `model Post { id Int @id @default(autoincrement()) From 6c542c521cd652f3900ec466ae92c5131b684554 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 29 Jan 2026 00:35:19 +0100 Subject: [PATCH 49/83] chore(cli): remove debug artifacts and silence test logs Removes hardcoded file system path debugging and unnecessary console logging from the introspector and test suites. Silences CLI command output during tests to provide a cleaner test execution environment. 
--- packages/cli/src/actions/pull/provider/sqlite.ts | 6 ------ packages/cli/test/db/pull.test.ts | 1 - packages/cli/test/utils.ts | 2 +- 3 files changed, 1 insertion(+), 8 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index ee562f145..1704cb8f0 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -1,7 +1,6 @@ import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; -import { writeFileSync } from 'node:fs'; // Note: We dynamically import better-sqlite3 inside the async function to avoid // requiring it at module load time for environments that don't use SQLite. @@ -233,11 +232,6 @@ export const sqlite: IntrospectionProvider = { } const enums: IntrospectedEnum[] = []; // SQLite doesn't support enums - - writeFileSync( - 'D:/Projects/GitHub/zenstack-v3/packages/cli/sqlite-introspected.json', - JSON.stringify({ tables, enums }, null, 4), - ); return { tables, enums }; } finally { diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index 444431c1f..b04a46ffc 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -167,7 +167,6 @@ model User { @@index([role]) }`, ); - console.log(workDir) runCli('format', workDir); runCli('db push', workDir); const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index 29777d186..cb9e76add 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -81,5 +81,5 @@ export function createProject( export function runCli(command: string, cwd: string) { const cli = path.join(__dirname, '../dist/index.js'); - execSync(`node ${cli} ${command}`, 
{ cwd, stdio: 'inherit' }); + execSync(`node ${cli} ${command}`, { cwd }); } From c236ba80061662ab22adaf3e2732276d3fd9d44a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 29 Jan 2026 01:40:55 +0100 Subject: [PATCH 50/83] fix(cli): ensure MySQL column and index ordering Wraps JSON_ARRAYAGG calls in subqueries with explicit ORDER BY clauses to maintain correct metadata ordering. This addresses a limitation in MySQL versions prior to 8.0.21, where ORDER BY is not supported directly within the JSON_ARRAYAGG function, ensuring consistent introspection results across different database versions. --- .../cli/src/actions/pull/provider/mysql.ts | 71 ++++++++++--------- 1 file changed, 38 insertions(+), 33 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index 123463725..4f5f98fe5 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -264,6 +264,8 @@ export const mysql: IntrospectionProvider = { }; function getTableIntrospectionQuery(databaseName: string) { + // Note: We use subqueries with ORDER BY before JSON_ARRAYAGG to ensure ordering + // since MySQL < 8.0.21 doesn't support ORDER BY inside JSON_ARRAYAGG return ` SELECT t.TABLE_SCHEMA AS \`schema\`, @@ -278,8 +280,9 @@ SELECT ELSE NULL END AS \`definition\`, ( - SELECT JSON_ARRAYAGG( - JSON_OBJECT( + SELECT JSON_ARRAYAGG(col_json) + FROM ( + SELECT JSON_OBJECT( 'name', c.COLUMN_NAME, 'datatype', c.DATA_TYPE, 'datatype_schema', c.TABLE_SCHEMA, @@ -298,24 +301,25 @@ SELECT 'foreign_key_name', kcu_fk.CONSTRAINT_NAME, 'foreign_key_on_update', rc.UPDATE_RULE, 'foreign_key_on_delete', rc.DELETE_RULE - ) - ) - FROM INFORMATION_SCHEMA.COLUMNS c - LEFT JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu_fk - ON c.TABLE_SCHEMA = kcu_fk.TABLE_SCHEMA - AND c.TABLE_NAME = kcu_fk.TABLE_NAME - AND c.COLUMN_NAME = kcu_fk.COLUMN_NAME - AND kcu_fk.REFERENCED_TABLE_NAME IS NOT 
NULL - LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS rc - ON kcu_fk.CONSTRAINT_SCHEMA = rc.CONSTRAINT_SCHEMA - AND kcu_fk.CONSTRAINT_NAME = rc.CONSTRAINT_NAME - WHERE c.TABLE_SCHEMA = t.TABLE_SCHEMA - AND c.TABLE_NAME = t.TABLE_NAME - ORDER BY c.ORDINAL_POSITION + ) AS col_json + FROM INFORMATION_SCHEMA.COLUMNS c + LEFT JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu_fk + ON c.TABLE_SCHEMA = kcu_fk.TABLE_SCHEMA + AND c.TABLE_NAME = kcu_fk.TABLE_NAME + AND c.COLUMN_NAME = kcu_fk.COLUMN_NAME + AND kcu_fk.REFERENCED_TABLE_NAME IS NOT NULL + LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS rc + ON kcu_fk.CONSTRAINT_SCHEMA = rc.CONSTRAINT_SCHEMA + AND kcu_fk.CONSTRAINT_NAME = rc.CONSTRAINT_NAME + WHERE c.TABLE_SCHEMA = t.TABLE_SCHEMA + AND c.TABLE_NAME = t.TABLE_NAME + ORDER BY c.ORDINAL_POSITION + ) AS cols_ordered ) AS \`columns\`, ( - SELECT JSON_ARRAYAGG( - JSON_OBJECT( + SELECT JSON_ARRAYAGG(idx_json) + FROM ( + SELECT JSON_OBJECT( 'name', s.INDEX_NAME, 'method', s.INDEX_TYPE, 'unique', s.NON_UNIQUE = 0, @@ -325,27 +329,28 @@ SELECT 'partial', FALSE, 'predicate', NULL, 'columns', ( - SELECT JSON_ARRAYAGG( - JSON_OBJECT( + SELECT JSON_ARRAYAGG(idx_col_json) + FROM ( + SELECT JSON_OBJECT( 'name', s2.COLUMN_NAME, 'expression', NULL, 'order', CASE s2.COLLATION WHEN 'A' THEN 'ASC' WHEN 'D' THEN 'DESC' ELSE NULL END, 'nulls', NULL - ) + ) AS idx_col_json + FROM INFORMATION_SCHEMA.STATISTICS s2 + WHERE s2.TABLE_SCHEMA = s.TABLE_SCHEMA + AND s2.TABLE_NAME = s.TABLE_NAME + AND s2.INDEX_NAME = s.INDEX_NAME ORDER BY s2.SEQ_IN_INDEX - ) - FROM INFORMATION_SCHEMA.STATISTICS s2 - WHERE s2.TABLE_SCHEMA = s.TABLE_SCHEMA - AND s2.TABLE_NAME = s.TABLE_NAME - AND s2.INDEX_NAME = s.INDEX_NAME + ) AS idx_cols_ordered ) - ) - ) - FROM ( - SELECT DISTINCT INDEX_NAME, INDEX_TYPE, NON_UNIQUE, TABLE_SCHEMA, TABLE_NAME - FROM INFORMATION_SCHEMA.STATISTICS - WHERE TABLE_SCHEMA = t.TABLE_SCHEMA AND TABLE_NAME = t.TABLE_NAME - ) s + ) AS idx_json + FROM ( + SELECT DISTINCT INDEX_NAME, 
INDEX_TYPE, NON_UNIQUE, TABLE_SCHEMA, TABLE_NAME + FROM INFORMATION_SCHEMA.STATISTICS + WHERE TABLE_SCHEMA = t.TABLE_SCHEMA AND TABLE_NAME = t.TABLE_NAME + ) s + ) AS idxs_ordered ) AS \`indexes\` FROM INFORMATION_SCHEMA.TABLES t LEFT JOIN INFORMATION_SCHEMA.VIEWS v From 6fe2df4bd885729d517b968eb71554fe822206df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 01:00:25 +0100 Subject: [PATCH 51/83] fix(cli): preserve column order during MySQL pull Ensures database columns are sorted by their ordinal position during the introspection process. This maintains the original schema structure and provides a consistent output that matches the physical database layout. --- packages/cli/src/actions/pull/provider/mysql.ts | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index 4f5f98fe5..329d01942 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -135,12 +135,18 @@ export const mysql: IntrospectionProvider = { const columns = typeof row.columns === 'string' ? JSON.parse(row.columns) : row.columns; const indexes = typeof row.indexes === 'string' ? JSON.parse(row.indexes) : row.indexes; + // Sort columns by ordinal_position to preserve database column order + const sortedColumns = (columns || []).sort( + (a: { ordinal_position?: number }, b: { ordinal_position?: number }) => + (a.ordinal_position ?? 0) - (b.ordinal_position ?? 
0) + ); + tables.push({ schema: row.schema || '', name: row.name, type: row.type as 'table' | 'view', definition: row.definition, - columns: columns || [], + columns: sortedColumns, indexes: indexes || [], }); } @@ -283,6 +289,7 @@ SELECT SELECT JSON_ARRAYAGG(col_json) FROM ( SELECT JSON_OBJECT( + 'ordinal_position', c.ORDINAL_POSITION, 'name', c.COLUMN_NAME, 'datatype', c.DATA_TYPE, 'datatype_schema', c.TABLE_SCHEMA, From 3f983d2280bd3a8d4453eda26c370a6bb9815710 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 01:10:23 +0100 Subject: [PATCH 52/83] refactor(cli): remove schema fields from MySQL queries Eliminates redundant schema and database name fields from the MySQL introspection query. Since MySQL does not support multi-schema architectures internal to a single connection in this context, removing these fields simplifies the data structure and avoids unnecessary metadata overhead. --- packages/cli/src/actions/pull/provider/mysql.ts | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index 329d01942..ca73a2194 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -142,7 +142,7 @@ export const mysql: IntrospectionProvider = { ); tables.push({ - schema: row.schema || '', + schema: '', // MySQL doesn't support multi-schema name: row.name, type: row.type as 'table' | 'view', definition: row.definition, @@ -161,7 +161,7 @@ export const mysql: IntrospectionProvider = { // Parse enum values from column_type like "enum('val1','val2','val3')" const values = parseEnumValues(row.column_type); return { - schema_name: databaseName, + schema_name: '', // MySQL doesn't support multi-schema // Create a unique enum type name based on table and column enum_type: `${row.table_name}_${row.column_name}`, values, @@ -272,9 +272,9 @@ export const mysql: 
IntrospectionProvider = { function getTableIntrospectionQuery(databaseName: string) { // Note: We use subqueries with ORDER BY before JSON_ARRAYAGG to ensure ordering // since MySQL < 8.0.21 doesn't support ORDER BY inside JSON_ARRAYAGG + // MySQL doesn't support multi-schema, so we don't include schema in the result return ` SELECT - t.TABLE_SCHEMA AS \`schema\`, t.TABLE_NAME AS \`name\`, CASE t.TABLE_TYPE WHEN 'BASE TABLE' THEN 'table' @@ -292,7 +292,6 @@ SELECT 'ordinal_position', c.ORDINAL_POSITION, 'name', c.COLUMN_NAME, 'datatype', c.DATA_TYPE, - 'datatype_schema', c.TABLE_SCHEMA, 'length', c.CHARACTER_MAXIMUM_LENGTH, 'precision', COALESCE(c.NUMERIC_PRECISION, c.DATETIME_PRECISION), 'nullable', c.IS_NULLABLE = 'YES', @@ -302,7 +301,6 @@ SELECT 'unique_name', CASE WHEN c.COLUMN_KEY = 'UNI' THEN c.COLUMN_NAME ELSE NULL END, 'computed', c.GENERATION_EXPRESSION IS NOT NULL AND c.GENERATION_EXPRESSION != '', 'options', JSON_ARRAY(), - 'foreign_key_schema', kcu_fk.REFERENCED_TABLE_SCHEMA, 'foreign_key_table', kcu_fk.REFERENCED_TABLE_NAME, 'foreign_key_column', kcu_fk.REFERENCED_COLUMN_NAME, 'foreign_key_name', kcu_fk.CONSTRAINT_NAME, @@ -365,7 +363,7 @@ LEFT JOIN INFORMATION_SCHEMA.VIEWS v WHERE t.TABLE_SCHEMA = '${databaseName}' AND t.TABLE_TYPE IN ('BASE TABLE', 'VIEW') AND t.TABLE_NAME NOT LIKE '_prisma_migrations' -ORDER BY t.TABLE_SCHEMA, t.TABLE_NAME; +ORDER BY t.TABLE_NAME; `; } From 769864ae94c4ecd09185470160c30bc897a23c3d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 01:22:05 +0100 Subject: [PATCH 53/83] fix(cli): improve MySQL introspection and index mapping Refines the database pull process to better handle MySQL-specific patterns. Improves unique constraint detection to prevent redundant mapping attributes when default naming conventions are used. Updates the MySQL introspection logic to correctly identify boolean types, handle timestamp precision in default values, and normalize numeric defaults. 
Also ensures auto-incrementing columns and primary key indexes are correctly mapped to prevent schema duplication. --- packages/cli/src/actions/pull/index.ts | 19 ++++++++++--- .../cli/src/actions/pull/provider/mysql.ts | 28 ++++++++++++++----- 2 files changed, 36 insertions(+), 11 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 332b6bd5f..e15b77fa4 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -269,7 +269,14 @@ export function syncTable({ if (column.unique && !column.pk) { builder.addAttribute((b) => { b.setDecl(uniqueAttribute); - if (column.unique_name && column.unique_name != `${table.name}_${column.name}_key`) b.addArg((ab) => ab.StringLiteral.setValue(column.unique_name!), 'map'); + // Only add map if the unique constraint name differs from default patterns + // Default patterns: TableName_columnName_key (Prisma) or just columnName (MySQL) + const isDefaultName = !column.unique_name + || column.unique_name === `${table.name}_${column.name}_key` + || column.unique_name === column.name; + if (!isDefaultName) { + b.addArg((ab) => ab.StringLiteral.setValue(column.unique_name!), 'map'); + } return b; }); @@ -354,9 +361,13 @@ export function syncTable({ return; } - if (index.columns.length === 1 && index.columns.find((c) => pkColumns.includes(c.name)) - || index.columns.length === 1 && index.unique) { - //skip primary key or unique constraints as they are already handled + // Skip PRIMARY key index (handled via @id or @@id) + if (index.primary) { + return; + } + + // Skip single-column indexes that are already handled by @id or @unique on the field + if (index.columns.length === 1 && (index.columns.find((c) => pkColumns.includes(c.name)) || index.unique)) { return; } diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index ca73a2194..5a03efd64 100644 --- 
a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -179,8 +179,8 @@ export const mysql: IntrospectionProvider = { const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef('@default', services)); - // Handle CURRENT_TIMESTAMP - if (val === 'CURRENT_TIMESTAMP' || val === 'current_timestamp()' || val === 'now()') { + // Handle CURRENT_TIMESTAMP with optional precision (e.g., CURRENT_TIMESTAMP(3)) + if (/^CURRENT_TIMESTAMP(\(\d*\))?$/i.test(val) || val.toLowerCase() === 'current_timestamp()' || val.toLowerCase() === 'now()') { factories.push(defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)))); if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') { @@ -190,7 +190,7 @@ export const mysql: IntrospectionProvider = { } // Handle auto_increment - if (val === 'auto_increment') { + if (val.toLowerCase() === 'auto_increment') { factories.push( defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services))), ); @@ -212,11 +212,17 @@ export const mysql: IntrospectionProvider = { return factories; } - // Handle numeric values - if (/^-?\d+$/.test(val) || /^-?\d+(\.\d+)?$/.test(val)) { + // Handle numeric values (integers and decimals) + if (/^-?\d+$/.test(val)) { factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); return factories; } + if (/^-?\d+\.\d+$/.test(val)) { + // For decimal values, normalize to remove trailing zeros but keep reasonable precision + const numVal = parseFloat(val); + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(String(numVal)))); + return factories; + } // Handle string values (quoted with single quotes) if (val.startsWith("'") && val.endsWith("'")) { @@ -291,16 +297,24 @@ SELECT SELECT JSON_OBJECT( 'ordinal_position', c.ORDINAL_POSITION, 'name', c.COLUMN_NAME, - 'datatype', c.DATA_TYPE, + 'datatype', CASE + WHEN c.DATA_TYPE = 
'tinyint' AND c.COLUMN_TYPE = 'tinyint(1)' THEN 'boolean' + ELSE c.DATA_TYPE + END, + 'datatype_schema', '', 'length', c.CHARACTER_MAXIMUM_LENGTH, 'precision', COALESCE(c.NUMERIC_PRECISION, c.DATETIME_PRECISION), 'nullable', c.IS_NULLABLE = 'YES', - 'default', c.COLUMN_DEFAULT, + 'default', CASE + WHEN c.EXTRA LIKE '%auto_increment%' THEN 'auto_increment' + ELSE c.COLUMN_DEFAULT + END, 'pk', c.COLUMN_KEY = 'PRI', 'unique', c.COLUMN_KEY = 'UNI', 'unique_name', CASE WHEN c.COLUMN_KEY = 'UNI' THEN c.COLUMN_NAME ELSE NULL END, 'computed', c.GENERATION_EXPRESSION IS NOT NULL AND c.GENERATION_EXPRESSION != '', 'options', JSON_ARRAY(), + 'foreign_key_schema', NULL, 'foreign_key_table', kcu_fk.REFERENCED_TABLE_NAME, 'foreign_key_column', kcu_fk.REFERENCED_COLUMN_NAME, 'foreign_key_name', kcu_fk.CONSTRAINT_NAME, From 336660ebf7ebd57b49de0a6c52ccb73ceb185f65 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 01:30:46 +0100 Subject: [PATCH 54/83] test(cli): pass provider to default prelude in tests Ensures that the default schema prelude correctly reflects the database provider specified in test options. This prevents inconsistencies when generating test projects with non-default providers. --- packages/cli/test/utils.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index cb9e76add..8b5d79d10 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -75,7 +75,7 @@ export function createProject( const workDir = createTestProject(); fs.mkdirSync(path.join(workDir, 'zenstack'), { recursive: true }); const schemaPath = path.join(workDir, 'zenstack/schema.zmodel'); - fs.writeFileSync(schemaPath, !options?.customPrelude ? `${getDefaultPrelude()}\n${zmodel}` : zmodel); + fs.writeFileSync(schemaPath, !options?.customPrelude ? 
`${getDefaultPrelude({ provider: options?.provider })}\n${zmodel}` : zmodel); return workDir; } From 52d32c872f4f2aeb90c7d8e768ffaa6950ba9647 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 01:44:07 +0100 Subject: [PATCH 55/83] fix(cli): improve MySQL introspection for types and defaults Disables NativeEnum support for MySQL to prevent loss of schema-level enums since MySQL enums are column-specific. Refines boolean and numeric type mapping to better handle synthetic boolean types and preserve decimal precision in default values. Updates default value parsing logic to correctly identify unquoted strings and avoid misinterpreting numeric literals as booleans. --- .../cli/src/actions/pull/provider/mysql.ts | 32 +++++++++++++------ 1 file changed, 22 insertions(+), 10 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index 5a03efd64..a4979acef 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -10,7 +10,10 @@ export const mysql: IntrospectionProvider = { isSupportedFeature(feature) { switch (feature) { case 'NativeEnum': - return true; + // MySQL enums are defined inline in column definitions, not as separate types. + // They can't be shared across tables like PostgreSQL enums. + // Return false to preserve existing enums from the schema. 
+ return false; case 'Schema': default: return false; @@ -94,7 +97,9 @@ export const mysql: IntrospectionProvider = { case 'String': return { type: 'varchar', precisition: 191 }; case 'Boolean': - return { type: 'tinyint', precisition: 1 }; + // Boolean maps to 'boolean' (our synthetic type from tinyint(1)) + // No precision needed since we handle the mapping in the query + return { type: 'boolean' }; case 'Int': return { type: 'int' }; case 'BigInt': @@ -202,25 +207,25 @@ export const mysql: IntrospectionProvider = { return []; } - // Handle boolean values - if (val === 'true' || val === '1' || val === "b'1'") { + // Handle boolean literal values (not numeric 0/1 which should be handled as numbers) + if (val === 'true' || val === "b'1'") { factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(true))); return factories; } - if (val === 'false' || val === '0' || val === "b'0'") { + if (val === 'false' || val === "b'0'") { factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(false))); return factories; } // Handle numeric values (integers and decimals) - if (/^-?\d+$/.test(val)) { + // Check decimals first to preserve format like 0.00 + if (/^-?\d+\.\d+$/.test(val)) { + // Preserve the original decimal format factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); return factories; } - if (/^-?\d+\.\d+$/.test(val)) { - // For decimal values, normalize to remove trailing zeros but keep reasonable precision - const numVal = parseFloat(val); - factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(String(numVal)))); + if (/^-?\d+$/.test(val)) { + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); return factories; } @@ -263,6 +268,13 @@ export const mysql: IntrospectionProvider = { return factories; } + // Handle unquoted string values (MySQL sometimes returns defaults without quotes) + // If it's not a number, boolean, or function, treat it as a string + if 
(/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(val)) { + factories.push(defaultAttr.addArg((ab) => ab.StringLiteral.setValue(val))); + return factories; + } + // For any other unhandled cases, use dbgenerated factories.push( defaultAttr.addArg((a) => From 2f7fe79dc9429d8bac78b715c2f341dc7244be31 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 13:09:33 +0100 Subject: [PATCH 56/83] fix(cli): improve MySQL default value introspection Refines how default values are handled during database introspection for MySQL by considering the specific field type. This ensures that boolean variants and numeric literals for Float and Decimal types are correctly formatted and preserved. Also clarifies unsupported features in the SQLite provider to improve codebase maintainability. --- packages/cli/src/actions/pull/index.ts | 1 + .../cli/src/actions/pull/provider/mysql.ts | 46 ++++++++++++++++--- .../src/actions/pull/provider/postgresql.ts | 2 +- .../cli/src/actions/pull/provider/provider.ts | 1 + .../cli/src/actions/pull/provider/sqlite.ts | 7 ++- 5 files changed, 48 insertions(+), 9 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index e15b77fa4..78f5ca7ac 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -259,6 +259,7 @@ export function syncTable({ if (column.default) { const defaultValuesAttrs = provider.getDefaultValue({ fieldName: column.name, + fieldType: builtinType.type, defaultValue: column.default, services, enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index a4979acef..afd468049 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -178,7 +178,7 @@ export const mysql: IntrospectionProvider = { await connection.end(); } }, - 
getDefaultValue({ defaultValue, fieldName, services, enums }) { + getDefaultValue({ defaultValue, fieldName, fieldType, services, enums }) { const val = defaultValue.trim(); const factories: DataFieldAttributeFactory[] = []; @@ -207,7 +207,19 @@ export const mysql: IntrospectionProvider = { return []; } - // Handle boolean literal values (not numeric 0/1 which should be handled as numbers) + // Handle boolean values based on field type + if (fieldType === 'Boolean') { + if (val === 'true' || val === '1' || val === "b'1'") { + factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(true))); + return factories; + } + if (val === 'false' || val === '0' || val === "b'0'") { + factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(false))); + return factories; + } + } + + // Handle boolean literal values for non-boolean fields if (val === 'true' || val === "b'1'") { factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(true))); return factories; @@ -217,15 +229,35 @@ export const mysql: IntrospectionProvider = { return factories; } - // Handle numeric values (integers and decimals) - // Check decimals first to preserve format like 0.00 + // Handle numeric values based on field type if (/^-?\d+\.\d+$/.test(val)) { - // Preserve the original decimal format - factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); + if (fieldType === 'Decimal') { + // For Decimal, normalize to 2 decimal places if it's all zeros after decimal + const numVal = parseFloat(val); + if (numVal === Math.floor(numVal)) { + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(numVal.toFixed(2)))); + } else { + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(String(numVal)))); + } + } else if (fieldType === 'Float') { + // For Float, preserve decimal point + const numVal = parseFloat(val); + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? 
numVal.toFixed(1) : String(numVal)))); + } else { + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); + } return factories; } if (/^-?\d+$/.test(val)) { - factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); + if (fieldType === 'Float') { + // For Float fields, add .0 to integer values + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val + '.0'))); + } else if (fieldType === 'Decimal') { + // For Decimal fields, add .00 to integer values + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val + '.00'))); + } else { + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); + } return factories; } diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index ebe64ef3d..26bc0ed92 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -109,7 +109,7 @@ export const postgresql: IntrospectionProvider = { return { type: 'bytea' }; } }, - getDefaultValue({ defaultValue, fieldName, services, enums }) { + getDefaultValue({ defaultValue, fieldName, fieldType: _fieldType, services, enums }) { const val = defaultValue.trim(); const factories: DataFieldAttributeFactory[] = []; diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index 252a8a300..fefb2e950 100644 --- a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -69,6 +69,7 @@ export interface IntrospectionProvider { getDefaultDatabaseType(type: BuiltinType): { precisition?: number; type: string } | undefined; getDefaultValue(args: { fieldName: string; + fieldType: BuiltinType | 'Unsupported'; defaultValue: string; services: ZModelServices; enums: Enum[]; diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts 
b/packages/cli/src/actions/pull/provider/sqlite.ts index 1704cb8f0..fc9991fef 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -9,7 +9,12 @@ export const sqlite: IntrospectionProvider = { isSupportedFeature(feature) { switch (feature) { case 'Schema': + // Multi-schema feature is not available for SQLite because it doesn't have + // the same concept of schemas as namespaces (unlike PostgreSQL, CockroachDB, SQL Server). + return false; case 'NativeEnum': + // SQLite doesn't support native enum types + return false; default: return false; } @@ -239,7 +244,7 @@ export const sqlite: IntrospectionProvider = { } }, - getDefaultValue({ defaultValue, fieldName, services, enums }) { + getDefaultValue({ defaultValue, fieldName, fieldType: _fieldType, services, enums }) { const val = defaultValue.trim(); const factories: DataFieldAttributeFactory[] = []; From 52f1ffa8c125773157d119fd6d0b36f2d4e1b067 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 15:19:18 +0100 Subject: [PATCH 57/83] test(cli): expand and reorganize db pull tests Enhances the test suite for the database pull command by adding comprehensive coverage for common schema features and PostgreSQL-specific functionality. Includes new test cases for: - Restoring complex schemas from scratch, including relations and indexes - Preserving existing imports in multi-file schema setups - Handling PostgreSQL-specific features like multi-schema support and native enums - Verifying schema preservation for field and table mappings The tests are restructured for better clarity across different database providers. 
--- packages/cli/test/db/pull.test.ts | 421 +++++++++++++++++++++++++----- 1 file changed, 357 insertions(+), 64 deletions(-) diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index b04a46ffc..ec152d190 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -4,6 +4,7 @@ import { describe, expect, it } from 'vitest'; import { createProject, getDefaultPrelude, runCli } from '../utils'; import { loadSchemaDocument } from '../../src/actions/action-utils'; import { ZModelCodeGenerator } from '@zenstackhq/language'; +import { getTestDbProvider } from '@zenstackhq/testtools'; const getSchema = (workDir: string) => fs.readFileSync(path.join(workDir, 'zenstack/schema.zmodel')).toString(); const generator = new ZModelCodeGenerator({ @@ -11,9 +12,193 @@ const generator = new ZModelCodeGenerator({ indent: 4, }); -describe('DB pull - Sqlite specific', () => { - it("simple schema - pull shouldn't modify the schema", () => { - const workDir = createProject(`model User { +describe('DB pull - Common features (all providers)', () => { + describe('Pull from zero - restore complete schema from database', () => { + it('should restore basic schema with all supported types', async () => { + const workDir = createProject( + `model User { + id String @id @default(cuid()) + email String @unique + name String? + age Int @default(0) + balance Decimal @default(0.00) + isActive Boolean @default(true) + bigCounter BigInt @default(0) + score Float @default(0.0) + bio String? + avatar Bytes? + metadata Json? 
+ createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt +}`, + ); + runCli('format', workDir); + runCli('db push', workDir); + + // Store the schema after db push (this is what provider names will be) + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); + const expectedSchema = generator.generate(model); + + // Remove schema content to simulate restoration from zero + fs.writeFileSync(schemaFile, getDefaultPrelude()); + + // Pull should fully restore the schema + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + expect(restoredSchema).toEqual(expectedSchema); + expect(restoredSchema).toContain('model User'); + }); + + it('should restore schema with relations', async () => { + const workDir = createProject( + `model User { + id String @id @default(cuid()) + email String @unique + posts Post[] +} + +model Post { + id Int @id @default(autoincrement()) + title String + author User @relation(fields: [authorId], references: [id], onDelete: Cascade) + authorId String +}`, + ); + runCli('format', workDir); + runCli('db push', workDir); + + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); + const expectedSchema = generator.generate(model); + + fs.writeFileSync(schemaFile, getDefaultPrelude()); + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + expect(restoredSchema).toEqual(expectedSchema); + }); + + it('should restore schema with many-to-many relations', async () => { + const workDir = createProject( + `model Post { + id Int @id @default(autoincrement()) + title String + tags PostTag[] +} + +model Tag { + id Int @id @default(autoincrement()) + name String @unique + posts PostTag[] +} + +model PostTag { + post Post @relation(fields: [postId], references: [id], onDelete: Cascade) + postId Int 
+ tag Tag @relation(fields: [tagId], references: [id], onDelete: Cascade) + tagId Int + + @@id([postId, tagId]) +}`, + ); + runCli('format', workDir); + runCli('db push', workDir); + + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); + const expectedSchema = generator.generate(model); + + fs.writeFileSync(schemaFile, getDefaultPrelude()); + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + expect(restoredSchema).toEqual(expectedSchema); + }); + + it('should restore schema with indexes and unique constraints', async () => { + const workDir = createProject( + `model User { + id String @id @default(cuid()) + email String @unique + username String + firstName String + lastName String + role String + + @@unique([username, email]) + @@index([role]) + @@index([firstName, lastName]) +}`, + ); + runCli('format', workDir); + runCli('db push', workDir); + + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); + const expectedSchema = generator.generate(model); + + fs.writeFileSync(schemaFile, getDefaultPrelude()); + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + expect(restoredSchema).toEqual(expectedSchema); + }); + + it('should restore schema with composite primary keys', async () => { + const workDir = createProject( + `model UserRole { + userId String + role String + grantedAt DateTime @default(now()) + + @@id([userId, role]) +}`, + ); + runCli('format', workDir); + runCli('db push', workDir); + + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); + const expectedSchema = generator.generate(model); + + fs.writeFileSync(schemaFile, getDefaultPrelude()); + runCli('db pull --indent 4', workDir); + + const 
restoredSchema = getSchema(workDir); + expect(restoredSchema).toEqual(expectedSchema); + }); + + it('should restore schema with field and table mappings', async () => { + const workDir = createProject( + `model User { + id String @id @default(cuid()) + email String @unique @map("email_address") + firstName String @map("first_name") + lastName String @map("last_name") + + @@map("users") +}`, + ); + runCli('format', workDir); + runCli('db push', workDir); + + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); + const expectedSchema = generator.generate(model); + + fs.writeFileSync(schemaFile, getDefaultPrelude()); + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + expect(restoredSchema).toEqual(expectedSchema); + }); + }); + + describe('Pull with existing schema - preserve schema features', () => { + it('should not modify a comprehensive schema with all features', () => { + const workDir = createProject(`model User { id String @id @default(cuid()) email String @unique @map("email_address") name String? 
@default("Anonymous") @@ -91,90 +276,198 @@ enum Role { ADMIN MODERATOR }`, + ); + runCli('format', workDir); + runCli('db push', workDir); + + const originalSchema = getSchema(workDir); + runCli('db pull --indent 4', workDir); + expect(getSchema(workDir)).toEqual(originalSchema); + }); + + it('should preserve imports when pulling with multi-file schema', () => { + const workDir = createProject(''); + const schemaPath = path.join(workDir, 'zenstack/schema.zmodel'); + const modelsDir = path.join(workDir, 'zenstack/models'); + fs.mkdirSync(modelsDir, { recursive: true }); + + // Create main schema with imports + const mainSchema = `${getDefaultPrelude()} + +import './models/user' +import './models/post'`; + fs.writeFileSync(schemaPath, mainSchema); + + // Create user model + const userModel = `model User { + id String @id @default(cuid()) + email String @unique + name String? + posts Post[] + createdAt DateTime @default(now()) +}`; + fs.writeFileSync(path.join(modelsDir, 'user.zmodel'), userModel); + + // Create post model + const postModel = `model Post { + id Int @id @default(autoincrement()) + title String + content String? 
+ author User @relation(fields: [authorId], references: [id], onDelete: Cascade) + authorId String + createdAt DateTime @default(now()) +}`; + fs.writeFileSync(path.join(modelsDir, 'post.zmodel'), postModel); + + runCli('format', workDir); + runCli('db push', workDir); + + // Store original schemas + const originalMainSchema = fs.readFileSync(schemaPath).toString(); + const originalUserSchema = fs.readFileSync(path.join(modelsDir, 'user.zmodel')).toString(); + const originalPostSchema = fs.readFileSync(path.join(modelsDir, 'post.zmodel')).toString(); + + // Pull and verify imports are preserved + runCli('db pull --indent 4', workDir); + + const pulledMainSchema = fs.readFileSync(schemaPath).toString(); + const pulledUserSchema = fs.readFileSync(path.join(modelsDir, 'user.zmodel')).toString(); + const pulledPostSchema = fs.readFileSync(path.join(modelsDir, 'post.zmodel')).toString(); + + expect(pulledMainSchema).toEqual(originalMainSchema); + expect(pulledUserSchema).toEqual(originalUserSchema); + expect(pulledPostSchema).toEqual(originalPostSchema); + + // Verify imports are still present in main schema + expect(pulledMainSchema).toContain("import './models/user'"); + expect(pulledMainSchema).toContain("import './models/post'"); + }); + }); +}); + +describe('DB pull - PostgreSQL specific features', () => { + it('should restore schema with multiple database schemas', async ({ skip }) => { + const provider = getTestDbProvider(); + if (provider !== 'postgresql') { + skip(); + return; + } + const workDir = createProject( + `model User { + id String @id @default(cuid()) + email String @unique + posts Post[] + + @@schema("auth") +} + +model Post { + id Int @id @default(autoincrement()) + title String + author User @relation(fields: [authorId], references: [id], onDelete: Cascade) + authorId String + + @@schema("content") +}`, + { provider: 'postgresql' }, ); runCli('format', workDir); runCli('db push', workDir); - const originalSchema = getSchema(workDir); + const 
schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); + const expectedSchema = generator.generate(model); + + fs.writeFileSync(schemaFile, getDefaultPrelude({ provider: 'postgresql' })); runCli('db pull --indent 4', workDir); - expect(getSchema(workDir)).toEqual(originalSchema); + + const restoredSchema = getSchema(workDir); + expect(restoredSchema).toEqual(expectedSchema); + expect(restoredSchema).toContain('@@schema("auth")'); + expect(restoredSchema).toContain('@@schema("content")'); }); - it('simple schema - pull should recreate the schema.zmodel', async () => { + it('should preserve native PostgreSQL enums when schema exists', ({ skip }) => { + const provider = getTestDbProvider(); + if (provider !== 'postgresql') { + skip(); + return; + } const workDir = createProject( - `model Post { - id Int @id @default(autoincrement()) - authorId String - title String - content String? - published Boolean @default(false) - createdAt DateTime @default(now()) - slug String - score Float @default(0.0) - metadata Json? - user User @relation(fields: [authorId], references: [id], onDelete: Cascade, onUpdate: Cascade) - postTag PostTag[] + `model User { + id String @id @default(cuid()) + email String @unique + status UserStatus @default(ACTIVE) + role UserRole @default(USER) +} - @@unique([authorId, slug]) - @@index([authorId, published]) +enum UserStatus { + ACTIVE + INACTIVE + SUSPENDED } -model PostTag { - postId Int - tagId Int - assignedAt DateTime @default(now()) - note String? 
@default("initial") - post Post @relation(fields: [postId], references: [id], onDelete: Cascade, onUpdate: Cascade) - tag Tag @relation(fields: [tagId], references: [id], onDelete: Cascade, onUpdate: Cascade) +enum UserRole { + USER + ADMIN + MODERATOR +}`, + { provider: 'postgresql' }, + ); + runCli('format', workDir); + runCli('db push', workDir); - @@id([postId, tagId]) -} + const originalSchema = getSchema(workDir); + runCli('db pull --indent 4', workDir); + const pulledSchema = getSchema(workDir); -model Profile { - id Int @id @default(autoincrement()) - userId String @unique - sharedUserId String @unique @map("shared_userId") - bio String? - avatarUrl String? - profileUserId User @relation("Profile_userIdToUser", fields: [userId], references: [id], onDelete: Cascade, onUpdate: Cascade) - profileSharedUserId User @relation("Profile_shared_userIdToUser", fields: [sharedUserId], references: [id], onDelete: Cascade, onUpdate: Cascade) -} + expect(pulledSchema).toEqual(originalSchema); + expect(pulledSchema).toContain('enum UserStatus'); + expect(pulledSchema).toContain('enum UserRole'); + }); -model Tag { - id Int @id @default(autoincrement()) - name String @unique - createdAt DateTime @default(now()) - postTag PostTag[] + it('should not modify schema with PostgreSQL-specific features', ({ skip }) => { + const provider = getTestDbProvider(); + if (provider !== 'postgresql') { + skip(); + return; + } + const workDir = createProject( + `model User { + id String @id @default(cuid()) + email String @unique + status UserStatus @default(ACTIVE) + posts Post[] + metadata Json? - @@index([name], map: "tag_name_idx") + @@schema("auth") + @@index([status]) } -model User { - id String @id - email String @unique - name String? @default("Anonymous") - role String @default("USER") - createdAt DateTime @default(now()) - jsonData Json? - balance Decimal @default(0.00) - isActive Boolean @default(true) - bigCounter BigInt @default(0) - bytes Bytes? 
- post Post[] - profileUserId Profile? @relation("Profile_userIdToUser") - profileSharedUserId Profile? @relation("Profile_shared_userIdToUser") +model Post { + id Int @id @default(autoincrement()) + title String + author User @relation(fields: [authorId], references: [id], onDelete: Cascade) + authorId String + tags String[] - @@index([role]) + @@schema("content") + @@index([authorId]) +} + +enum UserStatus { + ACTIVE + INACTIVE + SUSPENDED }`, + { provider: 'postgresql' }, ); runCli('format', workDir); runCli('db push', workDir); - const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); - const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); - const originalSchema = generator.generate(model); - fs.writeFileSync(path.join(workDir, 'zenstack/schema.zmodel'), getDefaultPrelude()); - runCli('db pull --indent 4 --field-casing=camel', workDir); + const originalSchema = getSchema(workDir); + runCli('db pull --indent 4', workDir); + expect(getSchema(workDir)).toEqual(originalSchema); }); }); From 381b1d2e151787c8c675a74c0d1f93d79413e9f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 20:03:15 +0100 Subject: [PATCH 58/83] refactor: restructure introspection provider interface and attribute generation --- packages/cli/src/actions/pull/index.ts | 42 +++++++++---------- .../cli/src/actions/pull/provider/provider.ts | 22 ++++++++-- 2 files changed, 38 insertions(+), 26 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 78f5ca7ac..896334473 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -256,15 +256,30 @@ export function syncTable({ builder.addAttribute((b) => b.setDecl(idAttribute)); } + // Add field-type-based attributes (e.g., @updatedAt for DateTime fields, @db.* attributes) + const fieldAttrs = provider.getFieldAttributes({ + fieldName: column.name, + fieldType: builtinType.type, 
+ datatype: column.datatype, + length: column.length, + precision: column.precision, + services, + }); + fieldAttrs.forEach(builder.addAttribute.bind(builder)); + if (column.default) { - const defaultValuesAttrs = provider.getDefaultValue({ - fieldName: column.name, + const defaultExprBuilder = provider.getDefaultValue({ fieldType: builtinType.type, defaultValue: column.default, services, enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], }); - defaultValuesAttrs.forEach(builder.addAttribute.bind(builder)); + if (defaultExprBuilder) { + const defaultAttr = new DataFieldAttributeFactory() + .setDecl(getAttributeRef('@default', services)) + .addArg(defaultExprBuilder); + builder.addAttribute(defaultAttr); + } } if (column.unique && !column.pk) { @@ -272,7 +287,7 @@ export function syncTable({ b.setDecl(uniqueAttribute); // Only add map if the unique constraint name differs from default patterns // Default patterns: TableName_columnName_key (Prisma) or just columnName (MySQL) - const isDefaultName = !column.unique_name + const isDefaultName = !column.unique_name || column.unique_name === `${table.name}_${column.name}_key` || column.unique_name === column.name; if (!isDefaultName) { @@ -288,25 +303,6 @@ export function syncTable({ ); } - const dbAttr = services.shared.workspace.IndexManager.allElements('Attribute').find( - (d) => d.name.toLowerCase() === `@db.${column.datatype.toLowerCase()}`, - )?.node as Attribute | undefined; - - const defaultDatabaseType = provider.getDefaultDatabaseType(builtinType.type as BuiltinType); - - if ( - dbAttr && - defaultDatabaseType && - (defaultDatabaseType.type !== column.datatype || - (defaultDatabaseType.precisition && - defaultDatabaseType.precisition !== (column.length || column.precision))) - ) { - const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); - if (column.length || column.precision) - dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(column.length! 
|| column.precision!)); - builder.addAttribute(dbAttrFactory); - } - return builder; }); }); diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index fefb2e950..6edee0663 100644 --- a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -1,6 +1,6 @@ import type { ZModelServices } from '@zenstackhq/language'; -import type { BuiltinType, Enum } from '@zenstackhq/language/ast'; -import type { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; +import type { BuiltinType, Enum, Expression } from '@zenstackhq/language/ast'; +import type { AstFactory, DataFieldAttributeFactory, ExpressionBuilder } from '@zenstackhq/language/factory'; export type Cascade = 'NO ACTION' | 'RESTRICT' | 'CASCADE' | 'SET NULL' | 'SET DEFAULT' | null; @@ -67,12 +67,28 @@ export interface IntrospectionProvider { isArray: boolean; }; getDefaultDatabaseType(type: BuiltinType): { precisition?: number; type: string } | undefined; + /** + * Get the expression builder callback for a field's @default attribute value. + * Returns null if no @default attribute should be added. + * The callback will be passed to DataFieldAttributeFactory.addArg(). + */ getDefaultValue(args: { - fieldName: string; fieldType: BuiltinType | 'Unsupported'; defaultValue: string; services: ZModelServices; enums: Enum[]; + }): ((builder: ExpressionBuilder) => AstFactory) | null; + /** + * Get additional field attributes based on field type and name (e.g., @updatedAt for DateTime fields, @db.* attributes). + * This is separate from getDefaultValue to keep concerns separated. 
+ */ + getFieldAttributes(args: { + fieldName: string; + fieldType: BuiltinType | 'Unsupported'; + datatype: string; + length: number | null; + precision: number | null; + services: ZModelServices; }): DataFieldAttributeFactory[]; isSupportedFeature(feature: DatabaseFeature): boolean; } From 34da2df3ce27baa7790f2244d8a86b9d93066813 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 20:03:15 +0100 Subject: [PATCH 59/83] feat: modernize MySQL introspection provider --- .../cli/src/actions/pull/provider/mysql.ts | 226 ++++++++++-------- 1 file changed, 126 insertions(+), 100 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index afd468049..66e103c21 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -1,4 +1,4 @@ -import type { BuiltinType } from '@zenstackhq/language/ast'; +import type { Attribute, BuiltinType } from '@zenstackhq/language/ast'; import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; @@ -178,143 +178,169 @@ export const mysql: IntrospectionProvider = { await connection.end(); } }, - getDefaultValue({ defaultValue, fieldName, fieldType, services, enums }) { + getDefaultValue({ defaultValue, fieldType, services, enums }) { const val = defaultValue.trim(); - const factories: DataFieldAttributeFactory[] = []; - const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef('@default', services)); + // Handle NULL early + if (val.toUpperCase() === 'NULL') { + return null; + } - // Handle CURRENT_TIMESTAMP with optional precision (e.g., CURRENT_TIMESTAMP(3)) - if (/^CURRENT_TIMESTAMP(\(\d*\))?$/i.test(val) || val.toLowerCase() === 'current_timestamp()' || 
val.toLowerCase() === 'now()') { - factories.push(defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)))); + switch (fieldType) { + case 'DateTime': + if (/^CURRENT_TIMESTAMP(\(\d*\))?$/i.test(val) || val.toLowerCase() === 'current_timestamp()' || val.toLowerCase() === 'now()') { + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)); + } + // Fallback to string literal for other DateTime defaults + return (ab) => ab.StringLiteral.setValue(val); - if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') { - factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services))); - } - return factories; - } + case 'Int': + case 'BigInt': + if (val.toLowerCase() === 'auto_increment') { + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services)); + } + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val); + } + break; - // Handle auto_increment - if (val.toLowerCase() === 'auto_increment') { - factories.push( - defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services))), - ); - return factories; + case 'Float': + if (/^-?\d+\.\d+$/.test(val)) { + const numVal = parseFloat(val); + return (ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? 
numVal.toFixed(1) : String(numVal)); + } + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val + '.0'); + } + break; + + case 'Decimal': + if (/^-?\d+\.\d+$/.test(val)) { + const numVal = parseFloat(val); + if (numVal === Math.floor(numVal)) { + return (ab) => ab.NumberLiteral.setValue(numVal.toFixed(2)); + } + return (ab) => ab.NumberLiteral.setValue(String(numVal)); + } + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val + '.00'); + } + break; + + case 'Boolean': + if (val === 'true' || val === '1' || val === "b'1'") { + return (ab) => ab.BooleanLiteral.setValue(true); + } + if (val === 'false' || val === '0' || val === "b'0'") { + return (ab) => ab.BooleanLiteral.setValue(false); + } + break; + + case 'String': + if (val.startsWith("'") && val.endsWith("'")) { + const strippedValue = val.slice(1, -1).replace(/''/g, "'"); + const enumDef = enums.find((e) => e.fields.find((v) => getDbName(v) === strippedValue)); + if (enumDef) { + const enumField = enumDef.fields.find((v) => getDbName(v) === strippedValue); + if (enumField) { + return (ab) => ab.ReferenceExpr.setTarget(enumField); + } + } + return (ab) => ab.StringLiteral.setValue(strippedValue); + } + if (val.toLowerCase() === 'uuid()') { + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('uuid', services)); + } + if (/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(val)) { + return (ab) => ab.StringLiteral.setValue(val); + } + break; } - // Handle NULL - if (val.toUpperCase() === 'NULL') { - return []; + // Fallback handlers for values that don't match field type-specific patterns + if (/^CURRENT_TIMESTAMP(\(\d*\))?$/i.test(val) || val.toLowerCase() === 'current_timestamp()' || val.toLowerCase() === 'now()') { + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)); } - // Handle boolean values based on field type - if (fieldType === 'Boolean') { - if (val === 'true' || val === '1' || val === "b'1'") { - factories.push(defaultAttr.addArg((ab) => 
ab.BooleanLiteral.setValue(true))); - return factories; - } - if (val === 'false' || val === '0' || val === "b'0'") { - factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(false))); - return factories; - } + if (val.toLowerCase() === 'auto_increment') { + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services)); } - // Handle boolean literal values for non-boolean fields if (val === 'true' || val === "b'1'") { - factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(true))); - return factories; + return (ab) => ab.BooleanLiteral.setValue(true); } if (val === 'false' || val === "b'0'") { - factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(false))); - return factories; + return (ab) => ab.BooleanLiteral.setValue(false); } - // Handle numeric values based on field type - if (/^-?\d+\.\d+$/.test(val)) { - if (fieldType === 'Decimal') { - // For Decimal, normalize to 2 decimal places if it's all zeros after decimal - const numVal = parseFloat(val); - if (numVal === Math.floor(numVal)) { - factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(numVal.toFixed(2)))); - } else { - factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(String(numVal)))); - } - } else if (fieldType === 'Float') { - // For Float, preserve decimal point - const numVal = parseFloat(val); - factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? 
numVal.toFixed(1) : String(numVal)))); - } else { - factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); - } - return factories; - } - if (/^-?\d+$/.test(val)) { - if (fieldType === 'Float') { - // For Float fields, add .0 to integer values - factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val + '.0'))); - } else if (fieldType === 'Decimal') { - // For Decimal fields, add .00 to integer values - factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val + '.00'))); - } else { - factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); - } - return factories; + if (/^-?\d+\.\d+$/.test(val) || /^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val); } - // Handle string values (quoted with single quotes) if (val.startsWith("'") && val.endsWith("'")) { const strippedValue = val.slice(1, -1).replace(/''/g, "'"); - - // Check if it's an enum value const enumDef = enums.find((e) => e.fields.find((v) => getDbName(v) === strippedValue)); if (enumDef) { const enumField = enumDef.fields.find((v) => getDbName(v) === strippedValue); if (enumField) { - factories.push(defaultAttr.addArg((ab) => ab.ReferenceExpr.setTarget(enumField))); - return factories; + return (ab) => ab.ReferenceExpr.setTarget(enumField); } } - - factories.push(defaultAttr.addArg((ab) => ab.StringLiteral.setValue(strippedValue))); - return factories; + return (ab) => ab.StringLiteral.setValue(strippedValue); } // Handle function calls (e.g., uuid(), now()) if (val.includes('(') && val.includes(')')) { - // Check for known functions if (val.toLowerCase() === 'uuid()') { - factories.push( - defaultAttr.addArg((a) => a.InvocationExpr.setFunction(getFunctionRef('uuid', services))), - ); - return factories; + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('uuid', services)); } - - // For other functions, use dbgenerated - factories.push( - defaultAttr.addArg((a) => - 
a.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => - a.setValue((v) => v.StringLiteral.setValue(val)), - ), - ), - ); - return factories; + return (ab) => + ab.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => + a.setValue((v) => v.StringLiteral.setValue(val)), + ); } - // Handle unquoted string values (MySQL sometimes returns defaults without quotes) - // If it's not a number, boolean, or function, treat it as a string + // Handle unquoted string values if (/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(val)) { - factories.push(defaultAttr.addArg((ab) => ab.StringLiteral.setValue(val))); - return factories; + return (ab) => ab.StringLiteral.setValue(val); } // For any other unhandled cases, use dbgenerated - factories.push( - defaultAttr.addArg((a) => - a.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => - a.setValue((v) => v.StringLiteral.setValue(val)), - ), - ), - ); + return (ab) => + ab.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => + a.setValue((v) => v.StringLiteral.setValue(val)), + ); + }, + + getFieldAttributes({ fieldName, fieldType, datatype, length, precision, services }) { + const factories: DataFieldAttributeFactory[] = []; + + // Add @updatedAt for DateTime fields named updatedAt or updated_at + if (fieldType === 'DateTime' && (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at')) { + factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services))); + } + + // Add @db.* attribute if the datatype differs from the default + const dbAttr = services.shared.workspace.IndexManager.allElements('Attribute').find( + (d) => d.name.toLowerCase() === `@db.${datatype.toLowerCase()}`, + )?.node as Attribute | undefined; + + const defaultDatabaseType = this.getDefaultDatabaseType(fieldType as BuiltinType); + + if ( + dbAttr && + defaultDatabaseType && + (defaultDatabaseType.type !== 
datatype || + (defaultDatabaseType.precisition && + defaultDatabaseType.precisition !== (length || precision))) + ) { + const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); + if (length || precision) { + dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(length! || precision!)); + } + factories.push(dbAttrFactory); + } + return factories; }, }; From 07c5804de93e79ce42dfa2a992f07d29f7565970 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 20:03:15 +0100 Subject: [PATCH 60/83] feat: modernize PostgreSQL introspection provider --- .../src/actions/pull/provider/postgresql.ts | 176 ++++++++++++------ 1 file changed, 121 insertions(+), 55 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 26bc0ed92..ca70065ff 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -1,4 +1,4 @@ -import type { BuiltinType } from '@zenstackhq/language/ast'; +import type { Attribute, BuiltinType } from '@zenstackhq/language/ast'; import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { Client } from 'pg'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; @@ -109,37 +109,10 @@ export const postgresql: IntrospectionProvider = { return { type: 'bytea' }; } }, - getDefaultValue({ defaultValue, fieldName, fieldType: _fieldType, services, enums }) { + getDefaultValue({ defaultValue, fieldType, services, enums }) { const val = defaultValue.trim(); - const factories: DataFieldAttributeFactory[] = []; - - const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef('@default', services)); - - if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { - factories.push(defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)))); - - if (fieldName.toLowerCase() === 'updatedat' || 
fieldName.toLowerCase() === 'updated_at') { - factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services))); - } - return factories; - } - if (val.startsWith('nextval(')) { - factories.push( - defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services))), - ); - return factories; - } - if (val.includes('(') && val.includes(')')) { - factories.push( - defaultAttr.addArg((a) => - a.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => - a.setValue((v) => v.StringLiteral.setValue(val)), - ), - ), - ); - return factories; - } + // Handle type casts early (PostgreSQL-specific pattern like 'value'::type) if (val.includes('::')) { const [value, type] = val .replace(/'/g, '') @@ -151,23 +124,17 @@ export const postgresql: IntrospectionProvider = { case 'json': case 'jsonb': case 'text': - if (value === 'NULL') return []; - factories.push(defaultAttr.addArg((a) => a.StringLiteral.setValue(value))); - break; + if (value === 'NULL') return null; + return (ab) => ab.StringLiteral.setValue(value); case 'real': - factories.push(defaultAttr.addArg((a) => a.NumberLiteral.setValue(value))); - break; + return (ab) => ab.NumberLiteral.setValue(value); default: { const enumDef = enums.find((e) => getDbName(e, true) === type); if (!enumDef) { - factories.push( - defaultAttr.addArg((a) => - a.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => - a.setValue((v) => v.StringLiteral.setValue(val)), - ), - ), - ); - break; + return (ab) => + ab.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => + a.setValue((v) => v.StringLiteral.setValue(val)), + ); } const enumField = enumDef.fields.find((v) => getDbName(v) === value); if (!enumField) { @@ -175,30 +142,129 @@ export const postgresql: IntrospectionProvider = { `Enum value ${value} not found in enum ${type} for default value ${defaultValue}`, ); } - - 
factories.push(defaultAttr.addArg((ab) => ab.ReferenceExpr.setTarget(enumField))); - break; + return (ab) => ab.ReferenceExpr.setTarget(enumField); } } + } + + switch (fieldType) { + case 'DateTime': + if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)); + } + // Fallback to string literal for other DateTime defaults + return (ab) => ab.StringLiteral.setValue(val); + + case 'Int': + case 'BigInt': + if (val.startsWith('nextval(')) { + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services)); + } + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val); + } + break; + + case 'Float': + if (/^-?\d+\.\d+$/.test(val)) { + const numVal = parseFloat(val); + return (ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? numVal.toFixed(1) : String(numVal)); + } + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val + '.0'); + } + break; + + case 'Decimal': + if (/^-?\d+\.\d+$/.test(val)) { + const numVal = parseFloat(val); + if (numVal === Math.floor(numVal)) { + return (ab) => ab.NumberLiteral.setValue(numVal.toFixed(2)); + } + return (ab) => ab.NumberLiteral.setValue(String(numVal)); + } + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val + '.00'); + } + break; + + case 'Boolean': + if (val === 'true') { + return (ab) => ab.BooleanLiteral.setValue(true); + } + if (val === 'false') { + return (ab) => ab.BooleanLiteral.setValue(false); + } + break; - return factories; + case 'String': + if (val.startsWith("'") && val.endsWith("'")) { + return (ab) => ab.StringLiteral.setValue(val.slice(1, -1).replace(/''/g, "'")); + } + break; + } + + // Fallback handlers for values that don't match field type-specific patterns + if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)); + } + + if (val.startsWith('nextval(')) { + 
return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services)); + } + + if (val.includes('(') && val.includes(')')) { + return (ab) => + ab.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => + a.setValue((v) => v.StringLiteral.setValue(val)), + ); } if (val === 'true' || val === 'false') { - factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(val === 'true'))); - return factories; + return (ab) => ab.BooleanLiteral.setValue(val === 'true'); } - if (/^\d+$/.test(val) || /^-?\d+(\.\d+)?$/.test(val)) { - factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); - return factories; + if (/^-?\d+\.\d+$/.test(val) || /^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val); } if (val.startsWith("'") && val.endsWith("'")) { - factories.push(defaultAttr.addArg((ab) => ab.StringLiteral.setValue(val.slice(1, -1).replace(/''/g, "'")))); - return factories; + return (ab) => ab.StringLiteral.setValue(val.slice(1, -1).replace(/''/g, "'")); + } + + return null; + }, + + getFieldAttributes({ fieldName, fieldType, datatype, length, precision, services }) { + const factories: DataFieldAttributeFactory[] = []; + + // Add @updatedAt for DateTime fields named updatedAt or updated_at + if (fieldType === 'DateTime' && (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at')) { + factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services))); } - return []; + + // Add @db.* attribute if the datatype differs from the default + const dbAttr = services.shared.workspace.IndexManager.allElements('Attribute').find( + (d) => d.name.toLowerCase() === `@db.${datatype.toLowerCase()}`, + )?.node as Attribute | undefined; + + const defaultDatabaseType = this.getDefaultDatabaseType(fieldType as BuiltinType); + + if ( + dbAttr && + defaultDatabaseType && + (defaultDatabaseType.type !== datatype || + (defaultDatabaseType.precisition && + 
defaultDatabaseType.precisition !== (length || precision))) + ) { + const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); + if (length || precision) { + dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(length! || precision!)); + } + factories.push(dbAttrFactory); + } + + return factories; }, }; From 9450535954e68be0e8b8d95272b7ce74856534a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 20:03:16 +0100 Subject: [PATCH 61/83] feat: modernize SQLite introspection provider --- .../cli/src/actions/pull/provider/sqlite.ts | 131 ++++++++++++++---- 1 file changed, 106 insertions(+), 25 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index fc9991fef..d90c2cef3 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -1,3 +1,4 @@ +import type { Attribute, BuiltinType } from '@zenstackhq/language/ast'; import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; @@ -141,7 +142,7 @@ export const sqlite: IntrospectionProvider = { nulls: null, })), }; - }); + }).reverse(); // Reverse to maintain creation order // Foreign keys mapping by column name const fkRows = all<{ @@ -244,37 +245,87 @@ export const sqlite: IntrospectionProvider = { } }, - getDefaultValue({ defaultValue, fieldName, fieldType: _fieldType, services, enums }) { + getDefaultValue({ defaultValue, fieldType, services, enums }) { const val = defaultValue.trim(); - const factories: DataFieldAttributeFactory[] = []; - const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef('@default', services)); + switch (fieldType) { + case 'DateTime': + if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { + 
return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)); + } + // Fallback to string literal for other DateTime defaults + return (ab) => ab.StringLiteral.setValue(val); - if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { - factories.push(defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)))); + case 'Int': + case 'BigInt': + if (val === 'autoincrement') { + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services)); + } + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val); + } + break; - if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') { - factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services))); - } - return factories; + case 'Float': + if (/^-?\d+\.\d+$/.test(val)) { + const numVal = parseFloat(val); + return (ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? numVal.toFixed(1) : String(numVal)); + } + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val + '.0'); + } + break; + + case 'Decimal': + if (/^-?\d+\.\d+$/.test(val)) { + const numVal = parseFloat(val); + if (numVal === Math.floor(numVal)) { + return (ab) => ab.NumberLiteral.setValue(numVal.toFixed(2)); + } + return (ab) => ab.NumberLiteral.setValue(String(numVal)); + } + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val + '.00'); + } + break; + + case 'Boolean': + if (val === 'true' || val === '1') { + return (ab) => ab.BooleanLiteral.setValue(true); + } + if (val === 'false' || val === '0') { + return (ab) => ab.BooleanLiteral.setValue(false); + } + break; + + case 'String': + if (val.startsWith("'") && val.endsWith("'")) { + const strippedName = val.slice(1, -1); + const enumDef = enums.find((e) => e.fields.find((v) => getDbName(v) === strippedName)); + if (enumDef) { + const enumField = enumDef.fields.find((v) => getDbName(v) === strippedName); + if 
(enumField) return (ab) => ab.ReferenceExpr.setTarget(enumField); + } + return (ab) => ab.StringLiteral.setValue(strippedName); + } + break; + } + + // Fallback handlers for values that don't match field type-specific patterns + if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)); } - // Handle autoincrement if (val === 'autoincrement') { - factories.push( - defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services))), - ); - return factories; + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services)); } if (val === 'true' || val === 'false') { - factories.push(defaultAttr.addArg((a) => a.BooleanLiteral.setValue(val === 'true'))); - return factories; + return (ab) => ab.BooleanLiteral.setValue(val === 'true'); } - if (!Number.isNaN(parseFloat(val)) || !Number.isNaN(parseInt(val))) { - factories.push(defaultAttr.addArg((a) => a.NumberLiteral.setValue(val))); - return factories; + if (/^-?\d+\.\d+$/.test(val) || /^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val); } if (val.startsWith("'") && val.endsWith("'")) { @@ -282,16 +333,46 @@ export const sqlite: IntrospectionProvider = { const enumDef = enums.find((e) => e.fields.find((v) => getDbName(v) === strippedName)); if (enumDef) { const enumField = enumDef.fields.find((v) => getDbName(v) === strippedName); - if (enumField) factories.push(defaultAttr.addArg((ab) => ab.ReferenceExpr.setTarget(enumField))); - } else { - factories.push(defaultAttr.addArg((a) => a.StringLiteral.setValue(strippedName))); + if (enumField) return (ab) => ab.ReferenceExpr.setTarget(enumField); } - return factories; + return (ab) => ab.StringLiteral.setValue(strippedName); } //TODO: add more default value factories if exists throw new Error( - `This default value type currently is not supported. Plesase open an issue on github. 
Values: "${defaultValue}"`, + `This default value type currently is not supported. Please open an issue on github. Values: "${defaultValue}"`, ); }, + + getFieldAttributes({ fieldName, fieldType, datatype, length, precision, services }) { + const factories: DataFieldAttributeFactory[] = []; + + // Add @updatedAt for DateTime fields named updatedAt or updated_at + if (fieldType === 'DateTime' && (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at')) { + factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services))); + } + + // Add @db.* attribute if the datatype differs from the default + const dbAttr = services.shared.workspace.IndexManager.allElements('Attribute').find( + (d) => d.name.toLowerCase() === `@db.${datatype.toLowerCase()}`, + )?.node as Attribute | undefined; + + const defaultDatabaseType = this.getDefaultDatabaseType(fieldType as BuiltinType); + + if ( + dbAttr && + defaultDatabaseType && + (defaultDatabaseType.type !== datatype || + (defaultDatabaseType.precisition && + defaultDatabaseType.precisition !== (length || precision))) + ) { + const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); + if (length || precision) { + dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(length! 
|| precision!)); + } + factories.push(dbAttrFactory); + } + + return factories; + }, }; From 7a5071b568267f210c27ec42ea3c78e2515bcb1f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 20:03:16 +0100 Subject: [PATCH 62/83] fix: improve relation field naming and default action handling --- packages/cli/src/actions/pull/index.ts | 30 +++++++++++++++----------- 1 file changed, 17 insertions(+), 13 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 896334473..d61873c64 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,9 +1,9 @@ import type { ZModelServices } from '@zenstackhq/language'; import colors from 'colors'; import { + isArrayExpr, isEnum, - type Attribute, - type BuiltinType, + isReferenceExpr, type DataField, type DataModel, type Enum, @@ -444,7 +444,8 @@ export function syncRelation({ | undefined; if (!sourceModel) return; - const sourceField = sourceModel.fields.find((f) => getDbName(f) === relation.column) as DataField | undefined; + const sourceFieldId = sourceModel.fields.findIndex((f) => getDbName(f) === relation.column); + const sourceField = sourceModel.fields[sourceFieldId] as DataField | undefined; if (!sourceField) return; const targetModel = model.declarations.find( @@ -458,11 +459,16 @@ export function syncRelation({ const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? '_' : ''; const relationName = `${relation.table}${simmilarRelations > 0 ? `_${relation.column}` : ''}To${relation.references.table}`; + + const sourceNameFromReference = sourceField.name.toLowerCase().endsWith('id') ? `${resolveNameCasing("camel", sourceField.name.slice(0, -2)).name}${relation.type === 'many'? 
's' : ''}` : undefined; + + const sourceFieldFromReference = sourceModel.fields.find((f) => f.name === sourceNameFromReference); + let { name: sourceFieldName } = resolveNameCasing( options.fieldCasing, simmilarRelations > 0 ? `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` - : targetModel.name, + : `${(!sourceFieldFromReference? sourceNameFromReference : undefined) || resolveNameCasing("camel", targetModel.name).name}${relation.type === 'many'? 's' : ''}`, ); if (sourceModel.fields.find((f) => f.name === sourceFieldName)) { @@ -486,7 +492,9 @@ export function syncRelation({ 'references', ); - if (relation.foreign_key_on_delete && relation.foreign_key_on_delete !== 'SET NULL') { + // Prisma defaults: onDelete is SetNull for optional, Restrict for mandatory + const onDeleteDefault = relation.nullable ? 'SET NULL' : 'RESTRICT'; + if (relation.foreign_key_on_delete && relation.foreign_key_on_delete !== onDeleteDefault) { const enumRef = getEnumRef('ReferentialAction', services); if (!enumRef) throw new Error('ReferentialAction enum not found'); const enumFieldRef = enumRef.fields.find( @@ -496,7 +504,8 @@ export function syncRelation({ ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onDelete'); } - if (relation.foreign_key_on_update && relation.foreign_key_on_update !== 'SET NULL') { + // Prisma default: onUpdate is Cascade + if (relation.foreign_key_on_update && relation.foreign_key_on_update !== 'CASCADE') { const enumRef = getEnumRef('ReferentialAction', services); if (!enumRef) throw new Error('ReferentialAction enum not found'); const enumFieldRef = enumRef.fields.find( @@ -511,14 +520,14 @@ export function syncRelation({ return ab; }); - sourceModel.fields.push(sourceFieldFactory.node); + sourceModel.fields.splice(sourceFieldId, 0, sourceFieldFactory.node); // Remove the original scalar foreign key field const oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? 
'_' : ''; const { name: oppositeFieldName } = resolveNameCasing( options.fieldCasing, simmilarRelations > 0 ? `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` - : sourceModel.name, + : `${resolveNameCasing("camel", sourceModel.name).name}${relation.references.type === 'many'? 's' : ''}`, ); const targetFieldFactory = new DataFieldFactory() @@ -536,9 +545,4 @@ export function syncRelation({ ); targetModel.fields.push(targetFieldFactory.node); - - // targetModel.fields.sort((a, b) => { - // if (a.type.reference || b.type.reference) return a.name.localeCompare(b.name); - // return 0; - // }); } From 7910c095a32ee06416cc91cd37685e34b7fe7346 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 20:03:16 +0100 Subject: [PATCH 63/83] feat: track imports and auto-format during db pull --- packages/cli/src/actions/db.ts | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 6a8135650..28c60051f 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,5 +1,5 @@ import { config } from '@dotenvx/dotenvx'; -import { ZModelCodeGenerator } from '@zenstackhq/language'; +import { formatDocument, ZModelCodeGenerator } from '@zenstackhq/language'; import { DataModel, Enum, type Model } from '@zenstackhq/language/ast'; import colors from 'colors'; import fs from 'node:fs'; @@ -83,7 +83,7 @@ async function runPull(options: PullOptions) { const spinner = ora(); try { const schemaFile = getSchemaFile(options.schema); - const { model, services } = await loadSchemaDocument(schemaFile, { returnServices: true }); + const { model, services } = await loadSchemaDocument(schemaFile, { returnServices: true, keepImports: true }); config({ ignore: ['MISSING_ENV_FILE'], }); @@ -400,7 +400,7 @@ async function runPull(options: PullOptions) { }); if (options.out) { - const 
zmodelSchema = generator.generate(newModel); + const zmodelSchema = await formatDocument(generator.generate(newModel)); console.log(colors.blue(`Writing to ${options.out}`)); @@ -408,11 +408,11 @@ async function runPull(options: PullOptions) { fs.writeFileSync(outPath, zmodelSchema); } else { - docs.forEach(({ uri, parseResult: { value: model } }) => { - const zmodelSchema = generator.generate(model); + for (const { uri, parseResult: { value: model } } of docs) { + const zmodelSchema = await formatDocument(generator.generate(model)); console.log(colors.blue(`Writing to ${uri.path}`)); fs.writeFileSync(uri.fsPath, zmodelSchema); - }); + } } console.log(colors.green.bold('\nPull completed successfully!')); From 7c3022555131ce59c814011f2dfb942ee7129cdd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 20:03:17 +0100 Subject: [PATCH 64/83] test: update pull tests to reflect naming and formatting improvements --- packages/cli/src/actions/pull/index.ts | 2 - packages/cli/test/db/pull.test.ts | 155 +++++++++++-------------- packages/cli/test/utils.ts | 15 ++- 3 files changed, 80 insertions(+), 92 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index d61873c64..b94eb69d1 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,9 +1,7 @@ import type { ZModelServices } from '@zenstackhq/language'; import colors from 'colors'; import { - isArrayExpr, isEnum, - isReferenceExpr, type DataField, type DataModel, type Enum, diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index ec152d190..9c0230141 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -1,9 +1,9 @@ import fs from 'node:fs'; import path from 'node:path'; import { describe, expect, it } from 'vitest'; -import { createProject, getDefaultPrelude, runCli } from '../utils'; +import { createFormattedProject, 
createProject, getDefaultPrelude, runCli } from '../utils'; import { loadSchemaDocument } from '../../src/actions/action-utils'; -import { ZModelCodeGenerator } from '@zenstackhq/language'; +import { ZModelCodeGenerator, formatDocument } from '@zenstackhq/language'; import { getTestDbProvider } from '@zenstackhq/testtools'; const getSchema = (workDir: string) => fs.readFileSync(path.join(workDir, 'zenstack/schema.zmodel')).toString(); @@ -15,9 +15,9 @@ const generator = new ZModelCodeGenerator({ describe('DB pull - Common features (all providers)', () => { describe('Pull from zero - restore complete schema from database', () => { it('should restore basic schema with all supported types', async () => { - const workDir = createProject( + const workDir = await createFormattedProject( `model User { - id String @id @default(cuid()) + id Int @id @default(autoincrement()) email String @unique name String? age Int @default(0) @@ -32,7 +32,6 @@ describe('DB pull - Common features (all providers)', () => { updatedAt DateTime @updatedAt }`, ); - runCli('format', workDir); runCli('db push', workDir); // Store the schema after db push (this is what provider names will be) @@ -48,25 +47,23 @@ describe('DB pull - Common features (all providers)', () => { const restoredSchema = getSchema(workDir); expect(restoredSchema).toEqual(expectedSchema); - expect(restoredSchema).toContain('model User'); }); it('should restore schema with relations', async () => { - const workDir = createProject( - `model User { - id String @id @default(cuid()) - email String @unique - posts Post[] -} - -model Post { + const workDir = await createFormattedProject( + `model Post { id Int @id @default(autoincrement()) title String author User @relation(fields: [authorId], references: [id], onDelete: Cascade) - authorId String + authorId Int +} + +model User { + id Int @id @default(autoincrement()) + email String @unique + posts Post[] }`, ); - runCli('format', workDir); runCli('db push', workDir); const 
schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); @@ -81,17 +78,11 @@ model Post { }); it('should restore schema with many-to-many relations', async () => { - const workDir = createProject( + const workDir = await createFormattedProject( `model Post { id Int @id @default(autoincrement()) title String - tags PostTag[] -} - -model Tag { - id Int @id @default(autoincrement()) - name String @unique - posts PostTag[] + postTags PostTag[] } model PostTag { @@ -101,9 +92,14 @@ model PostTag { tagId Int @@id([postId, tagId]) +} + +model Tag { + id Int @id @default(autoincrement()) + name String @unique + postTags PostTag[] }`, ); - runCli('format', workDir); runCli('db push', workDir); const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); @@ -118,9 +114,9 @@ model PostTag { }); it('should restore schema with indexes and unique constraints', async () => { - const workDir = createProject( + const workDir = await createFormattedProject( `model User { - id String @id @default(cuid()) + id Int @id @default(autoincrement()) email String @unique username String firstName String @@ -130,9 +126,9 @@ model PostTag { @@unique([username, email]) @@index([role]) @@index([firstName, lastName]) + @@index([email, username, role]) }`, ); - runCli('format', workDir); runCli('db push', workDir); const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); @@ -147,7 +143,7 @@ model PostTag { }); it('should restore schema with composite primary keys', async () => { - const workDir = createProject( + const workDir = await createFormattedProject( `model UserRole { userId String role String @@ -156,7 +152,6 @@ model PostTag { @@id([userId, role]) }`, ); - runCli('format', workDir); runCli('db push', workDir); const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); @@ -170,10 +165,13 @@ model PostTag { expect(restoredSchema).toEqual(expectedSchema); }); - it('should restore schema with field and table mappings', async () => { - const workDir = createProject( + }); 
+ + describe('Pull with existing schema - preserve schema features', () => { + it('should preserve field and table mappings', async () => { + const workDir = await createFormattedProject( `model User { - id String @id @default(cuid()) + id Int @id @default(autoincrement()) email String @unique @map("email_address") firstName String @map("first_name") lastName String @map("last_name") @@ -181,25 +179,17 @@ model PostTag { @@map("users") }`, ); - runCli('format', workDir); runCli('db push', workDir); - const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); - const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); - const expectedSchema = generator.generate(model); - - fs.writeFileSync(schemaFile, getDefaultPrelude()); + const originalSchema = getSchema(workDir); runCli('db pull --indent 4', workDir); - const restoredSchema = getSchema(workDir); - expect(restoredSchema).toEqual(expectedSchema); + expect(getSchema(workDir)).toEqual(originalSchema); }); - }); - describe('Pull with existing schema - preserve schema features', () => { - it('should not modify a comprehensive schema with all features', () => { - const workDir = createProject(`model User { - id String @id @default(cuid()) + it('should not modify a comprehensive schema with all features', async () => { + const workDir = await createFormattedProject(`model User { + id Int @id @default(autoincrement()) email String @unique @map("email_address") name String? @default("Anonymous") role Role @default(USER) @@ -221,9 +211,9 @@ model PostTag { model Profile { id Int @id @default(autoincrement()) user User @relation(fields: [userId], references: [id], onDelete: Cascade) - userId String @unique + userId Int @unique user_shared User @relation("shared", fields: [shared_userId], references: [id], onDelete: Cascade) - shared_userId String @unique + shared_userId Int @unique bio String? avatarUrl String? 
@@ -233,7 +223,7 @@ model Profile { model Post { id Int @id @default(autoincrement()) author User @relation(fields: [authorId], references: [id], onDelete: Cascade) - authorId String + authorId Int title String content String? published Boolean @default(false) @@ -277,7 +267,6 @@ enum Role { MODERATOR }`, ); - runCli('format', workDir); runCli('db push', workDir); const originalSchema = getSchema(workDir); @@ -285,48 +274,47 @@ enum Role { expect(getSchema(workDir)).toEqual(originalSchema); }); - it('should preserve imports when pulling with multi-file schema', () => { - const workDir = createProject(''); + it('should preserve imports when pulling with multi-file schema', async () => { + const workDir = createProject('', { customPrelude: true }); const schemaPath = path.join(workDir, 'zenstack/schema.zmodel'); const modelsDir = path.join(workDir, 'zenstack/models'); + fs.mkdirSync(modelsDir, { recursive: true }); // Create main schema with imports - const mainSchema = `${getDefaultPrelude()} + const mainSchema = await formatDocument(`import "./models/user" +import "./models/post" -import './models/user' -import './models/post'`; +${getDefaultPrelude()}`); fs.writeFileSync(schemaPath, mainSchema); // Create user model - const userModel = `model User { - id String @id @default(cuid()) + const userModel = await formatDocument(`import "./post" + +model User { + id Int @id @default(autoincrement()) email String @unique name String? posts Post[] createdAt DateTime @default(now()) -}`; +}`); fs.writeFileSync(path.join(modelsDir, 'user.zmodel'), userModel); // Create post model - const postModel = `model Post { + const postModel = await formatDocument(`import "./user" + +model Post { id Int @id @default(autoincrement()) title String content String? 
author User @relation(fields: [authorId], references: [id], onDelete: Cascade) - authorId String + authorId Int createdAt DateTime @default(now()) -}`; +}`); fs.writeFileSync(path.join(modelsDir, 'post.zmodel'), postModel); - runCli('format', workDir); runCli('db push', workDir); - // Store original schemas - const originalMainSchema = fs.readFileSync(schemaPath).toString(); - const originalUserSchema = fs.readFileSync(path.join(modelsDir, 'user.zmodel')).toString(); - const originalPostSchema = fs.readFileSync(path.join(modelsDir, 'post.zmodel')).toString(); - // Pull and verify imports are preserved runCli('db pull --indent 4', workDir); @@ -334,13 +322,9 @@ import './models/post'`; const pulledUserSchema = fs.readFileSync(path.join(modelsDir, 'user.zmodel')).toString(); const pulledPostSchema = fs.readFileSync(path.join(modelsDir, 'post.zmodel')).toString(); - expect(pulledMainSchema).toEqual(originalMainSchema); - expect(pulledUserSchema).toEqual(originalUserSchema); - expect(pulledPostSchema).toEqual(originalPostSchema); - - // Verify imports are still present in main schema - expect(pulledMainSchema).toContain("import './models/user'"); - expect(pulledMainSchema).toContain("import './models/post'"); + expect(pulledMainSchema).toEqual(mainSchema); + expect(pulledUserSchema).toEqual(userModel); + expect(pulledPostSchema).toEqual(postModel); }); }); }); @@ -352,9 +336,9 @@ describe('DB pull - PostgreSQL specific features', () => { skip(); return; } - const workDir = createProject( + const workDir = await createFormattedProject( `model User { - id String @id @default(cuid()) + id Int @id @default(autoincrement()) email String @unique posts Post[] @@ -365,13 +349,12 @@ model Post { id Int @id @default(autoincrement()) title String author User @relation(fields: [authorId], references: [id], onDelete: Cascade) - authorId String + authorId Int @@schema("content") }`, { provider: 'postgresql' }, ); - runCli('format', workDir); runCli('db push', workDir); const 
schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); @@ -387,15 +370,15 @@ model Post { expect(restoredSchema).toContain('@@schema("content")'); }); - it('should preserve native PostgreSQL enums when schema exists', ({ skip }) => { + it('should preserve native PostgreSQL enums when schema exists', async ({ skip }) => { const provider = getTestDbProvider(); if (provider !== 'postgresql') { skip(); return; } - const workDir = createProject( + const workDir = await createFormattedProject( `model User { - id String @id @default(cuid()) + id Int @id @default(autoincrement()) email String @unique status UserStatus @default(ACTIVE) role UserRole @default(USER) @@ -414,7 +397,6 @@ enum UserRole { }`, { provider: 'postgresql' }, ); - runCli('format', workDir); runCli('db push', workDir); const originalSchema = getSchema(workDir); @@ -426,15 +408,15 @@ enum UserRole { expect(pulledSchema).toContain('enum UserRole'); }); - it('should not modify schema with PostgreSQL-specific features', ({ skip }) => { + it('should not modify schema with PostgreSQL-specific features', async ({ skip }) => { const provider = getTestDbProvider(); if (provider !== 'postgresql') { skip(); return; } - const workDir = createProject( + const workDir = await createFormattedProject( `model User { - id String @id @default(cuid()) + id Int @id @default(autoincrement()) email String @unique status UserStatus @default(ACTIVE) posts Post[] @@ -448,7 +430,7 @@ model Post { id Int @id @default(autoincrement()) title String author User @relation(fields: [authorId], references: [id], onDelete: Cascade) - authorId String + authorId Int tags String[] @@schema("content") @@ -462,7 +444,6 @@ enum UserStatus { }`, { provider: 'postgresql' }, ); - runCli('format', workDir); runCli('db push', workDir); const originalSchema = getSchema(workDir); diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index 8b5d79d10..7820a0b5a 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts 
@@ -4,6 +4,7 @@ import { execSync } from 'node:child_process'; import fs from 'node:fs'; import path from 'node:path'; import { expect } from 'vitest'; +import { formatDocument } from '@zenstackhq/language'; const TEST_PG_CONFIG = { host: process.env['TEST_PG_HOST'] ?? 'localhost', @@ -63,8 +64,7 @@ export function getDefaultPrelude(options?: { provider?: 'sqlite' | 'postgresql' const ZMODEL_PRELUDE = `datasource db { provider = "${provider}" url = "${dbUrl}" -} -`; +}`; return ZMODEL_PRELUDE; } @@ -75,10 +75,19 @@ export function createProject( const workDir = createTestProject(); fs.mkdirSync(path.join(workDir, 'zenstack'), { recursive: true }); const schemaPath = path.join(workDir, 'zenstack/schema.zmodel'); - fs.writeFileSync(schemaPath, !options?.customPrelude ? `${getDefaultPrelude({ provider: options?.provider })}\n${zmodel}` : zmodel); + fs.writeFileSync(schemaPath, !options?.customPrelude ? `${getDefaultPrelude({ provider: options?.provider })}\n\n${zmodel}` : zmodel); return workDir; } +export async function createFormattedProject( + zmodel: string, + options?: { provider?: 'sqlite' | 'postgresql' | 'mysql' }, +) { + const fullContent = `${getDefaultPrelude({ provider: options?.provider })}\n\n${zmodel}`; + const formatted = await formatDocument(fullContent); + return createProject(formatted, { customPrelude: true, provider: options?.provider }); +} + export function runCli(command: string, cwd: string) { const cli = path.join(__dirname, '../dist/index.js'); execSync(`node ${cli} ${command}`, { cwd }); From 0f999b6d67b25809db851550e5b9f89962fca3ee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 20:49:55 +0100 Subject: [PATCH 65/83] fix(cli): refactor PostgreSQL type casting and fix index order Extracts PostgreSQL type casting logic into a reusable helper function to improve maintainability and ensure consistent attribute handling across all field types. 
Adjusts the table index sorting logic to better preserve the original database creation order while maintaining the priority of unique indexes. --- packages/cli/src/actions/pull/index.ts | 2 +- .../src/actions/pull/provider/postgresql.ts | 100 +++++++++++------- .../cli/src/actions/pull/provider/sqlite.ts | 2 +- 3 files changed, 65 insertions(+), 39 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index b94eb69d1..e9cb68f6d 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -331,7 +331,7 @@ export function syncTable({ } // Sort indexes: unique indexes first, then other indexes - const sortedIndexes = table.indexes.sort((a, b) => { + const sortedIndexes = table.indexes.reverse().sort((a, b) => { if (a.unique && !b.unique) return -1; if (!a.unique && b.unique) return 1; return 0; diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index ca70065ff..1e9dfcac3 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -1,8 +1,9 @@ -import type { Attribute, BuiltinType } from '@zenstackhq/language/ast'; -import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; +import type { Attribute, BuiltinType, Enum, Expression } from '@zenstackhq/language/ast'; +import { AstFactory, DataFieldAttributeFactory, ExpressionBuilder } from '@zenstackhq/language/factory'; import { Client } from 'pg'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; +import type { ZModelServices } from '@zenstackhq/language'; export const postgresql: IntrospectionProvider = { isSupportedFeature(feature) { @@ -112,46 +113,16 @@ export const postgresql: IntrospectionProvider = { getDefaultValue({ defaultValue, 
fieldType, services, enums }) { const val = defaultValue.trim(); - // Handle type casts early (PostgreSQL-specific pattern like 'value'::type) - if (val.includes('::')) { - const [value, type] = val - .replace(/'/g, '') - .split('::') - .map((s) => s.trim()) as [string, string]; - switch (type) { - case 'character varying': - case 'uuid': - case 'json': - case 'jsonb': - case 'text': - if (value === 'NULL') return null; - return (ab) => ab.StringLiteral.setValue(value); - case 'real': - return (ab) => ab.NumberLiteral.setValue(value); - default: { - const enumDef = enums.find((e) => getDbName(e, true) === type); - if (!enumDef) { - return (ab) => - ab.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => - a.setValue((v) => v.StringLiteral.setValue(val)), - ); - } - const enumField = enumDef.fields.find((v) => getDbName(v) === value); - if (!enumField) { - throw new Error( - `Enum value ${value} not found in enum ${type} for default value ${defaultValue}`, - ); - } - return (ab) => ab.ReferenceExpr.setTarget(enumField); - } - } - } - switch (fieldType) { case 'DateTime': if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)); } + + if (val.includes('::')) { + return typeCastingConvert({defaultValue,enums,val,services}); + } + // Fallback to string literal for other DateTime defaults return (ab) => ab.StringLiteral.setValue(val); @@ -160,12 +131,21 @@ export const postgresql: IntrospectionProvider = { if (val.startsWith('nextval(')) { return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services)); } + + if (val.includes('::')) { + return typeCastingConvert({defaultValue,enums,val,services}); + } + if (/^-?\d+$/.test(val)) { return (ab) => ab.NumberLiteral.setValue(val); } break; case 'Float': + if (val.includes('::')) { + return typeCastingConvert({defaultValue,enums,val,services}); + } + if (/^-?\d+\.\d+$/.test(val)) { const numVal = 
parseFloat(val); return (ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? numVal.toFixed(1) : String(numVal)); @@ -176,6 +156,10 @@ export const postgresql: IntrospectionProvider = { break; case 'Decimal': + if (val.includes('::')) { + return typeCastingConvert({defaultValue,enums,val,services}); + } + if (/^-?\d+\.\d+$/.test(val)) { const numVal = parseFloat(val); if (numVal === Math.floor(numVal)) { @@ -198,12 +182,20 @@ export const postgresql: IntrospectionProvider = { break; case 'String': + if (val.includes('::')) { + return typeCastingConvert({defaultValue,enums,val,services}); + } + if (val.startsWith("'") && val.endsWith("'")) { return (ab) => ab.StringLiteral.setValue(val.slice(1, -1).replace(/''/g, "'")); } break; } + if (val.includes('::')) { + return typeCastingConvert({defaultValue,enums,val,services}); + } + // Fallback handlers for values that don't match field type-specific patterns if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)); @@ -447,3 +439,37 @@ WHERE AND "cls"."relname" !~ '_prisma_migrations' ORDER BY "ns"."nspname", "cls"."relname" ASC; `; + +function typeCastingConvert({defaultValue, enums, val, services}:{val: string, enums: Enum[], defaultValue:string, services:ZModelServices}): ((builder: ExpressionBuilder) => AstFactory) | null { + const [value, type] = val + .replace(/'/g, '') + .split('::') + .map((s) => s.trim()) as [string, string]; + switch (type) { + case 'character varying': + case 'uuid': + case 'json': + case 'jsonb': + case 'text': + if (value === 'NULL') return null; + return (ab) => ab.StringLiteral.setValue(value); + case 'real': + return (ab) => ab.NumberLiteral.setValue(value); + default: { + const enumDef = enums.find((e) => getDbName(e, true) === type); + if (!enumDef) { + return (ab) => + ab.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => + a.setValue((v) => 
v.StringLiteral.setValue(val)), + ); + } + const enumField = enumDef.fields.find((v) => getDbName(v) === value); + if (!enumField) { + throw new Error( + `Enum value ${value} not found in enum ${type} for default value ${defaultValue}`, + ); + } + return (ab) => ab.ReferenceExpr.setTarget(enumField); + } + } +} \ No newline at end of file diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index d90c2cef3..a6961b009 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -142,7 +142,7 @@ export const sqlite: IntrospectionProvider = { nulls: null, })), }; - }).reverse(); // Reverse to maintain creation order + }); // Foreign keys mapping by column name const fkRows = all<{ From c1e717d3407dc524af3ed3a3ce2a116f4ba47e03 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 21:11:14 +0100 Subject: [PATCH 66/83] fix(cli): filter out auto-generated MySQL indexes Prevents foreign key indexes created automatically by MySQL from appearing in the introspected schema. This ensures the output reflects manually defined indexes and avoids redundancy in schema definitions. --- packages/cli/src/actions/pull/provider/mysql.ts | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index 66e103c21..702cc06b5 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -146,13 +146,20 @@ export const mysql: IntrospectionProvider = { (a.ordinal_position ?? 0) - (b.ordinal_position ?? 
0) ); + // Filter out auto-generated FK indexes (MySQL creates these automatically) + // Pattern: {Table}_{column}_fkey for single-column FK indexes + const filteredIndexes = (indexes || []).filter( + (idx: { name: string; columns: { name: string }[] }) => + !(idx.columns.length === 1 && idx.name === `${row.name}_${idx.columns[0]?.name}_fkey`) + ); + tables.push({ schema: '', // MySQL doesn't support multi-schema name: row.name, type: row.type as 'table' | 'view', definition: row.definition, columns: sortedColumns, - indexes: indexes || [], + indexes: filteredIndexes, }); } From 27c40bdf8ba8ade6ff4852c5d82ee90a031fed55 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 21:29:06 +0100 Subject: [PATCH 67/83] test(cli): support datasource extras in test utils Enhances the test utility helpers to allow passing extra datasource properties, such as multi-schema configurations for PostgreSQL. Refactors existing database pull tests to use these extra properties, ensuring the generated ZModel schema correctly reflects multi-schema environments while simplifying assertions. 
--- packages/cli/src/actions/pull/utils.ts | 9 ++++----- packages/cli/test/db/pull.test.ts | 10 +++------- packages/cli/test/utils.ts | 27 +++++++++++++++++++------- 3 files changed, 27 insertions(+), 19 deletions(-) diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index 38a5f0e9c..b46693afe 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -64,12 +64,11 @@ export function getDatasource(model: Model) { const schemasField = datasource.fields.find((f) => f.name === 'schemas'); const schemas = - (schemasField && - getLiteralArray(schemasField.value) - ?.map(getStringLiteral) - .filter((s) => s !== undefined)) || + (schemasField && + getLiteralArray(schemasField.value) + ?.filter((s) => s !== undefined)) as string[] || []; - + return { name: datasource.name, provider: getStringLiteral( diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index 9c0230141..d8d677258 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -353,7 +353,7 @@ model Post { @@schema("content") }`, - { provider: 'postgresql' }, + { provider: 'postgresql', extra:{ schemas: ["public", "content", "auth"] } }, ); runCli('db push', workDir); @@ -361,13 +361,11 @@ model Post { const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); const expectedSchema = generator.generate(model); - fs.writeFileSync(schemaFile, getDefaultPrelude({ provider: 'postgresql' })); + fs.writeFileSync(schemaFile, getDefaultPrelude({ provider: 'postgresql', extra:{ schemas: ["public", "content", "auth"]} })); runCli('db pull --indent 4', workDir); const restoredSchema = getSchema(workDir); expect(restoredSchema).toEqual(expectedSchema); - expect(restoredSchema).toContain('@@schema("auth")'); - expect(restoredSchema).toContain('@@schema("content")'); }); it('should preserve native PostgreSQL enums when schema exists', async ({ skip }) => { @@ 
-404,8 +402,6 @@ enum UserRole { const pulledSchema = getSchema(workDir); expect(pulledSchema).toEqual(originalSchema); - expect(pulledSchema).toContain('enum UserStatus'); - expect(pulledSchema).toContain('enum UserRole'); }); it('should not modify schema with PostgreSQL-specific features', async ({ skip }) => { @@ -442,7 +438,7 @@ enum UserStatus { INACTIVE SUSPENDED }`, - { provider: 'postgresql' }, + { provider: 'postgresql', extra:{ schemas: ["public", "content", "auth"] } }, ); runCli('db push', workDir); diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index 7820a0b5a..4a58598c2 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -42,7 +42,7 @@ function getTestDbName(provider: string) { ); } -export function getDefaultPrelude(options?: { provider?: 'sqlite' | 'postgresql' | 'mysql' }) { +export function getDefaultPrelude(options?: { provider?: 'sqlite' | 'postgresql' | 'mysql', extra?: Record }) { const provider = (options?.provider || getTestDbProvider()) ?? 'sqlite'; const dbName = getTestDbName(provider); let dbUrl: string; @@ -60,11 +60,24 @@ export function getDefaultPrelude(options?: { provider?: 'sqlite' | 'postgresql' default: throw new Error(`Unsupported provider: ${provider}`); } + // Build fields array for proper alignment (matching ZModelCodeGenerator) + const fields: [string, string][] = [ + ['provider', `"${provider}"`], + ['url', `"${dbUrl}"`], + ...Object.entries(options?.extra || {}).map(([k, v]) => { + const value = Array.isArray(v) ? 
`[${v.map(item => `"${item}"`).join(', ')}]` : `"${v}"`; + return [k, value] as [string, string]; + }), + ]; - const ZMODEL_PRELUDE = `datasource db { - provider = "${provider}" - url = "${dbUrl}" -}`; + // Calculate alignment padding based on longest field name + const longestName = Math.max(...fields.map(([name]) => name.length)); + const formattedFields = fields.map(([name, value]) => { + const padding = ' '.repeat(longestName - name.length + 1); + return ` ${name}${padding}= ${value}`; + }).join('\n'); + + const ZMODEL_PRELUDE = `datasource db {\n${formattedFields}\n}`; return ZMODEL_PRELUDE; } @@ -81,9 +94,9 @@ export function createProject( export async function createFormattedProject( zmodel: string, - options?: { provider?: 'sqlite' | 'postgresql' | 'mysql' }, + options?: { provider?: 'sqlite' | 'postgresql' | 'mysql', extra?: Record }, ) { - const fullContent = `${getDefaultPrelude({ provider: options?.provider })}\n\n${zmodel}`; + const fullContent = `${getDefaultPrelude({ provider: options?.provider, extra: options?.extra })}\n\n${zmodel}`; const formatted = await formatDocument(fullContent); return createProject(formatted, { customPrelude: true, provider: options?.provider }); } From 9b54b5ff4073bf06b10f29ee4183ed48da195509 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Sat, 31 Jan 2026 00:09:53 +0100 Subject: [PATCH 68/83] fix: address PR comments --- packages/cli/package.json | 1 - packages/cli/src/actions/db.ts | 8 ++-- packages/cli/src/actions/pull/index.ts | 40 ++++++++++++++----- .../cli/src/actions/pull/provider/mysql.ts | 10 ++--- .../src/actions/pull/provider/postgresql.ts | 6 +-- .../cli/src/actions/pull/provider/provider.ts | 2 +- .../cli/src/actions/pull/provider/sqlite.ts | 8 ++-- packages/cli/src/index.ts | 4 +- packages/language/src/factory/attribute.ts | 14 +++++-- 9 files changed, 60 insertions(+), 33 deletions(-) diff --git a/packages/cli/package.json b/packages/cli/package.json index 7a109f5e6..a0992a523 
100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -38,7 +38,6 @@ "dependencies": { "@dotenvx/dotenvx": "^1.51.0", "@zenstackhq/common-helpers": "workspace:*", - "@zenstackhq/language": "workspace:*", "@zenstackhq/schema": "workspace:*", "@zenstackhq/language": "workspace:*", "@zenstackhq/orm": "workspace:*", diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 28c60051f..702268f0b 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -149,7 +149,7 @@ async function runPull(options: PullOptions) { } // sync relation fields for (const relation of resolvedRelations) { - const simmilarRelations = resolvedRelations.filter((rr) => { + const similarRelations = resolvedRelations.filter((rr) => { return ( rr !== relation && ((rr.schema === relation.schema && @@ -170,7 +170,7 @@ async function runPull(options: PullOptions) { services, options, selfRelation, - simmilarRelations, + similarRelations: similarRelations, }); } @@ -390,8 +390,8 @@ async function runPull(options: PullOptions) { deletedFields.forEach((msg) => console.log(msg)); } - if (options.out && !fs.lstatSync(options.out).isFile()) { - throw new Error(`Output path ${options.out} is not a file`); + if (options.out && fs.existsSync(options.out) && !fs.lstatSync(options.out).isFile()) { + throw new Error(`Output path ${options.out} exists but is not a file`); } const generator = new ZModelCodeGenerator({ diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index e9cb68f6d..e68513961 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -76,8 +76,28 @@ export function syncEnums({ .filter((d) => isEnum(d)) .forEach((d) => { const factory = new EnumFactory().setName(d.name); + // Copy enum-level comments + if (d.comments?.length) { + factory.update({ comments: [...d.comments] }); + } + // Copy enum-level attributes (@@map, @@schema, etc.) 
+ if (d.attributes?.length) { + factory.update({ attributes: [...d.attributes] }); + } + // Copy fields with their attributes and comments d.fields.forEach((v) => { - factory.addField((builder) => builder.setName(v.name)); + factory.addField((builder) => { + builder.setName(v.name); + // Copy field-level comments + if (v.comments?.length) { + v.comments.forEach((c) => builder.addComment(c)); + } + // Copy field-level attributes (@map, etc.) + if (v.attributes?.length) { + builder.update({ attributes: [...v.attributes] }); + } + return builder; + }); }); model.declarations.push(factory.get({ $container: model })); }); @@ -322,8 +342,10 @@ export function syncTable({ ); } - const uniqueColumns = table.columns.filter((c) => c.unique || c.pk); - if(uniqueColumns.length === 0) { + const hasUniqueConstraint = + table.columns.some((c) => c.unique || c.pk) || + table.indexes.some((i) => i.unique); + if (!hasUniqueConstraint) { modelFactory.addAttribute((a) => a.setDecl(getAttributeRef('@@ignore', services))); modelFactory.comments.push( '/// The underlying table does not contain a valid unique identifier and can therefore currently not be handled by Zenstack Client.', @@ -415,14 +437,14 @@ export function syncRelation({ services, options, selfRelation, - simmilarRelations, + similarRelations, }: { model: Model; relation: Relation; services: ZModelServices; options: PullOptions; //self included - simmilarRelations: number; + similarRelations: number; selfRelation: boolean; }) { const idAttribute = getAttributeRef('@id', services); @@ -431,7 +453,7 @@ export function syncRelation({ const fieldMapAttribute = getAttributeRef('@map', services); const tableMapAttribute = getAttributeRef('@@map', services); - const includeRelationName = selfRelation || simmilarRelations > 0; + const includeRelationName = selfRelation || similarRelations > 0; if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) { throw new Error('Cannot find 
required attributes in the model.'); @@ -456,7 +478,7 @@ export function syncRelation({ const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? '_' : ''; - const relationName = `${relation.table}${simmilarRelations > 0 ? `_${relation.column}` : ''}To${relation.references.table}`; + const relationName = `${relation.table}${similarRelations > 0 ? `_${relation.column}` : ''}To${relation.references.table}`; const sourceNameFromReference = sourceField.name.toLowerCase().endsWith('id') ? `${resolveNameCasing("camel", sourceField.name.slice(0, -2)).name}${relation.type === 'many'? 's' : ''}` : undefined; @@ -464,7 +486,7 @@ export function syncRelation({ let { name: sourceFieldName } = resolveNameCasing( options.fieldCasing, - simmilarRelations > 0 + similarRelations > 0 ? `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` : `${(!sourceFieldFromReference? sourceNameFromReference : undefined) || resolveNameCasing("camel", targetModel.name).name}${relation.type === 'many'? 's' : ''}`, ); @@ -523,7 +545,7 @@ export function syncRelation({ const oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? '_' : ''; const { name: oppositeFieldName } = resolveNameCasing( options.fieldCasing, - simmilarRelations > 0 + similarRelations > 0 ? `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` : `${resolveNameCasing("camel", sourceModel.name).name}${relation.references.type === 'many'? 
's' : ''}`, ); diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index 702cc06b5..cb104eb1e 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -95,7 +95,7 @@ export const mysql: IntrospectionProvider = { getDefaultDatabaseType(type: BuiltinType) { switch (type) { case 'String': - return { type: 'varchar', precisition: 191 }; + return { type: 'varchar', precision: 191 }; case 'Boolean': // Boolean maps to 'boolean' (our synthetic type from tinyint(1)) // No precision needed since we handle the mapping in the query @@ -107,9 +107,9 @@ export const mysql: IntrospectionProvider = { case 'Float': return { type: 'double' }; case 'Decimal': - return { type: 'decimal', precisition: 65 }; + return { type: 'decimal', precision: 65 }; case 'DateTime': - return { type: 'datetime', precisition: 3 }; + return { type: 'datetime', precision: 3 }; case 'Json': return { type: 'json' }; case 'Bytes': @@ -338,8 +338,8 @@ export const mysql: IntrospectionProvider = { dbAttr && defaultDatabaseType && (defaultDatabaseType.type !== datatype || - (defaultDatabaseType.precisition && - defaultDatabaseType.precisition !== (length || precision))) + (defaultDatabaseType.precision && + defaultDatabaseType.precision !== (length || precision))) ) { const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); if (length || precision) { diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 1e9dfcac3..08a041b56 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -103,7 +103,7 @@ export const postgresql: IntrospectionProvider = { case 'Decimal': return { type: 'decimal' }; case 'DateTime': - return { type: 'timestamp', precisition: 3 }; + return { type: 'timestamp', precision: 3 }; case 'Json': return { type: 'jsonb' }; 
case 'Bytes': @@ -246,8 +246,8 @@ export const postgresql: IntrospectionProvider = { dbAttr && defaultDatabaseType && (defaultDatabaseType.type !== datatype || - (defaultDatabaseType.precisition && - defaultDatabaseType.precisition !== (length || precision))) + (defaultDatabaseType.precision && + defaultDatabaseType.precision !== (length || precision))) ) { const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); if (length || precision) { diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index 6edee0663..a3922b7a7 100644 --- a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -66,7 +66,7 @@ export interface IntrospectionProvider { type: BuiltinType | 'Unsupported'; isArray: boolean; }; - getDefaultDatabaseType(type: BuiltinType): { precisition?: number; type: string } | undefined; + getDefaultDatabaseType(type: BuiltinType): { precision?: number; type: string } | undefined; /** * Get the expression builder callback for a field's @default attribute value. * Returns null if no @default attribute should be added. 
diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index a6961b009..fcdbfbad7 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -115,7 +115,7 @@ export const sqlite: IntrospectionProvider = { // Unique columns detection via unique indexes with single column const uniqueSingleColumn = new Set(); - const uniqueIndexRows = idxList.filter((r) => r.unique === 1); + const uniqueIndexRows = idxList.filter((r) => r.unique === 1 && r.partial !== 1); for (const idx of uniqueIndexRows) { const idxCols = all<{ name: string }>(`PRAGMA index_info('${idx.name.replace(/'/g, "''")}')`); if (idxCols.length === 1 && idxCols[0]?.name) { @@ -134,7 +134,7 @@ export const sqlite: IntrospectionProvider = { valid: true, // SQLite does not expose index validity ready: true, // SQLite does not expose index readiness partial: idx.partial === 1, - predicate: null, // SQLite does not expose index predicate + predicate: idx.partial === 1 ? 
'[partial]' : null, // SQLite does not expose index predicate columns: idxCols.map((col) => ({ name: col.name, expression: null, @@ -363,8 +363,8 @@ export const sqlite: IntrospectionProvider = { dbAttr && defaultDatabaseType && (defaultDatabaseType.type !== datatype || - (defaultDatabaseType.precisition && - defaultDatabaseType.precisition !== (length || precision))) + (defaultDatabaseType.precision && + defaultDatabaseType.precision !== (length || precision))) ) { const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); if (length || precision) { diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index a7bb403e0..8d253cc3e 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -150,12 +150,12 @@ function createProgram() { .addOption(noVersionCheckOption) .addOption(new Option('-o, --out ', 'add custom output path for the introspected schema')) .addOption( - new Option('--model-casing ', 'set the casing of generated models').default( + new Option('--model-casing ', 'set the casing of generated models').default( 'none', ), ) .addOption( - new Option('--field-casing ', 'set the casing of generated fields').default( + new Option('--field-casing ', 'set the casing of generated fields').default( 'none', ), ) diff --git a/packages/language/src/factory/attribute.ts b/packages/language/src/factory/attribute.ts index 52aeebc7c..138d41c8f 100644 --- a/packages/language/src/factory/attribute.ts +++ b/packages/language/src/factory/attribute.ts @@ -21,9 +21,12 @@ export class DataFieldAttributeFactory extends AstFactory { super({ type: DataFieldAttribute, node: { args: [] } }); } setDecl(decl: Attribute) { + if (!decl) { + throw new Error('Attribute declaration is required'); + } this.decl = { - $refText: decl?.name ?? 
'', - ref: decl!, + $refText: decl.name, + ref: decl, }; this.update({ decl: this.decl, @@ -50,9 +53,12 @@ export class DataModelAttributeFactory extends AstFactory { super({ type: DataModelAttribute, node: { args: [] } }); } setDecl(decl: Attribute) { + if (!decl) { + throw new Error('Attribute declaration is required'); + } this.decl = { - $refText: decl?.name ?? '', - ref: decl!, + $refText: decl.name, + ref: decl, }; this.update({ decl: this.decl, From 3c48a175e06e15e050f9371267307a8d4208c05f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Tue, 3 Feb 2026 03:10:14 +0100 Subject: [PATCH 69/83] fix: address PR comments --- packages/cli/package.json | 1 - packages/cli/src/actions/action-utils.ts | 12 +- packages/cli/src/actions/db.ts | 246 +++++++++++++----- packages/cli/src/actions/pull/index.ts | 45 ++-- .../cli/src/actions/pull/provider/mysql.ts | 153 +++++------ .../src/actions/pull/provider/postgresql.ts | 120 +++++---- .../cli/src/actions/pull/provider/provider.ts | 5 +- .../cli/src/actions/pull/provider/sqlite.ts | 156 +++++------ packages/cli/src/actions/pull/utils.ts | 35 +-- packages/cli/src/index.ts | 15 +- packages/cli/test/db/pull.test.ts | 149 ++++++----- packages/cli/test/utils.ts | 14 +- packages/language/src/document.ts | 6 +- packages/language/src/factory/ast-factory.ts | 4 - packages/language/src/factory/declaration.ts | 4 +- packages/language/src/factory/expression.ts | 1 + .../language/src/zmodel-code-generator.ts | 40 +-- pnpm-lock.yaml | 123 +-------- 18 files changed, 570 insertions(+), 559 deletions(-) diff --git a/packages/cli/package.json b/packages/cli/package.json index a0992a523..ceb53e2fa 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -36,7 +36,6 @@ "./package.json": "./package.json" }, "dependencies": { - "@dotenvx/dotenvx": "^1.51.0", "@zenstackhq/common-helpers": "workspace:*", "@zenstackhq/schema": "workspace:*", "@zenstackhq/language": "workspace:*", diff --git 
a/packages/cli/src/actions/action-utils.ts b/packages/cli/src/actions/action-utils.ts index 86d55baa6..2e264593c 100644 --- a/packages/cli/src/actions/action-utils.ts +++ b/packages/cli/src/actions/action-utils.ts @@ -43,20 +43,20 @@ export function getSchemaFile(file?: string) { export async function loadSchemaDocument( schemaFile: string, - opts?: { keepImports?: boolean; returnServices?: false }, + opts?: { mergeImports?: boolean; returnServices?: false }, ): Promise; export async function loadSchemaDocument( schemaFile: string, - opts: { returnServices: true; keepImports?: boolean }, + opts: { returnServices: true; mergeImports?: boolean }, ): Promise<{ model: Model; services: ZModelServices }>; export async function loadSchemaDocument( schemaFile: string, - opts: { returnServices?: boolean; keepImports?: boolean } = {}, + opts: { returnServices?: boolean; mergeImports?: boolean } = {}, ) { - const returnServices = opts.returnServices || false; - const keepImports = opts.keepImports || false; + const returnServices = opts.returnServices ?? false; + const mergeImports = opts.mergeImports ?? 
true; - const loadResult = await loadDocument(schemaFile, [], keepImports); + const loadResult = await loadDocument(schemaFile, [], mergeImports); if (!loadResult.success) { loadResult.errors.forEach((err) => { console.error(colors.red(err)); diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 702268f0b..e9f730ec6 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,4 +1,3 @@ -import { config } from '@dotenvx/dotenvx'; import { formatDocument, ZModelCodeGenerator } from '@zenstackhq/language'; import { DataModel, Enum, type Model } from '@zenstackhq/language/ast'; import colors from 'colors'; @@ -14,9 +13,10 @@ import { requireDataSourceUrl, } from './action-utils'; import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; -import { providers } from './pull/provider'; +import { providers as pullProviders } from './pull/provider'; import { getDatasource, getDbName, getRelationFieldsKey, getRelationFkName } from './pull/utils'; import type { DataSourceProviderType } from '@zenstackhq/schema'; +import { CliError } from '../cli-error'; type PushOptions = { schema?: string; @@ -26,9 +26,9 @@ type PushOptions = { export type PullOptions = { schema?: string; - out?: string; - modelCasing: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; - fieldCasing: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; + output?: string; + modelCasing: 'pascal' | 'camel' | 'snake' | 'none'; + fieldCasing: 'pascal' | 'camel' | 'snake' | 'none'; alwaysMap: boolean; quote: 'single' | 'double'; indent: number; @@ -83,37 +83,38 @@ async function runPull(options: PullOptions) { const spinner = ora(); try { const schemaFile = getSchemaFile(options.schema); - const { model, services } = await loadSchemaDocument(schemaFile, { returnServices: true, keepImports: true }); - config({ - ignore: ['MISSING_ENV_FILE'], + + // Determine early if `--out` is a single file output (combined schema) or a directory export. 
+ const outPath = options.output ? path.resolve(options.output) : undefined; + const treatAsFile = + !!outPath && + ((fs.existsSync(outPath) && fs.lstatSync(outPath).isFile()) || path.extname(outPath) !== ''); + + const { model, services } = await loadSchemaDocument(schemaFile, { + returnServices: true, + mergeImports: treatAsFile, }); - const SUPPORTED_PROVIDERS = Object.keys(providers) as DataSourceProviderType[]; + + const SUPPORTED_PROVIDERS = Object.keys(pullProviders) as DataSourceProviderType[]; const datasource = getDatasource(model); if (!datasource) { - throw new Error('No datasource found in the schema.'); + throw new CliError('No datasource found in the schema.'); } if (!SUPPORTED_PROVIDERS.includes(datasource.provider)) { - throw new Error(`Unsupported datasource provider: ${datasource.provider}`); + throw new CliError(`Unsupported datasource provider: ${datasource.provider}`); } - const provider = providers[datasource.provider]; + const provider = pullProviders[datasource.provider]; if (!provider) { - throw new Error(`No introspection provider found for: ${datasource.provider}`); + throw new CliError(`No introspection provider found for: ${datasource.provider}`); } spinner.start('Introspecting database...'); - const { enums: allEnums, tables: allTables } = await provider.introspect(datasource.url); + const { enums, tables } = await provider.introspect(datasource.url, { schemas: datasource.allSchemas }); spinner.succeed('Database introspected'); - const enums = provider.isSupportedFeature('Schema') - ? allEnums.filter((e) => datasource.allSchemas.includes(e.schema_name)) - : allEnums; - const tables = provider.isSupportedFeature('Schema') - ? 
allTables.filter((t) => datasource.allSchemas.includes(t.schema)) - : allTables; - console.log(colors.blue('Syncing schema...')); const newModel: Model = { @@ -122,8 +123,9 @@ async function runPull(options: PullOptions) { $containerProperty: undefined, $containerIndex: undefined, declarations: [...model.declarations.filter((d) => ['DataSource'].includes(d.$type))], - imports: [], + imports: model.imports, }; + syncEnums({ dbEnums: enums, model: newModel, @@ -176,9 +178,10 @@ async function runPull(options: PullOptions) { console.log(colors.blue('Schema synced')); - const cwd = new URL(`file://${process.cwd()}`).pathname; + const baseDir = path.dirname(path.resolve(schemaFile)); + const baseDirUrlPath = new URL(`file://${baseDir}`).pathname; const docs = services.shared.workspace.LangiumDocuments.all - .filter(({ uri }) => uri.path.toLowerCase().startsWith(cwd.toLowerCase())) + .filter(({ uri }) => uri.path.toLowerCase().startsWith(baseDirUrlPath.toLowerCase())) .toArray(); const docsSet = new Set(docs.map((d) => d.uri.toString())); @@ -186,11 +189,30 @@ async function runPull(options: PullOptions) { const deletedModels: string[] = []; const deletedEnums: string[] = []; - const addedFields: string[] = []; - const deletedAttributes: string[] = []; - const deletedFields: string[] = []; + const addedModels: string[] = []; + const addedEnums: string[] = []; + // Hierarchical change tracking: model -> field changes -> attribute changes + type ModelChanges = { + addedFields: string[]; + deletedFields: string[]; + addedAttributes: string[]; + deletedAttributes: string[]; + }; + const modelChanges = new Map(); + + const getModelChanges = (modelName: string): ModelChanges => { + if (!modelChanges.has(modelName)) { + modelChanges.set(modelName, { + addedFields: [], + deletedFields: [], + addedAttributes: [], + deletedAttributes: [], + }); + } + return modelChanges.get(modelName)!; + }; - //Delete models + // Delete models 
services.shared.workspace.IndexManager.allElements('DataModel', docsSet) .filter( (declaration) => @@ -216,18 +238,22 @@ async function runPull(options: PullOptions) { model.declarations.splice(index, 1); deletedEnums.push(colors.red(`- Enum ${decl.name} deleted`)); }); - // + // Add/update models and their fields newModel.declarations .filter((d) => [DataModel, Enum].includes(d.$type)) .forEach((_declaration) => { const newDataModel = _declaration as DataModel | Enum; - const declarations = services.shared.workspace.IndexManager.allElements( - newDataModel.$type, - docsSet, - ).toArray(); + const declarations = services.shared.workspace.IndexManager.allElements(newDataModel.$type, docsSet).toArray(); const originalDataModel = declarations.find((d) => getDbName(d.node as any) === getDbName(newDataModel)) ?.node as DataModel | Enum | undefined; if (!originalDataModel) { + + if (newDataModel.$type === 'DataModel') { + addedModels.push(colors.green(`+ Model ${newDataModel.name} added`)); + } else if (newDataModel.$type === 'Enum') { + addedEnums.push(colors.green(`+ Enum ${newDataModel.name} added`)); + } + model.declarations.push(newDataModel); (newDataModel as any).$container = model; newDataModel.fields.forEach((f) => { @@ -245,6 +271,13 @@ async function runPull(options: PullOptions) { // Prioritized matching: exact db name > relation fields key > relation FK name > type reference let originalFields = originalDataModel.fields.filter((d) => getDbName(d) === getDbName(f)); + // If this is a back-reference relation field (has @relation but no `fields` arg), silently skip + const isRelationField = + f.$type === 'DataField' && !!(f as any).attributes?.some((a: any) => a?.decl?.ref?.name === '@relation'); + if (originalFields.length === 0 && isRelationField && !getRelationFieldsKey(f as any)) { + return; + } + if (originalFields.length === 0) { // Try matching by relation fields key (the `fields` attribute in @relation) // This matches relation fields by their FK field 
references @@ -268,6 +301,9 @@ async function runPull(options: PullOptions) { if (originalFields.length === 0) { // Try matching by type reference + // We need this because for relations that don't have @relation, we can only check if the original exists by the field type. + // Yes, in this case it can potentially result in multiple original fields, but we only want to ensure that at least one relation exists. + // In the future, we might implement some logic to detect how many of these types of relations we need and add/remove fields based on this. originalFields = originalDataModel.fields.filter( (d) => f.$type === 'DataField' && @@ -292,9 +328,9 @@ async function runPull(options: PullOptions) { return; } const originalField = originalFields.at(0); - Object.freeze(originalField); + if (!originalField) { - addedFields.push(colors.green(`+ Field ${f.name} added to ${originalDataModel.name}`)); + getModelChanges(originalDataModel.name).addedFields.push(colors.green(`+ ${f.name}`)); (f as any).$container = originalDataModel; originalDataModel.fields.push(f as any); if (f.$type === 'DataField' && f.type.reference?.ref) { @@ -308,7 +344,7 @@ async function runPull(options: PullOptions) { } return; } - + // Track deleted attributes (in original but not in new) originalField.attributes .filter( (attr) => @@ -319,8 +355,21 @@ async function runPull(options: PullOptions) { const field = attr.$container; const index = field.attributes.findIndex((d) => d === attr); field.attributes.splice(index, 1); - deletedAttributes.push( - colors.yellow(`- Attribute ${attr.decl.$refText} deleted from field: ${field.name}`), + getModelChanges(originalDataModel.name).deletedAttributes.push( + colors.yellow(`- ${attr.decl.$refText} from field: ${originalDataModel.name}.${field.name}`), + ); + }); + + // Track added attributes (in new but not in original) + f.attributes + .filter( + (attr) => + !originalField.attributes.find((d) => d.decl.$refText === attr.decl.$refText) && + !['@map', 
'@@map', '@default', '@updatedAt'].includes(attr.decl.$refText), + ) + .forEach((attr) => { + getModelChanges(originalDataModel.name).addedAttributes.push( + colors.green(`+ ${attr.decl.$refText} to field: ${originalDataModel.name}.${f.name}`), ); }); }); @@ -361,37 +410,80 @@ async function runPull(options: PullOptions) { const _model = f.$container; const index = _model.fields.findIndex((d) => d === f); _model.fields.splice(index, 1); - deletedFields.push(colors.red(`- Field ${f.name} deleted from ${_model.name}`)); + getModelChanges(_model.name).deletedFields.push(colors.red(`- ${f.name}`)); }); }); if (deletedModels.length > 0) { console.log(colors.bold('\nDeleted Models:')); - deletedModels.forEach((msg) => console.log(msg)); + deletedModels.forEach((msg) => { + console.log(msg); + }); } if (deletedEnums.length > 0) { console.log(colors.bold('\nDeleted Enums:')); - deletedEnums.forEach((msg) => console.log(msg)); + deletedEnums.forEach((msg) => { + console.log(msg); + }); } - if (addedFields.length > 0) { - console.log(colors.bold('\nAdded Fields:')); - addedFields.forEach((msg) => console.log(msg)); + if (addedModels.length > 0) { + console.log(colors.bold('\nAdded Models:')); + addedModels.forEach((msg) => { + console.log(msg); + }); } - if (deletedAttributes.length > 0) { - console.log(colors.bold('\nDeleted Attributes:')); - deletedAttributes.forEach((msg) => console.log(msg)); + if (addedEnums.length > 0) { + console.log(colors.bold('\nAdded Enums:')); + addedEnums.forEach((msg) => { + console.log(msg); + }); } - if (deletedFields.length > 0) { - console.log(colors.bold('\nDeleted Fields:')); - deletedFields.forEach((msg) => console.log(msg)); - } + // Print hierarchical model changes + if (modelChanges.size > 0) { + console.log(colors.bold('\nModel Changes:')); + modelChanges.forEach((changes, modelName) => { + const hasChanges = + changes.addedFields.length > 0 || + changes.deletedFields.length > 0 || + changes.addedAttributes.length > 0 || + 
changes.deletedAttributes.length > 0; + + if (hasChanges) { + console.log(colors.cyan(` ${modelName}:`)); + + if (changes.addedFields.length > 0) { + console.log(colors.gray(' Added Fields:')); + changes.addedFields.forEach((msg) => { + console.log(` ${msg}`); + }); + } + + if (changes.deletedFields.length > 0) { + console.log(colors.gray(' Deleted Fields:')); + changes.deletedFields.forEach((msg) => { + console.log(` ${msg}`); + }); + } + + if (changes.addedAttributes.length > 0) { + console.log(colors.gray(' Added Attributes:')); + changes.addedAttributes.forEach((msg) => { + console.log(` ${msg}`); + }); + } - if (options.out && fs.existsSync(options.out) && !fs.lstatSync(options.out).isFile()) { - throw new Error(`Output path ${options.out} exists but is not a file`); + if (changes.deletedAttributes.length > 0) { + console.log(colors.gray(' Deleted Attributes:')); + changes.deletedAttributes.forEach((msg) => { + console.log(` ${msg}`); + }); + } + } + }); } const generator = new ZModelCodeGenerator({ @@ -399,17 +491,47 @@ async function runPull(options: PullOptions) { indent: options.indent, }); - if (options.out) { - const zmodelSchema = await formatDocument(generator.generate(newModel)); - - console.log(colors.blue(`Writing to ${options.out}`)); + if (options.output) { + if (treatAsFile) { + const zmodelSchema = await formatDocument(generator.generate(newModel)); + console.log(colors.blue(`Writing to ${outPath}`)); + fs.mkdirSync(path.dirname(outPath), { recursive: true }); + fs.writeFileSync(outPath, zmodelSchema); + } else { + // Otherwise treat `--out` as a directory path. Create it if needed. + fs.mkdirSync(outPath!, { recursive: true }); + + // Preserve the directory structure relative to the schema file location (options.schema base). 
+ const baseDir = path.dirname(path.resolve(schemaFile)); + const baseDirUrlPath = new URL(`file://${baseDir}`).pathname; + + for (const { + uri, + parseResult: { value: documentModel }, + } of docs) { + const zmodelSchema = await formatDocument(generator.generate(documentModel)); + + // Map input file path -> output file path under `--out` + let relPath = uri.path; + if (relPath.toLowerCase().startsWith(baseDirUrlPath.toLowerCase())) { + relPath = relPath.slice(baseDirUrlPath.length); + } + relPath = relPath.replace(/^\/+/, ''); - const outPath = options.out ? path.resolve(options.out) : schemaFile; + // Ensure consistent platform-specific separators for filesystem writes + const targetFile = path.join(outPath!, ...relPath.split('/')); - fs.writeFileSync(outPath, zmodelSchema); + fs.mkdirSync(path.dirname(targetFile), { recursive: true }); + console.log(colors.blue(`Writing to ${targetFile}`)); + fs.writeFileSync(targetFile, zmodelSchema); + } + } } else { - for (const { uri, parseResult: { value: model } } of docs) { - const zmodelSchema = await formatDocument(generator.generate(model)); + for (const { + uri, + parseResult: { value: documentModel }, + } of docs) { + const zmodelSchema = await formatDocument(generator.generate(documentModel)); console.log(colors.blue(`Writing to ${uri.path}`)); fs.writeFileSync(uri.fsPath, zmodelSchema); } @@ -421,4 +543,4 @@ async function runPull(options: PullOptions) { console.error(error); throw error; } -} \ No newline at end of file +} diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index e68513961..fda1f54fe 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -16,6 +16,7 @@ import { import type { PullOptions } from '../db'; import type { Cascade, IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider'; import { getAttributeRef, getDbName, getEnumRef } from './utils'; +import { CliError } from 
'../../cli-error'; export function syncEnums({ dbEnums, @@ -90,7 +91,9 @@ export function syncEnums({ builder.setName(v.name); // Copy field-level comments if (v.comments?.length) { - v.comments.forEach((c) => builder.addComment(c)); + v.comments.forEach((c) => { + builder.addComment(c); + }); } // Copy field-level attributes (@map, etc.) if (v.attributes?.length) { @@ -104,7 +107,7 @@ export function syncEnums({ } } -function resolveNameCasing(casing: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none', originalName: string) { +function resolveNameCasing(casing: 'pascal' | 'camel' | 'snake' | 'none', originalName: string) { let name = originalName; const fieldPrefix = /[0-9]/g.test(name.charAt(0)) ? '_' : ''; @@ -118,9 +121,6 @@ function resolveNameCasing(casing: 'pascal' | 'camel' | 'snake' | 'kebab' | 'non case 'snake': name = toSnakeCase(originalName); break; - case 'kebab': - name = toKebabCase(originalName); - break; } return { @@ -144,13 +144,6 @@ function toSnakeCase(str: string): string { .toLowerCase(); } -function toKebabCase(str: string): string { - return str - .replace(/[_ ]+/g, '-') - .replace(/([a-z0-9])([A-Z])/g, '$1-$2') - .toLowerCase(); -} - export type Relation = { schema: string; table: string; @@ -203,7 +196,7 @@ export function syncTable({ !modelUniqueAttribute || !modelindexAttribute ) { - throw new Error('Cannot find required attributes in the model.'); + throw new CliError('Cannot find required attributes in the model.'); } const relations: Relation[] = []; @@ -248,13 +241,13 @@ export function syncTable({ typeBuilder.setArray(builtinType.isArray); typeBuilder.setOptional(column.nullable); - if (column.options.length > 0) { - const ref = model.declarations.find((d) => isEnum(d) && getDbName(d) === column.datatype) as + if (column.datatype === 'enum') { + const ref = model.declarations.find((d) => isEnum(d) && getDbName(d) === column.datatype_name) as | Enum | undefined; if (!ref) { - throw new Error(`Enum ${column.datatype} not found`); + 
throw new CliError(`Enum ${column.datatype_name} not found`); } typeBuilder.setReference(ref); } else { @@ -288,6 +281,8 @@ export function syncTable({ if (column.default) { const defaultExprBuilder = provider.getDefaultValue({ fieldType: builtinType.type, + datatype: column.datatype, + datatype_name: column.datatype_name, defaultValue: column.default, services, enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], @@ -333,7 +328,7 @@ export function syncTable({ pkColumns.forEach((c) => { const ref = modelFactory.node.fields.find((f) => getDbName(f) === c); if (!ref) { - throw new Error(`Field ${c} not found`); + throw new CliError(`Field ${c} not found`); } arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); }); @@ -397,7 +392,7 @@ export function syncTable({ index.columns.forEach((c) => { const ref = modelFactory.node.fields.find((f) => getDbName(f) === c.name); if (!ref) { - throw new Error(`Column ${c.name} not found in model ${table.name}`); + throw new CliError(`Column ${c.name} not found in model ${table.name}`); } arrayExpr.addItem((itemBuilder) => { const refExpr = itemBuilder.ReferenceExpr.setTarget(ref); @@ -418,7 +413,7 @@ export function syncTable({ return attr } - + ); }); if (table.schema && table.schema !== '' && table.schema !== defaultSchema) { @@ -456,7 +451,7 @@ export function syncRelation({ const includeRelationName = selfRelation || similarRelations > 0; if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) { - throw new Error('Cannot find required attributes in the model.'); + throw new CliError('Cannot find required attributes in the model.'); } const sourceModel = model.declarations.find((d) => d.$type === 'DataModel' && getDbName(d) === relation.table) as @@ -483,7 +478,7 @@ export function syncRelation({ const sourceNameFromReference = sourceField.name.toLowerCase().endsWith('id') ? 
`${resolveNameCasing("camel", sourceField.name.slice(0, -2)).name}${relation.type === 'many'? 's' : ''}` : undefined; const sourceFieldFromReference = sourceModel.fields.find((f) => f.name === sourceNameFromReference); - + let { name: sourceFieldName } = resolveNameCasing( options.fieldCasing, similarRelations > 0 @@ -516,22 +511,22 @@ export function syncRelation({ const onDeleteDefault = relation.nullable ? 'SET NULL' : 'RESTRICT'; if (relation.foreign_key_on_delete && relation.foreign_key_on_delete !== onDeleteDefault) { const enumRef = getEnumRef('ReferentialAction', services); - if (!enumRef) throw new Error('ReferentialAction enum not found'); + if (!enumRef) throw new CliError('ReferentialAction enum not found'); const enumFieldRef = enumRef.fields.find( (f) => f.name.toLowerCase() === relation.foreign_key_on_delete!.replace(/ /g, '').toLowerCase(), ); - if (!enumFieldRef) throw new Error(`ReferentialAction ${relation.foreign_key_on_delete} not found`); + if (!enumFieldRef) throw new CliError(`ReferentialAction ${relation.foreign_key_on_delete} not found`); ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onDelete'); } // Prisma default: onUpdate is Cascade if (relation.foreign_key_on_update && relation.foreign_key_on_update !== 'CASCADE') { const enumRef = getEnumRef('ReferentialAction', services); - if (!enumRef) throw new Error('ReferentialAction enum not found'); + if (!enumRef) throw new CliError('ReferentialAction enum not found'); const enumFieldRef = enumRef.fields.find( (f) => f.name.toLowerCase() === relation.foreign_key_on_update!.replace(/ /g, '').toLowerCase(), ); - if (!enumFieldRef) throw new Error(`ReferentialAction ${relation.foreign_key_on_update} not found`); + if (!enumFieldRef) throw new CliError(`ReferentialAction ${relation.foreign_key_on_update} not found`); ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onUpdate'); } diff --git a/packages/cli/src/actions/pull/provider/mysql.ts 
b/packages/cli/src/actions/pull/provider/mysql.ts index cb104eb1e..d8bdc33b8 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -2,6 +2,7 @@ import type { Attribute, BuiltinType } from '@zenstackhq/language/ast'; import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; +import { CliError } from '../../../cli-error'; // Note: We dynamically import mysql2 inside the async function to avoid // requiring it at module load time for environments that don't use MySQL. @@ -10,10 +11,7 @@ export const mysql: IntrospectionProvider = { isSupportedFeature(feature) { switch (feature) { case 'NativeEnum': - // MySQL enums are defined inline in column definitions, not as separate types. - // They can't be shared across tables like PostgreSQL enums. - // Return false to preserve existing enums from the schema. - return false; + return true; case 'Schema': default: return false; @@ -126,7 +124,7 @@ export const mysql: IntrospectionProvider = { const databaseName = url.pathname.replace('/', ''); if (!databaseName) { - throw new Error('Database name not found in connection string'); + throw new CliError('Database name not found in connection string'); } // Introspect tables @@ -141,10 +139,19 @@ export const mysql: IntrospectionProvider = { const indexes = typeof row.indexes === 'string' ? JSON.parse(row.indexes) : row.indexes; // Sort columns by ordinal_position to preserve database column order - const sortedColumns = (columns || []).sort( - (a: { ordinal_position?: number }, b: { ordinal_position?: number }) => - (a.ordinal_position ?? 0) - (b.ordinal_position ?? 0) - ); + const sortedColumns = (columns || []) + .sort( + (a: { ordinal_position?: number }, b: { ordinal_position?: number }) => + (a.ordinal_position ?? 
0) - (b.ordinal_position ?? 0) + ) + .map((col: { options?: string | string[] | null }) => ({ + ...col, + // Parse enum options from COLUMN_TYPE if present (e.g., "enum('val1','val2')") + options: + typeof col.options === 'string' + ? parseEnumValues(col.options) + : col.options ?? [], + })); // Filter out auto-generated FK indexes (MySQL creates these automatically) // Pattern: {Table}_{column}_fkey for single-column FK indexes @@ -179,13 +186,12 @@ export const mysql: IntrospectionProvider = { values, }; }); - return { tables, enums }; } finally { await connection.end(); } }, - getDefaultValue({ defaultValue, fieldType, services, enums }) { + getDefaultValue({ defaultValue, fieldType, datatype, datatype_name, services, enums }) { const val = defaultValue.trim(); // Handle NULL early @@ -193,6 +199,19 @@ export const mysql: IntrospectionProvider = { return null; } + // Handle enum defaults + if (datatype === 'enum' && datatype_name) { + const enumDef = enums.find((e) => getDbName(e) === datatype_name); + if (enumDef) { + // Strip quotes from the value (MySQL returns 'value') + const enumValue = val.startsWith("'") && val.endsWith("'") ? 
val.slice(1, -1) : val; + const enumField = enumDef.fields.find((f) => getDbName(f) === enumValue); + if (enumField) { + return (ab) => ab.ReferenceExpr.setTarget(enumField); + } + } + } + switch (fieldType) { case 'DateTime': if (/^CURRENT_TIMESTAMP(\(\d*\))?$/i.test(val) || val.toLowerCase() === 'current_timestamp()' || val.toLowerCase() === 'now()') { @@ -206,42 +225,30 @@ export const mysql: IntrospectionProvider = { if (val.toLowerCase() === 'auto_increment') { return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services)); } - if (/^-?\d+$/.test(val)) { - return (ab) => ab.NumberLiteral.setValue(val); - } - break; + return (ab) => ab.NumberLiteral.setValue(val); case 'Float': - if (/^-?\d+\.\d+$/.test(val)) { - const numVal = parseFloat(val); - return (ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? numVal.toFixed(1) : String(numVal)); - } - if (/^-?\d+$/.test(val)) { - return (ab) => ab.NumberLiteral.setValue(val + '.0'); - } - break; + if (/^-?\d+\.\d+$/.test(val)) { + const numVal = parseFloat(val); + return (ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? numVal.toFixed(1) : String(numVal)); + } + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val + '.0'); + } + return (ab) => ab.NumberLiteral.setValue(val); case 'Decimal': - if (/^-?\d+\.\d+$/.test(val)) { - const numVal = parseFloat(val); - if (numVal === Math.floor(numVal)) { - return (ab) => ab.NumberLiteral.setValue(numVal.toFixed(2)); - } - return (ab) => ab.NumberLiteral.setValue(String(numVal)); - } - if (/^-?\d+$/.test(val)) { - return (ab) => ab.NumberLiteral.setValue(val + '.00'); - } - break; + if (/^-?\d+\.\d+$/.test(val)) { + const numVal = parseFloat(val); + return (ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? 
numVal.toFixed(2) : String(numVal)); + } + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val + '.00'); + } + return (ab) => ab.NumberLiteral.setValue(val); case 'Boolean': - if (val === 'true' || val === '1' || val === "b'1'") { - return (ab) => ab.BooleanLiteral.setValue(true); - } - if (val === 'false' || val === '0' || val === "b'0'") { - return (ab) => ab.BooleanLiteral.setValue(false); - } - break; + return (ab) => ab.BooleanLiteral.setValue(val.toLowerCase() === 'true' || val === '1' || val === "b'1'"); case 'String': if (val.startsWith("'") && val.endsWith("'")) { @@ -258,30 +265,7 @@ export const mysql: IntrospectionProvider = { if (val.toLowerCase() === 'uuid()') { return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('uuid', services)); } - if (/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(val)) { - return (ab) => ab.StringLiteral.setValue(val); - } - break; - } - - // Fallback handlers for values that don't match field type-specific patterns - if (/^CURRENT_TIMESTAMP(\(\d*\))?$/i.test(val) || val.toLowerCase() === 'current_timestamp()' || val.toLowerCase() === 'now()') { - return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)); - } - - if (val.toLowerCase() === 'auto_increment') { - return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services)); - } - - if (val === 'true' || val === "b'1'") { - return (ab) => ab.BooleanLiteral.setValue(true); - } - if (val === 'false' || val === "b'0'") { - return (ab) => ab.BooleanLiteral.setValue(false); - } - - if (/^-?\d+\.\d+$/.test(val) || /^-?\d+$/.test(val)) { - return (ab) => ab.NumberLiteral.setValue(val); + return (ab) => ab.StringLiteral.setValue(val); } if (val.startsWith("'") && val.endsWith("'")) { @@ -307,16 +291,8 @@ export const mysql: IntrospectionProvider = { ); } - // Handle unquoted string values - if (/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(val)) { - return (ab) => ab.StringLiteral.setValue(val); - } - - // For any other unhandled cases, use 
dbgenerated - return (ab) => - ab.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => - a.setValue((v) => v.StringLiteral.setValue(val)), - ); + console.warn(`Unsupported default value type: "${defaultValue}" for field type "${fieldType}". Skipping default value.`); + return null; }, getFieldAttributes({ fieldName, fieldType, datatype, length, precision, services }) { @@ -364,7 +340,7 @@ SELECT WHEN 'VIEW' THEN 'view' ELSE NULL END AS \`type\`, - CASE + CASE WHEN t.TABLE_TYPE = 'VIEW' THEN v.VIEW_DEFINITION ELSE NULL END AS \`definition\`, @@ -374,23 +350,30 @@ SELECT SELECT JSON_OBJECT( 'ordinal_position', c.ORDINAL_POSITION, 'name', c.COLUMN_NAME, - 'datatype', CASE + 'datatype', CASE WHEN c.DATA_TYPE = 'tinyint' AND c.COLUMN_TYPE = 'tinyint(1)' THEN 'boolean' ELSE c.DATA_TYPE END, + 'datatype_name', CASE + WHEN c.DATA_TYPE = 'enum' THEN CONCAT(t.TABLE_NAME, '_', c.COLUMN_NAME) + ELSE NULL + END, 'datatype_schema', '', 'length', c.CHARACTER_MAXIMUM_LENGTH, 'precision', COALESCE(c.NUMERIC_PRECISION, c.DATETIME_PRECISION), 'nullable', c.IS_NULLABLE = 'YES', - 'default', CASE + 'default', CASE WHEN c.EXTRA LIKE '%auto_increment%' THEN 'auto_increment' - ELSE c.COLUMN_DEFAULT + ELSE c.COLUMN_DEFAULT END, 'pk', c.COLUMN_KEY = 'PRI', 'unique', c.COLUMN_KEY = 'UNI', 'unique_name', CASE WHEN c.COLUMN_KEY = 'UNI' THEN c.COLUMN_NAME ELSE NULL END, 'computed', c.GENERATION_EXPRESSION IS NOT NULL AND c.GENERATION_EXPRESSION != '', - 'options', JSON_ARRAY(), + 'options', CASE + WHEN c.DATA_TYPE = 'enum' THEN c.COLUMN_TYPE + ELSE NULL + END, 'foreign_key_schema', NULL, 'foreign_key_table', kcu_fk.REFERENCED_TABLE_NAME, 'foreign_key_column', kcu_fk.REFERENCED_COLUMN_NAME, @@ -399,9 +382,9 @@ SELECT 'foreign_key_on_delete', rc.DELETE_RULE ) AS col_json FROM INFORMATION_SCHEMA.COLUMNS c - LEFT JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu_fk - ON c.TABLE_SCHEMA = kcu_fk.TABLE_SCHEMA - AND c.TABLE_NAME = kcu_fk.TABLE_NAME + LEFT JOIN 
INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu_fk + ON c.TABLE_SCHEMA = kcu_fk.TABLE_SCHEMA + AND c.TABLE_NAME = kcu_fk.TABLE_NAME AND c.COLUMN_NAME = kcu_fk.COLUMN_NAME AND kcu_fk.REFERENCED_TABLE_NAME IS NOT NULL LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS rc @@ -449,7 +432,7 @@ SELECT ) AS idxs_ordered ) AS \`indexes\` FROM INFORMATION_SCHEMA.TABLES t -LEFT JOIN INFORMATION_SCHEMA.VIEWS v +LEFT JOIN INFORMATION_SCHEMA.VIEWS v ON t.TABLE_SCHEMA = v.TABLE_SCHEMA AND t.TABLE_NAME = v.TABLE_NAME WHERE t.TABLE_SCHEMA = '${databaseName}' AND t.TABLE_TYPE IN ('BASE TABLE', 'VIEW') @@ -460,7 +443,7 @@ ORDER BY t.TABLE_NAME; function getEnumIntrospectionQuery(databaseName: string) { return ` -SELECT +SELECT c.TABLE_NAME AS table_name, c.COLUMN_NAME AS column_name, c.COLUMN_TYPE AS column_type diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 08a041b56..1c035324f 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -4,15 +4,12 @@ import { Client } from 'pg'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; import type { ZModelServices } from '@zenstackhq/language'; +import { CliError } from '../../../cli-error'; export const postgresql: IntrospectionProvider = { isSupportedFeature(feature) { - switch (feature) { - case 'Schema': - return true; - default: - return false; - } + const supportedFeatures = ['Schema', 'NativeEnum']; + return supportedFeatures.includes(feature); }, getBuiltinType(type) { const t = (type || '').toLowerCase(); @@ -76,17 +73,25 @@ export const postgresql: IntrospectionProvider = { return { type: 'Unsupported' as const, isArray }; } }, - async introspect(connectionString: string): Promise { + async introspect(connectionString: string, options: { schemas: string[] 
}): Promise { const client = new Client({ connectionString }); await client.connect(); - const { rows: tables } = await client.query(tableIntrospectionQuery); - const { rows: enums } = await client.query(enumIntrospectionQuery); + try { + const { rows: tables } = await client.query(tableIntrospectionQuery); + const { rows: enums } = await client.query(enumIntrospectionQuery); - return { - enums, - tables, - }; + // Filter tables and enums to only include those from the selected schemas + const filteredTables = tables.filter((t) => options.schemas.includes(t.schema)); + const filteredEnums = enums.filter((e) => options.schemas.includes(e.schema_name)); + + return { + enums: filteredEnums, + tables: filteredTables, + }; + } finally { + await client.end(); + } }, getDefaultDatabaseType(type: BuiltinType) { switch (type) { @@ -110,15 +115,30 @@ export const postgresql: IntrospectionProvider = { return { type: 'bytea' }; } }, - getDefaultValue({ defaultValue, fieldType, services, enums }) { + getDefaultValue({ defaultValue, fieldType, datatype, datatype_name, services, enums }) { const val = defaultValue.trim(); + // Handle enum defaults (PostgreSQL returns 'value'::enum_type) + if (datatype === 'enum' && datatype_name) { + const enumDef = enums.find((e) => getDbName(e) === datatype_name); + if (enumDef) { + // Extract the enum value from the default (format: 'VALUE'::"enum_type") + const enumValue = val.replace(/'/g, '').split('::')[0]?.trim(); + const enumField = enumDef.fields.find((f) => getDbName(f) === enumValue); + if (enumField) { + return (ab) => ab.ReferenceExpr.setTarget(enumField); + } + } + // Fall through to typeCastingConvert if datatype_name lookup fails + return typeCastingConvert({defaultValue,enums,val,services}); + } + switch (fieldType) { case 'DateTime': if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)); } - + if (val.includes('::')) { return 
typeCastingConvert({defaultValue,enums,val,services}); } @@ -135,11 +155,7 @@ export const postgresql: IntrospectionProvider = { if (val.includes('::')) { return typeCastingConvert({defaultValue,enums,val,services}); } - - if (/^-?\d+$/.test(val)) { - return (ab) => ab.NumberLiteral.setValue(val); - } - break; + return (ab) => ab.NumberLiteral.setValue(val); case 'Float': if (val.includes('::')) { @@ -153,7 +169,7 @@ export const postgresql: IntrospectionProvider = { if (/^-?\d+$/.test(val)) { return (ab) => ab.NumberLiteral.setValue(val + '.0'); } - break; + return (ab) => ab.NumberLiteral.setValue(val); case 'Decimal': if (val.includes('::')) { @@ -162,24 +178,15 @@ export const postgresql: IntrospectionProvider = { if (/^-?\d+\.\d+$/.test(val)) { const numVal = parseFloat(val); - if (numVal === Math.floor(numVal)) { - return (ab) => ab.NumberLiteral.setValue(numVal.toFixed(2)); - } - return (ab) => ab.NumberLiteral.setValue(String(numVal)); + return (ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? 
numVal.toFixed(2) : String(numVal)); } if (/^-?\d+$/.test(val)) { return (ab) => ab.NumberLiteral.setValue(val + '.00'); } - break; + return (ab) => ab.NumberLiteral.setValue(val); case 'Boolean': - if (val === 'true') { - return (ab) => ab.BooleanLiteral.setValue(true); - } - if (val === 'false') { - return (ab) => ab.BooleanLiteral.setValue(false); - } - break; + return (ab) => ab.BooleanLiteral.setValue(val === 'true'); case 'String': if (val.includes('::')) { @@ -189,20 +196,7 @@ export const postgresql: IntrospectionProvider = { if (val.startsWith("'") && val.endsWith("'")) { return (ab) => ab.StringLiteral.setValue(val.slice(1, -1).replace(/''/g, "'")); } - break; - } - - if (val.includes('::')) { - return typeCastingConvert({defaultValue,enums,val,services}); - } - - // Fallback handlers for values that don't match field type-specific patterns - if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { - return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)); - } - - if (val.startsWith('nextval(')) { - return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services)); + return (ab) => ab.StringLiteral.setValue(val); } if (val.includes('(') && val.includes(')')) { @@ -212,18 +206,7 @@ export const postgresql: IntrospectionProvider = { ); } - if (val === 'true' || val === 'false') { - return (ab) => ab.BooleanLiteral.setValue(val === 'true'); - } - - if (/^-?\d+\.\d+$/.test(val) || /^-?\d+$/.test(val)) { - return (ab) => ab.NumberLiteral.setValue(val); - } - - if (val.startsWith("'") && val.endsWith("'")) { - return (ab) => ab.StringLiteral.setValue(val.slice(1, -1).replace(/''/g, "'")); - } - + console.warn(`Unsupported default value type: "${defaultValue}" for field type "${fieldType}". 
Skipping default value.`); return null; }, @@ -289,7 +272,20 @@ SELECT FROM ( SELECT "att"."attname" AS "name", - "typ"."typname" AS "datatype", + CASE + WHEN EXISTS ( + SELECT 1 FROM "pg_catalog"."pg_enum" AS "e" + WHERE "e"."enumtypid" = "typ"."oid" + ) THEN 'enum' + ELSE "typ"."typname" + END AS "datatype", + CASE + WHEN EXISTS ( + SELECT 1 FROM "pg_catalog"."pg_enum" AS "e" + WHERE "e"."enumtypid" = "typ"."oid" + ) THEN "typ"."typname" + ELSE NULL + END AS "datatype_name", "tns"."nspname" AS "datatype_schema", "c"."character_maximum_length" AS "length", COALESCE("c"."numeric_precision", "c"."datetime_precision") AS "precision", @@ -465,11 +461,11 @@ function typeCastingConvert({defaultValue, enums, val, services}:{val: string, e } const enumField = enumDef.fields.find((v) => getDbName(v) === value); if (!enumField) { - throw new Error( + throw new CliError( `Enum value ${value} not found in enum ${type} for default value ${defaultValue}`, ); } return (ab) => ab.ReferenceExpr.setTarget(enumField); } } -} \ No newline at end of file +} diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index a3922b7a7..01cb28e61 100644 --- a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -12,6 +12,7 @@ export interface IntrospectedTable { columns: { name: string; datatype: string; + datatype_name: string | null; length: number | null; precision: number | null; datatype_schema: string; @@ -61,7 +62,7 @@ export type IntrospectedSchema = { export type DatabaseFeature = 'Schema' | 'NativeEnum'; export interface IntrospectionProvider { - introspect(connectionString: string): Promise; + introspect(connectionString: string, options: { schemas: string[] }): Promise; getBuiltinType(type: string): { type: BuiltinType | 'Unsupported'; isArray: boolean; @@ -74,6 +75,8 @@ export interface IntrospectionProvider { */ getDefaultValue(args: { fieldType: BuiltinType | 
'Unsupported'; + datatype: string; + datatype_name: string | null; defaultValue: string; services: ZModelServices; enums: Enum[]; diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index fcdbfbad7..b03bad307 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -1,4 +1,3 @@ -import type { Attribute, BuiltinType } from '@zenstackhq/language/ast'; import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; @@ -21,30 +20,95 @@ export const sqlite: IntrospectionProvider = { } }, getBuiltinType(type) { - const t = (type || '').toLowerCase().trim(); + // Strip parenthesized constraints (e.g., VARCHAR(255) → varchar, DECIMAL(10,2) → decimal) + const t = (type || '').toLowerCase().trim().replace(/\(.*\)$/, '').trim(); // SQLite has no array types const isArray = false; + + // SQLite type affinity rules (https://www.sqlite.org/datatype3.html): + // 1. If type contains "INT" → INTEGER affinity + // 2. If type contains "CHAR", "CLOB", or "TEXT" → TEXT affinity + // 3. If type contains "BLOB" or no type → BLOB affinity + // 4. If type contains "REAL", "FLOA", or "DOUB" → REAL affinity + // 5. 
Otherwise → NUMERIC affinity + + // Handle specific known types first for better mapping switch (t) { + // INTEGER types (SQLite: INT, INTEGER, TINYINT, SMALLINT, MEDIUMINT, INT2, INT8) case 'integer': + case 'int': + case 'tinyint': + case 'smallint': + case 'mediumint': + case 'int2': + case 'int8': return { type: 'Int', isArray }; - case 'text': - return { type: 'String', isArray }; + + // BIGINT - map to BigInt for large integers case 'bigint': + case 'unsigned big int': return { type: 'BigInt', isArray }; + + // TEXT types (SQLite: CHARACTER, VARCHAR, VARYING CHARACTER, NCHAR, NATIVE CHARACTER, NVARCHAR, TEXT, CLOB) + case 'text': + case 'varchar': + case 'char': + case 'character': + case 'varying character': + case 'nchar': + case 'native character': + case 'nvarchar': + case 'clob': + return { type: 'String', isArray }; + + // BLOB type case 'blob': return { type: 'Bytes', isArray }; + + // REAL types (SQLite: REAL, DOUBLE, DOUBLE PRECISION, FLOAT) case 'real': + case 'float': + case 'double': + case 'double precision': return { type: 'Float', isArray }; + + // NUMERIC types (SQLite: NUMERIC, DECIMAL) case 'numeric': case 'decimal': return { type: 'Decimal', isArray }; + + // DateTime types case 'datetime': + case 'date': + case 'time': + case 'timestamp': return { type: 'DateTime', isArray }; + + // JSON types + case 'json': case 'jsonb': return { type: 'Json', isArray }; + + // Boolean types case 'boolean': + case 'bool': return { type: 'Boolean', isArray }; + default: { + // Fallback: Use SQLite affinity rules for unknown types + if (t.includes('int')) { + return { type: 'Int', isArray }; + } + if (t.includes('char') || t.includes('clob') || t.includes('text')) { + return { type: 'String', isArray }; + } + if (t.includes('blob')) { + return { type: 'Bytes', isArray }; + } + if (t.includes('real') || t.includes('floa') || t.includes('doub')) { + return { type: 'Float', isArray }; + } + // Default to Unsupported for truly unknown types return { type: 
'Unsupported' as const, isArray }; } } @@ -215,6 +279,7 @@ export const sqlite: IntrospectionProvider = { columns.push({ name: c.name, datatype: c.type || '', + datatype_name: null, // SQLite doesn't support native enums length: null, precision: null, datatype_schema: schema, @@ -245,7 +310,7 @@ export const sqlite: IntrospectionProvider = { } }, - getDefaultValue({ defaultValue, fieldType, services, enums }) { + getDefaultValue({ defaultValue, fieldType, services, enums }) { // datatype and datatype_name not used for SQLite const val = defaultValue.trim(); switch (fieldType) { @@ -261,10 +326,7 @@ export const sqlite: IntrospectionProvider = { if (val === 'autoincrement') { return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services)); } - if (/^-?\d+$/.test(val)) { - return (ab) => ab.NumberLiteral.setValue(val); - } - break; + return (ab) => ab.NumberLiteral.setValue(val); case 'Float': if (/^-?\d+\.\d+$/.test(val)) { @@ -274,30 +336,20 @@ export const sqlite: IntrospectionProvider = { if (/^-?\d+$/.test(val)) { return (ab) => ab.NumberLiteral.setValue(val + '.0'); } - break; + return (ab) => ab.NumberLiteral.setValue(val); case 'Decimal': if (/^-?\d+\.\d+$/.test(val)) { const numVal = parseFloat(val); - if (numVal === Math.floor(numVal)) { - return (ab) => ab.NumberLiteral.setValue(numVal.toFixed(2)); - } - return (ab) => ab.NumberLiteral.setValue(String(numVal)); + return (ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? 
numVal.toFixed(2) : String(numVal)); } if (/^-?\d+$/.test(val)) { return (ab) => ab.NumberLiteral.setValue(val + '.00'); } - break; + return (ab) => ab.NumberLiteral.setValue(val); case 'Boolean': - if (val === 'true' || val === '1') { - return (ab) => ab.BooleanLiteral.setValue(true); - } - if (val === 'false' || val === '0') { - return (ab) => ab.BooleanLiteral.setValue(false); - } - break; - + return (ab) => ab.BooleanLiteral.setValue(val === 'true' || val === '1'); case 'String': if (val.startsWith("'") && val.endsWith("'")) { const strippedName = val.slice(1, -1); @@ -308,43 +360,14 @@ export const sqlite: IntrospectionProvider = { } return (ab) => ab.StringLiteral.setValue(strippedName); } - break; - } - - // Fallback handlers for values that don't match field type-specific patterns - if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { - return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)); - } - - if (val === 'autoincrement') { - return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services)); - } - - if (val === 'true' || val === 'false') { - return (ab) => ab.BooleanLiteral.setValue(val === 'true'); - } - - if (/^-?\d+\.\d+$/.test(val) || /^-?\d+$/.test(val)) { - return (ab) => ab.NumberLiteral.setValue(val); - } - - if (val.startsWith("'") && val.endsWith("'")) { - const strippedName = val.slice(1, -1); - const enumDef = enums.find((e) => e.fields.find((v) => getDbName(v) === strippedName)); - if (enumDef) { - const enumField = enumDef.fields.find((v) => getDbName(v) === strippedName); - if (enumField) return (ab) => ab.ReferenceExpr.setTarget(enumField); - } - return (ab) => ab.StringLiteral.setValue(strippedName); + return (ab) => ab.StringLiteral.setValue(val); } - //TODO: add more default value factories if exists - throw new Error( - `This default value type currently is not supported. Please open an issue on github. 
Values: "${defaultValue}"`, - ); + console.warn(`Unsupported default value type: "${defaultValue}" for field type "${fieldType}". Skipping default value.`); + return null; }, - getFieldAttributes({ fieldName, fieldType, datatype, length, precision, services }) { + getFieldAttributes({ fieldName, fieldType, services }) { const factories: DataFieldAttributeFactory[] = []; // Add @updatedAt for DateTime fields named updatedAt or updated_at @@ -352,27 +375,6 @@ export const sqlite: IntrospectionProvider = { factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services))); } - // Add @db.* attribute if the datatype differs from the default - const dbAttr = services.shared.workspace.IndexManager.allElements('Attribute').find( - (d) => d.name.toLowerCase() === `@db.${datatype.toLowerCase()}`, - )?.node as Attribute | undefined; - - const defaultDatabaseType = this.getDefaultDatabaseType(fieldType as BuiltinType); - - if ( - dbAttr && - defaultDatabaseType && - (defaultDatabaseType.type !== datatype || - (defaultDatabaseType.precision && - defaultDatabaseType.precision !== (length || precision))) - ) { - const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); - if (length || precision) { - dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(length! 
|| precision!)); - } - factories.push(dbAttrFactory); - } - return factories; }, }; diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index b46693afe..44355a595 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -15,9 +15,10 @@ import { import { getLiteralArray, getStringLiteral } from '@zenstackhq/language/utils'; import type { DataSourceProviderType } from '@zenstackhq/schema'; import type { Reference } from 'langium'; +import { CliError } from '../../cli-error'; export function getAttribute(model: Model, attrName: string) { - if (!model.$document) throw new Error('Model is not associated with a document.'); + if (!model.$document) throw new CliError('Model is not associated with a document.'); const references = model.$document.references as Reference[]; return references.find((a) => a.ref?.$type === 'Attribute' && a.ref?.name === attrName)?.ref as @@ -28,22 +29,22 @@ export function getAttribute(model: Model, attrName: string) { export function getDatasource(model: Model) { const datasource = model.declarations.find((d) => d.$type === 'DataSource'); if (!datasource) { - throw new Error('No datasource declaration found in the schema.'); + throw new CliError('The schema\'s "datasource" must have a "url" field to use this command.'); } const urlField = datasource.fields.find((f) => f.name === 'url'); - if (!urlField) throw new Error(`No url field found in the datasource declaration.`); + if (!urlField) throw new CliError(`No url field found in the datasource declaration.`); let url = getStringLiteral(urlField.value); if (!url && isInvocationExpr(urlField.value)) { const envName = getStringLiteral(urlField.value.args[0]?.value); if (!envName) { - throw new Error('The url field must be a string literal or an env().'); + throw new CliError('The url field must be a string literal or an env().'); } if (!process.env[envName]) { - throw new Error( + throw new CliError( 
`Environment variable ${envName} is not set, please set it to the database connection string.`, ); } @@ -51,7 +52,7 @@ export function getDatasource(model: Model) { } if (!url) { - throw new Error('The url field must be a string literal or an env().'); + throw new CliError('The url field must be a string literal or an env().'); } if (url.startsWith('file:')) { @@ -68,7 +69,7 @@ export function getDatasource(model: Model) { getLiteralArray(schemasField.value) ?.filter((s) => s !== undefined)) as string[] || []; - + return { name: datasource.name, provider: getStringLiteral( @@ -85,11 +86,13 @@ export function getDbName(decl: AbstractDeclaration | DataField | EnumField, inc if (!('attributes' in decl)) return decl.name; const schemaAttr = decl.attributes.find((a) => a.decl.ref?.name === '@@schema'); - const schemaAttrValue = schemaAttr?.args[0]?.value; - let schema: string; - if (schemaAttrValue?.$type !== 'StringLiteral') schema = 'public'; - if (!schemaAttr) schema = 'public'; - else schema = (schemaAttr.args[0]?.value as any)?.value as string; + let schema = 'public'; + if (schemaAttr) { + const schemaAttrValue = schemaAttr.args[0]?.value; + if (schemaAttrValue?.$type === 'StringLiteral') { + schema = schemaAttrValue.value; + } + } const formatName = (name: string) => `${schema && includeSchema ? 
`${schema}.` : ''}${name}`; @@ -117,16 +120,16 @@ export function getRelationFkName(decl: DataField): string | undefined { export function getRelationFieldsKey(decl: DataField): string | undefined { const relationAttr = decl?.attributes.find((a) => a.decl.ref?.name === '@relation'); if (!relationAttr) return undefined; - + const fieldsArg = relationAttr.args.find((a) => a.name === 'fields')?.value; if (!fieldsArg || fieldsArg.$type !== 'ArrayExpr') return undefined; - + const fieldNames = fieldsArg.items .filter((item): item is ReferenceExpr => item.$type === 'ReferenceExpr') .map((item) => item.target?.$refText || item.target?.ref?.name) .filter((name): name is string => !!name) .sort(); - + return fieldNames.length > 0 ? fieldNames.join(',') : undefined; } @@ -148,7 +151,7 @@ export function getDeclarationRef( const node = services.shared.workspace.IndexManager.allElements(type).find( (m) => m.node && getDbName(m.node as T) === name, )?.node; - if (!node) throw new Error(`Declaration not found: ${name}`); + if (!node) throw new CliError(`Declaration not found: ${name}`); return node as T; } diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index 8d253cc3e..7d4f62d32 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -148,14 +148,19 @@ function createProgram() { .description('Introspect your database.') .addOption(schemaOption) .addOption(noVersionCheckOption) - .addOption(new Option('-o, --out ', 'add custom output path for the introspected schema')) .addOption( - new Option('--model-casing ', 'set the casing of generated models').default( + new Option( + '-o, --output ', + 'set custom output path for the introspected schema. If a file path is provided, all schemas are merged into that single file. 
If a directory path is provided, files are written to the directory and imports are kept.', + ), + ) + .addOption( + new Option('--model-casing ', 'set the casing of generated models').default( 'none', ), ) .addOption( - new Option('--field-casing ', 'set the casing of generated fields').default( + new Option('--field-casing ', 'set the casing of generated fields').default( 'none', ), ) @@ -163,9 +168,9 @@ function createProgram() { new Option('--always-map', 'always add @map and @@map attributes to models and fields').default(false), ) .addOption( - new Option('--quote ', 'set the quote style of generated schema files').default('double'), + new Option('--quote ', 'set the quote style of generated schema files').default('single'), ) - .addOption(new Option('--indent ', 'set the indentation of the generated schema files').default(4)) + .addOption(new Option('--indent ', 'set the indentation of the generated schema files').default(4).argParser(parseInt)) .action((options) => dbAction('pull', options)); dbCommand diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index d8d677258..85aca1261 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -15,7 +15,7 @@ const generator = new ZModelCodeGenerator({ describe('DB pull - Common features (all providers)', () => { describe('Pull from zero - restore complete schema from database', () => { it('should restore basic schema with all supported types', async () => { - const workDir = await createFormattedProject( + const { workDir, schema } = await createFormattedProject( `model User { id Int @id @default(autoincrement()) email String @unique @@ -36,8 +36,6 @@ describe('DB pull - Common features (all providers)', () => { // Store the schema after db push (this is what provider names will be) const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); - const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); - const expectedSchema 
= generator.generate(model); // Remove schema content to simulate restoration from zero fs.writeFileSync(schemaFile, getDefaultPrelude()); @@ -46,11 +44,11 @@ describe('DB pull - Common features (all providers)', () => { runCli('db pull --indent 4', workDir); const restoredSchema = getSchema(workDir); - expect(restoredSchema).toEqual(expectedSchema); + expect(restoredSchema).toEqual(schema); }); it('should restore schema with relations', async () => { - const workDir = await createFormattedProject( + const { workDir, schema } = await createFormattedProject( `model Post { id Int @id @default(autoincrement()) title String @@ -67,18 +65,16 @@ model User { runCli('db push', workDir); const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); - const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); - const expectedSchema = generator.generate(model); fs.writeFileSync(schemaFile, getDefaultPrelude()); runCli('db pull --indent 4', workDir); const restoredSchema = getSchema(workDir); - expect(restoredSchema).toEqual(expectedSchema); + expect(restoredSchema).toEqual(schema); }); it('should restore schema with many-to-many relations', async () => { - const workDir = await createFormattedProject( + const { workDir, schema } = await createFormattedProject( `model Post { id Int @id @default(autoincrement()) title String @@ -103,18 +99,16 @@ model Tag { runCli('db push', workDir); const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); - const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); - const expectedSchema = generator.generate(model); fs.writeFileSync(schemaFile, getDefaultPrelude()); runCli('db pull --indent 4', workDir); const restoredSchema = getSchema(workDir); - expect(restoredSchema).toEqual(expectedSchema); + expect(restoredSchema).toEqual(schema); }); it('should restore schema with indexes and unique constraints', async () => { - const workDir = await createFormattedProject( + const { workDir, 
schema } = await createFormattedProject( `model User { id Int @id @default(autoincrement()) email String @unique @@ -132,18 +126,16 @@ model Tag { runCli('db push', workDir); const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); - const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); - const expectedSchema = generator.generate(model); fs.writeFileSync(schemaFile, getDefaultPrelude()); runCli('db pull --indent 4', workDir); const restoredSchema = getSchema(workDir); - expect(restoredSchema).toEqual(expectedSchema); + expect(restoredSchema).toEqual(schema); }); it('should restore schema with composite primary keys', async () => { - const workDir = await createFormattedProject( + const { workDir, schema } = await createFormattedProject( `model UserRole { userId String role String @@ -155,46 +147,43 @@ model Tag { runCli('db push', workDir); const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); - const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); - const expectedSchema = generator.generate(model); fs.writeFileSync(schemaFile, getDefaultPrelude()); runCli('db pull --indent 4', workDir); const restoredSchema = getSchema(workDir); - expect(restoredSchema).toEqual(expectedSchema); + expect(restoredSchema).toEqual(schema); }); }); describe('Pull with existing schema - preserve schema features', () => { it('should preserve field and table mappings', async () => { - const workDir = await createFormattedProject( + const { workDir, schema } = await createFormattedProject( `model User { id Int @id @default(autoincrement()) - email String @unique @map("email_address") - firstName String @map("first_name") - lastName String @map("last_name") + email String @unique @map('email_address') + firstName String @map('first_name') + lastName String @map('last_name') - @@map("users") + @@map('users') }`, ); runCli('db push', workDir); - const originalSchema = getSchema(workDir); runCli('db pull --indent 4', 
workDir); - expect(getSchema(workDir)).toEqual(originalSchema); + expect(getSchema(workDir)).toEqual(schema); }); it('should not modify a comprehensive schema with all features', async () => { - const workDir = await createFormattedProject(`model User { + const { workDir, schema } = await createFormattedProject(`model User { id Int @id @default(autoincrement()) - email String @unique @map("email_address") - name String? @default("Anonymous") - role Role @default(USER) + email String @unique @map('email_address') + name String? @default('Anonymous') + role users_role @default(USER) profile Profile? - shared_profile Profile? @relation("shared") + shared_profile Profile? @relation('shared') posts Post[] createdAt DateTime @default(now()) updatedAt DateTime @updatedAt @@ -205,19 +194,19 @@ model Tag { bytes Bytes? @@index([role]) - @@map("users") + @@map('users') } model Profile { id Int @id @default(autoincrement()) user User @relation(fields: [userId], references: [id], onDelete: Cascade) userId Int @unique - user_shared User @relation("shared", fields: [shared_userId], references: [id], onDelete: Cascade) + user_shared User @relation('shared', fields: [shared_userId], references: [id], onDelete: Cascade) shared_userId Int @unique bio String? avatarUrl String? - @@map("profiles") + @@map('profiles') } model Post { @@ -236,7 +225,7 @@ model Post { @@unique([authorId, slug]) @@index([authorId, published]) - @@map("posts") + @@map('posts') } model Tag { @@ -245,8 +234,8 @@ model Tag { posts PostTag[] createdAt DateTime @default(now()) - @@index([name], name: "tag_name_idx") - @@map("tags") + @@index([name], name: 'tag_name_idx') + @@map('tags') } model PostTag { @@ -255,23 +244,24 @@ model PostTag { tag Tag @relation(fields: [tagId], references: [id], onDelete: Cascade) tagId Int assignedAt DateTime @default(now()) - note String? @default("initial") + note String? 
@default('initial') @@id([postId, tagId]) - @@map("post_tags") + @@map('post_tags') } -enum Role { +enum users_role { USER ADMIN MODERATOR }`, +// When using MySQL, the introspection simply overrides the enum and cannot detect if it exists with the same name because it only stores the values. +// TODO: Create a better way to handle this, possibly by finding enums by their values as well if the schema exists. ); runCli('db push', workDir); - const originalSchema = getSchema(workDir); runCli('db pull --indent 4', workDir); - expect(getSchema(workDir)).toEqual(originalSchema); + expect(getSchema(workDir)).toEqual(schema); }); it('should preserve imports when pulling with multi-file schema', async () => { @@ -282,14 +272,14 @@ enum Role { fs.mkdirSync(modelsDir, { recursive: true }); // Create main schema with imports - const mainSchema = await formatDocument(`import "./models/user" -import "./models/post" + const mainSchema = await formatDocument(`import './models/user' +import './models/post' ${getDefaultPrelude()}`); fs.writeFileSync(schemaPath, mainSchema); // Create user model - const userModel = await formatDocument(`import "./post" + const userModel = await formatDocument(`import './post' model User { id Int @id @default(autoincrement()) @@ -301,7 +291,7 @@ model User { fs.writeFileSync(path.join(modelsDir, 'user.zmodel'), userModel); // Create post model - const postModel = await formatDocument(`import "./user" + const postModel = await formatDocument(`import './user' model Post { id Int @id @default(autoincrement()) @@ -336,13 +326,13 @@ describe('DB pull - PostgreSQL specific features', () => { skip(); return; } - const workDir = await createFormattedProject( + const { workDir, schema } = await createFormattedProject( `model User { id Int @id @default(autoincrement()) email String @unique posts Post[] - @@schema("auth") + @@schema('auth') } model Post { @@ -351,21 +341,19 @@ model Post { author User @relation(fields: [authorId], references: [id], onDelete: 
Cascade) authorId Int - @@schema("content") + @@schema('content') }`, - { provider: 'postgresql', extra:{ schemas: ["public", "content", "auth"] } }, + { provider: 'postgresql', extra:{ schemas: ['public', 'content', 'auth'] } }, ); runCli('db push', workDir); const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); - const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); - const expectedSchema = generator.generate(model); - fs.writeFileSync(schemaFile, getDefaultPrelude({ provider: 'postgresql', extra:{ schemas: ["public", "content", "auth"]} })); + fs.writeFileSync(schemaFile, getDefaultPrelude({ provider: 'postgresql', extra:{ schemas: ['public', 'content', 'auth']} })); runCli('db pull --indent 4', workDir); const restoredSchema = getSchema(workDir); - expect(restoredSchema).toEqual(expectedSchema); + expect(restoredSchema).toEqual(schema); }); it('should preserve native PostgreSQL enums when schema exists', async ({ skip }) => { @@ -374,7 +362,7 @@ model Post { skip(); return; } - const workDir = await createFormattedProject( + const { workDir, schema } = await createFormattedProject( `model User { id Int @id @default(autoincrement()) email String @unique @@ -397,11 +385,10 @@ enum UserRole { ); runCli('db push', workDir); - const originalSchema = getSchema(workDir); runCli('db pull --indent 4', workDir); const pulledSchema = getSchema(workDir); - expect(pulledSchema).toEqual(originalSchema); + expect(pulledSchema).toEqual(schema); }); it('should not modify schema with PostgreSQL-specific features', async ({ skip }) => { @@ -410,7 +397,7 @@ enum UserRole { skip(); return; } - const workDir = await createFormattedProject( + const { workDir, schema } = await createFormattedProject( `model User { id Int @id @default(autoincrement()) email String @unique @@ -418,7 +405,7 @@ enum UserRole { posts Post[] metadata Json? 
- @@schema("auth") + @@schema('auth') @@index([status]) } @@ -429,7 +416,7 @@ model Post { authorId Int tags String[] - @@schema("content") + @@schema('content') @@index([authorId]) } @@ -438,13 +425,47 @@ enum UserStatus { INACTIVE SUSPENDED }`, - { provider: 'postgresql', extra:{ schemas: ["public", "content", "auth"] } }, + { provider: 'postgresql', extra:{ schemas: ['public', 'content', 'auth'] } }, ); runCli('db push', workDir); - const originalSchema = getSchema(workDir); runCli('db pull --indent 4', workDir); - expect(getSchema(workDir)).toEqual(originalSchema); + expect(getSchema(workDir)).toEqual(schema); + }); +}); + +describe('DB pull - SQL specific features', () => { + it('should restore enum fields from zero', async ({ skip }) => { + const provider = getTestDbProvider(); + if (provider !== 'mysql' && provider !== 'postgresql') { + skip(); + return; + } + + const { workDir, schema } = await createFormattedProject( + `model User { + id Int @id @default(autoincrement()) + email String @unique + status User_status @default(ACTIVE) +} + +enum User_status { + ACTIVE + INACTIVE + SUSPENDED +}`); + runCli('db push', workDir); + + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + + // Remove schema content to simulate restoration from zero + fs.writeFileSync(schemaFile, getDefaultPrelude()); + + // Pull should fully restore the schema including enum fields + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + expect(restoredSchema).toEqual(schema); }); }); diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index 4a58598c2..5052aa558 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -62,19 +62,16 @@ export function getDefaultPrelude(options?: { provider?: 'sqlite' | 'postgresql' } // Build fields array for proper alignment (matching ZModelCodeGenerator) const fields: [string, string][] = [ - ['provider', `"${provider}"`], - ['url', `"${dbUrl}"`], + ['provider', 
`'${provider}'`], + ['url', `'${dbUrl}'`], ...Object.entries(options?.extra || {}).map(([k, v]) => { - const value = Array.isArray(v) ? `[${v.map(item => `"${item}"`).join(', ')}]` : `"${v}"`; + const value = Array.isArray(v) ? `[${v.map(item => `'${item}'`).join(', ')}]` : `'${v}'`; return [k, value] as [string, string]; }), ]; - // Calculate alignment padding based on longest field name - const longestName = Math.max(...fields.map(([name]) => name.length)); const formattedFields = fields.map(([name, value]) => { - const padding = ' '.repeat(longestName - name.length + 1); - return ` ${name}${padding}= ${value}`; + return ` ${name} = ${value}`; }).join('\n'); const ZMODEL_PRELUDE = `datasource db {\n${formattedFields}\n}`; @@ -98,7 +95,8 @@ export async function createFormattedProject( ) { const fullContent = `${getDefaultPrelude({ provider: options?.provider, extra: options?.extra })}\n\n${zmodel}`; const formatted = await formatDocument(fullContent); - return createProject(formatted, { customPrelude: true, provider: options?.provider }); + const workDir = createProject(formatted, { customPrelude: true, provider: options?.provider }); + return { workDir, schema: formatted }; } export function runCli(command: string, cwd: string) { diff --git a/packages/language/src/document.ts b/packages/language/src/document.ts index 026d3d23e..a7a60e9ce 100644 --- a/packages/language/src/document.ts +++ b/packages/language/src/document.ts @@ -32,7 +32,7 @@ import type { ZModelFormatter } from './zmodel-formatter'; export async function loadDocument( fileName: string, additionalModelFiles: string[] = [], - keepImports: boolean = false, + mergeImports: boolean = true, ): Promise< | { success: true; model: Model; warnings: string[]; services: ZModelServices } | { success: false; errors: string[]; warnings: string[] } @@ -123,7 +123,7 @@ export async function loadDocument( const model = document.parseResult.value as Model; - if (keepImports === false) { + if (mergeImports) { // 
merge all declarations into the main document const imported = mergeImportsDeclarations(langiumDocuments, model); @@ -135,7 +135,7 @@ export async function loadDocument( } // extra validation after merging imported declarations - const additionalErrors = validationAfterImportMerge(model); + const additionalErrors = mergeImports === true ? validationAfterImportMerge(model) : []; if (additionalErrors.length > 0) { return { success: false, diff --git a/packages/language/src/factory/ast-factory.ts b/packages/language/src/factory/ast-factory.ts index e01dd7ced..7618f0738 100644 --- a/packages/language/src/factory/ast-factory.ts +++ b/packages/language/src/factory/ast-factory.ts @@ -49,8 +49,4 @@ export abstract class AstFactory { }); return this.node; } - - resolveChilds(nodeArg: T | NodeFactoriesFor): T { - return this.update(nodeArg); - } } diff --git a/packages/language/src/factory/declaration.ts b/packages/language/src/factory/declaration.ts index 1f514982b..0ec80fe9e 100644 --- a/packages/language/src/factory/declaration.ts +++ b/packages/language/src/factory/declaration.ts @@ -153,7 +153,9 @@ export class DataFieldFactory extends AstFactory { builder.setContainer(this.node); this.attributes.push(builder); } else { - this.attributes.push(builder(new DataFieldAttributeFactory())); + const attr = builder(new DataFieldAttributeFactory()); + attr.setContainer(this.node); + this.attributes.push(attr); } this.update({ attributes: this.attributes, diff --git a/packages/language/src/factory/expression.ts b/packages/language/src/factory/expression.ts index a0ba84001..f84b7497c 100644 --- a/packages/language/src/factory/expression.ts +++ b/packages/language/src/factory/expression.ts @@ -278,6 +278,7 @@ export class BinaryExprFactory extends AstFactory { operator?: BinaryExpr['operator']; right?: AstFactory; left?: AstFactory; + // TODO: add support for CollectionPredicateBinding constructor() { super({ type: BinaryExpr }); diff --git 
a/packages/language/src/zmodel-code-generator.ts b/packages/language/src/zmodel-code-generator.ts index c6059ebe6..50759fc81 100644 --- a/packages/language/src/zmodel-code-generator.ts +++ b/packages/language/src/zmodel-code-generator.ts @@ -101,7 +101,11 @@ export class ZModelCodeGenerator { @gen(Model) private _generateModel(ast: Model) { return `${ast.imports.map((d) => this.generate(d)).join('\n')}${ast.imports.length > 0 ? '\n\n' : ''}${ast.declarations - .sort((d) => (d.$type === 'Enum' ? 1 : 0)) + .sort((a, b) => { + if (a.$type === 'Enum' && b.$type !== 'Enum') return 1; + if (a.$type !== 'Enum' && b.$type === 'Enum') return -1; + return 0; + }) .map((d) => this.generate(d)) .join('\n\n')}`; } @@ -145,9 +149,7 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')} @gen(ConfigField) private _generateConfigField(ast: ConfigField) { - const longestName = Math.max(...ast.$container.fields.map((x) => x.name.length)); - const padding = ' '.repeat(longestName - ast.name.length + 1); - return `${ast.name}${padding}= ${this.generate(ast.value)}`; + return `${ast.name} = ${this.generate(ast.value)}`; } @gen(ConfigArrayExpr) @@ -175,9 +177,7 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')} @gen(PluginField) private _generatePluginField(ast: PluginField) { - const longestName = Math.max(...ast.$container.fields.map((x) => x.name.length)); - const padding = ' '.repeat(longestName - ast.name.length + 1); - return `${ast.name}${padding}= ${this.generate(ast.value)}`; + return `${ast.name} = ${this.generate(ast.value)}`; } @gen(DataModel) @@ -185,14 +185,9 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')} const comments = `${ast.comments.join('\n')}\n`; return `${ast.comments.length > 0 ? comments : ''}${ast.isView ? 'view' : 'model'} ${ast.name}${ - ast.mixins.length > 0 ? ' mixes ' + ast.mixins.map((x) => x.$refText).join(', ') : '' + ast.mixins.length > 0 ? 
' with ' + ast.mixins.map((x) => x.$refText).join(', ') : '' } { -${ast.fields - .map((x) => { - const comments = x.comments.map((c) => `${this.indent}${c}`).join('\n'); - return (x.comments.length ? `${comments}\n` : '') + this.indent + this.generate(x); - }) - .join('\n')}${ +${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')}${ ast.attributes.length > 0 ? '\n\n' + ast.attributes.map((x) => this.indent + this.generate(x)).join('\n') : '' @@ -202,13 +197,20 @@ ${ast.fields @gen(DataField) private _generateDataField(ast: DataField) { - const longestFieldName = Math.max(...ast.$container.fields.map((f) => f.name.length)); - const longestType = Math.max(...ast.$container.fields.map((f) => this.fieldType(f.type).length)); - const paddingLeft = longestFieldName - ast.name.length; - const paddingRight = ast.attributes.length > 0 ? longestType - this.fieldType(ast.type).length : 0; - return `${ast.name}${' '.repeat(paddingLeft)} ${this.fieldType(ast.type)}${' '.repeat(paddingRight)}${ + const fieldLine = `${ast.name} ${this.fieldType(ast.type)}${ ast.attributes.length > 0 ? ' ' + ast.attributes.map((x) => this.generate(x)).join(' ') : '' }`; + + if (ast.comments.length === 0) { + return fieldLine; + } + + // Build comment block with proper indentation: + // - First comment: no indent (caller adds it via `this.indent + this.generate(x)`) + // - Subsequent comments: add indent + // - Field line: add indent (since it comes after the comment block) + const commentLines = ast.comments.map((c, i) => (i === 0 ? 
c : this.indent + c)); + return `${commentLines.join('\n')}\n${this.indent}${fieldLine}`; } private fieldType(type: DataFieldType) { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 3412d6554..f6e6a972e 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -192,9 +192,6 @@ importers: packages/cli: dependencies: - '@dotenvx/dotenvx': - specifier: ^1.51.0 - version: 1.52.0 '@zenstackhq/common-helpers': specifier: workspace:* version: link:../common-helpers @@ -1564,22 +1561,12 @@ packages: resolution: {integrity: sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==} engines: {node: '>=18'} - '@dotenvx/dotenvx@1.52.0': - resolution: {integrity: sha512-CaQcc8JvtzQhUSm9877b6V4Tb7HCotkcyud9X2YwdqtQKwgljkMRwU96fVYKnzN3V0Hj74oP7Es+vZ0mS+Aa1w==} - hasBin: true - '@dxup/nuxt@0.2.2': resolution: {integrity: sha512-RNpJjDZs9+JcT9N87AnOuHsNM75DEd58itADNd/s1LIF6BZbTLZV0xxilJZb55lntn4TYvscTaXLCBX2fq9CXg==} '@dxup/unimport@0.1.2': resolution: {integrity: sha512-/B8YJGPzaYq1NbsQmwgP8EZqg40NpTw4ZB3suuI0TplbxKHeK94jeaawLmVhCv+YwUnOpiWEz9U6SeThku/8JQ==} - '@ecies/ciphers@0.2.5': - resolution: {integrity: sha512-GalEZH4JgOMHYYcYmVqnFirFsjZHeoGMDt9IxEnM9F7GRUUyUksJ7Ou53L83WHJq3RWKD3AcBpo0iQh0oMpf8A==} - engines: {bun: '>=1', deno: '>=2', node: '>=16'} - peerDependencies: - '@noble/ciphers': ^1.0.0 - '@edge-runtime/primitives@6.0.0': resolution: {integrity: sha512-FqoxaBT+prPBHBwE1WXS1ocnu/VLTQyZ6NMUBAdbP7N2hsFTTxMC/jMu2D/8GAlMQfxeuppcPuCUk/HO3fpIvA==} engines: {node: '>=18'} @@ -2399,26 +2386,14 @@ packages: cpu: [x64] os: [win32] - '@noble/ciphers@1.3.0': - resolution: {integrity: sha512-2I0gnIVPtfnMw9ee9h1dJG7tp81+8Ob3OJb3Mv37rx5L40/b0i7djjCVvGOVqc9AEIQyvyu1i6ypKdFw8R8gQw==} - engines: {node: ^14.21.3 || >=16} - '@noble/ciphers@2.0.1': resolution: {integrity: sha512-xHK3XHPUW8DTAobU+G0XT+/w+JLM7/8k1UFdB5xg/zTFPnFCobhftzw8wl4Lw2aq/Rvir5pxfZV5fEazmeCJ2g==} engines: {node: '>= 20.19.0'} - '@noble/curves@1.9.7': - resolution: {integrity: 
sha512-gbKGcRUYIjA3/zCCNaWDciTMFI0dCkvou3TL8Zmy5Nc7sJ47a0jtOeZoTaMxkuqRo9cRhjOdZJXegxYE5FN/xw==} - engines: {node: ^14.21.3 || >=16} - '@noble/hashes@1.7.1': resolution: {integrity: sha512-B8XBPsn4vT/KJAGqDzbwztd+6Yte3P4V7iafm24bxgDe/mlRuK6xmWPuCNrKt2vDafZ8MfJLlchDG/vYafQEjQ==} engines: {node: ^14.21.3 || >=16} - '@noble/hashes@1.8.0': - resolution: {integrity: sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==} - engines: {node: ^14.21.3 || >=16} - '@noble/hashes@2.0.1': resolution: {integrity: sha512-XlOlEbQcE9fmuXxrVTXCTlG2nlRXa9Rj3rr5Ue/+tX+nmkgbX720YHh0VR3hBF9xDvwnb8D2shVGOwNx+ulArw==} engines: {node: '>= 20.19.0'} @@ -5115,10 +5090,6 @@ packages: eastasianwidth@0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} - eciesjs@0.4.17: - resolution: {integrity: sha512-TOOURki4G7sD1wDCjj7NfLaXZZ49dFOeEb5y39IXpb8p0hRzVvfvzZHOi5JcT+PpyAbi/Y+lxPb8eTag2WYH8w==} - engines: {bun: '>=1', deno: '>=2', node: '>=16'} - ee-first@1.1.1: resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} @@ -5408,10 +5379,6 @@ packages: '@sinclair/typebox': optional: true - execa@5.1.1: - resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} - engines: {node: '>=10'} - execa@8.0.1: resolution: {integrity: sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==} engines: {node: '>=16.17'} @@ -5629,10 +5596,6 @@ packages: resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==} engines: {node: '>= 0.4'} - get-stream@6.0.1: - resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} - engines: {node: '>=10'} - get-stream@8.0.1: resolution: {integrity: 
sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==} engines: {node: '>=16'} @@ -5796,10 +5759,6 @@ packages: httpxy@0.1.7: resolution: {integrity: sha512-pXNx8gnANKAndgga5ahefxc++tJvNL87CXoRwxn1cJE2ZkWEojF3tNfQIEhZX/vfpt+wzeAzpUI4qkediX1MLQ==} - human-signals@2.1.0: - resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} - engines: {node: '>=10.17.0'} - human-signals@5.0.0: resolution: {integrity: sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==} engines: {node: '>=16.17.0'} @@ -6721,10 +6680,6 @@ packages: engines: {node: '>= 4'} hasBin: true - npm-run-path@4.0.1: - resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} - engines: {node: '>=8'} - npm-run-path@5.3.0: resolution: {integrity: sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} @@ -6766,10 +6721,6 @@ packages: resolution: {integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==} engines: {node: '>= 0.4'} - object-treeify@1.1.33: - resolution: {integrity: sha512-EFVjAYfzWqWsBMRHPMAXLCDIJnpMhdWAqR7xG6M6a2cs6PMFpl/+Z20w9zDW4vkxOFfddegBKq9Rehd0bxWE7A==} - engines: {node: '>= 10'} - object.assign@4.1.7: resolution: {integrity: sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==} engines: {node: '>= 0.4'} @@ -7926,10 +7877,6 @@ packages: resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==} engines: {node: '>=4'} - strip-final-newline@2.0.0: - resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} - engines: {node: '>=6'} - strip-final-newline@3.0.0: resolution: 
{integrity: sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==} engines: {node: '>=12'} @@ -8769,11 +8716,6 @@ packages: engines: {node: '>= 8'} hasBin: true - which@4.0.0: - resolution: {integrity: sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==} - engines: {node: ^16.13.0 || >=18.0.0} - hasBin: true - which@5.0.0: resolution: {integrity: sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==} engines: {node: ^18.17.0 || >=20.5.0} @@ -9401,18 +9343,6 @@ snapshots: '@csstools/css-tokenizer@3.0.4': optional: true - '@dotenvx/dotenvx@1.52.0': - dependencies: - commander: 11.1.0 - dotenv: 17.2.3 - eciesjs: 0.4.17 - execa: 5.1.1 - fdir: 6.5.0(picomatch@4.0.3) - ignore: 5.3.2 - object-treeify: 1.1.33 - picomatch: 4.0.3 - which: 4.0.0 - '@dxup/nuxt@0.2.2(magicast@0.5.1)': dependencies: '@dxup/unimport': 0.1.2 @@ -9425,10 +9355,6 @@ snapshots: '@dxup/unimport@0.1.2': {} - '@ecies/ciphers@0.2.5(@noble/ciphers@1.3.0)': - dependencies: - '@noble/ciphers': 1.3.0 - '@edge-runtime/primitives@6.0.0': {} '@edge-runtime/vm@5.0.0': @@ -10000,18 +9926,10 @@ snapshots: '@next/swc-win32-x64-msvc@16.0.10': optional: true - '@noble/ciphers@1.3.0': {} - '@noble/ciphers@2.0.1': {} - '@noble/curves@1.9.7': - dependencies: - '@noble/hashes': 1.8.0 - '@noble/hashes@1.7.1': {} - '@noble/hashes@1.8.0': {} - '@noble/hashes@2.0.1': {} '@nodelib/fs.scandir@2.1.5': @@ -12805,13 +12723,6 @@ snapshots: eastasianwidth@0.2.0: {} - eciesjs@0.4.17: - dependencies: - '@ecies/ciphers': 0.2.5(@noble/ciphers@1.3.0) - '@noble/ciphers': 1.3.0 - '@noble/curves': 1.9.7 - '@noble/hashes': 1.8.0 - ee-first@1.1.1: {} effect@3.18.4: @@ -13072,7 +12983,7 @@ snapshots: eslint: 9.29.0(jiti@2.6.1) eslint-import-resolver-node: 0.3.9 eslint-import-resolver-typescript: 
3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.46.2(eslint@9.29.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.29.0(jiti@2.6.1)))(eslint@9.29.0(jiti@2.6.1)) - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.46.2(eslint@9.29.0(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.29.0(jiti@2.6.1)) + eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.46.2(eslint@9.29.0(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.46.2(eslint@9.29.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.29.0(jiti@2.6.1)))(eslint@9.29.0(jiti@2.6.1)))(eslint@9.29.0(jiti@2.6.1)) eslint-plugin-jsx-a11y: 6.10.2(eslint@9.29.0(jiti@2.6.1)) eslint-plugin-react: 7.37.5(eslint@9.29.0(jiti@2.6.1)) eslint-plugin-react-hooks: 7.0.1(eslint@9.29.0(jiti@2.6.1)) @@ -13105,7 +13016,7 @@ snapshots: tinyglobby: 0.2.15 unrs-resolver: 1.11.1 optionalDependencies: - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.46.2(eslint@9.29.0(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.29.0(jiti@2.6.1)) + eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.46.2(eslint@9.29.0(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.46.2(eslint@9.29.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.29.0(jiti@2.6.1)))(eslint@9.29.0(jiti@2.6.1)))(eslint@9.29.0(jiti@2.6.1)) transitivePeerDependencies: - supports-color @@ -13120,7 +13031,7 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.46.2(eslint@9.29.0(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.29.0(jiti@2.6.1)): + 
eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.46.2(eslint@9.29.0(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.46.2(eslint@9.29.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.29.0(jiti@2.6.1)))(eslint@9.29.0(jiti@2.6.1)))(eslint@9.29.0(jiti@2.6.1)): dependencies: '@rtsao/scc': 1.1.0 array-includes: 3.1.9 @@ -13298,18 +13209,6 @@ snapshots: optionalDependencies: '@sinclair/typebox': 0.34.41 - execa@5.1.1: - dependencies: - cross-spawn: 7.0.6 - get-stream: 6.0.1 - human-signals: 2.1.0 - is-stream: 2.0.1 - merge-stream: 2.0.0 - npm-run-path: 4.0.1 - onetime: 5.1.2 - signal-exit: 3.0.7 - strip-final-newline: 2.0.0 - execa@8.0.1: dependencies: cross-spawn: 7.0.6 @@ -13603,8 +13502,6 @@ snapshots: dunder-proto: 1.0.1 es-object-atoms: 1.1.1 - get-stream@6.0.1: {} - get-stream@8.0.1: {} get-stream@9.0.1: @@ -13795,8 +13692,6 @@ snapshots: httpxy@0.1.7: {} - human-signals@2.1.0: {} - human-signals@5.0.0: {} human-signals@8.0.1: {} @@ -14761,10 +14656,6 @@ snapshots: shell-quote: 1.8.3 string.prototype.padend: 3.1.6 - npm-run-path@4.0.1: - dependencies: - path-key: 3.1.1 - npm-run-path@5.3.0: dependencies: path-key: 4.0.0 @@ -14915,8 +14806,6 @@ snapshots: object-keys@1.1.1: {} - object-treeify@1.1.33: {} - object.assign@4.1.7: dependencies: call-bind: 1.0.8 @@ -16177,8 +16066,6 @@ snapshots: strip-bom@3.0.0: {} - strip-final-newline@2.0.0: {} - strip-final-newline@3.0.0: {} strip-final-newline@4.0.0: {} @@ -17162,10 +17049,6 @@ snapshots: dependencies: isexe: 2.0.0 - which@4.0.0: - dependencies: - isexe: 3.1.1 - which@5.0.0: dependencies: isexe: 3.1.1 From 5dc900ed3406a35f1d5a48e2eb8b626c93f97ff1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Tue, 3 Feb 2026 20:02:05 +0100 Subject: [PATCH 70/83] fix: address PR comments --- packages/cli/src/actions/db.ts | 7 ++- packages/cli/src/actions/pull/index.ts | 13 +++-- .../cli/src/actions/pull/provider/mysql.ts | 42 
+++++++++------- .../src/actions/pull/provider/postgresql.ts | 26 ++++++---- .../cli/src/actions/pull/provider/sqlite.ts | 26 ++++++---- packages/cli/test/db/pull.test.ts | 48 +++++++++++++++++++ packages/language/src/factory/declaration.ts | 18 +++---- 7 files changed, 134 insertions(+), 46 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index e9f730ec6..03f335894 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -89,7 +89,7 @@ async function runPull(options: PullOptions) { const treatAsFile = !!outPath && ((fs.existsSync(outPath) && fs.lstatSync(outPath).isFile()) || path.extname(outPath) !== ''); - + const { model, services } = await loadSchemaDocument(schemaFile, { returnServices: true, mergeImports: treatAsFile, @@ -328,7 +328,7 @@ async function runPull(options: PullOptions) { return; } const originalField = originalFields.at(0); - + if (!originalField) { getModelChanges(originalDataModel.name).addedFields.push(colors.green(`+ ${f.name}`)); (f as any).$container = originalDataModel; @@ -368,6 +368,9 @@ async function runPull(options: PullOptions) { !['@map', '@@map', '@default', '@updatedAt'].includes(attr.decl.$refText), ) .forEach((attr) => { + // attach the new attribute to the original field + const cloned = { ...attr, $container: originalField } as typeof attr; + originalField.attributes.push(cloned); getModelChanges(originalDataModel.name).addedAttributes.push( colors.green(`+ ${attr.decl.$refText} to field: ${originalDataModel.name}.${f.name}`), ); diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index fda1f54fe..94186c35b 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -82,8 +82,10 @@ export function syncEnums({ factory.update({ comments: [...d.comments] }); } // Copy enum-level attributes (@@map, @@schema, etc.) 
+ // Re-parent attributes to the new factory node if (d.attributes?.length) { - factory.update({ attributes: [...d.attributes] }); + const reparentedAttrs = d.attributes.map((attr) => ({ ...attr, $container: factory.node })); + factory.update({ attributes: reparentedAttrs }); } // Copy fields with their attributes and comments d.fields.forEach((v) => { @@ -96,8 +98,10 @@ export function syncEnums({ }); } // Copy field-level attributes (@map, etc.) + // Re-parent attributes to the new builder node if (v.attributes?.length) { - builder.update({ attributes: [...v.attributes] }); + const reparentedAttrs = v.attributes.map((attr) => ({ ...attr, $container: builder.node })); + builder.update({ attributes: reparentedAttrs }); } return builder; }); @@ -213,6 +217,9 @@ export function syncTable({ } table.columns.forEach((column) => { if (column.foreign_key_table) { + // Check if this FK column is the table's single-column primary key + // If so, it should be treated as a one-to-one relation + const isSingleColumnPk = !multiPk && column.pk; relations.push({ schema: table.schema, table: table.name, @@ -226,7 +233,7 @@ export function syncTable({ schema: column.foreign_key_schema, table: column.foreign_key_table, column: column.foreign_key_column, - type: column.unique ? 'one' : 'many', + type: column.unique || isSingleColumnPk ? 'one' : 'many', }, }); } diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index d8bdc33b8..e2443c61a 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -228,24 +228,34 @@ export const mysql: IntrospectionProvider = { return (ab) => ab.NumberLiteral.setValue(val); case 'Float': - if (/^-?\d+\.\d+$/.test(val)) { - const numVal = parseFloat(val); - return (ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? 
numVal.toFixed(1) : String(numVal)); - } - if (/^-?\d+$/.test(val)) { - return (ab) => ab.NumberLiteral.setValue(val + '.0'); - } - return (ab) => ab.NumberLiteral.setValue(val); + // Integer strings: append '.0' + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val + '.0'); + } + // Decimal strings: preserve exactly to avoid parseFloat precision loss + if (/^-?\d+\.\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val); + } + // Other values: return unchanged + return (ab) => ab.NumberLiteral.setValue(val); case 'Decimal': - if (/^-?\d+\.\d+$/.test(val)) { - const numVal = parseFloat(val); - return (ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? numVal.toFixed(2) : String(numVal)); - } - if (/^-?\d+$/.test(val)) { - return (ab) => ab.NumberLiteral.setValue(val + '.00'); - } - return (ab) => ab.NumberLiteral.setValue(val); + // Integer strings: append '.00' + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val + '.00'); + } + // Decimal strings: normalize to minimum 2 decimal places, strip excess trailing zeros + if (/^-?\d+\.\d+$/.test(val)) { + const [integerPart, fractionalPart] = val.split('.'); + // Strip trailing zeros, but keep at least 2 digits + let normalized = fractionalPart!.replace(/0+$/, ''); + if (normalized.length < 2) { + normalized = normalized.padEnd(2, '0'); + } + return (ab) => ab.NumberLiteral.setValue(`${integerPart}.${normalized}`); + } + // Other values: return unchanged + return (ab) => ab.NumberLiteral.setValue(val); case 'Boolean': return (ab) => ab.BooleanLiteral.setValue(val.toLowerCase() === 'true' || val === '1' || val === "b'1'"); diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 1c035324f..d93fd9245 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -162,13 +162,15 @@ export const postgresql: 
IntrospectionProvider = { return typeCastingConvert({defaultValue,enums,val,services}); } - if (/^-?\d+\.\d+$/.test(val)) { - const numVal = parseFloat(val); - return (ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? numVal.toFixed(1) : String(numVal)); - } + // Integer strings: append '.0' if (/^-?\d+$/.test(val)) { return (ab) => ab.NumberLiteral.setValue(val + '.0'); } + // Decimal strings: preserve exactly to avoid parseFloat precision loss + if (/^-?\d+\.\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val); + } + // Other values: return unchanged return (ab) => ab.NumberLiteral.setValue(val); case 'Decimal': @@ -176,13 +178,21 @@ export const postgresql: IntrospectionProvider = { return typeCastingConvert({defaultValue,enums,val,services}); } - if (/^-?\d+\.\d+$/.test(val)) { - const numVal = parseFloat(val); - return (ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? numVal.toFixed(2) : String(numVal)); - } + // Integer strings: append '.00' if (/^-?\d+$/.test(val)) { return (ab) => ab.NumberLiteral.setValue(val + '.00'); } + // Decimal strings: normalize to minimum 2 decimal places, strip excess trailing zeros + if (/^-?\d+\.\d+$/.test(val)) { + const [integerPart, fractionalPart] = val.split('.'); + // Strip trailing zeros, but keep at least 2 digits + let normalized = fractionalPart!.replace(/0+$/, ''); + if (normalized.length < 2) { + normalized = normalized.padEnd(2, '0'); + } + return (ab) => ab.NumberLiteral.setValue(`${integerPart}.${normalized}`); + } + // Other values: return unchanged return (ab) => ab.NumberLiteral.setValue(val); case 'Boolean': diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index b03bad307..044b287c0 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -329,23 +329,33 @@ export const sqlite: IntrospectionProvider = { return (ab) => 
ab.NumberLiteral.setValue(val); case 'Float': - if (/^-?\d+\.\d+$/.test(val)) { - const numVal = parseFloat(val); - return (ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? numVal.toFixed(1) : String(numVal)); - } + // Integer strings: append '.0' if (/^-?\d+$/.test(val)) { return (ab) => ab.NumberLiteral.setValue(val + '.0'); } + // Decimal strings: preserve exactly to avoid parseFloat precision loss + if (/^-?\d+\.\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val); + } + // Other values: return unchanged return (ab) => ab.NumberLiteral.setValue(val); case 'Decimal': - if (/^-?\d+\.\d+$/.test(val)) { - const numVal = parseFloat(val); - return (ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? numVal.toFixed(2) : String(numVal)); - } + // Integer strings: append '.00' if (/^-?\d+$/.test(val)) { return (ab) => ab.NumberLiteral.setValue(val + '.00'); } + // Decimal strings: normalize to minimum 2 decimal places, strip excess trailing zeros + if (/^-?\d+\.\d+$/.test(val)) { + const [integerPart, fractionalPart] = val.split('.'); + // Strip trailing zeros, but keep at least 2 digits + let normalized = fractionalPart!.replace(/0+$/, ''); + if (normalized.length < 2) { + normalized = normalized.padEnd(2, '0'); + } + return (ab) => ab.NumberLiteral.setValue(`${integerPart}.${normalized}`); + } + // Other values: return unchanged return (ab) => ab.NumberLiteral.setValue(val); case 'Boolean': diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index 85aca1261..30c757586 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -107,6 +107,31 @@ model Tag { expect(restoredSchema).toEqual(schema); }); + it('should restore one-to-one relation when FK is the single-column primary key', async () => { + const { workDir, schema } = await createFormattedProject( + `model Profile { + user User @relation(fields: [id], references: [id], onDelete: Cascade) + id Int @id 
@default(autoincrement()) + bio String? +} + +model User { + id Int @id @default(autoincrement()) + email String @unique + profile Profile? +}`, + ); + runCli('db push', workDir); + + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + + fs.writeFileSync(schemaFile, getDefaultPrelude()); + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + expect(restoredSchema).toEqual(schema); + }); + it('should restore schema with indexes and unique constraints', async () => { const { workDir, schema } = await createFormattedProject( `model User { @@ -155,6 +180,29 @@ model Tag { expect(restoredSchema).toEqual(schema); }); + it('should preserve Decimal and Float default value precision', async () => { + const { workDir, schema } = await createFormattedProject( + `model Product { + id Int @id @default(autoincrement()) + price Decimal @default(99.99) + discount Decimal @default(0.50) + taxRate Decimal @default(7.00) + weight Float @default(1.5) + rating Float @default(4.0) + temperature Float @default(98.6) +}`, + ); + runCli('db push', workDir); + + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + + fs.writeFileSync(schemaFile, getDefaultPrelude()); + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + expect(restoredSchema).toEqual(schema); + }); + }); describe('Pull with existing schema - preserve schema features', () => { diff --git a/packages/language/src/factory/declaration.ts b/packages/language/src/factory/declaration.ts index 0ec80fe9e..247d31f47 100644 --- a/packages/language/src/factory/declaration.ts +++ b/packages/language/src/factory/declaration.ts @@ -80,7 +80,7 @@ export class DataModelFactory extends AstFactory { } addAttribute(builder: (attr: DataModelAttributeFactory) => DataModelAttributeFactory) { - this.attributes.push(builder(new DataModelAttributeFactory())); + this.attributes.push(builder(new DataModelAttributeFactory()).setContainer(this.node)); 
this.update({ attributes: this.attributes, }); @@ -104,7 +104,7 @@ export class DataModelFactory extends AstFactory { } addField(builder: (field: DataFieldFactory) => DataFieldFactory) { - this.fields.push(builder(new DataFieldFactory())); + this.fields.push(builder(new DataFieldFactory()).setContainer(this.node)); this.update({ fields: this.fields, }); @@ -180,7 +180,7 @@ export class DataFieldFactory extends AstFactory { } setType(builder: (type: DataFieldTypeFactory) => DataFieldTypeFactory) { - this.type = builder(new DataFieldTypeFactory()); + this.type = builder(new DataFieldTypeFactory()).setContainer(this.node); this.update({ type: this.type, }); @@ -235,7 +235,7 @@ export class DataFieldTypeFactory extends AstFactory { } setUnsupported(builder: (a: UnsupportedFieldTypeFactory) => UnsupportedFieldTypeFactory) { - this.unsupported = builder(new UnsupportedFieldTypeFactory()); + this.unsupported = builder(new UnsupportedFieldTypeFactory()).setContainer(this.node); this.update({ unsupported: this.unsupported, }); @@ -264,14 +264,14 @@ export class ModelFactory extends AstFactory { super({ type: Model, node: { declarations: [], imports: [] } }); } addImport(builder: (b: ModelImportFactory) => ModelImportFactory) { - this.imports.push(builder(new ModelImportFactory())); + this.imports.push(builder(new ModelImportFactory()).setContainer(this.node)); this.update({ imports: this.imports, }); return this; } addDeclaration(builder: (b: DeclarationBuilder) => AstFactory) { - this.declarations.push(builder(DeclarationBuilder())); + this.declarations.push(builder(DeclarationBuilder()).setContainer(this.node)); this.update({ declarations: this.declarations, }); @@ -306,7 +306,7 @@ export class EnumFactory extends AstFactory { } addField(builder: (b: EnumFieldFactory) => EnumFieldFactory) { - this.fields.push(builder(new EnumFieldFactory())); + this.fields.push(builder(new EnumFieldFactory()).setContainer(this.node)); this.update({ fields: this.fields, }); @@ -314,7 
+314,7 @@ export class EnumFactory extends AstFactory { } addAttribute(builder: (b: DataModelAttributeFactory) => DataModelAttributeFactory) { - this.attributes.push(builder(new DataModelAttributeFactory())); + this.attributes.push(builder(new DataModelAttributeFactory()).setContainer(this.node)); this.update({ attributes: this.attributes, }); @@ -348,7 +348,7 @@ export class EnumFieldFactory extends AstFactory { } addAttribute(builder: (b: DataFieldAttributeFactory) => DataFieldAttributeFactory) { - this.attributes.push(builder(new DataFieldAttributeFactory())); + this.attributes.push(builder(new DataFieldAttributeFactory()).setContainer(this.node)); this.update({ attributes: this.attributes, }); From a3c5d62f2aa8afc9e795d28ef03464ed596e8a93 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Tue, 3 Feb 2026 23:35:46 +0100 Subject: [PATCH 71/83] fix: address PR comments --- packages/cli/src/actions/db.ts | 129 +++++++++++- packages/cli/src/actions/pull/index.ts | 7 +- .../cli/src/actions/pull/provider/mysql.ts | 7 +- .../src/actions/pull/provider/postgresql.ts | 122 +++++++++++- packages/cli/src/actions/pull/utils.ts | 2 +- packages/cli/test/db/pull.test.ts | 187 +++++++++++++++++- packages/language/src/document.ts | 2 +- packages/language/src/factory/attribute.ts | 2 +- packages/language/src/factory/declaration.ts | 10 +- packages/language/src/factory/expression.ts | 2 +- packages/language/src/factory/primitives.ts | 2 +- .../language/src/zmodel-code-generator.ts | 2 +- 12 files changed, 447 insertions(+), 27 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 03f335894..e06fbe01b 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -97,9 +97,6 @@ async function runPull(options: PullOptions) { const SUPPORTED_PROVIDERS = Object.keys(pullProviders) as DataSourceProviderType[]; const datasource = getDatasource(model); - if (!datasource) { - throw new CliError('No 
datasource found in the schema.'); - } if (!SUPPORTED_PROVIDERS.includes(datasource.provider)) { throw new CliError(`Unsupported datasource provider: ${datasource.provider}`); @@ -195,8 +192,10 @@ async function runPull(options: PullOptions) { type ModelChanges = { addedFields: string[]; deletedFields: string[]; + updatedFields: string[]; addedAttributes: string[]; deletedAttributes: string[]; + updatedAttributes: string[]; }; const modelChanges = new Map(); @@ -205,8 +204,10 @@ async function runPull(options: PullOptions) { modelChanges.set(modelName, { addedFields: [], deletedFields: [], + updatedFields: [], addedAttributes: [], deletedAttributes: [], + updatedAttributes: [], }); } return modelChanges.get(modelName)!; @@ -261,7 +262,12 @@ async function runPull(options: PullOptions) { const ref = declarations.find( (d) => getDbName(d.node as any) === getDbName(f.type.reference!.ref as any), )?.node; - if (ref) (f.type.reference.ref as any) = ref; + if (ref && f.type.reference) { + (f.type.reference.ref as any) = ref; + // Keep the textual reference in sync with the semantic reference + (f.type.reference as any).$refText = + (ref as any).name ?? 
(f.type.reference as any).$refText; + } } }); return; @@ -329,6 +335,103 @@ async function runPull(options: PullOptions) { } const originalField = originalFields.at(0); + // Update existing field if type, optionality, or array flag changed + if (originalField && f.$type === 'DataField' && originalField.$type === 'DataField') { + const newType = f.type; + const oldType = originalField.type; + const fieldUpdates: string[] = []; + + // Check and update builtin type (e.g., String -> Int) + // Skip if old type is an Enum reference and provider doesn't support native enums + const isOldTypeEnumWithoutNativeSupport = + oldType.reference?.ref?.$type === 'Enum' && !provider.isSupportedFeature('NativeEnum'); + if (newType.type && oldType.type !== newType.type && !isOldTypeEnumWithoutNativeSupport) { + fieldUpdates.push(`type: ${oldType.type} -> ${newType.type}`); + (oldType as any).type = newType.type; + } + + // Check and update type reference (e.g., User -> Profile) + if (newType.reference?.ref && oldType.reference?.ref) { + const newRefName = getDbName(newType.reference.ref); + const oldRefName = getDbName(oldType.reference.ref); + if (newRefName !== oldRefName) { + fieldUpdates.push(`reference: ${oldType.reference.$refText} -> ${newType.reference.$refText}`); + (oldType.reference as any).ref = newType.reference.ref; + (oldType.reference as any).$refText = newType.reference.$refText; + } + } else if (newType.reference?.ref && !oldType.reference) { + // Changed from builtin to reference type + fieldUpdates.push(`type: ${oldType.type} -> ${newType.reference.$refText}`); + (oldType as any).reference = newType.reference; + (oldType as any).type = undefined; + } else if (!newType.reference && oldType.reference?.ref && newType.type) { + // Changed from reference to builtin type + // Skip if old type is an Enum and provider doesn't support native enums (e.g., SQLite stores enums as strings) + const isEnumWithoutNativeSupport = + oldType.reference.ref.$type === 'Enum' && 
!provider.isSupportedFeature('NativeEnum'); + if (!isEnumWithoutNativeSupport) { + fieldUpdates.push(`type: ${oldType.reference.$refText} -> ${newType.type}`); + (oldType as any).type = newType.type; + (oldType as any).reference = undefined; + } + } + + // Check and update optionality (e.g., String -> String?) + if (!!newType.optional !== !!oldType.optional) { + fieldUpdates.push(`optional: ${!!oldType.optional} -> ${!!newType.optional}`); + (oldType as any).optional = newType.optional; + } + + // Check and update array flag (e.g., String -> String[]) + if (!!newType.array !== !!oldType.array) { + fieldUpdates.push(`array: ${!!oldType.array} -> ${!!newType.array}`); + (oldType as any).array = newType.array; + } + + if (fieldUpdates.length > 0) { + getModelChanges(originalDataModel.name).updatedFields.push( + colors.yellow(`~ ${originalField.name} (${fieldUpdates.join(', ')})`), + ); + } + + // Update @default attribute arguments if changed + const newDefaultAttr = f.attributes.find((a) => a.decl.$refText === '@default'); + const oldDefaultAttr = originalField.attributes.find((a) => a.decl.$refText === '@default'); + if (newDefaultAttr && oldDefaultAttr) { + // Compare attribute arguments by serializing them (avoid circular refs with $type fallback) + const serializeArgs = (args: any[]) => + args.map((arg) => { + if (arg.value?.$type === 'StringLiteral') return `"${arg.value.value}"`; + if (arg.value?.$type === 'NumberLiteral') return String(arg.value.value); + if (arg.value?.$type === 'BooleanLiteral') return String(arg.value.value); + if (arg.value?.$type === 'InvocationExpr') return arg.value.function?.$refText ?? ''; + if (arg.value?.$type === 'ReferenceExpr') return arg.value.target?.$refText ?? ''; + if (arg.value?.$type === 'ArrayExpr') { + return `[${(arg.value.items ?? []).map((item: any) => { + if (item.$type === 'ReferenceExpr') return item.target?.$refText ?? ''; + return item.$type ?? 
'unknown'; + }).join(',')}]`; + } + // Fallback: use $type to avoid circular reference issues + return arg.value?.$type ?? 'unknown'; + }).join(','); + + const newArgsStr = serializeArgs(newDefaultAttr.args ?? []); + const oldArgsStr = serializeArgs(oldDefaultAttr.args ?? []); + + if (newArgsStr !== oldArgsStr) { + // Replace old @default arguments with new ones + (oldDefaultAttr as any).args = newDefaultAttr.args.map((arg) => ({ + ...arg, + $container: oldDefaultAttr, + })); + getModelChanges(originalDataModel.name).updatedAttributes.push( + colors.yellow(`~ @default on ${originalDataModel.name}.${originalField.name}`), + ); + } + } + } + if (!originalField) { getModelChanges(originalDataModel.name).addedFields.push(colors.green(`+ ${f.name}`)); (f as any).$container = originalDataModel; @@ -452,8 +555,10 @@ async function runPull(options: PullOptions) { const hasChanges = changes.addedFields.length > 0 || changes.deletedFields.length > 0 || + changes.updatedFields.length > 0 || changes.addedAttributes.length > 0 || - changes.deletedAttributes.length > 0; + changes.deletedAttributes.length > 0 || + changes.updatedAttributes.length > 0; if (hasChanges) { console.log(colors.cyan(` ${modelName}:`)); @@ -472,6 +577,13 @@ async function runPull(options: PullOptions) { }); } + if (changes.updatedFields.length > 0) { + console.log(colors.gray(' Updated Fields:')); + changes.updatedFields.forEach((msg) => { + console.log(` ${msg}`); + }); + } + if (changes.addedAttributes.length > 0) { console.log(colors.gray(' Added Attributes:')); changes.addedAttributes.forEach((msg) => { @@ -485,6 +597,13 @@ async function runPull(options: PullOptions) { console.log(` ${msg}`); }); } + + if (changes.updatedAttributes.length > 0) { + console.log(colors.gray(' Updated Attributes:')); + changes.updatedAttributes.forEach((msg) => { + console.log(` ${msg}`); + }); + } } }); } diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 
94186c35b..ac96b359f 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -246,7 +246,8 @@ export function syncTable({ builder.setName(name); builder.setType((typeBuilder) => { typeBuilder.setArray(builtinType.isArray); - typeBuilder.setOptional(column.nullable); + // Array fields cannot be optional (Prisma/ZenStack limitation) + typeBuilder.setOptional(builtinType.isArray ? false : column.nullable); if (column.datatype === 'enum') { const ref = model.declarations.find((d) => isEnum(d) && getDbName(d) === column.datatype_name) as @@ -349,7 +350,7 @@ export function syncTable({ table.indexes.some((i) => i.unique); if (!hasUniqueConstraint) { modelFactory.addAttribute((a) => a.setDecl(getAttributeRef('@@ignore', services))); - modelFactory.comments.push( + modelFactory.addComment( '/// The underlying table does not contain a valid unique identifier and can therefore currently not be handled by Zenstack Client.', ); } @@ -542,7 +543,7 @@ export function syncRelation({ return ab; }); - sourceModel.fields.splice(sourceFieldId, 0, sourceFieldFactory.node); // Remove the original scalar foreign key field + sourceModel.fields.splice(sourceFieldId, 0, sourceFieldFactory.node); // Insert the relation field before the FK scalar fie const oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? '_' : ''; const { name: oppositeFieldName } = resolveNameCasing( diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index e2443c61a..f59d3790d 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -328,8 +328,9 @@ export const mysql: IntrospectionProvider = { defaultDatabaseType.precision !== (length || precision))) ) { const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); - if (length || precision) { - dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(length! 
|| precision!)); + const sizeValue = length ?? precision; + if (sizeValue !== undefined && sizeValue !== null) { + dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(sizeValue)); } factories.push(dbAttrFactory); } @@ -446,7 +447,7 @@ LEFT JOIN INFORMATION_SCHEMA.VIEWS v ON t.TABLE_SCHEMA = v.TABLE_SCHEMA AND t.TABLE_NAME = v.TABLE_NAME WHERE t.TABLE_SCHEMA = '${databaseName}' AND t.TABLE_TYPE IN ('BASE TABLE', 'VIEW') - AND t.TABLE_NAME NOT LIKE '_prisma_migrations' + AND t.TABLE_NAME <> '_prisma_migrations' ORDER BY t.TABLE_NAME; `; } diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index d93fd9245..00bf97c82 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -6,6 +6,108 @@ import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, Introspec import type { ZModelServices } from '@zenstackhq/language'; import { CliError } from '../../../cli-error'; +/** + * Maps PostgreSQL internal type names to their standard SQL names for comparison. + * This is used to normalize type names when checking against default database types. + */ +const pgTypnameToStandard: Record = { + int2: 'smallint', + int4: 'integer', + int8: 'bigint', + float4: 'real', + float8: 'double precision', + bool: 'boolean', + bpchar: 'character', + numeric: 'decimal', +}; + +/** + * Standard bit widths for integer/float types that shouldn't be added as precision arguments. + * PostgreSQL returns these as precision values, but they're implicit for the type. + */ +const standardTypePrecisions: Record = { + int2: 16, + smallint: 16, + int4: 32, + integer: 32, + int8: 64, + bigint: 64, + float4: 24, + real: 24, + float8: 53, + 'double precision': 53, +}; + +/** + * Maps PostgreSQL typnames (from pg_type.typname) to Prisma native type attribute names. 
+ * PostgreSQL introspection returns internal type names like 'int2', 'int4', 'float8', 'bpchar', + * but Prisma/ZenStack attributes are named @db.SmallInt, @db.Integer, @db.DoublePrecision, @db.Char, etc. + */ +const pgTypnameToPrismaNativeType: Record = { + // integers + int2: 'SmallInt', + smallint: 'SmallInt', + int4: 'Integer', + integer: 'Integer', + int8: 'BigInt', + bigint: 'BigInt', + + // decimals and floats + numeric: 'Decimal', + decimal: 'Decimal', + float4: 'Real', + real: 'Real', + float8: 'DoublePrecision', + 'double precision': 'DoublePrecision', + + // boolean + bool: 'Boolean', + boolean: 'Boolean', + + // strings + text: 'Text', + varchar: 'VarChar', + 'character varying': 'VarChar', + bpchar: 'Char', + character: 'Char', + + // uuid + uuid: 'Uuid', + + // dates/times + date: 'Date', + time: 'Time', + timetz: 'Timetz', + timestamp: 'Timestamp', + timestamptz: 'Timestamptz', + + // binary + bytea: 'ByteA', + + // json + json: 'Json', + jsonb: 'JsonB', + + // xml + xml: 'Xml', + + // network types + inet: 'Inet', + + // bit strings + bit: 'Bit', + varbit: 'VarBit', + + // oid + oid: 'Oid', + + // money + money: 'Money', + + // citext extension + citext: 'Citext', +}; + export const postgresql: IntrospectionProvider = { isSupportedFeature(feature) { const supportedFeatures = ['Schema', 'NativeEnum']; @@ -57,6 +159,7 @@ export const postgresql: IntrospectionProvider = { // dates/times case 'date': case 'time': + case 'timetz': case 'timestamp': case 'timestamptz': return { type: 'DateTime', isArray }; @@ -228,22 +331,35 @@ export const postgresql: IntrospectionProvider = { factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services))); } + // Map PostgreSQL typname to Prisma native type attribute name + // PostgreSQL returns typnames like 'int2', 'float8', 'bpchar', but Prisma attributes + // are named @db.SmallInt, @db.DoublePrecision, @db.Char, etc. 
+ const nativeTypeName = pgTypnameToPrismaNativeType[datatype.toLowerCase()] ?? datatype; + // Add @db.* attribute if the datatype differs from the default const dbAttr = services.shared.workspace.IndexManager.allElements('Attribute').find( - (d) => d.name.toLowerCase() === `@db.${datatype.toLowerCase()}`, + (d) => d.name.toLowerCase() === `@db.${nativeTypeName.toLowerCase()}`, )?.node as Attribute | undefined; const defaultDatabaseType = this.getDefaultDatabaseType(fieldType as BuiltinType); + // Normalize datatype for comparison (e.g., 'int4' -> 'integer') + const normalizedDatatype = pgTypnameToStandard[datatype.toLowerCase()] ?? datatype.toLowerCase(); + + // Check if the precision is the standard bit width for this type (shouldn't be added) + const standardPrecision = standardTypePrecisions[datatype.toLowerCase()]; + const isStandardPrecision = standardPrecision !== undefined && precision === standardPrecision; + if ( dbAttr && defaultDatabaseType && - (defaultDatabaseType.type !== datatype || + (defaultDatabaseType.type !== normalizedDatatype || (defaultDatabaseType.precision && defaultDatabaseType.precision !== (length || precision))) ) { const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); - if (length || precision) { + // Only add length/precision if it's meaningful (not the standard bit width for the type) + if ((length || precision) && !isStandardPrecision) { dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(length! 
|| precision!)); } factories.push(dbAttrFactory); diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index 44355a595..b3f6c4880 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -29,7 +29,7 @@ export function getAttribute(model: Model, attrName: string) { export function getDatasource(model: Model) { const datasource = model.declarations.find((d) => d.$type === 'DataSource'); if (!datasource) { - throw new CliError('The schema\'s "datasource" must have a "url" field to use this command.'); + throw new CliError('No datasource declaration found in the schema.'); } const urlField = datasource.fields.find((f) => f.name === 'url'); diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index 30c757586..367ba1348 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -2,15 +2,10 @@ import fs from 'node:fs'; import path from 'node:path'; import { describe, expect, it } from 'vitest'; import { createFormattedProject, createProject, getDefaultPrelude, runCli } from '../utils'; -import { loadSchemaDocument } from '../../src/actions/action-utils'; -import { ZModelCodeGenerator, formatDocument } from '@zenstackhq/language'; +import { formatDocument } from '@zenstackhq/language'; import { getTestDbProvider } from '@zenstackhq/testtools'; const getSchema = (workDir: string) => fs.readFileSync(path.join(workDir, 'zenstack/schema.zmodel')).toString(); -const generator = new ZModelCodeGenerator({ - quote: 'double', - indent: 4, -}); describe('DB pull - Common features (all providers)', () => { describe('Pull from zero - restore complete schema from database', () => { @@ -365,6 +360,131 @@ model Post { expect(pulledPostSchema).toEqual(postModel); }); }); + + describe('Pull should update existing field definitions when database changes', () => { + it('should update field type when database column type changes', async () => { + // 
Step 1: Create initial schema with String field + const { workDir } = await createFormattedProject( + `model User { + id Int @id @default(autoincrement()) + email String @unique + age String +}`, + ); + runCli('db push', workDir); + + // Step 2: Modify schema to change age from String to Int + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + const updatedSchema = await formatDocument(`${getDefaultPrelude()} + +model User { + id Int @id @default(autoincrement()) + email String @unique + age Int +}`); + fs.writeFileSync(schemaFile, updatedSchema); + runCli('db push', workDir); + + // Step 3: Revert schema back to original (with String type) + const originalSchema = await formatDocument(`${getDefaultPrelude()} + +model User { + id Int @id @default(autoincrement()) + email String @unique + age String +}`); + fs.writeFileSync(schemaFile, originalSchema); + + // Step 4: Pull from database - should detect that age is now Int + runCli('db pull --indent 4', workDir); + + // Step 5: Verify that pulled schema has Int type (matching database) + const pulledSchema = getSchema(workDir); + expect(pulledSchema).toEqual(updatedSchema); + }); + + it('should update field optionality when database column nullability changes', async () => { + // Step 1: Create initial schema with required field + const { workDir } = await createFormattedProject( + `model User { + id Int @id @default(autoincrement()) + email String @unique + name String +}`, + ); + runCli('db push', workDir); + + // Step 2: Modify schema to make name optional + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + const updatedSchema = await formatDocument(`${getDefaultPrelude()} + +model User { + id Int @id @default(autoincrement()) + email String @unique + name String? 
+}`); + fs.writeFileSync(schemaFile, updatedSchema); + runCli('db push', workDir); + + // Step 3: Revert schema back to original (with required name) + const originalSchema = await formatDocument(`${getDefaultPrelude()} + +model User { + id Int @id @default(autoincrement()) + email String @unique + name String +}`); + fs.writeFileSync(schemaFile, originalSchema); + + // Step 4: Pull from database - should detect that name is now optional + runCli('db pull --indent 4', workDir); + + // Step 5: Verify that pulled schema has optional name (matching database) + const pulledSchema = getSchema(workDir); + expect(pulledSchema).toEqual(updatedSchema); + }); + + it('should update default value when database default changes', async () => { + // Step 1: Create initial schema with default value + const { workDir } = await createFormattedProject( + `model User { + id Int @id @default(autoincrement()) + email String @unique + status String @default('active') +}`, + ); + runCli('db push', workDir); + + // Step 2: Modify schema to change default value + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + const updatedSchema = await formatDocument(`${getDefaultPrelude()} + +model User { + id Int @id @default(autoincrement()) + email String @unique + status String @default('pending') +}`); + fs.writeFileSync(schemaFile, updatedSchema); + runCli('db push', workDir); + + // Step 3: Revert schema back to original default + const originalSchema = await formatDocument(`${getDefaultPrelude()} + +model User { + id Int @id @default(autoincrement()) + email String @unique + status String @default('active') +}`); + fs.writeFileSync(schemaFile, originalSchema); + + // Step 4: Pull from database - should detect that default changed + runCli('db pull --indent 4', workDir); + + // Step 5: Verify that pulled schema has updated default (matching database) + const pulledSchema = getSchema(workDir); + expect(pulledSchema).toEqual(updatedSchema); + }); + }); }); describe('DB pull - 
PostgreSQL specific features', () => { @@ -481,6 +601,61 @@ enum UserStatus { expect(getSchema(workDir)).toEqual(schema); }); + + it('should restore native type attributes from PostgreSQL typnames', async ({ skip }) => { + const provider = getTestDbProvider(); + if (provider !== 'postgresql') { + skip(); + return; + } + // PostgreSQL introspection returns typnames like 'int2', 'float8', 'bpchar', + // but Prisma/ZenStack attributes are named @db.SmallInt, @db.DoublePrecision, @db.Char, etc. + // This test verifies the mapping works correctly. + // Note: Default native types (jsonb for Json, bytea for Bytes) are not added when pulling from zero + // because they match the default database type for that field type. + const { workDir } = await createFormattedProject( + `model TypeTest { + id Int @id @default(autoincrement()) + smallNumber Int @db.SmallInt() + realNumber Float @db.Real() + doubleNum Float @db.DoublePrecision() + fixedChar String @db.Char(10) + uuid String @db.Uuid() + jsonData Json @db.Json() + jsonbData Json @db.JsonB() + binaryData Bytes @db.ByteA() +}`, + { provider: 'postgresql' }, + ); + runCli('db push', workDir); + + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + + // Remove schema content to simulate restoration from zero + fs.writeFileSync(schemaFile, getDefaultPrelude({ provider: 'postgresql' })); + + // Pull should restore non-default native type attributes + // Default types (jsonb for Json, bytea for Bytes) are not added + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + // Verify key native type mappings are restored correctly: + // - @db.SmallInt for int2 (non-default for Int which defaults to integer/int4) + // - @db.Real for float4 (non-default for Float which defaults to double precision/float8) + // - @db.Char(10) for bpchar with length (non-default for String which defaults to text) + // - @db.Uuid for uuid (non-default for String which defaults to text) + // - @db.Json for 
json (non-default for Json which defaults to jsonb) + expect(restoredSchema).toContain('@db.SmallInt'); + expect(restoredSchema).toContain('@db.Real'); + expect(restoredSchema).toContain('@db.Char(10)'); + expect(restoredSchema).toContain('@db.Uuid'); + expect(restoredSchema).toContain('@db.Json'); + // Default types should NOT be added when pulling from zero + expect(restoredSchema).not.toContain('@db.Integer'); // integer is default for Int + expect(restoredSchema).not.toContain('@db.DoublePrecision'); // double precision is default for Float + expect(restoredSchema).not.toContain('@db.JsonB'); // jsonb is default for Json + expect(restoredSchema).not.toContain('@db.ByteA'); // bytea is default for Bytes + }); }); describe('DB pull - SQL specific features', () => { diff --git a/packages/language/src/document.ts b/packages/language/src/document.ts index a7a60e9ce..7426c606d 100644 --- a/packages/language/src/document.ts +++ b/packages/language/src/document.ts @@ -123,7 +123,7 @@ export async function loadDocument( const model = document.parseResult.value as Model; - if (mergeImports) { + if (mergeImports) { // merge all declarations into the main document const imported = mergeImportsDeclarations(langiumDocuments, model); diff --git a/packages/language/src/factory/attribute.ts b/packages/language/src/factory/attribute.ts index 138d41c8f..b59e35ef1 100644 --- a/packages/language/src/factory/attribute.ts +++ b/packages/language/src/factory/attribute.ts @@ -1,4 +1,4 @@ -import { AstFactory } from '.'; +import { AstFactory } from './ast-factory'; import { Attribute, AttributeArg, diff --git a/packages/language/src/factory/declaration.ts b/packages/language/src/factory/declaration.ts index 247d31f47..a6f772a20 100644 --- a/packages/language/src/factory/declaration.ts +++ b/packages/language/src/factory/declaration.ts @@ -1,4 +1,4 @@ -import { AstFactory } from '.'; +import { AstFactory } from './ast-factory'; import { AbstractDeclaration, type Reference } from 
'../ast'; import { type BuiltinType, @@ -103,6 +103,14 @@ export class DataModelFactory extends AstFactory { return this; } + addComment(comment: string) { + this.comments.push(comment); + this.update({ + comments: this.comments, + }); + return this; + } + addField(builder: (field: DataFieldFactory) => DataFieldFactory) { this.fields.push(builder(new DataFieldFactory()).setContainer(this.node)); this.update({ diff --git a/packages/language/src/factory/expression.ts b/packages/language/src/factory/expression.ts index f84b7497c..19fe16af1 100644 --- a/packages/language/src/factory/expression.ts +++ b/packages/language/src/factory/expression.ts @@ -1,5 +1,5 @@ import type { Reference } from 'langium'; -import { AstFactory } from '.'; +import { AstFactory } from './ast-factory'; import { Argument, ArrayExpr, diff --git a/packages/language/src/factory/primitives.ts b/packages/language/src/factory/primitives.ts index 1db7e0515..e97310d54 100644 --- a/packages/language/src/factory/primitives.ts +++ b/packages/language/src/factory/primitives.ts @@ -1,4 +1,4 @@ -import { AstFactory } from '.'; +import { AstFactory } from './ast-factory'; import { BooleanLiteral, NullExpr, NumberLiteral, StringLiteral, ThisExpr } from '../ast'; export class ThisExprFactory extends AstFactory { diff --git a/packages/language/src/zmodel-code-generator.ts b/packages/language/src/zmodel-code-generator.ts index 50759fc81..0b238d60d 100644 --- a/packages/language/src/zmodel-code-generator.ts +++ b/packages/language/src/zmodel-code-generator.ts @@ -149,7 +149,7 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')} @gen(ConfigField) private _generateConfigField(ast: ConfigField) { - return `${ast.name} = ${this.generate(ast.value)}`; + return `${ast.name} = ${this.generate(ast.value)}`; } @gen(ConfigArrayExpr) From cea210d5d9e7b2653ab4632f9e22f7d3bcae6ab3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 6 Feb 2026 14:49:44 +0100 Subject: [PATCH 72/83] 
fix(cli): improve file path resolution in pull action --- packages/cli/src/actions/db.ts | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index e06fbe01b..ec3b28c36 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -625,7 +625,6 @@ async function runPull(options: PullOptions) { // Preserve the directory structure relative to the schema file location (options.schema base). const baseDir = path.dirname(path.resolve(schemaFile)); - const baseDirUrlPath = new URL(`file://${baseDir}`).pathname; for (const { uri, @@ -634,14 +633,8 @@ async function runPull(options: PullOptions) { const zmodelSchema = await formatDocument(generator.generate(documentModel)); // Map input file path -> output file path under `--out` - let relPath = uri.path; - if (relPath.toLowerCase().startsWith(baseDirUrlPath.toLowerCase())) { - relPath = relPath.slice(baseDirUrlPath.length); - } - relPath = relPath.replace(/^\/+/, ''); - - // Ensure consistent platform-specific separators for filesystem writes - const targetFile = path.join(outPath!, ...relPath.split('/')); + const relPath = path.relative(baseDir, uri.fsPath); + const targetFile = path.join(outPath!, relPath); fs.mkdirSync(path.dirname(targetFile), { recursive: true }); console.log(colors.blue(`Writing to ${targetFile}`)); From b68e9ab137791d16fb44aa4128440313b1db9083 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 6 Feb 2026 14:49:46 +0100 Subject: [PATCH 73/83] refactor(cli): extract and enhance name casing logic --- packages/cli/src/actions/pull/casing.ts | 43 ++++++++ packages/cli/src/index.ts | 4 +- packages/cli/test/casing.test.ts | 130 ++++++++++++++++++++++++ 3 files changed, 175 insertions(+), 2 deletions(-) create mode 100644 packages/cli/src/actions/pull/casing.ts create mode 100644 packages/cli/test/casing.test.ts diff --git a/packages/cli/src/actions/pull/casing.ts 
b/packages/cli/src/actions/pull/casing.ts new file mode 100644 index 000000000..5e0846bc9 --- /dev/null +++ b/packages/cli/src/actions/pull/casing.ts @@ -0,0 +1,43 @@ +export function resolveNameCasing(casing: 'pascal' | 'camel' | 'snake' | 'none', originalName: string) { + let name = originalName; + const fieldPrefix = /[0-9]/g.test(name.charAt(0)) ? '_' : ''; + + switch (casing) { + case 'pascal': + name = toPascalCase(originalName); + break; + case 'camel': + name = toCamelCase(originalName); + break; + case 'snake': + name = toSnakeCase(originalName); + break; + } + + return { + modified: name !== originalName || fieldPrefix !== '', + name: `${fieldPrefix}${name}`, + }; +} + +function isAllUpperCase(str: string): boolean { + return str === str.toUpperCase(); +} + +export function toPascalCase(str: string): string { + if (isAllUpperCase(str)) return str; + return str.replace(/[_\- ]+(\w)/g, (_, c) => c.toUpperCase()).replace(/^\w/, (c) => c.toUpperCase()); +} + +export function toCamelCase(str: string): string { + if (isAllUpperCase(str)) return str; + return str.replace(/[_\- ]+(\w)/g, (_, c) => c.toUpperCase()).replace(/^\w/, (c) => c.toLowerCase()); +} + +export function toSnakeCase(str: string): string { + if (isAllUpperCase(str)) return str; + return str + .replace(/[- ]+/g, '_') + .replace(/([a-z0-9])([A-Z])/g, '$1_$2') + .toLowerCase(); +} diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index 7d4f62d32..6e1daafec 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -156,12 +156,12 @@ function createProgram() { ) .addOption( new Option('--model-casing ', 'set the casing of generated models').default( - 'none', + 'pascal', ), ) .addOption( new Option('--field-casing ', 'set the casing of generated fields').default( - 'none', + 'camel', ), ) .addOption( diff --git a/packages/cli/test/casing.test.ts b/packages/cli/test/casing.test.ts new file mode 100644 index 000000000..60b9d1c88 --- /dev/null +++ 
b/packages/cli/test/casing.test.ts @@ -0,0 +1,130 @@ +import { describe, expect, it } from 'vitest'; +import { resolveNameCasing, toPascalCase, toCamelCase, toSnakeCase } from '../src/actions/pull/casing'; + +describe('toPascalCase', () => { + it('converts snake_case', () => { + expect(toPascalCase('user_status')).toBe('UserStatus'); + expect(toPascalCase('first_name')).toBe('FirstName'); + }); + + it('converts kebab-case', () => { + expect(toPascalCase('user-status')).toBe('UserStatus'); + }); + + it('capitalizes first char of lowercase', () => { + expect(toPascalCase('user')).toBe('User'); + }); + + it('preserves already PascalCase', () => { + expect(toPascalCase('UserStatus')).toBe('UserStatus'); + }); + + it('preserves all-uppercase strings', () => { + expect(toPascalCase('ACTIVE')).toBe('ACTIVE'); + expect(toPascalCase('USER')).toBe('USER'); + expect(toPascalCase('MODERATOR')).toBe('MODERATOR'); + expect(toPascalCase('SET_NULL')).toBe('SET_NULL'); + expect(toPascalCase('NO_ACTION')).toBe('NO_ACTION'); + }); + + it('converts mixed snake_case with uppercase', () => { + expect(toPascalCase('User_status')).toBe('UserStatus'); + }); +}); + +describe('toCamelCase', () => { + it('converts snake_case', () => { + expect(toCamelCase('user_status')).toBe('userStatus'); + expect(toCamelCase('first_name')).toBe('firstName'); + }); + + it('converts kebab-case', () => { + expect(toCamelCase('user-status')).toBe('userStatus'); + }); + + it('lowercases first char of PascalCase', () => { + expect(toCamelCase('User')).toBe('user'); + expect(toCamelCase('Post')).toBe('post'); + }); + + it('preserves already camelCase', () => { + expect(toCamelCase('userStatus')).toBe('userStatus'); + }); + + it('preserves all-uppercase strings', () => { + expect(toCamelCase('ACTIVE')).toBe('ACTIVE'); + expect(toCamelCase('INACTIVE')).toBe('INACTIVE'); + expect(toCamelCase('SUSPENDED')).toBe('SUSPENDED'); + expect(toCamelCase('USER')).toBe('USER'); + 
expect(toCamelCase('SET_NULL')).toBe('SET_NULL'); + expect(toCamelCase('NO_ACTION')).toBe('NO_ACTION'); + }); +}); + +describe('toSnakeCase', () => { + it('converts camelCase', () => { + expect(toSnakeCase('userStatus')).toBe('user_status'); + expect(toSnakeCase('firstName')).toBe('first_name'); + }); + + it('converts PascalCase', () => { + expect(toSnakeCase('UserStatus')).toBe('user_status'); + }); + + it('converts kebab-case', () => { + expect(toSnakeCase('user-status')).toBe('user_status'); + }); + + it('preserves already snake_case', () => { + expect(toSnakeCase('user_status')).toBe('user_status'); + }); + + it('preserves all-uppercase strings', () => { + expect(toSnakeCase('ACTIVE')).toBe('ACTIVE'); + expect(toSnakeCase('INACTIVE')).toBe('INACTIVE'); + expect(toSnakeCase('SUSPENDED')).toBe('SUSPENDED'); + expect(toSnakeCase('SET_NULL')).toBe('SET_NULL'); + expect(toSnakeCase('NO_ACTION')).toBe('NO_ACTION'); + }); +}); + +describe('resolveNameCasing', () => { + it('applies pascal casing', () => { + expect(resolveNameCasing('pascal', 'user_status')).toEqual({ modified: true, name: 'UserStatus' }); + expect(resolveNameCasing('pascal', 'User')).toEqual({ modified: false, name: 'User' }); + }); + + it('applies camel casing', () => { + expect(resolveNameCasing('camel', 'User')).toEqual({ modified: true, name: 'user' }); + expect(resolveNameCasing('camel', 'first_name')).toEqual({ modified: true, name: 'firstName' }); + }); + + it('applies snake casing', () => { + expect(resolveNameCasing('snake', 'UserStatus')).toEqual({ modified: true, name: 'user_status' }); + expect(resolveNameCasing('snake', 'user_status')).toEqual({ modified: false, name: 'user_status' }); + }); + + it('preserves name with none casing', () => { + expect(resolveNameCasing('none', 'User_status')).toEqual({ modified: false, name: 'User_status' }); + expect(resolveNameCasing('none', 'ACTIVE')).toEqual({ modified: false, name: 'ACTIVE' }); + }); + + it('preserves all-uppercase enum values across 
all casings', () => { + expect(resolveNameCasing('pascal', 'ACTIVE')).toEqual({ modified: false, name: 'ACTIVE' }); + expect(resolveNameCasing('camel', 'ACTIVE')).toEqual({ modified: false, name: 'ACTIVE' }); + expect(resolveNameCasing('snake', 'ACTIVE')).toEqual({ modified: false, name: 'ACTIVE' }); + expect(resolveNameCasing('none', 'ACTIVE')).toEqual({ modified: false, name: 'ACTIVE' }); + }); + + it('preserves all-uppercase enum values with underscores across all casings', () => { + expect(resolveNameCasing('pascal', 'SET_NULL')).toEqual({ modified: false, name: 'SET_NULL' }); + expect(resolveNameCasing('camel', 'SET_NULL')).toEqual({ modified: false, name: 'SET_NULL' }); + expect(resolveNameCasing('snake', 'SET_NULL')).toEqual({ modified: false, name: 'SET_NULL' }); + expect(resolveNameCasing('none', 'SET_NULL')).toEqual({ modified: false, name: 'SET_NULL' }); + }); + + it('prefixes names starting with a digit', () => { + expect(resolveNameCasing('none', '1foo')).toEqual({ modified: true, name: '_1foo' }); + expect(resolveNameCasing('camel', '1foo')).toEqual({ modified: true, name: '_1foo' }); + }); +}); From 3f0b6f2d3a6a728067bbb2f775605fe84481362b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 6 Feb 2026 14:49:48 +0100 Subject: [PATCH 74/83] refactor(cli): consolidate default value normalization --- .../cli/src/actions/pull/provider/mysql.ts | 54 +------------------ .../src/actions/pull/provider/postgresql.ts | 30 +---------- .../cli/src/actions/pull/provider/sqlite.ts | 28 +--------- packages/cli/src/actions/pull/utils.ts | 37 +++++++++++++ 4 files changed, 43 insertions(+), 106 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index f59d3790d..4c041f80b 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -228,73 +228,23 @@ export const mysql: IntrospectionProvider = { return (ab) => 
ab.NumberLiteral.setValue(val); case 'Float': - // Integer strings: append '.0' - if (/^-?\d+$/.test(val)) { - return (ab) => ab.NumberLiteral.setValue(val + '.0'); - } - // Decimal strings: preserve exactly to avoid parseFloat precision loss - if (/^-?\d+\.\d+$/.test(val)) { - return (ab) => ab.NumberLiteral.setValue(val); - } - // Other values: return unchanged - return (ab) => ab.NumberLiteral.setValue(val); + return normalizeFloatDefault(val); case 'Decimal': - // Integer strings: append '.00' - if (/^-?\d+$/.test(val)) { - return (ab) => ab.NumberLiteral.setValue(val + '.00'); - } - // Decimal strings: normalize to minimum 2 decimal places, strip excess trailing zeros - if (/^-?\d+\.\d+$/.test(val)) { - const [integerPart, fractionalPart] = val.split('.'); - // Strip trailing zeros, but keep at least 2 digits - let normalized = fractionalPart!.replace(/0+$/, ''); - if (normalized.length < 2) { - normalized = normalized.padEnd(2, '0'); - } - return (ab) => ab.NumberLiteral.setValue(`${integerPart}.${normalized}`); - } - // Other values: return unchanged - return (ab) => ab.NumberLiteral.setValue(val); + return normalizeDecimalDefault(val); case 'Boolean': return (ab) => ab.BooleanLiteral.setValue(val.toLowerCase() === 'true' || val === '1' || val === "b'1'"); case 'String': - if (val.startsWith("'") && val.endsWith("'")) { - const strippedValue = val.slice(1, -1).replace(/''/g, "'"); - const enumDef = enums.find((e) => e.fields.find((v) => getDbName(v) === strippedValue)); - if (enumDef) { - const enumField = enumDef.fields.find((v) => getDbName(v) === strippedValue); - if (enumField) { - return (ab) => ab.ReferenceExpr.setTarget(enumField); - } - } - return (ab) => ab.StringLiteral.setValue(strippedValue); - } if (val.toLowerCase() === 'uuid()') { return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('uuid', services)); } return (ab) => ab.StringLiteral.setValue(val); } - if (val.startsWith("'") && val.endsWith("'")) { - const strippedValue = 
val.slice(1, -1).replace(/''/g, "'"); - const enumDef = enums.find((e) => e.fields.find((v) => getDbName(v) === strippedValue)); - if (enumDef) { - const enumField = enumDef.fields.find((v) => getDbName(v) === strippedValue); - if (enumField) { - return (ab) => ab.ReferenceExpr.setTarget(enumField); - } - } - return (ab) => ab.StringLiteral.setValue(strippedValue); - } - // Handle function calls (e.g., uuid(), now()) if (val.includes('(') && val.includes(')')) { - if (val.toLowerCase() === 'uuid()') { - return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('uuid', services)); - } return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => a.setValue((v) => v.StringLiteral.setValue(val)), diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 00bf97c82..b66c19a0c 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -264,39 +264,13 @@ export const postgresql: IntrospectionProvider = { if (val.includes('::')) { return typeCastingConvert({defaultValue,enums,val,services}); } - - // Integer strings: append '.0' - if (/^-?\d+$/.test(val)) { - return (ab) => ab.NumberLiteral.setValue(val + '.0'); - } - // Decimal strings: preserve exactly to avoid parseFloat precision loss - if (/^-?\d+\.\d+$/.test(val)) { - return (ab) => ab.NumberLiteral.setValue(val); - } - // Other values: return unchanged - return (ab) => ab.NumberLiteral.setValue(val); + return normalizeFloatDefault(val); case 'Decimal': if (val.includes('::')) { return typeCastingConvert({defaultValue,enums,val,services}); } - - // Integer strings: append '.00' - if (/^-?\d+$/.test(val)) { - return (ab) => ab.NumberLiteral.setValue(val + '.00'); - } - // Decimal strings: normalize to minimum 2 decimal places, strip excess trailing zeros - if (/^-?\d+\.\d+$/.test(val)) { - const [integerPart, fractionalPart] = val.split('.'); 
- // Strip trailing zeros, but keep at least 2 digits - let normalized = fractionalPart!.replace(/0+$/, ''); - if (normalized.length < 2) { - normalized = normalized.padEnd(2, '0'); - } - return (ab) => ab.NumberLiteral.setValue(`${integerPart}.${normalized}`); - } - // Other values: return unchanged - return (ab) => ab.NumberLiteral.setValue(val); + return normalizeDecimalDefault(val); case 'Boolean': return (ab) => ab.BooleanLiteral.setValue(val === 'true'); diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index 044b287c0..a7b6eeade 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -329,34 +329,10 @@ export const sqlite: IntrospectionProvider = { return (ab) => ab.NumberLiteral.setValue(val); case 'Float': - // Integer strings: append '.0' - if (/^-?\d+$/.test(val)) { - return (ab) => ab.NumberLiteral.setValue(val + '.0'); - } - // Decimal strings: preserve exactly to avoid parseFloat precision loss - if (/^-?\d+\.\d+$/.test(val)) { - return (ab) => ab.NumberLiteral.setValue(val); - } - // Other values: return unchanged - return (ab) => ab.NumberLiteral.setValue(val); + return normalizeFloatDefault(val); case 'Decimal': - // Integer strings: append '.00' - if (/^-?\d+$/.test(val)) { - return (ab) => ab.NumberLiteral.setValue(val + '.00'); - } - // Decimal strings: normalize to minimum 2 decimal places, strip excess trailing zeros - if (/^-?\d+\.\d+$/.test(val)) { - const [integerPart, fractionalPart] = val.split('.'); - // Strip trailing zeros, but keep at least 2 digits - let normalized = fractionalPart!.replace(/0+$/, ''); - if (normalized.length < 2) { - normalized = normalized.padEnd(2, '0'); - } - return (ab) => ab.NumberLiteral.setValue(`${integerPart}.${normalized}`); - } - // Other values: return unchanged - return (ab) => ab.NumberLiteral.setValue(val); + return normalizeDecimalDefault(val); case 'Boolean': return (ab) => 
ab.BooleanLiteral.setValue(val === 'true' || val === '1'); diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index b3f6c4880..58b879908 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -5,6 +5,7 @@ import { type DataModel, type Enum, type EnumField, + type Expression, type FunctionDecl, isInvocationExpr, type Attribute, @@ -12,6 +13,7 @@ import { type ReferenceExpr, type StringLiteral, } from '@zenstackhq/language/ast'; +import type { AstFactory, ExpressionBuilder } from '@zenstackhq/language/factory'; import { getLiteralArray, getStringLiteral } from '@zenstackhq/language/utils'; import type { DataSourceProviderType } from '@zenstackhq/schema'; import type { Reference } from 'langium'; @@ -170,3 +172,38 @@ export function getAttributeRef(name: string, services: ZModelServices) { export function getFunctionRef(name: string, services: ZModelServices) { return getDeclarationRef('FunctionDecl', name, services); } + +/** + * Normalize a default value string for a Float field. + * - Integer strings get `.0` appended + * - Decimal strings are preserved as-is + */ +export function normalizeFloatDefault(val: string): (ab: ExpressionBuilder) => AstFactory { + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val + '.0'); + } + if (/^-?\d+\.\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val); + } + return (ab) => ab.NumberLiteral.setValue(val); +} + +/** + * Normalize a default value string for a Decimal field. 
+ * - Integer strings get `.00` appended + * - Decimal strings are normalized to minimum 2 decimal places, stripping excess trailing zeros + */ +export function normalizeDecimalDefault(val: string): (ab: ExpressionBuilder) => AstFactory { + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val + '.00'); + } + if (/^-?\d+\.\d+$/.test(val)) { + const [integerPart, fractionalPart] = val.split('.'); + let normalized = fractionalPart!.replace(/0+$/, ''); + if (normalized.length < 2) { + normalized = normalized.padEnd(2, '0'); + } + return (ab) => ab.NumberLiteral.setValue(`${integerPart}.${normalized}`); + } + return (ab) => ab.NumberLiteral.setValue(val); +} From d03c2bc101ea20ae5813baa9017e3a67120c9c40 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 6 Feb 2026 14:49:50 +0100 Subject: [PATCH 75/83] feat(cli): improve enum syncing and relation naming during pull --- packages/cli/src/actions/pull/index.ts | 112 ++++++------------------- 1 file changed, 24 insertions(+), 88 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index ac96b359f..a1239af76 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -13,9 +13,12 @@ import { DataModelFactory, EnumFactory, } from '@zenstackhq/language/factory'; +import { AstUtils, type Reference, type AstNode, type CstNode } from 'langium'; +import { lowerCaseFirst } from '@zenstackhq/common-helpers'; import type { PullOptions } from '../db'; import type { Cascade, IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider'; import { getAttributeRef, getDbName, getEnumRef } from './utils'; +import { resolveNameCasing } from './casing'; import { CliError } from '../../cli-error'; export function syncEnums({ @@ -73,81 +76,22 @@ export function syncEnums({ model.declarations.push(factory.get({ $container: model })); } } else { + // For providers that don't support 
native enums (e.g., SQLite), carry over + // enum declarations from the existing schema as-is by deep-cloning the AST nodes. + // A dummy buildReference is used since we don't need cross-reference resolution. + const dummyBuildReference = (_node: AstNode, _property: string, _refNode: CstNode | undefined, refText: string): Reference => + ({ $refText: refText }) as Reference; + oldModel.declarations .filter((d) => isEnum(d)) .forEach((d) => { - const factory = new EnumFactory().setName(d.name); - // Copy enum-level comments - if (d.comments?.length) { - factory.update({ comments: [...d.comments] }); - } - // Copy enum-level attributes (@@map, @@schema, etc.) - // Re-parent attributes to the new factory node - if (d.attributes?.length) { - const reparentedAttrs = d.attributes.map((attr) => ({ ...attr, $container: factory.node })); - factory.update({ attributes: reparentedAttrs }); - } - // Copy fields with their attributes and comments - d.fields.forEach((v) => { - factory.addField((builder) => { - builder.setName(v.name); - // Copy field-level comments - if (v.comments?.length) { - v.comments.forEach((c) => { - builder.addComment(c); - }); - } - // Copy field-level attributes (@map, etc.) - // Re-parent attributes to the new builder node - if (v.attributes?.length) { - const reparentedAttrs = v.attributes.map((attr) => ({ ...attr, $container: builder.node })); - builder.update({ attributes: reparentedAttrs }); - } - return builder; - }); - }); - model.declarations.push(factory.get({ $container: model })); + const copy = AstUtils.copyAstNode(d, dummyBuildReference); + (copy as { $container: unknown }).$container = model; + model.declarations.push(copy); }); } } -function resolveNameCasing(casing: 'pascal' | 'camel' | 'snake' | 'none', originalName: string) { - let name = originalName; - const fieldPrefix = /[0-9]/g.test(name.charAt(0)) ? 
'_' : ''; - - switch (casing) { - case 'pascal': - name = toPascalCase(originalName); - break; - case 'camel': - name = toCamelCase(originalName); - break; - case 'snake': - name = toSnakeCase(originalName); - break; - } - - return { - modified: name !== originalName || fieldPrefix !== '', - name: `${fieldPrefix}${name}`, - }; -} - -function toPascalCase(str: string): string { - return str.replace(/[_\- ]+(\w)/g, (_, c) => c.toUpperCase()).replace(/^\w/, (c) => c.toUpperCase()); -} - -function toCamelCase(str: string): string { - return str.replace(/[_\- ]+(\w)/g, (_, c) => c.toUpperCase()).replace(/^\w/, (c) => c.toLowerCase()); -} - -function toSnakeCase(str: string): string { - return str - .replace(/[- ]+/g, '_') - .replace(/([a-z0-9])([A-Z])/g, '$1_$2') - .toLowerCase(); -} - export type Relation = { schema: string; table: string; @@ -185,24 +129,10 @@ export function syncTable({ const modelIdAttribute = getAttributeRef('@@id', services); const uniqueAttribute = getAttributeRef('@unique', services); const modelUniqueAttribute = getAttributeRef('@@unique', services); - const relationAttribute = getAttributeRef('@relation', services); const fieldMapAttribute = getAttributeRef('@map', services); const tableMapAttribute = getAttributeRef('@@map', services); const modelindexAttribute = getAttributeRef('@@index', services); - if ( - !idAttribute || - !uniqueAttribute || - !relationAttribute || - !fieldMapAttribute || - !tableMapAttribute || - !modelIdAttribute || - !modelUniqueAttribute || - !modelindexAttribute - ) { - throw new CliError('Cannot find required attributes in the model.'); - } - const relations: Relation[] = []; const { name, modified } = resolveNameCasing(options.modelCasing, table.name); const multiPk = table.columns.filter((c) => c.pk).length > 1; @@ -483,19 +413,25 @@ export function syncRelation({ const relationName = `${relation.table}${similarRelations > 0 ? 
`_${relation.column}` : ''}To${relation.references.table}`; - const sourceNameFromReference = sourceField.name.toLowerCase().endsWith('id') ? `${resolveNameCasing("camel", sourceField.name.slice(0, -2)).name}${relation.type === 'many'? 's' : ''}` : undefined; + // Derive a relation field name from the FK scalar field: if the field ends with "Id", + // strip the suffix and use the remainder (e.g., "authorId" -> "author"). + const sourceNameFromReference = sourceField.name.toLowerCase().endsWith('id') ? `${resolveNameCasing(options.fieldCasing, sourceField.name.slice(0, -2)).name}${relation.type === 'many'? 's' : ''}` : undefined; + // Check if the derived name would clash with an existing field const sourceFieldFromReference = sourceModel.fields.find((f) => f.name === sourceNameFromReference); + // Determine the relation field name: + // - For ambiguous relations (multiple FKs to the same table), include the source column for disambiguation. + // - Otherwise, prefer the name derived from the FK field (if no clash), falling back to the target model name. let { name: sourceFieldName } = resolveNameCasing( options.fieldCasing, similarRelations > 0 - ? `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` - : `${(!sourceFieldFromReference? sourceNameFromReference : undefined) || resolveNameCasing("camel", targetModel.name).name}${relation.type === 'many'? 's' : ''}`, + ? `${fieldPrefix}${lowerCaseFirst(sourceModel.name)}_${relation.column}` + : `${(!sourceFieldFromReference? sourceNameFromReference : undefined) || lowerCaseFirst(resolveNameCasing(options.fieldCasing, targetModel.name).name)}${relation.type === 'many'? 
's' : ''}`, ); if (sourceModel.fields.find((f) => f.name === sourceFieldName)) { - sourceFieldName = `${sourceFieldName}To${targetModel.name.charAt(0).toLowerCase()}${targetModel.name.slice(1)}_${relation.references.column}`; + sourceFieldName = `${sourceFieldName}To${lowerCaseFirst(targetModel.name)}_${relation.references.column}`; } const sourceFieldFactory = new DataFieldFactory() @@ -549,8 +485,8 @@ export function syncRelation({ const { name: oppositeFieldName } = resolveNameCasing( options.fieldCasing, similarRelations > 0 - ? `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` - : `${resolveNameCasing("camel", sourceModel.name).name}${relation.references.type === 'many'? 's' : ''}`, + ? `${oppositeFieldPrefix}${lowerCaseFirst(sourceModel.name)}_${relation.column}` + : `${lowerCaseFirst(resolveNameCasing(options.fieldCasing, sourceModel.name).name)}${relation.references.type === 'many'? 's' : ''}`, ); const targetFieldFactory = new DataFieldFactory() From 2922d6b45da6a85454c1b281a4900fff20eb5c56 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 6 Feb 2026 14:49:51 +0100 Subject: [PATCH 76/83] docs(cli): add documentation comments to SQL introspection queries --- .../cli/src/actions/pull/provider/mysql.ts | 149 ++++++++----- .../src/actions/pull/provider/postgresql.ts | 197 ++++++++++++------ .../cli/src/actions/pull/provider/provider.ts | 1 - .../cli/src/actions/pull/provider/sqlite.ts | 30 ++- 4 files changed, 244 insertions(+), 133 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index 4c041f80b..2b65d1594 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -1,6 +1,6 @@ import type { Attribute, BuiltinType } from '@zenstackhq/language/ast'; import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; -import { 
getAttributeRef, getDbName, getFunctionRef } from '../utils'; +import { getAttributeRef, getDbName, getFunctionRef, normalizeDecimalDefault, normalizeFloatDefault } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; import { CliError } from '../../../cli-error'; @@ -139,19 +139,11 @@ export const mysql: IntrospectionProvider = { const indexes = typeof row.indexes === 'string' ? JSON.parse(row.indexes) : row.indexes; // Sort columns by ordinal_position to preserve database column order - const sortedColumns = (columns || []) - .sort( - (a: { ordinal_position?: number }, b: { ordinal_position?: number }) => - (a.ordinal_position ?? 0) - (b.ordinal_position ?? 0) - ) - .map((col: { options?: string | string[] | null }) => ({ - ...col, - // Parse enum options from COLUMN_TYPE if present (e.g., "enum('val1','val2')") - options: - typeof col.options === 'string' - ? parseEnumValues(col.options) - : col.options ?? [], - })); + const sortedColumns = (columns || []) + .sort( + (a: { ordinal_position?: number }, b: { ordinal_position?: number }) => + (a.ordinal_position ?? 0) - (b.ordinal_position ?? 0) + ); // Filter out auto-generated FK indexes (MySQL creates these automatically) // Pattern: {Table}_{column}_fkey for single-column FK indexes @@ -291,126 +283,171 @@ export const mysql: IntrospectionProvider = { function getTableIntrospectionQuery(databaseName: string) { // Note: We use subqueries with ORDER BY before JSON_ARRAYAGG to ensure ordering - // since MySQL < 8.0.21 doesn't support ORDER BY inside JSON_ARRAYAGG - // MySQL doesn't support multi-schema, so we don't include schema in the result + // since MySQL < 8.0.21 doesn't support ORDER BY inside JSON_ARRAYAGG. + // MySQL doesn't support multi-schema, so we don't include schema in the result. return ` +-- Main query: one row per table/view with columns and indexes as nested JSON arrays. 
+-- Uses INFORMATION_SCHEMA which is MySQL's standard metadata catalog. SELECT - t.TABLE_NAME AS \`name\`, - CASE t.TABLE_TYPE + t.TABLE_NAME AS \`name\`, -- table or view name + CASE t.TABLE_TYPE -- map MySQL table type strings to our internal types WHEN 'BASE TABLE' THEN 'table' WHEN 'VIEW' THEN 'view' ELSE NULL END AS \`type\`, - CASE + CASE -- for views, retrieve the SQL definition WHEN t.TABLE_TYPE = 'VIEW' THEN v.VIEW_DEFINITION ELSE NULL END AS \`definition\`, + + -- ===== COLUMNS subquery ===== + -- Wraps an ordered subquery in JSON_ARRAYAGG to produce a JSON array of column objects. ( SELECT JSON_ARRAYAGG(col_json) FROM ( SELECT JSON_OBJECT( - 'ordinal_position', c.ORDINAL_POSITION, - 'name', c.COLUMN_NAME, + 'ordinal_position', c.ORDINAL_POSITION, -- column position (used for sorting) + 'name', c.COLUMN_NAME, -- column name + + -- datatype: special-case tinyint(1) as 'boolean' (MySQL's boolean convention), + -- otherwise use the DATA_TYPE (e.g., 'int', 'varchar', 'datetime') 'datatype', CASE WHEN c.DATA_TYPE = 'tinyint' AND c.COLUMN_TYPE = 'tinyint(1)' THEN 'boolean' ELSE c.DATA_TYPE END, + + -- datatype_name: for enum columns, generate a synthetic name "TableName_ColumnName" + -- (MySQL doesn't have named enum types like PostgreSQL) 'datatype_name', CASE WHEN c.DATA_TYPE = 'enum' THEN CONCAT(t.TABLE_NAME, '_', c.COLUMN_NAME) ELSE NULL END, - 'datatype_schema', '', - 'length', c.CHARACTER_MAXIMUM_LENGTH, - 'precision', COALESCE(c.NUMERIC_PRECISION, c.DATETIME_PRECISION), - 'nullable', c.IS_NULLABLE = 'YES', + + 'datatype_schema', '', -- MySQL doesn't support multi-schema + 'length', c.CHARACTER_MAXIMUM_LENGTH, -- max length for string types (e.g., VARCHAR(255) -> 255) + 'precision', COALESCE(c.NUMERIC_PRECISION, c.DATETIME_PRECISION), -- numeric or datetime precision + + 'nullable', c.IS_NULLABLE = 'YES', -- true if column allows NULL + + -- default: for auto_increment columns, report 'auto_increment' instead of NULL; + -- otherwise use the COLUMN_DEFAULT 
value 'default', CASE WHEN c.EXTRA LIKE '%auto_increment%' THEN 'auto_increment' ELSE c.COLUMN_DEFAULT END, - 'pk', c.COLUMN_KEY = 'PRI', - 'unique', c.COLUMN_KEY = 'UNI', + + 'pk', c.COLUMN_KEY = 'PRI', -- true if column is part of the primary key + 'unique', c.COLUMN_KEY = 'UNI', -- true if column has a unique constraint 'unique_name', CASE WHEN c.COLUMN_KEY = 'UNI' THEN c.COLUMN_NAME ELSE NULL END, + + -- computed: true if column has a generation expression (virtual or stored) 'computed', c.GENERATION_EXPRESSION IS NOT NULL AND c.GENERATION_EXPRESSION != '', + + -- options: for enum columns, the full COLUMN_TYPE string (e.g., "enum('a','b','c')") + -- which gets parsed into individual values later 'options', CASE WHEN c.DATA_TYPE = 'enum' THEN c.COLUMN_TYPE ELSE NULL END, - 'foreign_key_schema', NULL, - 'foreign_key_table', kcu_fk.REFERENCED_TABLE_NAME, - 'foreign_key_column', kcu_fk.REFERENCED_COLUMN_NAME, - 'foreign_key_name', kcu_fk.CONSTRAINT_NAME, - 'foreign_key_on_update', rc.UPDATE_RULE, - 'foreign_key_on_delete', rc.DELETE_RULE + + -- Foreign key info (NULL if column is not part of a FK) + 'foreign_key_schema', NULL, -- MySQL doesn't support cross-schema FKs here + 'foreign_key_table', kcu_fk.REFERENCED_TABLE_NAME, -- referenced table + 'foreign_key_column', kcu_fk.REFERENCED_COLUMN_NAME, -- referenced column + 'foreign_key_name', kcu_fk.CONSTRAINT_NAME, -- FK constraint name + 'foreign_key_on_update', rc.UPDATE_RULE, -- referential action on update (CASCADE, SET NULL, etc.) + 'foreign_key_on_delete', rc.DELETE_RULE -- referential action on delete ) AS col_json - FROM INFORMATION_SCHEMA.COLUMNS c + + FROM INFORMATION_SCHEMA.COLUMNS c -- one row per column in the database + + -- Join KEY_COLUMN_USAGE to find foreign key references for this column. + -- Filter to only FK entries (REFERENCED_TABLE_NAME IS NOT NULL). 
LEFT JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu_fk ON c.TABLE_SCHEMA = kcu_fk.TABLE_SCHEMA AND c.TABLE_NAME = kcu_fk.TABLE_NAME AND c.COLUMN_NAME = kcu_fk.COLUMN_NAME AND kcu_fk.REFERENCED_TABLE_NAME IS NOT NULL + + -- Join REFERENTIAL_CONSTRAINTS to get ON UPDATE / ON DELETE rules for the FK. LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS rc ON kcu_fk.CONSTRAINT_SCHEMA = rc.CONSTRAINT_SCHEMA AND kcu_fk.CONSTRAINT_NAME = rc.CONSTRAINT_NAME + WHERE c.TABLE_SCHEMA = t.TABLE_SCHEMA AND c.TABLE_NAME = t.TABLE_NAME - ORDER BY c.ORDINAL_POSITION + ORDER BY c.ORDINAL_POSITION -- preserve original column order ) AS cols_ordered ) AS \`columns\`, + + -- ===== INDEXES subquery ===== + -- Aggregates all indexes for this table into a JSON array. ( SELECT JSON_ARRAYAGG(idx_json) FROM ( SELECT JSON_OBJECT( - 'name', s.INDEX_NAME, - 'method', s.INDEX_TYPE, - 'unique', s.NON_UNIQUE = 0, - 'primary', s.INDEX_NAME = 'PRIMARY', - 'valid', TRUE, - 'ready', TRUE, - 'partial', FALSE, - 'predicate', NULL, + 'name', s.INDEX_NAME, -- index name (e.g., 'PRIMARY', 'idx_email') + 'method', s.INDEX_TYPE, -- index type (e.g., 'BTREE', 'HASH', 'FULLTEXT') + 'unique', s.NON_UNIQUE = 0, -- NON_UNIQUE=0 means it IS unique + 'primary', s.INDEX_NAME = 'PRIMARY', -- MySQL names the PK index 'PRIMARY' + 'valid', TRUE, -- MySQL doesn't expose index validity status + 'ready', TRUE, -- MySQL doesn't expose index readiness status + 'partial', FALSE, -- MySQL doesn't support partial indexes + 'predicate', NULL, -- no WHERE clause on indexes in MySQL + + -- Index columns: nested subquery for columns in this index 'columns', ( SELECT JSON_ARRAYAGG(idx_col_json) FROM ( SELECT JSON_OBJECT( - 'name', s2.COLUMN_NAME, - 'expression', NULL, + 'name', s2.COLUMN_NAME, -- column name in the index + 'expression', NULL, -- MySQL doesn't expose expression indexes via STATISTICS + -- COLLATION: 'A' = ascending, 'D' = descending, NULL = not sorted 'order', CASE s2.COLLATION WHEN 'A' THEN 'ASC' WHEN 'D' THEN 'DESC' 
ELSE NULL END, - 'nulls', NULL + 'nulls', NULL -- MySQL doesn't expose NULLS FIRST/LAST ) AS idx_col_json - FROM INFORMATION_SCHEMA.STATISTICS s2 + FROM INFORMATION_SCHEMA.STATISTICS s2 -- one row per column per index WHERE s2.TABLE_SCHEMA = s.TABLE_SCHEMA AND s2.TABLE_NAME = s.TABLE_NAME AND s2.INDEX_NAME = s.INDEX_NAME - ORDER BY s2.SEQ_IN_INDEX + ORDER BY s2.SEQ_IN_INDEX -- preserve column order within the index ) AS idx_cols_ordered ) ) AS idx_json FROM ( + -- Deduplicate: STATISTICS has one row per (index, column), but we need one row per index. + -- DISTINCT on INDEX_NAME gives us one entry per index with its metadata. SELECT DISTINCT INDEX_NAME, INDEX_TYPE, NON_UNIQUE, TABLE_SCHEMA, TABLE_NAME FROM INFORMATION_SCHEMA.STATISTICS WHERE TABLE_SCHEMA = t.TABLE_SCHEMA AND TABLE_NAME = t.TABLE_NAME ) s ) AS idxs_ordered ) AS \`indexes\` + +-- === Main FROM: INFORMATION_SCHEMA.TABLES lists all tables and views === FROM INFORMATION_SCHEMA.TABLES t +-- Join VIEWS to get VIEW_DEFINITION for view tables LEFT JOIN INFORMATION_SCHEMA.VIEWS v ON t.TABLE_SCHEMA = v.TABLE_SCHEMA AND t.TABLE_NAME = v.TABLE_NAME -WHERE t.TABLE_SCHEMA = '${databaseName}' - AND t.TABLE_TYPE IN ('BASE TABLE', 'VIEW') - AND t.TABLE_NAME <> '_prisma_migrations' +WHERE t.TABLE_SCHEMA = '${databaseName}' -- only the target database + AND t.TABLE_TYPE IN ('BASE TABLE', 'VIEW') -- exclude system tables like SYSTEM VIEW + AND t.TABLE_NAME <> '_prisma_migrations' -- exclude Prisma migration tracking table ORDER BY t.TABLE_NAME; `; } function getEnumIntrospectionQuery(databaseName: string) { + // MySQL doesn't have standalone enum types like PostgreSQL's CREATE TYPE. + // Instead, enum values are embedded in column definitions (e.g., COLUMN_TYPE = "enum('a','b','c')"). + // This query finds all enum columns so we can extract their allowed values. 
return ` SELECT - c.TABLE_NAME AS table_name, - c.COLUMN_NAME AS column_name, - c.COLUMN_TYPE AS column_type + c.TABLE_NAME AS table_name, -- table containing the enum column + c.COLUMN_NAME AS column_name, -- column name + c.COLUMN_TYPE AS column_type -- full type string including values (e.g., "enum('val1','val2')") FROM INFORMATION_SCHEMA.COLUMNS c -WHERE c.TABLE_SCHEMA = '${databaseName}' - AND c.DATA_TYPE = 'enum' +WHERE c.TABLE_SCHEMA = '${databaseName}' -- only the target database + AND c.DATA_TYPE = 'enum' -- only enum columns ORDER BY c.TABLE_NAME, c.COLUMN_NAME; `; } diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index b66c19a0c..879a0b89d 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -1,7 +1,7 @@ import type { Attribute, BuiltinType, Enum, Expression } from '@zenstackhq/language/ast'; import { AstFactory, DataFieldAttributeFactory, ExpressionBuilder } from '@zenstackhq/language/factory'; import { Client } from 'pg'; -import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; +import { getAttributeRef, getDbName, getFunctionRef, normalizeDecimalDefault, normalizeFloatDefault } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; import type { ZModelServices } from '@zenstackhq/language'; import { CliError } from '../../../cli-error'; @@ -39,11 +39,11 @@ const standardTypePrecisions: Record = { }; /** - * Maps PostgreSQL typnames (from pg_type.typname) to Prisma native type attribute names. + * Maps PostgreSQL typnames (from pg_type.typname) to ZenStack native type attribute names. * PostgreSQL introspection returns internal type names like 'int2', 'int4', 'float8', 'bpchar', - * but Prisma/ZenStack attributes are named @db.SmallInt, @db.Integer, @db.DoublePrecision, @db.Char, etc. 
+ * but ZenStack attributes are named @db.SmallInt, @db.Integer, @db.DoublePrecision, @db.Char, etc. */ -const pgTypnameToPrismaNativeType: Record = { +const pgTypnameToZenStackNativeType: Record = { // integers int2: 'SmallInt', smallint: 'SmallInt', @@ -305,10 +305,10 @@ export const postgresql: IntrospectionProvider = { factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services))); } - // Map PostgreSQL typname to Prisma native type attribute name - // PostgreSQL returns typnames like 'int2', 'float8', 'bpchar', but Prisma attributes + // Map PostgreSQL typname to ZenStack native type attribute name + // PostgreSQL returns typnames like 'int2', 'float8', 'bpchar', but ZenStack attributes // are named @db.SmallInt, @db.DoublePrecision, @db.Char, etc. - const nativeTypeName = pgTypnameToPrismaNativeType[datatype.toLowerCase()] ?? datatype; + const nativeTypeName = pgTypnameToZenStackNativeType[datatype.toLowerCase()] ?? datatype; // Add @db.* attribute if the datatype differs from the default const dbAttr = services.shared.workspace.IndexManager.allElements('Attribute').find( @@ -345,40 +345,49 @@ export const postgresql: IntrospectionProvider = { const enumIntrospectionQuery = ` SELECT - n.nspname AS schema_name, - t.typname AS enum_type, - coalesce(json_agg(e.enumlabel ORDER BY e.enumsortorder), '[]') AS values -FROM pg_type t -JOIN pg_enum e ON t.oid = e.enumtypid -JOIN pg_namespace n ON n.oid = t.typnamespace -GROUP BY schema_name, enum_type + n.nspname AS schema_name, -- schema the enum belongs to (e.g., 'public') + t.typname AS enum_type, -- enum type name as defined in CREATE TYPE + coalesce(json_agg(e.enumlabel ORDER BY e.enumsortorder), '[]') AS values -- ordered list of enum labels as JSON array +FROM pg_type t -- pg_type: catalog of all data types +JOIN pg_enum e ON t.oid = e.enumtypid -- pg_enum: one row per enum label; join to get labels for this enum type +JOIN pg_namespace n ON n.oid = t.typnamespace -- 
pg_namespace: schema info; join to get the schema name +GROUP BY schema_name, enum_type -- one row per enum type, with all labels aggregated ORDER BY schema_name, enum_type;`; const tableIntrospectionQuery = ` +-- Main query: one row per table/view with columns and indexes as nested JSON arrays. +-- Joins pg_class (tables/views) with pg_namespace (schemas). SELECT - "ns"."nspname" AS "schema", - "cls"."relname" AS "name", - CASE "cls"."relkind" + "ns"."nspname" AS "schema", -- schema name (e.g., 'public') + "cls"."relname" AS "name", -- table or view name + CASE "cls"."relkind" -- relkind: 'r' = ordinary table, 'v' = view WHEN 'r' THEN 'table' WHEN 'v' THEN 'view' ELSE NULL END AS "type", - CASE + CASE -- for views, retrieve the SQL definition WHEN "cls"."relkind" = 'v' THEN pg_get_viewdef("cls"."oid", true) ELSE NULL END AS "definition", + + -- ===== COLUMNS subquery ===== + -- Aggregates all columns for this table into a JSON array. ( SELECT coalesce(json_agg(agg), '[]') FROM ( SELECT - "att"."attname" AS "name", + "att"."attname" AS "name", -- column name + + -- datatype: if the type is an enum, report 'enum'; otherwise use the pg_type name CASE WHEN EXISTS ( SELECT 1 FROM "pg_catalog"."pg_enum" AS "e" WHERE "e"."enumtypid" = "typ"."oid" ) THEN 'enum' - ELSE "typ"."typname" + ELSE "typ"."typname" -- internal type name (e.g., 'int4', 'varchar', 'text') END AS "datatype", + + -- datatype_name: for enums only, the actual enum type name (used to look up the enum definition) CASE WHEN EXISTS ( SELECT 1 FROM "pg_catalog"."pg_enum" AS "e" @@ -386,13 +395,18 @@ SELECT ) THEN "typ"."typname" ELSE NULL END AS "datatype_name", - "tns"."nspname" AS "datatype_schema", - "c"."character_maximum_length" AS "length", - COALESCE("c"."numeric_precision", "c"."datetime_precision") AS "precision", - "fk_ns"."nspname" AS "foreign_key_schema", - "fk_cls"."relname" AS "foreign_key_table", - "fk_att"."attname" AS "foreign_key_column", - "fk_con"."conname" AS "foreign_key_name", + + 
"tns"."nspname" AS "datatype_schema", -- schema where the data type is defined + "c"."character_maximum_length" AS "length", -- max length for char/varchar types (from information_schema) + COALESCE("c"."numeric_precision", "c"."datetime_precision") AS "precision", -- numeric or datetime precision + + -- Foreign key info (NULL if column is not part of a FK constraint) + "fk_ns"."nspname" AS "foreign_key_schema", -- schema of the referenced table + "fk_cls"."relname" AS "foreign_key_table", -- referenced table name + "fk_att"."attname" AS "foreign_key_column", -- referenced column name + "fk_con"."conname" AS "foreign_key_name", -- FK constraint name + + -- FK referential actions: decode single-char codes to human-readable strings CASE "fk_con"."confupdtype" WHEN 'a' THEN 'NO ACTION' WHEN 'r' THEN 'RESTRICT' @@ -409,27 +423,37 @@ SELECT WHEN 'd' THEN 'SET DEFAULT' ELSE NULL END AS "foreign_key_on_delete", + + -- pk: true if this column is part of the table's primary key constraint "pk_con"."conkey" IS NOT NULL AS "pk", + + -- unique: true if the column has a single-column UNIQUE constraint OR a single-column unique index ( + -- Check for a single-column UNIQUE constraint (contype = 'u') EXISTS ( SELECT 1 FROM "pg_catalog"."pg_constraint" AS "u_con" - WHERE "u_con"."contype" = 'u' - AND "u_con"."conrelid" = "cls"."oid" - AND array_length("u_con"."conkey", 1) = 1 - AND "att"."attnum" = ANY ("u_con"."conkey") + WHERE "u_con"."contype" = 'u' -- 'u' = unique constraint + AND "u_con"."conrelid" = "cls"."oid" -- on this table + AND array_length("u_con"."conkey", 1) = 1 -- single-column only + AND "att"."attnum" = ANY ("u_con"."conkey") -- this column is in the constraint ) - OR EXISTS ( + OR + -- Check for a single-column unique index (may exist without an explicit constraint) + EXISTS ( SELECT 1 FROM "pg_catalog"."pg_index" AS "u_idx" - WHERE "u_idx"."indrelid" = "cls"."oid" - AND "u_idx"."indisunique" = TRUE - AND "u_idx"."indnkeyatts" = 1 - AND "att"."attnum" = ANY 
("u_idx"."indkey"::int2[]) + WHERE "u_idx"."indrelid" = "cls"."oid" -- on this table + AND "u_idx"."indisunique" = TRUE -- it's a unique index + AND "u_idx"."indnkeyatts" = 1 -- single key column + AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[]) -- this column is the key ) ) AS "unique", + + -- unique_name: the name of the unique constraint or index (whichever exists first) ( SELECT COALESCE( + -- Try constraint name first ( SELECT "u_con"."conname" FROM "pg_catalog"."pg_constraint" AS "u_con" @@ -439,6 +463,7 @@ SELECT AND "att"."attnum" = ANY ("u_con"."conkey") LIMIT 1 ), + -- Fall back to unique index name ( SELECT "u_idx_cls"."relname" FROM "pg_catalog"."pg_index" AS "u_idx" @@ -451,9 +476,12 @@ SELECT ) ) ) AS "unique_name", - "att"."attgenerated" != '' AS "computed", - pg_get_expr("def"."adbin", "def"."adrelid") AS "default", - "att"."attnotnull" != TRUE AS "nullable", + + "att"."attgenerated" != '' AS "computed", -- true if column is a generated/computed column + pg_get_expr("def"."adbin", "def"."adrelid") AS "default", -- column default expression as text (e.g., 'nextval(...)', '0', 'now()') + "att"."attnotnull" != TRUE AS "nullable", -- true if column allows NULL values + + -- options: for enum columns, aggregates all allowed enum labels into a JSON array coalesce( ( SELECT json_agg("enm"."enumlabel") AS "o" @@ -463,76 +491,109 @@ SELECT '[]' ) AS "options" - FROM "pg_catalog"."pg_attribute" AS "att" + -- === FROM / JOINs for the columns subquery === + + -- pg_attribute: one row per table column (attnum >= 0 excludes system columns) + FROM "pg_catalog"."pg_attribute" AS "att" - INNER JOIN "pg_catalog"."pg_type" AS "typ" ON "typ"."oid" = "att"."atttypid" + -- pg_type: data type of the column (e.g., int4, text, custom_enum) + INNER JOIN "pg_catalog"."pg_type" AS "typ" ON "typ"."oid" = "att"."atttypid" - INNER JOIN "pg_catalog"."pg_namespace" AS "tns" ON "tns"."oid" = "typ"."typnamespace" + -- pg_namespace for the type: needed to determine which schema 
the type lives in + INNER JOIN "pg_catalog"."pg_namespace" AS "tns" ON "tns"."oid" = "typ"."typnamespace" - LEFT JOIN "information_schema"."columns" AS "c" ON "c"."table_schema" = "ns"."nspname" - AND "c"."table_name" = "cls"."relname" - AND "c"."column_name" = "att"."attname" - LEFT JOIN "pg_catalog"."pg_constraint" AS "pk_con" ON "pk_con"."contype" = 'p' + -- information_schema.columns: provides length/precision info not easily available from pg_catalog + LEFT JOIN "information_schema"."columns" AS "c" ON "c"."table_schema" = "ns"."nspname" + AND "c"."table_name" = "cls"."relname" + AND "c"."column_name" = "att"."attname" + -- pg_constraint (primary key): join on contype='p' to detect if column is part of PK + LEFT JOIN "pg_catalog"."pg_constraint" AS "pk_con" ON "pk_con"."contype" = 'p' AND "pk_con"."conrelid" = "cls"."oid" AND "att"."attnum" = ANY ("pk_con"."conkey") + + -- pg_constraint (foreign key): join on contype='f' to get FK details for this column LEFT JOIN "pg_catalog"."pg_constraint" AS "fk_con" ON "fk_con"."contype" = 'f' AND "fk_con"."conrelid" = "cls"."oid" AND "att"."attnum" = ANY ("fk_con"."conkey") + + -- pg_class for FK target table: resolve the referenced table's OID to its name LEFT JOIN "pg_catalog"."pg_class" AS "fk_cls" ON "fk_cls"."oid" = "fk_con"."confrelid" + + -- pg_namespace for FK target: get the schema of the referenced table LEFT JOIN "pg_catalog"."pg_namespace" AS "fk_ns" ON "fk_ns"."oid" = "fk_cls"."relnamespace" + + -- pg_attribute for FK target column: resolve the referenced column number to its name LEFT JOIN "pg_catalog"."pg_attribute" AS "fk_att" ON "fk_att"."attrelid" = "fk_cls"."oid" AND "fk_att"."attnum" = ANY ("fk_con"."confkey") + + -- pg_attrdef: column defaults; adbin contains the internal expression, decoded via pg_get_expr() LEFT JOIN "pg_catalog"."pg_attrdef" AS "def" ON "def"."adrelid" = "cls"."oid" AND "def"."adnum" = "att"."attnum" + WHERE - "att"."attrelid" = "cls"."oid" - AND "att"."attnum" >= 0 - AND 
"att"."attisdropped" != TRUE - ORDER BY "att"."attnum" + "att"."attrelid" = "cls"."oid" -- only columns belonging to this table + AND "att"."attnum" >= 0 -- exclude system columns (ctid, xmin, etc. have attnum < 0) + AND "att"."attisdropped" != TRUE -- exclude dropped (deleted) columns + ORDER BY "att"."attnum" -- preserve original column order ) AS agg ) AS "columns", + + -- ===== INDEXES subquery ===== + -- Aggregates all indexes for this table into a JSON array. ( SELECT coalesce(json_agg(agg), '[]') FROM ( SELECT - "idx_cls"."relname" AS "name", - "am"."amname" AS "method", - "idx"."indisunique" AS "unique", - "idx"."indisprimary" AS "primary", - "idx"."indisvalid" AS "valid", - "idx"."indisready" AS "ready", - ("idx"."indpred" IS NOT NULL) AS "partial", - pg_get_expr("idx"."indpred", "idx"."indrelid") AS "predicate", + "idx_cls"."relname" AS "name", -- index name + "am"."amname" AS "method", -- access method (e.g., 'btree', 'hash', 'gin', 'gist') + "idx"."indisunique" AS "unique", -- true if unique index + "idx"."indisprimary" AS "primary", -- true if this is the PK index + "idx"."indisvalid" AS "valid", -- false during concurrent index builds + "idx"."indisready" AS "ready", -- true when index is ready for inserts + ("idx"."indpred" IS NOT NULL) AS "partial", -- true if index has a WHERE clause (partial index) + pg_get_expr("idx"."indpred", "idx"."indrelid") AS "predicate", -- the WHERE clause expression for partial indexes + + -- Index columns: iterate over each position in the index key array ( SELECT json_agg( json_build_object( + -- 'name': column name, or for expression indexes the expression text 'name', COALESCE("att"."attname", pg_get_indexdef("idx"."indexrelid", "s"."i", true)), + -- 'expression': non-null only for expression-based index columns (e.g., lower(name)) 'expression', CASE WHEN "att"."attname" IS NULL THEN pg_get_indexdef("idx"."indexrelid", "s"."i", true) ELSE NULL END, + -- 'order': sort direction; bit 0 of indoption = 1 means DESC 
'order', CASE ((( "idx"."indoption"::int2[] )["s"."i"] & 1)) WHEN 1 THEN 'DESC' ELSE 'ASC' END, + -- 'nulls': null ordering; bit 1 of indoption = 1 means NULLS FIRST 'nulls', CASE (((( "idx"."indoption"::int2[] )["s"."i"] >> 1) & 1)) WHEN 1 THEN 'NULLS FIRST' ELSE 'NULLS LAST' END ) - ORDER BY "s"."i" + ORDER BY "s"."i" -- preserve column order within the index ) + -- generate_subscripts creates one row per index key position (1-based) FROM generate_subscripts("idx"."indkey"::int2[], 1) AS "s"("i") + -- Join to pg_attribute to resolve column numbers to names + -- NULL attname means it's an expression index column LEFT JOIN "pg_catalog"."pg_attribute" AS "att" ON "att"."attrelid" = "cls"."oid" AND "att"."attnum" = ("idx"."indkey"::int2[])["s"."i"] ) AS "columns" - FROM "pg_catalog"."pg_index" AS "idx" - JOIN "pg_catalog"."pg_class" AS "idx_cls" ON "idx"."indexrelid" = "idx_cls"."oid" - JOIN "pg_catalog"."pg_am" AS "am" ON "idx_cls"."relam" = "am"."oid" - WHERE "idx"."indrelid" = "cls"."oid" + + FROM "pg_catalog"."pg_index" AS "idx" -- pg_index: one row per index + JOIN "pg_catalog"."pg_class" AS "idx_cls" ON "idx"."indexrelid" = "idx_cls"."oid" -- index's own pg_class entry (for the name) + JOIN "pg_catalog"."pg_am" AS "am" ON "idx_cls"."relam" = "am"."oid" -- access method catalog + WHERE "idx"."indrelid" = "cls"."oid" -- only indexes on this table ORDER BY "idx_cls"."relname" ) AS agg ) AS "indexes" + +-- === Main FROM: pg_class (tables and views) joined with pg_namespace (schemas) === FROM "pg_catalog"."pg_class" AS "cls" INNER JOIN "pg_catalog"."pg_namespace" AS "ns" ON "cls"."relnamespace" = "ns"."oid" WHERE - "ns"."nspname" !~ '^pg_' - AND "ns"."nspname" != 'information_schema' - AND "cls"."relkind" IN ('r', 'v') - AND "cls"."relname" !~ '^pg_' - AND "cls"."relname" !~ '_prisma_migrations' + "ns"."nspname" !~ '^pg_' -- exclude PostgreSQL internal schemas (pg_catalog, pg_toast, etc.) 
+ AND "ns"."nspname" != 'information_schema' -- exclude the information_schema + AND "cls"."relkind" IN ('r', 'v') -- only tables ('r') and views ('v') + AND "cls"."relname" !~ '^pg_' -- exclude system tables starting with pg_ + AND "cls"."relname" !~ '_prisma_migrations' -- exclude Prisma migration tracking table ORDER BY "ns"."nspname", "cls"."relname" ASC; `; diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index 01cb28e61..03e44eae0 100644 --- a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -25,7 +25,6 @@ export interface IntrospectedTable { pk: boolean; computed: boolean; nullable: boolean; - options: string[]; unique: boolean; unique_name: string | null; default: string | null; diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index a7b6eeade..5f7e914fe 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -1,5 +1,5 @@ import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; -import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; +import { getAttributeRef, getDbName, getFunctionRef, normalizeDecimalDefault, normalizeFloatDefault } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; // Note: We dynamically import better-sqlite3 inside the async function to avoid @@ -128,7 +128,10 @@ export const sqlite: IntrospectionProvider = { return stmt.all() as T[]; }; - // List user tables and views (exclude internal sqlite_*) + // List user tables and views from sqlite_schema (the master catalog). + // sqlite_schema contains one row per table, view, index, and trigger. + // We filter to only tables/views and exclude internal sqlite_* objects. 
+ // The 'sql' column contains the original CREATE TABLE/VIEW statement. const tablesRaw = all<{ name: string; type: 'table' | 'view'; definition: string | null }>( "SELECT name, type, sql AS definition FROM sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name", ); @@ -156,7 +159,9 @@ export const sqlite: IntrospectionProvider = { // Check if this table has autoincrement (via sqlite_sequence) const hasAutoIncrement = autoIncrementTables.has(tableName); - // Columns with extended info; filter out hidden=1 (internal/rowid), mark computed if hidden=2 (generated) + // PRAGMA table_xinfo: extended version of table_info that also includes hidden/generated columns. + // Returns one row per column with: cid (column index), name, type, notnull, dflt_value, pk. + // hidden: 0 = normal column, 1 = hidden/internal (e.g., rowid), 2 = generated/computed column. const columnsInfo = all<{ cid: number; name: string; @@ -167,7 +172,10 @@ export const sqlite: IntrospectionProvider = { hidden?: number; }>(`PRAGMA table_xinfo('${tableName.replace(/'/g, "''")}')`); - // Index list (used for both unique inference and index collection) + // PRAGMA index_list: returns all indexes on a table. + // Each row has: seq (index sequence), name, unique (1 if unique), origin ('c'=CREATE INDEX, + // 'u'=UNIQUE constraint, 'pk'=PRIMARY KEY), partial (1 if partial index). + // We exclude sqlite_autoindex_* entries which are auto-generated for UNIQUE constraints. const tableNameEsc = tableName.replace(/'/g, "''"); const idxList = all<{ seq: number; @@ -177,7 +185,9 @@ export const sqlite: IntrospectionProvider = { partial: number; }>(`PRAGMA index_list('${tableNameEsc}')`).filter((r) => !r.name.startsWith('sqlite_autoindex_')); - // Unique columns detection via unique indexes with single column + // Detect single-column unique constraints by inspecting each unique index. + // PRAGMA index_info: returns the columns that make up an index. 
+ // If a unique (non-partial) index has exactly one column, that column is "unique". const uniqueSingleColumn = new Set(); const uniqueIndexRows = idxList.filter((r) => r.unique === 1 && r.partial !== 1); for (const idx of uniqueIndexRows) { @@ -187,7 +197,9 @@ export const sqlite: IntrospectionProvider = { } } - // Indexes details + // Build detailed index info for each index. + // PRAGMA index_info returns one row per column in the index. + // SQLite doesn't expose access method, predicate, or sort order through PRAGMAs. const indexes: IntrospectedTable['indexes'] = idxList.map((idx) => { const idxCols = all<{ name: string }>(`PRAGMA index_info('${idx.name.replace(/'/g, "''")}')`); return { @@ -208,7 +220,10 @@ export const sqlite: IntrospectionProvider = { }; }); - // Foreign keys mapping by column name + // PRAGMA foreign_key_list: returns all foreign key constraints on a table. + // Each row represents one column in a FK constraint with: id (FK id, shared by multi-column FKs), + // seq (column index within the FK), table (referenced table), from (local column), + // to (referenced column), on_update, on_delete (referential actions). 
const fkRows = all<{ id: number; seq: number; @@ -293,7 +308,6 @@ export const sqlite: IntrospectionProvider = { computed: hidden === 2, nullable: c.notnull !== 1, default: defaultValue, - options: [], unique: uniqueSingleColumn.has(c.name), unique_name: null, }); From 62cdd735fb172080b139bb006a0fd40229f0dd95 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 6 Feb 2026 14:49:52 +0100 Subject: [PATCH 77/83] test(cli): refactor test utilities and modernize test suites --- packages/cli/test/check.test.ts | 24 +++++----- packages/cli/test/db.test.ts | 16 +++---- packages/cli/test/db/pull.test.ts | 48 +++++++++---------- packages/cli/test/db/push.test.ts | 4 +- packages/cli/test/format.test.ts | 8 ++-- packages/cli/test/generate.test.ts | 28 +++++------ packages/cli/test/migrate.test.ts | 24 +++++----- .../cli/test/plugins/custom-plugin.test.ts | 4 +- .../cli/test/plugins/prisma-plugin.test.ts | 20 ++++---- packages/cli/test/utils.ts | 24 ++++------ 10 files changed, 96 insertions(+), 104 deletions(-) diff --git a/packages/cli/test/check.test.ts b/packages/cli/test/check.test.ts index 60f80903e..99d31ecda 100644 --- a/packages/cli/test/check.test.ts +++ b/packages/cli/test/check.test.ts @@ -36,37 +36,37 @@ model Post { `; describe('CLI validate command test', () => { - it('should validate a valid schema successfully', () => { - const workDir = createProject(validModel); + it('should validate a valid schema successfully', async () => { + const { workDir } = await createProject(validModel); // Should not throw an error expect(() => runCli('check', workDir)).not.toThrow(); }); - it('should fail validation for invalid schema', () => { - const workDir = createProject(invalidModel); + it('should fail validation for invalid schema', async () => { + const { workDir } = await createProject(invalidModel); // Should throw an error due to validation failure expect(() => runCli('check', workDir)).toThrow(); }); - it('should respect custom schema 
location', () => { - const workDir = createProject(validModel); + it('should respect custom schema location', async () => { + const { workDir } = await createProject(validModel); fs.renameSync(path.join(workDir, 'zenstack/schema.zmodel'), path.join(workDir, 'zenstack/custom.zmodel')); // Should not throw an error when using custom schema path expect(() => runCli('check --schema ./zenstack/custom.zmodel', workDir)).not.toThrow(); }); - it('should fail when schema file does not exist', () => { - const workDir = createProject(validModel); + it('should fail when schema file does not exist', async () => { + const { workDir } = await createProject(validModel); // Should throw an error when schema file doesn't exist expect(() => runCli('check --schema ./nonexistent.zmodel', workDir)).toThrow(); }); - it('should respect package.json config', () => { - const workDir = createProject(validModel); + it('should respect package.json config', async () => { + const { workDir } = await createProject(validModel); fs.mkdirSync(path.join(workDir, 'foo')); fs.renameSync(path.join(workDir, 'zenstack/schema.zmodel'), path.join(workDir, 'foo/schema.zmodel')); fs.rmdirSync(path.join(workDir, 'zenstack')); @@ -81,14 +81,14 @@ describe('CLI validate command test', () => { expect(() => runCli('check', workDir)).not.toThrow(); }); - it('should validate schema with syntax errors', () => { + it('should validate schema with syntax errors', async () => { const modelWithSyntaxError = ` model User { id String @id @default(cuid()) email String @unique // Missing closing brace - syntax error `; - const workDir = createProject(modelWithSyntaxError); + const { workDir } = await createProject(modelWithSyntaxError); // Should throw an error due to syntax error expect(() => runCli('check', workDir)).toThrow(); diff --git a/packages/cli/test/db.test.ts b/packages/cli/test/db.test.ts index b17f92e5e..b5b76d4fa 100644 --- a/packages/cli/test/db.test.ts +++ b/packages/cli/test/db.test.ts @@ -10,14 +10,14 @@ 
model User { `; describe('CLI db commands test', () => { - it('should generate a database with db push', () => { - const workDir = createProject(model, { provider: 'sqlite' }); + it('should generate a database with db push', async () => { + const { workDir } = await createProject(model, { provider: 'sqlite' }); runCli('db push', workDir); expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); }); - it('should seed the database with db seed with seed script', () => { - const workDir = createProject(model, { provider: 'sqlite' }); + it('should seed the database with db seed with seed script', async () => { + const { workDir } = await createProject(model, { provider: 'sqlite' }); const pkgJson = JSON.parse(fs.readFileSync(path.join(workDir, 'package.json'), 'utf8')); pkgJson.zenstack = { seed: 'node seed.js', @@ -35,8 +35,8 @@ fs.writeFileSync('seed.txt', 'success'); expect(fs.readFileSync(path.join(workDir, 'seed.txt'), 'utf8')).toBe('success'); }); - it('should seed the database after migrate reset', () => { - const workDir = createProject(model, { provider: 'sqlite' }); + it('should seed the database after migrate reset', async () => { + const { workDir } = await createProject(model, { provider: 'sqlite' }); const pkgJson = JSON.parse(fs.readFileSync(path.join(workDir, 'package.json'), 'utf8')); pkgJson.zenstack = { seed: 'node seed.js', @@ -54,8 +54,8 @@ fs.writeFileSync('seed.txt', 'success'); expect(fs.readFileSync(path.join(workDir, 'seed.txt'), 'utf8')).toBe('success'); }); - it('should skip seeding the database without seed script', () => { - const workDir = createProject(model, { provider: 'sqlite' }); + it('should skip seeding the database without seed script', async () => { + const { workDir } = await createProject(model, { provider: 'sqlite' }); runCli('db seed', workDir); }); }); diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index 367ba1348..4d6adf203 100644 --- a/packages/cli/test/db/pull.test.ts 
+++ b/packages/cli/test/db/pull.test.ts @@ -1,7 +1,7 @@ import fs from 'node:fs'; import path from 'node:path'; import { describe, expect, it } from 'vitest'; -import { createFormattedProject, createProject, getDefaultPrelude, runCli } from '../utils'; +import { createProject, getDefaultPrelude, runCli } from '../utils'; import { formatDocument } from '@zenstackhq/language'; import { getTestDbProvider } from '@zenstackhq/testtools'; @@ -10,7 +10,7 @@ const getSchema = (workDir: string) => fs.readFileSync(path.join(workDir, 'zenst describe('DB pull - Common features (all providers)', () => { describe('Pull from zero - restore complete schema from database', () => { it('should restore basic schema with all supported types', async () => { - const { workDir, schema } = await createFormattedProject( + const { workDir, schema } = await createProject( `model User { id Int @id @default(autoincrement()) email String @unique @@ -43,7 +43,7 @@ describe('DB pull - Common features (all providers)', () => { }); it('should restore schema with relations', async () => { - const { workDir, schema } = await createFormattedProject( + const { workDir, schema } = await createProject( `model Post { id Int @id @default(autoincrement()) title String @@ -69,7 +69,7 @@ model User { }); it('should restore schema with many-to-many relations', async () => { - const { workDir, schema } = await createFormattedProject( + const { workDir, schema } = await createProject( `model Post { id Int @id @default(autoincrement()) title String @@ -103,7 +103,7 @@ model Tag { }); it('should restore one-to-one relation when FK is the single-column primary key', async () => { - const { workDir, schema } = await createFormattedProject( + const { workDir, schema } = await createProject( `model Profile { user User @relation(fields: [id], references: [id], onDelete: Cascade) id Int @id @default(autoincrement()) @@ -128,7 +128,7 @@ model User { }); it('should restore schema with indexes and unique constraints', async 
() => { - const { workDir, schema } = await createFormattedProject( + const { workDir, schema } = await createProject( `model User { id Int @id @default(autoincrement()) email String @unique @@ -155,7 +155,7 @@ model User { }); it('should restore schema with composite primary keys', async () => { - const { workDir, schema } = await createFormattedProject( + const { workDir, schema } = await createProject( `model UserRole { userId String role String @@ -176,7 +176,7 @@ model User { }); it('should preserve Decimal and Float default value precision', async () => { - const { workDir, schema } = await createFormattedProject( + const { workDir, schema } = await createProject( `model Product { id Int @id @default(autoincrement()) price Decimal @default(99.99) @@ -202,7 +202,7 @@ model User { describe('Pull with existing schema - preserve schema features', () => { it('should preserve field and table mappings', async () => { - const { workDir, schema } = await createFormattedProject( + const { workDir, schema } = await createProject( `model User { id Int @id @default(autoincrement()) email String @unique @map('email_address') @@ -220,7 +220,7 @@ model User { }); it('should not modify a comprehensive schema with all features', async () => { - const { workDir, schema } = await createFormattedProject(`model User { + const { workDir, schema } = await createProject(`model User { id Int @id @default(autoincrement()) email String @unique @map('email_address') name String? 
@default('Anonymous') @@ -308,7 +308,7 @@ enum users_role { }); it('should preserve imports when pulling with multi-file schema', async () => { - const workDir = createProject('', { customPrelude: true }); + const { workDir } = await createProject('', { customPrelude: true }); const schemaPath = path.join(workDir, 'zenstack/schema.zmodel'); const modelsDir = path.join(workDir, 'zenstack/models'); @@ -364,7 +364,7 @@ model Post { describe('Pull should update existing field definitions when database changes', () => { it('should update field type when database column type changes', async () => { // Step 1: Create initial schema with String field - const { workDir } = await createFormattedProject( + const { workDir } = await createProject( `model User { id Int @id @default(autoincrement()) email String @unique @@ -405,7 +405,7 @@ model User { it('should update field optionality when database column nullability changes', async () => { // Step 1: Create initial schema with required field - const { workDir } = await createFormattedProject( + const { workDir } = await createProject( `model User { id Int @id @default(autoincrement()) email String @unique @@ -446,7 +446,7 @@ model User { it('should update default value when database default changes', async () => { // Step 1: Create initial schema with default value - const { workDir } = await createFormattedProject( + const { workDir } = await createProject( `model User { id Int @id @default(autoincrement()) email String @unique @@ -494,7 +494,7 @@ describe('DB pull - PostgreSQL specific features', () => { skip(); return; } - const { workDir, schema } = await createFormattedProject( + const { workDir, schema } = await createProject( `model User { id Int @id @default(autoincrement()) email String @unique @@ -511,13 +511,13 @@ model Post { @@schema('content') }`, - { provider: 'postgresql', extra:{ schemas: ['public', 'content', 'auth'] } }, + { provider: 'postgresql', datasourceFields:{ schemas: ['public', 'content', 'auth'] 
} }, ); runCli('db push', workDir); const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); - fs.writeFileSync(schemaFile, getDefaultPrelude({ provider: 'postgresql', extra:{ schemas: ['public', 'content', 'auth']} })); + fs.writeFileSync(schemaFile, getDefaultPrelude({ provider: 'postgresql', datasourceFields:{ schemas: ['public', 'content', 'auth']} })); runCli('db pull --indent 4', workDir); const restoredSchema = getSchema(workDir); @@ -530,7 +530,7 @@ model Post { skip(); return; } - const { workDir, schema } = await createFormattedProject( + const { workDir, schema } = await createProject( `model User { id Int @id @default(autoincrement()) email String @unique @@ -565,7 +565,7 @@ enum UserRole { skip(); return; } - const { workDir, schema } = await createFormattedProject( + const { workDir, schema } = await createProject( `model User { id Int @id @default(autoincrement()) email String @unique @@ -593,7 +593,7 @@ enum UserStatus { INACTIVE SUSPENDED }`, - { provider: 'postgresql', extra:{ schemas: ['public', 'content', 'auth'] } }, + { provider: 'postgresql', datasourceFields:{ schemas: ['public', 'content', 'auth'] } }, ); runCli('db push', workDir); @@ -613,7 +613,7 @@ enum UserStatus { // This test verifies the mapping works correctly. // Note: Default native types (jsonb for Json, bytea for Bytes) are not added when pulling from zero // because they match the default database type for that field type. 
- const { workDir } = await createFormattedProject( + const { workDir } = await createProject( `model TypeTest { id Int @id @default(autoincrement()) smallNumber Int @db.SmallInt() @@ -666,14 +666,14 @@ describe('DB pull - SQL specific features', () => { return; } - const { workDir, schema } = await createFormattedProject( + const { workDir, schema } = await createProject( `model User { id Int @id @default(autoincrement()) email String @unique - status User_status @default(ACTIVE) + status UserStatus @default(ACTIVE) } -enum User_status { +enum UserStatus { ACTIVE INACTIVE SUSPENDED diff --git a/packages/cli/test/db/push.test.ts b/packages/cli/test/db/push.test.ts index 9c688df4d..bba9e05bd 100644 --- a/packages/cli/test/db/push.test.ts +++ b/packages/cli/test/db/push.test.ts @@ -10,8 +10,8 @@ model User { `; describe('CLI db commands test', () => { - it('should generate a database with db push', () => { - const workDir = createProject(model, { provider: 'sqlite' }); + it('should generate a database with db push', async () => { + const { workDir } = await createProject(model, { provider: 'sqlite' }); runCli('db push', workDir); expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); }); diff --git a/packages/cli/test/format.test.ts b/packages/cli/test/format.test.ts index 9c95960a3..0bfb32d7f 100644 --- a/packages/cli/test/format.test.ts +++ b/packages/cli/test/format.test.ts @@ -10,8 +10,8 @@ model User { `; describe('CLI format command test', () => { - it('should format a valid schema successfully', () => { - const workDir = createProject(model); + it('should format a valid schema successfully', async () => { + const { workDir } = await createProject(model); expect(() => runCli('format', workDir)).not.toThrow(); const updatedContent = fs.readFileSync(`${workDir}/zenstack/schema.zmodel`, 'utf-8'); expect( @@ -22,12 +22,12 @@ describe('CLI format command test', () => { ).toBeTruthy(); }); - it('should silently ignore invalid schema', () => { + 
it('should silently ignore invalid schema', async () => { const invalidModel = ` model User { id String @id @default(cuid()) `; - const workDir = createProject(invalidModel); + const { workDir } = await createProject(invalidModel); expect(() => runCli('format', workDir)).not.toThrow(); }); }); diff --git a/packages/cli/test/generate.test.ts b/packages/cli/test/generate.test.ts index 074e88e56..6b270b4a8 100644 --- a/packages/cli/test/generate.test.ts +++ b/packages/cli/test/generate.test.ts @@ -10,28 +10,28 @@ model User { `; describe('CLI generate command test', () => { - it('should generate a TypeScript schema', () => { - const workDir = createProject(model); + it('should generate a TypeScript schema', async () => { + const { workDir } = await createProject(model); runCli('generate', workDir); expect(fs.existsSync(path.join(workDir, 'zenstack/schema.ts'))).toBe(true); expect(fs.existsSync(path.join(workDir, 'zenstack/schema.prisma'))).toBe(false); }); - it('should respect custom output directory', () => { - const workDir = createProject(model); + it('should respect custom output directory', async () => { + const { workDir } = await createProject(model); runCli('generate --output ./zen', workDir); expect(fs.existsSync(path.join(workDir, 'zen/schema.ts'))).toBe(true); }); - it('should respect custom schema location', () => { - const workDir = createProject(model); + it('should respect custom schema location', async () => { + const { workDir } = await createProject(model); fs.renameSync(path.join(workDir, 'zenstack/schema.zmodel'), path.join(workDir, 'zenstack/foo.zmodel')); runCli('generate --schema ./zenstack/foo.zmodel', workDir); expect(fs.existsSync(path.join(workDir, 'zenstack/schema.ts'))).toBe(true); }); - it('should respect package.json config', () => { - const workDir = createProject(model); + it('should respect package.json config', async () => { + const { workDir } = await createProject(model); fs.mkdirSync(path.join(workDir, 'foo')); 
fs.renameSync(path.join(workDir, 'zenstack/schema.zmodel'), path.join(workDir, 'foo/schema.zmodel')); fs.rmdirSync(path.join(workDir, 'zenstack')); @@ -45,8 +45,8 @@ describe('CLI generate command test', () => { expect(fs.existsSync(path.join(workDir, 'bar/schema.ts'))).toBe(true); }); - it('should respect package.json schema dir config', () => { - const workDir = createProject(model); + it('should respect package.json schema dir config', async () => { + const { workDir } = await createProject(model); fs.mkdirSync(path.join(workDir, 'foo')); fs.renameSync(path.join(workDir, 'zenstack/schema.zmodel'), path.join(workDir, 'foo/schema.zmodel')); fs.rmdirSync(path.join(workDir, 'zenstack')); @@ -60,15 +60,15 @@ describe('CLI generate command test', () => { expect(fs.existsSync(path.join(workDir, 'bar/schema.ts'))).toBe(true); }); - it('should respect lite option', () => { - const workDir = createProject(model); + it('should respect lite option', async () => { + const { workDir } = await createProject(model); runCli('generate --lite', workDir); expect(fs.existsSync(path.join(workDir, 'zenstack/schema.ts'))).toBe(true); expect(fs.existsSync(path.join(workDir, 'zenstack/schema-lite.ts'))).toBe(true); }); - it('should respect liteOnly option', () => { - const workDir = createProject(model); + it('should respect liteOnly option', async () => { + const { workDir } = await createProject(model); runCli('generate --lite-only', workDir); expect(fs.existsSync(path.join(workDir, 'zenstack/schema.ts'))).toBe(false); expect(fs.existsSync(path.join(workDir, 'zenstack/schema-lite.ts'))).toBe(true); diff --git a/packages/cli/test/migrate.test.ts b/packages/cli/test/migrate.test.ts index 86abc3576..bb3a7cd53 100644 --- a/packages/cli/test/migrate.test.ts +++ b/packages/cli/test/migrate.test.ts @@ -10,37 +10,37 @@ model User { `; describe('CLI migrate commands test', () => { - it('should generate a database with migrate dev', () => { - const workDir = createProject(model, { provider: 
'sqlite' }); + it('should generate a database with migrate dev', async () => { + const { workDir } = await createProject(model, { provider: 'sqlite' }); runCli('migrate dev --name init', workDir); expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); expect(fs.existsSync(path.join(workDir, 'zenstack/migrations'))).toBe(true); }); - it('should reset the database with migrate reset', () => { - const workDir = createProject(model, { provider: 'sqlite' }); + it('should reset the database with migrate reset', async () => { + const { workDir } = await createProject(model, { provider: 'sqlite' }); runCli('db push', workDir); expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); runCli('migrate reset --force', workDir); expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); }); - it('should reset the database with migrate deploy', () => { - const workDir = createProject(model, { provider: 'sqlite' }); + it('should reset the database with migrate deploy', async () => { + const { workDir } = await createProject(model, { provider: 'sqlite' }); runCli('migrate dev --name init', workDir); fs.rmSync(path.join(workDir, 'zenstack/test.db')); runCli('migrate deploy', workDir); expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); }); - it('supports migrate status', () => { - const workDir = createProject(model, { provider: 'sqlite' }); + it('supports migrate status', async () => { + const { workDir } = await createProject(model, { provider: 'sqlite' }); runCli('migrate dev --name init', workDir); runCli('migrate status', workDir); }); - it('supports migrate resolve', () => { - const workDir = createProject(model, { provider: 'sqlite' }); + it('supports migrate resolve', async () => { + const { workDir } = await createProject(model, { provider: 'sqlite' }); runCli('migrate dev --name init', workDir); // find the migration record "timestamp_init" @@ -65,8 +65,8 @@ describe('CLI migrate commands test', () 
=> { runCli(`migrate resolve --applied ${migration}`, workDir); }); - it('should throw error when neither applied nor rolled-back is provided', () => { - const workDir = createProject(model, { provider: 'sqlite' }); + it('should throw error when neither applied nor rolled-back is provided', async () => { + const { workDir } = await createProject(model, { provider: 'sqlite' }); expect(() => runCli('migrate resolve', workDir)).toThrow(); }); }); diff --git a/packages/cli/test/plugins/custom-plugin.test.ts b/packages/cli/test/plugins/custom-plugin.test.ts index 084bf9cd7..3492dbbe6 100644 --- a/packages/cli/test/plugins/custom-plugin.test.ts +++ b/packages/cli/test/plugins/custom-plugin.test.ts @@ -5,8 +5,8 @@ import { createProject, runCli } from '../utils'; import { execSync } from 'node:child_process'; describe('Custom plugins tests', () => { - it('runs custom plugin generator', () => { - const workDir = createProject(` + it('runs custom plugin generator', async () => { + const { workDir } = await createProject(` plugin custom { provider = '../my-plugin.js' output = '../custom-output' diff --git a/packages/cli/test/plugins/prisma-plugin.test.ts b/packages/cli/test/plugins/prisma-plugin.test.ts index 739252e21..4b619c679 100644 --- a/packages/cli/test/plugins/prisma-plugin.test.ts +++ b/packages/cli/test/plugins/prisma-plugin.test.ts @@ -4,8 +4,8 @@ import { describe, expect, it } from 'vitest'; import { createProject, runCli } from '../utils'; describe('Core plugins tests', () => { - it('can automatically generate a TypeScript schema with default output', () => { - const workDir = createProject(` + it('can automatically generate a TypeScript schema with default output', async () => { + const { workDir } = await createProject(` model User { id String @id @default(cuid()) } @@ -14,8 +14,8 @@ model User { expect(fs.existsSync(path.join(workDir, 'zenstack/schema.ts'))).toBe(true); }); - it('can automatically generate a TypeScript schema with custom output', () => { - 
const workDir = createProject(` + it('can automatically generate a TypeScript schema with custom output', async () => { + const { workDir } = await createProject(` plugin typescript { provider = '@core/typescript' output = '../generated-schema' @@ -29,8 +29,8 @@ model User { expect(fs.existsSync(path.join(workDir, 'generated-schema/schema.ts'))).toBe(true); }); - it('can generate a Prisma schema with default output', () => { - const workDir = createProject(` + it('can generate a Prisma schema with default output', async () => { + const { workDir } = await createProject(` plugin prisma { provider = '@core/prisma' } @@ -43,8 +43,8 @@ model User { expect(fs.existsSync(path.join(workDir, 'zenstack/schema.prisma'))).toBe(true); }); - it('can generate a Prisma schema with custom output', () => { - const workDir = createProject(` + it('can generate a Prisma schema with custom output', async () => { + const { workDir } = await createProject(` plugin prisma { provider = '@core/prisma' output = '../prisma/schema.prisma' @@ -58,8 +58,8 @@ model User { expect(fs.existsSync(path.join(workDir, 'prisma/schema.prisma'))).toBe(true); }); - it('can generate a Prisma schema with custom output relative to zenstack.output', () => { - const workDir = createProject(` + it('can generate a Prisma schema with custom output relative to zenstack.output', async () => { + const { workDir } = await createProject(` plugin prisma { provider = '@core/prisma' output = './schema.prisma' diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index 5052aa558..2c88aa7db 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -42,7 +42,7 @@ function getTestDbName(provider: string) { ); } -export function getDefaultPrelude(options?: { provider?: 'sqlite' | 'postgresql' | 'mysql', extra?: Record }) { +export function getDefaultPrelude(options?: { provider?: 'sqlite' | 'postgresql' | 'mysql', datasourceFields?: Record }) { const provider = (options?.provider || 
getTestDbProvider()) ?? 'sqlite'; const dbName = getTestDbName(provider); let dbUrl: string; @@ -64,7 +64,7 @@ export function getDefaultPrelude(options?: { provider?: 'sqlite' | 'postgresql' const fields: [string, string][] = [ ['provider', `'${provider}'`], ['url', `'${dbUrl}'`], - ...Object.entries(options?.extra || {}).map(([k, v]) => { + ...Object.entries(options?.datasourceFields || {}).map(([k, v]) => { const value = Array.isArray(v) ? `[${v.map(item => `'${item}'`).join(', ')}]` : `'${v}'`; return [k, value] as [string, string]; }), @@ -78,25 +78,17 @@ export function getDefaultPrelude(options?: { provider?: 'sqlite' | 'postgresql' return ZMODEL_PRELUDE; } -export function createProject( +export async function createProject( zmodel: string, - options?: { customPrelude?: boolean; provider?: 'sqlite' | 'postgresql' | 'mysql' }, + options?: { customPrelude?: boolean; provider?: 'sqlite' | 'postgresql' | 'mysql'; datasourceFields?: Record }, ) { const workDir = createTestProject(); fs.mkdirSync(path.join(workDir, 'zenstack'), { recursive: true }); const schemaPath = path.join(workDir, 'zenstack/schema.zmodel'); - fs.writeFileSync(schemaPath, !options?.customPrelude ? `${getDefaultPrelude({ provider: options?.provider })}\n\n${zmodel}` : zmodel); - return workDir; -} - -export async function createFormattedProject( - zmodel: string, - options?: { provider?: 'sqlite' | 'postgresql' | 'mysql', extra?: Record }, -) { - const fullContent = `${getDefaultPrelude({ provider: options?.provider, extra: options?.extra })}\n\n${zmodel}`; - const formatted = await formatDocument(fullContent); - const workDir = createProject(formatted, { customPrelude: true, provider: options?.provider }); - return { workDir, schema: formatted }; + const content = options?.customPrelude ? 
zmodel : `${getDefaultPrelude({ provider: options?.provider, datasourceFields: options?.datasourceFields })}\n\n${zmodel}`; + const schema = await formatDocument(content); + fs.writeFileSync(schemaPath, schema); + return { workDir, schema }; } export function runCli(command: string, cwd: string) { From 06dd90761e9bb9620ca67cf3828feb2d27def5b8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 6 Feb 2026 17:38:36 +0100 Subject: [PATCH 78/83] fix(cli): improve db pull for composite FKs and MySQL uniqueness Enhances database introspection to correctly handle composite foreign keys by mapping columns by position rather than name alone. Improves MySQL introspection by checking statistics tables for single-column unique indexes, ensuring accurate model generation even when column keys are ambiguous. Ensures MySQL synthetic enum names respect requested model casing to prevent unnecessary schema mapping. Adds comprehensive tests for composite relations and database-specific uniqueness detection. 
--- packages/cli/src/actions/db.ts | 30 +++-- packages/cli/src/actions/pull/index.ts | 120 ++++++++++++------ .../cli/src/actions/pull/provider/mysql.ts | 70 +++++++++- .../src/actions/pull/provider/postgresql.ts | 9 +- .../cli/src/actions/pull/provider/provider.ts | 2 +- .../cli/src/actions/pull/provider/sqlite.ts | 2 +- packages/cli/test/db/pull.test.ts | 79 +++++++++++- 7 files changed, 251 insertions(+), 61 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index ec3b28c36..096ce01ae 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -109,7 +109,7 @@ async function runPull(options: PullOptions) { } spinner.start('Introspecting database...'); - const { enums, tables } = await provider.introspect(datasource.url, { schemas: datasource.allSchemas }); + const { enums, tables } = await provider.introspect(datasource.url, { schemas: datasource.allSchemas, modelCasing: options.modelCasing }); spinner.succeed('Database introspected'); console.log(colors.blue('Syncing schema...')); @@ -156,7 +156,7 @@ async function runPull(options: PullOptions) { rr.references.schema === relation.references.schema && rr.references.table === relation.references.table) || (rr.schema === relation.references.schema && - rr.column === relation.references.column && + rr.columns[0] === relation.references.columns[0] && rr.references.schema === relation.schema && rr.references.table === relation.table)) ); @@ -263,10 +263,12 @@ async function runPull(options: PullOptions) { (d) => getDbName(d.node as any) === getDbName(f.type.reference!.ref as any), )?.node; if (ref && f.type.reference) { - (f.type.reference.ref as any) = ref; - // Keep the textual reference in sync with the semantic reference - (f.type.reference as any).$refText = - (ref as any).name ?? (f.type.reference as any).$refText; + // Replace the entire reference object — Langium References + // from parsed documents expose `ref` as a getter-only property. 
+ (f.type as any).reference = { + ref, + $refText: (ref as any).name ?? (f.type.reference as any).$refText, + }; } } }); @@ -356,8 +358,12 @@ async function runPull(options: PullOptions) { const oldRefName = getDbName(oldType.reference.ref); if (newRefName !== oldRefName) { fieldUpdates.push(`reference: ${oldType.reference.$refText} -> ${newType.reference.$refText}`); - (oldType.reference as any).ref = newType.reference.ref; - (oldType.reference as any).$refText = newType.reference.$refText; + // Replace the entire reference object — Langium References + // from parsed documents expose `ref` as a getter-only property. + (oldType as any).reference = { + ref: newType.reference.ref, + $refText: newType.reference.$refText, + }; } } else if (newType.reference?.ref && !oldType.reference) { // Changed from builtin to reference type @@ -441,8 +447,12 @@ async function runPull(options: PullOptions) { (d) => getDbName(d.node as any) === getDbName(f.type.reference!.ref as any), )?.node as DataModel | undefined; if (ref) { - (f.type.reference.$refText as any) = ref.name; - (f.type.reference.ref as any) = ref; + // Replace the entire reference object — Langium References + // from parsed documents expose `ref` as a getter-only property. + (f.type as any).reference = { + ref, + $refText: ref.name ?? 
(f.type.reference as any).$refText, + }; } } return; diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index a1239af76..4e1a34c6b 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -95,7 +95,7 @@ export function syncEnums({ export type Relation = { schema: string; table: string; - column: string; + columns: string[]; type: 'one' | 'many'; fk_name: string; foreign_key_on_update: Cascade; @@ -104,7 +104,7 @@ export type Relation = { references: { schema: string | null; table: string | null; - column: string | null; + columns: (string | null)[]; type: 'one' | 'many'; }; }; @@ -145,28 +145,42 @@ export function syncTable({ builder.setDecl(tableMapAttribute).addArg((argBuilder) => argBuilder.StringLiteral.setValue(table.name)), ); } + // Group FK columns by constraint name to handle composite foreign keys. + // Each FK constraint (identified by fk_name) may span multiple columns. + const fkGroups = new Map(); table.columns.forEach((column) => { - if (column.foreign_key_table) { - // Check if this FK column is the table's single-column primary key - // If so, it should be treated as a one-to-one relation - const isSingleColumnPk = !multiPk && column.pk; - relations.push({ - schema: table.schema, - table: table.name, - column: column.name, - type: 'one', - fk_name: column.foreign_key_name!, - foreign_key_on_delete: column.foreign_key_on_delete, - foreign_key_on_update: column.foreign_key_on_update, - nullable: column.nullable, - references: { - schema: column.foreign_key_schema, - table: column.foreign_key_table, - column: column.foreign_key_column, - type: column.unique || isSingleColumnPk ? 'one' : 'many', - }, - }); + if (column.foreign_key_table && column.foreign_key_name) { + const group = fkGroups.get(column.foreign_key_name) ?? 
[]; + group.push(column); + fkGroups.set(column.foreign_key_name, group); } + }); + + for (const [fkName, fkColumns] of fkGroups) { + const firstCol = fkColumns[0]!; + // For single-column FKs, check if the column is the table's single-column PK (one-to-one) + const isSingleColumnPk = fkColumns.length === 1 && !multiPk && firstCol.pk; + // A single-column FK with unique constraint means one-to-one on the opposite side + const isUniqueRelation = (fkColumns.length === 1 && firstCol.unique) || isSingleColumnPk; + relations.push({ + schema: table.schema, + table: table.name, + columns: fkColumns.map((c) => c.name), + type: 'one', + fk_name: fkName, + foreign_key_on_delete: firstCol.foreign_key_on_delete, + foreign_key_on_update: firstCol.foreign_key_on_update, + nullable: firstCol.nullable, + references: { + schema: firstCol.foreign_key_schema, + table: firstCol.foreign_key_table, + columns: fkColumns.map((c) => c.foreign_key_column), + type: isUniqueRelation ? 'one' : 'many', + }, + }); + } + + table.columns.forEach((column) => { const { name, modified } = resolveNameCasing(options.fieldCasing, column.name); @@ -397,25 +411,39 @@ export function syncRelation({ | undefined; if (!sourceModel) return; - const sourceFieldId = sourceModel.fields.findIndex((f) => getDbName(f) === relation.column); - const sourceField = sourceModel.fields[sourceFieldId] as DataField | undefined; - if (!sourceField) return; + // Resolve all source and target fields for the relation (supports composite FKs) + const sourceFields: { field: DataField; index: number }[] = []; + for (const colName of relation.columns) { + const idx = sourceModel.fields.findIndex((f) => getDbName(f) === colName); + const field = sourceModel.fields[idx] as DataField | undefined; + if (!field) return; + sourceFields.push({ field, index: idx }); + } const targetModel = model.declarations.find( (d) => d.$type === 'DataModel' && getDbName(d) === relation.references.table, ) as DataModel | undefined; if (!targetModel) 
return; - const targetField = targetModel.fields.find((f) => getDbName(f) === relation.references.column); - if (!targetField) return; + const targetFields: DataField[] = []; + for (const colName of relation.references.columns) { + const field = targetModel.fields.find((f) => getDbName(f) === colName); + if (!field) return; + targetFields.push(field); + } + + // Use the first source field for naming heuristics + const firstSourceField = sourceFields[0]!.field; + const firstSourceFieldId = sourceFields[0]!.index; + const firstColumn = relation.columns[0]!; const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? '_' : ''; - const relationName = `${relation.table}${similarRelations > 0 ? `_${relation.column}` : ''}To${relation.references.table}`; + const relationName = `${relation.table}${similarRelations > 0 ? `_${firstColumn}` : ''}To${relation.references.table}`; // Derive a relation field name from the FK scalar field: if the field ends with "Id", // strip the suffix and use the remainder (e.g., "authorId" -> "author"). - const sourceNameFromReference = sourceField.name.toLowerCase().endsWith('id') ? `${resolveNameCasing(options.fieldCasing, sourceField.name.slice(0, -2)).name}${relation.type === 'many'? 's' : ''}` : undefined; + const sourceNameFromReference = firstSourceField.name.toLowerCase().endsWith('id') ? `${resolveNameCasing(options.fieldCasing, firstSourceField.name.slice(0, -2)).name}${relation.type === 'many'? 's' : ''}` : undefined; // Check if the derived name would clash with an existing field const sourceFieldFromReference = sourceModel.fields.find((f) => f.name === sourceNameFromReference); @@ -426,12 +454,12 @@ export function syncRelation({ let { name: sourceFieldName } = resolveNameCasing( options.fieldCasing, similarRelations > 0 - ? `${fieldPrefix}${lowerCaseFirst(sourceModel.name)}_${relation.column}` + ? `${fieldPrefix}${lowerCaseFirst(sourceModel.name)}_${firstColumn}` : `${(!sourceFieldFromReference? 
sourceNameFromReference : undefined) || lowerCaseFirst(resolveNameCasing(options.fieldCasing, targetModel.name).name)}${relation.type === 'many'? 's' : ''}`, ); if (sourceModel.fields.find((f) => f.name === sourceFieldName)) { - sourceFieldName = `${sourceFieldName}To${lowerCaseFirst(targetModel.name)}_${relation.references.column}`; + sourceFieldName = `${sourceFieldName}To${lowerCaseFirst(targetModel.name)}_${relation.references.columns[0]}`; } const sourceFieldFactory = new DataFieldFactory() @@ -446,10 +474,24 @@ export function syncRelation({ sourceFieldFactory.addAttribute((ab) => { ab.setDecl(relationAttribute); if (includeRelationName) ab.addArg((ab) => ab.StringLiteral.setValue(relationName)); - ab.addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(sourceField)), 'fields').addArg( - (ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(targetField)), - 'references', - ); + + // Build fields array (all source FK columns) + ab.addArg((ab) => { + const arrayExpr = ab.ArrayExpr; + for (const { field } of sourceFields) { + arrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(field)); + } + return arrayExpr; + }, 'fields'); + + // Build references array (all target columns) + ab.addArg((ab) => { + const arrayExpr = ab.ArrayExpr; + for (const field of targetFields) { + arrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(field)); + } + return arrayExpr; + }, 'references'); // Prisma defaults: onDelete is SetNull for optional, Restrict for mandatory const onDeleteDefault = relation.nullable ? 
'SET NULL' : 'RESTRICT'; @@ -474,18 +516,20 @@ export function syncRelation({ ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onUpdate'); } - if (relation.fk_name && relation.fk_name !== `${relation.table}_${relation.column}_fkey`) ab.addArg((ab) => ab.StringLiteral.setValue(relation.fk_name), 'map'); + // Check if the FK constraint name differs from the default pattern + const defaultFkName = `${relation.table}_${relation.columns.join('_')}_fkey`; + if (relation.fk_name && relation.fk_name !== defaultFkName) ab.addArg((ab) => ab.StringLiteral.setValue(relation.fk_name), 'map'); return ab; }); - sourceModel.fields.splice(sourceFieldId, 0, sourceFieldFactory.node); // Insert the relation field before the FK scalar fie + sourceModel.fields.splice(firstSourceFieldId, 0, sourceFieldFactory.node); // Insert the relation field before the first FK scalar field const oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? '_' : ''; const { name: oppositeFieldName } = resolveNameCasing( options.fieldCasing, similarRelations > 0 - ? `${oppositeFieldPrefix}${lowerCaseFirst(sourceModel.name)}_${relation.column}` + ? `${oppositeFieldPrefix}${lowerCaseFirst(sourceModel.name)}_${firstColumn}` : `${lowerCaseFirst(resolveNameCasing(options.fieldCasing, sourceModel.name).name)}${relation.references.type === 'many'? 
's' : ''}`, ); diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index 2b65d1594..794248298 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -3,6 +3,7 @@ import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { getAttributeRef, getDbName, getFunctionRef, normalizeDecimalDefault, normalizeFloatDefault } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; import { CliError } from '../../../cli-error'; +import { resolveNameCasing } from '../casing'; // Note: We dynamically import mysql2 inside the async function to avoid // requiring it at module load time for environments that don't use MySQL. @@ -114,7 +115,7 @@ export const mysql: IntrospectionProvider = { return { type: 'longblob' }; } }, - async introspect(connectionString: string): Promise { + async introspect(connectionString: string, options: { schemas: string[]; modelCasing: 'pascal' | 'camel' | 'snake' | 'none' }): Promise { const mysql = await import('mysql2/promise'); const connection = await mysql.createConnection(connectionString); @@ -143,7 +144,15 @@ export const mysql: IntrospectionProvider = { .sort( (a: { ordinal_position?: number }, b: { ordinal_position?: number }) => (a.ordinal_position ?? 0) - (b.ordinal_position ?? 0) - ); + ) + .map((col: any) => { + // MySQL enum datatype_name is synthetic (TableName_ColumnName). + // Apply model casing so it matches the cased enum_type. 
+ if (col.datatype === 'enum' && col.datatype_name) { + return { ...col, datatype_name: resolveNameCasing(options.modelCasing, col.datatype_name).name }; + } + return col; + }); // Filter out auto-generated FK indexes (MySQL creates these automatically) // Pattern: {Table}_{column}_fkey for single-column FK indexes @@ -171,10 +180,14 @@ export const mysql: IntrospectionProvider = { const enums: IntrospectedEnum[] = enumRows.map((row) => { // Parse enum values from column_type like "enum('val1','val2','val3')" const values = parseEnumValues(row.column_type); + // MySQL doesn't have standalone enum types; the name is entirely + // synthetic (TableName_ColumnName). Apply model casing here so it + // arrives already cased — there is no raw DB name to @@map back to. + const syntheticName = `${row.table_name}_${row.column_name}`; + const { name } = resolveNameCasing(options.modelCasing, syntheticName); return { schema_name: '', // MySQL doesn't support multi-schema - // Create a unique enum type name based on table and column - enum_type: `${row.table_name}_${row.column_name}`, + enum_type: name, values, }; }); @@ -337,8 +350,53 @@ SELECT END, 'pk', c.COLUMN_KEY = 'PRI', -- true if column is part of the primary key - 'unique', c.COLUMN_KEY = 'UNI', -- true if column has a unique constraint - 'unique_name', CASE WHEN c.COLUMN_KEY = 'UNI' THEN c.COLUMN_NAME ELSE NULL END, + + -- unique: true if the column has a single-column unique index. + -- COLUMN_KEY = 'UNI' covers most cases, but may not be set when the column + -- also participates in other indexes (showing 'MUL' instead on some MySQL versions). + -- Also check INFORMATION_SCHEMA.STATISTICS for single-column unique indexes + -- (NON_UNIQUE = 0) to match the PostgreSQL introspection behavior. 
+ 'unique', ( + c.COLUMN_KEY = 'UNI' + OR EXISTS ( + SELECT 1 + FROM INFORMATION_SCHEMA.STATISTICS s_uni + WHERE s_uni.TABLE_SCHEMA = c.TABLE_SCHEMA + AND s_uni.TABLE_NAME = c.TABLE_NAME + AND s_uni.COLUMN_NAME = c.COLUMN_NAME + AND s_uni.NON_UNIQUE = 0 + AND s_uni.INDEX_NAME != 'PRIMARY' + AND ( + SELECT COUNT(*) + FROM INFORMATION_SCHEMA.STATISTICS s_cnt + WHERE s_cnt.TABLE_SCHEMA = s_uni.TABLE_SCHEMA + AND s_cnt.TABLE_NAME = s_uni.TABLE_NAME + AND s_cnt.INDEX_NAME = s_uni.INDEX_NAME + ) = 1 + ) + ), + 'unique_name', ( + SELECT COALESCE( + CASE WHEN c.COLUMN_KEY = 'UNI' THEN c.COLUMN_NAME ELSE NULL END, + ( + SELECT s_uni.INDEX_NAME + FROM INFORMATION_SCHEMA.STATISTICS s_uni + WHERE s_uni.TABLE_SCHEMA = c.TABLE_SCHEMA + AND s_uni.TABLE_NAME = c.TABLE_NAME + AND s_uni.COLUMN_NAME = c.COLUMN_NAME + AND s_uni.NON_UNIQUE = 0 + AND s_uni.INDEX_NAME != 'PRIMARY' + AND ( + SELECT COUNT(*) + FROM INFORMATION_SCHEMA.STATISTICS s_cnt + WHERE s_cnt.TABLE_SCHEMA = s_uni.TABLE_SCHEMA + AND s_cnt.TABLE_NAME = s_uni.TABLE_NAME + AND s_cnt.INDEX_NAME = s_uni.INDEX_NAME + ) = 1 + LIMIT 1 + ) + ) + ), -- computed: true if column has a generation expression (virtual or stored) 'computed', c.GENERATION_EXPRESSION IS NOT NULL AND c.GENERATION_EXPRESSION != '', diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 879a0b89d..c781259d2 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -176,7 +176,7 @@ export const postgresql: IntrospectionProvider = { return { type: 'Unsupported' as const, isArray }; } }, - async introspect(connectionString: string, options: { schemas: string[] }): Promise { + async introspect(connectionString: string, options: { schemas: string[]; modelCasing: 'pascal' | 'camel' | 'snake' | 'none' }): Promise { const client = new Client({ connectionString }); await client.connect(); @@ -523,9 +523,12 @@ SELECT -- 
pg_namespace for FK target: get the schema of the referenced table LEFT JOIN "pg_catalog"."pg_namespace" AS "fk_ns" ON "fk_ns"."oid" = "fk_cls"."relnamespace" - -- pg_attribute for FK target column: resolve the referenced column number to its name + -- pg_attribute for FK target column: resolve the referenced column number to its name. + -- Use array_position to correlate by position: find this source column's index in conkey, + -- then pick the referenced attnum at that same index from confkey. + -- This ensures composite FKs correctly map each source column to its corresponding target column. LEFT JOIN "pg_catalog"."pg_attribute" AS "fk_att" ON "fk_att"."attrelid" = "fk_cls"."oid" - AND "fk_att"."attnum" = ANY ("fk_con"."confkey") + AND "fk_att"."attnum" = "fk_con"."confkey"[array_position("fk_con"."conkey", "att"."attnum")] -- pg_attrdef: column defaults; adbin contains the internal expression, decoded via pg_get_expr() LEFT JOIN "pg_catalog"."pg_attrdef" AS "def" ON "def"."adrelid" = "cls"."oid" AND "def"."adnum" = "att"."attnum" diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index 03e44eae0..7b3127132 100644 --- a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -61,7 +61,7 @@ export type IntrospectedSchema = { export type DatabaseFeature = 'Schema' | 'NativeEnum'; export interface IntrospectionProvider { - introspect(connectionString: string, options: { schemas: string[] }): Promise; + introspect(connectionString: string, options: { schemas: string[]; modelCasing: 'pascal' | 'camel' | 'snake' | 'none' }): Promise; getBuiltinType(type: string): { type: BuiltinType | 'Unsupported'; isArray: boolean; diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index 5f7e914fe..cf0b8de0e 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ 
b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -118,7 +118,7 @@ export const sqlite: IntrospectionProvider = { return undefined; }, - async introspect(connectionString: string): Promise { + async introspect(connectionString: string, _options: { schemas: string[]; modelCasing: 'pascal' | 'camel' | 'snake' | 'none' }): Promise { const SQLite = (await import('better-sqlite3')).default; const db = new SQLite(connectionString, { readonly: true }); diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index 4d6adf203..2afb16f27 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -224,7 +224,7 @@ model User { id Int @id @default(autoincrement()) email String @unique @map('email_address') name String? @default('Anonymous') - role users_role @default(USER) + role UsersRole @default(USER) profile Profile? shared_profile Profile? @relation('shared') posts Post[] @@ -293,7 +293,7 @@ model PostTag { @@map('post_tags') } -enum users_role { +enum UsersRole { USER ADMIN MODERATOR @@ -656,6 +656,81 @@ enum UserStatus { expect(restoredSchema).not.toContain('@db.JsonB'); // jsonb is default for Json expect(restoredSchema).not.toContain('@db.ByteA'); // bytea is default for Bytes }); + + it('should correctly map composite foreign key columns by position', async ({ skip }) => { + const provider = getTestDbProvider(); + if (provider !== 'postgresql') { + skip(); + return; + } + // Composite FK: (tenantId, authorId) REFERENCES Tenant(tenantId, userId) + // The introspection must correlate by position, not match each source column + // to every target column. Without the fix, tenantId would incorrectly map to + // both tenantId AND userId in the target table. 
+ const { workDir, schema } = await createProject( + `model Post { + id Int @id @default(autoincrement()) + title String + tenant Tenant @relation(fields: [tenantId, authorId], references: [tenantId, userId], onDelete: Cascade) + tenantId Int + authorId Int + + @@index([tenantId, authorId]) +} + +model Tenant { + tenantId Int + userId Int + name String + posts Post[] + + @@id([tenantId, userId]) +}`, + { provider: 'postgresql' }, + ); + runCli('db push', workDir); + + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + + fs.writeFileSync(schemaFile, getDefaultPrelude({ provider: 'postgresql' })); + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + expect(restoredSchema).toEqual(schema); + }); +}); + +describe('DB pull - MySQL specific features', () => { + it('should detect single-column unique indexes via STATISTICS', async ({ skip }) => { + const provider = getTestDbProvider(); + if (provider !== 'mysql') { + skip(); + return; + } + // MySQL's COLUMN_KEY may not reliably reflect unique indexes in all cases. + // The introspection should also check INFORMATION_SCHEMA.STATISTICS for + // NON_UNIQUE = 0 single-column indexes to correctly detect uniqueness, + // so that the index-processing skip logic (which checks index.unique + + // single-column) doesn't cause a missing @unique attribute. + const { workDir, schema } = await createProject( + `model User { + id Int @id @default(autoincrement()) + email String @unique + nickname String? 
@unique +}`, + { provider: 'mysql' }, + ); + runCli('db push', workDir); + + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + + // Pull from zero to test introspection detects unique columns correctly + fs.writeFileSync(schemaFile, getDefaultPrelude({ provider: 'mysql' })); + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + expect(restoredSchema).toEqual(schema); + }); }); describe('DB pull - SQL specific features', () => { From 743624317d7a31401f0565413f74d6e6e6cfb5c6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 6 Feb 2026 19:35:58 +0100 Subject: [PATCH 79/83] fix: address PR comments --- packages/cli/src/actions/db.ts | 2 +- packages/cli/src/actions/pull/provider/sqlite.ts | 4 ++-- packages/cli/src/actions/pull/utils.ts | 11 ++++++++--- 3 files changed, 11 insertions(+), 6 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 096ce01ae..b868566a5 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -657,7 +657,7 @@ async function runPull(options: PullOptions) { parseResult: { value: documentModel }, } of docs) { const zmodelSchema = await formatDocument(generator.generate(documentModel)); - console.log(colors.blue(`Writing to ${uri.path}`)); + console.log(colors.blue(`Writing to ${path.relative(process.cwd(), uri.fsPath).replace(/\\/g, '/')}`)); fs.writeFileSync(uri.fsPath, zmodelSchema); } } diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index cf0b8de0e..6cf567d54 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -156,12 +156,12 @@ export const sqlite: IntrospectionProvider = { const tableName = t.name; const schema = ''; - // Check if this table has autoincrement (via sqlite_sequence) + // Check if this table has autoincrement (detected by parsing the CREATE TABLE 
DDL) const hasAutoIncrement = autoIncrementTables.has(tableName); // PRAGMA table_xinfo: extended version of table_info that also includes hidden/generated columns. // Returns one row per column with: cid (column index), name, type, notnull, dflt_value, pk. - // hidden: 0 = normal column, 1 = hidden/internal (e.g., rowid), 2 = generated/computed column. + // hidden: 0 = normal, 1 = hidden (virtual table), 2 = generated stored, 3 = generated virtual. const columnsInfo = all<{ cid: number; name: string; diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index 58b879908..e0abcfdfd 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -72,11 +72,16 @@ export function getDatasource(model: Model) { ?.filter((s) => s !== undefined)) as string[] || []; + const provider = getStringLiteral( + datasource.fields.find((f) => f.name === 'provider')?.value, + ); + if (!provider) { + throw new CliError(`Datasource "${datasource.name}" is missing a "provider" field.`); + } + return { name: datasource.name, - provider: getStringLiteral( - datasource.fields.find((f) => f.name === 'provider')?.value, - ) as DataSourceProviderType, + provider: provider as DataSourceProviderType, url, defaultSchema, schemas, From fd463b03578bb24d2546d00c9f11fa859efe07ce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 6 Feb 2026 20:22:52 +0100 Subject: [PATCH 80/83] fix(cli): improve SQLite introspection for untyped columns and composite FKs Ensures columns with no declared type are correctly mapped to Bytes following SQLite affinity rules, preventing them from being marked as Unsupported. Updates the DDL parser to correctly identify and map constraint names for composite foreign keys. This ensures that multi-column relations are properly restored during the pull process. Adds regression tests for both untyped columns and composite foreign key restoration. 
--- .../cli/src/actions/pull/provider/sqlite.ts | 32 +++++--- packages/cli/test/db/pull.test.ts | 74 +++++++++++++++++++ 2 files changed, 96 insertions(+), 10 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index 6cf567d54..791647078 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -95,6 +95,10 @@ export const sqlite: IntrospectionProvider = { return { type: 'Boolean', isArray }; default: { + // SQLite affinity rule #3: columns with no declared type have BLOB affinity + if (!t) { + return { type: 'Bytes', isArray }; + } // Fallback: Use SQLite affinity rules for unknown types if (t.includes('int')) { return { type: 'Int', isArray }; @@ -234,21 +238,29 @@ export const sqlite: IntrospectionProvider = { on_delete: any; }>(`PRAGMA foreign_key_list('${tableName.replace(/'/g, "''")}')`); - // Extract FK constraint names from CREATE TABLE statement - // Pattern: CONSTRAINT "name" FOREIGN KEY("column") or CONSTRAINT name FOREIGN KEY(column) + // Extract FK constraint names from CREATE TABLE statement. + // Captures the constraint name and the full parenthesized column list from + // FOREIGN KEY(...), then splits and parses individual column names so that + // composite FKs (e.g., FOREIGN KEY("col1", "col2")) are handled correctly. const fkConstraintNames = new Map(); if (t.definition) { - // Match: CONSTRAINT "name" FOREIGN KEY("col") or CONSTRAINT name FOREIGN KEY(col) - // Use [^"'`]+ for quoted names to capture full identifier including underscores and other chars - const fkRegex = /CONSTRAINT\s+(?:["'`]([^"'`]+)["'`]|(\w+))\s+FOREIGN\s+KEY\s*\(\s*(?:["'`]([^"'`]+)["'`]|(\w+))\s*\)/gi; + // Match: CONSTRAINT "name" FOREIGN KEY() + // Group 1/2: quoted/unquoted constraint name + // Group 3: the full content inside FOREIGN KEY(...) 
+ const fkRegex = /CONSTRAINT\s+(?:["'`]([^"'`]+)["'`]|(\w+))\s+FOREIGN\s+KEY\s*\(([^)]+)\)/gi; let match; while ((match = fkRegex.exec(t.definition)) !== null) { - // match[1] = quoted constraint name, match[2] = unquoted constraint name - // match[3] = quoted column name, match[4] = unquoted column name const constraintName = match[1] || match[2]; - const columnName = match[3] || match[4]; - if (constraintName && columnName) { - fkConstraintNames.set(columnName, constraintName); + const columnList = match[3]; + if (constraintName && columnList) { + // Split the column list on commas and strip quotes/whitespace + // to extract each individual column name. + const columns = columnList.split(',').map((col) => col.trim().replace(/^["'`]|["'`]$/g, '')); + for (const col of columns) { + if (col) { + fkConstraintNames.set(col, constraintName); + } + } } } } diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index 2afb16f27..5ea030a41 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -733,6 +733,80 @@ describe('DB pull - MySQL specific features', () => { }); }); +describe('DB pull - SQLite specific features', () => { + it('should restore composite foreign key relations', async ({ skip }) => { + const provider = getTestDbProvider(); + if (provider !== 'sqlite') { + skip(); + return; + } + // Composite FK: (tenantId, authorId) REFERENCES Tenant(tenantId, userId). + // The SQLite introspection extracts FK constraint names by parsing the + // CREATE TABLE DDL. The current regex only captures a single column inside + // FOREIGN KEY(...), so composite FK constraint names are lost. Without a + // constraint name, the downstream relation grouping (pull/index.ts) skips + // the FK columns entirely and the relation is not restored. 
+ const { workDir, schema } = await createProject( + `model Post { + id Int @id @default(autoincrement()) + title String + tenant Tenant @relation(fields: [tenantId, authorId], references: [tenantId, userId], onDelete: Cascade) + tenantId Int + authorId Int + + @@index([tenantId, authorId]) +} + +model Tenant { + tenantId Int + userId Int + name String + posts Post[] + + @@id([tenantId, userId]) +}`, + ); + runCli('db push', workDir); + + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + + fs.writeFileSync(schemaFile, getDefaultPrelude()); + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + expect(restoredSchema).toEqual(schema); + }); + + it('should map columns without a declared type to Bytes', async ({ skip }) => { + const provider = getTestDbProvider(); + if (provider !== 'sqlite') { + skip(); + return; + } + // Create a minimal project and push to get the database file. + const { workDir } = await createProject(""); + + // Open the SQLite database directly and add a table with an untyped column. + // In SQLite, CREATE TABLE t("data") gives column "data" no declared type, + // which per affinity rules means BLOB affinity — should map to Bytes. + const dbPath = path.join(workDir, 'zenstack', 'test.db'); + const SQLite = (await import('better-sqlite3')).default; + const db = new SQLite(dbPath); + db.exec('CREATE TABLE "UntypedTest" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "data")'); + db.close(); + + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + fs.writeFileSync(schemaFile, getDefaultPrelude()); + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + // The untyped "data" column should be pulled as Bytes (BLOB affinity), + // not as Unsupported. 
+ expect(restoredSchema).toContain('data Bytes?'); + expect(restoredSchema).not.toContain('Unsupported'); + }); +}); + describe('DB pull - SQL specific features', () => { it('should restore enum fields from zero', async ({ skip }) => { const provider = getTestDbProvider(); From 3b2e2e39b4f2798e0791139d5b3ea00553fc3f8b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Sat, 7 Feb 2026 00:55:17 +0100 Subject: [PATCH 81/83] feat(cli): pull generated/computed columns as Unsupported type Improves database introspection by identifying generated columns in MySQL, PostgreSQL, and SQLite. These columns are now pulled as `Unsupported` types containing their full DDL definition, preventing issues where read-only database fields were incorrectly treated as writable application-level fields. Includes normalization for expression formatting and a fix for string literal escaping in the code generator to ensure stable schema output. Relates to ZModel introspection consistency. --- packages/cli/src/actions/pull/index.ts | 11 +- .../cli/src/actions/pull/provider/mysql.ts | 37 ++- .../src/actions/pull/provider/postgresql.ts | 24 +- .../cli/src/actions/pull/provider/sqlite.ts | 93 +++++- packages/cli/test/db/pull.test.ts | 282 +++++++++++++++++- packages/cli/test/utils.ts | 15 +- .../language/src/zmodel-code-generator.ts | 2 +- 7 files changed, 450 insertions(+), 14 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 4e1a34c6b..b4424746f 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -193,7 +193,14 @@ export function syncTable({ // Array fields cannot be optional (Prisma/ZenStack limitation) typeBuilder.setOptional(builtinType.isArray ? false : column.nullable); - if (column.datatype === 'enum') { + if (column.computed) { + // Generated/computed columns (e.g., GENERATED ALWAYS AS ... 
STORED/VIRTUAL) + // are read-only and must be rendered as Unsupported("full type definition"). + // The datatype contains the full DDL type definition including the expression. + typeBuilder.setUnsupported((unsupportedBuilder) => + unsupportedBuilder.setValue((lt) => lt.StringLiteral.setValue(column.datatype)), + ); + } else if (column.datatype === 'enum') { const ref = model.declarations.find((d) => isEnum(d) && getDbName(d) === column.datatype_name) as | Enum | undefined; @@ -230,7 +237,7 @@ export function syncTable({ }); fieldAttrs.forEach(builder.addAttribute.bind(builder)); - if (column.default) { + if (column.default && !column.computed) { const defaultExprBuilder = provider.getDefaultValue({ fieldType: builtinType.type, datatype: column.datatype, diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index 794248298..e770cb8a5 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -8,6 +8,21 @@ import { resolveNameCasing } from '../casing'; // Note: We dynamically import mysql2 inside the async function to avoid // requiring it at module load time for environments that don't use MySQL. +function normalizeGenerationExpression(typeDef: string): string { + // MySQL may include character set introducers in generation expressions, e.g. `_utf8mb4' '`. + // Strip them to produce a stable, cleaner expression for `Unsupported("...")`. + // MySQL commonly returns generation expressions with SQL-style quote escaping (e.g. `\\'`), + // which would become an invalid ZModel string after the code generator escapes quotes again. + // Normalize it to raw quotes, letting the ZModel code generator re-escape appropriately. + return ( + typeDef + // Remove character set introducers, with or without escaped quotes. + .replace(/_([0-9A-Za-z_]+)\\?'/g, "'") + // Unescape SQL-style escaped single quotes in the expression. 
+ .replace(/\\'/g, "'") + ); +} + export const mysql: IntrospectionProvider = { isSupportedFeature(feature) { switch (feature) { @@ -151,6 +166,10 @@ export const mysql: IntrospectionProvider = { if (col.datatype === 'enum' && col.datatype_name) { return { ...col, datatype_name: resolveNameCasing(options.modelCasing, col.datatype_name).name }; } + // Normalize generated column expressions for stable output. + if (col.computed && typeof col.datatype === 'string') { + return { ...col, datatype: normalizeGenerationExpression(col.datatype) }; + } return col; }); @@ -191,6 +210,7 @@ export const mysql: IntrospectionProvider = { values, }; }); + return { tables, enums }; } finally { await connection.end(); @@ -322,9 +342,22 @@ SELECT 'ordinal_position', c.ORDINAL_POSITION, -- column position (used for sorting) 'name', c.COLUMN_NAME, -- column name - -- datatype: special-case tinyint(1) as 'boolean' (MySQL's boolean convention), - -- otherwise use the DATA_TYPE (e.g., 'int', 'varchar', 'datetime') + -- datatype: for generated/computed columns, construct the full DDL-like type definition + -- (e.g., "int GENERATED ALWAYS AS (col1 + col2) STORED") so it can be rendered as + -- Unsupported("..."); special-case tinyint(1) as 'boolean' (MySQL's boolean convention); + -- otherwise use the DATA_TYPE (e.g., 'int', 'varchar', 'datetime'). 
'datatype', CASE + WHEN c.GENERATION_EXPRESSION IS NOT NULL AND c.GENERATION_EXPRESSION != '' THEN + CONCAT( + c.COLUMN_TYPE, + ' GENERATED ALWAYS AS (', + c.GENERATION_EXPRESSION, + ') ', + CASE + WHEN c.EXTRA LIKE '%STORED GENERATED%' THEN 'STORED' + ELSE 'VIRTUAL' + END + ) WHEN c.DATA_TYPE = 'tinyint' AND c.COLUMN_TYPE = 'tinyint(1)' THEN 'boolean' ELSE c.DATA_TYPE END, diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index c781259d2..205a16f23 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -378,13 +378,26 @@ SELECT SELECT "att"."attname" AS "name", -- column name - -- datatype: if the type is an enum, report 'enum'; otherwise use the pg_type name + -- datatype: if the type is an enum, report 'enum'; + -- if the column is generated/computed, construct the full DDL-like type definition + -- (e.g., "text GENERATED ALWAYS AS (expr) STORED") so it can be rendered as Unsupported("..."); + -- otherwise use the pg_type name. 
CASE WHEN EXISTS ( SELECT 1 FROM "pg_catalog"."pg_enum" AS "e" WHERE "e"."enumtypid" = "typ"."oid" ) THEN 'enum' - ELSE "typ"."typname" -- internal type name (e.g., 'int4', 'varchar', 'text') + WHEN "att"."attgenerated" != '' THEN + format_type("att"."atttypid", "att"."atttypmod") + || ' GENERATED ALWAYS AS (' + || pg_get_expr("def"."adbin", "def"."adrelid") + || ') ' + || CASE "att"."attgenerated" + WHEN 's' THEN 'STORED' + WHEN 'v' THEN 'VIRTUAL' + ELSE 'STORED' + END + ELSE "typ"."typname"::text -- internal type name (e.g., 'int4', 'varchar', 'text'); cast to text to prevent CASE from coercing result to name type (max 63 chars) END AS "datatype", -- datatype_name: for enums only, the actual enum type name (used to look up the enum definition) @@ -478,7 +491,12 @@ SELECT ) AS "unique_name", "att"."attgenerated" != '' AS "computed", -- true if column is a generated/computed column - pg_get_expr("def"."adbin", "def"."adrelid") AS "default", -- column default expression as text (e.g., 'nextval(...)', '0', 'now()') + -- For generated columns, pg_attrdef stores the generation expression (not a default), + -- so we must null it out to avoid emitting a spurious @default(dbgenerated(...)) attribute. + CASE + WHEN "att"."attgenerated" != '' THEN NULL + ELSE pg_get_expr("def"."adbin", "def"."adrelid") + END AS "default", -- column default expression as text (e.g., 'nextval(...)', '0', 'now()') "att"."attnotnull" != TRUE AS "nullable", -- true if column allows NULL values -- options: for enum columns, aggregates all allowed enum labels into a JSON array diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index 791647078..f58ad0b58 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -165,7 +165,7 @@ export const sqlite: IntrospectionProvider = { // PRAGMA table_xinfo: extended version of table_info that also includes hidden/generated columns. 
// Returns one row per column with: cid (column index), name, type, notnull, dflt_value, pk. - // hidden: 0 = normal, 1 = hidden (virtual table), 2 = generated stored, 3 = generated virtual. + // hidden: 0 = normal, 1 = hidden (virtual table), 2 = generated VIRTUAL, 3 = generated STORED. const columnsInfo = all<{ cid: number; name: string; @@ -288,12 +288,22 @@ export const sqlite: IntrospectionProvider = { }); } + // Pre-extract full column type definitions from DDL for generated columns. + // PRAGMA table_xinfo only returns the base type (e.g., "TEXT"), but for + // generated columns we need the full definition including the expression + // (e.g., "TEXT GENERATED ALWAYS AS (...) STORED") so they are pulled as + // Unsupported("...") — matching Prisma's introspection behavior. + const generatedColDefs = t.definition ? extractColumnTypeDefs(t.definition) : new Map(); + const columns: IntrospectedTable['columns'] = []; for (const c of columnsInfo) { - // hidden: 1 (hidden/internal) -> skip; 2 (generated) -> mark computed + // hidden: 0 = normal, 1 = hidden (virtual table) → skip, + // 2 = generated VIRTUAL, 3 = generated STORED → mark computed const hidden = c.hidden ?? 0; if (hidden === 1) continue; + const isGenerated = hidden === 2 || hidden === 3; + const fk = fkByColumn.get(c.name); // Determine default value - check for autoincrement @@ -303,9 +313,20 @@ export const sqlite: IntrospectionProvider = { defaultValue = 'autoincrement'; } + // For generated columns, use the full DDL type definition so that + // getBuiltinType returns Unsupported and the column is rendered as + // Unsupported("TYPE GENERATED ALWAYS AS (...) STORED/VIRTUAL"). 
+ let datatype = c.type || ''; + if (isGenerated) { + const fullDef = generatedColDefs.get(c.name); + if (fullDef) { + datatype = fullDef; + } + } + columns.push({ name: c.name, - datatype: c.type || '', + datatype, datatype_name: null, // SQLite doesn't support native enums length: null, precision: null, @@ -317,7 +338,7 @@ export const sqlite: IntrospectionProvider = { foreign_key_on_update: fk?.foreign_key_on_update ?? null, foreign_key_on_delete: fk?.foreign_key_on_delete ?? null, pk: !!c.pk, - computed: hidden === 2, + computed: isGenerated, nullable: c.notnull !== 1, default: defaultValue, unique: uniqueSingleColumn.has(c.name), @@ -390,3 +411,67 @@ export const sqlite: IntrospectionProvider = { return factories; }, }; + +/** + * Extract column type definitions from a CREATE TABLE DDL statement. + * Returns a map of column name → full type definition string (everything after the column name). + * Used to get the complete type including GENERATED ALWAYS AS (...) STORED/VIRTUAL for generated columns. + */ +function extractColumnTypeDefs(ddl: string): Map { + // Find the content inside CREATE TABLE "name" ( ... ) + // Use a paren-depth approach to find the matching closing paren. + const openIdx = ddl.indexOf('('); + if (openIdx === -1) return new Map(); + + let depth = 1; + let closeIdx = -1; + for (let i = openIdx + 1; i < ddl.length; i++) { + if (ddl[i] === '(') depth++; + else if (ddl[i] === ')') { + depth--; + if (depth === 0) { + closeIdx = i; + break; + } + } + } + if (closeIdx === -1) return new Map(); + + const content = ddl.substring(openIdx + 1, closeIdx); + + // Split column definitions on commas, respecting nested parentheses. 
+ const defs: string[] = []; + let current = ''; + depth = 0; + for (const char of content) { + if (char === '(') depth++; + else if (char === ')') depth--; + else if (char === ',' && depth === 0) { + defs.push(current.trim()); + current = ''; + continue; + } + current += char; + } + if (current.trim()) defs.push(current.trim()); + + // Map column name → type definition (everything after the column name). + // Table constraints (CONSTRAINT, PRIMARY KEY, UNIQUE, FOREIGN KEY, CHECK) + // are skipped since they don't define columns. + const result = new Map(); + for (const def of defs) { + // Match: optional quote + column name + optional quote + whitespace + type definition + const nameMatch = def.match(/^(?:["'`]([^"'`]+)["'`]|(\w+))\s+(.+)/s); + if (nameMatch) { + const name = nameMatch[1] || nameMatch[2]; + const typeDef = nameMatch[3]; + // Skip table-level constraints (they start with keywords, not column names, + // but could still match the regex — the map lookup by actual column name + // ensures they never interfere). 
+ if (name && typeDef) { + result.set(name, typeDef.trim()); + } + } + } + return result; +} diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index 5ea030a41..487f6a446 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -1,7 +1,7 @@ import fs from 'node:fs'; import path from 'node:path'; import { describe, expect, it } from 'vitest'; -import { createProject, getDefaultPrelude, runCli } from '../utils'; +import { createProject, getDefaultPrelude, getTestDbName, getTestDbUrl, runCli } from '../utils'; import { formatDocument } from '@zenstackhq/language'; import { getTestDbProvider } from '@zenstackhq/testtools'; @@ -698,6 +698,107 @@ model Tenant { const restoredSchema = getSchema(workDir); expect(restoredSchema).toEqual(schema); }); + + it('should pull stored generated columns as Unsupported with full expression', async ({ skip }) => { + const provider = getTestDbProvider(); + if (provider !== 'postgresql') { + skip(); + return; + } + // PostgreSQL supports GENERATED ALWAYS AS (expr) STORED since PG 12. + // The introspection should include the full generation expression in the + // datatype so it is rendered as Unsupported("type GENERATED ALWAYS AS (expr) STORED"). + + // 1. Create a project with a base table (we need the DB to exist first) + const { workDir } = await createProject( + `model ComputedUsers { + id Int @id @default(autoincrement()) + firstName String + lastName String +}`, + { provider: 'postgresql' }, + ); + runCli('db push', workDir); + + // 2. 
Add a generated column via raw SQL (can't be defined in ZModel) + const { Client } = await import('pg'); + const dbName = getTestDbName('postgresql'); + const client = new Client({ connectionString: getTestDbUrl('postgresql', dbName) }); + await client.connect(); + try { + await client.query( + `ALTER TABLE "ComputedUsers" ADD COLUMN "fullName" text GENERATED ALWAYS AS ("firstName" || ' ' || "lastName") STORED` + ); + } finally { + await client.end(); + } + + // 3. Pull from zero + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + fs.writeFileSync(schemaFile, getDefaultPrelude({ provider: 'postgresql' })); + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + + // The generated column should be pulled as Unsupported with the full expression. + // format_type returns 'text', and pg_get_expr returns the expression. + expect(restoredSchema).toEqual(await formatDocument(`${getDefaultPrelude({ provider: 'postgresql' })} + +model ComputedUsers { + id Int @id @default(autoincrement()) + firstName String + lastName String + fullName Unsupported('text GENERATED ALWAYS AS ((("firstName" || \\' \\'::text) || "lastName")) STORED')? +}`)); + }); + + it('should pull virtual generated columns as Unsupported with full expression', async ({ skip }) => { + const provider = getTestDbProvider(); + if (provider !== 'postgresql') { + skip(); + return; + } + // PostgreSQL 17+ supports VIRTUAL generated columns. + // For earlier versions, only STORED is supported, so this test may need to be + // adapted. We test STORED here since it's universally supported. 
+ + const { workDir } = await createProject( + `model ComputedProducts { + id Int @id @default(autoincrement()) + price Int @default(0) + qty Int @default(0) +}`, + { provider: 'postgresql' }, + ); + runCli('db push', workDir); + + const { Client } = await import('pg'); + const dbName = getTestDbName('postgresql'); + const client = new Client({ connectionString: getTestDbUrl('postgresql', dbName) }); + await client.connect(); + try { + await client.query( + `ALTER TABLE "ComputedProducts" ADD COLUMN "total" integer GENERATED ALWAYS AS ("price" * "qty") STORED` + ); + } finally { + await client.end(); + } + + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + fs.writeFileSync(schemaFile, getDefaultPrelude({ provider: 'postgresql' })); + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + + expect(restoredSchema).toEqual(await formatDocument(`${getDefaultPrelude({ provider: 'postgresql' })} + +model ComputedProducts { + id Int @id @default(autoincrement()) + price Int @default(0) + qty Int @default(0) + total Unsupported('integer GENERATED ALWAYS AS ((price * qty)) STORED')? +}`)); + }); }); describe('DB pull - MySQL specific features', () => { @@ -731,6 +832,103 @@ describe('DB pull - MySQL specific features', () => { const restoredSchema = getSchema(workDir); expect(restoredSchema).toEqual(schema); }); + + it('should pull stored generated columns as Unsupported with full expression', async ({ skip }) => { + const provider = getTestDbProvider(); + if (provider !== 'mysql') { + skip(); + return; + } + // MySQL supports both VIRTUAL and STORED generated columns. + // The introspection should include the full generation expression in the + // datatype so it is rendered as Unsupported("type GENERATED ALWAYS AS (expr) STORED"). + + // 1. 
Create a project with a base table (we need the DB to exist first) + const { workDir } = await createProject( + `model ComputedUsers { + id Int @id @default(autoincrement()) + firstName String @db.VarChar(255) + lastName String @db.VarChar(255) +}`, + { provider: 'mysql' }, + ); + runCli('db push', workDir); + + // 2. Add a generated column via raw SQL (can't be defined in ZModel) + const mysql = await import('mysql2/promise'); + const dbName = getTestDbName('mysql'); + const connection = await mysql.createConnection(getTestDbUrl('mysql', dbName)); + try { + await connection.execute( + "ALTER TABLE `ComputedUsers` ADD COLUMN `fullName` varchar(511) GENERATED ALWAYS AS (CONCAT(`firstName`, ' ', `lastName`)) STORED" + ); + } finally { + await connection.end(); + } + + // 3. Pull from zero + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + fs.writeFileSync(schemaFile, getDefaultPrelude({ provider: 'mysql' })); + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + + // The generated column should be pulled as Unsupported with the full expression. + // MySQL uses COLUMN_TYPE (e.g., 'varchar(511)') and GENERATION_EXPRESSION for the expr, + // and EXTRA contains 'STORED GENERATED' or 'VIRTUAL GENERATED'. + expect(restoredSchema).toEqual(await formatDocument(`${getDefaultPrelude({ provider: 'mysql' })} + +model ComputedUsers { + id Int @id @default(autoincrement()) + firstName String @db.VarChar(255) + lastName String @db.VarChar(255) + fullName Unsupported('varchar(511) GENERATED ALWAYS AS (concat(\`firstName\`,\\' \\',\`lastName\`)) STORED')? 
+}`)); + }); + + it('should pull virtual generated columns as Unsupported with full expression', async ({ skip }) => { + const provider = getTestDbProvider(); + if (provider !== 'mysql') { + skip(); + return; + } + + const { workDir } = await createProject( + `model ComputedProducts { + id Int @id @default(autoincrement()) + price Int @default(0) + qty Int @default(0) +}`, + { provider: 'mysql' }, + ); + runCli('db push', workDir); + + const mysql = await import('mysql2/promise'); + const dbName = getTestDbName('mysql'); + const connection = await mysql.createConnection(getTestDbUrl('mysql', dbName)); + try { + await connection.execute( + "ALTER TABLE `ComputedProducts` ADD COLUMN `total` int GENERATED ALWAYS AS (`price` * `qty`) VIRTUAL" + ); + } finally { + await connection.end(); + } + + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + fs.writeFileSync(schemaFile, getDefaultPrelude({ provider: 'mysql' })); + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + + expect(restoredSchema).toEqual(await formatDocument(`${getDefaultPrelude({ provider: 'mysql' })} + +model ComputedProducts { + id Int @id @default(autoincrement()) + price Int @default(0) + qty Int @default(0) + total Unsupported('int GENERATED ALWAYS AS ((\`price\` * \`qty\`)) VIRTUAL')? 
+}`)); + }); }); describe('DB pull - SQLite specific features', () => { @@ -805,6 +1003,88 @@ model Tenant { expect(restoredSchema).toContain('data Bytes?'); expect(restoredSchema).not.toContain('Unsupported'); }); + + it('should pull stored generated columns as Unsupported', async ({ skip }) => { + const provider = getTestDbProvider(); + if (provider !== 'sqlite') { + skip(); + return; + } + // SQLite PRAGMA table_xinfo reports generated columns with hidden values: + // hidden = 2 → VIRTUAL generated column + // hidden = 3 → STORED generated column + // Both types should be pulled as Unsupported("full type definition") + // because generated columns are read-only and cannot be written to. + + const { workDir } = await createProject(''); + + const dbPath = path.join(workDir, 'zenstack', 'test.db'); + const SQLite = (await import('better-sqlite3')).default; + const db = new SQLite(dbPath); + db.exec(` + CREATE TABLE "ComputedUsers" ( + "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + "firstName" TEXT NOT NULL, + "lastName" TEXT NOT NULL, + "fullName" TEXT GENERATED ALWAYS AS (firstName || ' ' || lastName) STORED + ) + `); + db.close(); + + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + fs.writeFileSync(schemaFile, getDefaultPrelude()); + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + + // first_name and last_name should be regular String fields + expect(restoredSchema).toEqual(await formatDocument(`${getDefaultPrelude()} + +model ComputedUsers { + id Int @id @default(autoincrement()) + firstName String + lastName String + fullName Unsupported('TEXT GENERATED ALWAYS AS (firstName || \\' \\' || lastName) STORED')? 
+}`)); + }); + + it('should pull virtual generated columns as Unsupported', async ({ skip }) => { + const provider = getTestDbProvider(); + if (provider !== 'sqlite') { + skip(); + return; + } + + const { workDir } = await createProject(''); + + const dbPath = path.join(workDir, 'zenstack', 'test.db'); + const SQLite = (await import('better-sqlite3')).default; + const db = new SQLite(dbPath); + db.exec(` + CREATE TABLE "ComputedProducts" ( + "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + "price" INTEGER NOT NULL DEFAULT 0, + "qty" INTEGER NOT NULL DEFAULT 0, + "total" INTEGER GENERATED ALWAYS AS ("price" * "qty") VIRTUAL + ) + `); + db.close(); + + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + fs.writeFileSync(schemaFile, getDefaultPrelude()); + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + + expect(restoredSchema).toEqual(await formatDocument(`${getDefaultPrelude()} + +model ComputedProducts { + id Int @id @default(autoincrement()) + price Int @default(0) + qty Int @default(0) + total Unsupported('INTEGER GENERATED ALWAYS AS ("price" * "qty") VIRTUAL')? +}`)); + }); }); describe('DB pull - SQL specific features', () => { diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index 2c88aa7db..31a86dfb9 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -20,7 +20,7 @@ const TEST_MYSQL_CONFIG = { password: process.env['TEST_MYSQL_PASSWORD'] ?? 
'mysql', }; -function getTestDbName(provider: string) { +export function getTestDbName(provider: string) { if (provider === 'sqlite') { return './test.db'; } @@ -42,6 +42,19 @@ function getTestDbName(provider: string) { ); } +export function getTestDbUrl(provider: 'sqlite' | 'postgresql' | 'mysql', dbName: string): string { + switch (provider) { + case 'sqlite': + return `file:${dbName}`; + case 'postgresql': + return `postgres://${TEST_PG_CONFIG.user}:${TEST_PG_CONFIG.password}@${TEST_PG_CONFIG.host}:${TEST_PG_CONFIG.port}/${dbName}`; + case 'mysql': + return `mysql://${TEST_MYSQL_CONFIG.user}:${TEST_MYSQL_CONFIG.password}@${TEST_MYSQL_CONFIG.host}:${TEST_MYSQL_CONFIG.port}/${dbName}`; + default: + throw new Error(`Unsupported provider: ${provider}`); + } +} + export function getDefaultPrelude(options?: { provider?: 'sqlite' | 'postgresql' | 'mysql', datasourceFields?: Record }) { const provider = (options?.provider || getTestDbProvider()) ?? 'sqlite'; const dbName = getTestDbName(provider); diff --git a/packages/language/src/zmodel-code-generator.ts b/packages/language/src/zmodel-code-generator.ts index 0b238d60d..5b8373166 100644 --- a/packages/language/src/zmodel-code-generator.ts +++ b/packages/language/src/zmodel-code-generator.ts @@ -94,7 +94,7 @@ export class ZModelCodeGenerator { } private quotedStr(val: string): string { - const trimmedVal = val.replace(new RegExp(`${this.quote}`, 'g'), `\\${this.quote}`); + const trimmedVal = val.replace(new RegExp(`(? Date: Sat, 7 Feb 2026 12:18:09 +0100 Subject: [PATCH 82/83] fix(cli): Use parameterized queries for MySQL introspection Switches from template literal interpolation to parameterized queries in MySQL introspection functions. This improves security by preventing potential SQL injection and ensures better handling of database names containing special characters. 
--- packages/cli/src/actions/pull/provider/mysql.ts | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index e770cb8a5..0329fbc47 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -144,7 +144,7 @@ export const mysql: IntrospectionProvider = { } // Introspect tables - const [tableRows] = (await connection.execute(getTableIntrospectionQuery(databaseName))) as [ + const [tableRows] = (await connection.execute(getTableIntrospectionQuery(), [databaseName])) as [ IntrospectedTable[], unknown, ]; @@ -191,7 +191,7 @@ export const mysql: IntrospectionProvider = { } // Introspect enums (MySQL stores enum values in column definitions) - const [enumRows] = (await connection.execute(getEnumIntrospectionQuery(databaseName))) as [ + const [enumRows] = (await connection.execute(getEnumIntrospectionQuery(), [databaseName])) as [ { table_name: string; column_name: string; column_type: string }[], unknown, ]; @@ -314,7 +314,7 @@ export const mysql: IntrospectionProvider = { }, }; -function getTableIntrospectionQuery(databaseName: string) { +function getTableIntrospectionQuery() { // Note: We use subqueries with ORDER BY before JSON_ARRAYAGG to ensure ordering // since MySQL < 8.0.21 doesn't support ORDER BY inside JSON_ARRAYAGG. // MySQL doesn't support multi-schema, so we don't include schema in the result. @@ -520,14 +520,14 @@ FROM INFORMATION_SCHEMA.TABLES t -- Join VIEWS to get VIEW_DEFINITION for view tables LEFT JOIN INFORMATION_SCHEMA.VIEWS v ON t.TABLE_SCHEMA = v.TABLE_SCHEMA AND t.TABLE_NAME = v.TABLE_NAME -WHERE t.TABLE_SCHEMA = '${databaseName}' -- only the target database +WHERE t.TABLE_SCHEMA = ? 
-- only the target database AND t.TABLE_TYPE IN ('BASE TABLE', 'VIEW') -- exclude system tables like SYSTEM VIEW AND t.TABLE_NAME <> '_prisma_migrations' -- exclude Prisma migration tracking table ORDER BY t.TABLE_NAME; `; } -function getEnumIntrospectionQuery(databaseName: string) { +function getEnumIntrospectionQuery() { // MySQL doesn't have standalone enum types like PostgreSQL's CREATE TYPE. // Instead, enum values are embedded in column definitions (e.g., COLUMN_TYPE = "enum('a','b','c')"). // This query finds all enum columns so we can extract their allowed values. @@ -537,7 +537,7 @@ SELECT c.COLUMN_NAME AS column_name, -- column name c.COLUMN_TYPE AS column_type -- full type string including values (e.g., "enum('val1','val2')") FROM INFORMATION_SCHEMA.COLUMNS c -WHERE c.TABLE_SCHEMA = '${databaseName}' -- only the target database +WHERE c.TABLE_SCHEMA = ? -- only the target database AND c.DATA_TYPE = 'enum' -- only enum columns ORDER BY c.TABLE_NAME, c.COLUMN_NAME; `; From a264722aa6ea4c916f61c8bb29a33fc209f423a8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Sat, 7 Feb 2026 12:34:01 +0100 Subject: [PATCH 83/83] fix(cli): use nullish coalescing for precision check --- packages/cli/src/actions/pull/provider/mysql.ts | 2 +- packages/cli/src/actions/pull/provider/postgresql.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index 0329fbc47..895a9cb53 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -300,7 +300,7 @@ export const mysql: IntrospectionProvider = { defaultDatabaseType && (defaultDatabaseType.type !== datatype || (defaultDatabaseType.precision && - defaultDatabaseType.precision !== (length || precision))) + defaultDatabaseType.precision !== (length ?? 
precision))) ) { const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); const sizeValue = length ?? precision; diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 205a16f23..bf54e5658 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -329,7 +329,7 @@ export const postgresql: IntrospectionProvider = { defaultDatabaseType && (defaultDatabaseType.type !== normalizedDatatype || (defaultDatabaseType.precision && - defaultDatabaseType.precision !== (length || precision))) + defaultDatabaseType.precision !== (length ?? precision))) ) { const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); // Only add length/precision if it's meaningful (not the standard bit width for the type)