diff --git a/src/context/diff-context/diff-context.tsx b/src/context/diff-context/diff-context.tsx index bb153243..191b72da 100644 --- a/src/context/diff-context/diff-context.tsx +++ b/src/context/diff-context/diff-context.tsx @@ -105,6 +105,11 @@ export interface DiffContext { }: { fieldId: string; }) => { old: number; new: number } | null; + getFieldNewIsArray: ({ + fieldId, + }: { + fieldId: string; + }) => { old: boolean; new: boolean } | null; // relationship diff checkIfNewRelationship: ({ diff --git a/src/context/diff-context/diff-provider.tsx b/src/context/diff-context/diff-provider.tsx index 5181234b..59daad00 100644 --- a/src/context/diff-context/diff-provider.tsx +++ b/src/context/diff-context/diff-provider.tsx @@ -447,6 +447,30 @@ export const DiffProvider: React.FC = ({ [diffMap] ); + const getFieldNewIsArray = useCallback( + ({ fieldId }) => { + const fieldKey = getDiffMapKey({ + diffObject: 'field', + objectId: fieldId, + attribute: 'isArray', + }); + + if (diffMap.has(fieldKey)) { + const diff = diffMap.get(fieldKey); + + if (diff?.type === 'changed') { + return { + old: diff.oldValue as boolean, + new: diff.newValue as boolean, + }; + } + } + + return null; + }, + [diffMap] + ); + const checkIfNewRelationship = useCallback< DiffContext['checkIfNewRelationship'] >( @@ -520,6 +544,7 @@ export const DiffProvider: React.FC = ({ getFieldNewCharacterMaximumLength, getFieldNewScale, getFieldNewPrecision, + getFieldNewIsArray, // relationship diff checkIfNewRelationship, diff --git a/src/dialogs/common/import-database/import-database.tsx b/src/dialogs/common/import-database/import-database.tsx index e69a8162..07187a88 100644 --- a/src/dialogs/common/import-database/import-database.tsx +++ b/src/dialogs/common/import-database/import-database.tsx @@ -140,7 +140,7 @@ export const ImportDatabase: React.FC = ({ if (importMethod === 'dbml') { // Validate DBML by parsing it - const validateResponse = verifyDBML(scriptResult); + const validateResponse = verifyDBML(scriptResult, { databaseType }); if (!validateResponse.hasError) { setErrorMessage(''); setSqlValidation({ diff --git a/src/hooks/use-update-table-field.ts b/src/hooks/use-update-table-field.ts index 001844c6..72225c08 100644 --- a/src/hooks/use-update-table-field.ts +++ b/src/hooks/use-update-table-field.ts @@ -1,7 +1,7 @@ import { useCallback, useMemo, useState, useEffect, useRef } from 'react'; import { useChartDB } from './use-chartdb'; import { useDebounce } from './use-debounce-v2'; -import type { DBField, DBTable } from '@/lib/domain'; +import type { DatabaseType, DBField, DBTable } from '@/lib/domain'; import type { SelectBoxOption, SelectBoxProps, @@ -9,49 +9,60 @@ import type { import { dataTypeDataToDataType, sortedDataTypeMap, + supportsArrayDataType, } from '@/lib/data/data-types/data-types'; import { generateDBFieldSuffix } from '@/lib/domain/db-field'; import type { DataTypeData } from '@/lib/data/data-types/data-types'; const generateFieldRegexPatterns = ( - dataType: DataTypeData + dataType: DataTypeData, + databaseType: DatabaseType ): { regex?: string; extractRegex?: RegExp; } => { + const typeName = dataType.name; + const supportsArrays = supportsArrayDataType(dataType.id, databaseType); + const arrayPattern = supportsArrays ? '(\\[\\])?' 
: ''; + if (!dataType.fieldAttributes) { - return { regex: undefined, extractRegex: undefined }; + // For types without field attributes, support plain type + optional array notation + return { + regex: `^${typeName}${arrayPattern}$`, + extractRegex: new RegExp(`^${typeName}${arrayPattern}$`), + }; } - const typeName = dataType.name; const fieldAttributes = dataType.fieldAttributes; if (fieldAttributes.hasCharMaxLength) { if (fieldAttributes.hasCharMaxLengthOption) { return { - regex: `^${typeName}\\((\\d+|[mM][aA][xX])\\)$`, - extractRegex: /\((\d+|max)\)/i, + regex: `^${typeName}\\((\\d+|[mM][aA][xX])\\)${arrayPattern}$`, + extractRegex: supportsArrays + ? /\((\d+|max)\)(\[\])?/i + : /\((\d+|max)\)/i, }; } return { - regex: `^${typeName}\\(\\d+\\)$`, - extractRegex: /\((\d+)\)/, + regex: `^${typeName}\\(\\d+\\)${arrayPattern}$`, + extractRegex: supportsArrays ? /\((\d+)\)(\[\])?/ : /\((\d+)\)/, }; } if (fieldAttributes.precision && fieldAttributes.scale) { return { - regex: `^${typeName}\\s*\\(\\s*\\d+\\s*(?:,\\s*\\d+\\s*)?\\)$`, + regex: `^${typeName}\\s*\\(\\s*\\d+\\s*(?:,\\s*\\d+\\s*)?\\)${arrayPattern}$`, extractRegex: new RegExp( - `${typeName}\\s*\\(\\s*(\\d+)\\s*(?:,\\s*(\\d+)\\s*)?\\)` + `${typeName}\\s*\\(\\s*(\\d+)\\s*(?:,\\s*(\\d+)\\s*)?\\)${arrayPattern}` ), }; } if (fieldAttributes.precision) { return { - regex: `^${typeName}\\s*\\(\\s*\\d+\\s*\\)$`, - extractRegex: /\((\d+)\)/, + regex: `^${typeName}\\s*\\(\\s*\\d+\\s*\\)${arrayPattern}$`, + extractRegex: supportsArrays ? /\((\d+)\)(\[\])?/ : /\((\d+)\)/, }; } @@ -118,7 +129,10 @@ export const useUpdateTableField = ( const standardTypes: SelectBoxOption[] = sortedDataTypeMap[ databaseType ].map((type) => { - const regexPatterns = generateFieldRegexPatterns(type); + const regexPatterns = generateFieldRegexPatterns( + type, + databaseType + ); return { label: type.name, @@ -162,8 +176,13 @@ export const useUpdateTableField = ( let characterMaximumLength: string | undefined = undefined; let precision: number | undefined = undefined; let scale: number | undefined = undefined; + let isArray: boolean | undefined = undefined; if (regexMatches?.length) { + // Check if the last captured group is the array indicator [] + const lastMatch = regexMatches[regexMatches.length - 1]; + const hasArrayIndicator = lastMatch === '[]'; + if (dataType?.fieldAttributes?.hasCharMaxLength) { characterMaximumLength = regexMatches[1]?.toLowerCase(); } else if ( @@ -177,6 +196,17 @@ export const useUpdateTableField = ( } else if (dataType?.fieldAttributes?.precision) { precision = parseInt(regexMatches[1]); } + + // Set isArray if the array indicator was found and the type supports arrays + if (hasArrayIndicator) { + const typeId = value as string; + if (supportsArrayDataType(typeId, databaseType)) { + isArray = true; + } + } else { + // Explicitly set to false/undefined if no array indicator + isArray = undefined; + } } else { if ( dataType?.fieldAttributes?.hasCharMaxLength && @@ -198,6 +228,7 @@ export const useUpdateTableField = ( characterMaximumLength, precision, scale, + isArray, increment: undefined, default: undefined, type: dataTypeDataToDataType( @@ -299,11 +330,17 @@ export const useUpdateTableField = ( // Utility function to generate field suffix for display const generateFieldSuffix = useCallback( (typeId?: string) => { - return generateDBFieldSuffix(field, { - databaseType, - forceExtended: true, - typeId, - }); + return generateDBFieldSuffix( + { + ...field, + isArray: field.isArray && typeId === field.type.id, + }, + { + databaseType, 
+ forceExtended: true, + typeId, + } + ); }, [field, databaseType] ); diff --git a/src/lib/data/data-types/clickhouse-data-types.ts b/src/lib/data/data-types/clickhouse-data-types.ts index 6f4c2d68..0199ac4f 100644 --- a/src/lib/data/data-types/clickhouse-data-types.ts +++ b/src/lib/data/data-types/clickhouse-data-types.ts @@ -129,9 +129,6 @@ export const clickhouseDataTypes: readonly DataTypeData[] = [ { name: 'enum', id: 'enum' }, { name: 'lowcardinality', id: 'lowcardinality' }, - // Array Type - { name: 'array', id: 'array' }, - // Tuple Type { name: 'tuple', id: 'tuple' }, { name: 'map', id: 'map' }, diff --git a/src/lib/data/data-types/data-types.ts b/src/lib/data/data-types/data-types.ts index e8cd17fa..f44b1910 100644 --- a/src/lib/data/data-types/data-types.ts +++ b/src/lib/data/data-types/data-types.ts @@ -1,5 +1,6 @@ import { z } from 'zod'; import { DatabaseType } from '../../domain/database-type'; +import { databaseSupportsArrays } from '../../domain/database-capabilities'; import { clickhouseDataTypes } from './clickhouse-data-types'; import { genericDataTypes } from './generic-data-types'; import { mariadbDataTypes } from './mariadb-data-types'; @@ -165,3 +166,22 @@ export const supportsAutoIncrementDataType = ( 'decimal', ].includes(dataTypeName.toLocaleLowerCase()); }; + +const ARRAY_INCOMPATIBLE_TYPES = [ + 'serial', + 'bigserial', + 'smallserial', +] as const; + +export const supportsArrayDataType = ( + dataTypeName: string, + databaseType: DatabaseType +): boolean => { + if (!databaseSupportsArrays(databaseType)) { + return false; + } + + return !ARRAY_INCOMPATIBLE_TYPES.includes( + dataTypeName.toLowerCase() as (typeof ARRAY_INCOMPATIBLE_TYPES)[number] + ); +}; diff --git a/src/lib/data/data-types/postgres-data-types.ts b/src/lib/data/data-types/postgres-data-types.ts index 81a68061..65a8c156 100644 --- a/src/lib/data/data-types/postgres-data-types.ts +++ b/src/lib/data/data-types/postgres-data-types.ts @@ -97,7 +97,6 @@ export const postgresDataTypes: readonly DataTypeData[] = [ { name: 'tsvector', id: 'tsvector' }, { name: 'tsquery', id: 'tsquery' }, { name: 'xml', id: 'xml' }, - { name: 'array', id: 'array' }, { name: 'int4range', id: 'int4range' }, { name: 'int8range', id: 'int8range' }, { name: 'numrange', id: 'numrange' }, diff --git a/src/lib/data/sql-export/__tests__/array-fields.test.ts b/src/lib/data/sql-export/__tests__/array-fields.test.ts new file mode 100644 index 00000000..c5d849a2 --- /dev/null +++ b/src/lib/data/sql-export/__tests__/array-fields.test.ts @@ -0,0 +1,356 @@ +import { describe, it, expect } from 'vitest'; +import { generateId } from '@/lib/utils'; +import { exportBaseSQL } from '../export-sql-script'; +import { DatabaseType } from '@/lib/domain/database-type'; +import type { Diagram } from '@/lib/domain/diagram'; + +describe('SQL Export - Array Fields (Fantasy RPG Theme)', () => { + it('should export array fields for magical spell components', () => { + const diagram: Diagram = { + id: 'test-diagram', + name: 'Magical Spell System', + databaseType: DatabaseType.POSTGRESQL, + tables: [ + { + id: generateId(), + name: 'spells', + schema: '', + fields: [ + { + id: generateId(), + name: 'id', + type: { id: 'uuid', name: 'uuid' }, + primaryKey: true, + unique: true, + nullable: false, + createdAt: Date.now(), + }, + { + id: generateId(), + name: 'name', + type: { id: 'varchar', name: 'varchar' }, + primaryKey: false, + unique: false, + nullable: false, + createdAt: Date.now(), + characterMaximumLength: '200', + }, + { + id: generateId(), + 
name: 'components', + type: { id: 'text', name: 'text' }, + primaryKey: false, + unique: false, + nullable: true, + createdAt: Date.now(), + isArray: true, + comments: 'Magical components needed for the spell', + }, + { + id: generateId(), + name: 'elemental_types', + type: { id: 'varchar', name: 'varchar' }, + primaryKey: false, + unique: false, + nullable: true, + createdAt: Date.now(), + characterMaximumLength: '50', + isArray: true, + comments: + 'Elements involved: fire, water, earth, air', + }, + ], + indexes: [], + x: 0, + y: 0, + color: '#3b82f6', + isView: false, + createdAt: Date.now(), + order: 0, + }, + ], + relationships: [], + createdAt: new Date(), + updatedAt: new Date(), + }; + + const sql = exportBaseSQL({ + diagram, + targetDatabaseType: DatabaseType.POSTGRESQL, + isDBMLFlow: true, + }); + + expect(sql).toContain('CREATE TABLE "spells"'); + expect(sql).toContain('"components" text[]'); + expect(sql).toContain('"elemental_types" varchar(50)[]'); + }); + + it('should export array fields for hero inventory system', () => { + const diagram: Diagram = { + id: 'test-diagram', + name: 'RPG Inventory System', + databaseType: DatabaseType.POSTGRESQL, + tables: [ + { + id: generateId(), + name: 'heroes', + schema: 'game', + fields: [ + { + id: generateId(), + name: 'id', + type: { id: 'bigint', name: 'bigint' }, + primaryKey: true, + unique: true, + nullable: false, + createdAt: Date.now(), + }, + { + id: generateId(), + name: 'name', + type: { id: 'varchar', name: 'varchar' }, + primaryKey: false, + unique: false, + nullable: false, + createdAt: Date.now(), + characterMaximumLength: '100', + }, + { + id: generateId(), + name: 'abilities', + type: { id: 'varchar', name: 'varchar' }, + primaryKey: false, + unique: false, + nullable: true, + createdAt: Date.now(), + characterMaximumLength: '100', + isArray: true, + comments: + 'Special abilities like Stealth, Fireball, etc', + }, + { + id: generateId(), + name: 'inventory_slots', + type: { id: 'integer', name: 'integer' }, + primaryKey: false, + unique: false, + nullable: true, + createdAt: Date.now(), + isArray: true, + comments: 'Item IDs in inventory', + }, + { + id: generateId(), + name: 'skill_levels', + type: { id: 'decimal', name: 'decimal' }, + primaryKey: false, + unique: false, + nullable: true, + createdAt: Date.now(), + precision: 5, + scale: 2, + isArray: true, + comments: 'Skill proficiency levels', + }, + ], + indexes: [], + x: 0, + y: 0, + color: '#ef4444', + isView: false, + createdAt: Date.now(), + order: 0, + }, + ], + relationships: [], + createdAt: new Date(), + updatedAt: new Date(), + }; + + const sql = exportBaseSQL({ + diagram, + targetDatabaseType: DatabaseType.POSTGRESQL, + isDBMLFlow: true, + }); + + expect(sql).toContain('CREATE TABLE "game"."heroes"'); + expect(sql).toContain('"abilities" varchar(100)[]'); + expect(sql).toContain('"inventory_slots" integer[]'); + expect(sql).toContain('"skill_levels" decimal(5, 2)[]'); + }); + + it('should export non-array fields normally when isArray is false or undefined', () => { + const diagram: Diagram = { + id: 'test-diagram', + name: 'Quest System', + databaseType: DatabaseType.POSTGRESQL, + tables: [ + { + id: generateId(), + name: 'quests', + schema: '', + fields: [ + { + id: generateId(), + name: 'id', + type: { id: 'uuid', name: 'uuid' }, + primaryKey: true, + unique: true, + nullable: false, + createdAt: Date.now(), + }, + { + id: generateId(), + name: 'title', + type: { id: 'varchar', name: 'varchar' }, + primaryKey: false, + unique: false, + nullable: 
false, + createdAt: Date.now(), + characterMaximumLength: '200', + isArray: false, + }, + { + id: generateId(), + name: 'description', + type: { id: 'text', name: 'text' }, + primaryKey: false, + unique: false, + nullable: true, + createdAt: Date.now(), + // isArray is undefined - should not be treated as array + }, + ], + indexes: [], + x: 0, + y: 0, + color: '#8b5cf6', + isView: false, + createdAt: Date.now(), + order: 0, + }, + ], + relationships: [], + createdAt: new Date(), + updatedAt: new Date(), + }; + + const sql = exportBaseSQL({ + diagram, + targetDatabaseType: DatabaseType.POSTGRESQL, + isDBMLFlow: true, + }); + + expect(sql).toContain('"title" varchar(200)'); + expect(sql).not.toContain('"title" varchar(200)[]'); + expect(sql).toContain('"description" text'); + expect(sql).not.toContain('"description" text[]'); + }); + + it('should handle mixed array and non-array fields in magical creatures table', () => { + const diagram: Diagram = { + id: 'test-diagram', + name: 'Bestiary System', + databaseType: DatabaseType.POSTGRESQL, + tables: [ + { + id: generateId(), + name: 'magical_creatures', + schema: 'bestiary', + fields: [ + { + id: generateId(), + name: 'id', + type: { id: 'bigint', name: 'bigint' }, + primaryKey: true, + unique: true, + nullable: false, + createdAt: Date.now(), + }, + { + id: generateId(), + name: 'species_name', + type: { id: 'varchar', name: 'varchar' }, + primaryKey: false, + unique: false, + nullable: false, + createdAt: Date.now(), + characterMaximumLength: '100', + }, + { + id: generateId(), + name: 'habitats', + type: { id: 'varchar', name: 'varchar' }, + primaryKey: false, + unique: false, + nullable: true, + createdAt: Date.now(), + characterMaximumLength: '80', + isArray: true, + comments: + 'Preferred habitats: forest, mountain, swamp', + }, + { + id: generateId(), + name: 'danger_level', + type: { id: 'integer', name: 'integer' }, + primaryKey: false, + unique: false, + nullable: false, + createdAt: Date.now(), + default: '1', + }, + { + id: generateId(), + name: 'resistances', + type: { id: 'varchar', name: 'varchar' }, + primaryKey: false, + unique: false, + nullable: true, + createdAt: Date.now(), + characterMaximumLength: '50', + isArray: true, + comments: 'Damage resistances', + }, + { + id: generateId(), + name: 'is_tameable', + type: { id: 'boolean', name: 'boolean' }, + primaryKey: false, + unique: false, + nullable: false, + createdAt: Date.now(), + default: 'false', + }, + ], + indexes: [], + x: 0, + y: 0, + color: '#10b981', + isView: false, + createdAt: Date.now(), + order: 0, + }, + ], + relationships: [], + createdAt: new Date(), + updatedAt: new Date(), + }; + + const sql = exportBaseSQL({ + diagram, + targetDatabaseType: DatabaseType.POSTGRESQL, + isDBMLFlow: true, + }); + + expect(sql).toContain('CREATE TABLE "bestiary"."magical_creatures"'); + expect(sql).toContain('"species_name" varchar(100)'); + expect(sql).not.toContain('"species_name" varchar(100)[]'); + expect(sql).toContain('"habitats" varchar(80)[]'); + expect(sql).toContain('"danger_level" integer'); + expect(sql).not.toContain('"danger_level" integer[]'); + expect(sql).toContain('"resistances" varchar(50)[]'); + expect(sql).toContain('"is_tameable" boolean'); + expect(sql).not.toContain('"is_tameable" boolean[]'); + }); +}); diff --git a/src/lib/data/sql-export/export-sql-script.ts b/src/lib/data/sql-export/export-sql-script.ts index d7b5e21d..345041a5 100644 --- a/src/lib/data/sql-export/export-sql-script.ts +++ b/src/lib/data/sql-export/export-sql-script.ts @@ -1,9 
+1,6 @@ import type { Diagram } from '../../domain/diagram'; import { OPENAI_API_KEY, OPENAI_API_ENDPOINT, LLM_MODEL_NAME } from '@/lib/env'; -import { - DatabaseType, - databaseTypesWithCommentSupport, -} from '@/lib/domain/database-type'; +import { DatabaseType } from '@/lib/domain/database-type'; import type { DBTable } from '@/lib/domain/db-table'; import { dataTypeMap, type DataType } from '../data-types/data-types'; import { generateCacheKey, getFromCache, setInCache } from './export-sql-cache'; @@ -12,6 +9,7 @@ import { exportPostgreSQL } from './export-per-type/postgresql'; import { exportSQLite } from './export-per-type/sqlite'; import { exportMySQL } from './export-per-type/mysql'; import { escapeSQLComment } from './export-per-type/common'; +import { databaseTypesWithCommentSupport } from '@/lib/domain/database-capabilities'; // Function to format default values with proper quoting const formatDefaultValue = (value: string): string => { @@ -343,6 +341,7 @@ export const exportBaseSQL = ({ } const quotedFieldName = getQuotedFieldName(field.name, isDBMLFlow); + sqlScript += ` ${quotedFieldName} ${typeName}`; // Add size for character types @@ -385,6 +384,11 @@ export const exportBaseSQL = ({ } } + // Add array suffix if field is an array (after type size and precision) + if (field.isArray) { + sqlScript += '[]'; + } + // Handle NOT NULL constraint if (!field.nullable) { sqlScript += ' NOT NULL'; diff --git a/src/lib/dbml/dbml-import/__tests__/dbml-array-fields.test.ts b/src/lib/dbml/dbml-import/__tests__/dbml-array-fields.test.ts new file mode 100644 index 00000000..943858b9 --- /dev/null +++ b/src/lib/dbml/dbml-import/__tests__/dbml-array-fields.test.ts @@ -0,0 +1,317 @@ +import { describe, it, expect } from 'vitest'; +import { importDBMLToDiagram } from '../dbml-import'; +import { generateDBMLFromDiagram } from '../../dbml-export/dbml-export'; +import { DatabaseType } from '@/lib/domain/database-type'; + +describe('DBML Array Fields - Fantasy RPG Theme', () => { + describe('Import - Spell and Magic Arrays', () => { + it('should import spell components as array fields', async () => { + const dbml = ` +Table "magic"."spells" { + "id" uuid [pk, not null] + "name" varchar(200) [not null] + "level" integer [not null] + "components" text[] [note: 'Magical components: bat wing, dragon scale, phoenix feather'] + "elemental_types" varchar(50)[] [note: 'Elements: fire, water, earth, air'] + "mana_cost" integer [not null] + "created_at" timestamp [not null] + + Indexes { + (name, level) [unique, name: "unique_spell"] + } +} +`; + + const result = await importDBMLToDiagram(dbml, { + databaseType: DatabaseType.POSTGRESQL, + }); + + expect(result.tables).toHaveLength(1); + + const table = result.tables![0]; + expect(table.name).toBe('spells'); + expect(table.schema).toBe('magic'); + + // Find the array fields + const components = table.fields.find( + (f) => f.name === 'components' + ); + const elementalTypes = table.fields.find( + (f) => f.name === 'elemental_types' + ); + + // Verify they are marked as arrays + expect(components).toBeDefined(); + expect(components?.isArray).toBe(true); + expect(components?.type.name).toBe('text'); + + expect(elementalTypes).toBeDefined(); + expect(elementalTypes?.isArray).toBe(true); + expect(elementalTypes?.type.name).toBe('varchar'); + expect(elementalTypes?.characterMaximumLength).toBe('50'); + + // Verify non-array fields don't have isArray set + const idField = table.fields.find((f) => f.name === 'id'); + expect(idField?.isArray).toBeUndefined(); + }); + 
+ it('should import hero inventory with various array types', async () => { + const dbml = ` +Table "heroes" { + "id" bigint [pk] + "name" varchar(100) [not null] + "abilities" varchar(100)[] + "inventory_slots" integer[] + "skill_levels" decimal(5, 2)[] + "quest_log" text[] +} +`; + + const result = await importDBMLToDiagram(dbml, { + databaseType: DatabaseType.POSTGRESQL, + }); + + const table = result.tables![0]; + + const abilities = table.fields.find((f) => f.name === 'abilities'); + expect(abilities?.isArray).toBe(true); + expect(abilities?.type.name).toBe('varchar'); + expect(abilities?.characterMaximumLength).toBe('100'); + + const inventorySlots = table.fields.find( + (f) => f.name === 'inventory_slots' + ); + expect(inventorySlots?.isArray).toBe(true); + expect(inventorySlots?.type.name).toBe('integer'); + + const skillLevels = table.fields.find( + (f) => f.name === 'skill_levels' + ); + expect(skillLevels?.isArray).toBe(true); + expect(skillLevels?.type.name).toBe('decimal'); + expect(skillLevels?.precision).toBe(5); + expect(skillLevels?.scale).toBe(2); + + const questLog = table.fields.find((f) => f.name === 'quest_log'); + expect(questLog?.isArray).toBe(true); + expect(questLog?.type.name).toBe('text'); + }); + + it('should handle mixed array and non-array fields in creature table', async () => { + const dbml = ` +Table "bestiary"."creatures" { + "id" uuid [pk] + "species_name" varchar(100) [not null] + "habitats" varchar(50)[] + "danger_level" integer [not null] + "resistances" varchar(50)[] + "is_tameable" boolean [not null] +} +`; + + const result = await importDBMLToDiagram(dbml, { + databaseType: DatabaseType.POSTGRESQL, + }); + + const table = result.tables![0]; + + // Non-array fields + const id = table.fields.find((f) => f.name === 'id'); + expect(id?.isArray).toBeUndefined(); + + const speciesName = table.fields.find( + (f) => f.name === 'species_name' + ); + expect(speciesName?.isArray).toBeUndefined(); + + const dangerLevel = table.fields.find( + (f) => f.name === 'danger_level' + ); + expect(dangerLevel?.isArray).toBeUndefined(); + + // Array fields + const habitats = table.fields.find((f) => f.name === 'habitats'); + expect(habitats?.isArray).toBe(true); + + const resistances = table.fields.find( + (f) => f.name === 'resistances' + ); + expect(resistances?.isArray).toBe(true); + }); + }); + + describe('Round-trip - Quest and Adventure Arrays', () => { + it('should preserve quest rewards array through export and re-import', async () => { + const originalDbml = ` +Table "adventures"."quests" { + "id" uuid [pk, not null] + "title" varchar(200) [not null] + "difficulty" varchar(20) [not null] + "reward_items" text[] [note: 'Legendary sword, enchanted armor, healing potion'] + "required_skills" varchar(100)[] + "experience_points" integer [not null] + "gold_reward" decimal(10, 2) [not null] + "created_at" timestamp [not null] + + Indexes { + (title, difficulty) [unique, name: "unique_quest"] + } +} +`; + + // Import the DBML + const diagram = await importDBMLToDiagram(originalDbml, { + databaseType: DatabaseType.POSTGRESQL, + }); + + // Verify array fields were imported correctly + const table = diagram.tables![0]; + const rewardItems = table.fields.find( + (f) => f.name === 'reward_items' + ); + const requiredSkills = table.fields.find( + (f) => f.name === 'required_skills' + ); + + expect(rewardItems?.isArray).toBe(true); + expect(requiredSkills?.isArray).toBe(true); + + // Export back to DBML + const { standardDbml: exportedDbml } = + 
generateDBMLFromDiagram(diagram); + + // Verify the exported DBML contains array syntax + expect(exportedDbml).toContain('text[]'); + expect(exportedDbml).toContain('"reward_items" text[]'); + expect(exportedDbml).toContain('"required_skills" varchar(100)[]'); + + // Re-import the exported DBML + const reimportedDiagram = await importDBMLToDiagram(exportedDbml, { + databaseType: DatabaseType.POSTGRESQL, + }); + + // Verify array fields are still marked as arrays + const reimportedTable = reimportedDiagram.tables![0]; + const reimportedRewards = reimportedTable.fields.find( + (f) => f.name === 'reward_items' + ); + const reimportedSkills = reimportedTable.fields.find( + (f) => f.name === 'required_skills' + ); + + expect(reimportedRewards?.isArray).toBe(true); + expect(reimportedSkills?.isArray).toBe(true); + }); + + it('should handle guild members with different array types in round-trip', async () => { + const originalDbml = ` +Table "guilds"."members" { + "id" uuid [pk] + "name" varchar(100) [not null] + "class_specializations" varchar(50)[] + "completed_quest_ids" integer[] + "skill_ratings" decimal(3, 1)[] + "titles_earned" text[] +} +`; + + // Import + const diagram = await importDBMLToDiagram(originalDbml, { + databaseType: DatabaseType.POSTGRESQL, + }); + + // Export + const { standardDbml: exportedDbml } = + generateDBMLFromDiagram(diagram); + + // Verify exported DBML has correct array syntax with types + expect(exportedDbml).toContain('varchar(50)[]'); + expect(exportedDbml).toContain('integer[]'); + expect(exportedDbml).toContain('decimal(3,1)[]'); + expect(exportedDbml).toContain('text[]'); + + // Re-import + const reimportedDiagram = await importDBMLToDiagram(exportedDbml, { + databaseType: DatabaseType.POSTGRESQL, + }); + + const table = reimportedDiagram.tables![0]; + + const classSpecs = table.fields.find( + (f) => f.name === 'class_specializations' + ); + expect(classSpecs?.isArray).toBe(true); + expect(classSpecs?.characterMaximumLength).toBe('50'); + + const questIds = table.fields.find( + (f) => f.name === 'completed_quest_ids' + ); + expect(questIds?.isArray).toBe(true); + + const skillRatings = table.fields.find( + (f) => f.name === 'skill_ratings' + ); + expect(skillRatings?.isArray).toBe(true); + expect(skillRatings?.precision).toBe(3); + expect(skillRatings?.scale).toBe(1); + + const titles = table.fields.find((f) => f.name === 'titles_earned'); + expect(titles?.isArray).toBe(true); + }); + + it('should preserve dungeon loot tables with mixed array and non-array fields', async () => { + const originalDbml = ` +Table "dungeons"."loot_tables" { + "id" bigint [pk] + "dungeon_name" varchar(150) [not null] + "boss_name" varchar(100) + "common_drops" text[] + "rare_drops" text[] + "legendary_drops" text[] + "gold_range_min" integer [not null] + "gold_range_max" integer [not null] + "drop_rates" decimal(5, 2)[] +} +`; + + // Import, export, and re-import + const diagram = await importDBMLToDiagram(originalDbml, { + databaseType: DatabaseType.POSTGRESQL, + }); + + const { standardDbml: exportedDbml } = + generateDBMLFromDiagram(diagram); + + const reimportedDiagram = await importDBMLToDiagram(exportedDbml, { + databaseType: DatabaseType.POSTGRESQL, + }); + + const table = reimportedDiagram.tables![0]; + + // Verify non-array fields + expect( + table.fields.find((f) => f.name === 'id')?.isArray + ).toBeUndefined(); + expect( + table.fields.find((f) => f.name === 'dungeon_name')?.isArray + ).toBeUndefined(); + expect( + table.fields.find((f) => f.name === 
'gold_range_min')?.isArray + ).toBeUndefined(); + + // Verify array fields + expect( + table.fields.find((f) => f.name === 'common_drops')?.isArray + ).toBe(true); + expect( + table.fields.find((f) => f.name === 'rare_drops')?.isArray + ).toBe(true); + expect( + table.fields.find((f) => f.name === 'legendary_drops')?.isArray + ).toBe(true); + expect( + table.fields.find((f) => f.name === 'drop_rates')?.isArray + ).toBe(true); + }); + }); +}); diff --git a/src/lib/dbml/dbml-import/__tests__/dbml-import-fantasy-examples.test.ts b/src/lib/dbml/dbml-import/__tests__/dbml-import-fantasy-examples.test.ts index c81be0a1..1393db70 100644 --- a/src/lib/dbml/dbml-import/__tests__/dbml-import-fantasy-examples.test.ts +++ b/src/lib/dbml/dbml-import/__tests__/dbml-import-fantasy-examples.test.ts @@ -1,6 +1,7 @@ import { describe, it, expect } from 'vitest'; import { importDBMLToDiagram } from '../dbml-import'; import { DBCustomTypeKind } from '@/lib/domain/db-custom-type'; +import { DatabaseType } from '@/lib/domain/database-type'; describe('DBML Import - Fantasy Examples', () => { describe('Magical Academy System', () => { @@ -149,7 +150,9 @@ Table ranks { max_spell_level integer [not null] }`; - const diagram = await importDBMLToDiagram(magicalAcademyDBML); + const diagram = await importDBMLToDiagram(magicalAcademyDBML, { + databaseType: DatabaseType.POSTGRESQL, + }); // Verify tables expect(diagram.tables).toHaveLength(8); @@ -366,7 +369,9 @@ Note marketplace_note { 'This marketplace handles both standard purchases and barter trades' }`; - const diagram = await importDBMLToDiagram(marketplaceDBML); + const diagram = await importDBMLToDiagram(marketplaceDBML, { + databaseType: DatabaseType.POSTGRESQL, + }); // Verify tables expect(diagram.tables).toHaveLength(7); @@ -567,7 +572,9 @@ Note quest_system_note { 'Quest difficulty and status use enums that will be converted to varchar' }`; - const diagram = await importDBMLToDiagram(questSystemDBML); + const diagram = await importDBMLToDiagram(questSystemDBML, { + databaseType: DatabaseType.POSTGRESQL, + }); // Verify tables expect(diagram.tables).toHaveLength(7); @@ -657,7 +664,9 @@ Table projects { priority enum // inline enum without values - will be converted to varchar }`; - const diagram = await importDBMLToDiagram(dbmlWithEnums); + const diagram = await importDBMLToDiagram(dbmlWithEnums, { + databaseType: DatabaseType.POSTGRESQL, + }); // Verify customTypes are created for enums expect(diagram.customTypes).toBeDefined(); @@ -744,7 +753,9 @@ Table orders { status order_status [not null] }`; - const diagram = await importDBMLToDiagram(dbmlWithEnumNotes); + const diagram = await importDBMLToDiagram(dbmlWithEnumNotes, { + databaseType: DatabaseType.POSTGRESQL, + }); // Verify enum is created expect(diagram.customTypes).toHaveLength(1); @@ -788,7 +799,9 @@ Table admin.users { status admin.status }`; - const diagram = await importDBMLToDiagram(dbmlWithSameEnumNames); + const diagram = await importDBMLToDiagram(dbmlWithSameEnumNames, { + databaseType: DatabaseType.POSTGRESQL, + }); // Verify both enums are created expect(diagram.customTypes).toHaveLength(2); @@ -891,7 +904,9 @@ Note dragon_note { 'Dragons are very protective of their hoards!' 
}`; - const diagram = await importDBMLToDiagram(edgeCaseDBML); + const diagram = await importDBMLToDiagram(edgeCaseDBML, { + databaseType: DatabaseType.POSTGRESQL, + }); // Verify preprocessing worked expect(diagram.tables).toHaveLength(2); @@ -956,7 +971,9 @@ Note dragon_note { it('should handle empty DBML gracefully', async () => { const emptyDBML = ''; - const diagram = await importDBMLToDiagram(emptyDBML); + const diagram = await importDBMLToDiagram(emptyDBML, { + databaseType: DatabaseType.POSTGRESQL, + }); expect(diagram.tables).toHaveLength(0); expect(diagram.relationships).toHaveLength(0); @@ -969,7 +986,9 @@ Note dragon_note { /* Multi-line comment */ `; - const diagram = await importDBMLToDiagram(commentOnlyDBML); + const diagram = await importDBMLToDiagram(commentOnlyDBML, { + databaseType: DatabaseType.POSTGRESQL, + }); expect(diagram.tables).toHaveLength(0); expect(diagram.relationships).toHaveLength(0); @@ -980,7 +999,9 @@ Note dragon_note { Table empty_table { id int }`; - const diagram = await importDBMLToDiagram(minimalDBML); + const diagram = await importDBMLToDiagram(minimalDBML, { + databaseType: DatabaseType.POSTGRESQL, + }); expect(diagram.tables).toHaveLength(1); expect(diagram.tables?.[0]?.fields).toHaveLength(1); @@ -996,7 +1017,9 @@ Table "aa"."users" { Table "bb"."users" { id integer [primary key] }`; - const diagram = await importDBMLToDiagram(dbml); + const diagram = await importDBMLToDiagram(dbml, { + databaseType: DatabaseType.POSTGRESQL, + }); expect(diagram.tables).toHaveLength(2); @@ -1071,7 +1094,9 @@ Table "public_3"."comments" { id [unique, name: "public_3_index_1"] } }`; - const diagram = await importDBMLToDiagram(dbml); + const diagram = await importDBMLToDiagram(dbml, { + databaseType: DatabaseType.POSTGRESQL, + }); // Verify tables expect(diagram.tables).toHaveLength(3); @@ -1256,7 +1281,9 @@ Table products { Note: 'This table stores product information' }`; - const diagram = await importDBMLToDiagram(dbmlWithTableNote); + const diagram = await importDBMLToDiagram(dbmlWithTableNote, { + databaseType: DatabaseType.POSTGRESQL, + }); expect(diagram.tables).toHaveLength(1); const productsTable = diagram.tables?.[0]; @@ -1273,7 +1300,9 @@ Table orders { total numeric(10,2) [note: 'Order total including tax'] }`; - const diagram = await importDBMLToDiagram(dbmlWithFieldNote); + const diagram = await importDBMLToDiagram(dbmlWithFieldNote, { + databaseType: DatabaseType.POSTGRESQL, + }); expect(diagram.tables).toHaveLength(1); const ordersTable = diagram.tables?.[0]; diff --git a/src/lib/dbml/dbml-import/__tests__/dbml-import.test.ts b/src/lib/dbml/dbml-import/__tests__/dbml-import.test.ts index 694853b8..850d6f77 100644 --- a/src/lib/dbml/dbml-import/__tests__/dbml-import.test.ts +++ b/src/lib/dbml/dbml-import/__tests__/dbml-import.test.ts @@ -5,6 +5,7 @@ import { importDBMLToDiagram, } from '../dbml-import'; import { Parser } from '@dbml/core'; +import { DatabaseType } from '@/lib/domain/database-type'; describe('DBML Import', () => { describe('preprocessDBML', () => { @@ -22,7 +23,7 @@ TableGroup "Test Group" [color: #CA4243] { Table posts { id int }`; - const result = preprocessDBML(dbml); + const { content: result } = preprocessDBML(dbml); expect(result).not.toContain('TableGroup'); expect(result).toContain('Table users'); expect(result).toContain('Table posts'); @@ -37,20 +38,20 @@ Table users { Note note_test { 'This is a note' }`; - const result = preprocessDBML(dbml); + const { content: result } = preprocessDBML(dbml); 
expect(result).not.toContain('Note'); expect(result).toContain('Table users'); }); - it('should convert array types to text', () => { + it('should remove array syntax while preserving base type', () => { const dbml = ` Table users { tags text[] domains varchar[] }`; - const result = preprocessDBML(dbml); + const { content: result } = preprocessDBML(dbml); expect(result).toContain('tags text'); - expect(result).toContain('domains text'); + expect(result).toContain('domains varchar'); expect(result).not.toContain('[]'); }); @@ -60,7 +61,7 @@ Table users { status enum verification_type enum // comment here }`; - const result = preprocessDBML(dbml); + const { content: result } = preprocessDBML(dbml); expect(result).toContain('status varchar'); expect(result).toContain('verification_type varchar'); expect(result).not.toContain('enum'); @@ -71,7 +72,7 @@ Table users { Table users [headercolor: #24BAB1] { id int }`; - const result = preprocessDBML(dbml); + const { content: result } = preprocessDBML(dbml); expect(result).toContain('Table users {'); expect(result).not.toContain('headercolor'); }); @@ -105,7 +106,9 @@ Note note_test { 'This is a test note' }`; - const diagram = await importDBMLToDiagram(complexDBML); + const diagram = await importDBMLToDiagram(complexDBML, { + databaseType: DatabaseType.POSTGRESQL, + }); expect(diagram.tables).toHaveLength(2); expect(diagram.relationships).toHaveLength(1); @@ -149,7 +152,7 @@ Note note_1750185617764 { }`; // Test that preprocessing handles all issues - const preprocessed = preprocessDBML(problematicDBML); + const { content: preprocessed } = preprocessDBML(problematicDBML); const sanitized = sanitizeDBML(preprocessed); // Should not throw diff --git a/src/lib/dbml/dbml-import/__tests__/dbml-integration.test.ts b/src/lib/dbml/dbml-import/__tests__/dbml-integration.test.ts index fdda08e9..aa82b397 100644 --- a/src/lib/dbml/dbml-import/__tests__/dbml-integration.test.ts +++ b/src/lib/dbml/dbml-import/__tests__/dbml-integration.test.ts @@ -38,7 +38,9 @@ Note test_note { 'This is a test note' }`; - const diagram = await importDBMLToDiagram(dbmlContent); + const diagram = await importDBMLToDiagram(dbmlContent, { + databaseType: DatabaseType.POSTGRESQL, + }); // Verify basic structure expect(diagram).toBeDefined(); @@ -96,7 +98,9 @@ Table products [headercolor: #FF0000] { Ref: products.id < users.favorite_product_id`; - const diagram = await importDBMLToDiagram(dbmlContent); + const diagram = await importDBMLToDiagram(dbmlContent, { + databaseType: DatabaseType.POSTGRESQL, + }); expect(diagram.tables).toHaveLength(2); @@ -119,12 +123,16 @@ Ref: products.id < users.favorite_product_id`; it('should handle empty or invalid DBML gracefully', async () => { // Empty DBML - const emptyDiagram = await importDBMLToDiagram(''); + const emptyDiagram = await importDBMLToDiagram('', { + databaseType: DatabaseType.POSTGRESQL, + }); expect(emptyDiagram.tables).toHaveLength(0); expect(emptyDiagram.relationships).toHaveLength(0); // Only comments - const commentDiagram = await importDBMLToDiagram('// Just a comment'); + const commentDiagram = await importDBMLToDiagram('// Just a comment', { + databaseType: DatabaseType.POSTGRESQL, + }); expect(commentDiagram.tables).toHaveLength(0); expect(commentDiagram.relationships).toHaveLength(0); }); @@ -133,7 +141,9 @@ Ref: products.id < users.favorite_product_id`; const dbmlContent = `Table test { id int [pk] }`; - const diagram = await importDBMLToDiagram(dbmlContent); + const diagram = await importDBMLToDiagram(dbmlContent, { + 
databaseType: DatabaseType.GENERIC, + }); // Default values expect(diagram.name).toBe('DBML Import'); diff --git a/src/lib/dbml/dbml-import/dbml-import-error.ts b/src/lib/dbml/dbml-import/dbml-import-error.ts index f6286f43..a13ec284 100644 --- a/src/lib/dbml/dbml-import/dbml-import-error.ts +++ b/src/lib/dbml/dbml-import/dbml-import-error.ts @@ -1,4 +1,6 @@ import type { CompilerError } from '@dbml/core/types/parse/error'; +import type { DatabaseType } from '@/lib/domain/database-type'; +import { databaseSupportsArrays } from '@/lib/domain/database-capabilities'; export interface DBMLError { message: string; @@ -6,8 +8,59 @@ export interface DBMLError { column: number; } +export class DBMLValidationError extends Error { + public readonly dbmlError: DBMLError; + + constructor(message: string, line: number, column: number = 1) { + super(message); + this.name = 'DBMLValidationError'; + this.dbmlError = { message, line, column }; + } +} + +export const getPositionFromIndex = ( + content: string, + matchIndex: number +): { line: number; column: number } => { + const lines = content.substring(0, matchIndex).split('\n'); + return { + line: lines.length, + column: lines[lines.length - 1].length + 1, + }; +}; + +export const validateArrayTypesForDatabase = ( + content: string, + databaseType: DatabaseType +): void => { + // Only validate if database doesn't support arrays + if (databaseSupportsArrays(databaseType)) { + return; + } + + const arrayFieldPattern = /"?(\w+)"?\s+(\w+(?:\(\d+(?:,\s*\d+)?\))?)\[\]/g; + const matches = [...content.matchAll(arrayFieldPattern)]; + + for (const match of matches) { + const fieldName = match[1]; + const dataType = match[2]; + const { line, column } = getPositionFromIndex(content, match.index!); + + throw new DBMLValidationError( + `Array types are not supported for ${databaseType} database. 
Field "${fieldName}" has array type "${dataType}[]" which is not allowed.`, + line, + column + ); + } +}; + export function parseDBMLError(error: unknown): DBMLError | null { try { + // Check for our custom DBMLValidationError + if (error instanceof DBMLValidationError) { + return error.dbmlError; + } + if (typeof error === 'string') { const parsed = JSON.parse(error); if (parsed.diags?.[0]) { diff --git a/src/lib/dbml/dbml-import/dbml-import.ts b/src/lib/dbml/dbml-import/dbml-import.ts index b9c7b33d..95c9711f 100644 --- a/src/lib/dbml/dbml-import/dbml-import.ts +++ b/src/lib/dbml/dbml-import/dbml-import.ts @@ -14,13 +14,21 @@ import { DBCustomTypeKind, type DBCustomType, } from '@/lib/domain/db-custom-type'; +import { validateArrayTypesForDatabase } from './dbml-import-error'; export const defaultDBMLDiagramName = 'DBML Import'; -// Preprocess DBML to handle unsupported features -export const preprocessDBML = (content: string): string => { +interface PreprocessDBMLResult { + content: string; + arrayFields: Map>; +} + +export const preprocessDBML = (content: string): PreprocessDBMLResult => { let processed = content; + // Track array fields found during preprocessing + const arrayFields = new Map>(); + // Remove TableGroup blocks (not supported by parser) processed = processed.replace(/TableGroup\s+[^{]*\{[^}]*\}/gs, ''); @@ -30,8 +38,37 @@ export const preprocessDBML = (content: string): string => { // Don't remove enum definitions - we'll parse them // processed = processed.replace(/enum\s+\w+\s*\{[^}]*\}/gs, ''); - // Handle array types by converting them to text - processed = processed.replace(/(\w+)\[\]/g, 'text'); + // Handle array types by tracking them and converting syntax for DBML parser + // Note: DBML doesn't officially support array syntax, so we convert type[] to type + // but track which fields should be arrays + + // First, find all array field declarations and track them + const tablePattern = + /Table\s+(?:"([^"]+)"\.)?(?:"([^"]+)"|(\w+))\s*(?:\[[^\]]*\])?\s*\{([^}]+)\}/gs; + let match; + + while ((match = tablePattern.exec(content)) !== null) { + const schema = match[1] || ''; + const tableName = match[2] || match[3]; + const tableBody = match[4]; + const fullTableName = schema ? 
`${schema}.${tableName}` : tableName; + + // Find array field declarations within this table + const fieldPattern = /"?(\w+)"?\s+(\w+(?:\([^)]+\))?)\[\]/g; + let fieldMatch; + + while ((fieldMatch = fieldPattern.exec(tableBody)) !== null) { + const fieldName = fieldMatch[1]; + + if (!arrayFields.has(fullTableName)) { + arrayFields.set(fullTableName, new Set()); + } + arrayFields.get(fullTableName)!.add(fieldName); + } + } + + // Now convert array syntax for DBML parser (keep the base type, remove []) + processed = processed.replace(/(\w+(?:\(\d+(?:,\s*\d+)?\))?)\[\]/g, '$1'); // Handle inline enum types without values by converting to varchar processed = processed.replace( @@ -46,7 +83,7 @@ export const preprocessDBML = (content: string): string => { 'Table $1 {' ); - return processed; + return { content: processed, arrayFields }; }; // Simple function to replace Spanish special characters @@ -85,6 +122,7 @@ interface DBMLField { pk?: boolean; not_null?: boolean; increment?: boolean; + isArray?: boolean; characterMaximumLength?: string | null; precision?: number | null; scale?: number | null; @@ -190,8 +228,8 @@ const determineCardinality = ( export const importDBMLToDiagram = async ( dbmlContent: string, - options?: { - databaseType?: DatabaseType; + options: { + databaseType: DatabaseType; } ): Promise => { try { @@ -208,9 +246,13 @@ export const importDBMLToDiagram = async ( }; } + // Validate array types BEFORE preprocessing (preprocessing removes []) + validateArrayTypesForDatabase(dbmlContent, options.databaseType); + const parser = new Parser(); // Preprocess and sanitize DBML content - const preprocessedContent = preprocessDBML(dbmlContent); + const { content: preprocessedContent, arrayFields } = + preprocessDBML(dbmlContent); const sanitizedContent = sanitizeDBML(preprocessedContent); // Handle content that becomes empty after preprocessing @@ -344,11 +386,24 @@ export const importDBMLToDiagram = async ( const rawDefault = String( field.dbdefault.value ); - // Remove ALL quotes (single, double, backticks) to clean the value - // The SQL export layer will handle adding proper quotes when needed defaultValue = rawDefault.replace(/['"`]/g, ''); } + // Check if this field should be an array + const fullTableName = schemaName + ? `${schemaName}.${table.name}` + : table.name; + + let isArray = arrayFields + .get(fullTableName) + ?.has(field.name); + + if (!isArray && schemaName) { + isArray = arrayFields + .get(table.name) + ?.has(field.name); + } + return { name: field.name, type: field.type, @@ -356,6 +411,7 @@ export const importDBMLToDiagram = async ( pk: field.pk, not_null: field.not_null, increment: field.increment, + isArray: isArray || undefined, note: field.note, default: defaultValue, ...getFieldExtraAttributes(field, allEnums), @@ -503,6 +559,8 @@ export const importDBMLToDiagram = async ( characterMaximumLength: field.characterMaximumLength, precision: field.precision, scale: field.scale, + ...(field.increment ? { increment: field.increment } : {}), + ...(field.isArray ? { isArray: field.isArray } : {}), ...(fieldComment ? { comments: fieldComment } : {}), ...(field.default ? 
{ default: field.default } : {}), }; diff --git a/src/lib/dbml/dbml-import/verify-dbml.ts b/src/lib/dbml/dbml-import/verify-dbml.ts index 765b6183..9bada183 100644 --- a/src/lib/dbml/dbml-import/verify-dbml.ts +++ b/src/lib/dbml/dbml-import/verify-dbml.ts @@ -1,10 +1,19 @@ import { Parser } from '@dbml/core'; import { preprocessDBML, sanitizeDBML } from './dbml-import'; import type { DBMLError } from './dbml-import-error'; -import { parseDBMLError } from './dbml-import-error'; +import { + parseDBMLError, + validateArrayTypesForDatabase, +} from './dbml-import-error'; +import type { DatabaseType } from '@/lib/domain/database-type'; export const verifyDBML = ( - content: string + content: string, + { + databaseType, + }: { + databaseType: DatabaseType; + } ): | { hasError: true; @@ -16,8 +25,12 @@ export const verifyDBML = ( hasError: false; } => { try { - const preprocessedContent = preprocessDBML(content); + // Validate array types BEFORE preprocessing (preprocessing removes []) + validateArrayTypesForDatabase(content, databaseType); + + const { content: preprocessedContent } = preprocessDBML(content); const sanitizedContent = sanitizeDBML(preprocessedContent); + const parser = new Parser(); parser.parse(sanitizedContent, 'dbmlv2'); } catch (e) { diff --git a/src/lib/domain/database-capabilities.ts b/src/lib/domain/database-capabilities.ts new file mode 100644 index 00000000..8fad91bb --- /dev/null +++ b/src/lib/domain/database-capabilities.ts @@ -0,0 +1,57 @@ +import { DatabaseType } from './database-type'; + +export interface DatabaseCapabilities { + supportsArrays?: boolean; + supportsCustomTypes?: boolean; + supportsSchemas?: boolean; + supportsComments?: boolean; +} + +export const DATABASE_CAPABILITIES: Record = + { + [DatabaseType.POSTGRESQL]: { + supportsArrays: true, + supportsCustomTypes: true, + supportsSchemas: true, + supportsComments: true, + }, + [DatabaseType.COCKROACHDB]: { + supportsArrays: true, + supportsSchemas: true, + supportsComments: true, + }, + [DatabaseType.MYSQL]: {}, + [DatabaseType.MARIADB]: {}, + [DatabaseType.SQL_SERVER]: { + supportsSchemas: true, + }, + [DatabaseType.SQLITE]: {}, + [DatabaseType.CLICKHOUSE]: { + supportsSchemas: true, + }, + [DatabaseType.ORACLE]: { + supportsSchemas: true, + supportsComments: true, + }, + [DatabaseType.GENERIC]: {}, + }; + +export const getDatabaseCapabilities = ( + databaseType: DatabaseType +): DatabaseCapabilities => { + return DATABASE_CAPABILITIES[databaseType]; +}; + +export const databaseSupportsArrays = (databaseType: DatabaseType): boolean => { + return getDatabaseCapabilities(databaseType).supportsArrays ?? false; +}; + +export const databaseTypesWithCommentSupport: DatabaseType[] = Object.keys( + DATABASE_CAPABILITIES +).filter( + (dbType) => DATABASE_CAPABILITIES[dbType as DatabaseType].supportsComments +) as DatabaseType[]; + +export const supportsCustomTypes = (databaseType: DatabaseType): boolean => { + return getDatabaseCapabilities(databaseType).supportsCustomTypes ?? 
false; +}; diff --git a/src/lib/domain/database-type.ts b/src/lib/domain/database-type.ts index 426cd6da..5168cdfe 100644 --- a/src/lib/domain/database-type.ts +++ b/src/lib/domain/database-type.ts @@ -9,9 +9,3 @@ export enum DatabaseType { COCKROACHDB = 'cockroachdb', ORACLE = 'oracle', } - -export const databaseTypesWithCommentSupport: DatabaseType[] = [ - DatabaseType.POSTGRESQL, - DatabaseType.COCKROACHDB, - DatabaseType.ORACLE, -]; diff --git a/src/lib/domain/db-field.ts b/src/lib/domain/db-field.ts index 042530e3..3f7ef153 100644 --- a/src/lib/domain/db-field.ts +++ b/src/lib/domain/db-field.ts @@ -2,9 +2,10 @@ import { z } from 'zod'; import { dataTypeSchema, findDataTypeDataById, + supportsArrayDataType, type DataType, } from '../data/data-types/data-types'; -import type { DatabaseType } from './database-type'; +import { DatabaseType } from './database-type'; export interface DBField { id: string; @@ -14,6 +15,7 @@ export interface DBField { unique: boolean; nullable: boolean; increment?: boolean | null; + isArray?: boolean | null; createdAt: number; characterMaximumLength?: string | null; precision?: number | null; @@ -31,6 +33,7 @@ export const dbFieldSchema: z.ZodType = z.object({ unique: z.boolean(), nullable: z.boolean(), increment: z.boolean().or(z.null()).optional(), + isArray: z.boolean().or(z.null()).optional(), createdAt: z.number(), characterMaximumLength: z.string().or(z.null()).optional(), precision: z.number().or(z.null()).optional(), @@ -52,11 +55,26 @@ export const generateDBFieldSuffix = ( typeId?: string; } = {} ): string => { + let suffix = ''; + if (databaseType && forceExtended && typeId) { - return generateExtendedSuffix(field, databaseType, typeId); + suffix = generateExtendedSuffix(field, databaseType, typeId); + } else { + suffix = generateStandardSuffix(field); } - return generateStandardSuffix(field); + // Add array notation if field is an array + if ( + field.isArray && + supportsArrayDataType( + typeId ?? field.type.id, + databaseType ?? DatabaseType.GENERIC + ) + ) { + suffix += '[]'; + } + + return suffix; }; const generateExtendedSuffix = ( diff --git a/src/lib/domain/db-schema.ts b/src/lib/domain/db-schema.ts index e1fe87f9..3cafa895 100644 --- a/src/lib/domain/db-schema.ts +++ b/src/lib/domain/db-schema.ts @@ -1,4 +1,5 @@ -import { DatabaseType } from './database-type'; +import { DATABASE_CAPABILITIES } from './database-capabilities'; +import type { DatabaseType } from './database-type'; export interface DBSchema { id: string; @@ -18,10 +19,8 @@ export const schemaNameToDomainSchemaName = ( ? 
undefined : schema?.trim(); -export const databasesWithSchemas: DatabaseType[] = [ - DatabaseType.POSTGRESQL, - DatabaseType.SQL_SERVER, - DatabaseType.CLICKHOUSE, - DatabaseType.COCKROACHDB, - DatabaseType.ORACLE, -]; +export const databasesWithSchemas: DatabaseType[] = Object.keys( + DATABASE_CAPABILITIES +).filter( + (dbType) => DATABASE_CAPABILITIES[dbType as DatabaseType].supportsSchemas +) as DatabaseType[]; diff --git a/src/lib/domain/diff/diff-check/diff-check.ts b/src/lib/domain/diff/diff-check/diff-check.ts index 62c949e4..e82658ca 100644 --- a/src/lib/domain/diff/diff-check/diff-check.ts +++ b/src/lib/domain/diff/diff-check/diff-check.ts @@ -28,6 +28,16 @@ export function getDiffMapKey({ : `${diffObject}-${objectId}`; } +const isOneOfDefined = ( + ...values: (string | number | boolean | undefined | null)[] +): boolean => { + return values.some((value) => value !== undefined && value !== null); +}; + +const normalizeBoolean = (value: boolean | undefined | null): boolean => { + return value === true; +}; + export interface GenerateDiffOptions { includeTables?: boolean; includeFields?: boolean; @@ -552,6 +562,8 @@ function compareFieldProperties({ 'characterMaximumLength', 'scale', 'precision', + 'increment', + 'isArray', ]; const changedAttributes: FieldDiffAttribute[] = []; @@ -620,6 +632,24 @@ function compareFieldProperties({ changedAttributes.push('precision'); } + if ( + attributesToCheck.includes('increment') && + isOneOfDefined(newField.increment, oldField.increment) && + normalizeBoolean(oldField.increment) !== + normalizeBoolean(newField.increment) + ) { + changedAttributes.push('increment'); + } + + if ( + attributesToCheck.includes('isArray') && + isOneOfDefined(newField.isArray, oldField.isArray) && + normalizeBoolean(oldField.isArray) !== + normalizeBoolean(newField.isArray) + ) { + changedAttributes.push('isArray'); + } + if (changedAttributes.length > 0) { for (const attribute of changedAttributes) { diffMap.set( diff --git a/src/lib/domain/diff/field-diff.ts b/src/lib/domain/diff/field-diff.ts index b7fc20e1..93d4ad23 100644 --- a/src/lib/domain/diff/field-diff.ts +++ b/src/lib/domain/diff/field-diff.ts @@ -15,7 +15,9 @@ export type FieldDiffAttribute = | 'comments' | 'characterMaximumLength' | 'precision' - | 'scale'; + | 'scale' + | 'increment' + | 'isArray'; export const fieldDiffAttributeSchema: z.ZodType = z.union([ z.literal('name'), diff --git a/src/pages/editor-page/canvas/table-node/table-edit-mode/table-edit-mode-field.tsx b/src/pages/editor-page/canvas/table-node/table-edit-mode/table-edit-mode-field.tsx index 3a75054d..69778c22 100644 --- a/src/pages/editor-page/canvas/table-node/table-edit-mode/table-edit-mode-field.tsx +++ b/src/pages/editor-page/canvas/table-node/table-edit-mode/table-edit-mode-field.tsx @@ -2,7 +2,7 @@ import React, { useEffect } from 'react'; import { KeyRound, Trash2 } from 'lucide-react'; import { Input } from '@/components/input/input'; import { generateDBFieldSuffix, type DBField } from '@/lib/domain/db-field'; -import type { DBTable } from '@/lib/domain'; +import type { DatabaseType, DBTable } from '@/lib/domain'; import { useUpdateTableField } from '@/hooks/use-update-table-field'; import { Tooltip, @@ -18,10 +18,11 @@ export interface TableEditModeFieldProps { table: DBTable; field: DBField; focused?: boolean; + databaseType: DatabaseType; } export const TableEditModeField: React.FC = React.memo( - ({ table, field, focused = false }) => { + ({ table, field, focused = false, databaseType }) => { const { t } = 
useTranslation(); const [showHighlight, setShowHighlight] = React.useState(false); @@ -102,7 +103,9 @@ export const TableEditModeField: React.FC = React.memo( 'side_panel.tables_section.table.field_type' )} value={field.type.id} - valueSuffix={generateDBFieldSuffix(field)} + valueSuffix={generateDBFieldSuffix(field, { + databaseType, + })} optionSuffix={(option) => generateFieldSuffix(option.value) } @@ -119,9 +122,9 @@ export const TableEditModeField: React.FC = React.memo( {field.type.name} - {field.characterMaximumLength - ? `(${field.characterMaximumLength})` - : ''} + {generateDBFieldSuffix(field, { + databaseType, + })} diff --git a/src/pages/editor-page/canvas/table-node/table-edit-mode/table-edit-mode.tsx b/src/pages/editor-page/canvas/table-node/table-edit-mode/table-edit-mode.tsx index c62f4ce3..08073007 100644 --- a/src/pages/editor-page/canvas/table-node/table-edit-mode/table-edit-mode.tsx +++ b/src/pages/editor-page/canvas/table-node/table-edit-mode/table-edit-mode.tsx @@ -300,6 +300,7 @@ export const TableEditMode: React.FC = React.memo( table={table} field={field} focused={focusFieldId === field.id} + databaseType={databaseType} /> ))} diff --git a/src/pages/editor-page/canvas/table-node/table-node-field.tsx b/src/pages/editor-page/canvas/table-node/table-node-field.tsx index 8d54ee96..d9f420b4 100644 --- a/src/pages/editor-page/canvas/table-node/table-node-field.tsx +++ b/src/pages/editor-page/canvas/table-node/table-node-field.tsx @@ -67,6 +67,7 @@ const arePropsEqual = ( nextProps.field.characterMaximumLength && prevProps.field.precision === nextProps.field.precision && prevProps.field.scale === nextProps.field.scale && + prevProps.field.isArray === nextProps.field.isArray && prevProps.focused === nextProps.focused && prevProps.highlighted === nextProps.highlighted && prevProps.visible === nextProps.visible && @@ -77,7 +78,8 @@ const arePropsEqual = ( export const TableNodeField: React.FC = React.memo( ({ field, focused, tableNodeId, highlighted, visible, isConnectable }) => { - const { relationships, readonly, highlightedCustomType } = useChartDB(); + const { relationships, readonly, highlightedCustomType, databaseType } = + useChartDB(); const updateNodeInternals = useUpdateNodeInternals(); const connection = useConnection(); @@ -152,6 +154,7 @@ export const TableNodeField: React.FC = React.memo( getFieldNewCharacterMaximumLength, getFieldNewPrecision, getFieldNewScale, + getFieldNewIsArray, checkIfFieldHasChange, isSummaryOnly, } = useDiff(); @@ -170,6 +173,7 @@ export const TableNodeField: React.FC = React.memo( fieldDiffChangedPrimaryKey: ReturnType< typeof getFieldNewPrimaryKey >; + fieldDiffChangedIsArray: ReturnType; isDiffFieldChanged: boolean; }>({ isDiffFieldRemoved: false, @@ -181,6 +185,7 @@ export const TableNodeField: React.FC = React.memo( fieldDiffChangedScale: null, fieldDiffChangedPrecision: null, fieldDiffChangedPrimaryKey: null, + fieldDiffChangedIsArray: null, isDiffFieldChanged: false, }); @@ -214,6 +219,9 @@ export const TableNodeField: React.FC = React.memo( fieldDiffChangedPrecision: getFieldNewPrecision({ fieldId: field.id, }), + fieldDiffChangedIsArray: getFieldNewIsArray({ + fieldId: field.id, + }), isDiffFieldChanged: checkIfFieldHasChange({ fieldId: field.id, tableId: tableNodeId, @@ -232,6 +240,7 @@ export const TableNodeField: React.FC = React.memo( getFieldNewCharacterMaximumLength, getFieldNewPrecision, getFieldNewScale, + getFieldNewIsArray, field.id, tableNodeId, ]); @@ -247,8 +256,23 @@ export const TableNodeField: React.FC = 
React.memo( fieldDiffChangedCharacterMaximumLength, fieldDiffChangedScale, fieldDiffChangedPrecision, + fieldDiffChangedIsArray, } = diffState; + const isFieldAttributeChanged = useMemo(() => { + return ( + fieldDiffChangedCharacterMaximumLength || + fieldDiffChangedScale || + fieldDiffChangedPrecision || + fieldDiffChangedIsArray + ); + }, [ + fieldDiffChangedCharacterMaximumLength, + fieldDiffChangedScale, + fieldDiffChangedPrecision, + fieldDiffChangedIsArray, + ]); + const isCustomTypeHighlighted = useMemo(() => { if (!highlightedCustomType) return false; return field.type.name === highlightedCustomType.name; @@ -342,17 +366,14 @@ export const TableNodeField: React.FC = React.memo( )}
{isDiffFieldRemoved ? ( - + ) : isDiffNewField ? ( - + ) : isDiffFieldChanged && !isSummaryOnly ? ( ) : null} @@ -392,13 +413,17 @@ export const TableNodeField: React.FC = React.memo( ) : null}
-
+
{(field.primaryKey && !fieldDiffChangedPrimaryKey?.old) || fieldDiffChangedPrimaryKey?.new ? (
= React.memo(
) : null} -
= React.memo( )} > - {fieldDiffChangedType ? ( - <> - + { + // fieldDiffChangedType ? ( + // <> + // + // { + // fieldDiffChangedType.old.name.split( + // ' ' + // )[0] + // } + // {' '} + // { + // fieldDiffChangedType.new.name.split( + // ' ' + // )[0] + // } + // + // ) : + isFieldAttributeChanged || + fieldDiffChangedType ? ( + <> + + { + ( + fieldDiffChangedType?.old + ?.name ?? + field.type.name + ).split(' ')[0] + } + {showFieldAttributes + ? generateDBFieldSuffix( + { + ...field, + ...{ + precision: + fieldDiffChangedPrecision?.old ?? + field.precision, + scale: + fieldDiffChangedScale?.old ?? + field.scale, + characterMaximumLength: + fieldDiffChangedCharacterMaximumLength?.old ?? + field.characterMaximumLength, + isArray: + fieldDiffChangedIsArray?.old ?? + field.isArray, + }, + }, + { + databaseType, + } + ) + : field.isArray + ? '[]' + : ''} + {' '} { - fieldDiffChangedType.old.name.split( - ' ' - )[0] + ( + fieldDiffChangedType?.new + ?.name ?? field.type.name + ).split(' ')[0] } - {' '} - { - fieldDiffChangedType.new.name.split( - ' ' - )[0] - } - - ) : ( - `${field.type.name.split(' ')[0]}${ - showFieldAttributes - ? generateDBFieldSuffix({ - ...field, - ...{ - precision: - fieldDiffChangedPrecision?.new ?? - field.precision, - scale: - fieldDiffChangedScale?.new ?? - field.scale, - characterMaximumLength: - fieldDiffChangedCharacterMaximumLength?.new ?? - field.characterMaximumLength, - }, - }) - : '' - }` - )} + {showFieldAttributes + ? generateDBFieldSuffix( + { + ...field, + ...{ + precision: + fieldDiffChangedPrecision?.new ?? + field.precision, + scale: + fieldDiffChangedScale?.new ?? + field.scale, + characterMaximumLength: + fieldDiffChangedCharacterMaximumLength?.new ?? + field.characterMaximumLength, + isArray: + fieldDiffChangedIsArray?.new ?? + field.isArray, + }, + }, + { + databaseType, + } + ) + : (fieldDiffChangedIsArray?.new ?? + field.isArray) + ? '[]' + : ''} + + ) : ( + `${field.type.name.split(' ')[0]}${ + showFieldAttributes + ? generateDBFieldSuffix(field, { + databaseType, + }) + : field.isArray + ? '[]' + : '' + }` + ) + } {fieldDiffChangedNullable ? ( fieldDiffChangedNullable.new ? ( ? @@ -485,21 +573,21 @@ export const TableNodeField: React.FC = React.memo( )}
- {readonly ? null : ( -
- -
- )}
+ {readonly ? null : ( +
+ +
+ )}
); }, diff --git a/src/pages/editor-page/side-panel/tables-section/table-list/table-list-item/table-list-item-content/table-field/table-field-modal/table-field-modal.tsx b/src/pages/editor-page/side-panel/tables-section/table-list/table-list-item/table-list-item-content/table-field/table-field-modal/table-field-modal.tsx index 0151170c..124dd445 100644 --- a/src/pages/editor-page/side-panel/tables-section/table-list/table-list-item/table-list-item-content/table-field/table-field-modal/table-field-modal.tsx +++ b/src/pages/editor-page/side-panel/tables-section/table-list/table-list-item/table-list-item-content/table-field/table-field-modal/table-field-modal.tsx @@ -8,6 +8,7 @@ import type { FieldAttributeRange } from '@/lib/data/data-types/data-types'; import { findDataTypeDataById, supportsAutoIncrementDataType, + supportsArrayDataType, } from '@/lib/data/data-types/data-types'; import { Popover, @@ -89,6 +90,7 @@ export const TableFieldPopover: React.FC = ({ unique: localField.unique, default: localField.default, increment: localField.increment, + isArray: localField.isArray, }); } prevFieldRef.current = localField; @@ -104,6 +106,11 @@ export const TableFieldPopover: React.FC = ({ [field.type.name] ); + const supportsArray = useMemo( + () => supportsArrayDataType(field.type.name, databaseType), + [field.type.name, databaseType] + ); + return ( = ({ /> ) : null} + {supportsArray ? ( +
+ + + setLocalField((current) => ({ + ...current, + isArray: !!value, + })) + } + /> +
+ ) : null}
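A minimal usage sketch (not part of the patch) of the helpers this change introduces: databaseSupportsArrays, supportsArrayDataType, and the array-aware generateDBFieldSuffix. The import paths and signatures come from the diff; the sample field object and the logged values are illustrative assumptions.

import { DatabaseType } from '@/lib/domain/database-type';
import { databaseSupportsArrays } from '@/lib/domain/database-capabilities';
import { supportsArrayDataType } from '@/lib/data/data-types/data-types';
import { generateDBFieldSuffix, type DBField } from '@/lib/domain/db-field';

// The capability flag gates everything: only databases with supportsArrays
// expose array fields in the UI, SQL export, and DBML import.
const pgHasArrays = databaseSupportsArrays(DatabaseType.POSTGRESQL); // true
const mysqlHasArrays = databaseSupportsArrays(DatabaseType.MYSQL); // false

// Per-type check: serial/bigserial/smallserial stay excluded even on PostgreSQL.
const varcharOk = supportsArrayDataType('varchar', DatabaseType.POSTGRESQL); // true
const serialOk = supportsArrayDataType('serial', DatabaseType.POSTGRESQL); // false

// Hypothetical field, used only to show the suffix behaviour.
const tags: DBField = {
    id: 'field-1',
    name: 'tags',
    type: { id: 'varchar', name: 'varchar' },
    primaryKey: false,
    unique: false,
    nullable: true,
    createdAt: Date.now(),
    characterMaximumLength: '50',
    isArray: true,
};

// The suffix ends with '[]' because isArray is set and varchar arrays are
// allowed for PostgreSQL; with DatabaseType.MYSQL the same field would
// render without the array notation.
const suffix = generateDBFieldSuffix(tags, {
    databaseType: DatabaseType.POSTGRESQL,
});

console.log(pgHasArrays, mysqlHasArrays, varcharOk, serialOk, suffix);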
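A sketch of the array-aware DBML flow added above, under the assumption of a small hypothetical DBML snippet: verifyDBML and importDBMLToDiagram now take the target databaseType, and array columns are rejected up front for databases without array support. Expected results follow the tests included in the patch.

import { DatabaseType } from '@/lib/domain/database-type';
import { verifyDBML } from '@/lib/dbml/dbml-import/verify-dbml';
import { importDBMLToDiagram } from '@/lib/dbml/dbml-import/dbml-import';

const dbml = `
Table heroes {
  id bigint [pk]
  abilities varchar(100)[]
}
`;

// PostgreSQL supports arrays, so validation passes and parsing proceeds.
const pgCheck = verifyDBML(dbml, { databaseType: DatabaseType.POSTGRESQL });
// pgCheck.hasError === false

// MySQL has no array capability, so the same content is rejected before
// parsing; the returned error carries the line/column of the offending field.
const mysqlCheck = verifyDBML(dbml, { databaseType: DatabaseType.MYSQL });
// mysqlCheck.hasError === true

const run = async () => {
    const diagram = await importDBMLToDiagram(dbml, {
        databaseType: DatabaseType.POSTGRESQL,
    });
    const abilities = diagram.tables?.[0]?.fields.find(
        (f) => f.name === 'abilities'
    );
    // Expected, per the import tests above: isArray is true and the
    // varchar(100) length is preserved on the field.
    console.log(pgCheck.hasError, mysqlCheck.hasError, abilities?.isArray);
};

void run();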