mirror of https://github.com/chartdb/chartdb.git (synced 2025-10-23 07:11:56 +00:00)
fix: add support for arrays (#949)
* feat: add array field support with diff visualization
* some refactor
* fix
* fix
* fix

Co-authored-by: Guy Ben-Aharon <baguy3@gmail.com>
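Before the per-file hunks, a minimal sketch of the round trip this PR enables, pieced together from the new test files further down (importDBMLToDiagram, exportBaseSQL and the new isArray field flag all appear there). The import paths are assumed from the repo's "@/lib" alias; this is an illustration, not code from the PR:

// Illustration only: array-typed DBML field -> diagram field with isArray -> SQL with a [] suffix.
import { importDBMLToDiagram } from '@/lib/dbml/dbml-import/dbml-import';
import { exportBaseSQL } from '@/lib/data/sql-export/export-sql-script';
import { DatabaseType } from '@/lib/domain/database-type';

const roundTrip = async () => {
    const dbml = `
Table "spells" {
    "id" uuid [pk]
    "components" text[]
}`;

    // The importer now records the [] suffix on the field instead of degrading it to text.
    const diagram = await importDBMLToDiagram(dbml, {
        databaseType: DatabaseType.POSTGRESQL,
    });
    const components = diagram.tables?.[0]?.fields.find(
        (f) => f.name === 'components'
    );
    console.log(components?.isArray); // true

    // SQL export appends "[]" after the type (and any size/precision).
    const sql = exportBaseSQL({
        diagram,
        targetDatabaseType: DatabaseType.POSTGRESQL,
        isDBMLFlow: true,
    });
    console.log(sql.includes('"components" text[]')); // true
};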
@@ -105,6 +105,11 @@ export interface DiffContext {
     }: {
         fieldId: string;
     }) => { old: number; new: number } | null;
+    getFieldNewIsArray: ({
+        fieldId,
+    }: {
+        fieldId: string;
+    }) => { old: boolean; new: boolean } | null;
 
     // relationship diff
     checkIfNewRelationship: ({
@@ -447,6 +447,30 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
         [diffMap]
     );
 
+    const getFieldNewIsArray = useCallback<DiffContext['getFieldNewIsArray']>(
+        ({ fieldId }) => {
+            const fieldKey = getDiffMapKey({
+                diffObject: 'field',
+                objectId: fieldId,
+                attribute: 'isArray',
+            });
+
+            if (diffMap.has(fieldKey)) {
+                const diff = diffMap.get(fieldKey);
+
+                if (diff?.type === 'changed') {
+                    return {
+                        old: diff.oldValue as boolean,
+                        new: diff.newValue as boolean,
+                    };
+                }
+            }
+
+            return null;
+        },
+        [diffMap]
+    );
+
     const checkIfNewRelationship = useCallback<
         DiffContext['checkIfNewRelationship']
     >(
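A hypothetical consumer of the new callback, for orientation only; getFieldNewIsArray comes from DiffContext (e.g. via a useDiff() hook, which is not part of this excerpt):

// Sketch: turn the recorded isArray change into a label for the diff view.
const describeIsArrayChange = (
    getFieldNewIsArray: DiffContext['getFieldNewIsArray'],
    fieldId: string
): string | null => {
    const diff = getFieldNewIsArray({ fieldId });
    if (!diff) {
        return null; // the isArray attribute did not change for this field
    }
    // e.g. "scalar -> array" rendered next to the type in the field diff
    return `${diff.old ? 'array' : 'scalar'} -> ${diff.new ? 'array' : 'scalar'}`;
};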
@@ -520,6 +544,7 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
         getFieldNewCharacterMaximumLength,
         getFieldNewScale,
         getFieldNewPrecision,
+        getFieldNewIsArray,
 
         // relationship diff
         checkIfNewRelationship,
@@ -140,7 +140,7 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
 
         if (importMethod === 'dbml') {
             // Validate DBML by parsing it
-            const validateResponse = verifyDBML(scriptResult);
+            const validateResponse = verifyDBML(scriptResult, { databaseType });
             if (!validateResponse.hasError) {
                 setErrorMessage('');
                 setSqlValidation({
@@ -1,7 +1,7 @@
 import { useCallback, useMemo, useState, useEffect, useRef } from 'react';
 import { useChartDB } from './use-chartdb';
 import { useDebounce } from './use-debounce-v2';
-import type { DBField, DBTable } from '@/lib/domain';
+import type { DatabaseType, DBField, DBTable } from '@/lib/domain';
 import type {
     SelectBoxOption,
     SelectBoxProps,
@@ -9,49 +9,60 @@ import type {
 import {
     dataTypeDataToDataType,
     sortedDataTypeMap,
+    supportsArrayDataType,
 } from '@/lib/data/data-types/data-types';
 import { generateDBFieldSuffix } from '@/lib/domain/db-field';
 import type { DataTypeData } from '@/lib/data/data-types/data-types';
 
 const generateFieldRegexPatterns = (
-    dataType: DataTypeData
+    dataType: DataTypeData,
+    databaseType: DatabaseType
 ): {
     regex?: string;
     extractRegex?: RegExp;
 } => {
+    const typeName = dataType.name;
+    const supportsArrays = supportsArrayDataType(dataType.id, databaseType);
+    const arrayPattern = supportsArrays ? '(\\[\\])?' : '';
+
     if (!dataType.fieldAttributes) {
-        return { regex: undefined, extractRegex: undefined };
+        // For types without field attributes, support plain type + optional array notation
+        return {
+            regex: `^${typeName}${arrayPattern}$`,
+            extractRegex: new RegExp(`^${typeName}${arrayPattern}$`),
+        };
     }
 
-    const typeName = dataType.name;
     const fieldAttributes = dataType.fieldAttributes;
 
     if (fieldAttributes.hasCharMaxLength) {
         if (fieldAttributes.hasCharMaxLengthOption) {
             return {
-                regex: `^${typeName}\\((\\d+|[mM][aA][xX])\\)$`,
-                extractRegex: /\((\d+|max)\)/i,
+                regex: `^${typeName}\\((\\d+|[mM][aA][xX])\\)${arrayPattern}$`,
+                extractRegex: supportsArrays
+                    ? /\((\d+|max)\)(\[\])?/i
+                    : /\((\d+|max)\)/i,
             };
         }
         return {
-            regex: `^${typeName}\\(\\d+\\)$`,
-            extractRegex: /\((\d+)\)/,
+            regex: `^${typeName}\\(\\d+\\)${arrayPattern}$`,
+            extractRegex: supportsArrays ? /\((\d+)\)(\[\])?/ : /\((\d+)\)/,
         };
     }
 
     if (fieldAttributes.precision && fieldAttributes.scale) {
         return {
-            regex: `^${typeName}\\s*\\(\\s*\\d+\\s*(?:,\\s*\\d+\\s*)?\\)$`,
+            regex: `^${typeName}\\s*\\(\\s*\\d+\\s*(?:,\\s*\\d+\\s*)?\\)${arrayPattern}$`,
             extractRegex: new RegExp(
-                `${typeName}\\s*\\(\\s*(\\d+)\\s*(?:,\\s*(\\d+)\\s*)?\\)`
+                `${typeName}\\s*\\(\\s*(\\d+)\\s*(?:,\\s*(\\d+)\\s*)?\\)${arrayPattern}`
             ),
         };
     }
 
     if (fieldAttributes.precision) {
         return {
-            regex: `^${typeName}\\s*\\(\\s*\\d+\\s*\\)$`,
-            extractRegex: /\((\d+)\)/,
+            regex: `^${typeName}\\s*\\(\\s*\\d+\\s*\\)${arrayPattern}$`,
            extractRegex: supportsArrays ? /\((\d+)\)(\[\])?/ : /\((\d+)\)/,
         };
     }
 
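Rough illustration of what the patched generateFieldRegexPatterns produces for a varchar-like type on an array-capable database (values inlined for clarity; the real code builds them from DataTypeData):

const regex = new RegExp('^varchar\\(\\d+\\)(\\[\\])?$');

regex.test('varchar(50)');   // true
regex.test('varchar(50)[]'); // true  - trailing [] now accepted
regex.test('varchar[]');     // false - this branch still requires a size

// The extract regex keeps the size in group 1 and the optional [] as the last
// group, which is what the hook below checks to decide whether to set isArray.
const match = 'varchar(50)[]'.match(/\((\d+)\)(\[\])?/);
// match?.[1] === '50', match?.[2] === '[]'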
@@ -118,7 +129,10 @@ export const useUpdateTableField = (
     const standardTypes: SelectBoxOption[] = sortedDataTypeMap[
         databaseType
     ].map((type) => {
-        const regexPatterns = generateFieldRegexPatterns(type);
+        const regexPatterns = generateFieldRegexPatterns(
+            type,
+            databaseType
+        );
 
         return {
             label: type.name,
@@ -162,8 +176,13 @@ export const useUpdateTableField = (
             let characterMaximumLength: string | undefined = undefined;
             let precision: number | undefined = undefined;
             let scale: number | undefined = undefined;
+            let isArray: boolean | undefined = undefined;
 
             if (regexMatches?.length) {
+                // Check if the last captured group is the array indicator []
+                const lastMatch = regexMatches[regexMatches.length - 1];
+                const hasArrayIndicator = lastMatch === '[]';
+
                 if (dataType?.fieldAttributes?.hasCharMaxLength) {
                     characterMaximumLength = regexMatches[1]?.toLowerCase();
                 } else if (
@@ -177,6 +196,17 @@ export const useUpdateTableField = (
                 } else if (dataType?.fieldAttributes?.precision) {
                     precision = parseInt(regexMatches[1]);
                 }
+
+                // Set isArray if the array indicator was found and the type supports arrays
+                if (hasArrayIndicator) {
+                    const typeId = value as string;
+                    if (supportsArrayDataType(typeId, databaseType)) {
+                        isArray = true;
+                    }
+                } else {
+                    // Explicitly set to false/undefined if no array indicator
+                    isArray = undefined;
+                }
             } else {
                 if (
                     dataType?.fieldAttributes?.hasCharMaxLength &&
@@ -198,6 +228,7 @@ export const useUpdateTableField = (
                 characterMaximumLength,
                 precision,
                 scale,
+                isArray,
                 increment: undefined,
                 default: undefined,
                 type: dataTypeDataToDataType(
@@ -299,11 +330,17 @@ export const useUpdateTableField = (
     // Utility function to generate field suffix for display
     const generateFieldSuffix = useCallback(
         (typeId?: string) => {
-            return generateDBFieldSuffix(field, {
-                databaseType,
-                forceExtended: true,
-                typeId,
-            });
+            return generateDBFieldSuffix(
+                {
+                    ...field,
+                    isArray: field.isArray && typeId === field.type.id,
+                },
+                {
+                    databaseType,
+                    forceExtended: true,
+                    typeId,
+                }
+            );
         },
         [field, databaseType]
     );
@@ -129,9 +129,6 @@ export const clickhouseDataTypes: readonly DataTypeData[] = [
     { name: 'enum', id: 'enum' },
     { name: 'lowcardinality', id: 'lowcardinality' },
 
-    // Array Type
-    { name: 'array', id: 'array' },
-
     // Tuple Type
     { name: 'tuple', id: 'tuple' },
     { name: 'map', id: 'map' },
@@ -1,5 +1,6 @@
 import { z } from 'zod';
 import { DatabaseType } from '../../domain/database-type';
+import { databaseSupportsArrays } from '../../domain/database-capabilities';
 import { clickhouseDataTypes } from './clickhouse-data-types';
 import { genericDataTypes } from './generic-data-types';
 import { mariadbDataTypes } from './mariadb-data-types';
@@ -165,3 +166,22 @@ export const supportsAutoIncrementDataType = (
         'decimal',
     ].includes(dataTypeName.toLocaleLowerCase());
 };
+
+const ARRAY_INCOMPATIBLE_TYPES = [
+    'serial',
+    'bigserial',
+    'smallserial',
+] as const;
+
+export const supportsArrayDataType = (
+    dataTypeName: string,
+    databaseType: DatabaseType
+): boolean => {
+    if (!databaseSupportsArrays(databaseType)) {
+        return false;
+    }
+
+    return !ARRAY_INCOMPATIBLE_TYPES.includes(
+        dataTypeName.toLowerCase() as (typeof ARRAY_INCOMPATIBLE_TYPES)[number]
+    );
+};
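Usage sketch for the new helper (both functions appear in this diff; the concrete results assume PostgreSQL is reported as array-capable by databaseSupportsArrays and MySQL is not):

supportsArrayDataType('varchar', DatabaseType.POSTGRESQL); // true
supportsArrayDataType('serial', DatabaseType.POSTGRESQL);  // false - serial counters are excluded
supportsArrayDataType('varchar', DatabaseType.MYSQL);      // false, assuming MySQL lacks array support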
@@ -97,7 +97,6 @@ export const postgresDataTypes: readonly DataTypeData[] = [
     { name: 'tsvector', id: 'tsvector' },
     { name: 'tsquery', id: 'tsquery' },
     { name: 'xml', id: 'xml' },
-    { name: 'array', id: 'array' },
     { name: 'int4range', id: 'int4range' },
     { name: 'int8range', id: 'int8range' },
     { name: 'numrange', id: 'numrange' },
src/lib/data/sql-export/__tests__/array-fields.test.ts (new file, 356 lines)
@@ -0,0 +1,356 @@
import { describe, it, expect } from 'vitest';
import { generateId } from '@/lib/utils';
import { exportBaseSQL } from '../export-sql-script';
import { DatabaseType } from '@/lib/domain/database-type';
import type { Diagram } from '@/lib/domain/diagram';

describe('SQL Export - Array Fields (Fantasy RPG Theme)', () => {
    it('should export array fields for magical spell components', () => {
        const diagram: Diagram = {
            id: 'test-diagram',
            name: 'Magical Spell System',
            databaseType: DatabaseType.POSTGRESQL,
            tables: [
                {
                    id: generateId(),
                    name: 'spells',
                    schema: '',
                    fields: [
                        {
                            id: generateId(),
                            name: 'id',
                            type: { id: 'uuid', name: 'uuid' },
                            primaryKey: true,
                            unique: true,
                            nullable: false,
                            createdAt: Date.now(),
                        },
                        {
                            id: generateId(),
                            name: 'name',
                            type: { id: 'varchar', name: 'varchar' },
                            primaryKey: false,
                            unique: false,
                            nullable: false,
                            createdAt: Date.now(),
                            characterMaximumLength: '200',
                        },
                        {
                            id: generateId(),
                            name: 'components',
                            type: { id: 'text', name: 'text' },
                            primaryKey: false,
                            unique: false,
                            nullable: true,
                            createdAt: Date.now(),
                            isArray: true,
                            comments: 'Magical components needed for the spell',
                        },
                        {
                            id: generateId(),
                            name: 'elemental_types',
                            type: { id: 'varchar', name: 'varchar' },
                            primaryKey: false,
                            unique: false,
                            nullable: true,
                            createdAt: Date.now(),
                            characterMaximumLength: '50',
                            isArray: true,
                            comments:
                                'Elements involved: fire, water, earth, air',
                        },
                    ],
                    indexes: [],
                    x: 0,
                    y: 0,
                    color: '#3b82f6',
                    isView: false,
                    createdAt: Date.now(),
                    order: 0,
                },
            ],
            relationships: [],
            createdAt: new Date(),
            updatedAt: new Date(),
        };

        const sql = exportBaseSQL({
            diagram,
            targetDatabaseType: DatabaseType.POSTGRESQL,
            isDBMLFlow: true,
        });

        expect(sql).toContain('CREATE TABLE "spells"');
        expect(sql).toContain('"components" text[]');
        expect(sql).toContain('"elemental_types" varchar(50)[]');
    });

    it('should export array fields for hero inventory system', () => {
        const diagram: Diagram = {
            id: 'test-diagram',
            name: 'RPG Inventory System',
            databaseType: DatabaseType.POSTGRESQL,
            tables: [
                {
                    id: generateId(),
                    name: 'heroes',
                    schema: 'game',
                    fields: [
                        {
                            id: generateId(),
                            name: 'id',
                            type: { id: 'bigint', name: 'bigint' },
                            primaryKey: true,
                            unique: true,
                            nullable: false,
                            createdAt: Date.now(),
                        },
                        {
                            id: generateId(),
                            name: 'name',
                            type: { id: 'varchar', name: 'varchar' },
                            primaryKey: false,
                            unique: false,
                            nullable: false,
                            createdAt: Date.now(),
                            characterMaximumLength: '100',
                        },
                        {
                            id: generateId(),
                            name: 'abilities',
                            type: { id: 'varchar', name: 'varchar' },
                            primaryKey: false,
                            unique: false,
                            nullable: true,
                            createdAt: Date.now(),
                            characterMaximumLength: '100',
                            isArray: true,
                            comments:
                                'Special abilities like Stealth, Fireball, etc',
                        },
                        {
                            id: generateId(),
                            name: 'inventory_slots',
                            type: { id: 'integer', name: 'integer' },
                            primaryKey: false,
                            unique: false,
                            nullable: true,
                            createdAt: Date.now(),
                            isArray: true,
                            comments: 'Item IDs in inventory',
                        },
                        {
                            id: generateId(),
                            name: 'skill_levels',
                            type: { id: 'decimal', name: 'decimal' },
                            primaryKey: false,
                            unique: false,
                            nullable: true,
                            createdAt: Date.now(),
                            precision: 5,
                            scale: 2,
                            isArray: true,
                            comments: 'Skill proficiency levels',
                        },
                    ],
                    indexes: [],
                    x: 0,
                    y: 0,
                    color: '#ef4444',
                    isView: false,
                    createdAt: Date.now(),
                    order: 0,
                },
            ],
            relationships: [],
            createdAt: new Date(),
            updatedAt: new Date(),
        };

        const sql = exportBaseSQL({
            diagram,
            targetDatabaseType: DatabaseType.POSTGRESQL,
            isDBMLFlow: true,
        });

        expect(sql).toContain('CREATE TABLE "game"."heroes"');
        expect(sql).toContain('"abilities" varchar(100)[]');
        expect(sql).toContain('"inventory_slots" integer[]');
        expect(sql).toContain('"skill_levels" decimal(5, 2)[]');
    });

    it('should export non-array fields normally when isArray is false or undefined', () => {
        const diagram: Diagram = {
            id: 'test-diagram',
            name: 'Quest System',
            databaseType: DatabaseType.POSTGRESQL,
            tables: [
                {
                    id: generateId(),
                    name: 'quests',
                    schema: '',
                    fields: [
                        {
                            id: generateId(),
                            name: 'id',
                            type: { id: 'uuid', name: 'uuid' },
                            primaryKey: true,
                            unique: true,
                            nullable: false,
                            createdAt: Date.now(),
                        },
                        {
                            id: generateId(),
                            name: 'title',
                            type: { id: 'varchar', name: 'varchar' },
                            primaryKey: false,
                            unique: false,
                            nullable: false,
                            createdAt: Date.now(),
                            characterMaximumLength: '200',
                            isArray: false,
                        },
                        {
                            id: generateId(),
                            name: 'description',
                            type: { id: 'text', name: 'text' },
                            primaryKey: false,
                            unique: false,
                            nullable: true,
                            createdAt: Date.now(),
                            // isArray is undefined - should not be treated as array
                        },
                    ],
                    indexes: [],
                    x: 0,
                    y: 0,
                    color: '#8b5cf6',
                    isView: false,
                    createdAt: Date.now(),
                    order: 0,
                },
            ],
            relationships: [],
            createdAt: new Date(),
            updatedAt: new Date(),
        };

        const sql = exportBaseSQL({
            diagram,
            targetDatabaseType: DatabaseType.POSTGRESQL,
            isDBMLFlow: true,
        });

        expect(sql).toContain('"title" varchar(200)');
        expect(sql).not.toContain('"title" varchar(200)[]');
        expect(sql).toContain('"description" text');
        expect(sql).not.toContain('"description" text[]');
    });

    it('should handle mixed array and non-array fields in magical creatures table', () => {
        const diagram: Diagram = {
            id: 'test-diagram',
            name: 'Bestiary System',
            databaseType: DatabaseType.POSTGRESQL,
            tables: [
                {
                    id: generateId(),
                    name: 'magical_creatures',
                    schema: 'bestiary',
                    fields: [
                        {
                            id: generateId(),
                            name: 'id',
                            type: { id: 'bigint', name: 'bigint' },
                            primaryKey: true,
                            unique: true,
                            nullable: false,
                            createdAt: Date.now(),
                        },
                        {
                            id: generateId(),
                            name: 'species_name',
                            type: { id: 'varchar', name: 'varchar' },
                            primaryKey: false,
                            unique: false,
                            nullable: false,
                            createdAt: Date.now(),
                            characterMaximumLength: '100',
                        },
                        {
                            id: generateId(),
                            name: 'habitats',
                            type: { id: 'varchar', name: 'varchar' },
                            primaryKey: false,
                            unique: false,
                            nullable: true,
                            createdAt: Date.now(),
                            characterMaximumLength: '80',
                            isArray: true,
                            comments:
                                'Preferred habitats: forest, mountain, swamp',
                        },
                        {
                            id: generateId(),
                            name: 'danger_level',
                            type: { id: 'integer', name: 'integer' },
                            primaryKey: false,
                            unique: false,
                            nullable: false,
                            createdAt: Date.now(),
                            default: '1',
                        },
                        {
                            id: generateId(),
                            name: 'resistances',
                            type: { id: 'varchar', name: 'varchar' },
                            primaryKey: false,
                            unique: false,
                            nullable: true,
                            createdAt: Date.now(),
                            characterMaximumLength: '50',
                            isArray: true,
                            comments: 'Damage resistances',
                        },
                        {
                            id: generateId(),
                            name: 'is_tameable',
                            type: { id: 'boolean', name: 'boolean' },
                            primaryKey: false,
                            unique: false,
                            nullable: false,
                            createdAt: Date.now(),
                            default: 'false',
                        },
                    ],
                    indexes: [],
                    x: 0,
                    y: 0,
                    color: '#10b981',
                    isView: false,
                    createdAt: Date.now(),
                    order: 0,
                },
            ],
            relationships: [],
            createdAt: new Date(),
            updatedAt: new Date(),
        };

        const sql = exportBaseSQL({
            diagram,
            targetDatabaseType: DatabaseType.POSTGRESQL,
            isDBMLFlow: true,
        });

        expect(sql).toContain('CREATE TABLE "bestiary"."magical_creatures"');
        expect(sql).toContain('"species_name" varchar(100)');
        expect(sql).not.toContain('"species_name" varchar(100)[]');
        expect(sql).toContain('"habitats" varchar(80)[]');
        expect(sql).toContain('"danger_level" integer');
        expect(sql).not.toContain('"danger_level" integer[]');
        expect(sql).toContain('"resistances" varchar(50)[]');
        expect(sql).toContain('"is_tameable" boolean');
        expect(sql).not.toContain('"is_tameable" boolean[]');
    });
});
@@ -1,9 +1,6 @@
 import type { Diagram } from '../../domain/diagram';
 import { OPENAI_API_KEY, OPENAI_API_ENDPOINT, LLM_MODEL_NAME } from '@/lib/env';
-import {
-    DatabaseType,
-    databaseTypesWithCommentSupport,
-} from '@/lib/domain/database-type';
+import { DatabaseType } from '@/lib/domain/database-type';
 import type { DBTable } from '@/lib/domain/db-table';
 import { dataTypeMap, type DataType } from '../data-types/data-types';
 import { generateCacheKey, getFromCache, setInCache } from './export-sql-cache';
@@ -12,6 +9,7 @@ import { exportPostgreSQL } from './export-per-type/postgresql';
 import { exportSQLite } from './export-per-type/sqlite';
 import { exportMySQL } from './export-per-type/mysql';
 import { escapeSQLComment } from './export-per-type/common';
+import { databaseTypesWithCommentSupport } from '@/lib/domain/database-capabilities';
 
 // Function to format default values with proper quoting
 const formatDefaultValue = (value: string): string => {
@@ -343,6 +341,7 @@ export const exportBaseSQL = ({
         }
 
         const quotedFieldName = getQuotedFieldName(field.name, isDBMLFlow);
+
         sqlScript += `  ${quotedFieldName} ${typeName}`;
 
         // Add size for character types
@@ -385,6 +384,11 @@ export const exportBaseSQL = ({
             }
         }
 
+        // Add array suffix if field is an array (after type size and precision)
+        if (field.isArray) {
+            sqlScript += '[]';
+        }
+
         // Handle NOT NULL constraint
         if (!field.nullable) {
             sqlScript += ' NOT NULL';
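Shape of the emitted column line after this change, mirroring the assertions in the new test file that follows; the array suffix lands after the type, size and precision, and before NOT NULL:

// Simplified re-creation of the export loop for a single field.
let sqlScript = '';
const field = { name: 'skill_levels', isArray: true, nullable: true };
sqlScript += `  "${field.name}" decimal(5, 2)`; // type + precision, unchanged
if (field.isArray) {
    sqlScript += '[]';
}
if (!field.nullable) {
    sqlScript += ' NOT NULL';
}
// sqlScript now ends with: "skill_levels" decimal(5, 2)[]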
src/lib/dbml/dbml-import/__tests__/dbml-array-fields.test.ts (new file, 317 lines)
@@ -0,0 +1,317 @@
import { describe, it, expect } from 'vitest';
import { importDBMLToDiagram } from '../dbml-import';
import { generateDBMLFromDiagram } from '../../dbml-export/dbml-export';
import { DatabaseType } from '@/lib/domain/database-type';

describe('DBML Array Fields - Fantasy RPG Theme', () => {
    describe('Import - Spell and Magic Arrays', () => {
        it('should import spell components as array fields', async () => {
            const dbml = `
Table "magic"."spells" {
    "id" uuid [pk, not null]
    "name" varchar(200) [not null]
    "level" integer [not null]
    "components" text[] [note: 'Magical components: bat wing, dragon scale, phoenix feather']
    "elemental_types" varchar(50)[] [note: 'Elements: fire, water, earth, air']
    "mana_cost" integer [not null]
    "created_at" timestamp [not null]

    Indexes {
        (name, level) [unique, name: "unique_spell"]
    }
}
`;

            const result = await importDBMLToDiagram(dbml, {
                databaseType: DatabaseType.POSTGRESQL,
            });

            expect(result.tables).toHaveLength(1);

            const table = result.tables![0];
            expect(table.name).toBe('spells');
            expect(table.schema).toBe('magic');

            // Find the array fields
            const components = table.fields.find(
                (f) => f.name === 'components'
            );
            const elementalTypes = table.fields.find(
                (f) => f.name === 'elemental_types'
            );

            // Verify they are marked as arrays
            expect(components).toBeDefined();
            expect(components?.isArray).toBe(true);
            expect(components?.type.name).toBe('text');

            expect(elementalTypes).toBeDefined();
            expect(elementalTypes?.isArray).toBe(true);
            expect(elementalTypes?.type.name).toBe('varchar');
            expect(elementalTypes?.characterMaximumLength).toBe('50');

            // Verify non-array fields don't have isArray set
            const idField = table.fields.find((f) => f.name === 'id');
            expect(idField?.isArray).toBeUndefined();
        });

        it('should import hero inventory with various array types', async () => {
            const dbml = `
Table "heroes" {
    "id" bigint [pk]
    "name" varchar(100) [not null]
    "abilities" varchar(100)[]
    "inventory_slots" integer[]
    "skill_levels" decimal(5, 2)[]
    "quest_log" text[]
}
`;

            const result = await importDBMLToDiagram(dbml, {
                databaseType: DatabaseType.POSTGRESQL,
            });

            const table = result.tables![0];

            const abilities = table.fields.find((f) => f.name === 'abilities');
            expect(abilities?.isArray).toBe(true);
            expect(abilities?.type.name).toBe('varchar');
            expect(abilities?.characterMaximumLength).toBe('100');

            const inventorySlots = table.fields.find(
                (f) => f.name === 'inventory_slots'
            );
            expect(inventorySlots?.isArray).toBe(true);
            expect(inventorySlots?.type.name).toBe('integer');

            const skillLevels = table.fields.find(
                (f) => f.name === 'skill_levels'
            );
            expect(skillLevels?.isArray).toBe(true);
            expect(skillLevels?.type.name).toBe('decimal');
            expect(skillLevels?.precision).toBe(5);
            expect(skillLevels?.scale).toBe(2);

            const questLog = table.fields.find((f) => f.name === 'quest_log');
            expect(questLog?.isArray).toBe(true);
            expect(questLog?.type.name).toBe('text');
        });

        it('should handle mixed array and non-array fields in creature table', async () => {
            const dbml = `
Table "bestiary"."creatures" {
    "id" uuid [pk]
    "species_name" varchar(100) [not null]
    "habitats" varchar(50)[]
    "danger_level" integer [not null]
    "resistances" varchar(50)[]
    "is_tameable" boolean [not null]
}
`;

            const result = await importDBMLToDiagram(dbml, {
                databaseType: DatabaseType.POSTGRESQL,
            });

            const table = result.tables![0];

            // Non-array fields
            const id = table.fields.find((f) => f.name === 'id');
            expect(id?.isArray).toBeUndefined();

            const speciesName = table.fields.find(
                (f) => f.name === 'species_name'
            );
            expect(speciesName?.isArray).toBeUndefined();

            const dangerLevel = table.fields.find(
                (f) => f.name === 'danger_level'
            );
            expect(dangerLevel?.isArray).toBeUndefined();

            // Array fields
            const habitats = table.fields.find((f) => f.name === 'habitats');
            expect(habitats?.isArray).toBe(true);

            const resistances = table.fields.find(
                (f) => f.name === 'resistances'
            );
            expect(resistances?.isArray).toBe(true);
        });
    });

    describe('Round-trip - Quest and Adventure Arrays', () => {
        it('should preserve quest rewards array through export and re-import', async () => {
            const originalDbml = `
Table "adventures"."quests" {
    "id" uuid [pk, not null]
    "title" varchar(200) [not null]
    "difficulty" varchar(20) [not null]
    "reward_items" text[] [note: 'Legendary sword, enchanted armor, healing potion']
    "required_skills" varchar(100)[]
    "experience_points" integer [not null]
    "gold_reward" decimal(10, 2) [not null]
    "created_at" timestamp [not null]

    Indexes {
        (title, difficulty) [unique, name: "unique_quest"]
    }
}
`;

            // Import the DBML
            const diagram = await importDBMLToDiagram(originalDbml, {
                databaseType: DatabaseType.POSTGRESQL,
            });

            // Verify array fields were imported correctly
            const table = diagram.tables![0];
            const rewardItems = table.fields.find(
                (f) => f.name === 'reward_items'
            );
            const requiredSkills = table.fields.find(
                (f) => f.name === 'required_skills'
            );

            expect(rewardItems?.isArray).toBe(true);
            expect(requiredSkills?.isArray).toBe(true);

            // Export back to DBML
            const { standardDbml: exportedDbml } =
                generateDBMLFromDiagram(diagram);

            // Verify the exported DBML contains array syntax
            expect(exportedDbml).toContain('text[]');
            expect(exportedDbml).toContain('"reward_items" text[]');
            expect(exportedDbml).toContain('"required_skills" varchar(100)[]');

            // Re-import the exported DBML
            const reimportedDiagram = await importDBMLToDiagram(exportedDbml, {
                databaseType: DatabaseType.POSTGRESQL,
            });

            // Verify array fields are still marked as arrays
            const reimportedTable = reimportedDiagram.tables![0];
            const reimportedRewards = reimportedTable.fields.find(
                (f) => f.name === 'reward_items'
            );
            const reimportedSkills = reimportedTable.fields.find(
                (f) => f.name === 'required_skills'
            );

            expect(reimportedRewards?.isArray).toBe(true);
            expect(reimportedSkills?.isArray).toBe(true);
        });

        it('should handle guild members with different array types in round-trip', async () => {
            const originalDbml = `
Table "guilds"."members" {
    "id" uuid [pk]
    "name" varchar(100) [not null]
    "class_specializations" varchar(50)[]
    "completed_quest_ids" integer[]
    "skill_ratings" decimal(3, 1)[]
    "titles_earned" text[]
}
`;

            // Import
            const diagram = await importDBMLToDiagram(originalDbml, {
                databaseType: DatabaseType.POSTGRESQL,
            });

            // Export
            const { standardDbml: exportedDbml } =
                generateDBMLFromDiagram(diagram);

            // Verify exported DBML has correct array syntax with types
            expect(exportedDbml).toContain('varchar(50)[]');
            expect(exportedDbml).toContain('integer[]');
            expect(exportedDbml).toContain('decimal(3,1)[]');
            expect(exportedDbml).toContain('text[]');

            // Re-import
            const reimportedDiagram = await importDBMLToDiagram(exportedDbml, {
                databaseType: DatabaseType.POSTGRESQL,
            });

            const table = reimportedDiagram.tables![0];

            const classSpecs = table.fields.find(
                (f) => f.name === 'class_specializations'
            );
            expect(classSpecs?.isArray).toBe(true);
            expect(classSpecs?.characterMaximumLength).toBe('50');

            const questIds = table.fields.find(
                (f) => f.name === 'completed_quest_ids'
            );
            expect(questIds?.isArray).toBe(true);

            const skillRatings = table.fields.find(
                (f) => f.name === 'skill_ratings'
            );
            expect(skillRatings?.isArray).toBe(true);
            expect(skillRatings?.precision).toBe(3);
            expect(skillRatings?.scale).toBe(1);

            const titles = table.fields.find((f) => f.name === 'titles_earned');
            expect(titles?.isArray).toBe(true);
        });

        it('should preserve dungeon loot tables with mixed array and non-array fields', async () => {
            const originalDbml = `
Table "dungeons"."loot_tables" {
    "id" bigint [pk]
    "dungeon_name" varchar(150) [not null]
    "boss_name" varchar(100)
    "common_drops" text[]
    "rare_drops" text[]
    "legendary_drops" text[]
    "gold_range_min" integer [not null]
    "gold_range_max" integer [not null]
    "drop_rates" decimal(5, 2)[]
}
`;

            // Import, export, and re-import
            const diagram = await importDBMLToDiagram(originalDbml, {
                databaseType: DatabaseType.POSTGRESQL,
            });

            const { standardDbml: exportedDbml } =
                generateDBMLFromDiagram(diagram);

            const reimportedDiagram = await importDBMLToDiagram(exportedDbml, {
                databaseType: DatabaseType.POSTGRESQL,
            });

            const table = reimportedDiagram.tables![0];

            // Verify non-array fields
            expect(
                table.fields.find((f) => f.name === 'id')?.isArray
            ).toBeUndefined();
            expect(
                table.fields.find((f) => f.name === 'dungeon_name')?.isArray
            ).toBeUndefined();
            expect(
                table.fields.find((f) => f.name === 'gold_range_min')?.isArray
            ).toBeUndefined();

            // Verify array fields
            expect(
                table.fields.find((f) => f.name === 'common_drops')?.isArray
            ).toBe(true);
            expect(
                table.fields.find((f) => f.name === 'rare_drops')?.isArray
            ).toBe(true);
            expect(
                table.fields.find((f) => f.name === 'legendary_drops')?.isArray
            ).toBe(true);
            expect(
                table.fields.find((f) => f.name === 'drop_rates')?.isArray
            ).toBe(true);
        });
    });
});
@@ -1,6 +1,7 @@
 import { describe, it, expect } from 'vitest';
 import { importDBMLToDiagram } from '../dbml-import';
 import { DBCustomTypeKind } from '@/lib/domain/db-custom-type';
+import { DatabaseType } from '@/lib/domain/database-type';
 
 describe('DBML Import - Fantasy Examples', () => {
     describe('Magical Academy System', () => {
@@ -149,7 +150,9 @@ Table ranks {
     max_spell_level integer [not null]
 }`;
 
-        const diagram = await importDBMLToDiagram(magicalAcademyDBML);
+        const diagram = await importDBMLToDiagram(magicalAcademyDBML, {
+            databaseType: DatabaseType.POSTGRESQL,
+        });
 
         // Verify tables
         expect(diagram.tables).toHaveLength(8);
@@ -366,7 +369,9 @@ Note marketplace_note {
     'This marketplace handles both standard purchases and barter trades'
 }`;
 
-        const diagram = await importDBMLToDiagram(marketplaceDBML);
+        const diagram = await importDBMLToDiagram(marketplaceDBML, {
+            databaseType: DatabaseType.POSTGRESQL,
+        });
 
         // Verify tables
         expect(diagram.tables).toHaveLength(7);
@@ -567,7 +572,9 @@ Note quest_system_note {
     'Quest difficulty and status use enums that will be converted to varchar'
 }`;
 
-        const diagram = await importDBMLToDiagram(questSystemDBML);
+        const diagram = await importDBMLToDiagram(questSystemDBML, {
+            databaseType: DatabaseType.POSTGRESQL,
+        });
 
         // Verify tables
         expect(diagram.tables).toHaveLength(7);
@@ -657,7 +664,9 @@ Table projects {
     priority enum // inline enum without values - will be converted to varchar
 }`;
 
-        const diagram = await importDBMLToDiagram(dbmlWithEnums);
+        const diagram = await importDBMLToDiagram(dbmlWithEnums, {
+            databaseType: DatabaseType.POSTGRESQL,
+        });
 
         // Verify customTypes are created for enums
         expect(diagram.customTypes).toBeDefined();
@@ -744,7 +753,9 @@ Table orders {
     status order_status [not null]
 }`;
 
-        const diagram = await importDBMLToDiagram(dbmlWithEnumNotes);
+        const diagram = await importDBMLToDiagram(dbmlWithEnumNotes, {
+            databaseType: DatabaseType.POSTGRESQL,
+        });
 
         // Verify enum is created
         expect(diagram.customTypes).toHaveLength(1);
@@ -788,7 +799,9 @@ Table admin.users {
     status admin.status
 }`;
 
-        const diagram = await importDBMLToDiagram(dbmlWithSameEnumNames);
+        const diagram = await importDBMLToDiagram(dbmlWithSameEnumNames, {
+            databaseType: DatabaseType.POSTGRESQL,
+        });
 
         // Verify both enums are created
         expect(diagram.customTypes).toHaveLength(2);
@@ -891,7 +904,9 @@ Note dragon_note {
     'Dragons are very protective of their hoards!'
 }`;
 
-        const diagram = await importDBMLToDiagram(edgeCaseDBML);
+        const diagram = await importDBMLToDiagram(edgeCaseDBML, {
+            databaseType: DatabaseType.POSTGRESQL,
+        });
 
         // Verify preprocessing worked
         expect(diagram.tables).toHaveLength(2);
@@ -956,7 +971,9 @@ Note dragon_note {
 
         it('should handle empty DBML gracefully', async () => {
             const emptyDBML = '';
-            const diagram = await importDBMLToDiagram(emptyDBML);
+            const diagram = await importDBMLToDiagram(emptyDBML, {
+                databaseType: DatabaseType.POSTGRESQL,
+            });
 
             expect(diagram.tables).toHaveLength(0);
             expect(diagram.relationships).toHaveLength(0);
@@ -969,7 +986,9 @@ Note dragon_note {
 /* Multi-line
    comment */
 `;
-            const diagram = await importDBMLToDiagram(commentOnlyDBML);
+            const diagram = await importDBMLToDiagram(commentOnlyDBML, {
+                databaseType: DatabaseType.POSTGRESQL,
+            });
 
             expect(diagram.tables).toHaveLength(0);
             expect(diagram.relationships).toHaveLength(0);
@@ -980,7 +999,9 @@ Note dragon_note {
 Table empty_table {
     id int
 }`;
-            const diagram = await importDBMLToDiagram(minimalDBML);
+            const diagram = await importDBMLToDiagram(minimalDBML, {
+                databaseType: DatabaseType.POSTGRESQL,
+            });
 
             expect(diagram.tables).toHaveLength(1);
             expect(diagram.tables?.[0]?.fields).toHaveLength(1);
@@ -996,7 +1017,9 @@ Table "aa"."users" {
 Table "bb"."users" {
     id integer [primary key]
 }`;
-            const diagram = await importDBMLToDiagram(dbml);
+            const diagram = await importDBMLToDiagram(dbml, {
+                databaseType: DatabaseType.POSTGRESQL,
+            });
 
             expect(diagram.tables).toHaveLength(2);
 
@@ -1071,7 +1094,9 @@ Table "public_3"."comments" {
         id [unique, name: "public_3_index_1"]
     }
 }`;
-            const diagram = await importDBMLToDiagram(dbml);
+            const diagram = await importDBMLToDiagram(dbml, {
+                databaseType: DatabaseType.POSTGRESQL,
+            });
 
             // Verify tables
             expect(diagram.tables).toHaveLength(3);
@@ -1256,7 +1281,9 @@ Table products {
     Note: 'This table stores product information'
 }`;
 
-            const diagram = await importDBMLToDiagram(dbmlWithTableNote);
+            const diagram = await importDBMLToDiagram(dbmlWithTableNote, {
+                databaseType: DatabaseType.POSTGRESQL,
+            });
 
             expect(diagram.tables).toHaveLength(1);
             const productsTable = diagram.tables?.[0];
@@ -1273,7 +1300,9 @@ Table orders {
     total numeric(10,2) [note: 'Order total including tax']
 }`;
 
-            const diagram = await importDBMLToDiagram(dbmlWithFieldNote);
+            const diagram = await importDBMLToDiagram(dbmlWithFieldNote, {
+                databaseType: DatabaseType.POSTGRESQL,
+            });
 
             expect(diagram.tables).toHaveLength(1);
             const ordersTable = diagram.tables?.[0];
@@ -5,6 +5,7 @@ import {
     importDBMLToDiagram,
 } from '../dbml-import';
 import { Parser } from '@dbml/core';
+import { DatabaseType } from '@/lib/domain/database-type';
 
 describe('DBML Import', () => {
     describe('preprocessDBML', () => {
@@ -22,7 +23,7 @@ TableGroup "Test Group" [color: #CA4243] {
 Table posts {
     id int
 }`;
-            const result = preprocessDBML(dbml);
+            const { content: result } = preprocessDBML(dbml);
             expect(result).not.toContain('TableGroup');
             expect(result).toContain('Table users');
             expect(result).toContain('Table posts');
@@ -37,20 +38,20 @@ Table users {
 Note note_test {
     'This is a note'
 }`;
-            const result = preprocessDBML(dbml);
+            const { content: result } = preprocessDBML(dbml);
             expect(result).not.toContain('Note');
             expect(result).toContain('Table users');
         });
 
-        it('should convert array types to text', () => {
+        it('should remove array syntax while preserving base type', () => {
             const dbml = `
 Table users {
     tags text[]
     domains varchar[]
 }`;
-            const result = preprocessDBML(dbml);
+            const { content: result } = preprocessDBML(dbml);
             expect(result).toContain('tags text');
-            expect(result).toContain('domains text');
+            expect(result).toContain('domains varchar');
             expect(result).not.toContain('[]');
         });
 
@@ -60,7 +61,7 @@ Table users {
     status enum
     verification_type enum // comment here
 }`;
-            const result = preprocessDBML(dbml);
+            const { content: result } = preprocessDBML(dbml);
             expect(result).toContain('status varchar');
             expect(result).toContain('verification_type varchar');
             expect(result).not.toContain('enum');
@@ -71,7 +72,7 @@ Table users {
 Table users [headercolor: #24BAB1] {
     id int
 }`;
-            const result = preprocessDBML(dbml);
+            const { content: result } = preprocessDBML(dbml);
             expect(result).toContain('Table users {');
             expect(result).not.toContain('headercolor');
         });
@@ -105,7 +106,9 @@ Note note_test {
     'This is a test note'
 }`;
 
-        const diagram = await importDBMLToDiagram(complexDBML);
+        const diagram = await importDBMLToDiagram(complexDBML, {
+            databaseType: DatabaseType.POSTGRESQL,
+        });
 
         expect(diagram.tables).toHaveLength(2);
         expect(diagram.relationships).toHaveLength(1);
@@ -149,7 +152,7 @@ Note note_1750185617764 {
 }`;
 
         // Test that preprocessing handles all issues
-        const preprocessed = preprocessDBML(problematicDBML);
+        const { content: preprocessed } = preprocessDBML(problematicDBML);
         const sanitized = sanitizeDBML(preprocessed);
 
         // Should not throw
@@ -38,7 +38,9 @@ Note test_note {
     'This is a test note'
 }`;
 
-        const diagram = await importDBMLToDiagram(dbmlContent);
+        const diagram = await importDBMLToDiagram(dbmlContent, {
+            databaseType: DatabaseType.POSTGRESQL,
+        });
 
         // Verify basic structure
         expect(diagram).toBeDefined();
@@ -96,7 +98,9 @@ Table products [headercolor: #FF0000] {
 
 Ref: products.id < users.favorite_product_id`;
 
-        const diagram = await importDBMLToDiagram(dbmlContent);
+        const diagram = await importDBMLToDiagram(dbmlContent, {
+            databaseType: DatabaseType.POSTGRESQL,
+        });
 
         expect(diagram.tables).toHaveLength(2);
 
@@ -119,12 +123,16 @@ Ref: products.id < users.favorite_product_id`;
 
         it('should handle empty or invalid DBML gracefully', async () => {
             // Empty DBML
-            const emptyDiagram = await importDBMLToDiagram('');
+            const emptyDiagram = await importDBMLToDiagram('', {
+                databaseType: DatabaseType.POSTGRESQL,
+            });
             expect(emptyDiagram.tables).toHaveLength(0);
             expect(emptyDiagram.relationships).toHaveLength(0);
 
             // Only comments
-            const commentDiagram = await importDBMLToDiagram('// Just a comment');
+            const commentDiagram = await importDBMLToDiagram('// Just a comment', {
+                databaseType: DatabaseType.POSTGRESQL,
+            });
             expect(commentDiagram.tables).toHaveLength(0);
             expect(commentDiagram.relationships).toHaveLength(0);
         });
@@ -133,7 +141,9 @@ Ref: products.id < users.favorite_product_id`;
         const dbmlContent = `Table test {
     id int [pk]
 }`;
-        const diagram = await importDBMLToDiagram(dbmlContent);
+        const diagram = await importDBMLToDiagram(dbmlContent, {
+            databaseType: DatabaseType.GENERIC,
+        });
 
         // Default values
         expect(diagram.name).toBe('DBML Import');
@@ -1,4 +1,6 @@
 import type { CompilerError } from '@dbml/core/types/parse/error';
+import type { DatabaseType } from '@/lib/domain/database-type';
+import { databaseSupportsArrays } from '@/lib/domain/database-capabilities';
 
 export interface DBMLError {
     message: string;
@@ -6,8 +8,59 @@ export interface DBMLError {
|
|||||||
column: number;
|
column: number;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export class DBMLValidationError extends Error {
|
||||||
|
public readonly dbmlError: DBMLError;
|
||||||
|
|
||||||
|
constructor(message: string, line: number, column: number = 1) {
|
||||||
|
super(message);
|
||||||
|
this.name = 'DBMLValidationError';
|
||||||
|
this.dbmlError = { message, line, column };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const getPositionFromIndex = (
|
||||||
|
content: string,
|
||||||
|
matchIndex: number
|
||||||
|
): { line: number; column: number } => {
|
||||||
|
const lines = content.substring(0, matchIndex).split('\n');
|
||||||
|
return {
|
||||||
|
line: lines.length,
|
||||||
|
column: lines[lines.length - 1].length + 1,
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
export const validateArrayTypesForDatabase = (
|
||||||
|
content: string,
|
||||||
|
databaseType: DatabaseType
|
||||||
|
): void => {
|
||||||
|
// Only validate if database doesn't support arrays
|
||||||
|
if (databaseSupportsArrays(databaseType)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const arrayFieldPattern = /"?(\w+)"?\s+(\w+(?:\(\d+(?:,\s*\d+)?\))?)\[\]/g;
|
||||||
|
const matches = [...content.matchAll(arrayFieldPattern)];
|
||||||
|
|
||||||
|
for (const match of matches) {
|
||||||
|
const fieldName = match[1];
|
||||||
|
const dataType = match[2];
|
||||||
|
const { line, column } = getPositionFromIndex(content, match.index!);
|
||||||
|
|
||||||
|
throw new DBMLValidationError(
|
||||||
|
`Array types are not supported for ${databaseType} database. Field "${fieldName}" has array type "${dataType}[]" which is not allowed.`,
|
||||||
|
line,
|
||||||
|
column
|
||||||
|
);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
export function parseDBMLError(error: unknown): DBMLError | null {
|
export function parseDBMLError(error: unknown): DBMLError | null {
|
||||||
try {
|
try {
|
||||||
|
// Check for our custom DBMLValidationError
|
||||||
|
if (error instanceof DBMLValidationError) {
|
||||||
|
return error.dbmlError;
|
||||||
|
}
|
||||||
|
|
||||||
if (typeof error === 'string') {
|
if (typeof error === 'string') {
|
||||||
const parsed = JSON.parse(error);
|
const parsed = JSON.parse(error);
|
||||||
if (parsed.diags?.[0]) {
|
if (parsed.diags?.[0]) {
|
||||||
|
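For orientation, a minimal sketch (not part of the commit) of how this validation path is expected to behave end to end; the DBML snippet and the console call are illustrative only:

import {
    validateArrayTypesForDatabase,
    parseDBMLError,
} from './dbml-import-error';
import { DatabaseType } from '@/lib/domain/database-type';

const dbml = `Table users {
  id int [pk]
  tags varchar(64)[]
}`;

try {
    // MySQL is not flagged as supporting arrays, so the varchar(64)[] field throws
    validateArrayTypesForDatabase(dbml, DatabaseType.MYSQL);
} catch (e) {
    // parseDBMLError recognizes DBMLValidationError and returns { message, line, column }
    console.log(parseDBMLError(e));
}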
@@ -14,13 +14,21 @@ import {
     DBCustomTypeKind,
     type DBCustomType,
 } from '@/lib/domain/db-custom-type';
+import { validateArrayTypesForDatabase } from './dbml-import-error';

 export const defaultDBMLDiagramName = 'DBML Import';

-// Preprocess DBML to handle unsupported features
-export const preprocessDBML = (content: string): string => {
+interface PreprocessDBMLResult {
+    content: string;
+    arrayFields: Map<string, Set<string>>;
+}
+
+export const preprocessDBML = (content: string): PreprocessDBMLResult => {
     let processed = content;

+    // Track array fields found during preprocessing
+    const arrayFields = new Map<string, Set<string>>();
+
     // Remove TableGroup blocks (not supported by parser)
     processed = processed.replace(/TableGroup\s+[^{]*\{[^}]*\}/gs, '');

@@ -30,8 +38,37 @@ export const preprocessDBML = (content: string): string => {
     // Don't remove enum definitions - we'll parse them
     // processed = processed.replace(/enum\s+\w+\s*\{[^}]*\}/gs, '');

-    // Handle array types by converting them to text
-    processed = processed.replace(/(\w+)\[\]/g, 'text');
+    // Handle array types by tracking them and converting syntax for DBML parser
+    // Note: DBML doesn't officially support array syntax, so we convert type[] to type
+    // but track which fields should be arrays
+
+    // First, find all array field declarations and track them
+    const tablePattern =
+        /Table\s+(?:"([^"]+)"\.)?(?:"([^"]+)"|(\w+))\s*(?:\[[^\]]*\])?\s*\{([^}]+)\}/gs;
+    let match;
+
+    while ((match = tablePattern.exec(content)) !== null) {
+        const schema = match[1] || '';
+        const tableName = match[2] || match[3];
+        const tableBody = match[4];
+        const fullTableName = schema ? `${schema}.${tableName}` : tableName;
+
+        // Find array field declarations within this table
+        const fieldPattern = /"?(\w+)"?\s+(\w+(?:\([^)]+\))?)\[\]/g;
+        let fieldMatch;
+
+        while ((fieldMatch = fieldPattern.exec(tableBody)) !== null) {
+            const fieldName = fieldMatch[1];
+
+            if (!arrayFields.has(fullTableName)) {
+                arrayFields.set(fullTableName, new Set());
+            }
+            arrayFields.get(fullTableName)!.add(fieldName);
+        }
+    }
+
+    // Now convert array syntax for DBML parser (keep the base type, remove [])
+    processed = processed.replace(/(\w+(?:\(\d+(?:,\s*\d+)?\))?)\[\]/g, '$1');

     // Handle inline enum types without values by converting to varchar
     processed = processed.replace(
@@ -46,7 +83,7 @@ export const preprocessDBML = (content: string): string => {
         'Table $1 {'
     );

-    return processed;
+    return { content: processed, arrayFields };
 };

 // Simple function to replace Spanish special characters
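As a rough illustration (not part of the commit), preprocessDBML now returns both the rewritten DBML and the fields it stripped array syntax from; the sample schema below is made up:

import { preprocessDBML } from './dbml-import';

const { content, arrayFields } = preprocessDBML(`Table public.users {
  id int [pk]
  tags varchar(64)[]
}`);

// content: the same DBML with "tags varchar(64)" — the parser never sees []
// arrayFields: Map { 'public.users' => Set { 'tags' } }, consumed later by importDBMLToDiagram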
@@ -85,6 +122,7 @@ interface DBMLField {
     pk?: boolean;
     not_null?: boolean;
     increment?: boolean;
+    isArray?: boolean;
     characterMaximumLength?: string | null;
     precision?: number | null;
     scale?: number | null;
@@ -190,8 +228,8 @@ const determineCardinality = (

 export const importDBMLToDiagram = async (
     dbmlContent: string,
-    options?: {
-        databaseType?: DatabaseType;
+    options: {
+        databaseType: DatabaseType;
     }
 ): Promise<Diagram> => {
     try {
@@ -208,9 +246,13 @@ export const importDBMLToDiagram = async (
             };
         }

+        // Validate array types BEFORE preprocessing (preprocessing removes [])
+        validateArrayTypesForDatabase(dbmlContent, options.databaseType);
+
         const parser = new Parser();
         // Preprocess and sanitize DBML content
-        const preprocessedContent = preprocessDBML(dbmlContent);
+        const { content: preprocessedContent, arrayFields } =
+            preprocessDBML(dbmlContent);
         const sanitizedContent = sanitizeDBML(preprocessedContent);

         // Handle content that becomes empty after preprocessing
@@ -344,11 +386,24 @@ export const importDBMLToDiagram = async (
                     const rawDefault = String(
                         field.dbdefault.value
                     );
-                    // Remove ALL quotes (single, double, backticks) to clean the value
-                    // The SQL export layer will handle adding proper quotes when needed
                     defaultValue = rawDefault.replace(/['"`]/g, '');
                 }

+                // Check if this field should be an array
+                const fullTableName = schemaName
+                    ? `${schemaName}.${table.name}`
+                    : table.name;
+
+                let isArray = arrayFields
+                    .get(fullTableName)
+                    ?.has(field.name);
+
+                if (!isArray && schemaName) {
+                    isArray = arrayFields
+                        .get(table.name)
+                        ?.has(field.name);
+                }
+
                 return {
                     name: field.name,
                     type: field.type,
@@ -356,6 +411,7 @@ export const importDBMLToDiagram = async (
                     pk: field.pk,
                     not_null: field.not_null,
                     increment: field.increment,
+                    isArray: isArray || undefined,
                     note: field.note,
                     default: defaultValue,
                     ...getFieldExtraAttributes(field, allEnums),
@@ -503,6 +559,8 @@ export const importDBMLToDiagram = async (
                 characterMaximumLength: field.characterMaximumLength,
                 precision: field.precision,
                 scale: field.scale,
+                ...(field.increment ? { increment: field.increment } : {}),
+                ...(field.isArray ? { isArray: field.isArray } : {}),
                 ...(fieldComment ? { comments: fieldComment } : {}),
                 ...(field.default ? { default: field.default } : {}),
             };
@@ -1,10 +1,19 @@
 import { Parser } from '@dbml/core';
 import { preprocessDBML, sanitizeDBML } from './dbml-import';
 import type { DBMLError } from './dbml-import-error';
-import { parseDBMLError } from './dbml-import-error';
+import {
+    parseDBMLError,
+    validateArrayTypesForDatabase,
+} from './dbml-import-error';
+import type { DatabaseType } from '@/lib/domain/database-type';

 export const verifyDBML = (
-    content: string
+    content: string,
+    {
+        databaseType,
+    }: {
+        databaseType: DatabaseType;
+    }
 ):
     | {
           hasError: true;
@@ -16,8 +25,12 @@ export const verifyDBML = (
           hasError: false;
       } => {
     try {
-        const preprocessedContent = preprocessDBML(content);
+        // Validate array types BEFORE preprocessing (preprocessing removes [])
+        validateArrayTypesForDatabase(content, databaseType);
+
+        const { content: preprocessedContent } = preprocessDBML(content);
         const sanitizedContent = sanitizeDBML(preprocessedContent);

         const parser = new Parser();
         parser.parse(sanitizedContent, 'dbmlv2');
     } catch (e) {
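A short usage sketch (assumed, not taken from the diff) of the updated verifyDBML signature as a caller such as the import dialog would use it; the rest of the error branch is truncated in this hunk, so only hasError is shown:

const result = verifyDBML(dbmlContent, { databaseType: DatabaseType.MYSQL });

if (result.hasError) {
    // array fields in the DBML now surface here as a validation error before parsing
}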
src/lib/domain/database-capabilities.ts (new file, 57 lines)
@@ -0,0 +1,57 @@
+import { DatabaseType } from './database-type';
+
+export interface DatabaseCapabilities {
+    supportsArrays?: boolean;
+    supportsCustomTypes?: boolean;
+    supportsSchemas?: boolean;
+    supportsComments?: boolean;
+}
+
+export const DATABASE_CAPABILITIES: Record<DatabaseType, DatabaseCapabilities> =
+    {
+        [DatabaseType.POSTGRESQL]: {
+            supportsArrays: true,
+            supportsCustomTypes: true,
+            supportsSchemas: true,
+            supportsComments: true,
+        },
+        [DatabaseType.COCKROACHDB]: {
+            supportsArrays: true,
+            supportsSchemas: true,
+            supportsComments: true,
+        },
+        [DatabaseType.MYSQL]: {},
+        [DatabaseType.MARIADB]: {},
+        [DatabaseType.SQL_SERVER]: {
+            supportsSchemas: true,
+        },
+        [DatabaseType.SQLITE]: {},
+        [DatabaseType.CLICKHOUSE]: {
+            supportsSchemas: true,
+        },
+        [DatabaseType.ORACLE]: {
+            supportsSchemas: true,
+            supportsComments: true,
+        },
+        [DatabaseType.GENERIC]: {},
+    };
+
+export const getDatabaseCapabilities = (
+    databaseType: DatabaseType
+): DatabaseCapabilities => {
+    return DATABASE_CAPABILITIES[databaseType];
+};
+
+export const databaseSupportsArrays = (databaseType: DatabaseType): boolean => {
+    return getDatabaseCapabilities(databaseType).supportsArrays ?? false;
+};
+
+export const databaseTypesWithCommentSupport: DatabaseType[] = Object.keys(
+    DATABASE_CAPABILITIES
+).filter(
+    (dbType) => DATABASE_CAPABILITIES[dbType as DatabaseType].supportsComments
+) as DatabaseType[];
+
+export const supportsCustomTypes = (databaseType: DatabaseType): boolean => {
+    return getDatabaseCapabilities(databaseType).supportsCustomTypes ?? false;
+};
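A quick sketch of how these capability helpers are consumed elsewhere in the patch (the result comments are illustrative):

import { DatabaseType } from '@/lib/domain/database-type';
import {
    databaseSupportsArrays,
    getDatabaseCapabilities,
} from '@/lib/domain/database-capabilities';

databaseSupportsArrays(DatabaseType.POSTGRESQL); // true
databaseSupportsArrays(DatabaseType.MYSQL); // false — no supportsArrays entry, so the ?? false fallback applies
getDatabaseCapabilities(DatabaseType.ORACLE); // { supportsSchemas: true, supportsComments: true }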
@@ -9,9 +9,3 @@ export enum DatabaseType {
     COCKROACHDB = 'cockroachdb',
     ORACLE = 'oracle',
 }
-
-export const databaseTypesWithCommentSupport: DatabaseType[] = [
-    DatabaseType.POSTGRESQL,
-    DatabaseType.COCKROACHDB,
-    DatabaseType.ORACLE,
-];
@@ -2,9 +2,10 @@ import { z } from 'zod';
 import {
     dataTypeSchema,
     findDataTypeDataById,
+    supportsArrayDataType,
     type DataType,
 } from '../data/data-types/data-types';
-import type { DatabaseType } from './database-type';
+import { DatabaseType } from './database-type';

 export interface DBField {
     id: string;
@@ -14,6 +15,7 @@ export interface DBField {
     unique: boolean;
     nullable: boolean;
     increment?: boolean | null;
+    isArray?: boolean | null;
     createdAt: number;
     characterMaximumLength?: string | null;
     precision?: number | null;
@@ -31,6 +33,7 @@ export const dbFieldSchema: z.ZodType<DBField> = z.object({
     unique: z.boolean(),
     nullable: z.boolean(),
     increment: z.boolean().or(z.null()).optional(),
+    isArray: z.boolean().or(z.null()).optional(),
     createdAt: z.number(),
     characterMaximumLength: z.string().or(z.null()).optional(),
     precision: z.number().or(z.null()).optional(),
@@ -52,11 +55,26 @@ export const generateDBFieldSuffix = (
         typeId?: string;
     } = {}
 ): string => {
+    let suffix = '';
+
     if (databaseType && forceExtended && typeId) {
-        return generateExtendedSuffix(field, databaseType, typeId);
+        suffix = generateExtendedSuffix(field, databaseType, typeId);
+    } else {
+        suffix = generateStandardSuffix(field);
     }

-    return generateStandardSuffix(field);
+    // Add array notation if field is an array
+    if (
+        field.isArray &&
+        supportsArrayDataType(
+            typeId ?? field.type.id,
+            databaseType ?? DatabaseType.GENERIC
+        )
+    ) {
+        suffix += '[]';
+    }
+
+    return suffix;
 };

 const generateExtendedSuffix = (
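A small sketch of the suffix behaviour this change enables; the field literal is trimmed to the relevant properties, and the exact base suffix comes from generateStandardSuffix, which is not shown in this diff:

import { generateDBFieldSuffix, type DBField } from '@/lib/domain/db-field';
import { DatabaseType } from '@/lib/domain/database-type';

const tags = {
    name: 'tags',
    type: { id: 'varchar', name: 'varchar' },
    characterMaximumLength: '64',
    isArray: true,
} as unknown as DBField;

// On a database whose data types allow arrays (e.g. PostgreSQL), '[]' is appended
// to the standard suffix, so something like '(64)' becomes '(64)[]'; whether a given
// type/database pair qualifies is decided by supportsArrayDataType in data-types.ts.
generateDBFieldSuffix(tags, { databaseType: DatabaseType.POSTGRESQL });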
@@ -1,4 +1,5 @@
-import { DatabaseType } from './database-type';
+import { DATABASE_CAPABILITIES } from './database-capabilities';
+import type { DatabaseType } from './database-type';

 export interface DBSchema {
     id: string;
@@ -18,10 +19,8 @@ export const schemaNameToDomainSchemaName = (
         ? undefined
         : schema?.trim();

-export const databasesWithSchemas: DatabaseType[] = [
-    DatabaseType.POSTGRESQL,
-    DatabaseType.SQL_SERVER,
-    DatabaseType.CLICKHOUSE,
-    DatabaseType.COCKROACHDB,
-    DatabaseType.ORACLE,
-];
+export const databasesWithSchemas: DatabaseType[] = Object.keys(
+    DATABASE_CAPABILITIES
+).filter(
+    (dbType) => DATABASE_CAPABILITIES[dbType as DatabaseType].supportsSchemas
+) as DatabaseType[];
@@ -28,6 +28,16 @@ export function getDiffMapKey({
         : `${diffObject}-${objectId}`;
 }

+const isOneOfDefined = (
+    ...values: (string | number | boolean | undefined | null)[]
+): boolean => {
+    return values.some((value) => value !== undefined && value !== null);
+};
+
+const normalizeBoolean = (value: boolean | undefined | null): boolean => {
+    return value === true;
+};
+
 export interface GenerateDiffOptions {
     includeTables?: boolean;
     includeFields?: boolean;
@@ -552,6 +562,8 @@ function compareFieldProperties({
         'characterMaximumLength',
         'scale',
         'precision',
+        'increment',
+        'isArray',
     ];

     const changedAttributes: FieldDiffAttribute[] = [];
@@ -620,6 +632,24 @@ function compareFieldProperties({
         changedAttributes.push('precision');
     }

+    if (
+        attributesToCheck.includes('increment') &&
+        isOneOfDefined(newField.increment, oldField.increment) &&
+        normalizeBoolean(oldField.increment) !==
+            normalizeBoolean(newField.increment)
+    ) {
+        changedAttributes.push('increment');
+    }
+
+    if (
+        attributesToCheck.includes('isArray') &&
+        isOneOfDefined(newField.isArray, oldField.isArray) &&
+        normalizeBoolean(oldField.isArray) !==
+            normalizeBoolean(newField.isArray)
+    ) {
+        changedAttributes.push('isArray');
+    }
+
     if (changedAttributes.length > 0) {
         for (const attribute of changedAttributes) {
             diffMap.set(
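A tiny sketch of the comparison semantics these helpers give the new 'increment' and 'isArray' checks: undefined, null, and false all normalize to false, so a field that never declared isArray is not reported as changed against one that stores an explicit false. The values below are chosen for illustration:

const isOneOfDefined = (...values: (boolean | undefined | null)[]): boolean =>
    values.some((value) => value !== undefined && value !== null);
const normalizeBoolean = (value: boolean | undefined | null): boolean => value === true;

// undefined vs. false: one side is defined, but both normalize to false -> no diff entry
isOneOfDefined(undefined, false); // true
normalizeBoolean(undefined) !== normalizeBoolean(false); // false

// undefined vs. true: false vs. true after normalization -> 'isArray' is recorded as changed
normalizeBoolean(undefined) !== normalizeBoolean(true); // true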
@@ -15,7 +15,9 @@ export type FieldDiffAttribute =
     | 'comments'
     | 'characterMaximumLength'
     | 'precision'
-    | 'scale';
+    | 'scale'
+    | 'increment'
+    | 'isArray';

 export const fieldDiffAttributeSchema: z.ZodType<FieldDiffAttribute> = z.union([
     z.literal('name'),
@@ -2,7 +2,7 @@ import React, { useEffect } from 'react';
 import { KeyRound, Trash2 } from 'lucide-react';
 import { Input } from '@/components/input/input';
 import { generateDBFieldSuffix, type DBField } from '@/lib/domain/db-field';
-import type { DBTable } from '@/lib/domain';
+import type { DatabaseType, DBTable } from '@/lib/domain';
 import { useUpdateTableField } from '@/hooks/use-update-table-field';
 import {
     Tooltip,
@@ -18,10 +18,11 @@ export interface TableEditModeFieldProps {
     table: DBTable;
     field: DBField;
     focused?: boolean;
+    databaseType: DatabaseType;
 }

 export const TableEditModeField: React.FC<TableEditModeFieldProps> = React.memo(
-    ({ table, field, focused = false }) => {
+    ({ table, field, focused = false, databaseType }) => {
        const { t } = useTranslation();
        const [showHighlight, setShowHighlight] = React.useState(false);

@@ -102,7 +103,9 @@ export const TableEditModeField: React.FC<TableEditModeFieldProps> = React.memo(
                                 'side_panel.tables_section.table.field_type'
                             )}
                             value={field.type.id}
-                            valueSuffix={generateDBFieldSuffix(field)}
+                            valueSuffix={generateDBFieldSuffix(field, {
+                                databaseType,
+                            })}
                             optionSuffix={(option) =>
                                 generateFieldSuffix(option.value)
                             }
@@ -119,9 +122,9 @@ export const TableEditModeField: React.FC<TableEditModeFieldProps> = React.memo(
                         </TooltipTrigger>
                         <TooltipContent>
                             {field.type.name}
-                            {field.characterMaximumLength
-                                ? `(${field.characterMaximumLength})`
-                                : ''}
+                            {generateDBFieldSuffix(field, {
+                                databaseType,
+                            })}
                         </TooltipContent>
                     </Tooltip>
                 </div>
@@ -300,6 +300,7 @@ export const TableEditMode: React.FC<TableEditModeProps> = React.memo(
                             table={table}
                             field={field}
                             focused={focusFieldId === field.id}
+                            databaseType={databaseType}
                         />
                     </div>
                 ))}
@@ -67,6 +67,7 @@ const arePropsEqual = (
         nextProps.field.characterMaximumLength &&
     prevProps.field.precision === nextProps.field.precision &&
     prevProps.field.scale === nextProps.field.scale &&
+    prevProps.field.isArray === nextProps.field.isArray &&
     prevProps.focused === nextProps.focused &&
     prevProps.highlighted === nextProps.highlighted &&
     prevProps.visible === nextProps.visible &&
@@ -77,7 +78,8 @@

 export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
     ({ field, focused, tableNodeId, highlighted, visible, isConnectable }) => {
-        const { relationships, readonly, highlightedCustomType } = useChartDB();
+        const { relationships, readonly, highlightedCustomType, databaseType } =
+            useChartDB();

         const updateNodeInternals = useUpdateNodeInternals();
         const connection = useConnection();
@@ -152,6 +154,7 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
            getFieldNewCharacterMaximumLength,
            getFieldNewPrecision,
            getFieldNewScale,
+           getFieldNewIsArray,
            checkIfFieldHasChange,
            isSummaryOnly,
        } = useDiff();
@@ -170,6 +173,7 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
            fieldDiffChangedPrimaryKey: ReturnType<
                typeof getFieldNewPrimaryKey
            >;
+           fieldDiffChangedIsArray: ReturnType<typeof getFieldNewIsArray>;
            isDiffFieldChanged: boolean;
        }>({
            isDiffFieldRemoved: false,
@@ -181,6 +185,7 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
            fieldDiffChangedScale: null,
            fieldDiffChangedPrecision: null,
            fieldDiffChangedPrimaryKey: null,
+           fieldDiffChangedIsArray: null,
            isDiffFieldChanged: false,
        });

@@ -214,6 +219,9 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
                fieldDiffChangedPrecision: getFieldNewPrecision({
                    fieldId: field.id,
                }),
+               fieldDiffChangedIsArray: getFieldNewIsArray({
+                   fieldId: field.id,
+               }),
                isDiffFieldChanged: checkIfFieldHasChange({
                    fieldId: field.id,
                    tableId: tableNodeId,
@@ -232,6 +240,7 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
            getFieldNewCharacterMaximumLength,
            getFieldNewPrecision,
            getFieldNewScale,
+           getFieldNewIsArray,
            field.id,
            tableNodeId,
        ]);
@@ -247,8 +256,23 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
            fieldDiffChangedCharacterMaximumLength,
            fieldDiffChangedScale,
            fieldDiffChangedPrecision,
+           fieldDiffChangedIsArray,
        } = diffState;

+       const isFieldAttributeChanged = useMemo(() => {
+           return (
+               fieldDiffChangedCharacterMaximumLength ||
+               fieldDiffChangedScale ||
+               fieldDiffChangedPrecision ||
+               fieldDiffChangedIsArray
+           );
+       }, [
+           fieldDiffChangedCharacterMaximumLength,
+           fieldDiffChangedScale,
+           fieldDiffChangedPrecision,
+           fieldDiffChangedIsArray,
+       ]);
+
        const isCustomTypeHighlighted = useMemo(() => {
            if (!highlightedCustomType) return false;
            return field.type.name === highlightedCustomType.name;
@@ -342,17 +366,14 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
                    </>
                )}
                <div
-                   className={cn(
-                       'flex items-center gap-1 min-w-0 flex-1 text-left',
-                       {
-                           'font-semibold': field.primaryKey || field.unique,
-                       }
-                   )}
+                   className={cn('flex items-center gap-1 min-w-0 text-left', {
+                       'font-semibold': field.primaryKey || field.unique,
+                   })}
                >
                    {isDiffFieldRemoved ? (
-                       <SquareMinus className="size-3.5 text-red-800 dark:text-red-200" />
+                       <SquareMinus className="size-3.5 shrink-0 text-red-800 dark:text-red-200" />
                    ) : isDiffNewField ? (
-                       <SquarePlus className="size-3.5 text-green-800 dark:text-green-200" />
+                       <SquarePlus className="size-3.5 shrink-0 text-green-800 dark:text-green-200" />
                    ) : isDiffFieldChanged && !isSummaryOnly ? (
                        <SquareDot className="size-3.5 shrink-0 text-sky-800 dark:text-sky-200" />
                    ) : null}
@@ -392,13 +413,17 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
                    ) : null}
                </div>

-               <div className="ml-2 flex shrink-0 items-center justify-end gap-1.5">
+               <div
+                   className={cn(
+                       'ml-auto flex shrink-0 items-center gap-1 min-w-0',
+                       !readonly ? 'group-hover:hidden' : ''
+                   )}
+               >
                    {(field.primaryKey && !fieldDiffChangedPrimaryKey?.old) ||
                    fieldDiffChangedPrimaryKey?.new ? (
                        <div
                            className={cn(
-                               'text-muted-foreground',
-                               !readonly ? 'group-hover:hidden' : '',
+                               'text-muted-foreground shrink-0',
                                isDiffFieldRemoved
                                    ? 'text-red-800 dark:text-red-200'
                                    : '',
@@ -416,12 +441,9 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
                            <KeyRound size={14} />
                        </div>
                    ) : null}
-
                    <div
                        className={cn(
-                           'content-center text-right text-xs text-muted-foreground overflow-hidden max-w-[8rem]',
-                           field.primaryKey ? 'min-w-0' : 'min-w-[3rem]',
-                           !readonly ? 'group-hover:hidden' : '',
+                           'text-right text-xs text-muted-foreground overflow-hidden min-w-0',
                            isDiffFieldRemoved
                                ? 'text-red-800 dark:text-red-200'
                                : '',
@@ -437,41 +459,107 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
                        )}
                    >
                        <span className="block truncate">
-                           {fieldDiffChangedType ? (
-                               <>
-                                   <span className="line-through">
+                           {
+                               // fieldDiffChangedType ? (
+                               //     <>
+                               //         <span className="line-through">
+                               //             {
+                               //                 fieldDiffChangedType.old.name.split(
+                               //                     ' '
+                               //                 )[0]
+                               //             }
+                               //         </span>{' '}
+                               //         {
+                               //             fieldDiffChangedType.new.name.split(
+                               //                 ' '
+                               //             )[0]
+                               //         }
+                               //     </>
+                               // ) :
+                               isFieldAttributeChanged ||
+                               fieldDiffChangedType ? (
+                                   <>
+                                       <span className="line-through">
+                                           {
+                                               (
+                                                   fieldDiffChangedType?.old
+                                                       ?.name ??
+                                                   field.type.name
+                                               ).split(' ')[0]
+                                           }
+                                           {showFieldAttributes
+                                               ? generateDBFieldSuffix(
+                                                     {
+                                                         ...field,
+                                                         ...{
+                                                             precision:
+                                                                 fieldDiffChangedPrecision?.old ??
+                                                                 field.precision,
+                                                             scale:
+                                                                 fieldDiffChangedScale?.old ??
+                                                                 field.scale,
+                                                             characterMaximumLength:
+                                                                 fieldDiffChangedCharacterMaximumLength?.old ??
+                                                                 field.characterMaximumLength,
+                                                             isArray:
+                                                                 fieldDiffChangedIsArray?.old ??
+                                                                 field.isArray,
+                                                         },
+                                                     },
+                                                     {
+                                                         databaseType,
+                                                     }
+                                                 )
+                                               : field.isArray
+                                                 ? '[]'
+                                                 : ''}
+                                       </span>{' '}
                                        {
-                                           fieldDiffChangedType.old.name.split(
-                                               ' '
-                                           )[0]
+                                           (
+                                               fieldDiffChangedType?.new
+                                                   ?.name ?? field.type.name
+                                           ).split(' ')[0]
                                        }
-                                   </span>{' '}
-                                   {
-                                       fieldDiffChangedType.new.name.split(
-                                           ' '
-                                       )[0]
-                                   }
-                               </>
-                           ) : (
-                               `${field.type.name.split(' ')[0]}${
-                                   showFieldAttributes
-                                       ? generateDBFieldSuffix({
-                                             ...field,
-                                             ...{
-                                                 precision:
-                                                     fieldDiffChangedPrecision?.new ??
-                                                     field.precision,
-                                                 scale:
-                                                     fieldDiffChangedScale?.new ??
-                                                     field.scale,
-                                                 characterMaximumLength:
-                                                     fieldDiffChangedCharacterMaximumLength?.new ??
-                                                     field.characterMaximumLength,
-                                             },
-                                         })
-                                       : ''
-                               }`
-                           )}
+                                       {showFieldAttributes
+                                           ? generateDBFieldSuffix(
+                                                 {
+                                                     ...field,
+                                                     ...{
+                                                         precision:
+                                                             fieldDiffChangedPrecision?.new ??
+                                                             field.precision,
+                                                         scale:
+                                                             fieldDiffChangedScale?.new ??
+                                                             field.scale,
+                                                         characterMaximumLength:
+                                                             fieldDiffChangedCharacterMaximumLength?.new ??
+                                                             field.characterMaximumLength,
+                                                         isArray:
+                                                             fieldDiffChangedIsArray?.new ??
+                                                             field.isArray,
+                                                     },
+                                                 },
+                                                 {
+                                                     databaseType,
+                                                 }
+                                             )
+                                           : (fieldDiffChangedIsArray?.new ??
+                                                   field.isArray)
+                                             ? '[]'
+                                             : ''}
+                                   </>
+                               ) : (
+                                   `${field.type.name.split(' ')[0]}${
+                                       showFieldAttributes
+                                           ? generateDBFieldSuffix(field, {
+                                                 databaseType,
+                                             })
+                                           : field.isArray
+                                             ? '[]'
+                                             : ''
+                                   }`
+                               )
+                           }
                            {fieldDiffChangedNullable ? (
                                fieldDiffChangedNullable.new ? (
                                    <span className="font-semibold">?</span>
@@ -485,21 +573,21 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
                            )}
                        </span>
                    </div>
-                   {readonly ? null : (
-                       <div className="hidden flex-row group-hover:flex">
-                           <Button
-                               variant="ghost"
-                               className="size-6 p-0 hover:bg-primary-foreground"
-                               onClick={(e) => {
-                                   e.stopPropagation();
-                                   openEditTableOnField();
-                               }}
-                           >
-                               <Pencil className="!size-3.5 text-pink-600" />
-                           </Button>
-                       </div>
-                   )}
                </div>
+               {readonly ? null : (
+                   <div className="ml-2 hidden shrink-0 flex-row group-hover:flex">
+                       <Button
+                           variant="ghost"
+                           className="size-6 p-0 hover:bg-primary-foreground"
+                           onClick={(e) => {
+                               e.stopPropagation();
+                               openEditTableOnField();
+                           }}
+                       >
+                           <Pencil className="!size-3.5 text-pink-600" />
+                       </Button>
+                   </div>
+               )}
            </div>
        );
    },
@@ -8,6 +8,7 @@ import type { FieldAttributeRange } from '@/lib/data/data-types/data-types';
 import {
     findDataTypeDataById,
     supportsAutoIncrementDataType,
+    supportsArrayDataType,
 } from '@/lib/data/data-types/data-types';
 import {
     Popover,
@@ -89,6 +90,7 @@ export const TableFieldPopover: React.FC<TableFieldPopoverProps> = ({
                unique: localField.unique,
                default: localField.default,
                increment: localField.increment,
+               isArray: localField.isArray,
            });
        }
        prevFieldRef.current = localField;
@@ -104,6 +106,11 @@ export const TableFieldPopover: React.FC<TableFieldPopoverProps> = ({
        [field.type.name]
    );

+   const supportsArray = useMemo(
+       () => supportsArrayDataType(field.type.name, databaseType),
+       [field.type.name, databaseType]
+   );
+
    return (
        <Popover
            open={isOpen}
@@ -172,6 +179,26 @@ export const TableFieldPopover: React.FC<TableFieldPopoverProps> = ({
                        />
                    </div>
                ) : null}
+               {supportsArray ? (
+                   <div className="flex items-center justify-between">
+                       <Label
+                           htmlFor="isArray"
+                           className="text-subtitle"
+                       >
+                           Array
+                       </Label>
+                       <Checkbox
+                           checked={localField.isArray ?? false}
+                           disabled={readonly}
+                           onCheckedChange={(value) =>
+                               setLocalField((current) => ({
+                                   ...current,
+                                   isArray: !!value,
+                               }))
+                           }
+                       />
+                   </div>
+               ) : null}
                <div className="flex flex-col gap-2">
                    <Label htmlFor="default" className="text-subtitle">
                        {t(
@@ -2,7 +2,6 @@ import React from 'react';
 import { GripVertical, KeyRound } from 'lucide-react';
 import { Input } from '@/components/input/input';
 import { generateDBFieldSuffix, type DBField } from '@/lib/domain/db-field';
-import { useChartDB } from '@/hooks/use-chartdb';
 import { useUpdateTableField } from '@/hooks/use-update-table-field';
 import {
     Tooltip,
@@ -15,13 +14,15 @@ import { useSortable } from '@dnd-kit/sortable';
 import { CSS } from '@dnd-kit/utilities';
 import { SelectBox } from '@/components/select-box/select-box';
 import { TableFieldPopover } from './table-field-modal/table-field-modal';
-import type { DBTable } from '@/lib/domain';
+import type { DatabaseType, DBTable } from '@/lib/domain';

 export interface TableFieldProps {
     table: DBTable;
     field: DBField;
     updateField: (attrs: Partial<DBField>) => void;
     removeField: () => void;
+    databaseType: DatabaseType;
+    readonly?: boolean;
 }

 export const TableField: React.FC<TableFieldProps> = ({
@@ -29,8 +30,9 @@ export const TableField: React.FC<TableFieldProps> = ({
     field,
     updateField,
     removeField,
+    databaseType,
+    readonly = false,
 }) => {
-    const { databaseType, readonly } = useChartDB();
     const { t } = useTranslation();

     const { attributes, listeners, setNodeRef, transform, transition } =
@@ -99,7 +101,9 @@ export const TableField: React.FC<TableFieldProps> = ({
                            'side_panel.tables_section.table.field_type'
                        )}
                        value={field.type.id}
-                       valueSuffix={generateDBFieldSuffix(field)}
+                       valueSuffix={generateDBFieldSuffix(field, {
+                           databaseType,
+                       })}
                        optionSuffix={(option) =>
                            generateFieldSuffix(option.value)
                        }
@@ -49,6 +49,7 @@ export const TableListItemContent: React.FC<TableListItemContentProps> = ({
        updateIndex,
        updateTable,
        readonly,
+       databaseType,
    } = useChartDB();
    const { t } = useTranslation();
    const { color } = table;
@@ -183,6 +184,8 @@ export const TableListItemContent: React.FC<TableListItemContentProps> = ({
                                removeField={() =>
                                    removeField(table.id, field.id)
                                }
+                               databaseType={databaseType}
+                               readonly={readonly}
                            />
                        ))}
                    </SortableContext>
@@ -2418,9 +2418,10 @@ export const examples: Example[] = [
                    id: 'yqrnjmizqeu2w7mpfhze3clbj',
                    name: 'special_features',
                    type: {
-                       id: 'array',
-                       name: 'array',
+                       id: 'text',
+                       name: 'text',
                    },
+                   isArray: true,
                    primaryKey: false,
                    unique: false,
                    nullable: true,