Mirror of https://github.com/chartdb/chartdb.git, synced 2025-10-24 08:33:44 +00:00

Compare commits: 8 commits, jf/add_dup...v1.15.1

| Author | SHA1 | Date |
|---|---|---|
|  | 1ce265781b |  |
|  | 60c5675cbf |  |
|  | 66b086378c |  |
|  | abd2a6ccbe |  |
|  | 459c5f1ce3 |  |
|  | 44be48ff3a |  |
|  | ad8e34483f |  |
|  | 215d57979d |  |

.github/workflows/cla.yaml (vendored): 2 changed lines
@@ -7,7 +7,7 @@ on:
 permissions:
   actions: write
-  contents: write # this can be 'read' if the signatures are in remote repository
+  contents: read
   pull-requests: write
   statuses: write
CHANGELOG.md: 50 changed lines
@@ -1,5 +1,55 @@
# Changelog
## [1.15.1](https://github.com/chartdb/chartdb/compare/v1.15.0...v1.15.1) (2025-08-27)
### Bug Fixes
* add actions menu to diagram list + add duplicate diagram ([#876](https://github.com/chartdb/chartdb/issues/876)) ([abd2a6c](https://github.com/chartdb/chartdb/commit/abd2a6ccbe1aa63db44ec28b3eff525cc5d3f8b0))
* **custom-types:** Make schema optional ([#866](https://github.com/chartdb/chartdb/issues/866)) ([60c5675](https://github.com/chartdb/chartdb/commit/60c5675cbfe205859d2d0c9848d8345a0a854671))
* handle quoted identifiers with special characters in SQL import/export and DBML generation ([#877](https://github.com/chartdb/chartdb/issues/877)) ([66b0863](https://github.com/chartdb/chartdb/commit/66b086378cd63347acab5fc7f13db7db4feaa872))
## [1.15.0](https://github.com/chartdb/chartdb/compare/v1.14.0...v1.15.0) (2025-08-26)
### Features
* add auto increment support for fields with database-specific export ([#851](https://github.com/chartdb/chartdb/issues/851)) ([c77c983](https://github.com/chartdb/chartdb/commit/c77c983989ae38a6b1139dd9015f4f3178d4e103))
* **filter:** filter tables by areas ([#836](https://github.com/chartdb/chartdb/issues/836)) ([e9c5442](https://github.com/chartdb/chartdb/commit/e9c5442d9df2beadad78187da3363bb6406636c4))
* include foreign keys inline in SQLite CREATE TABLE statements ([#833](https://github.com/chartdb/chartdb/issues/833)) ([43fc1d7](https://github.com/chartdb/chartdb/commit/43fc1d7fc26876b22c61405f6c3df89fc66b7992))
* **postgres:** add support hash index types ([#812](https://github.com/chartdb/chartdb/issues/812)) ([0d623a8](https://github.com/chartdb/chartdb/commit/0d623a86b1cb7cbd223e10ad23d09fc0e106c006))
* support create views ([#868](https://github.com/chartdb/chartdb/issues/868)) ([0a5874a](https://github.com/chartdb/chartdb/commit/0a5874a69b6323145430c1fb4e3482ac7da4916c))
### Bug Fixes
* area filter logic ([#861](https://github.com/chartdb/chartdb/issues/861)) ([73daf0d](https://github.com/chartdb/chartdb/commit/73daf0df2142a29c2eeebe60b43198bcca869026))
* **area filter:** fix dragging tables over filtered areas ([#842](https://github.com/chartdb/chartdb/issues/842)) ([19fd94c](https://github.com/chartdb/chartdb/commit/19fd94c6bde3a9ec749cd1ccacbedb6abc96d037))
* **canvas:** delete table + area together bug ([#859](https://github.com/chartdb/chartdb/issues/859)) ([b697e26](https://github.com/chartdb/chartdb/commit/b697e26170da95dcb427ff6907b6f663c98ba59f))
* **cla:** Harden action ([#867](https://github.com/chartdb/chartdb/issues/867)) ([ad8e344](https://github.com/chartdb/chartdb/commit/ad8e34483fdf4226de76c9e7768bc2ba9bf154de))
* DBML export error with multi-line table comments for SQL Server ([#852](https://github.com/chartdb/chartdb/issues/852)) ([0545b41](https://github.com/chartdb/chartdb/commit/0545b411407b2449220d10981a04c3e368a90ca3))
* filter to default schema on load new diagram ([#849](https://github.com/chartdb/chartdb/issues/849)) ([712bdf5](https://github.com/chartdb/chartdb/commit/712bdf5b958919d940c4f2a1c3b7c7e969990f02))
* **filter:** filter toggle issues with no schemas dbs ([#856](https://github.com/chartdb/chartdb/issues/856)) ([d0dee84](https://github.com/chartdb/chartdb/commit/d0dee849702161d979b4f589a7e6579fbaade22d))
* **filters:** refactor diagram filters - remove schema filter ([#832](https://github.com/chartdb/chartdb/issues/832)) ([4f1d329](https://github.com/chartdb/chartdb/commit/4f1d3295c09782ab46d82ce21b662032aa094f22))
* for sqlite import - add more types & include type parameters ([#834](https://github.com/chartdb/chartdb/issues/834)) ([5936500](https://github.com/chartdb/chartdb/commit/5936500ca00a57b3f161616264c26152a13c36d2))
* improve creating view to table dependency ([#874](https://github.com/chartdb/chartdb/issues/874)) ([44be48f](https://github.com/chartdb/chartdb/commit/44be48ff3ad1361279331c17364090b13af471a1))
* initially show filter when filter active ([#853](https://github.com/chartdb/chartdb/issues/853)) ([ab4845c](https://github.com/chartdb/chartdb/commit/ab4845c7728e6e0b2d852f8005921fd90630eef9))
* **menu:** clear file menu ([#843](https://github.com/chartdb/chartdb/issues/843)) ([eaebe34](https://github.com/chartdb/chartdb/commit/eaebe3476824af779214a354b3e991923a22f195))
* merge relationship & dependency sections to ref section ([#870](https://github.com/chartdb/chartdb/issues/870)) ([ec3719e](https://github.com/chartdb/chartdb/commit/ec3719ebce4664b2aa6e3322fb3337e72bc21015))
* move dbml into sections menu ([#862](https://github.com/chartdb/chartdb/issues/862)) ([2531a70](https://github.com/chartdb/chartdb/commit/2531a7023f36ef29e67c0da6bca4fd0346b18a51))
* open filter by default ([#863](https://github.com/chartdb/chartdb/issues/863)) ([7e0fdd1](https://github.com/chartdb/chartdb/commit/7e0fdd1595bffe29e769d29602d04f42edfe417e))
* preserve composite primary key constraint names across import/export workflows ([#869](https://github.com/chartdb/chartdb/issues/869)) ([215d579](https://github.com/chartdb/chartdb/commit/215d57979df2e91fa61988acff590daad2f4e771))
* prevent false change detection in DBML editor by stripping public schema on import ([#858](https://github.com/chartdb/chartdb/issues/858)) ([0aaa451](https://github.com/chartdb/chartdb/commit/0aaa451479911d047e4cc83f063afa68a122ba9b))
* remove unnecessary space ([#845](https://github.com/chartdb/chartdb/issues/845)) ([f1a4298](https://github.com/chartdb/chartdb/commit/f1a429836221aacdda73b91665bf33ffb011164c))
* reorder with areas ([#846](https://github.com/chartdb/chartdb/issues/846)) ([d7c9536](https://github.com/chartdb/chartdb/commit/d7c9536272cf1d42104b7064ea448d128d091a20))
* **select-box:** fix select box issue in dialog ([#840](https://github.com/chartdb/chartdb/issues/840)) ([cb2ba66](https://github.com/chartdb/chartdb/commit/cb2ba66233c8c04e2d963cf2d210499d8512a268))
* set default filter only if has more than 1 schemas ([#855](https://github.com/chartdb/chartdb/issues/855)) ([b4ccfcd](https://github.com/chartdb/chartdb/commit/b4ccfcdcde2f3565b0d3bbc46fa1715feb6cd925))
* show default schema first ([#854](https://github.com/chartdb/chartdb/issues/854)) ([1759b0b](https://github.com/chartdb/chartdb/commit/1759b0b9f271ed25f7c71f26c344e3f1d97bc5fb))
* **sidebar:** add titles to sidebar ([#844](https://github.com/chartdb/chartdb/issues/844)) ([b8f2141](https://github.com/chartdb/chartdb/commit/b8f2141bd2e67272030896fb4009a7925f9f09e4))
* **sql-import:** fix SQL Server foreign key parsing for tables without schema prefix ([#857](https://github.com/chartdb/chartdb/issues/857)) ([04d91c6](https://github.com/chartdb/chartdb/commit/04d91c67b1075e94948f75186878e633df7abbca))
* **table colors:** switch to default table color ([#841](https://github.com/chartdb/chartdb/issues/841)) ([0da3cae](https://github.com/chartdb/chartdb/commit/0da3caeeac37926dd22f38d98423611f39c0412a))
* update filter on adding table ([#838](https://github.com/chartdb/chartdb/issues/838)) ([41ba251](https://github.com/chartdb/chartdb/commit/41ba25137789dda25266178cd7c96ecbb37e62a4))
## [1.14.0](https://github.com/chartdb/chartdb/compare/v1.13.2...v1.14.0) (2025-08-04)
package-lock.json (generated): 4 changed lines

@@ -1,12 +1,12 @@
 {
   "name": "chartdb",
-  "version": "1.14.0",
+  "version": "1.15.1",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "chartdb",
-      "version": "1.14.0",
+      "version": "1.15.1",
       "dependencies": {
         "@ai-sdk/openai": "^0.0.51",
         "@dbml/core": "^3.9.5",

package.json

@@ -1,7 +1,7 @@
 {
   "name": "chartdb",
   "private": true,
-  "version": "1.14.0",
+  "version": "1.15.1",
   "type": "module",
   "scripts": {
     "dev": "vite",
@@ -6,7 +6,10 @@ import type { ChartDBContext, ChartDBEvent } from './chartdb-context';
import { chartDBContext } from './chartdb-context';
import { DatabaseType } from '@/lib/domain/database-type';
import type { DBField } from '@/lib/domain/db-field';
import type { DBIndex } from '@/lib/domain/db-index';
import {
    getTableIndexesWithPrimaryKey,
    type DBIndex,
} from '@/lib/domain/db-index';
import type { DBRelationship } from '@/lib/domain/db-relationship';
import { useStorage } from '@/hooks/use-storage';
import { useRedoUndoStack } from '@/hooks/use-redo-undo-stack';

@@ -348,6 +351,11 @@ export const ChartDBProvider: React.FC<
            order: tables.length,
            ...attributes,
        };

        table.indexes = getTableIndexesWithPrimaryKey({
            table,
        });

        await addTable(table);

        return table;

@@ -639,17 +647,30 @@ export const ChartDBProvider: React.FC<
            options = { updateHistory: true }
        ) => {
            const prevField = getField(tableId, fieldId);

            const updateTableFn = (table: DBTable) => {
                const updatedTable: DBTable = {
                    ...table,
                    fields: table.fields.map((f) =>
                        f.id === fieldId ? { ...f, ...field } : f
                    ),
                } satisfies DBTable;

                updatedTable.indexes = getTableIndexesWithPrimaryKey({
                    table: updatedTable,
                });

                return updatedTable;
            };

            setTables((tables) =>
                tables.map((table) =>
                    table.id === tableId
                        ? {
                              ...table,
                              fields: table.fields.map((f) =>
                                  f.id === fieldId ? { ...f, ...field } : f
                              ),
                          }
                        : table
                )
                tables.map((table) => {
                    if (table.id === tableId) {
                        return updateTableFn(table);
                    }

                    return table;
                })
            );

            const table = await db.getTable({ diagramId, id: tableId });

@@ -664,10 +685,7 @@ export const ChartDBProvider: React.FC<
                db.updateTable({
                    id: tableId,
                    attributes: {
                        ...table,
                        fields: table.fields.map((f) =>
                            f.id === fieldId ? { ...f, ...field } : f
                        ),
                        ...updateTableFn(table),
                    },
                }),
            ]);

@@ -694,19 +712,29 @@ export const ChartDBProvider: React.FC<
            fieldId: string,
            options = { updateHistory: true }
        ) => {
            const updateTableFn = (table: DBTable) => {
                const updatedTable: DBTable = {
                    ...table,
                    fields: table.fields.filter((f) => f.id !== fieldId),
                } satisfies DBTable;

                updatedTable.indexes = getTableIndexesWithPrimaryKey({
                    table: updatedTable,
                });

                return updatedTable;
            };

            const fields = getTable(tableId)?.fields ?? [];
            const prevField = getField(tableId, fieldId);
            setTables((tables) =>
                tables.map((table) =>
                    table.id === tableId
                        ? {
                              ...table,
                              fields: table.fields.filter(
                                  (f) => f.id !== fieldId
                              ),
                          }
                        : table
                )
                tables.map((table) => {
                    if (table.id === tableId) {
                        return updateTableFn(table);
                    }

                    return table;
                })
            );

            events.emit({

@@ -730,8 +758,7 @@ export const ChartDBProvider: React.FC<
                db.updateTable({
                    id: tableId,
                    attributes: {
                        ...table,
                        fields: table.fields.filter((f) => f.id !== fieldId),
                        ...updateTableFn(table),
                    },
                }),
            ]);
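The hunks above call getTableIndexesWithPrimaryKey after every table or field mutation, but its implementation is not part of this compare. As a rough, hypothetical sketch of the idea (recompute the index list so a primary key is represented as a synthetic index entry that exporters can later pick up by name), something along these lines would fit the call sites; the simplified types and the pk_ naming are assumptions for illustration, not chartdb's actual code.

```typescript
// Hypothetical sketch only: these shapes are simplified stand-ins for
// chartdb's real DBField/DBIndex/DBTable types.
interface SketchField {
    id: string;
    name: string;
    primaryKey: boolean;
}

interface SketchIndex {
    id: string;
    name: string;
    unique: boolean;
    fieldIds: string[];
    isPrimaryKey?: boolean;
}

interface SketchTable {
    id: string;
    fields: SketchField[];
    indexes: SketchIndex[];
}

// Recompute the index list so the primary key appears as one synthetic
// index entry and stale PK entries are dropped whenever fields change.
const getTableIndexesWithPrimaryKeySketch = ({
    table,
}: {
    table: SketchTable;
}): SketchIndex[] => {
    const pkFields = table.fields.filter((f) => f.primaryKey);
    const regularIndexes = table.indexes.filter((idx) => !idx.isPrimaryKey);

    if (pkFields.length === 0) {
        return regularIndexes;
    }

    // Keep an existing PK index entry (and its custom name) if there is one.
    const existingPk = table.indexes.find((idx) => idx.isPrimaryKey);

    return [
        ...regularIndexes,
        {
            id: existingPk?.id ?? `pk_${table.id}`,
            name: existingPk?.name ?? `pk_${table.id}`,
            unique: true,
            fieldIds: pkFields.map((f) => f.id),
            isPrimaryKey: true,
        },
    ];
};
```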
@@ -0,0 +1,98 @@
import React, { useCallback } from 'react';
import {
    DropdownMenu,
    DropdownMenuContent,
    DropdownMenuItem,
    DropdownMenuSeparator,
    DropdownMenuTrigger,
} from '@/components/dropdown-menu/dropdown-menu';
import { Button } from '@/components/button/button';
import { Ellipsis, Layers2, SquareArrowOutUpRight, Trash2 } from 'lucide-react';
import { useChartDB } from '@/hooks/use-chartdb';
import type { Diagram } from '@/lib/domain';
import { useStorage } from '@/hooks/use-storage';
import { cloneDiagram } from '@/lib/clone';
import { useTranslation } from 'react-i18next';

interface DiagramRowActionsMenuProps {
    diagram: Diagram;
    onOpen: () => void;
    refetch: () => void;
    numberOfDiagrams: number;
}

export const DiagramRowActionsMenu: React.FC<DiagramRowActionsMenuProps> = ({
    diagram,
    onOpen,
    refetch,
    numberOfDiagrams,
}) => {
    const { diagramId } = useChartDB();
    const { deleteDiagram, addDiagram } = useStorage();
    const { t } = useTranslation();

    const onDelete = useCallback(async () => {
        deleteDiagram(diagram.id);
        refetch();

        if (diagram.id === diagramId || numberOfDiagrams <= 1) {
            window.location.href = '/';
        }
    }, [deleteDiagram, diagram.id, diagramId, refetch, numberOfDiagrams]);

    const onDuplicate = useCallback(async () => {
        const duplicatedDiagram = cloneDiagram(diagram);

        const diagramToAdd = duplicatedDiagram.diagram;

        if (!diagramToAdd) {
            return;
        }

        diagramToAdd.name = `${diagram.name} (Copy)`;

        addDiagram({ diagram: diagramToAdd });
        refetch();
    }, [addDiagram, refetch, diagram]);

    return (
        <DropdownMenu>
            <DropdownMenuTrigger asChild>
                <Button
                    variant="ghost"
                    size="icon"
                    className="size-8 p-0"
                    onClick={(e) => e.stopPropagation()}
                >
                    <Ellipsis className="size-4" />
                </Button>
            </DropdownMenuTrigger>
            <DropdownMenuContent align="end">
                <DropdownMenuItem
                    onClick={onOpen}
                    className="flex justify-between gap-4"
                >
                    {t('open_diagram_dialog.diagram_actions.open')}
                    <SquareArrowOutUpRight className="size-3.5" />
                </DropdownMenuItem>

                <DropdownMenuItem
                    onClick={onDuplicate}
                    className="flex justify-between gap-4"
                >
                    {t('open_diagram_dialog.diagram_actions.duplicate')}
                    <Layers2 className="size-3.5" />
                </DropdownMenuItem>

                <DropdownMenuSeparator />
                <DropdownMenuItem
                    onClick={onDelete}
                    className="flex justify-between gap-4 text-red-700"
                >
                    {t('open_diagram_dialog.diagram_actions.delete')}
                    <Trash2 className="size-3.5 text-red-700" />
                </DropdownMenuItem>
            </DropdownMenuContent>
        </DropdownMenu>
    );
};
@@ -27,6 +27,7 @@ import { useTranslation } from 'react-i18next';
import { useNavigate } from 'react-router-dom';
import type { BaseDialogProps } from '../common/base-dialog-props';
import { useDebounce } from '@/hooks/use-debounce';
import { DiagramRowActionsMenu } from './diagram-row-actions-menu/diagram-row-actions-menu';

export interface OpenDiagramDialogProps extends BaseDialogProps {
    canClose?: boolean;

@@ -46,21 +47,22 @@ export const OpenDiagramDialog: React.FC<OpenDiagramDialogProps> = ({
        string | undefined
    >();

    useEffect(() => {
        setSelectedDiagramId(undefined);
    }, [dialog.open]);
    const fetchDiagrams = useCallback(async () => {
        const diagrams = await listDiagrams({ includeTables: true });
        setDiagrams(
            diagrams.sort(
                (a, b) => b.updatedAt.getTime() - a.updatedAt.getTime()
            )
        );
    }, [listDiagrams]);

    useEffect(() => {
        const fetchDiagrams = async () => {
            const diagrams = await listDiagrams({ includeTables: true });
            setDiagrams(
                diagrams.sort(
                    (a, b) => b.updatedAt.getTime() - a.updatedAt.getTime()
                )
            );
        };
        if (!dialog.open) {
            return;
        }
        setSelectedDiagramId(undefined);
        fetchDiagrams();
    }, [listDiagrams, setDiagrams, dialog.open]);
    }, [dialog.open, fetchDiagrams]);

    const openDiagram = useCallback(
        (diagramId: string) => {

@@ -166,6 +168,7 @@ export const OpenDiagramDialog: React.FC<OpenDiagramDialogProps> = ({
                                'open_diagram_dialog.table_columns.tables_count'
                            )}
                        </TableHead>
                        <TableHead />
                    </TableRow>
                </TableHeader>
                <TableBody>

@@ -221,6 +224,19 @@ export const OpenDiagramDialog: React.FC<OpenDiagramDialogProps> = ({
                            <TableCell className="text-center">
                                {diagram.tables?.length}
                            </TableCell>
                            <TableCell className="items-center p-0 pr-1 text-right">
                                <DiagramRowActionsMenu
                                    diagram={diagram}
                                    onOpen={() => {
                                        openDiagram(diagram.id);
                                        closeOpenDiagramDialog();
                                    }}
                                    numberOfDiagrams={
                                        diagrams.length
                                    }
                                    refetch={fetchDiagrams}
                                />
                            </TableCell>
                        </TableRow>
                    ))}
                </TableBody>
@@ -12,8 +12,8 @@ export const ar: LanguageTranslation = {
|
||||
custom_types: 'الأنواع المخصصة',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'قواعد البيانات',
|
||||
actions: {
|
||||
actions: 'الإجراءات',
|
||||
new: 'مخطط جديد',
|
||||
browse: 'تصفح...',
|
||||
save: 'حفظ',
|
||||
@@ -323,6 +323,12 @@ export const ar: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'إلغاء',
|
||||
open: 'فتح',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'فتح',
|
||||
duplicate: 'تكرار',
|
||||
delete: 'حذف الرسم التخطيطي',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,8 +12,8 @@ export const bn: LanguageTranslation = {
|
||||
custom_types: 'কাস্টম টাইপ',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'ডাটাবেস',
|
||||
actions: {
|
||||
actions: 'কার্য',
|
||||
new: 'নতুন ডায়াগ্রাম',
|
||||
browse: 'ব্রাউজ করুন...',
|
||||
save: 'সংরক্ষণ করুন',
|
||||
@@ -325,6 +325,12 @@ export const bn: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'বাতিল করুন',
|
||||
open: 'খুলুন',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'খুলুন',
|
||||
duplicate: 'ডুপ্লিকেট',
|
||||
delete: 'ডায়াগ্রাম মুছুন',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,8 +12,8 @@ export const de: LanguageTranslation = {
|
||||
custom_types: 'Benutzerdefinierte Typen',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'Datenbanken',
|
||||
actions: {
|
||||
actions: 'Aktionen',
|
||||
new: 'Neues Diagramm',
|
||||
browse: 'Durchsuchen...',
|
||||
save: 'Speichern',
|
||||
@@ -328,6 +328,12 @@ export const de: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'Abbrechen',
|
||||
open: 'Öffnen',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'Öffnen',
|
||||
duplicate: 'Duplizieren',
|
||||
delete: 'Diagramm löschen',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,8 +12,8 @@ export const en = {
|
||||
custom_types: 'Custom Types',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'Databases',
|
||||
actions: {
|
||||
actions: 'Actions',
|
||||
new: 'New Diagram',
|
||||
browse: 'Browse...',
|
||||
save: 'Save',
|
||||
@@ -316,6 +316,12 @@ export const en = {
|
||||
},
|
||||
cancel: 'Cancel',
|
||||
open: 'Open',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'Open',
|
||||
duplicate: 'Duplicate',
|
||||
delete: 'Delete Diagram',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,8 +12,8 @@ export const es: LanguageTranslation = {
|
||||
custom_types: 'Tipos Personalizados',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'Bases de Datos',
|
||||
actions: {
|
||||
actions: 'Acciones',
|
||||
new: 'Nuevo Diagrama',
|
||||
browse: 'Examinar...',
|
||||
save: 'Guardar',
|
||||
@@ -326,6 +326,12 @@ export const es: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'Cancelar',
|
||||
open: 'Abrir',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'Abrir',
|
||||
duplicate: 'Duplicar',
|
||||
delete: 'Eliminar Diagrama',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,8 +12,8 @@ export const fr: LanguageTranslation = {
|
||||
custom_types: 'Types Personnalisés',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'Bases de Données',
|
||||
actions: {
|
||||
actions: 'Actions',
|
||||
new: 'Nouveau Diagramme',
|
||||
browse: 'Parcourir...',
|
||||
save: 'Enregistrer',
|
||||
@@ -323,6 +323,12 @@ export const fr: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'Annuler',
|
||||
open: 'Ouvrir',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'Ouvrir',
|
||||
duplicate: 'Dupliquer',
|
||||
delete: 'Supprimer le diagramme',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,8 +12,8 @@ export const gu: LanguageTranslation = {
|
||||
custom_types: 'કસ્ટમ ટાઇપ',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'ડેટાબેસેસ',
|
||||
actions: {
|
||||
actions: 'ક્રિયાઓ',
|
||||
new: 'નવું ડાયાગ્રામ',
|
||||
browse: 'બ્રાઉજ કરો...',
|
||||
save: 'સાચવો',
|
||||
@@ -325,6 +325,12 @@ export const gu: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'રદ કરો',
|
||||
open: 'ખોલો',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'ખોલો',
|
||||
duplicate: 'ડુપ્લિકેટ',
|
||||
delete: 'ડાયાગ્રામ કાઢી નાખો',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,8 +12,8 @@ export const hi: LanguageTranslation = {
|
||||
custom_types: 'कस्टम टाइप',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'डेटाबेस',
|
||||
actions: {
|
||||
actions: 'कार्य',
|
||||
new: 'नया आरेख',
|
||||
browse: 'ब्राउज़ करें...',
|
||||
save: 'सहेजें',
|
||||
@@ -327,6 +327,12 @@ export const hi: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'रद्द करें',
|
||||
open: 'खोलें',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'खोलें',
|
||||
duplicate: 'डुप्लिकेट',
|
||||
delete: 'डायग्राम हटाएं',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,8 +12,8 @@ export const hr: LanguageTranslation = {
|
||||
custom_types: 'Prilagođeni Tipovi',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'Baze Podataka',
|
||||
actions: {
|
||||
actions: 'Akcije',
|
||||
new: 'Novi Dijagram',
|
||||
browse: 'Pregledaj...',
|
||||
save: 'Spremi',
|
||||
@@ -320,6 +320,12 @@ export const hr: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'Odustani',
|
||||
open: 'Otvori',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'Otvori',
|
||||
duplicate: 'Dupliciraj',
|
||||
delete: 'Obriši dijagram',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,8 +12,8 @@ export const id_ID: LanguageTranslation = {
|
||||
custom_types: 'Tipe Kustom',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'Basis Data',
|
||||
actions: {
|
||||
actions: 'Aksi',
|
||||
new: 'Diagram Baru',
|
||||
browse: 'Jelajahi...',
|
||||
save: 'Simpan',
|
||||
@@ -324,6 +324,12 @@ export const id_ID: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'Batal',
|
||||
open: 'Buka',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'Buka',
|
||||
duplicate: 'Duplikat',
|
||||
delete: 'Hapus Diagram',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,8 +12,8 @@ export const ja: LanguageTranslation = {
|
||||
custom_types: 'カスタムタイプ',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'データベース',
|
||||
actions: {
|
||||
actions: 'アクション',
|
||||
new: '新しいダイアグラム',
|
||||
browse: '参照...',
|
||||
save: '保存',
|
||||
@@ -329,6 +329,12 @@ export const ja: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'キャンセル',
|
||||
open: '開く',
|
||||
|
||||
diagram_actions: {
|
||||
open: '開く',
|
||||
duplicate: '複製',
|
||||
delete: 'ダイアグラムを削除',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,8 +12,8 @@ export const ko_KR: LanguageTranslation = {
|
||||
custom_types: '사용자 지정 타입',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: '데이터베이스',
|
||||
actions: {
|
||||
actions: '작업',
|
||||
new: '새 다이어그램',
|
||||
browse: '찾아보기...',
|
||||
save: '저장',
|
||||
@@ -324,6 +324,12 @@ export const ko_KR: LanguageTranslation = {
|
||||
},
|
||||
cancel: '취소',
|
||||
open: '열기',
|
||||
|
||||
diagram_actions: {
|
||||
open: '열기',
|
||||
duplicate: '복제',
|
||||
delete: '다이어그램 삭제',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,8 +12,8 @@ export const mr: LanguageTranslation = {
|
||||
custom_types: 'कस्टम प्रकार',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'डेटाबेस',
|
||||
actions: {
|
||||
actions: 'क्रिया',
|
||||
new: 'नवीन आरेख',
|
||||
browse: 'ब्राउज करा...',
|
||||
save: 'जतन करा',
|
||||
@@ -330,6 +330,12 @@ export const mr: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'रद्द करा',
|
||||
open: 'उघडा',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'उघडा',
|
||||
duplicate: 'डुप्लिकेट',
|
||||
delete: 'आरेख हटवा',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,8 +12,8 @@ export const ne: LanguageTranslation = {
|
||||
custom_types: 'कस्टम प्रकारहरू',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'डाटाबेसहरू',
|
||||
actions: {
|
||||
actions: 'कार्यहरू',
|
||||
new: 'नयाँ डायाग्राम',
|
||||
browse: 'ब्राउज गर्नुहोस्...',
|
||||
save: 'सुरक्षित गर्नुहोस्',
|
||||
@@ -327,6 +327,12 @@ export const ne: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'रद्द गर्नुहोस्',
|
||||
open: 'खोल्नुहोस्',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'खोल्नुहोस्',
|
||||
duplicate: 'डुप्लिकेट',
|
||||
delete: 'डायग्राम मेटाउनुहोस्',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,8 +12,8 @@ export const pt_BR: LanguageTranslation = {
|
||||
custom_types: 'Tipos Personalizados',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'Bancos de Dados',
|
||||
actions: {
|
||||
actions: 'Ações',
|
||||
new: 'Novo Diagrama',
|
||||
browse: 'Navegar...',
|
||||
save: 'Salvar',
|
||||
@@ -326,6 +326,12 @@ export const pt_BR: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'Cancelar',
|
||||
open: 'Abrir',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'Abrir',
|
||||
duplicate: 'Duplicar',
|
||||
delete: 'Excluir Diagrama',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,8 +12,8 @@ export const ru: LanguageTranslation = {
|
||||
custom_types: 'Пользовательские типы',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'Базы данных',
|
||||
actions: {
|
||||
actions: 'Действия',
|
||||
new: 'Новая диаграмма',
|
||||
browse: 'Обзор...',
|
||||
save: 'Сохранить',
|
||||
@@ -323,6 +323,12 @@ export const ru: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'Отмена',
|
||||
open: 'Открыть',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'Открыть',
|
||||
duplicate: 'Дублировать',
|
||||
delete: 'Удалить диаграмму',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,8 +12,8 @@ export const te: LanguageTranslation = {
|
||||
custom_types: 'కస్టమ్ టైప్స్',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'డేటాబేస్లు',
|
||||
actions: {
|
||||
actions: 'చర్యలు',
|
||||
new: 'కొత్త డైగ్రాం',
|
||||
browse: 'బ్రాఉజ్ చేయండి...',
|
||||
save: 'సేవ్',
|
||||
@@ -327,6 +327,12 @@ export const te: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'రద్దు',
|
||||
open: 'తెరవు',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'తెరవు',
|
||||
duplicate: 'నకలు',
|
||||
delete: 'డైగ్రామ్ తొలగించు',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,8 +12,8 @@ export const tr: LanguageTranslation = {
|
||||
custom_types: 'Özel Tipler',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'Veritabanları',
|
||||
actions: {
|
||||
actions: 'Eylemler',
|
||||
new: 'Yeni Diyagram',
|
||||
browse: 'Gözat...',
|
||||
save: 'Kaydet',
|
||||
@@ -322,6 +322,12 @@ export const tr: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'İptal',
|
||||
open: 'Aç',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'Aç',
|
||||
duplicate: 'Kopyala',
|
||||
delete: 'Diyagramı Sil',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,8 +12,8 @@ export const uk: LanguageTranslation = {
|
||||
custom_types: 'Користувацькі типи',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'Бази даних',
|
||||
actions: {
|
||||
actions: 'Дії',
|
||||
new: 'Нова діаграма',
|
||||
browse: 'Огляд...',
|
||||
save: 'Зберегти',
|
||||
@@ -324,6 +324,12 @@ export const uk: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'Скасувати',
|
||||
open: 'Відкрити',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'Відкрити',
|
||||
duplicate: 'Дублювати',
|
||||
delete: 'Видалити діаграму',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,8 +12,8 @@ export const vi: LanguageTranslation = {
|
||||
custom_types: 'Kiểu tùy chỉnh',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'Cơ sở dữ liệu',
|
||||
actions: {
|
||||
actions: 'Hành động',
|
||||
new: 'Sơ đồ mới',
|
||||
browse: 'Duyệt...',
|
||||
save: 'Lưu',
|
||||
@@ -324,6 +324,12 @@ export const vi: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'Hủy',
|
||||
open: 'Mở',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'Mở',
|
||||
duplicate: 'Nhân bản',
|
||||
delete: 'Xóa sơ đồ',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,8 +12,8 @@ export const zh_CN: LanguageTranslation = {
|
||||
custom_types: '自定义类型',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: '数据库',
|
||||
actions: {
|
||||
actions: '操作',
|
||||
new: '新建关系图',
|
||||
browse: '浏览...',
|
||||
save: '保存',
|
||||
@@ -321,6 +321,12 @@ export const zh_CN: LanguageTranslation = {
|
||||
},
|
||||
cancel: '取消',
|
||||
open: '打开',
|
||||
|
||||
diagram_actions: {
|
||||
open: '打开',
|
||||
duplicate: '复制',
|
||||
delete: '删除图表',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,8 +12,8 @@ export const zh_TW: LanguageTranslation = {
|
||||
custom_types: '自定義類型',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: '資料庫',
|
||||
actions: {
|
||||
actions: '操作',
|
||||
new: '新增圖表',
|
||||
browse: '瀏覽...',
|
||||
save: '儲存',
|
||||
@@ -320,6 +320,12 @@ export const zh_TW: LanguageTranslation = {
|
||||
},
|
||||
cancel: '取消',
|
||||
open: '開啟',
|
||||
|
||||
diagram_actions: {
|
||||
open: '開啟',
|
||||
duplicate: '複製',
|
||||
delete: '刪除圖表',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -1,20 +1,10 @@
|
||||
import { describe, it, expect, vi } from 'vitest';
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { exportBaseSQL } from '../export-sql-script';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import type { Diagram } from '@/lib/domain/diagram';
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import type { DBField } from '@/lib/domain/db-field';
|
||||
|
||||
// Mock the dbml/core importer
|
||||
vi.mock('@dbml/core', () => ({
|
||||
importer: {
|
||||
import: vi.fn((sql: string) => {
|
||||
// Return a simplified DBML for testing
|
||||
return sql;
|
||||
}),
|
||||
},
|
||||
}));
|
||||
|
||||
describe('DBML Export - SQL Generation Tests', () => {
|
||||
// Helper to generate test IDs and timestamps
|
||||
let idCounter = 0;
|
||||
@@ -124,6 +114,96 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('should not create duplicate index for composite primary key', () => {
|
||||
const tableId = testId();
|
||||
const field1Id = testId();
|
||||
const field2Id = testId();
|
||||
const field3Id = testId();
|
||||
|
||||
const diagram: Diagram = createDiagram({
|
||||
id: testId(),
|
||||
name: 'Landlord System',
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
tables: [
|
||||
createTable({
|
||||
id: tableId,
|
||||
name: 'users_master_table',
|
||||
schema: 'landlord',
|
||||
fields: [
|
||||
createField({
|
||||
id: field1Id,
|
||||
name: 'master_user_id',
|
||||
type: { id: 'bigint', name: 'bigint' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: field2Id,
|
||||
name: 'tenant_id',
|
||||
type: { id: 'bigint', name: 'bigint' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: field3Id,
|
||||
name: 'tenant_user_id',
|
||||
type: { id: 'bigint', name: 'bigint' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'enabled',
|
||||
type: { id: 'boolean', name: 'boolean' },
|
||||
primaryKey: false,
|
||||
nullable: true,
|
||||
unique: false,
|
||||
}),
|
||||
],
|
||||
indexes: [
|
||||
{
|
||||
id: testId(),
|
||||
name: 'idx_users_master_table_master_user_id_tenant_id_tenant_user_id',
|
||||
unique: false,
|
||||
fieldIds: [field1Id, field2Id, field3Id],
|
||||
createdAt: testTime,
|
||||
},
|
||||
{
|
||||
id: testId(),
|
||||
name: 'index_1',
|
||||
unique: true,
|
||||
fieldIds: [field2Id, field3Id],
|
||||
createdAt: testTime,
|
||||
},
|
||||
],
|
||||
}),
|
||||
],
|
||||
relationships: [],
|
||||
});
|
||||
|
||||
const sql = exportBaseSQL({
|
||||
diagram,
|
||||
targetDatabaseType: DatabaseType.POSTGRESQL,
|
||||
isDBMLFlow: true,
|
||||
});
|
||||
|
||||
// Should contain composite primary key constraint
|
||||
expect(sql).toContain(
|
||||
'PRIMARY KEY (master_user_id, tenant_id, tenant_user_id)'
|
||||
);
|
||||
|
||||
// Should NOT contain the duplicate index for the primary key fields
|
||||
expect(sql).not.toContain(
|
||||
'CREATE INDEX idx_users_master_table_master_user_id_tenant_id_tenant_user_id'
|
||||
);
|
||||
|
||||
// Should still contain the unique index on subset of fields
|
||||
expect(sql).toContain('CREATE UNIQUE INDEX index_1');
|
||||
});
|
||||
|
||||
it('should handle single primary keys inline', () => {
|
||||
const diagram: Diagram = createDiagram({
|
||||
id: testId(),
|
||||
@@ -516,7 +596,7 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
});
|
||||
|
||||
// Should create a valid table without primary key
|
||||
expect(sql).toContain('CREATE TABLE experiment_logs');
|
||||
expect(sql).toContain('CREATE TABLE "experiment_logs"');
|
||||
expect(sql).not.toContain('PRIMARY KEY');
|
||||
});
|
||||
|
||||
@@ -631,11 +711,11 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
});
|
||||
|
||||
// Should create both tables
|
||||
expect(sql).toContain('CREATE TABLE guilds');
|
||||
expect(sql).toContain('CREATE TABLE guild_members');
|
||||
expect(sql).toContain('CREATE TABLE "guilds"');
|
||||
expect(sql).toContain('CREATE TABLE "guild_members"');
|
||||
// Should create foreign key
|
||||
expect(sql).toContain(
|
||||
'ALTER TABLE guild_members ADD CONSTRAINT fk_guild_members_guild FOREIGN KEY (guild_id) REFERENCES guilds (id)'
|
||||
'ALTER TABLE "guild_members" ADD CONSTRAINT fk_guild_members_guild FOREIGN KEY (guild_id) REFERENCES "guilds" (id);'
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -709,12 +789,9 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
isDBMLFlow: true,
|
||||
});
|
||||
|
||||
// Should create schemas
|
||||
expect(sql).toContain('CREATE SCHEMA IF NOT EXISTS transportation');
|
||||
expect(sql).toContain('CREATE SCHEMA IF NOT EXISTS magic');
|
||||
// Should use schema-qualified table names
|
||||
expect(sql).toContain('CREATE TABLE transportation.portals');
|
||||
expect(sql).toContain('CREATE TABLE magic.spells');
|
||||
expect(sql).toContain('CREATE TABLE "transportation"."portals"');
|
||||
expect(sql).toContain('CREATE TABLE "magic"."spells"');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -761,7 +838,7 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
});
|
||||
|
||||
// Should still create table structure
|
||||
expect(sql).toContain('CREATE TABLE empty_table');
|
||||
expect(sql).toContain('CREATE TABLE "empty_table"');
|
||||
expect(sql).toContain('(\n\n)');
|
||||
});
|
||||
|
||||
|
||||
File diff suppressed because it is too large
@@ -178,7 +178,15 @@ export function exportMSSQL({
        })
        .join(',\n')}${
        table.fields.filter((f) => f.primaryKey).length > 0
            ? `,\n PRIMARY KEY (${table.fields
            ? `,\n ${(() => {
                  // Find PK index to get the constraint name
                  const pkIndex = table.indexes.find(
                      (idx) => idx.isPrimaryKey
                  );
                  return pkIndex?.name
                      ? `CONSTRAINT [${pkIndex.name}] `
                      : '';
              })()}PRIMARY KEY (${table.fields
                  .filter((f) => f.primaryKey)
                  .map((f) => `[${f.name}]`)
                  .join(', ')})`

@@ -313,7 +313,15 @@ export function exportMySQL({
        .join(',\n')}${
        // Add PRIMARY KEY as table constraint
        primaryKeyFields.length > 0
            ? `,\n PRIMARY KEY (${primaryKeyFields
            ? `,\n ${(() => {
                  // Find PK index to get the constraint name
                  const pkIndex = table.indexes.find(
                      (idx) => idx.isPrimaryKey
                  );
                  return pkIndex?.name
                      ? `CONSTRAINT \`${pkIndex.name}\` `
                      : '';
              })()}PRIMARY KEY (${primaryKeyFields
                  .map((f) => `\`${f.name}\``)
                  .join(', ')})`
            : ''

@@ -325,7 +325,15 @@ export function exportPostgreSQL({
        })
        .join(',\n')}${
        primaryKeyFields.length > 0
            ? `,\n PRIMARY KEY (${primaryKeyFields
            ? `,\n ${(() => {
                  // Find PK index to get the constraint name
                  const pkIndex = table.indexes.find(
                      (idx) => idx.isPrimaryKey
                  );
                  return pkIndex?.name
                      ? `CONSTRAINT "${pkIndex.name}" `
                      : '';
              })()}PRIMARY KEY (${primaryKeyFields
                  .map((f) => `"${f.name}"`)
                  .join(', ')})`
            : ''
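To see what the new named-constraint branch emits, here is a small standalone sketch that mirrors the PostgreSQL template above. The table literal and the pk_users name are made up for illustration, and the field/index shapes are simplified stand-ins for chartdb's real types.

```typescript
// Illustrative only: shows what the named-PK branch evaluates to.
const table = {
    fields: [
        { name: 'tenant_id', primaryKey: true },
        { name: 'user_id', primaryKey: true },
        { name: 'enabled', primaryKey: false },
    ],
    indexes: [{ name: 'pk_users', isPrimaryKey: true }],
};

const pkIndex = table.indexes.find((idx) => idx.isPrimaryKey);
const pkFields = table.fields.filter((f) => f.primaryKey);

// PostgreSQL-style quoting, mirroring the exportPostgreSQL branch above.
const constraintClause = `${
    pkIndex?.name ? `CONSTRAINT "${pkIndex.name}" ` : ''
}PRIMARY KEY (${pkFields.map((f) => `"${f.name}"`).join(', ')})`;

console.log(constraintClause);
// CONSTRAINT "pk_users" PRIMARY KEY ("tenant_id", "user_id")
```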
@@ -20,6 +20,38 @@ const simplifyDataType = (typeName: string): string => {
    return typeMap[typeName.toLowerCase()] || typeName;
};

// Helper function to properly quote table/schema names with special characters
const getQuotedTableName = (
    table: DBTable,
    isDBMLFlow: boolean = false
): string => {
    // Check if a name is already quoted
    const isAlreadyQuoted = (name: string) => {
        return (
            (name.startsWith('"') && name.endsWith('"')) ||
            (name.startsWith('`') && name.endsWith('`')) ||
            (name.startsWith('[') && name.endsWith(']'))
        );
    };

    // Only add quotes if needed and not already quoted
    const quoteIfNeeded = (name: string) => {
        if (isAlreadyQuoted(name)) {
            return name;
        }
        const needsQuoting = /[^a-zA-Z0-9_]/.test(name) || isDBMLFlow;
        return needsQuoting ? `"${name}"` : name;
    };

    if (table.schema) {
        const quotedSchema = quoteIfNeeded(table.schema);
        const quotedTable = quoteIfNeeded(table.name);
        return `${quotedSchema}.${quotedTable}`;
    } else {
        return quoteIfNeeded(table.name);
    }
};

export const exportBaseSQL = ({
    diagram,
    targetDatabaseType,
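A quick, informal illustration of how that quoting rule behaves; this standalone copy of the inner helper and the example inputs are hypothetical, not taken from the repository.

```typescript
// Standalone copy of the quoting rule above, with illustrative inputs.
const isAlreadyQuoted = (name: string) =>
    (name.startsWith('"') && name.endsWith('"')) ||
    (name.startsWith('`') && name.endsWith('`')) ||
    (name.startsWith('[') && name.endsWith(']'));

const quoteIfNeeded = (name: string, isDBMLFlow = false) => {
    if (isAlreadyQuoted(name)) return name;
    const needsQuoting = /[^a-zA-Z0-9_]/.test(name) || isDBMLFlow;
    return needsQuoting ? `"${name}"` : name;
};

console.log(quoteIfNeeded('users'));         // users
console.log(quoteIfNeeded('user profiles')); // "user profiles"
console.log(quoteIfNeeded('"Orders"'));      // "Orders" (already quoted, passed through)
console.log(quoteIfNeeded('users', true));   // "users" (DBML flow quotes defensively)
```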
@@ -63,18 +95,21 @@ export const exportBaseSQL = ({
|
||||
let sqlScript = '';
|
||||
|
||||
// First create the CREATE SCHEMA statements for all the found schemas based on tables
|
||||
const schemas = new Set<string>();
|
||||
tables.forEach((table) => {
|
||||
if (table.schema) {
|
||||
schemas.add(table.schema);
|
||||
}
|
||||
});
|
||||
// Skip schema creation for DBML flow as DBML doesn't support CREATE SCHEMA syntax
|
||||
if (!isDBMLFlow) {
|
||||
const schemas = new Set<string>();
|
||||
tables.forEach((table) => {
|
||||
if (table.schema) {
|
||||
schemas.add(table.schema);
|
||||
}
|
||||
});
|
||||
|
||||
// Add CREATE SCHEMA statements if any schemas exist
|
||||
schemas.forEach((schema) => {
|
||||
sqlScript += `CREATE SCHEMA IF NOT EXISTS ${schema};\n`;
|
||||
});
|
||||
if (schemas.size > 0) sqlScript += '\n'; // Add newline only if schemas were added
|
||||
// Add CREATE SCHEMA statements if any schemas exist
|
||||
schemas.forEach((schema) => {
|
||||
sqlScript += `CREATE SCHEMA IF NOT EXISTS "${schema}";\n`;
|
||||
});
|
||||
if (schemas.size > 0) sqlScript += '\n'; // Add newline only if schemas were added
|
||||
}
|
||||
|
||||
// Add CREATE TYPE statements for ENUMs and COMPOSITE types from diagram.customTypes
|
||||
if (diagram.customTypes && diagram.customTypes.length > 0) {
|
||||
@@ -166,9 +201,7 @@ export const exportBaseSQL = ({
|
||||
|
||||
// Loop through each non-view table to generate the SQL statements
|
||||
nonViewTables.forEach((table) => {
|
||||
const tableName = table.schema
|
||||
? `${table.schema}.${table.name}`
|
||||
: table.name;
|
||||
const tableName = getQuotedTableName(table, isDBMLFlow);
|
||||
sqlScript += `CREATE TABLE ${tableName} (\n`;
|
||||
|
||||
// Check for composite primary keys
|
||||
@@ -313,21 +346,33 @@ export const exportBaseSQL = ({
|
||||
}
|
||||
}
|
||||
|
||||
// Handle PRIMARY KEY constraint - only add inline if not composite
|
||||
if (field.primaryKey && !hasCompositePrimaryKey) {
|
||||
// Handle PRIMARY KEY constraint - only add inline if no PK index with custom name
|
||||
const pkIndex = table.indexes.find((idx) => idx.isPrimaryKey);
|
||||
if (field.primaryKey && !hasCompositePrimaryKey && !pkIndex?.name) {
|
||||
sqlScript += ' PRIMARY KEY';
|
||||
}
|
||||
|
||||
// Add a comma after each field except the last one (or before composite primary key)
|
||||
if (index < table.fields.length - 1 || hasCompositePrimaryKey) {
|
||||
// Add a comma after each field except the last one (or before PK constraint)
|
||||
const needsPKConstraint =
|
||||
hasCompositePrimaryKey ||
|
||||
(primaryKeyFields.length === 1 && pkIndex?.name);
|
||||
if (index < table.fields.length - 1 || needsPKConstraint) {
|
||||
sqlScript += ',\n';
|
||||
}
|
||||
});
|
||||
|
||||
// Add composite primary key constraint if needed
|
||||
if (hasCompositePrimaryKey) {
|
||||
// Add primary key constraint if needed (for composite PKs or single PK with custom name)
|
||||
const pkIndex = table.indexes.find((idx) => idx.isPrimaryKey);
|
||||
if (
|
||||
hasCompositePrimaryKey ||
|
||||
(primaryKeyFields.length === 1 && pkIndex?.name)
|
||||
) {
|
||||
const pkFieldNames = primaryKeyFields.map((f) => f.name).join(', ');
|
||||
sqlScript += `\n PRIMARY KEY (${pkFieldNames})`;
|
||||
if (pkIndex?.name) {
|
||||
sqlScript += `\n CONSTRAINT ${pkIndex.name} PRIMARY KEY (${pkFieldNames})`;
|
||||
} else {
|
||||
sqlScript += `\n PRIMARY KEY (${pkFieldNames})`;
|
||||
}
|
||||
}
|
||||
|
||||
sqlScript += '\n);\n';
|
||||
@@ -349,12 +394,33 @@ export const exportBaseSQL = ({
|
||||
|
||||
// Generate SQL for indexes
|
||||
table.indexes.forEach((index) => {
|
||||
const fieldNames = index.fieldIds
|
||||
.map(
|
||||
(fieldId) =>
|
||||
table.fields.find((field) => field.id === fieldId)?.name
|
||||
// Skip the primary key index (it's already handled as a constraint)
|
||||
if (index.isPrimaryKey) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Get the fields for this index
|
||||
const indexFields = index.fieldIds
|
||||
.map((fieldId) => table.fields.find((f) => f.id === fieldId))
|
||||
.filter(
|
||||
(field): field is NonNullable<typeof field> =>
|
||||
field !== undefined
|
||||
);
|
||||
|
||||
// Skip if this index exactly matches the primary key fields
|
||||
// This prevents creating redundant indexes for composite primary keys
|
||||
if (
|
||||
primaryKeyFields.length > 0 &&
|
||||
primaryKeyFields.length === indexFields.length &&
|
||||
primaryKeyFields.every((pk) =>
|
||||
indexFields.some((field) => field.id === pk.id)
|
||||
)
|
||||
.filter(Boolean)
|
||||
) {
|
||||
return; // Skip this index as it's redundant with the primary key
|
||||
}
|
||||
|
||||
const fieldNames = indexFields
|
||||
.map((field) => field.name)
|
||||
.join(', ');
|
||||
|
||||
if (fieldNames) {
|
||||
@@ -432,12 +498,9 @@ export const exportBaseSQL = ({
|
||||
return;
|
||||
}
|
||||
|
||||
const fkTableName = fkTable.schema
|
||||
? `${fkTable.schema}.${fkTable.name}`
|
||||
: fkTable.name;
|
||||
const refTableName = refTable.schema
|
||||
? `${refTable.schema}.${refTable.name}`
|
||||
: refTable.name;
|
||||
const fkTableName = getQuotedTableName(fkTable, isDBMLFlow);
|
||||
const refTableName = getQuotedTableName(refTable, isDBMLFlow);
|
||||
|
||||
sqlScript += `ALTER TABLE ${fkTableName} ADD CONSTRAINT ${relationship.name} FOREIGN KEY (${fkField.name}) REFERENCES ${refTableName} (${refField.name});\n`;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -0,0 +1,350 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('PostgreSQL Import - Quoted Identifiers with Special Characters', () => {
|
||||
describe('CREATE TABLE with quoted identifiers', () => {
|
||||
it('should handle tables with quoted schema and table names', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "my-schema"."user-profiles" (
|
||||
id serial PRIMARY KEY,
|
||||
name text NOT NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
const table = result.tables[0];
|
||||
expect(table.schema).toBe('my-schema');
|
||||
expect(table.name).toBe('user-profiles');
|
||||
});
|
||||
|
||||
it('should handle tables with spaces in schema and table names', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "user schema"."profile table" (
|
||||
"user id" integer PRIMARY KEY,
|
||||
"full name" varchar(255)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
const table = result.tables[0];
|
||||
expect(table.schema).toBe('user schema');
|
||||
expect(table.name).toBe('profile table');
|
||||
expect(table.columns).toBeDefined();
|
||||
expect(table.columns.length).toBeGreaterThan(0);
|
||||
// Note: Column names with spaces might be parsed differently
|
||||
});
|
||||
|
||||
it('should handle mixed quoted and unquoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "special-schema".users (
|
||||
id serial PRIMARY KEY
|
||||
);
|
||||
CREATE TABLE public."special-table" (
|
||||
id serial PRIMARY KEY
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(2);
|
||||
|
||||
expect(result.tables[0].schema).toBe('special-schema');
|
||||
expect(result.tables[0].name).toBe('users');
|
||||
expect(result.tables[1].schema).toBe('public');
|
||||
expect(result.tables[1].name).toBe('special-table');
|
||||
});
|
||||
|
||||
it('should handle tables with dots in names', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "schema.with.dots"."table.with.dots" (
|
||||
id serial PRIMARY KEY,
|
||||
data text
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
const table = result.tables[0];
|
||||
expect(table.schema).toBe('schema.with.dots');
|
||||
expect(table.name).toBe('table.with.dots');
|
||||
});
|
||||
});
|
||||
|
||||
describe('FOREIGN KEY with quoted identifiers', () => {
|
||||
it('should handle inline REFERENCES with quoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "auth-schema"."users" (
|
||||
"user-id" serial PRIMARY KEY,
|
||||
email text UNIQUE
|
||||
);
|
||||
|
||||
CREATE TABLE "app-schema"."user-profiles" (
|
||||
id serial PRIMARY KEY,
|
||||
"user-id" integer REFERENCES "auth-schema"."users"("user-id"),
|
||||
bio text
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const relationship = result.relationships[0];
|
||||
expect(relationship.sourceTable).toBe('user-profiles');
|
||||
expect(relationship.targetTable).toBe('users');
|
||||
expect(relationship.sourceColumn).toBe('user-id');
|
||||
expect(relationship.targetColumn).toBe('user-id');
|
||||
});
|
||||
|
||||
it('should handle FOREIGN KEY constraints with quoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "schema one"."table one" (
|
||||
"id field" serial PRIMARY KEY,
|
||||
"data field" text
|
||||
);
|
||||
|
||||
CREATE TABLE "schema two"."table two" (
|
||||
id serial PRIMARY KEY,
|
||||
"ref id" integer,
|
||||
FOREIGN KEY ("ref id") REFERENCES "schema one"."table one"("id field")
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const relationship = result.relationships[0];
|
||||
expect(relationship.sourceTable).toBe('table two');
|
||||
expect(relationship.targetTable).toBe('table one');
|
||||
expect(relationship.sourceColumn).toBe('ref id');
|
||||
expect(relationship.targetColumn).toBe('id field');
|
||||
});
|
||||
|
||||
it('should handle named constraints with quoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "auth"."users" (
|
||||
id serial PRIMARY KEY
|
||||
);
|
||||
|
||||
CREATE TABLE "app"."profiles" (
|
||||
id serial PRIMARY KEY,
|
||||
user_id integer,
|
||||
CONSTRAINT "fk-user-profile" FOREIGN KEY (user_id) REFERENCES "auth"."users"(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const relationship = result.relationships[0];
|
||||
// Note: Constraint names with special characters might be normalized
|
||||
expect(relationship.name).toBeDefined();
|
||||
});
|
||||
|
||||
it('should handle ALTER TABLE ADD CONSTRAINT with quoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "user-schema"."user-accounts" (
|
||||
"account-id" serial PRIMARY KEY,
|
||||
username text
|
||||
);
|
||||
|
||||
CREATE TABLE "order-schema"."user-orders" (
|
||||
"order-id" serial PRIMARY KEY,
|
||||
"account-id" integer
|
||||
);
|
||||
|
||||
ALTER TABLE "order-schema"."user-orders"
|
||||
ADD CONSTRAINT "fk_orders_accounts"
|
||||
FOREIGN KEY ("account-id")
|
||||
REFERENCES "user-schema"."user-accounts"("account-id");
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const relationship = result.relationships[0];
|
||||
expect(relationship.name).toBe('fk_orders_accounts');
|
||||
expect(relationship.sourceTable).toBe('user-orders');
|
||||
expect(relationship.targetTable).toBe('user-accounts');
|
||||
expect(relationship.sourceColumn).toBe('account-id');
|
||||
expect(relationship.targetColumn).toBe('account-id');
|
||||
});
|
||||
|
||||
it('should handle complex mixed quoting scenarios', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE auth.users (
|
||||
id serial PRIMARY KEY
|
||||
);
|
||||
|
||||
CREATE TABLE "app-data"."user_profiles" (
|
||||
profile_id serial PRIMARY KEY,
|
||||
"user-id" integer REFERENCES auth.users(id)
|
||||
);
|
||||
|
||||
CREATE TABLE "app-data".posts (
|
||||
id serial PRIMARY KEY,
|
||||
profile_id integer
|
||||
);
|
||||
|
||||
ALTER TABLE "app-data".posts
|
||||
ADD CONSTRAINT fk_posts_profiles
|
||||
FOREIGN KEY (profile_id)
|
||||
REFERENCES "app-data"."user_profiles"(profile_id);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(3);
|
||||
expect(result.relationships).toHaveLength(2);
|
||||
|
||||
// Verify the relationships were correctly identified
|
||||
const profilesTable = result.tables.find(
|
||||
(t) => t.name === 'user_profiles'
|
||||
);
|
||||
expect(profilesTable?.schema).toBe('app-data');
|
||||
|
||||
const postsTable = result.tables.find((t) => t.name === 'posts');
|
||||
expect(postsTable?.schema).toBe('app-data');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge cases and special scenarios', () => {
|
||||
it('should handle Unicode characters in quoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "схема"."таблица" (
|
||||
"идентификатор" serial PRIMARY KEY,
|
||||
"данные" text
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
const table = result.tables[0];
|
||||
expect(table.schema).toBe('схема');
|
||||
expect(table.name).toBe('таблица');
|
||||
expect(table.columns).toBeDefined();
|
||||
expect(table.columns.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it('should handle parentheses in quoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "schema(prod)"."users(archived)" (
|
||||
id serial PRIMARY KEY,
|
||||
data text
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
const table = result.tables[0];
|
||||
expect(table.schema).toBe('schema(prod)');
|
||||
expect(table.name).toBe('users(archived)');
|
||||
});
|
||||
|
||||
it('should handle forward slashes in quoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "api/v1"."users/profiles" (
|
||||
id serial PRIMARY KEY
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
const table = result.tables[0];
|
||||
expect(table.schema).toBe('api/v1');
|
||||
expect(table.name).toBe('users/profiles');
|
||||
});
|
||||
|
||||
it('should handle IF NOT EXISTS with quoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE IF NOT EXISTS "test-schema"."test-table" (
|
||||
id serial PRIMARY KEY
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
const table = result.tables[0];
|
||||
expect(table.schema).toBe('test-schema');
|
||||
expect(table.name).toBe('test-table');
|
||||
});
|
||||
|
||||
it('should handle ONLY keyword with quoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE ONLY "parent-schema"."parent-table" (
|
||||
id serial PRIMARY KEY
|
||||
);
|
||||
|
||||
ALTER TABLE ONLY "parent-schema"."parent-table"
|
||||
ADD CONSTRAINT "unique-constraint" UNIQUE (id);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
// ONLY keyword might trigger warnings
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
const table = result.tables[0];
|
||||
expect(table.schema).toBe('parent-schema');
|
||||
expect(table.name).toBe('parent-table');
|
||||
});
|
||||
|
||||
it('should handle self-referencing foreign keys with quoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "org-schema"."departments" (
|
||||
"dept-id" serial PRIMARY KEY,
|
||||
"parent-dept-id" integer REFERENCES "org-schema"."departments"("dept-id"),
|
||||
name text
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const relationship = result.relationships[0];
|
||||
expect(relationship.sourceTable).toBe('departments');
|
||||
expect(relationship.targetTable).toBe('departments'); // Self-reference
|
||||
expect(relationship.sourceColumn).toBe('parent-dept-id');
|
||||
expect(relationship.targetColumn).toBe('dept-id');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -490,16 +490,21 @@ function extractForeignKeysFromCreateTable(
|
||||
|
||||
const tableBody = tableBodyMatch[1];
|
||||
|
||||
// Pattern for inline REFERENCES - more flexible to handle various formats
|
||||
// Pattern for inline REFERENCES - handles quoted and unquoted identifiers
|
||||
const inlineRefPattern =
|
||||
/["']?(\w+)["']?\s+(?:\w+(?:\([^)]*\))?(?:\[[^\]]*\])?(?:\s+\w+)*\s+)?REFERENCES\s+(?:["']?(\w+)["']?\.)?["']?(\w+)["']?\s*\(\s*["']?(\w+)["']?\s*\)/gi;
|
||||
/(?:"([^"]+)"|([^"\s,()]+))\s+(?:\w+(?:\([^)]*\))?(?:\[[^\]]*\])?(?:\s+\w+)*\s+)?REFERENCES\s+(?:(?:"([^"]+)"|([^"\s.]+))\.)?(?:"([^"]+)"|([^"\s.(]+))\s*\(\s*(?:"([^"]+)"|([^"\s,)]+))\s*\)/gi;
|
||||
|
||||
let match;
|
||||
while ((match = inlineRefPattern.exec(tableBody)) !== null) {
|
||||
const sourceColumn = match[1];
|
||||
const targetSchema = match[2] || 'public';
|
||||
const targetTable = match[3];
|
||||
const targetColumn = match[4];
|
||||
// Extract values from appropriate match groups
|
||||
// Groups: 1=quoted source col, 2=unquoted source col,
|
||||
// 3=quoted schema, 4=unquoted schema,
|
||||
// 5=quoted target table, 6=unquoted target table,
|
||||
// 7=quoted target col, 8=unquoted target col
|
||||
const sourceColumn = match[1] || match[2];
|
||||
const targetSchema = match[3] || match[4] || 'public';
|
||||
const targetTable = match[5] || match[6];
|
||||
const targetColumn = match[7] || match[8];
|
||||
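// Illustrative example (not from the source): a column definition such as
//   "author-id" integer NOT NULL REFERENCES "blog-schema"."users"("user-id")
// fills the quoted groups (1, 3, 5, 7) with author-id, blog-schema, users and user-id,
// while an unquoted form like author_id integer REFERENCES users(id) falls through
// to groups 2, 6 and 8 and the schema defaults to 'public'.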
|
||||
const targetTableKey = `${targetSchema}.${targetTable}`;
|
||||
const targetTableId = tableMap[targetTableKey];
|
||||
@@ -521,15 +526,16 @@ function extractForeignKeysFromCreateTable(
|
||||
}
|
||||
}
|
||||
|
||||
// Pattern for FOREIGN KEY constraints
|
||||
// Pattern for FOREIGN KEY constraints - handles quoted and unquoted identifiers
|
||||
const fkConstraintPattern =
|
||||
/FOREIGN\s+KEY\s*\(\s*["']?(\w+)["']?\s*\)\s*REFERENCES\s+(?:["']?(\w+)["']?\.)?["']?(\w+)["']?\s*\(\s*["']?(\w+)["']?\s*\)/gi;
|
||||
/FOREIGN\s+KEY\s*\(\s*(?:"([^"]+)"|([^"\s,)]+))\s*\)\s*REFERENCES\s+(?:(?:"([^"]+)"|([^"\s.]+))\.)?(?:"([^"]+)"|([^"\s.(]+))\s*\(\s*(?:"([^"]+)"|([^"\s,)]+))\s*\)/gi;
|
||||
|
||||
while ((match = fkConstraintPattern.exec(tableBody)) !== null) {
|
||||
const sourceColumn = match[1];
|
||||
const targetSchema = match[2] || 'public';
|
||||
const targetTable = match[3];
|
||||
const targetColumn = match[4];
|
||||
// Extract values from appropriate match groups
|
||||
const sourceColumn = match[1] || match[2];
|
||||
const targetSchema = match[3] || match[4] || 'public';
|
||||
const targetTable = match[5] || match[6];
|
||||
const targetColumn = match[7] || match[8];
|
||||
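// Illustrative example (not from the source): a table-level constraint such as
//   FOREIGN KEY ("author-id") REFERENCES "blog-schema"."users"("user-id")
// is captured the same way, with quoted identifiers landing in the odd-numbered
// groups and unquoted identifiers in the even-numbered ones.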
|
||||
const targetTableKey = `${targetSchema}.${targetTable}`;
|
||||
const targetTableId = tableMap[targetTableKey];
|
||||
@@ -585,12 +591,16 @@ export async function fromPostgres(
|
||||
? stmt.sql.substring(createTableIndex)
|
||||
: stmt.sql;
|
||||
|
||||
// Updated regex to properly handle quoted identifiers with special characters
|
||||
// Matches: schema.table, "schema"."table", "schema".table, schema."table"
|
||||
const tableMatch = sqlFromCreate.match(
|
||||
/CREATE\s+TABLE(?:\s+IF\s+NOT\s+EXISTS)?(?:\s+ONLY)?\s+(?:"?([^"\s.]+)"?\.)?["'`]?([^"'`\s.(]+)["'`]?/i
|
||||
/CREATE\s+TABLE(?:\s+IF\s+NOT\s+EXISTS)?(?:\s+ONLY)?\s+(?:(?:"([^"]+)"|([^"\s.]+))\.)?(?:"([^"]+)"|([^"\s.(]+))/i
|
||||
);
|
||||
if (tableMatch) {
|
||||
const schemaName = tableMatch[1] || 'public';
|
||||
const tableName = tableMatch[2];
|
||||
// Extract schema and table names from the appropriate match groups
|
||||
// Groups: 1=quoted schema, 2=unquoted schema, 3=quoted table, 4=unquoted table
|
||||
const schemaName = tableMatch[1] || tableMatch[2] || 'public';
|
||||
const tableName = tableMatch[3] || tableMatch[4];
|
||||
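// Illustrative example (not from the source): for
//   CREATE TABLE IF NOT EXISTS "test-schema"."test-table" (...)
// groups 1 and 3 capture test-schema and test-table, while a plain
//   CREATE TABLE users (...)
// only fills group 4 and the schema defaults to 'public'.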
const tableKey = `${schemaName}.${tableName}`;
|
||||
tableMap[tableKey] = generateId();
|
||||
}
|
||||
@@ -938,12 +948,16 @@ export async function fromPostgres(
|
||||
? stmt.sql.substring(createTableIndex)
|
||||
: stmt.sql;
|
||||
|
||||
// Updated regex to properly handle quoted identifiers with special characters
|
||||
// Matches: schema.table, "schema"."table", "schema".table, schema."table"
|
||||
const tableMatch = sqlFromCreate.match(
|
||||
/CREATE\s+TABLE(?:\s+IF\s+NOT\s+EXISTS)?(?:\s+ONLY)?\s+(?:"?([^"\s.]+)"?\.)?["'`]?([^"'`\s.(]+)["'`]?/i
|
||||
/CREATE\s+TABLE(?:\s+IF\s+NOT\s+EXISTS)?(?:\s+ONLY)?\s+(?:(?:"([^"]+)"|([^"\s.]+))\.)?(?:"([^"]+)"|([^"\s.(]+))/i
|
||||
);
|
||||
if (tableMatch) {
|
||||
const schemaName = tableMatch[1] || 'public';
|
||||
const tableName = tableMatch[2];
|
||||
// Extract schema and table names from the appropriate match groups
|
||||
// Groups: 1=quoted schema, 2=unquoted schema, 3=quoted table, 4=unquoted table
|
||||
const schemaName = tableMatch[1] || tableMatch[2] || 'public';
|
||||
const tableName = tableMatch[3] || tableMatch[4];
|
||||
const tableKey = `${schemaName}.${tableName}`;
|
||||
const tableId = tableMap[tableKey];
|
||||
|
||||
@@ -1130,18 +1144,22 @@ export async function fromPostgres(
|
||||
} else if (stmt.type === 'alter' && !stmt.parsed) {
|
||||
// Handle ALTER TABLE statements that failed to parse
|
||||
// Extract foreign keys using regex as fallback
|
||||
// Updated regex to handle quoted identifiers properly
|
||||
const alterFKMatch = stmt.sql.match(
|
||||
/ALTER\s+TABLE\s+(?:ONLY\s+)?(?:"?([^"\s.]+)"?\.)?["']?([^"'\s.(]+)["']?\s+ADD\s+CONSTRAINT\s+["']?([^"'\s]+)["']?\s+FOREIGN\s+KEY\s*\(["']?([^"'\s)]+)["']?\)\s+REFERENCES\s+(?:"?([^"\s.]+)"?\.)?["']?([^"'\s.(]+)["']?\s*\(["']?([^"'\s)]+)["']?\)/i
|
||||
/ALTER\s+TABLE\s+(?:ONLY\s+)?(?:(?:"([^"]+)"|([^"\s.]+))\.)?(?:"([^"]+)"|([^"\s.(]+))\s+ADD\s+CONSTRAINT\s+(?:"([^"]+)"|([^"\s]+))\s+FOREIGN\s+KEY\s*\((?:"([^"]+)"|([^"\s)]+))\)\s+REFERENCES\s+(?:(?:"([^"]+)"|([^"\s.]+))\.)?(?:"([^"]+)"|([^"\s.(]+))\s*\((?:"([^"]+)"|([^"\s)]+))\)/i
|
||||
);
|
||||
|
||||
if (alterFKMatch) {
|
||||
const sourceSchema = alterFKMatch[1] || 'public';
|
||||
const sourceTable = alterFKMatch[2];
|
||||
const constraintName = alterFKMatch[3];
|
||||
const sourceColumn = alterFKMatch[4];
|
||||
const targetSchema = alterFKMatch[5] || 'public';
|
||||
const targetTable = alterFKMatch[6];
|
||||
const targetColumn = alterFKMatch[7];
|
||||
// Extract values from appropriate match groups
|
||||
const sourceSchema =
|
||||
alterFKMatch[1] || alterFKMatch[2] || 'public';
|
||||
const sourceTable = alterFKMatch[3] || alterFKMatch[4];
|
||||
const constraintName = alterFKMatch[5] || alterFKMatch[6];
|
||||
const sourceColumn = alterFKMatch[7] || alterFKMatch[8];
|
||||
const targetSchema =
|
||||
alterFKMatch[9] || alterFKMatch[10] || 'public';
|
||||
const targetTable = alterFKMatch[11] || alterFKMatch[12];
|
||||
const targetColumn = alterFKMatch[13] || alterFKMatch[14];
|
||||
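// Illustrative example (not from the source): for
//   ALTER TABLE ONLY "parent-schema"."parent-table"
//     ADD CONSTRAINT "fk-child" FOREIGN KEY ("parent-id") REFERENCES "parent-schema"."parents"("id");
// the quoted groups yield parent-schema/parent-table (1, 3), fk-child (5),
// parent-id (7), parent-schema/parents (9, 11) and id (13); unquoted identifiers
// land in the even-numbered groups instead.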
|
||||
const sourceTableId = getTableIdWithSchemaSupport(
|
||||
tableMap,
|
||||
|
||||
114
src/lib/dbml/dbml-export/__tests__/composite-pk-export.test.ts
Normal file
@@ -0,0 +1,114 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { generateDBMLFromDiagram } from '../dbml-export';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import type { Diagram } from '@/lib/domain/diagram';
|
||||
import { generateId } from '@/lib/utils';
|
||||
|
||||
describe('Composite Primary Key Name Export', () => {
|
||||
it('should export composite primary key with name in DBML', () => {
|
||||
const diagram: Diagram = {
|
||||
id: generateId(),
|
||||
name: 'Test',
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
tables: [
|
||||
{
|
||||
id: generateId(),
|
||||
name: 'users_master_table',
|
||||
schema: 'landlord',
|
||||
x: 0,
|
||||
y: 0,
|
||||
color: '#FFF',
|
||||
isView: false,
|
||||
createdAt: Date.now(),
|
||||
fields: [
|
||||
{
|
||||
id: generateId(),
|
||||
name: 'master_user_id',
|
||||
type: { id: 'bigint', name: 'bigint' },
|
||||
nullable: false,
|
||||
primaryKey: true,
|
||||
unique: false,
|
||||
createdAt: Date.now(),
|
||||
},
|
||||
{
|
||||
id: generateId(),
|
||||
name: 'tenant_id',
|
||||
type: { id: 'bigint', name: 'bigint' },
|
||||
nullable: false,
|
||||
primaryKey: true,
|
||||
unique: false,
|
||||
createdAt: Date.now(),
|
||||
},
|
||||
{
|
||||
id: generateId(),
|
||||
name: 'tenant_user_id',
|
||||
type: { id: 'bigint', name: 'bigint' },
|
||||
nullable: false,
|
||||
primaryKey: true,
|
||||
unique: false,
|
||||
createdAt: Date.now(),
|
||||
},
|
||||
{
|
||||
id: generateId(),
|
||||
name: 'enabled',
|
||||
type: { id: 'boolean', name: 'boolean' },
|
||||
nullable: true,
|
||||
primaryKey: false,
|
||||
unique: false,
|
||||
createdAt: Date.now(),
|
||||
},
|
||||
],
|
||||
indexes: [
|
||||
{
|
||||
id: generateId(),
|
||||
name: 'users_master_table_index_1',
|
||||
unique: true,
|
||||
fieldIds: ['dummy1', 'dummy2'], // Will be replaced
|
||||
createdAt: Date.now(),
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
relationships: [],
|
||||
};
|
||||
|
||||
// Fix field IDs in the index and add PK index
|
||||
const table = diagram.tables![0];
|
||||
const masterUserIdField = table.fields.find(
|
||||
(f) => f.name === 'master_user_id'
|
||||
);
|
||||
const tenantIdField = table.fields.find((f) => f.name === 'tenant_id');
|
||||
const tenantUserIdField = table.fields.find(
|
||||
(f) => f.name === 'tenant_user_id'
|
||||
);
|
||||
table.indexes[0].fieldIds = [tenantIdField!.id, tenantUserIdField!.id];
|
||||
|
||||
// Add the PK index with name
|
||||
table.indexes.push({
|
||||
id: generateId(),
|
||||
name: 'moshe',
|
||||
unique: true,
|
||||
isPrimaryKey: true,
|
||||
fieldIds: [
|
||||
masterUserIdField!.id,
|
||||
tenantIdField!.id,
|
||||
tenantUserIdField!.id,
|
||||
],
|
||||
createdAt: Date.now(),
|
||||
});
|
||||
|
||||
const result = generateDBMLFromDiagram(diagram);
|
||||
|
||||
// Check that the DBML contains the composite PK with name
|
||||
expect(result.standardDbml).toContain(
|
||||
'(master_user_id, tenant_id, tenant_user_id) [pk, name: "moshe"]'
|
||||
);
|
||||
|
||||
// Check that the unique index is also present
|
||||
expect(result.standardDbml).toContain(
|
||||
'(tenant_id, tenant_user_id) [unique, name: "users_master_table_index_1"]'
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -1383,12 +1383,9 @@ Ref "fk_0_table_2_id_fk":"table_1"."id" < "table_2"."id"
|
||||
const result = generateDBMLFromDiagram(diagram);
|
||||
|
||||
// Check that the inline DBML has proper indentation
|
||||
// Note: indexes on primary key fields should be filtered out
|
||||
expect(result.inlineDbml).toContain(`Table "table_1" {
|
||||
"id" bigint [pk, not null]
|
||||
|
||||
Indexes {
|
||||
id [name: "index_1"]
|
||||
}
|
||||
}`);
|
||||
|
||||
expect(result.inlineDbml).toContain(`Table "table_2" {
|
||||
|
||||
@@ -0,0 +1,438 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { generateDBMLFromDiagram } from '../dbml-export';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import type { Diagram } from '@/lib/domain/diagram';
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import type { DBField } from '@/lib/domain/db-field';
|
||||
|
||||
describe('DBML Export - Fix Multiline Table Names', () => {
|
||||
// Helper to generate test IDs and timestamps
|
||||
let idCounter = 0;
|
||||
const testId = () => `test-id-${++idCounter}`;
|
||||
const testTime = Date.now();
|
||||
|
||||
// Helper to create a field
|
||||
const createField = (overrides: Partial<DBField>): DBField =>
|
||||
({
|
||||
id: testId(),
|
||||
name: 'field',
|
||||
type: { id: 'text', name: 'text' },
|
||||
primaryKey: false,
|
||||
nullable: true,
|
||||
unique: false,
|
||||
createdAt: testTime,
|
||||
...overrides,
|
||||
}) as DBField;
|
||||
|
||||
// Helper to create a table
|
||||
const createTable = (overrides: Partial<DBTable>): DBTable =>
|
||||
({
|
||||
id: testId(),
|
||||
name: 'table',
|
||||
fields: [],
|
||||
indexes: [],
|
||||
createdAt: testTime,
|
||||
x: 0,
|
||||
y: 0,
|
||||
width: 200,
|
||||
...overrides,
|
||||
}) as DBTable;
|
||||
|
||||
// Helper to create a diagram
|
||||
const createDiagram = (overrides: Partial<Diagram>): Diagram =>
|
||||
({
|
||||
id: testId(),
|
||||
name: 'diagram',
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
tables: [],
|
||||
relationships: [],
|
||||
createdAt: testTime,
|
||||
updatedAt: testTime,
|
||||
...overrides,
|
||||
}) as Diagram;
|
||||
|
||||
describe('DBML Generation with Special Characters', () => {
|
||||
it('should handle table names with special characters', () => {
|
||||
const diagram = createDiagram({
|
||||
tables: [
|
||||
createTable({
|
||||
name: 'user-profiles',
|
||||
fields: [
|
||||
createField({
|
||||
name: 'id',
|
||||
type: { id: 'integer', name: 'integer' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
}),
|
||||
createField({
|
||||
name: 'user-name',
|
||||
type: { id: 'varchar', name: 'varchar' },
|
||||
nullable: true,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const result = generateDBMLFromDiagram(diagram);
|
||||
|
||||
// Should properly quote table names with special characters
|
||||
expect(result.standardDbml).toContain('Table "user-profiles"');
|
||||
|
||||
// Field names with special characters should also be quoted
|
||||
expect(result.standardDbml).toContain('"user-name"');
|
||||
|
||||
// Should not have any errors
|
||||
expect(result.error).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should handle schema-qualified table names', () => {
|
||||
const diagram = createDiagram({
|
||||
tables: [
|
||||
createTable({
|
||||
schema: 'my-schema',
|
||||
name: 'my-table',
|
||||
fields: [
|
||||
createField({
|
||||
name: 'id',
|
||||
type: { id: 'integer', name: 'integer' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const result = generateDBMLFromDiagram(diagram);
|
||||
|
||||
// Should properly quote schema and table names
|
||||
expect(result.standardDbml).toContain(
|
||||
'Table "my-schema"."my-table"'
|
||||
);
|
||||
|
||||
// Should not have any errors
|
||||
expect(result.error).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should handle table names with spaces', () => {
|
||||
const diagram = createDiagram({
|
||||
tables: [
|
||||
createTable({
|
||||
name: 'user profiles',
|
||||
fields: [
|
||||
createField({
|
||||
name: 'id',
|
||||
type: { id: 'integer', name: 'integer' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const result = generateDBMLFromDiagram(diagram);
|
||||
|
||||
// Should properly quote table names with spaces
|
||||
expect(result.standardDbml).toContain('Table "user profiles"');
|
||||
|
||||
// Should not have any errors
|
||||
expect(result.error).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should handle schema names with spaces', () => {
|
||||
const diagram = createDiagram({
|
||||
tables: [
|
||||
createTable({
|
||||
schema: 'my schema',
|
||||
name: 'my_table',
|
||||
fields: [
|
||||
createField({
|
||||
name: 'id',
|
||||
type: { id: 'integer', name: 'integer' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const result = generateDBMLFromDiagram(diagram);
|
||||
|
||||
// Should properly quote schema with spaces
|
||||
expect(result.standardDbml).toContain(
|
||||
'Table "my schema"."my_table"'
|
||||
);
|
||||
|
||||
// Should not have any errors
|
||||
expect(result.error).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should handle table names with dots', () => {
|
||||
const diagram = createDiagram({
|
||||
tables: [
|
||||
createTable({
|
||||
name: 'app.config',
|
||||
fields: [
|
||||
createField({
|
||||
name: 'id',
|
||||
type: { id: 'integer', name: 'integer' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const result = generateDBMLFromDiagram(diagram);
|
||||
|
||||
// Should properly quote table names with dots
|
||||
expect(result.standardDbml).toContain('Table "app.config"');
|
||||
|
||||
// Should not have any errors
|
||||
expect(result.error).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should not have line breaks in table declarations', () => {
|
||||
const diagram = createDiagram({
|
||||
tables: [
|
||||
createTable({
|
||||
schema: 'very-long-schema-name-with-dashes',
|
||||
name: 'very-long-table-name-with-special-characters',
|
||||
fields: [
|
||||
createField({
|
||||
name: 'id',
|
||||
type: { id: 'integer', name: 'integer' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const result = generateDBMLFromDiagram(diagram);
|
||||
|
||||
// Table declaration should be on a single line
|
||||
const tableDeclarations =
|
||||
result.standardDbml.match(/Table\s+[^{]+\{/g) || [];
|
||||
tableDeclarations.forEach((decl) => {
|
||||
// Should not contain newlines before the opening brace
|
||||
expect(decl).not.toContain('\n');
|
||||
});
|
||||
|
||||
// The full qualified name should be on one line
|
||||
expect(result.standardDbml).toMatch(
|
||||
/Table\s+"very-long-schema-name-with-dashes"\."very-long-table-name-with-special-characters"\s*\{/
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Multiple tables and relationships', () => {
|
||||
it('should handle multiple tables with special characters', () => {
|
||||
const parentTableId = testId();
|
||||
const childTableId = testId();
|
||||
const parentIdField = testId();
|
||||
const childParentIdField = testId();
|
||||
|
||||
const diagram = createDiagram({
|
||||
tables: [
|
||||
createTable({
|
||||
id: parentTableId,
|
||||
schema: 'auth-schema',
|
||||
name: 'user-accounts',
|
||||
fields: [
|
||||
createField({
|
||||
id: parentIdField,
|
||||
name: 'id',
|
||||
type: { id: 'uuid', name: 'uuid' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
createTable({
|
||||
id: childTableId,
|
||||
schema: 'app-schema',
|
||||
name: 'user-profiles',
|
||||
fields: [
|
||||
createField({
|
||||
name: 'id',
|
||||
type: { id: 'uuid', name: 'uuid' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
}),
|
||||
createField({
|
||||
id: childParentIdField,
|
||||
name: 'account-id',
|
||||
type: { id: 'uuid', name: 'uuid' },
|
||||
nullable: false,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
],
|
||||
relationships: [
|
||||
{
|
||||
id: testId(),
|
||||
name: 'fk_profiles_accounts',
|
||||
sourceTableId: childTableId,
|
||||
targetTableId: parentTableId,
|
||||
sourceFieldId: childParentIdField,
|
||||
targetFieldId: parentIdField,
|
||||
sourceCardinality: 'many',
|
||||
targetCardinality: 'one',
|
||||
createdAt: testTime,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const result = generateDBMLFromDiagram(diagram);
|
||||
|
||||
// Should contain both tables properly quoted
|
||||
expect(result.standardDbml).toContain(
|
||||
'Table "auth-schema"."user-accounts"'
|
||||
);
|
||||
expect(result.standardDbml).toContain(
|
||||
'Table "app-schema"."user-profiles"'
|
||||
);
|
||||
|
||||
// Should contain the relationship reference
|
||||
expect(result.standardDbml).toContain('Ref');
|
||||
|
||||
// Should contain field names with dashes properly quoted
|
||||
expect(result.standardDbml).toContain('"account-id"');
|
||||
|
||||
// Should not have any errors
|
||||
expect(result.error).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should work correctly with inline DBML format', () => {
|
||||
const parentTableId = testId();
|
||||
const childTableId = testId();
|
||||
const parentIdField = testId();
|
||||
const childParentIdField = testId();
|
||||
|
||||
const diagram = createDiagram({
|
||||
tables: [
|
||||
createTable({
|
||||
id: parentTableId,
|
||||
name: 'parent-table',
|
||||
fields: [
|
||||
createField({
|
||||
id: parentIdField,
|
||||
name: 'id',
|
||||
type: { id: 'integer', name: 'integer' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
createTable({
|
||||
id: childTableId,
|
||||
name: 'child-table',
|
||||
fields: [
|
||||
createField({
|
||||
name: 'id',
|
||||
type: { id: 'integer', name: 'integer' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
}),
|
||||
createField({
|
||||
id: childParentIdField,
|
||||
name: 'parent-id',
|
||||
type: { id: 'integer', name: 'integer' },
|
||||
nullable: false,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
],
|
||||
relationships: [
|
||||
{
|
||||
id: testId(),
|
||||
name: 'fk_child_parent',
|
||||
sourceTableId: childTableId,
|
||||
targetTableId: parentTableId,
|
||||
sourceFieldId: childParentIdField,
|
||||
targetFieldId: parentIdField,
|
||||
sourceCardinality: 'many',
|
||||
targetCardinality: 'one',
|
||||
createdAt: testTime,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const result = generateDBMLFromDiagram(diagram);
|
||||
|
||||
// Both standard and inline should be generated
|
||||
expect(result.standardDbml).toBeDefined();
|
||||
expect(result.inlineDbml).toBeDefined();
|
||||
|
||||
// Inline version should contain inline references
|
||||
expect(result.inlineDbml).toContain('ref:');
|
||||
|
||||
// Both should properly quote table names
|
||||
expect(result.standardDbml).toContain('Table "parent-table"');
|
||||
expect(result.inlineDbml).toContain('Table "parent-table"');
|
||||
expect(result.standardDbml).toContain('Table "child-table"');
|
||||
expect(result.inlineDbml).toContain('Table "child-table"');
|
||||
|
||||
// Should not have any errors
|
||||
expect(result.error).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge cases', () => {
|
||||
it('should handle empty table names gracefully', () => {
|
||||
const diagram = createDiagram({
|
||||
tables: [
|
||||
createTable({
|
||||
name: '',
|
||||
fields: [
|
||||
createField({
|
||||
name: 'id',
|
||||
type: { id: 'integer', name: 'integer' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const result = generateDBMLFromDiagram(diagram);
|
||||
|
||||
// Should not throw error
|
||||
expect(result.error).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should handle Unicode characters in names', () => {
|
||||
const diagram = createDiagram({
|
||||
tables: [
|
||||
createTable({
|
||||
name: 'użytkownik',
|
||||
fields: [
|
||||
createField({
|
||||
name: 'identyfikator',
|
||||
type: { id: 'integer', name: 'integer' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const result = generateDBMLFromDiagram(diagram);
|
||||
|
||||
// Should handle Unicode characters
|
||||
expect(result.standardDbml).toContain('Table "użytkownik"');
|
||||
expect(result.standardDbml).toContain('"identyfikator"');
|
||||
|
||||
// Should not have any errors
|
||||
expect(result.error).toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -605,6 +605,62 @@ const fixTableBracketSyntax = (dbml: string): string => {
|
||||
);
|
||||
};
|
||||
|
||||
// Fix table names that have been broken across multiple lines
|
||||
const fixMultilineTableNames = (dbml: string): string => {
|
||||
// Match Table declarations that might have line breaks in the table name
|
||||
// This regex captures:
|
||||
// - Table keyword
|
||||
// - Optional quoted schema with dot
|
||||
// - Table name that might be broken across lines (until the opening brace)
|
||||
return dbml.replace(
|
||||
/Table\s+((?:"[^"]*"\.)?"[^"]*(?:\n[^"]*)*")\s*\{/g,
|
||||
(_, tableName) => {
|
||||
// Remove line breaks within the table name
|
||||
const fixedTableName = tableName.replace(/\n\s*/g, '');
|
||||
return `Table ${fixedTableName} {`;
|
||||
}
|
||||
);
|
||||
};
|
||||
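// Illustrative example (not from the source): if the importer emits
//   Table "very-long-schema"."very-long-
//   table-name" {
// this pass rejoins it into a single line:
//   Table "very-long-schema"."very-long-table-name" {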
|
||||
// Restore composite primary key names in the DBML
|
||||
const restoreCompositePKNames = (dbml: string, tables: DBTable[]): string => {
|
||||
if (!tables || tables.length === 0) return dbml;
|
||||
|
||||
let result = dbml;
|
||||
|
||||
tables.forEach((table) => {
|
||||
// Check if this table has a PK index with a name
|
||||
const pkIndex = table.indexes.find((idx) => idx.isPrimaryKey);
|
||||
if (pkIndex?.name) {
|
||||
const primaryKeyFields = table.fields.filter((f) => f.primaryKey);
|
||||
if (primaryKeyFields.length >= 1) {
|
||||
// Build the column list for the composite PK
|
||||
const columnList = primaryKeyFields
|
||||
.map((f) => f.name)
|
||||
.join(', ');
|
||||
|
||||
// Build the table identifier pattern
|
||||
const tableIdentifier = table.schema
|
||||
? `"${table.schema.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}"\\."${table.name.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}"`
|
||||
: `"${table.name.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}"`;
|
||||
|
||||
// Pattern to match the composite PK index line
|
||||
// Match patterns like: (col1, col2, col3) [pk]
|
||||
const pkPattern = new RegExp(
|
||||
`(Table ${tableIdentifier} \\{[^}]*?Indexes \\{[^}]*?)(\\(${columnList.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}\\) \\[pk\\])`,
|
||||
'gs'
|
||||
);
|
||||
|
||||
// Replace with the named version
|
||||
const replacement = `$1(${columnList}) [pk, name: "${pkIndex.name}"]`;
|
||||
result = result.replace(pkPattern, replacement);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return result;
|
||||
};
|
||||
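// Illustrative example (not from the source): for a table whose PK index is named
// "pk_users_composite", the generated line
//   (id, tenant_id) [pk]
// inside that table's Indexes block becomes
//   (id, tenant_id) [pk, name: "pk_users_composite"]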
|
||||
// Restore schema information that may have been stripped by the DBML importer
|
||||
const restoreTableSchemas = (dbml: string, tables: DBTable[]): string => {
|
||||
if (!tables || tables.length === 0) return dbml;
|
||||
@@ -870,14 +926,16 @@ export function generateDBMLFromDiagram(diagram: Diagram): DBMLExportResult {
|
||||
...table,
|
||||
name: safeTableName,
|
||||
fields: processedFields,
|
||||
indexes: (table.indexes || []).map((index) => ({
|
||||
...index,
|
||||
name: index.name
|
||||
? /[^\w]/.test(index.name)
|
||||
? `"${index.name.replace(/"/g, '\\"')}"`
|
||||
: index.name
|
||||
: `idx_${Math.random().toString(36).substring(2, 8)}`,
|
||||
})),
|
||||
indexes: (table.indexes || [])
|
||||
.filter((index) => !index.isPrimaryKey) // Filter out PK indexes as they're handled separately
|
||||
.map((index) => ({
|
||||
...index,
|
||||
name: index.name
|
||||
? /[^\w]/.test(index.name)
|
||||
? `"${index.name.replace(/"/g, '\\"')}"`
|
||||
: index.name
|
||||
: `idx_${Math.random().toString(36).substring(2, 8)}`,
|
||||
})),
|
||||
};
|
||||
};
|
||||
|
||||
@@ -928,10 +986,12 @@ export function generateDBMLFromDiagram(diagram: Diagram): DBMLExportResult {
|
||||
}
|
||||
|
||||
standard = normalizeCharTypeFormat(
|
||||
fixTableBracketSyntax(
|
||||
importer.import(
|
||||
baseScript,
|
||||
databaseTypeToImportFormat(diagram.databaseType)
|
||||
fixMultilineTableNames(
|
||||
fixTableBracketSyntax(
|
||||
importer.import(
|
||||
baseScript,
|
||||
databaseTypeToImportFormat(diagram.databaseType)
|
||||
)
|
||||
)
|
||||
)
|
||||
);
|
||||
@@ -939,6 +999,9 @@ export function generateDBMLFromDiagram(diagram: Diagram): DBMLExportResult {
|
||||
// Restore schema information that may have been stripped by DBML importer
|
||||
standard = restoreTableSchemas(standard, uniqueTables);
|
||||
|
||||
// Restore composite primary key names
|
||||
standard = restoreCompositePKNames(standard, uniqueTables);
|
||||
|
||||
// Prepend Enum DBML to the standard output
|
||||
if (enumsDBML) {
|
||||
standard = enumsDBML + '\n\n' + standard;
|
||||
|
||||
190
src/lib/dbml/dbml-import/__tests__/composite-pk-name.test.ts
Normal file

@@ -0,0 +1,190 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { importDBMLToDiagram } from '../dbml-import';
|
||||
import { exportPostgreSQL } from '@/lib/data/export-metadata/export-per-type/postgresql';
|
||||
import { exportMySQL } from '@/lib/data/export-metadata/export-per-type/mysql';
|
||||
import { exportMSSQL } from '@/lib/data/export-metadata/export-per-type/mssql';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
|
||||
describe('Composite Primary Key with Name', () => {
|
||||
it('should preserve composite primary key name in DBML import and SQL export', async () => {
|
||||
const dbmlContent = `
|
||||
Table "landlord"."users_master_table" {
|
||||
"master_user_id" bigint [not null]
|
||||
"tenant_id" bigint [not null]
|
||||
"tenant_user_id" bigint [not null]
|
||||
"enabled" boolean
|
||||
|
||||
Indexes {
|
||||
(master_user_id, tenant_id, tenant_user_id) [pk, name: "idx_users_master_table_master_user_id_tenant_id_tenant_user_id"]
|
||||
(tenant_id, tenant_user_id) [unique, name: "index_1"]
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
// Import DBML
|
||||
const diagram = await importDBMLToDiagram(dbmlContent, {
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
});
|
||||
|
||||
// Check that the composite PK name was captured
|
||||
expect(diagram.tables).toBeDefined();
|
||||
const table = diagram.tables![0];
|
||||
|
||||
// Check for the PK index
|
||||
const pkIndex = table.indexes.find((idx) => idx.isPrimaryKey);
|
||||
expect(pkIndex).toBeDefined();
|
||||
expect(pkIndex!.name).toBe(
|
||||
'idx_users_master_table_master_user_id_tenant_id_tenant_user_id'
|
||||
);
|
||||
|
||||
// Check that fields are marked as primary keys
|
||||
const pkFields = table.fields.filter((f) => f.primaryKey);
|
||||
expect(pkFields).toHaveLength(3);
|
||||
expect(pkFields.map((f) => f.name)).toEqual([
|
||||
'master_user_id',
|
||||
'tenant_id',
|
||||
'tenant_user_id',
|
||||
]);
|
||||
|
||||
// Check that we have both the PK index and the unique index
|
||||
expect(table.indexes).toHaveLength(2);
|
||||
const uniqueIndex = table.indexes.find((idx) => !idx.isPrimaryKey);
|
||||
expect(uniqueIndex!.name).toBe('index_1');
|
||||
expect(uniqueIndex!.unique).toBe(true);
|
||||
});
|
||||
|
||||
it('should export composite primary key with CONSTRAINT name in PostgreSQL', async () => {
|
||||
const dbmlContent = `
|
||||
Table "users" {
|
||||
"id" bigint [not null]
|
||||
"tenant_id" bigint [not null]
|
||||
|
||||
Indexes {
|
||||
(id, tenant_id) [pk, name: "pk_users_composite"]
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const diagram = await importDBMLToDiagram(dbmlContent, {
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
});
|
||||
|
||||
const sqlScript = exportPostgreSQL({ diagram });
|
||||
|
||||
// Check that the SQL contains the named constraint
|
||||
expect(sqlScript).toContain(
|
||||
'CONSTRAINT "pk_users_composite" PRIMARY KEY ("id", "tenant_id")'
|
||||
);
|
||||
expect(sqlScript).not.toContain('PRIMARY KEY ("id", "tenant_id"),'); // Should not have unnamed PK
|
||||
});
|
||||
|
||||
it('should export composite primary key with CONSTRAINT name in MySQL', async () => {
|
||||
const dbmlContent = `
|
||||
Table "orders" {
|
||||
"order_id" int [not null]
|
||||
"product_id" int [not null]
|
||||
|
||||
Indexes {
|
||||
(order_id, product_id) [pk, name: "orders_order_product_pk"]
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const diagram = await importDBMLToDiagram(dbmlContent, {
|
||||
databaseType: DatabaseType.MYSQL,
|
||||
});
|
||||
|
||||
const sqlScript = exportMySQL({ diagram });
|
||||
|
||||
// Check that the SQL contains the named constraint
|
||||
expect(sqlScript).toContain(
|
||||
'CONSTRAINT `orders_order_product_pk` PRIMARY KEY (`order_id`, `product_id`)'
|
||||
);
|
||||
});
|
||||
|
||||
it('should export composite primary key with CONSTRAINT name in MSSQL', async () => {
|
||||
const dbmlContent = `
|
||||
Table "products" {
|
||||
"category_id" int [not null]
|
||||
"product_id" int [not null]
|
||||
|
||||
Indexes {
|
||||
(category_id, product_id) [pk, name: "pk_products"]
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const diagram = await importDBMLToDiagram(dbmlContent, {
|
||||
databaseType: DatabaseType.SQL_SERVER,
|
||||
});
|
||||
|
||||
const sqlScript = exportMSSQL({ diagram });
|
||||
|
||||
// Check that the SQL contains the named constraint
|
||||
expect(sqlScript).toContain(
|
||||
'CONSTRAINT [pk_products] PRIMARY KEY ([category_id], [product_id])'
|
||||
);
|
||||
});
|
||||
|
||||
it('should merge duplicate PK index with name', async () => {
|
||||
const dbmlContent = `
|
||||
Table "test" {
|
||||
"a" int [not null]
|
||||
"b" int [not null]
|
||||
|
||||
Indexes {
|
||||
(a, b) [pk]
|
||||
(a, b) [name: "test_pk_name"]
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const diagram = await importDBMLToDiagram(dbmlContent, {
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
});
|
||||
|
||||
expect(diagram.tables).toBeDefined();
|
||||
const table = diagram.tables![0];
|
||||
|
||||
// Should capture the name from the duplicate index
|
||||
const pkIndex = table.indexes.find((idx) => idx.isPrimaryKey);
|
||||
expect(pkIndex).toBeDefined();
|
||||
expect(pkIndex!.name).toBe('test_pk_name');
|
||||
|
||||
// Should only have the PK index
|
||||
expect(table.indexes).toHaveLength(1);
|
||||
|
||||
// Fields should be marked as primary keys
|
||||
expect(table.fields.filter((f) => f.primaryKey)).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should handle composite PK without name', async () => {
|
||||
const dbmlContent = `
|
||||
Table "simple" {
|
||||
"x" int [not null]
|
||||
"y" int [not null]
|
||||
|
||||
Indexes {
|
||||
(x, y) [pk]
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const diagram = await importDBMLToDiagram(dbmlContent, {
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
});
|
||||
|
||||
expect(diagram.tables).toBeDefined();
|
||||
const table = diagram.tables![0];
|
||||
|
||||
// An implicit PK index is still created even when the composite PK has no explicit name
|
||||
const pkIndex = table.indexes.find((idx) => idx.isPrimaryKey);
|
||||
expect(pkIndex).toBeDefined();
|
||||
|
||||
const sqlScript = exportPostgreSQL({ diagram });
|
||||
|
||||
// The PRIMARY KEY is still emitted, with an auto-generated constraint name
|
||||
expect(sqlScript).toContain('PRIMARY KEY ("x", "y")');
|
||||
expect(sqlScript).toContain('CONSTRAINT');
|
||||
});
|
||||
});
|
||||
@@ -177,7 +177,7 @@ Table ranks {
|
||||
expect(wizardsTable?.fields).toHaveLength(11);
|
||||
|
||||
// Check indexes
|
||||
expect(wizardsTable?.indexes).toHaveLength(2);
|
||||
expect(wizardsTable?.indexes).toHaveLength(3);
|
||||
const emailIndex = wizardsTable?.indexes.find((idx) =>
|
||||
idx.name.includes('email')
|
||||
);
|
||||
@@ -920,7 +920,7 @@ Note dragon_note {
|
||||
expect(hoardsTable).toBeDefined();
|
||||
|
||||
// Verify all indexes are imported correctly
|
||||
expect(hoardsTable?.indexes).toHaveLength(3); // Should have 3 indexes as defined in DBML
|
||||
expect(hoardsTable?.indexes).toHaveLength(4); // 3 from DBML + 1 implicit PK index
|
||||
|
||||
// Verify named indexes
|
||||
const uniqueDragonIndex = hoardsTable?.indexes.find(
|
||||
@@ -1119,7 +1119,7 @@ Table "public_3"."comments" {
|
||||
).toBe('timestamp');
|
||||
|
||||
// Check posts indexes thoroughly
|
||||
expect(postsTable?.indexes).toHaveLength(2);
|
||||
expect(postsTable?.indexes).toHaveLength(3);
|
||||
|
||||
// Index 1: Composite unique index on (content, user_id)
|
||||
const compositeIndex = postsTable?.indexes.find(
|
||||
@@ -1154,7 +1154,7 @@ Table "public_3"."comments" {
|
||||
|
||||
// Check comments table
|
||||
expect(commentsTable?.fields).toHaveLength(5);
|
||||
expect(commentsTable?.indexes).toHaveLength(1);
|
||||
expect(commentsTable?.indexes).toHaveLength(2);
|
||||
|
||||
// Index: Unique index on id
|
||||
const idIndex = commentsTable?.indexes.find(
|
||||
|
||||
@@ -9,7 +9,7 @@ import { findDataTypeDataById } from '@/lib/data/data-types/data-types';
|
||||
import { defaultTableColor } from '@/lib/colors';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import type Field from '@dbml/core/types/model_structure/field';
|
||||
import type { DBIndex } from '@/lib/domain';
|
||||
import { getTableIndexesWithPrimaryKey, type DBIndex } from '@/lib/domain';
|
||||
import {
|
||||
DBCustomTypeKind,
|
||||
type DBCustomType,
|
||||
@@ -100,6 +100,7 @@ interface DBMLIndex {
|
||||
columns: (string | DBMLIndexColumn)[];
|
||||
unique?: boolean;
|
||||
name?: string;
|
||||
pk?: boolean; // Primary key index flag
|
||||
}
|
||||
|
||||
interface DBMLTable {
|
||||
@@ -387,15 +388,19 @@ export const importDBMLToDiagram = async (
|
||||
);
|
||||
}
|
||||
|
||||
// Generate a consistent index name
|
||||
// For PK indexes, only use the name if explicitly provided
|
||||
// For regular indexes, generate a default name if needed
|
||||
const indexName =
|
||||
dbmlIndex.name ||
|
||||
`idx_${table.name}_${indexColumns.join('_')}`;
|
||||
(!dbmlIndex.pk
|
||||
? `idx_${table.name}_${indexColumns.join('_')}`
|
||||
: undefined);
|
||||
|
||||
return {
|
||||
columns: indexColumns,
|
||||
unique: dbmlIndex.unique || false,
|
||||
name: indexName,
|
||||
pk: Boolean(dbmlIndex.pk) || false,
|
||||
};
|
||||
}) || [],
|
||||
});
|
||||
@@ -484,29 +489,126 @@ export const importDBMLToDiagram = async (
|
||||
};
|
||||
});
|
||||
|
||||
// Convert DBML indexes to ChartDB indexes
|
||||
const indexes: DBIndex[] =
|
||||
table.indexes?.map((dbmlIndex) => {
|
||||
const fieldIds = dbmlIndex.columns.map((columnName) => {
|
||||
const field = fields.find((f) => f.name === columnName);
|
||||
if (!field) {
|
||||
throw new Error(
|
||||
`Index references non-existent column: ${columnName}`
|
||||
);
|
||||
}
|
||||
return field.id;
|
||||
});
|
||||
// Process composite primary keys from indexes with [pk] attribute
|
||||
let compositePKFields: string[] = [];
|
||||
let compositePKIndexName: string | undefined;
|
||||
|
||||
return {
|
||||
id: generateId(),
|
||||
name:
|
||||
dbmlIndex.name ||
|
||||
`idx_${table.name}_${(dbmlIndex.columns as string[]).join('_')}`,
|
||||
fieldIds,
|
||||
unique: dbmlIndex.unique || false,
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
}) || [];
|
||||
// Find PK indexes and mark fields as primary keys
|
||||
table.indexes?.forEach((dbmlIndex) => {
|
||||
if (dbmlIndex.pk) {
|
||||
// Extract column names from the columns array
|
||||
compositePKFields = dbmlIndex.columns.map((col) =>
|
||||
typeof col === 'string' ? col : col.value
|
||||
);
|
||||
// Only store the name if it was explicitly provided (not undefined)
|
||||
if (dbmlIndex.name) {
|
||||
compositePKIndexName = dbmlIndex.name;
|
||||
}
|
||||
// Mark fields as primary keys
|
||||
dbmlIndex.columns.forEach((col) => {
|
||||
const columnName =
|
||||
typeof col === 'string' ? col : col.value;
|
||||
const field = fields.find((f) => f.name === columnName);
|
||||
if (field) {
|
||||
field.primaryKey = true;
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// If we found a PK without a name, look for a duplicate index with just a name
|
||||
if (compositePKFields.length > 0 && !compositePKIndexName) {
|
||||
table.indexes?.forEach((dbmlIndex) => {
|
||||
if (
|
||||
!dbmlIndex.pk &&
|
||||
dbmlIndex.name &&
|
||||
dbmlIndex.columns.length === compositePKFields.length
|
||||
) {
|
||||
// Check if columns match
|
||||
const indexColumns = dbmlIndex.columns.map((col) =>
|
||||
typeof col === 'string' ? col : col.value
|
||||
);
|
||||
if (
|
||||
indexColumns.every(
|
||||
(col, i) => col === compositePKFields[i]
|
||||
)
|
||||
) {
|
||||
compositePKIndexName = dbmlIndex.name;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Convert DBML indexes to ChartDB indexes (excluding PK indexes and their duplicates)
|
||||
const indexes: DBIndex[] =
|
||||
table.indexes
|
||||
?.filter((dbmlIndex) => {
|
||||
// Skip PK indexes - we'll handle them separately
|
||||
if (dbmlIndex.pk) return false;
|
||||
|
||||
// Skip duplicate indexes that match the composite PK
|
||||
// (when user has both [pk] and [name: "..."] on same fields)
|
||||
if (
|
||||
compositePKFields.length > 0 &&
|
||||
dbmlIndex.columns.length ===
|
||||
compositePKFields.length &&
|
||||
dbmlIndex.columns.every((col, i) => {
|
||||
const colName =
|
||||
typeof col === 'string' ? col : col.value;
|
||||
return colName === compositePKFields[i];
|
||||
})
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
})
|
||||
.map((dbmlIndex) => {
|
||||
const fieldIds = dbmlIndex.columns.map((columnName) => {
|
||||
const field = fields.find(
|
||||
(f) => f.name === columnName
|
||||
);
|
||||
if (!field) {
|
||||
throw new Error(
|
||||
`Index references non-existent column: ${columnName}`
|
||||
);
|
||||
}
|
||||
return field.id;
|
||||
});
|
||||
|
||||
return {
|
||||
id: generateId(),
|
||||
name:
|
||||
dbmlIndex.name ||
|
||||
`idx_${table.name}_${(dbmlIndex.columns as string[]).join('_')}`,
|
||||
fieldIds,
|
||||
unique: dbmlIndex.unique || false,
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
}) || [];
|
||||
|
||||
// Add PK as an index if it exists and has a name
|
||||
// Only create the PK index if there's an explicit name for it
|
||||
if (compositePKFields.length >= 1 && compositePKIndexName) {
|
||||
const pkFieldIds = compositePKFields.map((columnName) => {
|
||||
const field = fields.find((f) => f.name === columnName);
|
||||
if (!field) {
|
||||
throw new Error(
|
||||
`PK references non-existent column: ${columnName}`
|
||||
);
|
||||
}
|
||||
return field.id;
|
||||
});
|
||||
|
||||
indexes.push({
|
||||
id: generateId(),
|
||||
name: compositePKIndexName,
|
||||
fieldIds: pkFieldIds,
|
||||
unique: true,
|
||||
isPrimaryKey: true,
|
||||
createdAt: Date.now(),
|
||||
});
|
||||
}
|
||||
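// Illustrative example (not from the source): a DBML index entry like
//   (id, tenant_id) [pk, name: "pk_users_composite"]
// marks id and tenant_id as primaryKey above and is recorded here as a single
// index with isPrimaryKey: true and the explicit name preserved.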
|
||||
// Extract table note/comment
|
||||
let tableComment: string | undefined;
|
||||
@@ -521,7 +623,7 @@ export const importDBMLToDiagram = async (
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
const tableToReturn: DBTable = {
|
||||
id: generateId(),
|
||||
name: table.name.replace(/['"]/g, ''),
|
||||
schema:
|
||||
@@ -540,6 +642,13 @@ export const importDBMLToDiagram = async (
|
||||
createdAt: Date.now(),
|
||||
comments: tableComment,
|
||||
} satisfies DBTable;
|
||||
|
||||
return {
|
||||
...tableToReturn,
|
||||
indexes: getTableIndexesWithPrimaryKey({
|
||||
table: tableToReturn,
|
||||
}),
|
||||
};
|
||||
});
|
||||
|
||||
// Create relationships using the refs
|
||||
|
||||
192
src/lib/domain/__tests__/composite-pk-metadata-import.test.ts
Normal file
@@ -0,0 +1,192 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { createTablesFromMetadata } from '../db-table';
|
||||
import { DatabaseType } from '../database-type';
|
||||
import type { DatabaseMetadata } from '@/lib/data/import-metadata/metadata-types/database-metadata';
|
||||
|
||||
describe('Composite Primary Key Name from Metadata Import', () => {
|
||||
it('should capture composite primary key name from metadata indexes', () => {
|
||||
const metadata: DatabaseMetadata = {
|
||||
database_name: 'test_db',
|
||||
version: '',
|
||||
fk_info: [],
|
||||
pk_info: [
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
column: 'master_user_id',
|
||||
pk_def: 'PRIMARY KEY (master_user_id, tenant_id, tenant_user_id)',
|
||||
},
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
column: 'tenant_id',
|
||||
pk_def: 'PRIMARY KEY (master_user_id, tenant_id, tenant_user_id)',
|
||||
},
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
column: 'tenant_user_id',
|
||||
pk_def: 'PRIMARY KEY (master_user_id, tenant_id, tenant_user_id)',
|
||||
},
|
||||
],
|
||||
columns: [
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
name: 'master_user_id',
|
||||
ordinal_position: 1,
|
||||
type: 'bigint',
|
||||
character_maximum_length: null,
|
||||
precision: null,
|
||||
nullable: false,
|
||||
default: '',
|
||||
collation: '',
|
||||
comment: '',
|
||||
},
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
name: 'tenant_id',
|
||||
ordinal_position: 2,
|
||||
type: 'bigint',
|
||||
character_maximum_length: null,
|
||||
precision: null,
|
||||
nullable: false,
|
||||
default: '',
|
||||
collation: '',
|
||||
comment: '',
|
||||
},
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
name: 'tenant_user_id',
|
||||
ordinal_position: 3,
|
||||
type: 'bigint',
|
||||
character_maximum_length: null,
|
||||
precision: null,
|
||||
nullable: false,
|
||||
default: '',
|
||||
collation: '',
|
||||
comment: '',
|
||||
},
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
name: 'enabled',
|
||||
ordinal_position: 4,
|
||||
type: 'boolean',
|
||||
character_maximum_length: null,
|
||||
precision: null,
|
||||
nullable: true,
|
||||
default: '',
|
||||
collation: '',
|
||||
comment: '',
|
||||
},
|
||||
],
|
||||
indexes: [
|
||||
// The composite PK index named "moshe"
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
name: 'moshe',
|
||||
column: 'master_user_id',
|
||||
index_type: 'btree',
|
||||
cardinality: 0,
|
||||
size: 8192,
|
||||
unique: true,
|
||||
column_position: 1,
|
||||
direction: 'asc',
|
||||
},
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
name: 'moshe',
|
||||
column: 'tenant_id',
|
||||
index_type: 'btree',
|
||||
cardinality: 0,
|
||||
size: 8192,
|
||||
unique: true,
|
||||
column_position: 2,
|
||||
direction: 'asc',
|
||||
},
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
name: 'moshe',
|
||||
column: 'tenant_user_id',
|
||||
index_type: 'btree',
|
||||
cardinality: 0,
|
||||
size: 8192,
|
||||
unique: true,
|
||||
column_position: 3,
|
||||
direction: 'asc',
|
||||
},
|
||||
// Another unique index
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
name: 'users_master_table_index_1',
|
||||
column: 'tenant_id',
|
||||
index_type: 'btree',
|
||||
cardinality: 0,
|
||||
size: 8192,
|
||||
unique: true,
|
||||
column_position: 1,
|
||||
direction: 'asc',
|
||||
},
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
name: 'users_master_table_index_1',
|
||||
column: 'tenant_user_id',
|
||||
index_type: 'btree',
|
||||
cardinality: 0,
|
||||
size: 8192,
|
||||
unique: true,
|
||||
column_position: 2,
|
||||
direction: 'asc',
|
||||
},
|
||||
],
|
||||
tables: [
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
rows: 0,
|
||||
type: 'BASE TABLE',
|
||||
engine: '',
|
||||
collation: '',
|
||||
comment: '',
|
||||
},
|
||||
],
|
||||
views: [],
|
||||
custom_types: [],
|
||||
};
|
||||
|
||||
const tables = createTablesFromMetadata({
|
||||
databaseMetadata: metadata,
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
});
|
||||
|
||||
expect(tables).toHaveLength(1);
|
||||
const table = tables[0];
|
||||
|
||||
// Check that the composite PK name was captured as "moshe" in the PK index
|
||||
const pkIndex = table.indexes.find((idx) => idx.isPrimaryKey);
|
||||
expect(pkIndex).toBeDefined();
|
||||
expect(pkIndex!.name).toBe('moshe');
|
||||
|
||||
// Check that primary key fields are marked correctly
|
||||
const pkFields = table.fields.filter((f) => f.primaryKey);
|
||||
expect(pkFields).toHaveLength(3);
|
||||
expect(pkFields.map((f) => f.name).sort()).toEqual([
|
||||
'master_user_id',
|
||||
'tenant_id',
|
||||
'tenant_user_id',
|
||||
]);
|
||||
|
||||
// Check that we have both the PK index and the unique index
|
||||
expect(table.indexes).toHaveLength(2);
|
||||
const uniqueIndex = table.indexes.find((idx) => !idx.isPrimaryKey);
|
||||
expect(uniqueIndex!.name).toBe('users_master_table_index_1');
|
||||
});
|
||||
});
|
||||
@@ -15,12 +15,12 @@ export interface DBCustomTypeField {
|
||||
|
||||
export interface DBCustomType {
|
||||
id: string;
|
||||
schema?: string;
|
||||
schema?: string | null;
|
||||
name: string;
|
||||
kind: DBCustomTypeKind;
|
||||
values?: string[]; // For enum types
|
||||
fields?: DBCustomTypeField[]; // For composite types
|
||||
order?: number;
|
||||
values?: string[] | null; // For enum types
|
||||
fields?: DBCustomTypeField[] | null; // For composite types
|
||||
order?: number | null;
|
||||
}
|
||||
|
||||
export const dbCustomTypeFieldSchema = z.object({
|
||||
@@ -30,11 +30,12 @@ export const dbCustomTypeFieldSchema = z.object({
|
||||
|
||||
export const dbCustomTypeSchema: z.ZodType<DBCustomType> = z.object({
|
||||
id: z.string(),
|
||||
schema: z.string(),
|
||||
schema: z.string().or(z.null()).optional(),
|
||||
name: z.string(),
|
||||
kind: z.nativeEnum(DBCustomTypeKind),
|
||||
values: z.array(z.string()).optional(),
|
||||
fields: z.array(dbCustomTypeFieldSchema).optional(),
|
||||
values: z.array(z.string()).or(z.null()).optional(),
|
||||
fields: z.array(dbCustomTypeFieldSchema).or(z.null()).optional(),
|
||||
order: z.number().or(z.null()).optional(),
|
||||
});
|
||||
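// Illustrative example (not from the source; DBCustomTypeKind.enum assumed as the
// enum-kind member name): a record like
//   { id: 'ct1', name: 'status', kind: DBCustomTypeKind.enum, values: null }
// now validates, since schema, values, fields and order may each be omitted or null.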
|
||||
export const createCustomTypesFromMetadata = ({
|
||||
|
||||
@@ -3,6 +3,7 @@ import type { AggregatedIndexInfo } from '../data/import-metadata/metadata-types
|
||||
import { generateId } from '../utils';
|
||||
import type { DBField } from './db-field';
|
||||
import { DatabaseType } from './database-type';
|
||||
import type { DBTable } from './db-table';
|
||||
|
||||
export const INDEX_TYPES = [
|
||||
'btree',
|
||||
@@ -29,6 +30,7 @@ export interface DBIndex {
|
||||
fieldIds: string[];
|
||||
createdAt: number;
|
||||
type?: IndexType | null;
|
||||
isPrimaryKey?: boolean | null;
|
||||
}
|
||||
|
||||
export const dbIndexSchema: z.ZodType<DBIndex> = z.object({
|
||||
@@ -38,6 +40,7 @@ export const dbIndexSchema: z.ZodType<DBIndex> = z.object({
|
||||
fieldIds: z.array(z.string()),
|
||||
createdAt: z.number(),
|
||||
type: z.enum(INDEX_TYPES).optional(),
|
||||
isPrimaryKey: z.boolean().or(z.null()).optional(),
|
||||
});
|
||||
|
||||
export const createIndexesFromMetadata = ({
|
||||
@@ -64,3 +67,51 @@ export const createIndexesFromMetadata = ({
|
||||
export const databaseIndexTypes: { [key in DatabaseType]?: IndexType[] } = {
|
||||
[DatabaseType.POSTGRESQL]: ['btree', 'hash'],
|
||||
};
|
||||
|
||||
export const getTablePrimaryKeyIndex = ({
|
||||
table,
|
||||
}: {
|
||||
table: DBTable;
|
||||
}): DBIndex | null => {
|
||||
const primaryKeyFields = table.fields.filter((f) => f.primaryKey);
|
||||
const existingPKIndex = table.indexes.find((idx) => idx.isPrimaryKey);
|
||||
|
||||
if (primaryKeyFields.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const pkFieldIds = primaryKeyFields.map((f) => f.id);
|
||||
|
||||
if (existingPKIndex) {
|
||||
return {
|
||||
...existingPKIndex,
|
||||
fieldIds: pkFieldIds,
|
||||
};
|
||||
} else {
|
||||
// Create new PK index for primary key(s)
|
||||
const pkIndex: DBIndex = {
|
||||
id: generateId(),
|
||||
name: `pk_${table.name}_${primaryKeyFields.map((f) => f.name).join('_')}`,
|
||||
fieldIds: pkFieldIds,
|
||||
unique: true,
|
||||
isPrimaryKey: true,
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
|
||||
return pkIndex;
|
||||
}
|
||||
};
|
||||
|
||||
export const getTableIndexesWithPrimaryKey = ({
|
||||
table,
|
||||
}: {
|
||||
table: DBTable;
|
||||
}): DBIndex[] => {
|
||||
const primaryKeyIndex = getTablePrimaryKeyIndex({ table });
|
||||
const indexesWithoutPKIndex = table.indexes.filter(
|
||||
(idx) => !idx.isPrimaryKey
|
||||
);
|
||||
return primaryKeyIndex
|
||||
? [primaryKeyIndex, ...indexesWithoutPKIndex]
|
||||
: indexesWithoutPKIndex;
|
||||
};
|
||||
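// Illustrative behaviour (not from the source): for a table whose id and tenant_id
// fields are primaryKey, getTableIndexesWithPrimaryKey returns the existing
// isPrimaryKey index (with its fieldIds refreshed) ahead of the other indexes, or
// a generated `pk_<table>_<columns>` index when no named PK index exists yet.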
|
||||
@@ -203,11 +203,57 @@ export const createTablesFromMetadata = ({
|
||||
tableSchema,
|
||||
});
|
||||
|
||||
// Check for composite primary key and find matching index name
|
||||
const primaryKeyFields = fields.filter((f) => f.primaryKey);
|
||||
let pkMatchingIndexName: string | undefined;
|
||||
let pkIndex: DBIndex | undefined;
|
||||
|
||||
if (primaryKeyFields.length >= 1) {
|
||||
// There is at least one primary key column; look for an index that matches all PK columns exactly
|
||||
const pkFieldNames = primaryKeyFields.map((f) => f.name).sort();
|
||||
|
||||
// Find an index that matches the primary key columns exactly
|
||||
const matchingIndex = aggregatedIndexes.find((index) => {
|
||||
const indexColumnNames = index.columns
|
||||
.map((c) => c.name)
|
||||
.sort();
|
||||
return (
|
||||
indexColumnNames.length === pkFieldNames.length &&
|
||||
indexColumnNames.every((col, i) => col === pkFieldNames[i])
|
||||
);
|
||||
});
|
||||
|
||||
if (matchingIndex) {
|
||||
pkMatchingIndexName = matchingIndex.name;
|
||||
// Create a special PK index
|
||||
pkIndex = {
|
||||
id: generateId(),
|
||||
name: matchingIndex.name,
|
||||
unique: true,
|
||||
fieldIds: primaryKeyFields.map((f) => f.id),
|
||||
createdAt: Date.now(),
|
||||
isPrimaryKey: true,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Filter out the index that matches the composite PK (to avoid duplication)
|
||||
const filteredAggregatedIndexes = pkMatchingIndexName
|
||||
? aggregatedIndexes.filter(
|
||||
(idx) => idx.name !== pkMatchingIndexName
|
||||
)
|
||||
: aggregatedIndexes;
|
||||
|
||||
const dbIndexes = createIndexesFromMetadata({
|
||||
aggregatedIndexes,
|
||||
aggregatedIndexes: filteredAggregatedIndexes,
|
||||
fields,
|
||||
});
|
||||
|
||||
// Add the PK index if it exists
|
||||
if (pkIndex) {
|
||||
dbIndexes.push(pkIndex);
|
||||
}
|
||||
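// Illustrative example (not from the source): if metadata reports a unique index
// named "moshe" covering exactly the primary key columns, that index is removed
// from the regular index list above and re-added here as the table's named PK index.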
|
||||
// Determine if the current table is a view by checking against pre-computed sets
|
||||
const viewKey = generateTableKey({
|
||||
schemaName: tableSchema,
|
||||
|
||||
@@ -7,6 +7,10 @@ import {
|
||||
useUpdateNodeInternals,
|
||||
} from '@xyflow/react';
|
||||
import React, { useEffect, useMemo, useRef } from 'react';
|
||||
import {
|
||||
LEFT_HANDLE_ID_PREFIX,
|
||||
RIGHT_HANDLE_ID_PREFIX,
|
||||
} from './table-node-field';
|
||||
|
||||
export const TOP_SOURCE_HANDLE_ID_PREFIX = 'top_dep_';
|
||||
export const BOTTOM_SOURCE_HANDLE_ID_PREFIX = 'bottom_dep_';
|
||||
@@ -36,6 +40,22 @@ export const TableNodeDependencyIndicator: React.FC<TableNodeDependencyIndicator
|
||||
[connection, table.id]
|
||||
);
|
||||
|
||||
const isTargetFromTable = useMemo(
|
||||
() =>
|
||||
connection.inProgress &&
|
||||
connection.fromNode.id !== table.id &&
|
||||
(connection.fromHandle.id?.startsWith(RIGHT_HANDLE_ID_PREFIX) ||
|
||||
connection.fromHandle.id?.startsWith(
|
||||
LEFT_HANDLE_ID_PREFIX
|
||||
)),
|
||||
[
|
||||
connection.inProgress,
|
||||
connection.fromNode?.id,
|
||||
connection.fromHandle?.id,
|
||||
table.id,
|
||||
]
|
||||
);
|
||||
|
||||
const numberOfEdgesToTable = useMemo(
|
||||
() =>
|
||||
dependencies.filter(
|
||||
@@ -58,12 +78,20 @@ export const TableNodeDependencyIndicator: React.FC<TableNodeDependencyIndicator
|
||||
return (
|
||||
<>
|
||||
{table.isView || table.isMaterializedView ? (
|
||||
<Handle
|
||||
id={`${TOP_SOURCE_HANDLE_ID_PREFIX}${table.id}`}
|
||||
className={`!h-4 !w-4 !border-2 !bg-pink-600 ${!focused ? '!invisible' : ''}`}
|
||||
position={Position.Top}
|
||||
type="source"
|
||||
/>
|
||||
<>
|
||||
<Handle
|
||||
id={`${TOP_SOURCE_HANDLE_ID_PREFIX}${table.id}`}
|
||||
className={`!h-4 !w-4 !border-2 !bg-pink-600 ${!focused || isTargetFromTable ? '!invisible' : ''}`}
|
||||
position={Position.Top}
|
||||
type="source"
|
||||
/>
|
||||
<Handle
|
||||
id={`${BOTTOM_SOURCE_HANDLE_ID_PREFIX}${table.id}`}
|
||||
className={`!z-10 !h-4 !w-4 !border-2 !bg-pink-600 ${!focused || isTargetFromTable ? '!invisible' : ''}`}
|
||||
position={Position.Bottom}
|
||||
type="source"
|
||||
/>
|
||||
</>
|
||||
) : null}
|
||||
{Array.from(
|
||||
{ length: numberOfEdgesToTable },
|
||||
@@ -82,7 +110,7 @@ export const TableNodeDependencyIndicator: React.FC<TableNodeDependencyIndicator
|
||||
id={`${TARGET_DEP_PREFIX}${numberOfEdgesToTable}_${table.id}`}
|
||||
className={
|
||||
isTarget
|
||||
? '!absolute !left-0 !top-0 !h-full !w-full !transform-none !rounded-none !border-none !opacity-0'
|
||||
? '!absolute !inset-0 !z-10 !h-full !w-full !transform-none !rounded-none !border-none !opacity-0'
|
||||
: `!invisible`
|
||||
}
|
||||
position={Position.Top}
|
||||
|
||||
@@ -33,6 +33,10 @@ import { useClickAway, useKeyPressEvent } from 'react-use';
import { Input } from '@/components/input/input';
import { useDiff } from '@/context/diff-context/use-diff';
import { useLocalConfig } from '@/hooks/use-local-config';
import {
BOTTOM_SOURCE_HANDLE_ID_PREFIX,
TOP_SOURCE_HANDLE_ID_PREFIX,
} from './table-node-dependency-indicator';

export const LEFT_HANDLE_ID_PREFIX = 'left_rel_';
export const RIGHT_HANDLE_ID_PREFIX = 'right_rel_';

@@ -102,6 +106,24 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
tableNodeId,
]
);
const isTargetFromView = useMemo(
() =>
connection.inProgress &&
connection.fromNode.id !== tableNodeId &&
(connection.fromHandle.id?.startsWith(
TOP_SOURCE_HANDLE_ID_PREFIX
) ||
connection.fromHandle.id?.startsWith(
BOTTOM_SOURCE_HANDLE_ID_PREFIX
)),
[
connection.inProgress,
connection.fromNode?.id,
connection.fromHandle?.id,
tableNodeId,
]
);

const numberOfEdgesToField = useMemo(() => {
let count = 0;
for (const rel of relationships) {
@@ -289,13 +311,13 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
<>
<Handle
id={`${RIGHT_HANDLE_ID_PREFIX}${field.id}`}
className={`!h-4 !w-4 !border-2 !bg-pink-600 ${!focused || readonly ? '!invisible' : ''}`}
className={`!h-4 !w-4 !border-2 !bg-pink-600 ${!focused || readonly || isTargetFromView ? '!invisible' : ''}`}
position={Position.Right}
type="source"
/>
<Handle
id={`${LEFT_HANDLE_ID_PREFIX}${field.id}`}
className={`!h-4 !w-4 !border-2 !bg-pink-600 ${!focused || readonly ? '!invisible' : ''}`}
className={`!h-4 !w-4 !border-2 !bg-pink-600 ${!focused || readonly || isTargetFromView ? '!invisible' : ''}`}
position={Position.Left}
type="source"
/>
@@ -6,7 +6,7 @@ import React, {
useEffect,
} from 'react';
import type { NodeProps, Node } from '@xyflow/react';
import { NodeResizer, useStore } from '@xyflow/react';
import { NodeResizer, useConnection, useStore } from '@xyflow/react';
import { Button } from '@/components/button/button';
import {
ChevronsLeftRight,

@@ -80,6 +80,14 @@ export const TableNode: React.FC<NodeProps<TableNodeType>> = React.memo(
const inputRef = React.useRef<HTMLInputElement>(null);
const [isHovering, setIsHovering] = useState(false);

const connection = useConnection();

const isTarget = useMemo(() => {
if (!isHovering) return false;

return connection.inProgress && connection.fromNode.id !== table.id;
}, [connection, table.id, isHovering]);

const {
getTableNewName,
getTableNewColor,
@@ -298,7 +306,7 @@ export const TableNode: React.FC<NodeProps<TableNodeType>> = React.memo(
() =>
cn(
'flex w-full flex-col border-2 bg-slate-50 dark:bg-slate-950 rounded-lg shadow-sm transition-transform duration-300',
selected
selected || isTarget
? 'border-pink-600'
: 'border-slate-500 dark:border-slate-700',
isOverlapping

@@ -335,6 +343,7 @@ export const TableNode: React.FC<NodeProps<TableNodeType>> = React.memo(
isDiffTableChanged,
isDiffNewTable,
isDiffTableRemoved,
isTarget,
]
);
@@ -128,8 +128,8 @@ export const CustomTypeList: React.FC<CustomTypeProps> = ({ customTypes }) => {

// if both have order, sort by order
if (
customType1.order !== undefined &&
customType2.order !== undefined
customType1.order != null &&
customType2.order != null
) {
return (
customType1.order - customType2.order
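The switch from `!== undefined` to `!= null` above is meaningful: the loose comparison rejects both `null` and `undefined`. A minimal standalone illustration:

```ts
const orders: Array<number | null | undefined> = [2, null, undefined, 0];

// `value !== undefined` still lets null through...
console.log(orders.filter((value) => value !== undefined)); // [2, null, 0]

// ...while `value != null` rejects both null and undefined.
console.log(orders.filter((value) => value != null)); // [2, 0]
```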
@@ -1,5 +1,5 @@
import React, { useCallback, useMemo } from 'react';
import { Ellipsis, Trash2 } from 'lucide-react';
import { Ellipsis, Trash2, KeyRound } from 'lucide-react';
import { Button } from '@/components/button/button';
import {
databaseIndexTypes,
@@ -106,29 +106,45 @@ export const TableIndex: React.FC<TableIndexProps> = ({
'side_panel.tables_section.table.no_types_found'
)}
keepOrder
disabled={index.isPrimaryKey ?? false}
/>
<div className="flex shrink-0 gap-1">
<Tooltip>
<TooltipTrigger asChild>
<span>
<TableIndexToggle
pressed={index.unique}
onPressedChange={(value) =>
updateIndex({
unique: !!value,
})
}
>
U
</TableIndexToggle>
</span>
</TooltipTrigger>
<TooltipContent>
{t(
'side_panel.tables_section.table.index_actions.unique'
)}
</TooltipContent>
</Tooltip>
{index.isPrimaryKey ? (
<Tooltip>
<TooltipTrigger asChild>
<span>
<TableIndexToggle pressed={true} disabled>
<KeyRound className="h-3.5" />
</TableIndexToggle>
</span>
</TooltipTrigger>
<TooltipContent>
{t('side_panel.tables_section.table.primary_key')}
</TooltipContent>
</Tooltip>
) : (
<Tooltip>
<TooltipTrigger asChild>
<span>
<TableIndexToggle
pressed={index.unique}
onPressedChange={(value) =>
updateIndex({
unique: !!value,
})
}
>
U
</TableIndexToggle>
</span>
</TooltipTrigger>
<TooltipContent>
{t(
'side_panel.tables_section.table.index_actions.unique'
)}
</TooltipContent>
</Tooltip>
)}
<Popover>
<PopoverTrigger asChild>
<Button
@@ -164,52 +180,56 @@ export const TableIndex: React.FC<TableIndexProps> = ({
}
/>
</div>
<div className="mt-2 flex items-center justify-between">
<Label
htmlFor="width"
className="text-subtitle"
>
{t(
'side_panel.tables_section.table.index_actions.unique'
)}
</Label>
<Checkbox
checked={index.unique}
onCheckedChange={(value) =>
updateIndex({
unique: !!value,
})
}
/>
</div>
{indexTypeOptions.length > 0 ? (
<div className="mt-2 flex flex-col gap-2">
<Label
htmlFor="indexType"
className="text-subtitle"
{!index.isPrimaryKey ? (
<>
<div className="mt-2 flex items-center justify-between">
<Label
htmlFor="width"
className="text-subtitle"
>
{t(
'side_panel.tables_section.table.index_actions.unique'
)}
</Label>
<Checkbox
checked={index.unique}
onCheckedChange={(value) =>
updateIndex({
unique: !!value,
})
}
/>
</div>
{indexTypeOptions.length > 0 ? (
<div className="mt-2 flex flex-col gap-2">
<Label
htmlFor="indexType"
className="text-subtitle"
>
{t(
'side_panel.tables_section.table.index_actions.index_type'
)}
</Label>
<SelectBox
options={indexTypeOptions}
value={index.type || 'btree'}
onChange={updateIndexType}
/>
</div>
) : null}
<Separator orientation="horizontal" />
<Button
variant="outline"
className="flex gap-2 !text-red-700"
onClick={removeIndex}
>
<Trash2 className="size-3.5 text-red-700" />
{t(
'side_panel.tables_section.table.index_actions.index_type'
'side_panel.tables_section.table.index_actions.delete_index'
)}
</Label>
<SelectBox
options={indexTypeOptions}
value={index.type || 'btree'}
onChange={updateIndexType}
/>
</div>
</Button>
</>
) : null}
<Separator orientation="horizontal" />
<Button
variant="outline"
className="flex gap-2 !text-red-700"
onClick={removeIndex}
>
<Trash2 className="size-3.5 text-red-700" />
{t(
'side_panel.tables_section.table.index_actions.delete_index'
)}
</Button>
</div>
</PopoverContent>
</Popover>
@@ -224,19 +224,27 @@ export const TableListItemContent: React.FC<TableListItemContentProps> = ({
</div>
</AccordionTrigger>
<AccordionContent className="pb-0 pt-1">
{table.indexes.map((index) => (
<TableIndex
key={index.id}
index={index}
removeIndex={() =>
removeIndex(table.id, index.id)
}
updateIndex={(attrs: Partial<DBIndex>) =>
updateIndex(table.id, index.id, attrs)
}
fields={table.fields}
/>
))}
{[...table.indexes]
.sort((a, b) => {
// Sort PK indexes first
if (a.isPrimaryKey && !b.isPrimaryKey)
return -1;
if (!a.isPrimaryKey && b.isPrimaryKey) return 1;
return 0;
})
.map((index) => (
<TableIndex
key={index.id}
index={index}
removeIndex={() =>
removeIndex(table.id, index.id)
}
updateIndex={(attrs: Partial<DBIndex>) =>
updateIndex(table.id, index.id, attrs)
}
fields={table.fields}
/>
))}
<div className="flex justify-start p-1">
<Button
variant="ghost"
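The list above spreads into a new array before sorting because `Array.prototype.sort` mutates in place; a reduced sketch of the copy-then-sort, PK-first pattern with a simplified index shape (not the project's `DBIndex` type):

```ts
interface IndexLike {
    name: string;
    isPrimaryKey?: boolean;
}

// Spread into a new array so the original (React state / props) is never
// mutated, then order primary-key indexes ahead of the rest.
const sortPkFirst = (indexes: IndexLike[]): IndexLike[] =>
    [...indexes].sort((a, b) => {
        if (a.isPrimaryKey && !b.isPrimaryKey) return -1;
        if (!a.isPrimaryKey && b.isPrimaryKey) return 1;
        return 0;
    });

console.log(
    sortPkFirst([
        { name: 'users_email_idx' },
        { name: 'users_pkey', isPrimaryKey: true },
    ]).map((i) => i.name)
); // ['users_pkey', 'users_email_idx']
```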
@@ -151,13 +151,13 @@ export const Menu: React.FC<MenuProps> = () => {
return (
<Menubar className="h-8 border-none py-2 shadow-none md:h-10 md:py-0">
<MenubarMenu>
<MenubarTrigger>{t('menu.databases.databases')}</MenubarTrigger>
<MenubarTrigger>{t('menu.actions.actions')}</MenubarTrigger>
<MenubarContent>
<MenubarItem onClick={createNewDiagram}>
{t('menu.databases.new')}
{t('menu.actions.new')}
</MenubarItem>
<MenubarItem onClick={openDiagram}>
{t('menu.databases.browse')}
{t('menu.actions.browse')}
<MenubarShortcut>
{
keyboardShortcutsForOS[

@@ -167,7 +167,7 @@ export const Menu: React.FC<MenuProps> = () => {
</MenubarShortcut>
</MenubarItem>
<MenubarItem onClick={updateDiagramUpdatedAt}>
{t('menu.databases.save')}
{t('menu.actions.save')}
<MenubarShortcut>
{
keyboardShortcutsForOS[

@@ -179,7 +179,7 @@ export const Menu: React.FC<MenuProps> = () => {
<MenubarSeparator />
<MenubarSub>
<MenubarSubTrigger>
{t('menu.databases.import')}
{t('menu.actions.import')}
</MenubarSubTrigger>
<MenubarSubContent>
<MenubarItem onClick={openImportDiagramDialog}>

@@ -248,7 +248,7 @@ export const Menu: React.FC<MenuProps> = () => {
<MenubarSeparator />
<MenubarSub>
<MenubarSubTrigger>
{t('menu.databases.export_sql')}
{t('menu.actions.export_sql')}
</MenubarSubTrigger>
<MenubarSubContent>
{databaseType === DatabaseType.GENERIC ? (

@@ -331,7 +331,7 @@ export const Menu: React.FC<MenuProps> = () => {
</MenubarSub>
<MenubarSub>
<MenubarSubTrigger>
{t('menu.databases.export_as')}
{t('menu.actions.export_as')}
</MenubarSubTrigger>
<MenubarSubContent>
<MenubarItem onClick={exportPNG}>PNG</MenubarItem>

@@ -357,7 +357,7 @@ export const Menu: React.FC<MenuProps> = () => {
})
}
>
{t('menu.databases.delete_diagram')}
{t('menu.actions.delete_diagram')}
</MenubarItem>
</MenubarContent>
</MenubarMenu>