mirror of https://github.com/chartdb/chartdb.git
synced 2025-10-27 18:14:01 +00:00

Compare commits: jf/add_sup ... jf/add_dup (1 commit)

| Author | SHA1 | Date |
|---|---|---|
|  | 76f9662b80 |  |

.github/workflows/cla.yaml (vendored): 2 changed lines
@@ -7,7 +7,7 @@ on:

permissions:
  actions: write
  contents: read
  contents: write # this can be 'read' if the signatures are in remote repository
  pull-requests: write
  statuses: write
@@ -6,10 +6,7 @@ import type { ChartDBContext, ChartDBEvent } from './chartdb-context';
import { chartDBContext } from './chartdb-context';
import { DatabaseType } from '@/lib/domain/database-type';
import type { DBField } from '@/lib/domain/db-field';
import {
    getTableIndexesWithPrimaryKey,
    type DBIndex,
} from '@/lib/domain/db-index';
import type { DBIndex } from '@/lib/domain/db-index';
import type { DBRelationship } from '@/lib/domain/db-relationship';
import { useStorage } from '@/hooks/use-storage';
import { useRedoUndoStack } from '@/hooks/use-redo-undo-stack';
@@ -351,11 +348,6 @@ export const ChartDBProvider: React.FC<
                order: tables.length,
                ...attributes,
            };

            table.indexes = getTableIndexesWithPrimaryKey({
                table,
            });

            await addTable(table);

            return table;
@@ -647,30 +639,17 @@ export const ChartDBProvider: React.FC<
            options = { updateHistory: true }
        ) => {
            const prevField = getField(tableId, fieldId);

            const updateTableFn = (table: DBTable) => {
                const updatedTable: DBTable = {
                    ...table,
                    fields: table.fields.map((f) =>
                        f.id === fieldId ? { ...f, ...field } : f
                    ),
                } satisfies DBTable;

                updatedTable.indexes = getTableIndexesWithPrimaryKey({
                    table: updatedTable,
                });

                return updatedTable;
            };

            setTables((tables) =>
                tables.map((table) => {
                    if (table.id === tableId) {
                        return updateTableFn(table);
                    }

                    return table;
                })
                tables.map((table) =>
                    table.id === tableId
                        ? {
                              ...table,
                              fields: table.fields.map((f) =>
                                  f.id === fieldId ? { ...f, ...field } : f
                              ),
                          }
                        : table
                )
            );

            const table = await db.getTable({ diagramId, id: tableId });
@@ -685,7 +664,10 @@ export const ChartDBProvider: React.FC<
                db.updateTable({
                    id: tableId,
                    attributes: {
                        ...updateTableFn(table),
                        ...table,
                        fields: table.fields.map((f) =>
                            f.id === fieldId ? { ...f, ...field } : f
                        ),
                    },
                }),
            ]);
@@ -712,29 +694,19 @@ export const ChartDBProvider: React.FC<
            fieldId: string,
            options = { updateHistory: true }
        ) => {
            const updateTableFn = (table: DBTable) => {
                const updatedTable: DBTable = {
                    ...table,
                    fields: table.fields.filter((f) => f.id !== fieldId),
                } satisfies DBTable;

                updatedTable.indexes = getTableIndexesWithPrimaryKey({
                    table: updatedTable,
                });

                return updatedTable;
            };

            const fields = getTable(tableId)?.fields ?? [];
            const prevField = getField(tableId, fieldId);
            setTables((tables) =>
                tables.map((table) => {
                    if (table.id === tableId) {
                        return updateTableFn(table);
                    }

                    return table;
                })
                tables.map((table) =>
                    table.id === tableId
                        ? {
                              ...table,
                              fields: table.fields.filter(
                                  (f) => f.id !== fieldId
                              ),
                          }
                        : table
                )
            );

            events.emit({
@@ -758,7 +730,8 @@ export const ChartDBProvider: React.FC<
                db.updateTable({
                    id: tableId,
                    attributes: {
                        ...updateTableFn(table),
                        ...table,
                        fields: table.fields.filter((f) => f.id !== fieldId),
                    },
                }),
            ]);

@@ -764,6 +764,7 @@ export const StorageProvider: React.FC<React.PropsWithChildren> = ({
                db.db_dependencies.where('diagramId').equals(id).delete(),
                db.areas.where('diagramId').equals(id).delete(),
                db.db_custom_types.where('diagramId').equals(id).delete(),
                db.diagram_filters.where('diagramId').equals(id).delete(),
            ]);
        },
        [db]
@@ -0,0 +1,216 @@
|
||||
import React, { useCallback, useState } from 'react';
|
||||
import {
|
||||
DropdownMenu,
|
||||
DropdownMenuContent,
|
||||
DropdownMenuItem,
|
||||
DropdownMenuSeparator,
|
||||
DropdownMenuTrigger,
|
||||
} from '@/components/dropdown-menu/dropdown-menu';
|
||||
import { Button } from '@/components/button/button';
|
||||
import type { Diagram } from '@/lib/domain/diagram';
|
||||
import {
|
||||
Copy,
|
||||
MoreHorizontal,
|
||||
SquareArrowOutUpRight,
|
||||
Trash2,
|
||||
Loader2,
|
||||
} from 'lucide-react';
|
||||
import { useStorage } from '@/hooks/use-storage';
|
||||
import { useAlert } from '@/context/alert-context/alert-context';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { cloneDiagram } from '@/lib/clone';
|
||||
import { useParams, useNavigate } from 'react-router-dom';
|
||||
import { useConfig } from '@/hooks/use-config';
|
||||
|
||||
interface DiagramRowActionsMenuProps {
|
||||
diagram: Diagram;
|
||||
onOpen: () => void;
|
||||
refetch: () => void;
|
||||
onSelectDiagram?: (diagramId: string | undefined) => void;
|
||||
}
|
||||
|
||||
export const DiagramRowActionsMenu: React.FC<DiagramRowActionsMenuProps> = ({
|
||||
diagram,
|
||||
onOpen,
|
||||
refetch,
|
||||
onSelectDiagram,
|
||||
}) => {
|
||||
const { addDiagram, deleteDiagram, listDiagrams, getDiagram } =
|
||||
useStorage();
|
||||
const { showAlert } = useAlert();
|
||||
const { t } = useTranslation();
|
||||
const { diagramId: currentDiagramId } = useParams<{ diagramId: string }>();
|
||||
const navigate = useNavigate();
|
||||
const { updateConfig } = useConfig();
|
||||
const [isDuplicating, setIsDuplicating] = useState(false);
|
||||
|
||||
const handleDuplicateDiagram = useCallback(async () => {
|
||||
setIsDuplicating(true);
|
||||
|
||||
try {
|
||||
// Load the full diagram with all components
|
||||
const fullDiagram = await getDiagram(diagram.id, {
|
||||
includeTables: true,
|
||||
includeRelationships: true,
|
||||
includeAreas: true,
|
||||
includeDependencies: true,
|
||||
includeCustomTypes: true,
|
||||
});
|
||||
|
||||
if (!fullDiagram) {
|
||||
console.error('Failed to load diagram for duplication');
|
||||
setIsDuplicating(false);
|
||||
return;
|
||||
}
|
||||
|
||||
const { diagram: clonedDiagram } = cloneDiagram(fullDiagram);
|
||||
|
||||
// Generate a unique name for the duplicated diagram
|
||||
const diagrams = await listDiagrams();
|
||||
const existingNames = diagrams.map((d) => d.name);
|
||||
let duplicatedName = `${diagram.name} - Copy`;
|
||||
let counter = 1;
|
||||
|
||||
while (existingNames.includes(duplicatedName)) {
|
||||
duplicatedName = `${diagram.name} - Copy ${counter}`;
|
||||
counter++;
|
||||
}
|
||||
|
||||
const diagramToAdd = {
|
||||
...clonedDiagram,
|
||||
name: duplicatedName,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
// Add 2 second delay for better UX
|
||||
await new Promise((resolve) => setTimeout(resolve, 2000));
|
||||
|
||||
await addDiagram({ diagram: diagramToAdd });
|
||||
|
||||
// Clear current selection first, then select the new diagram
|
||||
if (onSelectDiagram) {
|
||||
onSelectDiagram(undefined); // Clear selection
|
||||
await refetch(); // Refresh the list
|
||||
// Use setTimeout to ensure the DOM has updated with the new row
|
||||
setTimeout(() => {
|
||||
onSelectDiagram(diagramToAdd.id);
|
||||
}, 100);
|
||||
} else {
|
||||
await refetch(); // Refresh the list
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error duplicating diagram:', error);
|
||||
} finally {
|
||||
setIsDuplicating(false);
|
||||
}
|
||||
}, [
|
||||
diagram,
|
||||
addDiagram,
|
||||
listDiagrams,
|
||||
getDiagram,
|
||||
refetch,
|
||||
onSelectDiagram,
|
||||
]);
|
||||
|
||||
const handleDeleteDiagram = useCallback(() => {
|
||||
showAlert({
|
||||
title: t('delete_diagram_alert.title'),
|
||||
description: t('delete_diagram_alert.description'),
|
||||
actionLabel: t('delete_diagram_alert.delete'),
|
||||
closeLabel: t('delete_diagram_alert.cancel'),
|
||||
onAction: async () => {
|
||||
await deleteDiagram(diagram.id);
|
||||
|
||||
// If we deleted the currently open diagram, navigate to another one
|
||||
if (currentDiagramId === diagram.id) {
|
||||
// Get updated list of diagrams after deletion
|
||||
const remainingDiagrams = await listDiagrams();
|
||||
|
||||
if (remainingDiagrams.length > 0) {
|
||||
// Sort by last modified date (most recent first)
|
||||
const sortedDiagrams = remainingDiagrams.sort(
|
||||
(a, b) =>
|
||||
b.updatedAt.getTime() - a.updatedAt.getTime()
|
||||
);
|
||||
|
||||
// Navigate to the most recently modified diagram
|
||||
const firstDiagram = sortedDiagrams[0];
|
||||
updateConfig({
|
||||
config: { defaultDiagramId: firstDiagram.id },
|
||||
});
|
||||
navigate(`/diagrams/${firstDiagram.id}`);
|
||||
} else {
|
||||
// No diagrams left, navigate to home
|
||||
navigate('/');
|
||||
}
|
||||
}
|
||||
|
||||
refetch(); // Refresh the list
|
||||
},
|
||||
});
|
||||
}, [
|
||||
diagram.id,
|
||||
currentDiagramId,
|
||||
deleteDiagram,
|
||||
refetch,
|
||||
showAlert,
|
||||
t,
|
||||
listDiagrams,
|
||||
updateConfig,
|
||||
navigate,
|
||||
]);
|
||||
|
||||
return (
|
||||
<DropdownMenu>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="size-8 p-0"
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
disabled={isDuplicating}
|
||||
>
|
||||
{isDuplicating ? (
|
||||
<Loader2 className="size-3.5 animate-spin" />
|
||||
) : (
|
||||
<MoreHorizontal className="size-3.5" />
|
||||
)}
|
||||
</Button>
|
||||
</DropdownMenuTrigger>
|
||||
<DropdownMenuContent align="end">
|
||||
<DropdownMenuItem
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onOpen();
|
||||
}}
|
||||
className="flex justify-between gap-4"
|
||||
>
|
||||
Open
|
||||
<SquareArrowOutUpRight className="size-3.5" />
|
||||
</DropdownMenuItem>
|
||||
<DropdownMenuItem
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
handleDuplicateDiagram();
|
||||
}}
|
||||
className="flex justify-between gap-4"
|
||||
>
|
||||
{t('menu.databases.duplicate')}
|
||||
<Copy className="size-3.5" />
|
||||
</DropdownMenuItem>
|
||||
<DropdownMenuSeparator />
|
||||
<DropdownMenuItem
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
handleDeleteDiagram();
|
||||
}}
|
||||
className="flex items-center justify-between text-red-600 focus:text-red-600"
|
||||
>
|
||||
{t('menu.databases.delete_diagram')}
|
||||
<Trash2 className="size-3.5" />
|
||||
</DropdownMenuItem>
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
);
|
||||
};
|
||||
@@ -27,6 +27,7 @@ import { useTranslation } from 'react-i18next';
import { useNavigate } from 'react-router-dom';
import type { BaseDialogProps } from '../common/base-dialog-props';
import { useDebounce } from '@/hooks/use-debounce';
import { DiagramRowActionsMenu } from './diagram-row-actions-menu/diagram-row-actions-menu';

export interface OpenDiagramDialogProps extends BaseDialogProps {
    canClose?: boolean;
@@ -50,17 +51,18 @@ export const OpenDiagramDialog: React.FC<OpenDiagramDialogProps> = ({
        setSelectedDiagramId(undefined);
    }, [dialog.open]);

    const fetchDiagrams = useCallback(async () => {
        const diagrams = await listDiagrams({ includeTables: true });
        setDiagrams(
            diagrams.sort(
                (a, b) => b.updatedAt.getTime() - a.updatedAt.getTime()
            )
        );
    }, [listDiagrams]);

    useEffect(() => {
        const fetchDiagrams = async () => {
            const diagrams = await listDiagrams({ includeTables: true });
            setDiagrams(
                diagrams.sort(
                    (a, b) => b.updatedAt.getTime() - a.updatedAt.getTime()
                )
            );
        };
        fetchDiagrams();
    }, [listDiagrams, setDiagrams, dialog.open]);
    }, [fetchDiagrams, dialog.open]);

    const openDiagram = useCallback(
        (diagramId: string) => {
@@ -221,6 +223,19 @@ export const OpenDiagramDialog: React.FC<OpenDiagramDialogProps> = ({
                                        <TableCell className="text-center">
                                            {diagram.tables?.length}
                                        </TableCell>
                                        <TableCell className="items-center p-0 pr-1 text-right">
                                            <DiagramRowActionsMenu
                                                diagram={diagram}
                                                onOpen={() => {
                                                    openDiagram(diagram.id);
                                                    closeOpenDiagramDialog();
                                                }}
                                                refetch={fetchDiagrams}
                                                onSelectDiagram={
                                                    setSelectedDiagramId
                                                }
                                            />
                                        </TableCell>
                                    </TableRow>
                                ))}
                            </TableBody>
@@ -17,6 +17,7 @@ export const ar: LanguageTranslation = {
|
||||
new: 'مخطط جديد',
|
||||
browse: 'تصفح...',
|
||||
save: 'حفظ',
|
||||
duplicate: 'تكرار',
|
||||
import: 'استيراد قاعدة بيانات',
|
||||
export_sql: 'SQL تصدير',
|
||||
export_as: 'تصدير كـ',
|
||||
|
||||
@@ -17,6 +17,7 @@ export const bn: LanguageTranslation = {
|
||||
new: 'নতুন ডায়াগ্রাম',
|
||||
browse: 'ব্রাউজ করুন...',
|
||||
save: 'সংরক্ষণ করুন',
|
||||
duplicate: 'ডুপ্লিকেট করুন',
|
||||
import: 'ডাটাবেস আমদানি করুন',
|
||||
export_sql: 'SQL রপ্তানি করুন',
|
||||
export_as: 'রূপে রপ্তানি করুন',
|
||||
|
||||
@@ -17,6 +17,7 @@ export const de: LanguageTranslation = {
|
||||
new: 'Neues Diagramm',
|
||||
browse: 'Durchsuchen...',
|
||||
save: 'Speichern',
|
||||
duplicate: 'Diagramm duplizieren',
|
||||
import: 'Datenbank importieren',
|
||||
export_sql: 'SQL exportieren',
|
||||
export_as: 'Exportieren als',
|
||||
@@ -304,7 +305,7 @@ export const de: LanguageTranslation = {
|
||||
step_1: 'Gehen Sie zu Tools > Optionen > Abfrageergebnisse > SQL Server.',
|
||||
step_2: 'Wenn Sie "Ergebnisse in Raster" verwenden, ändern Sie die maximale Zeichenanzahl für Nicht-XML-Daten (auf 9999999 setzen).',
|
||||
},
|
||||
instructions_link: 'Brauchen Sie Hilfe? So geht’s',
|
||||
instructions_link: "Brauchen Sie Hilfe? So geht's",
|
||||
check_script_result: 'Skriptergebnis überprüfen',
|
||||
},
|
||||
|
||||
|
||||
@@ -17,6 +17,7 @@ export const en = {
|
||||
new: 'New Diagram',
|
||||
browse: 'Browse...',
|
||||
save: 'Save',
|
||||
duplicate: 'Duplicate Diagram',
|
||||
import: 'Import',
|
||||
export_sql: 'Export SQL',
|
||||
export_as: 'Export as',
|
||||
@@ -143,7 +144,6 @@ export const en = {
|
||||
title: 'Field Attributes',
|
||||
unique: 'Unique',
|
||||
auto_increment: 'Auto Increment',
|
||||
array: 'Declare Array',
|
||||
character_length: 'Max Length',
|
||||
precision: 'Precision',
|
||||
scale: 'Scale',
|
||||
|
||||
@@ -17,6 +17,7 @@ export const es: LanguageTranslation = {
|
||||
new: 'Nuevo Diagrama',
|
||||
browse: 'Examinar...',
|
||||
save: 'Guardar',
|
||||
duplicate: 'Duplicar',
|
||||
import: 'Importar Base de Datos',
|
||||
export_sql: 'Exportar SQL',
|
||||
export_as: 'Exportar como',
|
||||
|
||||
@@ -17,6 +17,7 @@ export const fr: LanguageTranslation = {
|
||||
new: 'Nouveau Diagramme',
|
||||
browse: 'Parcourir...',
|
||||
save: 'Enregistrer',
|
||||
duplicate: 'Dupliquer',
|
||||
import: 'Importer Base de Données',
|
||||
export_sql: 'Exporter SQL',
|
||||
export_as: 'Exporter en tant que',
|
||||
|
||||
@@ -17,6 +17,7 @@ export const gu: LanguageTranslation = {
|
||||
new: 'નવું ડાયાગ્રામ',
|
||||
browse: 'બ્રાઉજ કરો...',
|
||||
save: 'સાચવો',
|
||||
duplicate: 'ડુપ્લિકેટ',
|
||||
import: 'ડેટાબેસ આયાત કરો',
|
||||
export_sql: 'SQL નિકાસ કરો',
|
||||
export_as: 'રૂપે નિકાસ કરો',
|
||||
|
||||
@@ -17,6 +17,7 @@ export const hi: LanguageTranslation = {
|
||||
new: 'नया आरेख',
|
||||
browse: 'ब्राउज़ करें...',
|
||||
save: 'सहेजें',
|
||||
duplicate: 'डुप्लिकेट',
|
||||
import: 'डेटाबेस आयात करें',
|
||||
export_sql: 'SQL निर्यात करें',
|
||||
export_as: 'के रूप में निर्यात करें',
|
||||
|
||||
@@ -17,6 +17,7 @@ export const hr: LanguageTranslation = {
|
||||
new: 'Novi Dijagram',
|
||||
browse: 'Pregledaj...',
|
||||
save: 'Spremi',
|
||||
duplicate: 'Dupliciraj dijagram',
|
||||
import: 'Uvezi',
|
||||
export_sql: 'Izvezi SQL',
|
||||
export_as: 'Izvezi kao',
|
||||
|
||||
@@ -17,6 +17,7 @@ export const id_ID: LanguageTranslation = {
|
||||
new: 'Diagram Baru',
|
||||
browse: 'Jelajahi...',
|
||||
save: 'Simpan',
|
||||
duplicate: 'Duplikat',
|
||||
import: 'Impor Database',
|
||||
export_sql: 'Ekspor SQL',
|
||||
export_as: 'Ekspor Sebagai',
|
||||
|
||||
@@ -17,6 +17,7 @@ export const ja: LanguageTranslation = {
|
||||
new: '新しいダイアグラム',
|
||||
browse: '参照...',
|
||||
save: '保存',
|
||||
duplicate: '複製',
|
||||
import: 'データベースをインポート',
|
||||
export_sql: 'SQLをエクスポート',
|
||||
export_as: '形式を指定してエクスポート',
|
||||
|
||||
@@ -17,6 +17,7 @@ export const ko_KR: LanguageTranslation = {
|
||||
new: '새 다이어그램',
|
||||
browse: '찾아보기...',
|
||||
save: '저장',
|
||||
duplicate: '복사',
|
||||
import: '데이터베이스 가져오기',
|
||||
export_sql: 'SQL로 저장',
|
||||
export_as: '다른 형식으로 저장',
|
||||
|
||||
@@ -17,6 +17,7 @@ export const mr: LanguageTranslation = {
|
||||
new: 'नवीन आरेख',
|
||||
browse: 'ब्राउज करा...',
|
||||
save: 'जतन करा',
|
||||
duplicate: 'डुप्लिकेट',
|
||||
import: 'डेटाबेस इम्पोर्ट करा',
|
||||
export_sql: 'SQL एक्स्पोर्ट करा',
|
||||
export_as: 'म्हणून एक्स्पोर्ट करा',
|
||||
|
||||
@@ -17,6 +17,7 @@ export const ne: LanguageTranslation = {
|
||||
new: 'नयाँ डायाग्राम',
|
||||
browse: 'ब्राउज गर्नुहोस्...',
|
||||
save: 'सुरक्षित गर्नुहोस्',
|
||||
duplicate: 'डुप्लिकेट',
|
||||
import: 'डाटाबेस आयात गर्नुहोस्',
|
||||
export_sql: 'SQL निर्यात गर्नुहोस्',
|
||||
export_as: 'निर्यात गर्नुहोस्',
|
||||
|
||||
@@ -17,6 +17,7 @@ export const pt_BR: LanguageTranslation = {
|
||||
new: 'Novo Diagrama',
|
||||
browse: 'Navegar...',
|
||||
save: 'Salvar',
|
||||
duplicate: 'Duplicar',
|
||||
import: 'Importar Banco de Dados',
|
||||
export_sql: 'Exportar SQL',
|
||||
export_as: 'Exportar como',
|
||||
|
||||
@@ -17,6 +17,7 @@ export const ru: LanguageTranslation = {
|
||||
new: 'Новая диаграмма',
|
||||
browse: 'Обзор...',
|
||||
save: 'Сохранить',
|
||||
duplicate: 'Дублировать',
|
||||
import: 'Импортировать базу данных',
|
||||
export_sql: 'Экспорт SQL',
|
||||
export_as: 'Экспортировать как',
|
||||
|
||||
@@ -17,6 +17,7 @@ export const te: LanguageTranslation = {
|
||||
new: 'కొత్త డైగ్రాం',
|
||||
browse: 'బ్రాఉజ్ చేయండి...',
|
||||
save: 'సేవ్',
|
||||
duplicate: 'డుప్లికేట్',
|
||||
import: 'డేటాబేస్ను దిగుమతి చేసుకోండి',
|
||||
export_sql: 'SQL ఎగుమతి',
|
||||
export_as: 'వగా ఎగుమతి చేయండి',
|
||||
|
||||
@@ -17,6 +17,7 @@ export const tr: LanguageTranslation = {
|
||||
new: 'Yeni Diyagram',
|
||||
browse: 'Gözat...',
|
||||
save: 'Kaydet',
|
||||
duplicate: 'Kopyala',
|
||||
import: 'Veritabanı İçe Aktar',
|
||||
export_sql: 'SQL Olarak Dışa Aktar',
|
||||
export_as: 'Olarak Dışa Aktar',
|
||||
|
||||
@@ -17,6 +17,7 @@ export const uk: LanguageTranslation = {
|
||||
new: 'Нова діаграма',
|
||||
browse: 'Огляд...',
|
||||
save: 'Зберегти',
|
||||
duplicate: 'Дублювати',
|
||||
import: 'Імпорт бази даних',
|
||||
export_sql: 'Експорт SQL',
|
||||
export_as: 'Експортувати як',
|
||||
|
||||
@@ -17,6 +17,7 @@ export const vi: LanguageTranslation = {
|
||||
new: 'Sơ đồ mới',
|
||||
browse: 'Duyệt...',
|
||||
save: 'Lưu',
|
||||
duplicate: 'Nhân đôi',
|
||||
import: 'Nhập cơ sở dữ liệu',
|
||||
export_sql: 'Xuất SQL',
|
||||
export_as: 'Xuất thành',
|
||||
|
||||
@@ -17,6 +17,7 @@ export const zh_CN: LanguageTranslation = {
|
||||
new: '新建关系图',
|
||||
browse: '浏览...',
|
||||
save: '保存',
|
||||
duplicate: '复制',
|
||||
import: '导入数据库',
|
||||
export_sql: '导出 SQL 语句',
|
||||
export_as: '导出为',
|
||||
|
||||
@@ -17,6 +17,7 @@ export const zh_TW: LanguageTranslation = {
|
||||
new: '新增圖表',
|
||||
browse: '瀏覽...',
|
||||
save: '儲存',
|
||||
duplicate: '複製',
|
||||
import: '匯入資料庫',
|
||||
export_sql: '匯出 SQL',
|
||||
export_as: '匯出為特定格式',
|
||||
|
||||
@@ -165,21 +165,3 @@ export const supportsAutoIncrementDataType = (
        'decimal',
    ].includes(dataTypeName.toLocaleLowerCase());
};

export const supportsArrayDataType = (dataTypeName: string): boolean => {
    // Types that do NOT support arrays in PostgreSQL
    const unsupportedTypes = [
        'serial',
        'bigserial',
        'smallserial',
        'serial2',
        'serial4',
        'serial8',
        'xml',
        'money',
    ];

    // Check if the type is in the unsupported list
    const normalizedType = dataTypeName.toLowerCase();
    return !unsupportedTypes.includes(normalizedType);
};
@@ -124,96 +124,6 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('should not create duplicate index for composite primary key', () => {
|
||||
const tableId = testId();
|
||||
const field1Id = testId();
|
||||
const field2Id = testId();
|
||||
const field3Id = testId();
|
||||
|
||||
const diagram: Diagram = createDiagram({
|
||||
id: testId(),
|
||||
name: 'Landlord System',
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
tables: [
|
||||
createTable({
|
||||
id: tableId,
|
||||
name: 'users_master_table',
|
||||
schema: 'landlord',
|
||||
fields: [
|
||||
createField({
|
||||
id: field1Id,
|
||||
name: 'master_user_id',
|
||||
type: { id: 'bigint', name: 'bigint' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: field2Id,
|
||||
name: 'tenant_id',
|
||||
type: { id: 'bigint', name: 'bigint' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: field3Id,
|
||||
name: 'tenant_user_id',
|
||||
type: { id: 'bigint', name: 'bigint' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'enabled',
|
||||
type: { id: 'boolean', name: 'boolean' },
|
||||
primaryKey: false,
|
||||
nullable: true,
|
||||
unique: false,
|
||||
}),
|
||||
],
|
||||
indexes: [
|
||||
{
|
||||
id: testId(),
|
||||
name: 'idx_users_master_table_master_user_id_tenant_id_tenant_user_id',
|
||||
unique: false,
|
||||
fieldIds: [field1Id, field2Id, field3Id],
|
||||
createdAt: testTime,
|
||||
},
|
||||
{
|
||||
id: testId(),
|
||||
name: 'index_1',
|
||||
unique: true,
|
||||
fieldIds: [field2Id, field3Id],
|
||||
createdAt: testTime,
|
||||
},
|
||||
],
|
||||
}),
|
||||
],
|
||||
relationships: [],
|
||||
});
|
||||
|
||||
const sql = exportBaseSQL({
|
||||
diagram,
|
||||
targetDatabaseType: DatabaseType.POSTGRESQL,
|
||||
isDBMLFlow: true,
|
||||
});
|
||||
|
||||
// Should contain composite primary key constraint
|
||||
expect(sql).toContain(
|
||||
'PRIMARY KEY (master_user_id, tenant_id, tenant_user_id)'
|
||||
);
|
||||
|
||||
// Should NOT contain the duplicate index for the primary key fields
|
||||
expect(sql).not.toContain(
|
||||
'CREATE INDEX idx_users_master_table_master_user_id_tenant_id_tenant_user_id'
|
||||
);
|
||||
|
||||
// Should still contain the unique index on subset of fields
|
||||
expect(sql).toContain('CREATE UNIQUE INDEX index_1');
|
||||
});
|
||||
|
||||
it('should handle single primary keys inline', () => {
|
||||
const diagram: Diagram = createDiagram({
|
||||
id: testId(),
|
||||
|
||||
@@ -178,15 +178,7 @@ export function exportMSSQL({
                })
                .join(',\n')}${
            table.fields.filter((f) => f.primaryKey).length > 0
                ? `,\n ${(() => {
                      // Find PK index to get the constraint name
                      const pkIndex = table.indexes.find(
                          (idx) => idx.isPrimaryKey
                      );
                      return pkIndex?.name
                          ? `CONSTRAINT [${pkIndex.name}] `
                          : '';
                  })()}PRIMARY KEY (${table.fields
                ? `,\n PRIMARY KEY (${table.fields
                      .filter((f) => f.primaryKey)
                      .map((f) => `[${f.name}]`)
                      .join(', ')})`
@@ -313,15 +313,7 @@ export function exportMySQL({
                .join(',\n')}${
            // Add PRIMARY KEY as table constraint
            primaryKeyFields.length > 0
                ? `,\n ${(() => {
                      // Find PK index to get the constraint name
                      const pkIndex = table.indexes.find(
                          (idx) => idx.isPrimaryKey
                      );
                      return pkIndex?.name
                          ? `CONSTRAINT \`${pkIndex.name}\` `
                          : '';
                  })()}PRIMARY KEY (${primaryKeyFields
                ? `,\n PRIMARY KEY (${primaryKeyFields
                      .map((f) => `\`${f.name}\``)
                      .join(', ')})`
                : ''
@@ -286,14 +286,10 @@ export function exportPostgreSQL({
                    }
                }

                // Handle array types (check if the field has array property or type name ends with '[]')
                if (field.array || typeName.endsWith('[]')) {
                    if (!typeName.endsWith('[]')) {
                        typeWithSize = typeWithSize + '[]';
                    } else {
                        typeWithSize =
                            typeWithSize.replace('[]', '') + '[]';
                    }
                // Handle array types (check if the type name ends with '[]')
                if (typeName.endsWith('[]')) {
                    typeWithSize =
                        typeWithSize.replace('[]', '') + '[]';
                }

                const notNull = field.nullable ? '' : ' NOT NULL';
@@ -329,15 +325,7 @@ export function exportPostgreSQL({
                })
                .join(',\n')}${
            primaryKeyFields.length > 0
                ? `,\n ${(() => {
                      // Find PK index to get the constraint name
                      const pkIndex = table.indexes.find(
                          (idx) => idx.isPrimaryKey
                      );
                      return pkIndex?.name
                          ? `CONSTRAINT "${pkIndex.name}" `
                          : '';
                  })()}PRIMARY KEY (${primaryKeyFields
                ? `,\n PRIMARY KEY (${primaryKeyFields
                      .map((f) => `"${f.name}"`)
                      .join(', ')})`
                : ''
@@ -313,33 +313,21 @@ export const exportBaseSQL = ({
                }
            }

            // Handle PRIMARY KEY constraint - only add inline if no PK index with custom name
            const pkIndex = table.indexes.find((idx) => idx.isPrimaryKey);
            if (field.primaryKey && !hasCompositePrimaryKey && !pkIndex?.name) {
            // Handle PRIMARY KEY constraint - only add inline if not composite
            if (field.primaryKey && !hasCompositePrimaryKey) {
                sqlScript += ' PRIMARY KEY';
            }

            // Add a comma after each field except the last one (or before PK constraint)
            const needsPKConstraint =
                hasCompositePrimaryKey ||
                (primaryKeyFields.length === 1 && pkIndex?.name);
            if (index < table.fields.length - 1 || needsPKConstraint) {
            // Add a comma after each field except the last one (or before composite primary key)
            if (index < table.fields.length - 1 || hasCompositePrimaryKey) {
                sqlScript += ',\n';
            }
        });

        // Add primary key constraint if needed (for composite PKs or single PK with custom name)
        const pkIndex = table.indexes.find((idx) => idx.isPrimaryKey);
        if (
            hasCompositePrimaryKey ||
            (primaryKeyFields.length === 1 && pkIndex?.name)
        ) {
        // Add composite primary key constraint if needed
        if (hasCompositePrimaryKey) {
            const pkFieldNames = primaryKeyFields.map((f) => f.name).join(', ');
            if (pkIndex?.name) {
                sqlScript += `\n CONSTRAINT ${pkIndex.name} PRIMARY KEY (${pkFieldNames})`;
            } else {
                sqlScript += `\n PRIMARY KEY (${pkFieldNames})`;
            }
            sqlScript += `\n PRIMARY KEY (${pkFieldNames})`;
        }

        sqlScript += '\n);\n';
@@ -361,33 +349,12 @@ export const exportBaseSQL = ({

        // Generate SQL for indexes
        table.indexes.forEach((index) => {
            // Skip the primary key index (it's already handled as a constraint)
            if (index.isPrimaryKey) {
                return;
            }

            // Get the fields for this index
            const indexFields = index.fieldIds
                .map((fieldId) => table.fields.find((f) => f.id === fieldId))
                .filter(
                    (field): field is NonNullable<typeof field> =>
                        field !== undefined
                );

            // Skip if this index exactly matches the primary key fields
            // This prevents creating redundant indexes for composite primary keys
            if (
                primaryKeyFields.length > 0 &&
                primaryKeyFields.length === indexFields.length &&
                primaryKeyFields.every((pk) =>
                    indexFields.some((field) => field.id === pk.id)
            const fieldNames = index.fieldIds
                .map(
                    (fieldId) =>
                        table.fields.find((field) => field.id === fieldId)?.name
                )
            ) {
                return; // Skip this index as it's redundant with the primary key
            }

            const fieldNames = indexFields
                .map((field) => field.name)
                .filter(Boolean)
                .join(', ');

            if (fieldNames) {
@@ -29,7 +29,6 @@ export interface SQLColumn {
    comment?: string;
    default?: string;
    increment?: boolean;
    array?: boolean;
}

export interface SQLTable {
@@ -613,7 +612,6 @@ export function convertToChartDBDiagram(
            default: column.default || '',
            createdAt: Date.now(),
            increment: column.increment,
            array: column.array,
        };

        // Add type arguments if present
@@ -373,13 +373,6 @@ function extractColumnsFromSQL(sql: string): SQLColumn[] {
                'SMALLSERIAL',
            ].includes(upperType.split('(')[0]);

            // Check if it's an array type
            let isArrayType = false;
            if (columnType.endsWith('[]')) {
                isArrayType = true;
                columnType = columnType.slice(0, -2);
            }

            // Normalize the type
            columnType = normalizePostgreSQLType(columnType);

@@ -402,7 +395,6 @@ function extractColumnsFromSQL(sql: string): SQLColumn[] {
                    trimmedLine.includes('uuid_generate_v4()') ||
                    trimmedLine.includes('GENERATED ALWAYS AS IDENTITY') ||
                    trimmedLine.includes('GENERATED BY DEFAULT AS IDENTITY'),
                array: isArrayType,
            });
        }
    }
@@ -790,16 +782,6 @@ export async function fromPostgres(
                            normalizePostgreSQLType(rawDataType);
                    }

                    // Check if it's an array type
                    let isArrayType = false;
                    if (normalizedBaseType.endsWith('[]')) {
                        isArrayType = true;
                        normalizedBaseType = normalizedBaseType.slice(
                            0,
                            -2
                        );
                    }

                    // Now handle parameters - but skip for integer types that shouldn't have them
                    let finalDataType = normalizedBaseType;

@@ -892,7 +874,6 @@ export async function fromPostgres(
                                stmt.sql
                                    .toUpperCase()
                                    .includes('IDENTITY')),
                        array: isArrayType,
                    });
                }
            } else if (def.resource === 'constraint') {
@@ -1,114 +0,0 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { generateDBMLFromDiagram } from '../dbml-export';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import type { Diagram } from '@/lib/domain/diagram';
|
||||
import { generateId } from '@/lib/utils';
|
||||
|
||||
describe('Composite Primary Key Name Export', () => {
|
||||
it('should export composite primary key with name in DBML', () => {
|
||||
const diagram: Diagram = {
|
||||
id: generateId(),
|
||||
name: 'Test',
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
tables: [
|
||||
{
|
||||
id: generateId(),
|
||||
name: 'users_master_table',
|
||||
schema: 'landlord',
|
||||
x: 0,
|
||||
y: 0,
|
||||
color: '#FFF',
|
||||
isView: false,
|
||||
createdAt: Date.now(),
|
||||
fields: [
|
||||
{
|
||||
id: generateId(),
|
||||
name: 'master_user_id',
|
||||
type: { id: 'bigint', name: 'bigint' },
|
||||
nullable: false,
|
||||
primaryKey: true,
|
||||
unique: false,
|
||||
createdAt: Date.now(),
|
||||
},
|
||||
{
|
||||
id: generateId(),
|
||||
name: 'tenant_id',
|
||||
type: { id: 'bigint', name: 'bigint' },
|
||||
nullable: false,
|
||||
primaryKey: true,
|
||||
unique: false,
|
||||
createdAt: Date.now(),
|
||||
},
|
||||
{
|
||||
id: generateId(),
|
||||
name: 'tenant_user_id',
|
||||
type: { id: 'bigint', name: 'bigint' },
|
||||
nullable: false,
|
||||
primaryKey: true,
|
||||
unique: false,
|
||||
createdAt: Date.now(),
|
||||
},
|
||||
{
|
||||
id: generateId(),
|
||||
name: 'enabled',
|
||||
type: { id: 'boolean', name: 'boolean' },
|
||||
nullable: true,
|
||||
primaryKey: false,
|
||||
unique: false,
|
||||
createdAt: Date.now(),
|
||||
},
|
||||
],
|
||||
indexes: [
|
||||
{
|
||||
id: generateId(),
|
||||
name: 'users_master_table_index_1',
|
||||
unique: true,
|
||||
fieldIds: ['dummy1', 'dummy2'], // Will be replaced
|
||||
createdAt: Date.now(),
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
relationships: [],
|
||||
};
|
||||
|
||||
// Fix field IDs in the index and add PK index
|
||||
const table = diagram.tables![0];
|
||||
const masterUserIdField = table.fields.find(
|
||||
(f) => f.name === 'master_user_id'
|
||||
);
|
||||
const tenantIdField = table.fields.find((f) => f.name === 'tenant_id');
|
||||
const tenantUserIdField = table.fields.find(
|
||||
(f) => f.name === 'tenant_user_id'
|
||||
);
|
||||
table.indexes[0].fieldIds = [tenantIdField!.id, tenantUserIdField!.id];
|
||||
|
||||
// Add the PK index with name
|
||||
table.indexes.push({
|
||||
id: generateId(),
|
||||
name: 'moshe',
|
||||
unique: true,
|
||||
isPrimaryKey: true,
|
||||
fieldIds: [
|
||||
masterUserIdField!.id,
|
||||
tenantIdField!.id,
|
||||
tenantUserIdField!.id,
|
||||
],
|
||||
createdAt: Date.now(),
|
||||
});
|
||||
|
||||
const result = generateDBMLFromDiagram(diagram);
|
||||
|
||||
// Check that the DBML contains the composite PK with name
|
||||
expect(result.standardDbml).toContain(
|
||||
'(master_user_id, tenant_id, tenant_user_id) [pk, name: "moshe"]'
|
||||
);
|
||||
|
||||
// Check that the unique index is also present
|
||||
expect(result.standardDbml).toContain(
|
||||
'(tenant_id, tenant_user_id) [unique, name: "users_master_table_index_1"]'
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -1383,9 +1383,12 @@ Ref "fk_0_table_2_id_fk":"table_1"."id" < "table_2"."id"
        const result = generateDBMLFromDiagram(diagram);

        // Check that the inline DBML has proper indentation
        // Note: indexes on primary key fields should be filtered out
        expect(result.inlineDbml).toContain(`Table "table_1" {
  "id" bigint [pk, not null]

  Indexes {
    id [name: "index_1"]
  }
}`);

        expect(result.inlineDbml).toContain(`Table "table_2" {
@@ -605,45 +605,6 @@ const fixTableBracketSyntax = (dbml: string): string => {
    );
};

// Restore composite primary key names in the DBML
const restoreCompositePKNames = (dbml: string, tables: DBTable[]): string => {
    if (!tables || tables.length === 0) return dbml;

    let result = dbml;

    tables.forEach((table) => {
        // Check if this table has a PK index with a name
        const pkIndex = table.indexes.find((idx) => idx.isPrimaryKey);
        if (pkIndex?.name) {
            const primaryKeyFields = table.fields.filter((f) => f.primaryKey);
            if (primaryKeyFields.length >= 1) {
                // Build the column list for the composite PK
                const columnList = primaryKeyFields
                    .map((f) => f.name)
                    .join(', ');

                // Build the table identifier pattern
                const tableIdentifier = table.schema
                    ? `"${table.schema.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}"\\."${table.name.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}"`
                    : `"${table.name.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}"`;

                // Pattern to match the composite PK index line
                // Match patterns like: (col1, col2, col3) [pk]
                const pkPattern = new RegExp(
                    `(Table ${tableIdentifier} \\{[^}]*?Indexes \\{[^}]*?)(\\(${columnList.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}\\) \\[pk\\])`,
                    'gs'
                );

                // Replace with the named version
                const replacement = `$1(${columnList}) [pk, name: "${pkIndex.name}"]`;
                result = result.replace(pkPattern, replacement);
            }
        }
    });

    return result;
};

// Restore schema information that may have been stripped by the DBML importer
const restoreTableSchemas = (dbml: string, tables: DBTable[]): string => {
    if (!tables || tables.length === 0) return dbml;
@@ -909,16 +870,14 @@ export function generateDBMLFromDiagram(diagram: Diagram): DBMLExportResult {
        ...table,
        name: safeTableName,
        fields: processedFields,
        indexes: (table.indexes || [])
            .filter((index) => !index.isPrimaryKey) // Filter out PK indexes as they're handled separately
            .map((index) => ({
                ...index,
                name: index.name
                    ? /[^\w]/.test(index.name)
                        ? `"${index.name.replace(/"/g, '\\"')}"`
                        : index.name
                    : `idx_${Math.random().toString(36).substring(2, 8)}`,
            })),
        indexes: (table.indexes || []).map((index) => ({
            ...index,
            name: index.name
                ? /[^\w]/.test(index.name)
                    ? `"${index.name.replace(/"/g, '\\"')}"`
                    : index.name
                : `idx_${Math.random().toString(36).substring(2, 8)}`,
        })),
    };
};

@@ -980,9 +939,6 @@ export function generateDBMLFromDiagram(diagram: Diagram): DBMLExportResult {
    // Restore schema information that may have been stripped by DBML importer
    standard = restoreTableSchemas(standard, uniqueTables);

    // Restore composite primary key names
    standard = restoreCompositePKNames(standard, uniqueTables);

    // Prepend Enum DBML to the standard output
    if (enumsDBML) {
        standard = enumsDBML + '\n\n' + standard;
@@ -1,190 +0,0 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { importDBMLToDiagram } from '../dbml-import';
|
||||
import { exportPostgreSQL } from '@/lib/data/export-metadata/export-per-type/postgresql';
|
||||
import { exportMySQL } from '@/lib/data/export-metadata/export-per-type/mysql';
|
||||
import { exportMSSQL } from '@/lib/data/export-metadata/export-per-type/mssql';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
|
||||
describe('Composite Primary Key with Name', () => {
|
||||
it('should preserve composite primary key name in DBML import and SQL export', async () => {
|
||||
const dbmlContent = `
|
||||
Table "landlord"."users_master_table" {
|
||||
"master_user_id" bigint [not null]
|
||||
"tenant_id" bigint [not null]
|
||||
"tenant_user_id" bigint [not null]
|
||||
"enabled" boolean
|
||||
|
||||
Indexes {
|
||||
(master_user_id, tenant_id, tenant_user_id) [pk, name: "idx_users_master_table_master_user_id_tenant_id_tenant_user_id"]
|
||||
(tenant_id, tenant_user_id) [unique, name: "index_1"]
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
// Import DBML
|
||||
const diagram = await importDBMLToDiagram(dbmlContent, {
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
});
|
||||
|
||||
// Check that the composite PK name was captured
|
||||
expect(diagram.tables).toBeDefined();
|
||||
const table = diagram.tables![0];
|
||||
|
||||
// Check for the PK index
|
||||
const pkIndex = table.indexes.find((idx) => idx.isPrimaryKey);
|
||||
expect(pkIndex).toBeDefined();
|
||||
expect(pkIndex!.name).toBe(
|
||||
'idx_users_master_table_master_user_id_tenant_id_tenant_user_id'
|
||||
);
|
||||
|
||||
// Check that fields are marked as primary keys
|
||||
const pkFields = table.fields.filter((f) => f.primaryKey);
|
||||
expect(pkFields).toHaveLength(3);
|
||||
expect(pkFields.map((f) => f.name)).toEqual([
|
||||
'master_user_id',
|
||||
'tenant_id',
|
||||
'tenant_user_id',
|
||||
]);
|
||||
|
||||
// Check that we have both the PK index and the unique index
|
||||
expect(table.indexes).toHaveLength(2);
|
||||
const uniqueIndex = table.indexes.find((idx) => !idx.isPrimaryKey);
|
||||
expect(uniqueIndex!.name).toBe('index_1');
|
||||
expect(uniqueIndex!.unique).toBe(true);
|
||||
});
|
||||
|
||||
it('should export composite primary key with CONSTRAINT name in PostgreSQL', async () => {
|
||||
const dbmlContent = `
|
||||
Table "users" {
|
||||
"id" bigint [not null]
|
||||
"tenant_id" bigint [not null]
|
||||
|
||||
Indexes {
|
||||
(id, tenant_id) [pk, name: "pk_users_composite"]
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const diagram = await importDBMLToDiagram(dbmlContent, {
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
});
|
||||
|
||||
const sqlScript = exportPostgreSQL({ diagram });
|
||||
|
||||
// Check that the SQL contains the named constraint
|
||||
expect(sqlScript).toContain(
|
||||
'CONSTRAINT "pk_users_composite" PRIMARY KEY ("id", "tenant_id")'
|
||||
);
|
||||
expect(sqlScript).not.toContain('PRIMARY KEY ("id", "tenant_id"),'); // Should not have unnamed PK
|
||||
});
|
||||
|
||||
it('should export composite primary key with CONSTRAINT name in MySQL', async () => {
|
||||
const dbmlContent = `
|
||||
Table "orders" {
|
||||
"order_id" int [not null]
|
||||
"product_id" int [not null]
|
||||
|
||||
Indexes {
|
||||
(order_id, product_id) [pk, name: "orders_order_product_pk"]
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const diagram = await importDBMLToDiagram(dbmlContent, {
|
||||
databaseType: DatabaseType.MYSQL,
|
||||
});
|
||||
|
||||
const sqlScript = exportMySQL({ diagram });
|
||||
|
||||
// Check that the SQL contains the named constraint
|
||||
expect(sqlScript).toContain(
|
||||
'CONSTRAINT `orders_order_product_pk` PRIMARY KEY (`order_id`, `product_id`)'
|
||||
);
|
||||
});
|
||||
|
||||
it('should export composite primary key with CONSTRAINT name in MSSQL', async () => {
|
||||
const dbmlContent = `
|
||||
Table "products" {
|
||||
"category_id" int [not null]
|
||||
"product_id" int [not null]
|
||||
|
||||
Indexes {
|
||||
(category_id, product_id) [pk, name: "pk_products"]
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const diagram = await importDBMLToDiagram(dbmlContent, {
|
||||
databaseType: DatabaseType.SQL_SERVER,
|
||||
});
|
||||
|
||||
const sqlScript = exportMSSQL({ diagram });
|
||||
|
||||
// Check that the SQL contains the named constraint
|
||||
expect(sqlScript).toContain(
|
||||
'CONSTRAINT [pk_products] PRIMARY KEY ([category_id], [product_id])'
|
||||
);
|
||||
});
|
||||
|
||||
it('should merge duplicate PK index with name', async () => {
|
||||
const dbmlContent = `
|
||||
Table "test" {
|
||||
"a" int [not null]
|
||||
"b" int [not null]
|
||||
|
||||
Indexes {
|
||||
(a, b) [pk]
|
||||
(a, b) [name: "test_pk_name"]
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const diagram = await importDBMLToDiagram(dbmlContent, {
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
});
|
||||
|
||||
expect(diagram.tables).toBeDefined();
|
||||
const table = diagram.tables![0];
|
||||
|
||||
// Should capture the name from the duplicate index
|
||||
const pkIndex = table.indexes.find((idx) => idx.isPrimaryKey);
|
||||
expect(pkIndex).toBeDefined();
|
||||
expect(pkIndex!.name).toBe('test_pk_name');
|
||||
|
||||
// Should only have the PK index
|
||||
expect(table.indexes).toHaveLength(1);
|
||||
|
||||
// Fields should be marked as primary keys
|
||||
expect(table.fields.filter((f) => f.primaryKey)).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should handle composite PK without name', async () => {
|
||||
const dbmlContent = `
|
||||
Table "simple" {
|
||||
"x" int [not null]
|
||||
"y" int [not null]
|
||||
|
||||
Indexes {
|
||||
(x, y) [pk]
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const diagram = await importDBMLToDiagram(dbmlContent, {
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
});
|
||||
|
||||
expect(diagram.tables).toBeDefined();
|
||||
const table = diagram.tables![0];
|
||||
|
||||
// PK index should not exist for composite PK without name
|
||||
const pkIndex = table.indexes.find((idx) => idx.isPrimaryKey);
|
||||
expect(pkIndex).toBeDefined();
|
||||
|
||||
const sqlScript = exportPostgreSQL({ diagram });
|
||||
|
||||
// Should have unnamed PRIMARY KEY
|
||||
expect(sqlScript).toContain('PRIMARY KEY ("x", "y")');
|
||||
expect(sqlScript).toContain('CONSTRAINT');
|
||||
});
|
||||
});
|
||||
@@ -177,7 +177,7 @@ Table ranks {
        expect(wizardsTable?.fields).toHaveLength(11);

        // Check indexes
        expect(wizardsTable?.indexes).toHaveLength(3);
        expect(wizardsTable?.indexes).toHaveLength(2);
        const emailIndex = wizardsTable?.indexes.find((idx) =>
            idx.name.includes('email')
        );
@@ -920,7 +920,7 @@ Note dragon_note {
        expect(hoardsTable).toBeDefined();

        // Verify all indexes are imported correctly
        expect(hoardsTable?.indexes).toHaveLength(4); // 3 from DBML + 1 implicit PK index
        expect(hoardsTable?.indexes).toHaveLength(3); // Should have 3 indexes as defined in DBML

        // Verify named indexes
        const uniqueDragonIndex = hoardsTable?.indexes.find(
@@ -1119,7 +1119,7 @@ Table "public_3"."comments" {
        ).toBe('timestamp');

        // Check posts indexes thoroughly
        expect(postsTable?.indexes).toHaveLength(3);
        expect(postsTable?.indexes).toHaveLength(2);

        // Index 1: Composite unique index on (content, user_id)
        const compositeIndex = postsTable?.indexes.find(
@@ -1154,7 +1154,7 @@ Table "public_3"."comments" {

        // Check comments table
        expect(commentsTable?.fields).toHaveLength(5);
        expect(commentsTable?.indexes).toHaveLength(2);
        expect(commentsTable?.indexes).toHaveLength(1);

        // Index: Unique index on id
        const idIndex = commentsTable?.indexes.find(
@@ -9,7 +9,7 @@ import { findDataTypeDataById } from '@/lib/data/data-types/data-types';
import { defaultTableColor } from '@/lib/colors';
import { DatabaseType } from '@/lib/domain/database-type';
import type Field from '@dbml/core/types/model_structure/field';
import { getTableIndexesWithPrimaryKey, type DBIndex } from '@/lib/domain';
import type { DBIndex } from '@/lib/domain';
import {
    DBCustomTypeKind,
    type DBCustomType,
@@ -100,7 +100,6 @@ interface DBMLIndex {
    columns: (string | DBMLIndexColumn)[];
    unique?: boolean;
    name?: string;
    pk?: boolean; // Primary key index flag
}

interface DBMLTable {
@@ -388,19 +387,15 @@ export const importDBMLToDiagram = async (
                        );
                    }

                    // For PK indexes, only use the name if explicitly provided
                    // For regular indexes, generate a default name if needed
                    // Generate a consistent index name
                    const indexName =
                        dbmlIndex.name ||
                        (!dbmlIndex.pk
                            ? `idx_${table.name}_${indexColumns.join('_')}`
                            : undefined);
                        `idx_${table.name}_${indexColumns.join('_')}`;

                    return {
                        columns: indexColumns,
                        unique: dbmlIndex.unique || false,
                        name: indexName,
                        pk: Boolean(dbmlIndex.pk) || false,
                    };
                }) || [],
        });
@@ -489,126 +484,29 @@ export const importDBMLToDiagram = async (
|
||||
};
|
||||
});
|
||||
|
||||
// Process composite primary keys from indexes with [pk] attribute
|
||||
let compositePKFields: string[] = [];
|
||||
let compositePKIndexName: string | undefined;
|
||||
|
||||
// Find PK indexes and mark fields as primary keys
|
||||
table.indexes?.forEach((dbmlIndex) => {
|
||||
if (dbmlIndex.pk) {
|
||||
// Extract column names from the columns array
|
||||
compositePKFields = dbmlIndex.columns.map((col) =>
|
||||
typeof col === 'string' ? col : col.value
|
||||
);
|
||||
// Only store the name if it was explicitly provided (not undefined)
|
||||
if (dbmlIndex.name) {
|
||||
compositePKIndexName = dbmlIndex.name;
|
||||
}
|
||||
// Mark fields as primary keys
|
||||
dbmlIndex.columns.forEach((col) => {
|
||||
const columnName =
|
||||
typeof col === 'string' ? col : col.value;
|
||||
const field = fields.find((f) => f.name === columnName);
|
||||
if (field) {
|
||||
field.primaryKey = true;
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// If we found a PK without a name, look for a duplicate index with just a name
|
||||
if (compositePKFields.length > 0 && !compositePKIndexName) {
|
||||
table.indexes?.forEach((dbmlIndex) => {
|
||||
if (
|
||||
!dbmlIndex.pk &&
|
||||
dbmlIndex.name &&
|
||||
dbmlIndex.columns.length === compositePKFields.length
|
||||
) {
|
||||
// Check if columns match
|
||||
const indexColumns = dbmlIndex.columns.map((col) =>
|
||||
typeof col === 'string' ? col : col.value
|
||||
);
|
||||
if (
|
||||
indexColumns.every(
|
||||
(col, i) => col === compositePKFields[i]
|
||||
)
|
||||
) {
|
||||
compositePKIndexName = dbmlIndex.name;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Convert DBML indexes to ChartDB indexes (excluding PK indexes and their duplicates)
|
||||
// Convert DBML indexes to ChartDB indexes
|
||||
const indexes: DBIndex[] =
|
||||
table.indexes
|
||||
?.filter((dbmlIndex) => {
|
||||
// Skip PK indexes - we'll handle them separately
|
||||
if (dbmlIndex.pk) return false;
|
||||
|
||||
// Skip duplicate indexes that match the composite PK
|
||||
// (when user has both [pk] and [name: "..."] on same fields)
|
||||
if (
|
||||
compositePKFields.length > 0 &&
|
||||
dbmlIndex.columns.length ===
|
||||
compositePKFields.length &&
|
||||
dbmlIndex.columns.every((col, i) => {
|
||||
const colName =
|
||||
typeof col === 'string' ? col : col.value;
|
||||
return colName === compositePKFields[i];
|
||||
})
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
})
|
||||
.map((dbmlIndex) => {
|
||||
const fieldIds = dbmlIndex.columns.map((columnName) => {
|
||||
const field = fields.find(
|
||||
(f) => f.name === columnName
|
||||
table.indexes?.map((dbmlIndex) => {
|
||||
const fieldIds = dbmlIndex.columns.map((columnName) => {
|
||||
const field = fields.find((f) => f.name === columnName);
|
||||
if (!field) {
|
||||
throw new Error(
|
||||
`Index references non-existent column: ${columnName}`
|
||||
);
|
||||
if (!field) {
|
||||
throw new Error(
|
||||
`Index references non-existent column: ${columnName}`
|
||||
);
|
||||
}
|
||||
return field.id;
|
||||
});
|
||||
}
|
||||
return field.id;
|
||||
});
|
||||
|
||||
return {
|
||||
id: generateId(),
|
||||
name:
|
||||
dbmlIndex.name ||
|
||||
`idx_${table.name}_${(dbmlIndex.columns as string[]).join('_')}`,
|
||||
fieldIds,
|
||||
unique: dbmlIndex.unique || false,
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
}) || [];
|
||||
|
||||
// Add PK as an index if it exists and has a name
|
||||
// Only create the PK index if there's an explicit name for it
|
||||
if (compositePKFields.length >= 1 && compositePKIndexName) {
|
||||
const pkFieldIds = compositePKFields.map((columnName) => {
|
||||
const field = fields.find((f) => f.name === columnName);
|
||||
if (!field) {
|
||||
throw new Error(
|
||||
`PK references non-existent column: ${columnName}`
|
||||
);
|
||||
}
|
||||
return field.id;
|
||||
});
|
||||
|
||||
indexes.push({
|
||||
id: generateId(),
|
||||
name: compositePKIndexName,
|
||||
fieldIds: pkFieldIds,
|
||||
unique: true,
|
||||
isPrimaryKey: true,
|
||||
createdAt: Date.now(),
|
||||
});
|
||||
}
|
||||
return {
|
||||
id: generateId(),
|
||||
name:
|
||||
dbmlIndex.name ||
|
||||
`idx_${table.name}_${(dbmlIndex.columns as string[]).join('_')}`,
|
||||
fieldIds,
|
||||
unique: dbmlIndex.unique || false,
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
}) || [];
|
||||
|
||||
// Extract table note/comment
|
||||
let tableComment: string | undefined;
|
||||
@@ -623,7 +521,7 @@ export const importDBMLToDiagram = async (
            }
        }

        const tableToReturn: DBTable = {
        return {
            id: generateId(),
            name: table.name.replace(/['"]/g, ''),
            schema:
@@ -642,13 +540,6 @@ export const importDBMLToDiagram = async (
            createdAt: Date.now(),
            comments: tableComment,
        } satisfies DBTable;

        return {
            ...tableToReturn,
            indexes: getTableIndexesWithPrimaryKey({
                table: tableToReturn,
            }),
        };
    });

    // Create relationships using the refs
@@ -1,192 +0,0 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { createTablesFromMetadata } from '../db-table';
|
||||
import { DatabaseType } from '../database-type';
|
||||
import type { DatabaseMetadata } from '@/lib/data/import-metadata/metadata-types/database-metadata';
|
||||
|
||||
describe('Composite Primary Key Name from Metadata Import', () => {
|
||||
it('should capture composite primary key name from metadata indexes', () => {
|
||||
const metadata: DatabaseMetadata = {
|
||||
database_name: 'test_db',
|
||||
version: '',
|
||||
fk_info: [],
|
||||
pk_info: [
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
column: 'master_user_id',
|
||||
pk_def: 'PRIMARY KEY (master_user_id, tenant_id, tenant_user_id)',
|
||||
},
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
column: 'tenant_id',
|
||||
pk_def: 'PRIMARY KEY (master_user_id, tenant_id, tenant_user_id)',
|
||||
},
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
column: 'tenant_user_id',
|
||||
pk_def: 'PRIMARY KEY (master_user_id, tenant_id, tenant_user_id)',
|
||||
},
|
||||
],
|
||||
columns: [
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
name: 'master_user_id',
|
||||
ordinal_position: 1,
|
||||
type: 'bigint',
|
||||
character_maximum_length: null,
|
||||
precision: null,
|
||||
nullable: false,
|
||||
default: '',
|
||||
collation: '',
|
||||
comment: '',
|
||||
},
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
name: 'tenant_id',
|
||||
ordinal_position: 2,
|
||||
type: 'bigint',
|
||||
character_maximum_length: null,
|
||||
precision: null,
|
||||
nullable: false,
|
||||
default: '',
|
||||
collation: '',
|
||||
comment: '',
|
||||
},
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
name: 'tenant_user_id',
|
||||
ordinal_position: 3,
|
||||
type: 'bigint',
|
||||
character_maximum_length: null,
|
||||
precision: null,
|
||||
nullable: false,
|
||||
default: '',
|
||||
collation: '',
|
||||
comment: '',
|
||||
},
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
name: 'enabled',
|
||||
ordinal_position: 4,
|
||||
type: 'boolean',
|
||||
character_maximum_length: null,
|
||||
precision: null,
|
||||
nullable: true,
|
||||
default: '',
|
||||
collation: '',
|
||||
comment: '',
|
||||
},
|
||||
],
|
||||
indexes: [
|
||||
// The composite PK index named "moshe"
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
name: 'moshe',
|
||||
column: 'master_user_id',
|
||||
index_type: 'btree',
|
||||
cardinality: 0,
|
||||
size: 8192,
|
||||
unique: true,
|
||||
column_position: 1,
|
||||
direction: 'asc',
|
||||
},
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
name: 'moshe',
|
||||
column: 'tenant_id',
|
||||
index_type: 'btree',
|
||||
cardinality: 0,
|
||||
size: 8192,
|
||||
unique: true,
|
||||
column_position: 2,
|
||||
direction: 'asc',
|
||||
},
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
name: 'moshe',
|
||||
column: 'tenant_user_id',
|
||||
index_type: 'btree',
|
||||
cardinality: 0,
|
||||
size: 8192,
|
||||
unique: true,
|
||||
column_position: 3,
|
||||
direction: 'asc',
|
||||
},
|
||||
// Another unique index
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
name: 'users_master_table_index_1',
|
||||
column: 'tenant_id',
|
||||
index_type: 'btree',
|
||||
cardinality: 0,
|
||||
size: 8192,
|
||||
unique: true,
|
||||
column_position: 1,
|
||||
direction: 'asc',
|
||||
},
|
||||
{
|
||||
schema: 'landlord',
|
||||
table: 'users_master_table',
|
||||
name: 'users_master_table_index_1',
|
||||
column: 'tenant_user_id',
|
||||
index_type: 'btree',
|
||||
cardinality: 0,
|
||||
size: 8192,
|
||||
unique: true,
|
||||
column_position: 2,
|
||||
direction: 'asc',
|
||||
},
|
||||
],
|
||||
            tables: [
                {
                    schema: 'landlord',
                    table: 'users_master_table',
                    rows: 0,
                    type: 'BASE TABLE',
                    engine: '',
                    collation: '',
                    comment: '',
                },
            ],
            views: [],
            custom_types: [],
        };

        const tables = createTablesFromMetadata({
            databaseMetadata: metadata,
            databaseType: DatabaseType.POSTGRESQL,
        });

        expect(tables).toHaveLength(1);
        const table = tables[0];

        // Check that the composite PK name was captured as "moshe" in the PK index
        const pkIndex = table.indexes.find((idx) => idx.isPrimaryKey);
        expect(pkIndex).toBeDefined();
        expect(pkIndex!.name).toBe('moshe');

        // Check that primary key fields are marked correctly
        const pkFields = table.fields.filter((f) => f.primaryKey);
        expect(pkFields).toHaveLength(3);
        expect(pkFields.map((f) => f.name).sort()).toEqual([
            'master_user_id',
            'tenant_id',
            'tenant_user_id',
        ]);

        // Check that we have both the PK index and the unique index
        expect(table.indexes).toHaveLength(2);
        const uniqueIndex = table.indexes.find((idx) => !idx.isPrimaryKey);
        expect(uniqueIndex!.name).toBe('users_master_table_index_1');
    });
});
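Illustration only (not part of the diff): a small sketch of how a caller could summarize the composite primary key that createTablesFromMetadata derives, using only the fields exercised by the test above. The helper name describePrimaryKey and the '@/lib/domain/db-table' import path are assumptions for this sketch.

import type { DBTable } from '@/lib/domain/db-table';

// Hypothetical helper (sketch): report the derived PK index name and its columns.
const describePrimaryKey = (table: DBTable): string | null => {
    // The derived PK index is the one flagged isPrimaryKey ('moshe' above).
    const pkIndex = table.indexes.find((idx) => idx.isPrimaryKey);
    if (!pkIndex) return null;

    // Fields flagged primaryKey are the PK columns.
    const pkColumns = table.fields
        .filter((f) => f.primaryKey)
        .map((f) => f.name);

    // For the metadata above: 'moshe (master_user_id, tenant_id, tenant_user_id)'
    return `${pkIndex.name} (${pkColumns.join(', ')})`;
};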
@@ -19,7 +19,6 @@ export interface DBField {
    unique: boolean;
    nullable: boolean;
    increment?: boolean | null;
    array?: boolean | null;
    createdAt: number;
    characterMaximumLength?: string | null;
    precision?: number | null;
@@ -37,7 +36,6 @@ export const dbFieldSchema: z.ZodType<DBField> = z.object({
    unique: z.boolean(),
    nullable: z.boolean(),
    increment: z.boolean().or(z.null()).optional(),
    array: z.boolean().or(z.null()).optional(),
    createdAt: z.number(),
    characterMaximumLength: z.string().or(z.null()).optional(),
    precision: z.number().or(z.null()).optional(),
@@ -73,48 +71,13 @@ export const createFieldsFromMetadata = ({
        pk.column.trim()
    );

    return sortedColumns.map((col: ColumnInfo): DBField => {
        // Check if type is an array (ends with [])
        const isArrayType = col.type.endsWith('[]');
        let baseType = col.type;

        // Extract base type and any parameters if it's an array
        if (isArrayType) {
            baseType = col.type.slice(0, -2); // Remove the [] suffix
        }

        // Extract parameters from types like "character varying(100)" or "numeric(10,2)"
        let charMaxLength = col.character_maximum_length;
        let precision = col.precision?.precision;
        let scale = col.precision?.scale;

        // Handle types with single parameter like varchar(100)
        const singleParamMatch = baseType.match(/^(.+?)\((\d+)\)$/);
        if (singleParamMatch) {
            baseType = singleParamMatch[1];
            if (!charMaxLength || charMaxLength === 'null') {
                charMaxLength = singleParamMatch[2];
            }
        }

        // Handle types with two parameters like numeric(10,2)
        const twoParamMatch = baseType.match(/^(.+?)\((\d+),\s*(\d+)\)$/);
        if (twoParamMatch) {
            baseType = twoParamMatch[1];
            if (!precision) {
                precision = parseInt(twoParamMatch[2]);
            }
            if (!scale) {
                scale = parseInt(twoParamMatch[3]);
            }
        }

        return {
    return sortedColumns.map(
        (col: ColumnInfo): DBField => ({
            id: generateId(),
            name: col.name,
            type: {
                id: baseType.split(' ').join('_').toLowerCase(),
                name: baseType.toLowerCase(),
                id: col.type.split(' ').join('_').toLowerCase(),
                name: col.type.toLowerCase(),
            },
            primaryKey: tablePrimaryKeysColumns.includes(col.name),
            unique: Object.values(aggregatedIndexes).some(
@@ -124,18 +87,20 @@ export const createFieldsFromMetadata = ({
                idx.columns[0].name === col.name
            ),
            nullable: Boolean(col.nullable),
            ...(isArrayType ? { array: true } : {}),
            ...(charMaxLength && charMaxLength !== 'null'
                ? { characterMaximumLength: charMaxLength }
            ...(col.character_maximum_length &&
            col.character_maximum_length !== 'null'
                ? { characterMaximumLength: col.character_maximum_length }
                : {}),
            ...(precision ? { precision } : {}),
            ...(scale ? { scale } : {}),
            ...(col.precision?.precision
                ? { precision: col.precision.precision }
                : {}),
            ...(col.precision?.scale ? { scale: col.precision.scale } : {}),
            ...(col.default ? { default: col.default } : {}),
            ...(col.collation ? { collation: col.collation } : {}),
            createdAt: Date.now(),
            comments: col.comment ? col.comment : undefined,
        };
    });
        })
    );
};
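Illustration only (not part of the diff): a self-contained sketch of the type-string parsing performed by the branch above, using the same regexes. The name parseColumnType is hypothetical, introduced here only for demonstration.

// Hypothetical helper (sketch): split a raw column type into base type,
// array flag, and length/precision/scale parameters.
const parseColumnType = (rawType: string) => {
    const isArrayType = rawType.endsWith('[]');
    let baseType = isArrayType ? rawType.slice(0, -2) : rawType;

    let characterMaximumLength: string | undefined;
    let precision: number | undefined;
    let scale: number | undefined;

    // Single parameter, e.g. "character varying(100)"
    const singleParamMatch = baseType.match(/^(.+?)\((\d+)\)$/);
    if (singleParamMatch) {
        baseType = singleParamMatch[1];
        characterMaximumLength = singleParamMatch[2];
    }

    // Two parameters, e.g. "numeric(10,2)"
    const twoParamMatch = baseType.match(/^(.+?)\((\d+),\s*(\d+)\)$/);
    if (twoParamMatch) {
        baseType = twoParamMatch[1];
        precision = parseInt(twoParamMatch[2]);
        scale = parseInt(twoParamMatch[3]);
    }

    return { baseType, isArrayType, characterMaximumLength, precision, scale };
};

// parseColumnType('varchar(100)[]') -> { baseType: 'varchar', isArrayType: true, characterMaximumLength: '100', ... }
// parseColumnType('numeric(10,2)')  -> { baseType: 'numeric', isArrayType: false, precision: 10, scale: 2, ... }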
export const generateDBFieldSuffix = (
@@ -3,7 +3,6 @@ import type { AggregatedIndexInfo } from '../data/import-metadata/metadata-types
import { generateId } from '../utils';
import type { DBField } from './db-field';
import { DatabaseType } from './database-type';
import type { DBTable } from './db-table';

export const INDEX_TYPES = [
    'btree',
@@ -30,7 +29,6 @@ export interface DBIndex {
    fieldIds: string[];
    createdAt: number;
    type?: IndexType | null;
    isPrimaryKey?: boolean | null;
}

export const dbIndexSchema: z.ZodType<DBIndex> = z.object({
@@ -40,7 +38,6 @@ export const dbIndexSchema: z.ZodType<DBIndex> = z.object({
    fieldIds: z.array(z.string()),
    createdAt: z.number(),
    type: z.enum(INDEX_TYPES).optional(),
    isPrimaryKey: z.boolean().or(z.null()).optional(),
});

export const createIndexesFromMetadata = ({
@@ -67,51 +64,3 @@ export const createIndexesFromMetadata = ({
export const databaseIndexTypes: { [key in DatabaseType]?: IndexType[] } = {
    [DatabaseType.POSTGRESQL]: ['btree', 'hash'],
};

export const getTablePrimaryKeyIndex = ({
    table,
}: {
    table: DBTable;
}): DBIndex | null => {
    const primaryKeyFields = table.fields.filter((f) => f.primaryKey);
    const existingPKIndex = table.indexes.find((idx) => idx.isPrimaryKey);

    if (primaryKeyFields.length === 0) {
        return null;
    }

    const pkFieldIds = primaryKeyFields.map((f) => f.id);

    if (existingPKIndex) {
        return {
            ...existingPKIndex,
            fieldIds: pkFieldIds,
        };
    } else {
        // Create new PK index for primary key(s)
        const pkIndex: DBIndex = {
            id: generateId(),
            name: `pk_${table.name}_${primaryKeyFields.map((f) => f.name).join('_')}`,
            fieldIds: pkFieldIds,
            unique: true,
            isPrimaryKey: true,
            createdAt: Date.now(),
        };

        return pkIndex;
    }
};

export const getTableIndexesWithPrimaryKey = ({
    table,
}: {
    table: DBTable;
}): DBIndex[] => {
    const primaryKeyIndex = getTablePrimaryKeyIndex({ table });
    const indexesWithoutPKIndex = table.indexes.filter(
        (idx) => !idx.isPrimaryKey
    );
    return primaryKeyIndex
        ? [primaryKeyIndex, ...indexesWithoutPKIndex]
        : indexesWithoutPKIndex;
};
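Illustration only (not part of the diff): a usage sketch of the helpers above, keeping a table's derived PK index in front of its user-defined indexes after the fields change. The wrapper name withSyncedPrimaryKeyIndex and the '@/lib/domain/...' import paths are assumptions for this sketch.

import { getTableIndexesWithPrimaryKey } from '@/lib/domain/db-index';
import type { DBTable } from '@/lib/domain/db-table';

// Hypothetical wrapper (sketch): recompute indexes so the PK index derived
// from fields marked primaryKey comes first, followed by the non-PK indexes.
const withSyncedPrimaryKeyIndex = (table: DBTable): DBTable => ({
    ...table,
    indexes: getTableIndexesWithPrimaryKey({ table }),
});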
@@ -203,57 +203,11 @@ export const createTablesFromMetadata = ({
            tableSchema,
        });

        // Check for composite primary key and find matching index name
        const primaryKeyFields = fields.filter((f) => f.primaryKey);
        let pkMatchingIndexName: string | undefined;
        let pkIndex: DBIndex | undefined;

        if (primaryKeyFields.length >= 1) {
            // We have a composite primary key, look for an index that matches all PK columns
            const pkFieldNames = primaryKeyFields.map((f) => f.name).sort();

            // Find an index that matches the primary key columns exactly
            const matchingIndex = aggregatedIndexes.find((index) => {
                const indexColumnNames = index.columns
                    .map((c) => c.name)
                    .sort();
                return (
                    indexColumnNames.length === pkFieldNames.length &&
                    indexColumnNames.every((col, i) => col === pkFieldNames[i])
                );
            });

            if (matchingIndex) {
                pkMatchingIndexName = matchingIndex.name;
                // Create a special PK index
                pkIndex = {
                    id: generateId(),
                    name: matchingIndex.name,
                    unique: true,
                    fieldIds: primaryKeyFields.map((f) => f.id),
                    createdAt: Date.now(),
                    isPrimaryKey: true,
                };
            }
        }

        // Filter out the index that matches the composite PK (to avoid duplication)
        const filteredAggregatedIndexes = pkMatchingIndexName
            ? aggregatedIndexes.filter(
                  (idx) => idx.name !== pkMatchingIndexName
              )
            : aggregatedIndexes;

        const dbIndexes = createIndexesFromMetadata({
            aggregatedIndexes: filteredAggregatedIndexes,
            aggregatedIndexes,
            fields,
        });

        // Add the PK index if it exists
        if (pkIndex) {
            dbIndexes.push(pkIndex);
        }

        // Determine if the current table is a view by checking against pre-computed sets
        const viewKey = generateTableKey({
            schemaName: tableSchema,
@@ -8,7 +8,6 @@ import type { FieldAttributeRange } from '@/lib/data/data-types/data-types';
import {
    findDataTypeDataById,
    supportsAutoIncrementDataType,
    supportsArrayDataType,
} from '@/lib/data/data-types/data-types';
import {
    Popover,
@@ -88,7 +87,6 @@ export const TableFieldPopover: React.FC<TableFieldPopoverProps> = ({
                unique: localField.unique,
                default: localField.default,
                increment: localField.increment,
                array: localField.array,
            });
        }
        prevFieldRef.current = localField;
@@ -104,13 +102,6 @@ export const TableFieldPopover: React.FC<TableFieldPopoverProps> = ({
        [field.type.name]
    );

    const supportsArray = useMemo(
        () =>
            databaseType === 'postgresql' &&
            supportsArrayDataType(field.type.name),
        [field.type.name, databaseType]
    );

    return (
        <Popover
            open={isOpen}
@@ -177,27 +168,6 @@ export const TableFieldPopover: React.FC<TableFieldPopoverProps> = ({
                        />
                    </div>
                ) : null}
                {supportsArray ? (
                    <div className="flex items-center justify-between">
                        <Label
                            htmlFor="array"
                            className="text-subtitle"
                        >
                            {t(
                                'side_panel.tables_section.table.field_actions.array'
                            )}
                        </Label>
                        <Checkbox
                            checked={localField.array ?? false}
                            onCheckedChange={(value) =>
                                setLocalField((current) => ({
                                    ...current,
                                    array: !!value,
                                }))
                            }
                        />
                    </div>
                ) : null}
                <div className="flex flex-col gap-2">
                    <Label htmlFor="default" className="text-subtitle">
                        {t(
@@ -7,7 +7,6 @@ import type { DataTypeData } from '@/lib/data/data-types/data-types';
import {
    dataTypeDataToDataType,
    sortedDataTypeMap,
    supportsArrayDataType,
} from '@/lib/data/data-types/data-types';
import {
    Tooltip,
@@ -176,13 +175,6 @@ export const TableField: React.FC<TableFieldProps> = ({
                }
            }

            // Check if the new type supports arrays - if not, clear the array property
            const newTypeName = dataType?.name ?? (value as string);
            const shouldClearArray =
                databaseType === 'postgresql' &&
                !supportsArrayDataType(newTypeName) &&
                field.array;

            updateField({
                characterMaximumLength,
                precision,
@@ -193,7 +185,6 @@ export const TableField: React.FC<TableFieldProps> = ({
                          name: value as string,
                      }
                ),
                ...(shouldClearArray ? { array: false } : {}),
            });
        },
        [
@@ -202,7 +193,6 @@ export const TableField: React.FC<TableFieldProps> = ({
            field.characterMaximumLength,
            field.precision,
            field.scale,
            field.array,
        ]
    );

@@ -1,5 +1,5 @@
import React, { useCallback, useMemo } from 'react';
import { Ellipsis, Trash2, KeyRound } from 'lucide-react';
import { Ellipsis, Trash2 } from 'lucide-react';
import { Button } from '@/components/button/button';
import {
    databaseIndexTypes,
@@ -106,45 +106,29 @@ export const TableIndex: React.FC<TableIndexProps> = ({
                    'side_panel.tables_section.table.no_types_found'
                )}
                keepOrder
                disabled={index.isPrimaryKey ?? false}
            />
            <div className="flex shrink-0 gap-1">
                {index.isPrimaryKey ? (
                    <Tooltip>
                        <TooltipTrigger asChild>
                            <span>
                                <TableIndexToggle pressed={true} disabled>
                                    <KeyRound className="h-3.5" />
                                </TableIndexToggle>
                            </span>
                        </TooltipTrigger>
                        <TooltipContent>
                            {t('side_panel.tables_section.table.primary_key')}
                        </TooltipContent>
                    </Tooltip>
                ) : (
                    <Tooltip>
                        <TooltipTrigger asChild>
                            <span>
                                <TableIndexToggle
                                    pressed={index.unique}
                                    onPressedChange={(value) =>
                                        updateIndex({
                                            unique: !!value,
                                        })
                                    }
                                >
                                    U
                                </TableIndexToggle>
                            </span>
                        </TooltipTrigger>
                        <TooltipContent>
                            {t(
                                'side_panel.tables_section.table.index_actions.unique'
                            )}
                        </TooltipContent>
                    </Tooltip>
                )}
                <Tooltip>
                    <TooltipTrigger asChild>
                        <span>
                            <TableIndexToggle
                                pressed={index.unique}
                                onPressedChange={(value) =>
                                    updateIndex({
                                        unique: !!value,
                                    })
                                }
                            >
                                U
                            </TableIndexToggle>
                        </span>
                    </TooltipTrigger>
                    <TooltipContent>
                        {t(
                            'side_panel.tables_section.table.index_actions.unique'
                        )}
                    </TooltipContent>
                </Tooltip>
                <Popover>
                    <PopoverTrigger asChild>
                        <Button
@@ -180,56 +164,52 @@ export const TableIndex: React.FC<TableIndexProps> = ({
                            }
                        />
                    </div>
                    {!index.isPrimaryKey ? (
                        <>
                            <div className="mt-2 flex items-center justify-between">
                                <Label
                                    htmlFor="width"
                                    className="text-subtitle"
                                >
                                    {t(
                                        'side_panel.tables_section.table.index_actions.unique'
                                    )}
                                </Label>
                                <Checkbox
                                    checked={index.unique}
                                    onCheckedChange={(value) =>
                                        updateIndex({
                                            unique: !!value,
                                        })
                                    }
                                />
                            </div>
                            {indexTypeOptions.length > 0 ? (
                                <div className="mt-2 flex flex-col gap-2">
                                    <Label
                                        htmlFor="indexType"
                                        className="text-subtitle"
                                    >
                                        {t(
                                            'side_panel.tables_section.table.index_actions.index_type'
                                        )}
                                    </Label>
                                    <SelectBox
                                        options={indexTypeOptions}
                                        value={index.type || 'btree'}
                                        onChange={updateIndexType}
                                    />
                                </div>
                            ) : null}
                            <Separator orientation="horizontal" />
                            <Button
                                variant="outline"
                                className="flex gap-2 !text-red-700"
                                onClick={removeIndex}
                    <div className="mt-2 flex items-center justify-between">
                        <Label
                            htmlFor="width"
                            className="text-subtitle"
                        >
                            {t(
                                'side_panel.tables_section.table.index_actions.unique'
                            )}
                        </Label>
                        <Checkbox
                            checked={index.unique}
                            onCheckedChange={(value) =>
                                updateIndex({
                                    unique: !!value,
                                })
                            }
                        />
                    </div>
                    {indexTypeOptions.length > 0 ? (
                        <div className="mt-2 flex flex-col gap-2">
                            <Label
                                htmlFor="indexType"
                                className="text-subtitle"
                            >
                                <Trash2 className="size-3.5 text-red-700" />
                                {t(
                                    'side_panel.tables_section.table.index_actions.delete_index'
                                    'side_panel.tables_section.table.index_actions.index_type'
                                )}
                            </Button>
                        </>
                            </Label>
                            <SelectBox
                                options={indexTypeOptions}
                                value={index.type || 'btree'}
                                onChange={updateIndexType}
                            />
                        </div>
                    ) : null}
                    <Separator orientation="horizontal" />
                    <Button
                        variant="outline"
                        className="flex gap-2 !text-red-700"
                        onClick={removeIndex}
                    >
                        <Trash2 className="size-3.5 text-red-700" />
                        {t(
                            'side_panel.tables_section.table.index_actions.delete_index'
                        )}
                    </Button>
                </div>
            </PopoverContent>
        </Popover>
@@ -224,27 +224,19 @@ export const TableListItemContent: React.FC<TableListItemContentProps> = ({
                        </div>
                    </AccordionTrigger>
                    <AccordionContent className="pb-0 pt-1">
                        {[...table.indexes]
                            .sort((a, b) => {
                                // Sort PK indexes first
                                if (a.isPrimaryKey && !b.isPrimaryKey)
                                    return -1;
                                if (!a.isPrimaryKey && b.isPrimaryKey) return 1;
                                return 0;
                            })
                            .map((index) => (
                                <TableIndex
                                    key={index.id}
                                    index={index}
                                    removeIndex={() =>
                                        removeIndex(table.id, index.id)
                                    }
                                    updateIndex={(attrs: Partial<DBIndex>) =>
                                        updateIndex(table.id, index.id, attrs)
                                    }
                                    fields={table.fields}
                                />
                            ))}
                        {table.indexes.map((index) => (
                            <TableIndex
                                key={index.id}
                                index={index}
                                removeIndex={() =>
                                    removeIndex(table.id, index.id)
                                }
                                updateIndex={(attrs: Partial<DBIndex>) =>
                                    updateIndex(table.id, index.id, attrs)
                                }
                                fields={table.fields}
                            />
                        ))}
                        <div className="flex justify-start p-1">
                            <Button
                                variant="ghost"