mirror of
https://github.com/chartdb/chartdb.git
synced 2025-11-03 13:33:25 +00:00
Compare commits
1 Commits
fix/relati
...
jf/fix_fk_
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1377bd524b |
@@ -43,8 +43,6 @@ export interface CodeSnippetProps {
|
||||
isComplete?: boolean;
|
||||
editorProps?: React.ComponentProps<EditorType>;
|
||||
actions?: CodeSnippetAction[];
|
||||
actionsTooltipSide?: 'top' | 'right' | 'bottom' | 'left';
|
||||
allowCopy?: boolean;
|
||||
}
|
||||
|
||||
export const CodeSnippet: React.FC<CodeSnippetProps> = React.memo(
|
||||
@@ -58,8 +56,6 @@ export const CodeSnippet: React.FC<CodeSnippetProps> = React.memo(
|
||||
isComplete = true,
|
||||
editorProps,
|
||||
actions,
|
||||
actionsTooltipSide,
|
||||
allowCopy = true,
|
||||
}) => {
|
||||
const { t } = useTranslation();
|
||||
const monaco = useMonaco();
|
||||
@@ -133,37 +129,33 @@ export const CodeSnippet: React.FC<CodeSnippetProps> = React.memo(
|
||||
<Suspense fallback={<Spinner />}>
|
||||
{isComplete ? (
|
||||
<div className="absolute right-1 top-1 z-10 flex flex-col gap-1">
|
||||
{allowCopy ? (
|
||||
<Tooltip
|
||||
onOpenChange={setTooltipOpen}
|
||||
open={isCopied || tooltipOpen}
|
||||
>
|
||||
<TooltipTrigger asChild>
|
||||
<span>
|
||||
<Button
|
||||
className="h-fit p-1.5"
|
||||
variant="outline"
|
||||
onClick={copyToClipboard}
|
||||
>
|
||||
{isCopied ? (
|
||||
<CopyCheck size={16} />
|
||||
) : (
|
||||
<Copy size={16} />
|
||||
)}
|
||||
</Button>
|
||||
</span>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent
|
||||
side={actionsTooltipSide}
|
||||
>
|
||||
{t(
|
||||
isCopied
|
||||
? 'copied'
|
||||
: 'copy_to_clipboard'
|
||||
)}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
) : null}
|
||||
<Tooltip
|
||||
onOpenChange={setTooltipOpen}
|
||||
open={isCopied || tooltipOpen}
|
||||
>
|
||||
<TooltipTrigger asChild>
|
||||
<span>
|
||||
<Button
|
||||
className="h-fit p-1.5"
|
||||
variant="outline"
|
||||
onClick={copyToClipboard}
|
||||
>
|
||||
{isCopied ? (
|
||||
<CopyCheck size={16} />
|
||||
) : (
|
||||
<Copy size={16} />
|
||||
)}
|
||||
</Button>
|
||||
</span>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
{t(
|
||||
isCopied
|
||||
? 'copied'
|
||||
: 'copy_to_clipboard'
|
||||
)}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
|
||||
{actions &&
|
||||
actions.length > 0 &&
|
||||
@@ -182,9 +174,7 @@ export const CodeSnippet: React.FC<CodeSnippetProps> = React.memo(
|
||||
</Button>
|
||||
</span>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent
|
||||
side={actionsTooltipSide}
|
||||
>
|
||||
<TooltipContent>
|
||||
{action.label}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
|
||||
@@ -43,19 +43,12 @@ export const setupDBMLLanguage = (monaco: Monaco) => {
|
||||
root: [
|
||||
[/\b(Table|Ref|Indexes)\b/, 'keyword'],
|
||||
[/\[.*?\]/, 'annotation'],
|
||||
[/'''/, 'string', '@tripleQuoteString'],
|
||||
[/".*?"/, 'string'],
|
||||
[/'.*?'/, 'string'],
|
||||
[/`.*?`/, 'string'],
|
||||
[/[{}]/, 'delimiter'],
|
||||
[/[<>]/, 'operator'],
|
||||
[new RegExp(`\\b(${datatypePattern})\\b`, 'i'), 'type'], // Added 'i' flag for case-insensitive matching
|
||||
],
|
||||
tripleQuoteString: [
|
||||
[/[^']+/, 'string'],
|
||||
[/'''/, 'string', '@pop'],
|
||||
[/'/, 'string'],
|
||||
],
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
@@ -93,8 +93,6 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
|
||||
(isOpen: boolean) => {
|
||||
setOpen?.(isOpen);
|
||||
setIsOpen(isOpen);
|
||||
|
||||
setTimeout(() => (document.body.style.pointerEvents = ''), 500);
|
||||
},
|
||||
[setOpen]
|
||||
);
|
||||
@@ -229,7 +227,7 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
|
||||
onSelect={() =>
|
||||
handleSelect(
|
||||
option.value,
|
||||
matches?.map((match) => match?.toString())
|
||||
matches?.map((match) => match.toString())
|
||||
)
|
||||
}
|
||||
>
|
||||
|
||||
@@ -78,9 +78,6 @@ export interface ChartDBContext {
|
||||
events: EventEmitter<ChartDBEvent>;
|
||||
readonly?: boolean;
|
||||
|
||||
highlightedCustomType?: DBCustomType;
|
||||
highlightCustomTypeId: (id?: string) => void;
|
||||
|
||||
filteredSchemas?: string[];
|
||||
filterSchemas: (schemaIds: string[]) => void;
|
||||
|
||||
@@ -297,7 +294,6 @@ export const chartDBContext = createContext<ChartDBContext>({
|
||||
areas: [],
|
||||
customTypes: [],
|
||||
schemas: [],
|
||||
highlightCustomTypeId: emptyFn,
|
||||
filteredSchemas: [],
|
||||
filterSchemas: emptyFn,
|
||||
currentDiagram: {
|
||||
|
||||
@@ -74,9 +74,6 @@ export const ChartDBProvider: React.FC<
|
||||
const [hiddenTableIds, setHiddenTableIds] = useState<string[]>([]);
|
||||
const { events: diffEvents } = useDiff();
|
||||
|
||||
const [highlightedCustomTypeId, setHighlightedCustomTypeId] =
|
||||
useState<string>();
|
||||
|
||||
const diffCalculatedHandler = useCallback((event: DiffCalculatedEvent) => {
|
||||
const { tablesAdded, fieldsAdded, relationshipsAdded } = event.data;
|
||||
setTables((tables) =>
|
||||
@@ -1534,37 +1531,22 @@ export const ChartDBProvider: React.FC<
|
||||
[db, diagramId, setAreas, getArea, addUndoAction, resetRedoStack]
|
||||
);
|
||||
|
||||
const highlightCustomTypeId = useCallback(
|
||||
(id?: string) => setHighlightedCustomTypeId(id),
|
||||
[setHighlightedCustomTypeId]
|
||||
);
|
||||
|
||||
const highlightedCustomType = useMemo(() => {
|
||||
return highlightedCustomTypeId
|
||||
? customTypes.find((type) => type.id === highlightedCustomTypeId)
|
||||
: undefined;
|
||||
}, [highlightedCustomTypeId, customTypes]);
|
||||
|
||||
const loadDiagramFromData: ChartDBContext['loadDiagramFromData'] =
|
||||
useCallback(
|
||||
(diagram) => {
|
||||
async (diagram) => {
|
||||
setDiagramId(diagram.id);
|
||||
setDiagramName(diagram.name);
|
||||
setDatabaseType(diagram.databaseType);
|
||||
setDatabaseEdition(diagram.databaseEdition);
|
||||
setTables(diagram.tables ?? []);
|
||||
setRelationships(diagram.relationships ?? []);
|
||||
setDependencies(diagram.dependencies ?? []);
|
||||
setAreas(diagram.areas ?? []);
|
||||
setCustomTypes(diagram.customTypes ?? []);
|
||||
setTables(diagram?.tables ?? []);
|
||||
setRelationships(diagram?.relationships ?? []);
|
||||
setDependencies(diagram?.dependencies ?? []);
|
||||
setAreas(diagram?.areas ?? []);
|
||||
setCustomTypes(diagram?.customTypes ?? []);
|
||||
setDiagramCreatedAt(diagram.createdAt);
|
||||
setDiagramUpdatedAt(diagram.updatedAt);
|
||||
setHighlightedCustomTypeId(undefined);
|
||||
|
||||
events.emit({ action: 'load_diagram', data: { diagram } });
|
||||
|
||||
resetRedoStack();
|
||||
resetUndoStack();
|
||||
},
|
||||
[
|
||||
setDiagramId,
|
||||
@@ -1578,10 +1560,7 @@ export const ChartDBProvider: React.FC<
|
||||
setCustomTypes,
|
||||
setDiagramCreatedAt,
|
||||
setDiagramUpdatedAt,
|
||||
setHighlightedCustomTypeId,
|
||||
events,
|
||||
resetRedoStack,
|
||||
resetUndoStack,
|
||||
]
|
||||
);
|
||||
|
||||
@@ -1846,8 +1825,6 @@ export const ChartDBProvider: React.FC<
|
||||
hiddenTableIds,
|
||||
addHiddenTableId,
|
||||
removeHiddenTableId,
|
||||
highlightCustomTypeId,
|
||||
highlightedCustomType,
|
||||
}}
|
||||
>
|
||||
{children}
|
||||
|
||||
@@ -19,9 +19,6 @@ export interface LocalConfigContext {
|
||||
showCardinality: boolean;
|
||||
setShowCardinality: (showCardinality: boolean) => void;
|
||||
|
||||
showFieldAttributes: boolean;
|
||||
setShowFieldAttributes: (showFieldAttributes: boolean) => void;
|
||||
|
||||
hideMultiSchemaNotification: boolean;
|
||||
setHideMultiSchemaNotification: (
|
||||
hideMultiSchemaNotification: boolean
|
||||
@@ -53,9 +50,6 @@ export const LocalConfigContext = createContext<LocalConfigContext>({
|
||||
showCardinality: true,
|
||||
setShowCardinality: emptyFn,
|
||||
|
||||
showFieldAttributes: true,
|
||||
setShowFieldAttributes: emptyFn,
|
||||
|
||||
hideMultiSchemaNotification: false,
|
||||
setHideMultiSchemaNotification: emptyFn,
|
||||
|
||||
|
||||
@@ -7,7 +7,6 @@ const themeKey = 'theme';
|
||||
const scrollActionKey = 'scroll_action';
|
||||
const schemasFilterKey = 'schemas_filter';
|
||||
const showCardinalityKey = 'show_cardinality';
|
||||
const showFieldAttributesKey = 'show_field_attributes';
|
||||
const hideMultiSchemaNotificationKey = 'hide_multi_schema_notification';
|
||||
const githubRepoOpenedKey = 'github_repo_opened';
|
||||
const starUsDialogLastOpenKey = 'star_us_dialog_last_open';
|
||||
@@ -35,11 +34,6 @@ export const LocalConfigProvider: React.FC<React.PropsWithChildren> = ({
|
||||
(localStorage.getItem(showCardinalityKey) || 'true') === 'true'
|
||||
);
|
||||
|
||||
const [showFieldAttributes, setShowFieldAttributes] =
|
||||
React.useState<boolean>(
|
||||
(localStorage.getItem(showFieldAttributesKey) || 'true') === 'true'
|
||||
);
|
||||
|
||||
const [hideMultiSchemaNotification, setHideMultiSchemaNotification] =
|
||||
React.useState<boolean>(
|
||||
(localStorage.getItem(hideMultiSchemaNotificationKey) ||
|
||||
@@ -125,8 +119,6 @@ export const LocalConfigProvider: React.FC<React.PropsWithChildren> = ({
|
||||
setSchemasFilter,
|
||||
showCardinality,
|
||||
setShowCardinality,
|
||||
showFieldAttributes,
|
||||
setShowFieldAttributes,
|
||||
hideMultiSchemaNotification,
|
||||
setHideMultiSchemaNotification,
|
||||
setGithubRepoOpened,
|
||||
|
||||
@@ -61,7 +61,6 @@ export const SelectTables: React.FC<SelectTablesProps> = ({
|
||||
const [showTables, setShowTables] = useState(true);
|
||||
const [showViews, setShowViews] = useState(false);
|
||||
const { t } = useTranslation();
|
||||
const [isImporting, setIsImporting] = useState(false);
|
||||
|
||||
// Prepare all tables and views with their metadata
|
||||
const allTables = useMemo(() => {
|
||||
@@ -259,37 +258,22 @@ export const SelectTables: React.FC<SelectTablesProps> = ({
|
||||
setSelectedTables(new Set());
|
||||
}, []);
|
||||
|
||||
const handleConfirm = useCallback(async () => {
|
||||
if (isImporting) {
|
||||
return;
|
||||
}
|
||||
const handleConfirm = useCallback(() => {
|
||||
const selectedTableObjects: SelectedTable[] = Array.from(selectedTables)
|
||||
.map((key): SelectedTable | null => {
|
||||
const table = allTables.find((t) => t.key === key);
|
||||
if (!table) return null;
|
||||
|
||||
setIsImporting(true);
|
||||
return {
|
||||
schema: table.schema,
|
||||
table: table.tableName,
|
||||
type: table.type,
|
||||
} satisfies SelectedTable;
|
||||
})
|
||||
.filter((t): t is SelectedTable => t !== null);
|
||||
|
||||
try {
|
||||
const selectedTableObjects: SelectedTable[] = Array.from(
|
||||
selectedTables
|
||||
)
|
||||
.map((key): SelectedTable | null => {
|
||||
const table = allTables.find((t) => t.key === key);
|
||||
if (!table) return null;
|
||||
|
||||
return {
|
||||
schema: table.schema,
|
||||
table: table.tableName,
|
||||
type: table.type,
|
||||
} satisfies SelectedTable;
|
||||
})
|
||||
.filter((t): t is SelectedTable => t !== null);
|
||||
|
||||
await onImport({
|
||||
selectedTables: selectedTableObjects,
|
||||
databaseMetadata,
|
||||
});
|
||||
} finally {
|
||||
setIsImporting(false);
|
||||
}
|
||||
}, [selectedTables, allTables, onImport, databaseMetadata, isImporting]);
|
||||
onImport({ selectedTables: selectedTableObjects, databaseMetadata });
|
||||
}, [selectedTables, allTables, onImport, databaseMetadata]);
|
||||
|
||||
const { isMd: isDesktop } = useBreakpoint('md');
|
||||
|
||||
@@ -651,29 +635,27 @@ export const SelectTables: React.FC<SelectTablesProps> = ({
|
||||
</div>
|
||||
{isDesktop ? renderPagination() : null}
|
||||
</DialogInternalContent>
|
||||
<DialogFooter className="flex flex-col-reverse gap-2 sm:flex-row sm:justify-end sm:space-x-2 md:justify-between md:gap-0">
|
||||
<Button
|
||||
type="button"
|
||||
variant="secondary"
|
||||
onClick={onBack}
|
||||
disabled={isImporting}
|
||||
>
|
||||
<DialogFooter
|
||||
// className={cn(
|
||||
// 'gap-2',
|
||||
// isDesktop
|
||||
// ? 'flex items-center justify-between'
|
||||
// : 'flex flex-col'
|
||||
// )}
|
||||
className="flex flex-col-reverse gap-2 sm:flex-row sm:justify-end sm:space-x-2 md:justify-between md:gap-0"
|
||||
>
|
||||
{/* Desktop layout */}
|
||||
|
||||
<Button type="button" variant="secondary" onClick={onBack}>
|
||||
{t('new_diagram_dialog.back')}
|
||||
</Button>
|
||||
|
||||
<Button
|
||||
onClick={handleConfirm}
|
||||
disabled={selectedTables.size === 0 || isImporting}
|
||||
disabled={selectedTables.size === 0}
|
||||
className="bg-pink-500 text-white hover:bg-pink-600"
|
||||
>
|
||||
{isImporting ? (
|
||||
<>
|
||||
<Spinner className="mr-2 size-4 text-white" />
|
||||
Importing...
|
||||
</>
|
||||
) : (
|
||||
`Import ${selectedTables.size} Tables`
|
||||
)}
|
||||
Import {selectedTables.size} Tables
|
||||
</Button>
|
||||
|
||||
{!isDesktop ? renderPagination() : null}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import React, { useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import React, { useCallback, useEffect, useMemo } from 'react';
|
||||
import { useDialog } from '@/hooks/use-dialog';
|
||||
import {
|
||||
Dialog,
|
||||
@@ -17,23 +17,11 @@ import type { DBSchema } from '@/lib/domain/db-schema';
|
||||
import { schemaNameToSchemaId } from '@/lib/domain/db-schema';
|
||||
import type { BaseDialogProps } from '../common/base-dialog-props';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Input } from '@/components/input/input';
|
||||
import { Separator } from '@/components/separator/separator';
|
||||
import { Group, SquarePlus } from 'lucide-react';
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
TooltipTrigger,
|
||||
} from '@/components/tooltip/tooltip';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import { defaultSchemas } from '@/lib/data/default-schemas';
|
||||
import { Label } from '@/components/label/label';
|
||||
|
||||
export interface TableSchemaDialogProps extends BaseDialogProps {
|
||||
table?: DBTable;
|
||||
schemas: DBSchema[];
|
||||
onConfirm: ({ schema }: { schema: DBSchema }) => void;
|
||||
allowSchemaCreation?: boolean;
|
||||
}
|
||||
|
||||
export const TableSchemaDialog: React.FC<TableSchemaDialogProps> = ({
|
||||
@@ -41,32 +29,13 @@ export const TableSchemaDialog: React.FC<TableSchemaDialogProps> = ({
|
||||
table,
|
||||
schemas,
|
||||
onConfirm,
|
||||
allowSchemaCreation = false,
|
||||
}) => {
|
||||
const { t } = useTranslation();
|
||||
const { databaseType, filteredSchemas, filterSchemas } = useChartDB();
|
||||
const [selectedSchemaId, setSelectedSchemaId] = useState<string>(
|
||||
const [selectedSchemaId, setSelectedSchemaId] = React.useState<string>(
|
||||
table?.schema
|
||||
? schemaNameToSchemaId(table.schema)
|
||||
: (schemas?.[0]?.id ?? '')
|
||||
);
|
||||
const allowSchemaSelection = useMemo(
|
||||
() => schemas && schemas.length > 0,
|
||||
[schemas]
|
||||
);
|
||||
|
||||
const defaultSchemaName = useMemo(
|
||||
() => defaultSchemas?.[databaseType],
|
||||
[databaseType]
|
||||
);
|
||||
|
||||
const [isCreatingNew, setIsCreatingNew] =
|
||||
useState<boolean>(!allowSchemaSelection);
|
||||
const [newSchemaName, setNewSchemaName] = useState<string>(
|
||||
allowSchemaCreation && !allowSchemaSelection
|
||||
? (defaultSchemaName ?? '')
|
||||
: ''
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
if (!dialog.open) return;
|
||||
@@ -75,56 +44,15 @@ export const TableSchemaDialog: React.FC<TableSchemaDialogProps> = ({
|
||||
? schemaNameToSchemaId(table.schema)
|
||||
: (schemas?.[0]?.id ?? '')
|
||||
);
|
||||
setIsCreatingNew(!allowSchemaSelection);
|
||||
setNewSchemaName(
|
||||
allowSchemaCreation && !allowSchemaSelection
|
||||
? (defaultSchemaName ?? '')
|
||||
: ''
|
||||
);
|
||||
}, [
|
||||
defaultSchemaName,
|
||||
dialog.open,
|
||||
schemas,
|
||||
table?.schema,
|
||||
allowSchemaSelection,
|
||||
allowSchemaCreation,
|
||||
]);
|
||||
|
||||
}, [dialog.open, schemas, table?.schema]);
|
||||
const { closeTableSchemaDialog } = useDialog();
|
||||
|
||||
const handleConfirm = useCallback(() => {
|
||||
let createdSchemaId: string;
|
||||
if (isCreatingNew && newSchemaName.trim()) {
|
||||
const newSchema: DBSchema = {
|
||||
id: schemaNameToSchemaId(newSchemaName.trim()),
|
||||
name: newSchemaName.trim(),
|
||||
tableCount: 0,
|
||||
};
|
||||
const schema = schemas.find((s) => s.id === selectedSchemaId);
|
||||
if (!schema) return;
|
||||
|
||||
createdSchemaId = newSchema.id;
|
||||
|
||||
onConfirm({ schema: newSchema });
|
||||
} else {
|
||||
const schema = schemas.find((s) => s.id === selectedSchemaId);
|
||||
if (!schema) return;
|
||||
|
||||
createdSchemaId = schema.id;
|
||||
onConfirm({ schema });
|
||||
}
|
||||
|
||||
filterSchemas([
|
||||
...(filteredSchemas ?? schemas.map((s) => s.id)),
|
||||
createdSchemaId,
|
||||
]);
|
||||
}, [
|
||||
onConfirm,
|
||||
selectedSchemaId,
|
||||
schemas,
|
||||
isCreatingNew,
|
||||
newSchemaName,
|
||||
filteredSchemas,
|
||||
filterSchemas,
|
||||
]);
|
||||
onConfirm({ schema });
|
||||
}, [onConfirm, selectedSchemaId, schemas]);
|
||||
|
||||
const schemaOptions: SelectBoxOption[] = useMemo(
|
||||
() =>
|
||||
@@ -135,25 +63,6 @@ export const TableSchemaDialog: React.FC<TableSchemaDialogProps> = ({
|
||||
[schemas]
|
||||
);
|
||||
|
||||
const renderSwitchCreateOrSelectButton = useCallback(
|
||||
() => (
|
||||
<Button
|
||||
variant="outline"
|
||||
className="w-full justify-start"
|
||||
onClick={() => setIsCreatingNew(!isCreatingNew)}
|
||||
disabled={!allowSchemaSelection || !allowSchemaCreation}
|
||||
>
|
||||
{!isCreatingNew ? (
|
||||
<SquarePlus className="mr-2 size-4 " />
|
||||
) : (
|
||||
<Group className="mr-2 size-4 " />
|
||||
)}
|
||||
{isCreatingNew ? 'Select existing schema' : 'Create new schema'}
|
||||
</Button>
|
||||
),
|
||||
[isCreatingNew, allowSchemaSelection, allowSchemaCreation]
|
||||
);
|
||||
|
||||
return (
|
||||
<Dialog
|
||||
{...dialog}
|
||||
@@ -161,106 +70,48 @@ export const TableSchemaDialog: React.FC<TableSchemaDialogProps> = ({
|
||||
if (!open) {
|
||||
closeTableSchemaDialog();
|
||||
}
|
||||
|
||||
setTimeout(() => (document.body.style.pointerEvents = ''), 500);
|
||||
}}
|
||||
>
|
||||
<DialogContent className="flex flex-col" showClose>
|
||||
<DialogHeader>
|
||||
<DialogTitle>
|
||||
{!allowSchemaSelection && allowSchemaCreation
|
||||
? t('create_table_schema_dialog.title')
|
||||
: table
|
||||
? t('update_table_schema_dialog.title')
|
||||
: t('new_table_schema_dialog.title')}
|
||||
{table
|
||||
? t('update_table_schema_dialog.title')
|
||||
: t('new_table_schema_dialog.title')}
|
||||
</DialogTitle>
|
||||
<DialogDescription>
|
||||
{!allowSchemaSelection && allowSchemaCreation
|
||||
? t('create_table_schema_dialog.description')
|
||||
: table
|
||||
? t('update_table_schema_dialog.description', {
|
||||
tableName: table.name,
|
||||
})
|
||||
: t('new_table_schema_dialog.description')}
|
||||
{table
|
||||
? t('update_table_schema_dialog.description', {
|
||||
tableName: table.name,
|
||||
})
|
||||
: t('new_table_schema_dialog.description')}
|
||||
</DialogDescription>
|
||||
</DialogHeader>
|
||||
<div className="grid gap-4 py-1">
|
||||
<div className="grid w-full items-center gap-4">
|
||||
{!isCreatingNew ? (
|
||||
<SelectBox
|
||||
options={schemaOptions}
|
||||
multiple={false}
|
||||
value={selectedSchemaId}
|
||||
onChange={(value) =>
|
||||
setSelectedSchemaId(value as string)
|
||||
}
|
||||
/>
|
||||
) : (
|
||||
<div className="flex flex-col gap-2">
|
||||
{allowSchemaCreation &&
|
||||
!allowSchemaSelection ? (
|
||||
<Label htmlFor="new-schema-name">
|
||||
Schema Name
|
||||
</Label>
|
||||
) : null}
|
||||
<Input
|
||||
id="new-schema-name"
|
||||
value={newSchemaName}
|
||||
onChange={(e) =>
|
||||
setNewSchemaName(e.target.value)
|
||||
}
|
||||
placeholder={`Enter schema name.${defaultSchemaName ? ` e.g. ${defaultSchemaName}.` : ''}`}
|
||||
autoFocus
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{allowSchemaCreation && allowSchemaSelection ? (
|
||||
<>
|
||||
<div className="relative">
|
||||
<Separator className="my-2" />
|
||||
<span className="absolute left-1/2 top-1/2 -translate-x-1/2 -translate-y-1/2 bg-background px-2 text-xs text-muted-foreground">
|
||||
or
|
||||
</span>
|
||||
</div>
|
||||
{allowSchemaSelection ? (
|
||||
renderSwitchCreateOrSelectButton()
|
||||
) : (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<span>
|
||||
{renderSwitchCreateOrSelectButton()}
|
||||
</span>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>No existing schemas available</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
)}
|
||||
</>
|
||||
) : null}
|
||||
<SelectBox
|
||||
options={schemaOptions}
|
||||
multiple={false}
|
||||
value={selectedSchemaId}
|
||||
onChange={(value) =>
|
||||
setSelectedSchemaId(value as string)
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<DialogFooter className="flex gap-1 md:justify-between">
|
||||
<DialogClose asChild>
|
||||
<Button variant="secondary">
|
||||
{isCreatingNew
|
||||
? t('create_table_schema_dialog.cancel')
|
||||
: table
|
||||
? t('update_table_schema_dialog.cancel')
|
||||
: t('new_table_schema_dialog.cancel')}
|
||||
{table
|
||||
? t('update_table_schema_dialog.cancel')
|
||||
: t('new_table_schema_dialog.cancel')}
|
||||
</Button>
|
||||
</DialogClose>
|
||||
<DialogClose asChild>
|
||||
<Button
|
||||
onClick={handleConfirm}
|
||||
disabled={isCreatingNew && !newSchemaName.trim()}
|
||||
>
|
||||
{isCreatingNew
|
||||
? t('create_table_schema_dialog.create')
|
||||
: table
|
||||
? t('update_table_schema_dialog.confirm')
|
||||
: t('new_table_schema_dialog.confirm')}
|
||||
<Button onClick={handleConfirm}>
|
||||
{table
|
||||
? t('update_table_schema_dialog.confirm')
|
||||
: t('new_table_schema_dialog.confirm')}
|
||||
</Button>
|
||||
</DialogClose>
|
||||
</DialogFooter>
|
||||
|
||||
@@ -83,7 +83,6 @@
|
||||
}
|
||||
body {
|
||||
@apply bg-background text-foreground;
|
||||
overscroll-behavior-x: none;
|
||||
}
|
||||
|
||||
.text-editable {
|
||||
|
||||
@@ -23,25 +23,23 @@ import { bn, bnMetadata } from './locales/bn';
|
||||
import { gu, guMetadata } from './locales/gu';
|
||||
import { vi, viMetadata } from './locales/vi';
|
||||
import { ar, arMetadata } from './locales/ar';
|
||||
import { hr, hrMetadata } from './locales/hr';
|
||||
|
||||
export const languages: LanguageMetadata[] = [
|
||||
enMetadata,
|
||||
esMetadata,
|
||||
frMetadata,
|
||||
deMetadata,
|
||||
esMetadata,
|
||||
ukMetadata,
|
||||
ruMetadata,
|
||||
trMetadata,
|
||||
hrMetadata,
|
||||
pt_BRMetadata,
|
||||
hiMetadata,
|
||||
jaMetadata,
|
||||
ko_KRMetadata,
|
||||
pt_BRMetadata,
|
||||
ukMetadata,
|
||||
ruMetadata,
|
||||
zh_CNMetadata,
|
||||
zh_TWMetadata,
|
||||
neMetadata,
|
||||
mrMetadata,
|
||||
trMetadata,
|
||||
id_IDMetadata,
|
||||
teMetadata,
|
||||
bnMetadata,
|
||||
@@ -72,7 +70,6 @@ const resources = {
|
||||
gu,
|
||||
vi,
|
||||
ar,
|
||||
hr,
|
||||
};
|
||||
|
||||
i18n.use(LanguageDetector)
|
||||
|
||||
@@ -26,8 +26,6 @@ export const ar: LanguageTranslation = {
|
||||
hide_sidebar: 'إخفاء الشريط الجانبي',
|
||||
hide_cardinality: 'إخفاء الكاردينالية',
|
||||
show_cardinality: 'إظهار الكاردينالية',
|
||||
hide_field_attributes: 'إخفاء خصائص الحقل',
|
||||
show_field_attributes: 'إظهار خصائص الحقل',
|
||||
zoom_on_scroll: 'تكبير/تصغير عند التمرير',
|
||||
theme: 'المظهر',
|
||||
show_dependencies: 'إظهار الاعتمادات',
|
||||
@@ -153,8 +151,6 @@ export const ar: LanguageTranslation = {
|
||||
delete_field: 'حذف الحقل',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'الدقة',
|
||||
scale: 'النطاق',
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
},
|
||||
@@ -257,12 +253,9 @@ export const ar: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -280,9 +273,6 @@ export const ar: LanguageTranslation = {
|
||||
highlight_overlapping_tables: 'تمييز الجداول المتداخلة',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
@@ -414,13 +404,6 @@ export const ar: LanguageTranslation = {
|
||||
cancel: 'إلغاء',
|
||||
confirm: 'تغيير',
|
||||
},
|
||||
create_table_schema_dialog: {
|
||||
title: 'إنشاء مخطط جديد',
|
||||
description:
|
||||
'لا توجد مخططات حتى الآن. قم بإنشاء أول مخطط لتنظيم جداولك.',
|
||||
create: 'إنشاء',
|
||||
cancel: 'إلغاء',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: '!ساعدنا على التحسن',
|
||||
|
||||
@@ -26,8 +26,6 @@ export const bn: LanguageTranslation = {
|
||||
hide_sidebar: 'সাইডবার লুকান',
|
||||
hide_cardinality: 'কার্ডিনালিটি লুকান',
|
||||
show_cardinality: 'কার্ডিনালিটি দেখান',
|
||||
hide_field_attributes: 'ফিল্ড অ্যাট্রিবিউট লুকান',
|
||||
show_field_attributes: 'ফিল্ড অ্যাট্রিবিউট দেখান',
|
||||
zoom_on_scroll: 'স্ক্রলে জুম করুন',
|
||||
theme: 'থিম',
|
||||
show_dependencies: 'নির্ভরতাগুলি দেখান',
|
||||
@@ -157,8 +155,6 @@ export const bn: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'নির্ভুলতা',
|
||||
scale: 'স্কেল',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'ইনডেক্স কর্ম',
|
||||
@@ -258,12 +254,9 @@ export const bn: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -279,11 +272,7 @@ export const bn: LanguageTranslation = {
|
||||
redo: 'পুনরায় করুন',
|
||||
reorder_diagram: 'ডায়াগ্রাম পুনর্বিন্যাস করুন',
|
||||
highlight_overlapping_tables: 'ওভারল্যাপিং টেবিল হাইলাইট করুন',
|
||||
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
@@ -416,13 +405,6 @@ export const bn: LanguageTranslation = {
|
||||
cancel: 'বাতিল করুন',
|
||||
confirm: 'পরিবর্তন করুন',
|
||||
},
|
||||
create_table_schema_dialog: {
|
||||
title: 'নতুন স্কিমা তৈরি করুন',
|
||||
description:
|
||||
'এখনও কোনো স্কিমা নেই। আপনার টেবিলগুলি সংগঠিত করতে আপনার প্রথম স্কিমা তৈরি করুন।',
|
||||
create: 'তৈরি করুন',
|
||||
cancel: 'বাতিল করুন',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'আমাদের উন্নত করতে সাহায্য করুন!',
|
||||
|
||||
@@ -26,8 +26,6 @@ export const de: LanguageTranslation = {
|
||||
hide_sidebar: 'Seitenleiste ausblenden',
|
||||
hide_cardinality: 'Kardinalität ausblenden',
|
||||
show_cardinality: 'Kardinalität anzeigen',
|
||||
hide_field_attributes: 'Feldattribute ausblenden',
|
||||
show_field_attributes: 'Feldattribute anzeigen',
|
||||
zoom_on_scroll: 'Zoom beim Scrollen',
|
||||
theme: 'Stil',
|
||||
show_dependencies: 'Abhängigkeiten anzeigen',
|
||||
@@ -158,8 +156,6 @@ export const de: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'Präzision',
|
||||
scale: 'Skalierung',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Indexattribute',
|
||||
@@ -260,12 +256,9 @@ export const de: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -280,11 +273,6 @@ export const de: LanguageTranslation = {
|
||||
undo: 'Rückgängig',
|
||||
redo: 'Wiederholen',
|
||||
reorder_diagram: 'Diagramm neu anordnen',
|
||||
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'Überlappende Tabellen hervorheben',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -420,13 +408,6 @@ export const de: LanguageTranslation = {
|
||||
cancel: 'Abbrechen',
|
||||
confirm: 'Ändern',
|
||||
},
|
||||
create_table_schema_dialog: {
|
||||
title: 'Neues Schema erstellen',
|
||||
description:
|
||||
'Es existieren noch keine Schemas. Erstellen Sie Ihr erstes Schema, um Ihre Tabellen zu organisieren.',
|
||||
create: 'Erstellen',
|
||||
cancel: 'Abbrechen',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'Hilf uns, uns zu verbessern!',
|
||||
|
||||
@@ -26,8 +26,6 @@ export const en = {
|
||||
hide_sidebar: 'Hide Sidebar',
|
||||
hide_cardinality: 'Hide Cardinality',
|
||||
show_cardinality: 'Show Cardinality',
|
||||
hide_field_attributes: 'Hide Field Attributes',
|
||||
show_field_attributes: 'Show Field Attributes',
|
||||
zoom_on_scroll: 'Zoom on Scroll',
|
||||
theme: 'Theme',
|
||||
show_dependencies: 'Show Dependencies',
|
||||
@@ -145,8 +143,6 @@ export const en = {
|
||||
title: 'Field Attributes',
|
||||
unique: 'Unique',
|
||||
character_length: 'Max Length',
|
||||
precision: 'Precision',
|
||||
scale: 'Scale',
|
||||
comments: 'Comments',
|
||||
no_comments: 'No comments',
|
||||
default_value: 'Default Value',
|
||||
@@ -250,11 +246,8 @@ export const en = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
delete_custom_type: 'Delete',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
@@ -271,9 +264,6 @@ export const en = {
|
||||
redo: 'Redo',
|
||||
reorder_diagram: 'Reorder Diagram',
|
||||
highlight_overlapping_tables: 'Highlight Overlapping Tables',
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
@@ -406,14 +396,6 @@ export const en = {
|
||||
confirm: 'Change',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'Create New Schema',
|
||||
description:
|
||||
'No schemas exist yet. Create your first schema to organize your tables.',
|
||||
create: 'Create',
|
||||
cancel: 'Cancel',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'Help us improve!',
|
||||
description:
|
||||
|
||||
@@ -24,8 +24,6 @@ export const es: LanguageTranslation = {
|
||||
view: 'Ver',
|
||||
hide_cardinality: 'Ocultar Cardinalidad',
|
||||
show_cardinality: 'Mostrar Cardinalidad',
|
||||
show_field_attributes: 'Mostrar Atributos de Campo',
|
||||
hide_field_attributes: 'Ocultar Atributos de Campo',
|
||||
show_sidebar: 'Mostrar Barra Lateral',
|
||||
hide_sidebar: 'Ocultar Barra Lateral',
|
||||
zoom_on_scroll: 'Zoom al Desplazarse',
|
||||
@@ -147,8 +145,6 @@ export const es: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'Precisión',
|
||||
scale: 'Escala',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Atributos del Índice',
|
||||
@@ -248,12 +244,9 @@ export const es: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -268,10 +261,6 @@ export const es: LanguageTranslation = {
|
||||
undo: 'Deshacer',
|
||||
redo: 'Rehacer',
|
||||
reorder_diagram: 'Reordenar Diagrama',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'Resaltar tablas superpuestas',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -408,13 +397,6 @@ export const es: LanguageTranslation = {
|
||||
cancel: 'Cancelar',
|
||||
confirm: 'Cambiar',
|
||||
},
|
||||
create_table_schema_dialog: {
|
||||
title: 'Crear Nuevo Esquema',
|
||||
description:
|
||||
'Aún no existen esquemas. Crea tu primer esquema para organizar tus tablas.',
|
||||
create: 'Crear',
|
||||
cancel: 'Cancelar',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: '¡Ayúdanos a mejorar!',
|
||||
|
||||
@@ -26,8 +26,6 @@ export const fr: LanguageTranslation = {
|
||||
hide_sidebar: 'Cacher la Barre Latérale',
|
||||
hide_cardinality: 'Cacher la Cardinalité',
|
||||
show_cardinality: 'Afficher la Cardinalité',
|
||||
hide_field_attributes: 'Masquer les Attributs de Champ',
|
||||
show_field_attributes: 'Afficher les Attributs de Champ',
|
||||
zoom_on_scroll: 'Zoom sur le Défilement',
|
||||
theme: 'Thème',
|
||||
show_dependencies: 'Afficher les Dépendances',
|
||||
@@ -145,8 +143,6 @@ export const fr: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'Précision',
|
||||
scale: 'Échelle',
|
||||
},
|
||||
index_actions: {
|
||||
title: "Attributs de l'Index",
|
||||
@@ -246,12 +242,9 @@ export const fr: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -266,10 +259,6 @@ export const fr: LanguageTranslation = {
|
||||
undo: 'Annuler',
|
||||
redo: 'Rétablir',
|
||||
reorder_diagram: 'Réorganiser le Diagramme',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'Surligner les tables chevauchées',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -388,13 +377,6 @@ export const fr: LanguageTranslation = {
|
||||
cancel: 'Annuler',
|
||||
confirm: 'Modifier',
|
||||
},
|
||||
create_table_schema_dialog: {
|
||||
title: 'Créer un Nouveau Schéma',
|
||||
description:
|
||||
"Aucun schéma n'existe encore. Créez votre premier schéma pour organiser vos tables.",
|
||||
create: 'Créer',
|
||||
cancel: 'Annuler',
|
||||
},
|
||||
|
||||
create_relationship_dialog: {
|
||||
title: 'Créer une Relation',
|
||||
|
||||
@@ -26,8 +26,6 @@ export const gu: LanguageTranslation = {
|
||||
hide_sidebar: 'સાઇડબાર છુપાવો',
|
||||
hide_cardinality: 'કાર્ડિનાલિટી છુપાવો',
|
||||
show_cardinality: 'કાર્ડિનાલિટી બતાવો',
|
||||
hide_field_attributes: 'ફીલ્ડ અટ્રિબ્યુટ્સ છુપાવો',
|
||||
show_field_attributes: 'ફીલ્ડ અટ્રિબ્યુટ્સ બતાવો',
|
||||
zoom_on_scroll: 'સ્ક્રોલ પર ઝૂમ કરો',
|
||||
theme: 'થિમ',
|
||||
show_dependencies: 'નિર્ભરતાઓ બતાવો',
|
||||
@@ -158,8 +156,6 @@ export const gu: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'ચોકસાઈ',
|
||||
scale: 'માપ',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'ઇન્ડેક્સ લક્ષણો',
|
||||
@@ -259,12 +255,9 @@ export const gu: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -279,10 +272,6 @@ export const gu: LanguageTranslation = {
|
||||
undo: 'અનડુ',
|
||||
redo: 'રીડુ',
|
||||
reorder_diagram: 'ડાયાગ્રામ ફરીથી વ્યવસ્થિત કરો',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'ઓવરલેપ કરતો ટેબલ હાઇલાઇટ કરો',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -417,14 +406,6 @@ export const gu: LanguageTranslation = {
|
||||
confirm: 'બદલો',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'નવું સ્કીમા બનાવો',
|
||||
description:
|
||||
'હજી સુધી કોઈ સ્કીમા અસ્તિત્વમાં નથી. તમારા ટેબલ્સ ને વ્યવસ્થિત કરવા માટે તમારું પહેલું સ્કીમા બનાવો.',
|
||||
create: 'બનાવો',
|
||||
cancel: 'રદ કરો',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'અમને સુધારવામાં મદદ કરો!',
|
||||
description:
|
||||
|
||||
@@ -26,8 +26,6 @@ export const hi: LanguageTranslation = {
|
||||
hide_sidebar: 'साइडबार छिपाएँ',
|
||||
hide_cardinality: 'कार्डिनैलिटी छिपाएँ',
|
||||
show_cardinality: 'कार्डिनैलिटी दिखाएँ',
|
||||
hide_field_attributes: 'फ़ील्ड विशेषताएँ छिपाएँ',
|
||||
show_field_attributes: 'फ़ील्ड विशेषताएँ दिखाएँ',
|
||||
zoom_on_scroll: 'स्क्रॉल पर ज़ूम',
|
||||
theme: 'थीम',
|
||||
show_dependencies: 'निर्भरता दिखाएँ',
|
||||
@@ -157,8 +155,6 @@ export const hi: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'Precision',
|
||||
scale: 'Scale',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'सूचकांक विशेषताएँ',
|
||||
@@ -259,12 +255,9 @@ export const hi: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -279,10 +272,6 @@ export const hi: LanguageTranslation = {
|
||||
undo: 'पूर्ववत करें',
|
||||
redo: 'पुनः करें',
|
||||
reorder_diagram: 'आरेख पुनः व्यवस्थित करें',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'ओवरलैपिंग तालिकाओं को हाइलाइट करें',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -420,14 +409,6 @@ export const hi: LanguageTranslation = {
|
||||
confirm: 'बदलें',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'नया स्कीमा बनाएं',
|
||||
description:
|
||||
'अभी तक कोई स्कीमा मौजूद नहीं है। अपनी तालिकाओं को व्यवस्थित करने के लिए अपना पहला स्कीमा बनाएं।',
|
||||
create: 'बनाएं',
|
||||
cancel: 'रद्द करें',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'हमें सुधारने में मदद करें!',
|
||||
description:
|
||||
|
||||
@@ -1,503 +0,0 @@
|
||||
import type { LanguageMetadata, LanguageTranslation } from '../types';
|
||||
|
||||
export const hr: LanguageTranslation = {
|
||||
translation: {
|
||||
menu: {
|
||||
file: {
|
||||
file: 'Datoteka',
|
||||
new: 'Nova',
|
||||
open: 'Otvori',
|
||||
save: 'Spremi',
|
||||
import: 'Uvezi',
|
||||
export_sql: 'Izvezi SQL',
|
||||
export_as: 'Izvezi kao',
|
||||
delete_diagram: 'Izbriši dijagram',
|
||||
exit: 'Izađi',
|
||||
},
|
||||
edit: {
|
||||
edit: 'Uredi',
|
||||
undo: 'Poništi',
|
||||
redo: 'Ponovi',
|
||||
clear: 'Očisti',
|
||||
},
|
||||
view: {
|
||||
view: 'Prikaz',
|
||||
show_sidebar: 'Prikaži bočnu traku',
|
||||
hide_sidebar: 'Sakrij bočnu traku',
|
||||
hide_cardinality: 'Sakrij kardinalnost',
|
||||
show_cardinality: 'Prikaži kardinalnost',
|
||||
hide_field_attributes: 'Sakrij atribute polja',
|
||||
show_field_attributes: 'Prikaži atribute polja',
|
||||
zoom_on_scroll: 'Zumiranje pri skrolanju',
|
||||
theme: 'Tema',
|
||||
show_dependencies: 'Prikaži ovisnosti',
|
||||
hide_dependencies: 'Sakrij ovisnosti',
|
||||
show_minimap: 'Prikaži mini kartu',
|
||||
hide_minimap: 'Sakrij mini kartu',
|
||||
},
|
||||
backup: {
|
||||
backup: 'Sigurnosna kopija',
|
||||
export_diagram: 'Izvezi dijagram',
|
||||
restore_diagram: 'Vrati dijagram',
|
||||
},
|
||||
help: {
|
||||
help: 'Pomoć',
|
||||
docs_website: 'Dokumentacija',
|
||||
join_discord: 'Pridružite nam se na Discordu',
|
||||
},
|
||||
},
|
||||
|
||||
delete_diagram_alert: {
|
||||
title: 'Izbriši dijagram',
|
||||
description:
|
||||
'Ova radnja se ne može poništiti. Ovo će trajno izbrisati dijagram.',
|
||||
cancel: 'Odustani',
|
||||
delete: 'Izbriši',
|
||||
},
|
||||
|
||||
clear_diagram_alert: {
|
||||
title: 'Očisti dijagram',
|
||||
description:
|
||||
'Ova radnja se ne može poništiti. Ovo će trajno izbrisati sve podatke u dijagramu.',
|
||||
cancel: 'Odustani',
|
||||
clear: 'Očisti',
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'Preuredi dijagram',
|
||||
description:
|
||||
'Ova radnja će preurediti sve tablice u dijagramu. Želite li nastaviti?',
|
||||
reorder: 'Preuredi',
|
||||
cancel: 'Odustani',
|
||||
},
|
||||
|
||||
multiple_schemas_alert: {
|
||||
title: 'Više shema',
|
||||
description:
|
||||
'{{schemasCount}} shema u ovom dijagramu. Trenutno prikazano: {{formattedSchemas}}.',
|
||||
show_me: 'Prikaži mi',
|
||||
none: 'nijedna',
|
||||
},
|
||||
|
||||
copy_to_clipboard_toast: {
|
||||
unsupported: {
|
||||
title: 'Kopiranje neuspješno',
|
||||
description: 'Međuspremnik nije podržan.',
|
||||
},
|
||||
failed: {
|
||||
title: 'Kopiranje neuspješno',
|
||||
description: 'Nešto je pošlo po zlu. Molimo pokušajte ponovno.',
|
||||
},
|
||||
},
|
||||
|
||||
theme: {
|
||||
system: 'Sustav',
|
||||
light: 'Svijetla',
|
||||
dark: 'Tamna',
|
||||
},
|
||||
|
||||
zoom: {
|
||||
on: 'Uključeno',
|
||||
off: 'Isključeno',
|
||||
},
|
||||
|
||||
last_saved: 'Zadnje spremljeno',
|
||||
saved: 'Spremljeno',
|
||||
loading_diagram: 'Učitavanje dijagrama...',
|
||||
deselect_all: 'Odznači sve',
|
||||
select_all: 'Označi sve',
|
||||
clear: 'Očisti',
|
||||
show_more: 'Prikaži više',
|
||||
show_less: 'Prikaži manje',
|
||||
copy_to_clipboard: 'Kopiraj u međuspremnik',
|
||||
copied: 'Kopirano!',
|
||||
|
||||
side_panel: {
|
||||
schema: 'Shema:',
|
||||
filter_by_schema: 'Filtriraj po shemi',
|
||||
search_schema: 'Pretraži shemu...',
|
||||
no_schemas_found: 'Nema pronađenih shema.',
|
||||
view_all_options: 'Prikaži sve opcije...',
|
||||
tables_section: {
|
||||
tables: 'Tablice',
|
||||
add_table: 'Dodaj tablicu',
|
||||
filter: 'Filtriraj',
|
||||
collapse: 'Sažmi sve',
|
||||
clear: 'Očisti filter',
|
||||
no_results:
|
||||
'Nema pronađenih tablica koje odgovaraju vašem filteru.',
|
||||
show_list: 'Prikaži popis tablica',
|
||||
show_dbml: 'Prikaži DBML uređivač',
|
||||
|
||||
table: {
|
||||
fields: 'Polja',
|
||||
nullable: 'Može biti null?',
|
||||
primary_key: 'Primarni ključ',
|
||||
indexes: 'Indeksi',
|
||||
comments: 'Komentari',
|
||||
no_comments: 'Nema komentara',
|
||||
add_field: 'Dodaj polje',
|
||||
add_index: 'Dodaj indeks',
|
||||
index_select_fields: 'Odaberi polja',
|
||||
no_types_found: 'Nema pronađenih tipova',
|
||||
field_name: 'Naziv',
|
||||
field_type: 'Tip',
|
||||
field_actions: {
|
||||
title: 'Atributi polja',
|
||||
unique: 'Jedinstven',
|
||||
character_length: 'Maksimalna dužina',
|
||||
precision: 'Preciznost',
|
||||
scale: 'Skala',
|
||||
comments: 'Komentari',
|
||||
no_comments: 'Nema komentara',
|
||||
default_value: 'Zadana vrijednost',
|
||||
no_default: 'Nema zadane vrijednosti',
|
||||
delete_field: 'Izbriši polje',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Atributi indeksa',
|
||||
name: 'Naziv',
|
||||
unique: 'Jedinstven',
|
||||
delete_index: 'Izbriši indeks',
|
||||
},
|
||||
table_actions: {
|
||||
title: 'Radnje nad tablicom',
|
||||
change_schema: 'Promijeni shemu',
|
||||
add_field: 'Dodaj polje',
|
||||
add_index: 'Dodaj indeks',
|
||||
duplicate_table: 'Dupliciraj tablicu',
|
||||
delete_table: 'Izbriši tablicu',
|
||||
},
|
||||
},
|
||||
empty_state: {
|
||||
title: 'Nema tablica',
|
||||
description: 'Stvorite tablicu za početak',
|
||||
},
|
||||
},
|
||||
relationships_section: {
|
||||
relationships: 'Veze',
|
||||
filter: 'Filtriraj',
|
||||
add_relationship: 'Dodaj vezu',
|
||||
collapse: 'Sažmi sve',
|
||||
relationship: {
|
||||
primary: 'Primarna tablica',
|
||||
foreign: 'Referentna tablica',
|
||||
cardinality: 'Kardinalnost',
|
||||
delete_relationship: 'Izbriši',
|
||||
relationship_actions: {
|
||||
title: 'Radnje',
|
||||
delete_relationship: 'Izbriši',
|
||||
},
|
||||
},
|
||||
empty_state: {
|
||||
title: 'Nema veza',
|
||||
description: 'Stvorite vezu za povezivanje tablica',
|
||||
},
|
||||
},
|
||||
dependencies_section: {
|
||||
dependencies: 'Ovisnosti',
|
||||
filter: 'Filtriraj',
|
||||
collapse: 'Sažmi sve',
|
||||
dependency: {
|
||||
table: 'Tablica',
|
||||
dependent_table: 'Ovisni pogled',
|
||||
delete_dependency: 'Izbriši',
|
||||
dependency_actions: {
|
||||
title: 'Radnje',
|
||||
delete_dependency: 'Izbriši',
|
||||
},
|
||||
},
|
||||
empty_state: {
|
||||
title: 'Nema ovisnosti',
|
||||
description: 'Stvorite pogled za početak',
|
||||
},
|
||||
},
|
||||
|
||||
areas_section: {
|
||||
areas: 'Područja',
|
||||
add_area: 'Dodaj područje',
|
||||
filter: 'Filtriraj',
|
||||
clear: 'Očisti filter',
|
||||
no_results:
|
||||
'Nema pronađenih područja koja odgovaraju vašem filteru.',
|
||||
|
||||
area: {
|
||||
area_actions: {
|
||||
title: 'Radnje nad područjem',
|
||||
edit_name: 'Uredi naziv',
|
||||
delete_area: 'Izbriši područje',
|
||||
},
|
||||
},
|
||||
empty_state: {
|
||||
title: 'Nema područja',
|
||||
description: 'Stvorite područje za početak',
|
||||
},
|
||||
},
|
||||
|
||||
custom_types_section: {
|
||||
custom_types: 'Prilagođeni tipovi',
|
||||
filter: 'Filtriraj',
|
||||
clear: 'Očisti filter',
|
||||
no_results:
|
||||
'Nema pronađenih prilagođenih tipova koji odgovaraju vašem filteru.',
|
||||
empty_state: {
|
||||
title: 'Nema prilagođenih tipova',
|
||||
description:
|
||||
'Prilagođeni tipovi će se pojaviti ovdje kada budu dostupni u vašoj bazi podataka',
|
||||
},
|
||||
custom_type: {
|
||||
kind: 'Vrsta',
|
||||
enum_values: 'Enum vrijednosti',
|
||||
composite_fields: 'Polja',
|
||||
no_fields: 'Nema definiranih polja',
|
||||
field_name_placeholder: 'Naziv polja',
|
||||
field_type_placeholder: 'Odaberi tip',
|
||||
add_field: 'Dodaj polje',
|
||||
no_fields_tooltip:
|
||||
'Nema definiranih polja za ovaj prilagođeni tip',
|
||||
custom_type_actions: {
|
||||
title: 'Radnje',
|
||||
highlight_fields: 'Istakni polja',
|
||||
clear_field_highlight: 'Ukloni isticanje',
|
||||
delete_custom_type: 'Izbriši',
|
||||
},
|
||||
delete_custom_type: 'Izbriši tip',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
toolbar: {
|
||||
zoom_in: 'Uvećaj',
|
||||
zoom_out: 'Smanji',
|
||||
save: 'Spremi',
|
||||
show_all: 'Prikaži sve',
|
||||
undo: 'Poništi',
|
||||
redo: 'Ponovi',
|
||||
reorder_diagram: 'Preuredi dijagram',
|
||||
highlight_overlapping_tables: 'Istakni preklapajuće tablice',
|
||||
clear_custom_type_highlight: 'Ukloni isticanje za "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Isticanje "{{typeName}}" - Kliknite za uklanjanje',
|
||||
filter: 'Filtriraj tablice',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
database_selection: {
|
||||
title: 'Koja je vaša baza podataka?',
|
||||
description:
|
||||
'Svaka baza podataka ima svoje jedinstvene značajke i mogućnosti.',
|
||||
check_examples_long: 'Pogledaj primjere',
|
||||
check_examples_short: 'Primjeri',
|
||||
},
|
||||
|
||||
import_database: {
|
||||
title: 'Uvezite svoju bazu podataka',
|
||||
database_edition: 'Verzija baze podataka:',
|
||||
step_1: 'Pokrenite ovu skriptu u svojoj bazi podataka:',
|
||||
step_2: 'Zalijepite rezultat skripte u ovaj dio →',
|
||||
script_results_placeholder: 'Rezultati skripte ovdje...',
|
||||
ssms_instructions: {
|
||||
button_text: 'SSMS upute',
|
||||
title: 'Upute',
|
||||
step_1: 'Idite na Tools > Options > Query Results > SQL Server.',
|
||||
step_2: 'Ako koristite "Results to Grid," promijenite Maximum Characters Retrieved za Non-XML podatke (postavite na 9999999).',
|
||||
},
|
||||
instructions_link: 'Trebate pomoć? Pogledajte kako',
|
||||
check_script_result: 'Provjeri rezultat skripte',
|
||||
},
|
||||
|
||||
cancel: 'Odustani',
|
||||
import_from_file: 'Uvezi iz datoteke',
|
||||
back: 'Natrag',
|
||||
empty_diagram: 'Prazan dijagram',
|
||||
continue: 'Nastavi',
|
||||
import: 'Uvezi',
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'Otvori dijagram',
|
||||
description: 'Odaberite dijagram za otvaranje iz popisa ispod.',
|
||||
table_columns: {
|
||||
name: 'Naziv',
|
||||
created_at: 'Stvoreno',
|
||||
last_modified: 'Zadnje izmijenjeno',
|
||||
tables_count: 'Tablice',
|
||||
},
|
||||
cancel: 'Odustani',
|
||||
open: 'Otvori',
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
title: 'Izvezi SQL',
|
||||
description:
|
||||
'Izvezite shemu vašeg dijagrama u {{databaseType}} skriptu',
|
||||
close: 'Zatvori',
|
||||
loading: {
|
||||
text: 'AI generira SQL za {{databaseType}}...',
|
||||
description: 'Ovo bi trebalo potrajati do 30 sekundi.',
|
||||
},
|
||||
error: {
|
||||
message:
|
||||
'Greška pri generiranju SQL skripte. Molimo pokušajte ponovno kasnije ili <0>kontaktirajte nas</0>.',
|
||||
description:
|
||||
'Slobodno koristite svoj OPENAI_TOKEN, pogledajte priručnik <0>ovdje</0>.',
|
||||
},
|
||||
},
|
||||
|
||||
create_relationship_dialog: {
|
||||
title: 'Kreiraj vezu',
|
||||
primary_table: 'Primarna tablica',
|
||||
primary_field: 'Primarno polje',
|
||||
referenced_table: 'Referentna tablica',
|
||||
referenced_field: 'Referentno polje',
|
||||
primary_table_placeholder: 'Odaberi tablicu',
|
||||
primary_field_placeholder: 'Odaberi polje',
|
||||
referenced_table_placeholder: 'Odaberi tablicu',
|
||||
referenced_field_placeholder: 'Odaberi polje',
|
||||
no_tables_found: 'Nema pronađenih tablica',
|
||||
no_fields_found: 'Nema pronađenih polja',
|
||||
create: 'Kreiraj',
|
||||
cancel: 'Odustani',
|
||||
},
|
||||
|
||||
import_database_dialog: {
|
||||
title: 'Uvezi u trenutni dijagram',
|
||||
override_alert: {
|
||||
title: 'Uvezi bazu podataka',
|
||||
content: {
|
||||
alert: 'Uvoz ovog dijagrama će utjecati na postojeće tablice i veze.',
|
||||
new_tables:
|
||||
'<bold>{{newTablesNumber}}</bold> novih tablica će biti dodano.',
|
||||
new_relationships:
|
||||
'<bold>{{newRelationshipsNumber}}</bold> novih veza će biti stvoreno.',
|
||||
tables_override:
|
||||
'<bold>{{tablesOverrideNumber}}</bold> tablica će biti prepisano.',
|
||||
proceed: 'Želite li nastaviti?',
|
||||
},
|
||||
import: 'Uvezi',
|
||||
cancel: 'Odustani',
|
||||
},
|
||||
},
|
||||
|
||||
export_image_dialog: {
|
||||
title: 'Izvezi sliku',
|
||||
description: 'Odaberite faktor veličine za izvoz:',
|
||||
scale_1x: '1x Obično',
|
||||
scale_2x: '2x (Preporučeno)',
|
||||
scale_3x: '3x',
|
||||
scale_4x: '4x',
|
||||
cancel: 'Odustani',
|
||||
export: 'Izvezi',
|
||||
advanced_options: 'Napredne opcije',
|
||||
pattern: 'Uključi pozadinski uzorak',
|
||||
pattern_description: 'Dodaj suptilni mrežni uzorak u pozadinu.',
|
||||
transparent: 'Prozirna pozadina',
|
||||
transparent_description: 'Ukloni boju pozadine iz slike.',
|
||||
},
|
||||
|
||||
new_table_schema_dialog: {
|
||||
title: 'Odaberi shemu',
|
||||
description:
|
||||
'Trenutno je prikazano više shema. Odaberite jednu za novu tablicu.',
|
||||
cancel: 'Odustani',
|
||||
confirm: 'Potvrdi',
|
||||
},
|
||||
|
||||
update_table_schema_dialog: {
|
||||
title: 'Promijeni shemu',
|
||||
description: 'Ažuriraj shemu tablice "{{tableName}}"',
|
||||
cancel: 'Odustani',
|
||||
confirm: 'Promijeni',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'Stvori novu shemu',
|
||||
description:
|
||||
'Još ne postoje sheme. Stvorite svoju prvu shemu za organiziranje tablica.',
|
||||
create: 'Stvori',
|
||||
cancel: 'Odustani',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'Pomozite nam da se poboljšamo!',
|
||||
description:
|
||||
'Želite li nam dati zvjezdicu na GitHubu? Samo je jedan klik!',
|
||||
close: 'Ne sada',
|
||||
confirm: 'Naravno!',
|
||||
},
|
||||
export_diagram_dialog: {
|
||||
title: 'Izvezi dijagram',
|
||||
description: 'Odaberite format za izvoz:',
|
||||
format_json: 'JSON',
|
||||
cancel: 'Odustani',
|
||||
export: 'Izvezi',
|
||||
error: {
|
||||
title: 'Greška pri izvozu dijagrama',
|
||||
description:
|
||||
'Nešto je pošlo po zlu. Trebate pomoć? support@chartdb.io',
|
||||
},
|
||||
},
|
||||
|
||||
import_diagram_dialog: {
|
||||
title: 'Uvezi dijagram',
|
||||
description: 'Uvezite dijagram iz JSON datoteke.',
|
||||
cancel: 'Odustani',
|
||||
import: 'Uvezi',
|
||||
error: {
|
||||
title: 'Greška pri uvozu dijagrama',
|
||||
description:
|
||||
'JSON dijagrama je nevažeći. Molimo provjerite JSON i pokušajte ponovno. Trebate pomoć? support@chartdb.io',
|
||||
},
|
||||
},
|
||||
|
||||
import_dbml_dialog: {
|
||||
example_title: 'Uvezi primjer DBML-a',
|
||||
title: 'Uvezi DBML',
|
||||
description: 'Uvezite shemu baze podataka iz DBML formata.',
|
||||
import: 'Uvezi',
|
||||
cancel: 'Odustani',
|
||||
skip_and_empty: 'Preskoči i isprazni',
|
||||
show_example: 'Prikaži primjer',
|
||||
error: {
|
||||
title: 'Greška pri uvozu DBML-a',
|
||||
description:
|
||||
'Neuspješno parsiranje DBML-a. Molimo provjerite sintaksu.',
|
||||
},
|
||||
},
|
||||
relationship_type: {
|
||||
one_to_one: 'Jedan na jedan',
|
||||
one_to_many: 'Jedan na više',
|
||||
many_to_one: 'Više na jedan',
|
||||
many_to_many: 'Više na više',
|
||||
},
|
||||
|
||||
canvas_context_menu: {
|
||||
new_table: 'Nova tablica',
|
||||
new_relationship: 'Nova veza',
|
||||
new_area: 'Novo područje',
|
||||
},
|
||||
|
||||
table_node_context_menu: {
|
||||
edit_table: 'Uredi tablicu',
|
||||
duplicate_table: 'Dupliciraj tablicu',
|
||||
delete_table: 'Izbriši tablicu',
|
||||
add_relationship: 'Dodaj vezu',
|
||||
},
|
||||
|
||||
snap_to_grid_tooltip: 'Priljepljivanje na mrežu (Drži {{key}})',
|
||||
|
||||
tool_tips: {
|
||||
double_click_to_edit: 'Dvostruki klik za uređivanje',
|
||||
},
|
||||
|
||||
language_select: {
|
||||
change_language: 'Jezik',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
export const hrMetadata: LanguageMetadata = {
|
||||
name: 'Croatian',
|
||||
nativeName: 'Hrvatski',
|
||||
code: 'hr',
|
||||
};
|
||||
@@ -26,8 +26,6 @@ export const id_ID: LanguageTranslation = {
|
||||
hide_sidebar: 'Sembunyikan Sidebar',
|
||||
hide_cardinality: 'Sembunyikan Kardinalitas',
|
||||
show_cardinality: 'Tampilkan Kardinalitas',
|
||||
hide_field_attributes: 'Sembunyikan Atribut Kolom',
|
||||
show_field_attributes: 'Tampilkan Atribut Kolom',
|
||||
zoom_on_scroll: 'Perbesar saat Scroll',
|
||||
theme: 'Tema',
|
||||
show_dependencies: 'Tampilkan Dependensi',
|
||||
@@ -156,8 +154,6 @@ export const id_ID: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'Presisi',
|
||||
scale: 'Skala',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Atribut Indeks',
|
||||
@@ -257,12 +253,9 @@ export const id_ID: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -277,10 +270,6 @@ export const id_ID: LanguageTranslation = {
|
||||
undo: 'Undo',
|
||||
redo: 'Redo',
|
||||
reorder_diagram: 'Atur Ulang Diagram',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'Sorot Tabel yang Tumpang Tindih',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -415,14 +404,6 @@ export const id_ID: LanguageTranslation = {
|
||||
confirm: 'Ubah',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'Buat Skema Baru',
|
||||
description:
|
||||
'Belum ada skema yang tersedia. Buat skema pertama Anda untuk mengatur tabel-tabel Anda.',
|
||||
create: 'Buat',
|
||||
cancel: 'Batal',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'Bantu kami meningkatkan!',
|
||||
description:
|
||||
|
||||
@@ -26,8 +26,6 @@ export const ja: LanguageTranslation = {
|
||||
hide_sidebar: 'サイドバーを非表示',
|
||||
hide_cardinality: 'カーディナリティを非表示',
|
||||
show_cardinality: 'カーディナリティを表示',
|
||||
hide_field_attributes: 'フィールド属性を非表示',
|
||||
show_field_attributes: 'フィールド属性を表示',
|
||||
zoom_on_scroll: 'スクロールでズーム',
|
||||
theme: 'テーマ',
|
||||
// TODO: Translate
|
||||
@@ -160,8 +158,6 @@ export const ja: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: '精度',
|
||||
scale: '小数点以下桁数',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'インデックス属性',
|
||||
@@ -263,12 +259,9 @@ export const ja: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -285,9 +278,7 @@ export const ja: LanguageTranslation = {
|
||||
reorder_diagram: 'ダイアグラムを並べ替え',
|
||||
// TODO: Translate
|
||||
highlight_overlapping_tables: 'Highlight Overlapping Tables',
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear', // TODO: Translate
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
@@ -422,14 +413,6 @@ export const ja: LanguageTranslation = {
|
||||
confirm: '変更',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: '新しいスキーマを作成',
|
||||
description:
|
||||
'スキーマがまだ存在しません。テーブルを整理するために最初のスキーマを作成してください。',
|
||||
create: '作成',
|
||||
cancel: 'キャンセル',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: '改善をサポートしてください!',
|
||||
description:
|
||||
|
||||
@@ -26,8 +26,6 @@ export const ko_KR: LanguageTranslation = {
|
||||
hide_sidebar: '사이드바 숨기기',
|
||||
hide_cardinality: '카디널리티 숨기기',
|
||||
show_cardinality: '카디널리티 보이기',
|
||||
hide_field_attributes: '필드 속성 숨기기',
|
||||
show_field_attributes: '필드 속성 보이기',
|
||||
zoom_on_scroll: '스크롤 시 확대',
|
||||
theme: '테마',
|
||||
show_dependencies: '종속성 보이기',
|
||||
@@ -156,8 +154,6 @@ export const ko_KR: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: '정밀도',
|
||||
scale: '소수점 자릿수',
|
||||
},
|
||||
index_actions: {
|
||||
title: '인덱스 속성',
|
||||
@@ -257,12 +253,9 @@ export const ko_KR: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -277,10 +270,6 @@ export const ko_KR: LanguageTranslation = {
|
||||
undo: '실행 취소',
|
||||
redo: '다시 실행',
|
||||
reorder_diagram: '다이어그램 재정렬',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: '겹치는 테이블 강조 표시',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -415,14 +404,6 @@ export const ko_KR: LanguageTranslation = {
|
||||
confirm: '변경',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: '새 스키마 생성',
|
||||
description:
|
||||
'아직 스키마가 없습니다. 테이블을 정리하기 위해 첫 번째 스키마를 생성하세요.',
|
||||
create: '생성',
|
||||
cancel: '취소',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: '개선할 수 있도록 도와주세요!',
|
||||
description:
|
||||
|
||||
@@ -26,8 +26,6 @@ export const mr: LanguageTranslation = {
|
||||
hide_sidebar: 'साइडबार लपवा',
|
||||
hide_cardinality: 'कार्डिनॅलिटी लपवा',
|
||||
show_cardinality: 'कार्डिनॅलिटी दाखवा',
|
||||
hide_field_attributes: 'फील्ड गुणधर्म लपवा',
|
||||
show_field_attributes: 'फील्ड गुणधर्म दाखवा',
|
||||
zoom_on_scroll: 'स्क्रोलवर झूम करा',
|
||||
theme: 'थीम',
|
||||
show_dependencies: 'डिपेंडेन्सि दाखवा',
|
||||
@@ -159,8 +157,6 @@ export const mr: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'अचूकता',
|
||||
scale: 'प्रमाण',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'इंडेक्स गुणधर्म',
|
||||
@@ -262,12 +258,9 @@ export const mr: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -282,10 +275,6 @@ export const mr: LanguageTranslation = {
|
||||
undo: 'पूर्ववत करा',
|
||||
redo: 'पुन्हा करा',
|
||||
reorder_diagram: 'आरेख पुनःक्रमित करा',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'ओव्हरलॅपिंग टेबल्स हायलाइट करा',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -423,14 +412,6 @@ export const mr: LanguageTranslation = {
|
||||
confirm: 'बदला',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'नवीन स्कीमा तयार करा',
|
||||
description:
|
||||
'अजून कोणतीही स्कीमा अस्तित्वात नाही. आपल्या टेबल्स व्यवस्थित करण्यासाठी आपली पहिली स्कीमा तयार करा.',
|
||||
create: 'तयार करा',
|
||||
cancel: 'रद्द करा',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'आम्हाला सुधारण्यास मदत करा!',
|
||||
description:
|
||||
|
||||
@@ -26,8 +26,6 @@ export const ne: LanguageTranslation = {
|
||||
hide_sidebar: 'साइडबार लुकाउनुहोस्',
|
||||
hide_cardinality: 'कार्डिन्यालिटी लुकाउनुहोस्',
|
||||
show_cardinality: 'कार्डिन्यालिटी देखाउनुहोस्',
|
||||
hide_field_attributes: 'फिल्ड विशेषताहरू लुकाउनुहोस्',
|
||||
show_field_attributes: 'फिल्ड विशेषताहरू देखाउनुहोस्',
|
||||
zoom_on_scroll: 'स्क्रोलमा जुम गर्नुहोस्',
|
||||
theme: 'थिम',
|
||||
show_dependencies: 'डिपेन्डेन्सीहरू देखाउनुहोस्',
|
||||
@@ -157,8 +155,6 @@ export const ne: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'परिशुद्धता',
|
||||
scale: 'स्केल',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'सूचक विशेषताहरू',
|
||||
@@ -259,12 +255,9 @@ export const ne: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -279,10 +272,6 @@ export const ne: LanguageTranslation = {
|
||||
undo: 'पूर्ववत',
|
||||
redo: 'पुनः गर्नुहोस्',
|
||||
reorder_diagram: 'पुनः क्रमबद्ध गर्नुहोस्',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables:
|
||||
'अतिरिक्त तालिकाहरू हाइलाइट गर्नुहोस्',
|
||||
// TODO: Translate
|
||||
@@ -420,14 +409,6 @@ export const ne: LanguageTranslation = {
|
||||
confirm: 'परिवर्तन गर्नुहोस्',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'नयाँ स्कीम सिर्जना गर्नुहोस्',
|
||||
description:
|
||||
'अहिलेसम्म कुनै स्कीम अस्तित्वमा छैन। आफ्ना तालिकाहरू व्यवस्थित गर्न आफ्नो पहिलो स्कीम सिर्जना गर्नुहोस्।',
|
||||
create: 'सिर्जना गर्नुहोस्',
|
||||
cancel: 'रद्द गर्नुहोस्',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'हामीलाई अझ राम्रो हुन मदत गर्नुहोस!',
|
||||
description:
|
||||
|
||||
@@ -26,8 +26,6 @@ export const pt_BR: LanguageTranslation = {
|
||||
hide_sidebar: 'Ocultar Barra Lateral',
|
||||
hide_cardinality: 'Ocultar Cardinalidade',
|
||||
show_cardinality: 'Mostrar Cardinalidade',
|
||||
hide_field_attributes: 'Ocultar Atributos de Campo',
|
||||
show_field_attributes: 'Mostrar Atributos de Campo',
|
||||
zoom_on_scroll: 'Zoom ao Rolar',
|
||||
theme: 'Tema',
|
||||
show_dependencies: 'Mostrar Dependências',
|
||||
@@ -157,8 +155,6 @@ export const pt_BR: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'Precisão',
|
||||
scale: 'Escala',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Atributos do Índice',
|
||||
@@ -258,12 +254,9 @@ export const pt_BR: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -278,10 +271,6 @@ export const pt_BR: LanguageTranslation = {
|
||||
undo: 'Desfazer',
|
||||
redo: 'Refazer',
|
||||
reorder_diagram: 'Reordenar Diagrama',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'Destacar Tabelas Sobrepostas',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -418,14 +407,6 @@ export const pt_BR: LanguageTranslation = {
|
||||
confirm: 'Alterar',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'Criar Novo Esquema',
|
||||
description:
|
||||
'Ainda não existem esquemas. Crie seu primeiro esquema para organizar suas tabelas.',
|
||||
create: 'Criar',
|
||||
cancel: 'Cancelar',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'Ajude-nos a melhorar!',
|
||||
description:
|
||||
|
||||
@@ -26,8 +26,6 @@ export const ru: LanguageTranslation = {
|
||||
hide_sidebar: 'Скрыть боковую панель',
|
||||
hide_cardinality: 'Скрыть виды связи',
|
||||
show_cardinality: 'Показать виды связи',
|
||||
show_field_attributes: 'Показать атрибуты поля',
|
||||
hide_field_attributes: 'Скрыть атрибуты поля',
|
||||
zoom_on_scroll: 'Увеличение при прокрутке',
|
||||
theme: 'Тема',
|
||||
show_dependencies: 'Показать зависимости',
|
||||
@@ -153,8 +151,6 @@ export const ru: LanguageTranslation = {
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
character_length: 'Макс. длина',
|
||||
precision: 'Точность',
|
||||
scale: 'Масштаб',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Атрибуты индекса',
|
||||
@@ -255,12 +251,9 @@ export const ru: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -275,10 +268,6 @@ export const ru: LanguageTranslation = {
|
||||
undo: 'Отменить',
|
||||
redo: 'Вернуть',
|
||||
reorder_diagram: 'Переупорядочить диаграмму',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'Выделение перекрывающихся таблиц',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -415,14 +404,6 @@ export const ru: LanguageTranslation = {
|
||||
confirm: 'Изменить',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'Создать новую схему',
|
||||
description:
|
||||
'Схемы еще не существуют. Создайте вашу первую схему, чтобы организовать таблицы.',
|
||||
create: 'Создать',
|
||||
cancel: 'Отменить',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'Помогите нам стать лучше!',
|
||||
description:
|
||||
|
||||
@@ -26,8 +26,6 @@ export const te: LanguageTranslation = {
|
||||
hide_sidebar: 'సైడ్బార్ దాచండి',
|
||||
hide_cardinality: 'కార్డినాలిటీని దాచండి',
|
||||
show_cardinality: 'కార్డినాలిటీని చూపించండి',
|
||||
show_field_attributes: 'ఫీల్డ్ గుణాలను చూపించు',
|
||||
hide_field_attributes: 'ఫీల్డ్ గుణాలను దాచండి',
|
||||
zoom_on_scroll: 'స్క్రోల్పై జూమ్',
|
||||
theme: 'థీమ్',
|
||||
show_dependencies: 'ఆధారాలు చూపించండి',
|
||||
@@ -157,8 +155,6 @@ export const te: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'సూక్ష్మత',
|
||||
scale: 'స్కేల్',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'ఇండెక్స్ గుణాలు',
|
||||
@@ -259,12 +255,9 @@ export const te: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -279,10 +272,6 @@ export const te: LanguageTranslation = {
|
||||
undo: 'తిరిగి చేయు',
|
||||
redo: 'మరలా చేయు',
|
||||
reorder_diagram: 'చిత్రాన్ని పునఃసరిచేయండి',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'అవకాశించు పట్టికలను హైలైట్ చేయండి',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -419,14 +408,6 @@ export const te: LanguageTranslation = {
|
||||
confirm: 'మార్చు',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'కొత్త స్కీమా సృష్టించండి',
|
||||
description:
|
||||
'ఇంకా ఏ స్కీమాలు లేవు. మీ పట్టికలను వ్యవస్థీకరించడానికి మీ మొదటి స్కీమాను సృష్టించండి.',
|
||||
create: 'సృష్టించు',
|
||||
cancel: 'రద్దు',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'మా సహాయంతో మెరుగుపరచండి!',
|
||||
description:
|
||||
|
||||
@@ -26,8 +26,6 @@ export const tr: LanguageTranslation = {
|
||||
hide_sidebar: 'Kenar Çubuğunu Gizle',
|
||||
hide_cardinality: 'Kardinaliteyi Gizle',
|
||||
show_cardinality: 'Kardinaliteyi Göster',
|
||||
show_field_attributes: 'Alan Özelliklerini Göster',
|
||||
hide_field_attributes: 'Alan Özelliklerini Gizle',
|
||||
zoom_on_scroll: 'Kaydırarak Yakınlaştır',
|
||||
theme: 'Tema',
|
||||
show_dependencies: 'Bağımlılıkları Göster',
|
||||
@@ -156,8 +154,6 @@ export const tr: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'Hassasiyet',
|
||||
scale: 'Ölçek',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'İndeks Özellikleri',
|
||||
@@ -258,12 +254,9 @@ export const tr: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -277,10 +270,6 @@ export const tr: LanguageTranslation = {
|
||||
undo: 'Geri Al',
|
||||
redo: 'Yinele',
|
||||
reorder_diagram: 'Diyagramı Yeniden Sırala',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'Çakışan Tabloları Vurgula',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -408,14 +397,6 @@ export const tr: LanguageTranslation = {
|
||||
cancel: 'İptal',
|
||||
confirm: 'Değiştir',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'Yeni Şema Oluştur',
|
||||
description:
|
||||
'Henüz hiç şema mevcut değil. Tablolarınızı düzenlemek için ilk şemanızı oluşturun.',
|
||||
create: 'Oluştur',
|
||||
cancel: 'İptal',
|
||||
},
|
||||
star_us_dialog: {
|
||||
title: 'Bize yardım et!',
|
||||
description:
|
||||
|
||||
@@ -26,8 +26,6 @@ export const uk: LanguageTranslation = {
|
||||
hide_sidebar: 'Приховати бічну панель',
|
||||
hide_cardinality: 'Приховати потужність',
|
||||
show_cardinality: 'Показати кардинальність',
|
||||
show_field_attributes: 'Показати атрибути полів',
|
||||
hide_field_attributes: 'Приховати атрибути полів',
|
||||
zoom_on_scroll: 'Масштабувати прокручуванням',
|
||||
theme: 'Тема',
|
||||
show_dependencies: 'Показати залежності',
|
||||
@@ -155,8 +153,6 @@ export const uk: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'Точність',
|
||||
scale: 'Масштаб',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Атрибути індексу',
|
||||
@@ -256,12 +252,9 @@ export const uk: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -276,10 +269,6 @@ export const uk: LanguageTranslation = {
|
||||
undo: 'Скасувати',
|
||||
redo: 'Повторити',
|
||||
reorder_diagram: 'Перевпорядкувати діаграму',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'Показати таблиці, що перекриваються',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -416,14 +405,6 @@ export const uk: LanguageTranslation = {
|
||||
confirm: 'Змінити',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'Створити нову схему',
|
||||
description:
|
||||
'Поки що не існує жодної схеми. Створіть свою першу схему, щоб організувати ваші таблиці.',
|
||||
create: 'Створити',
|
||||
cancel: 'Скасувати',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'Допоможіть нам покращитися!',
|
||||
description: 'Поставне на зірку на GitHub? Це лише один клік!',
|
||||
|
||||
@@ -26,8 +26,6 @@ export const vi: LanguageTranslation = {
|
||||
hide_sidebar: 'Ẩn thanh bên',
|
||||
hide_cardinality: 'Ẩn số lượng',
|
||||
show_cardinality: 'Hiển thị số lượng',
|
||||
show_field_attributes: 'Hiển thị thuộc tính trường',
|
||||
hide_field_attributes: 'Ẩn thuộc tính trường',
|
||||
zoom_on_scroll: 'Thu phóng khi cuộn',
|
||||
theme: 'Chủ đề',
|
||||
show_dependencies: 'Hiển thị các phụ thuộc',
|
||||
@@ -156,8 +154,6 @@ export const vi: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'Độ chính xác',
|
||||
scale: 'Tỷ lệ',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Thuộc tính chỉ mục',
|
||||
@@ -257,12 +253,9 @@ export const vi: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -277,10 +270,6 @@ export const vi: LanguageTranslation = {
|
||||
undo: 'Hoàn tác',
|
||||
redo: 'Làm lại',
|
||||
reorder_diagram: 'Sắp xếp lại sơ đồ',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'Làm nổi bật các bảng chồng chéo',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -415,14 +404,6 @@ export const vi: LanguageTranslation = {
|
||||
confirm: 'Xác nhận',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'Tạo lược đồ mới',
|
||||
description:
|
||||
'Chưa có lược đồ nào. Tạo lược đồ đầu tiên của bạn để tổ chức các bảng.',
|
||||
create: 'Tạo',
|
||||
cancel: 'Hủy',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'Hãy giúp chúng tôi cải thiện!',
|
||||
description:
|
||||
|
||||
@@ -26,8 +26,6 @@ export const zh_CN: LanguageTranslation = {
|
||||
hide_sidebar: '隐藏侧边栏',
|
||||
hide_cardinality: '隐藏基数',
|
||||
show_cardinality: '展示基数',
|
||||
show_field_attributes: '展示字段属性',
|
||||
hide_field_attributes: '隐藏字段属性',
|
||||
zoom_on_scroll: '滚动缩放',
|
||||
theme: '主题',
|
||||
show_dependencies: '展示依赖',
|
||||
@@ -153,8 +151,6 @@ export const zh_CN: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: '精度',
|
||||
scale: '小数位',
|
||||
},
|
||||
index_actions: {
|
||||
title: '索引属性',
|
||||
@@ -254,12 +250,9 @@ export const zh_CN: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -274,10 +267,6 @@ export const zh_CN: LanguageTranslation = {
|
||||
undo: '撤销',
|
||||
redo: '重做',
|
||||
reorder_diagram: '重新排列关系图',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: '突出显示重叠的表',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -411,13 +400,6 @@ export const zh_CN: LanguageTranslation = {
|
||||
confirm: '更改',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: '创建新模式',
|
||||
description: '尚未存在任何模式。创建您的第一个模式来组织您的表。',
|
||||
create: '创建',
|
||||
cancel: '取消',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: '帮助我们改进!',
|
||||
description: '您想在 GitHub 上为我们加注星标吗?只需点击一下即可!',
|
||||
|
||||
@@ -26,8 +26,6 @@ export const zh_TW: LanguageTranslation = {
|
||||
hide_sidebar: '隱藏側邊欄',
|
||||
hide_cardinality: '隱藏基數',
|
||||
show_cardinality: '顯示基數',
|
||||
hide_field_attributes: '隱藏欄位屬性',
|
||||
show_field_attributes: '顯示欄位屬性',
|
||||
zoom_on_scroll: '滾動縮放',
|
||||
theme: '主題',
|
||||
show_dependencies: '顯示相依性',
|
||||
@@ -153,8 +151,6 @@ export const zh_TW: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: '精度',
|
||||
scale: '小數位',
|
||||
},
|
||||
index_actions: {
|
||||
title: '索引屬性',
|
||||
@@ -254,12 +250,9 @@ export const zh_TW: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -274,10 +267,6 @@ export const zh_TW: LanguageTranslation = {
|
||||
undo: '復原',
|
||||
redo: '重做',
|
||||
reorder_diagram: '重新排列圖表',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: '突出顯示重疊表格',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -410,14 +399,6 @@ export const zh_TW: LanguageTranslation = {
|
||||
confirm: '變更',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: '建立新 Schema',
|
||||
description:
|
||||
'尚未存在任何 Schema。建立您的第一個 Schema 來組織您的表格。',
|
||||
create: '建立',
|
||||
cancel: '取消',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: '協助我們改善!',
|
||||
description: '請在 GitHub 上給我們一顆星,只需點擊一下!',
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import type { DBCustomType } from './domain';
|
||||
import type { Area } from './domain/area';
|
||||
import type { DBDependency } from './domain/db-dependency';
|
||||
import type { DBField } from './domain/db-field';
|
||||
@@ -49,10 +48,6 @@ const generateIdsMapFromDiagram = (
|
||||
idsMap.set(area.id, generateId());
|
||||
});
|
||||
|
||||
diagram.customTypes?.forEach((customType) => {
|
||||
idsMap.set(customType.id, generateId());
|
||||
});
|
||||
|
||||
return idsMap;
|
||||
};
|
||||
|
||||
@@ -218,22 +213,6 @@ export const cloneDiagram = (
|
||||
})
|
||||
.filter((area): area is Area => area !== null) ?? [];
|
||||
|
||||
const customTypes: DBCustomType[] =
|
||||
diagram.customTypes
|
||||
?.map((customType) => {
|
||||
const id = getNewId(customType.id);
|
||||
if (!id) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
...customType,
|
||||
id,
|
||||
} satisfies DBCustomType;
|
||||
})
|
||||
.filter(
|
||||
(customType): customType is DBCustomType => customType !== null
|
||||
) ?? [];
|
||||
|
||||
return {
|
||||
diagram: {
|
||||
...diagram,
|
||||
@@ -242,7 +221,6 @@ export const cloneDiagram = (
|
||||
relationships,
|
||||
tables,
|
||||
areas,
|
||||
customTypes,
|
||||
createdAt: diagram.createdAt
|
||||
? new Date(diagram.createdAt)
|
||||
: new Date(),
|
||||
|
||||
@@ -48,30 +48,18 @@ export const clickhouseDataTypes: readonly DataTypeData[] = [
|
||||
{ name: 'mediumblob', id: 'mediumblob' },
|
||||
{ name: 'tinyblob', id: 'tinyblob' },
|
||||
{ name: 'blob', id: 'blob' },
|
||||
{
|
||||
name: 'varchar',
|
||||
id: 'varchar',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{ name: 'char', id: 'char', fieldAttributes: { hasCharMaxLength: true } },
|
||||
{ name: 'varchar', id: 'varchar', hasCharMaxLength: true },
|
||||
{ name: 'char', id: 'char', hasCharMaxLength: true },
|
||||
{ name: 'char large object', id: 'char_large_object' },
|
||||
{
|
||||
name: 'char varying',
|
||||
id: 'char_varying',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{ name: 'char varying', id: 'char_varying', hasCharMaxLength: true },
|
||||
{ name: 'character large object', id: 'character_large_object' },
|
||||
{
|
||||
name: 'character varying',
|
||||
id: 'character_varying',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
hasCharMaxLength: true,
|
||||
},
|
||||
{ name: 'nchar large object', id: 'nchar_large_object' },
|
||||
{
|
||||
name: 'nchar varying',
|
||||
id: 'nchar_varying',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{ name: 'nchar varying', id: 'nchar_varying', hasCharMaxLength: true },
|
||||
{
|
||||
name: 'national character large object',
|
||||
id: 'national_character_large_object',
|
||||
@@ -79,34 +67,22 @@ export const clickhouseDataTypes: readonly DataTypeData[] = [
|
||||
{
|
||||
name: 'national character varying',
|
||||
id: 'national_character_varying',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
hasCharMaxLength: true,
|
||||
},
|
||||
{
|
||||
name: 'national char varying',
|
||||
id: 'national_char_varying',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
hasCharMaxLength: true,
|
||||
},
|
||||
{
|
||||
name: 'national character',
|
||||
id: 'national_character',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{
|
||||
name: 'national char',
|
||||
id: 'national_char',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
hasCharMaxLength: true,
|
||||
},
|
||||
{ name: 'national char', id: 'national_char', hasCharMaxLength: true },
|
||||
{ name: 'binary large object', id: 'binary_large_object' },
|
||||
{
|
||||
name: 'binary varying',
|
||||
id: 'binary_varying',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{
|
||||
name: 'fixedstring',
|
||||
id: 'fixedstring',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{ name: 'binary varying', id: 'binary_varying', hasCharMaxLength: true },
|
||||
{ name: 'fixedstring', id: 'fixedstring', hasCharMaxLength: true },
|
||||
{ name: 'string', id: 'string' },
|
||||
|
||||
// Date Types
|
||||
|
||||
@@ -14,23 +14,9 @@ export interface DataType {
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface FieldAttributeRange {
|
||||
max: number;
|
||||
min: number;
|
||||
default: number;
|
||||
}
|
||||
|
||||
interface FieldAttributes {
|
||||
hasCharMaxLength?: boolean;
|
||||
hasCharMaxLengthOption?: boolean;
|
||||
precision?: FieldAttributeRange;
|
||||
scale?: FieldAttributeRange;
|
||||
maxLength?: number;
|
||||
}
|
||||
|
||||
export interface DataTypeData extends DataType {
|
||||
hasCharMaxLength?: boolean;
|
||||
usageLevel?: 1 | 2; // Level 1 is most common, Level 2 is second most common
|
||||
fieldAttributes?: FieldAttributes;
|
||||
}
|
||||
|
||||
export const dataTypeSchema: z.ZodType<DataType> = z.object({
|
||||
|
||||
@@ -2,12 +2,7 @@ import type { DataTypeData } from './data-types';
|
||||
|
||||
export const genericDataTypes: readonly DataTypeData[] = [
|
||||
// Level 1 - Most commonly used types
|
||||
{
|
||||
name: 'varchar',
|
||||
id: 'varchar',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
usageLevel: 1,
|
||||
},
|
||||
{ name: 'varchar', id: 'varchar', hasCharMaxLength: true, usageLevel: 1 },
|
||||
{ name: 'int', id: 'int', usageLevel: 1 },
|
||||
{ name: 'text', id: 'text', usageLevel: 1 },
|
||||
{ name: 'boolean', id: 'boolean', usageLevel: 1 },
|
||||
@@ -15,62 +10,23 @@ export const genericDataTypes: readonly DataTypeData[] = [
|
||||
{ name: 'timestamp', id: 'timestamp', usageLevel: 1 },
|
||||
|
||||
// Level 2 - Second most common types
|
||||
{
|
||||
name: 'decimal',
|
||||
id: 'decimal',
|
||||
usageLevel: 2,
|
||||
fieldAttributes: {
|
||||
precision: {
|
||||
max: 999,
|
||||
min: 1,
|
||||
default: 10,
|
||||
},
|
||||
scale: {
|
||||
max: 999,
|
||||
min: 0,
|
||||
default: 2,
|
||||
},
|
||||
},
|
||||
},
|
||||
{ name: 'decimal', id: 'decimal', usageLevel: 2 },
|
||||
{ name: 'datetime', id: 'datetime', usageLevel: 2 },
|
||||
{ name: 'json', id: 'json', usageLevel: 2 },
|
||||
{ name: 'uuid', id: 'uuid', usageLevel: 2 },
|
||||
|
||||
// Less common types
|
||||
{ name: 'bigint', id: 'bigint' },
|
||||
{
|
||||
name: 'binary',
|
||||
id: 'binary',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{ name: 'binary', id: 'binary', hasCharMaxLength: true },
|
||||
{ name: 'blob', id: 'blob' },
|
||||
{ name: 'char', id: 'char', fieldAttributes: { hasCharMaxLength: true } },
|
||||
{ name: 'char', id: 'char', hasCharMaxLength: true },
|
||||
{ name: 'double', id: 'double' },
|
||||
{ name: 'enum', id: 'enum' },
|
||||
{ name: 'float', id: 'float' },
|
||||
{
|
||||
name: 'numeric',
|
||||
id: 'numeric',
|
||||
fieldAttributes: {
|
||||
precision: {
|
||||
max: 999,
|
||||
min: 1,
|
||||
default: 10,
|
||||
},
|
||||
scale: {
|
||||
max: 999,
|
||||
min: 0,
|
||||
default: 2,
|
||||
},
|
||||
},
|
||||
},
|
||||
{ name: 'numeric', id: 'numeric' },
|
||||
{ name: 'real', id: 'real' },
|
||||
{ name: 'set', id: 'set' },
|
||||
{ name: 'smallint', id: 'smallint' },
|
||||
{ name: 'time', id: 'time' },
|
||||
{
|
||||
name: 'varbinary',
|
||||
id: 'varbinary',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{ name: 'varbinary', id: 'varbinary', hasCharMaxLength: true },
|
||||
] as const;
|
||||
|
||||
@@ -4,32 +4,12 @@ export const mariadbDataTypes: readonly DataTypeData[] = [
|
||||
// Level 1 - Most commonly used types
|
||||
{ name: 'int', id: 'int', usageLevel: 1 },
|
||||
{ name: 'bigint', id: 'bigint', usageLevel: 1 },
|
||||
{
|
||||
name: 'decimal',
|
||||
id: 'decimal',
|
||||
usageLevel: 1,
|
||||
fieldAttributes: {
|
||||
precision: {
|
||||
max: 65,
|
||||
min: 1,
|
||||
default: 10,
|
||||
},
|
||||
scale: {
|
||||
max: 30,
|
||||
min: 0,
|
||||
default: 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
{ name: 'decimal', id: 'decimal', usageLevel: 1 },
|
||||
{ name: 'boolean', id: 'boolean', usageLevel: 1 },
|
||||
{ name: 'datetime', id: 'datetime', usageLevel: 1 },
|
||||
{ name: 'date', id: 'date', usageLevel: 1 },
|
||||
{ name: 'timestamp', id: 'timestamp', usageLevel: 1 },
|
||||
{
|
||||
name: 'varchar',
|
||||
id: 'varchar',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{ name: 'varchar', id: 'varchar', hasCharMaxLength: true, usageLevel: 1 },
|
||||
{ name: 'text', id: 'text', usageLevel: 1 },
|
||||
|
||||
// Level 2 - Second most common types
|
||||
@@ -40,39 +20,16 @@ export const mariadbDataTypes: readonly DataTypeData[] = [
|
||||
{ name: 'tinyint', id: 'tinyint' },
|
||||
{ name: 'smallint', id: 'smallint' },
|
||||
{ name: 'mediumint', id: 'mediumint' },
|
||||
{
|
||||
name: 'numeric',
|
||||
id: 'numeric',
|
||||
fieldAttributes: {
|
||||
precision: {
|
||||
max: 65,
|
||||
min: 1,
|
||||
default: 10,
|
||||
},
|
||||
scale: {
|
||||
max: 30,
|
||||
min: 0,
|
||||
default: 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
{ name: 'numeric', id: 'numeric' },
|
||||
{ name: 'float', id: 'float' },
|
||||
{ name: 'double', id: 'double' },
|
||||
{ name: 'bit', id: 'bit' },
|
||||
{ name: 'bool', id: 'bool' },
|
||||
{ name: 'time', id: 'time' },
|
||||
{ name: 'year', id: 'year' },
|
||||
{ name: 'char', id: 'char', fieldAttributes: { hasCharMaxLength: true } },
|
||||
{
|
||||
name: 'binary',
|
||||
id: 'binary',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{
|
||||
name: 'varbinary',
|
||||
id: 'varbinary',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{ name: 'char', id: 'char', hasCharMaxLength: true },
|
||||
{ name: 'binary', id: 'binary', hasCharMaxLength: true },
|
||||
{ name: 'varbinary', id: 'varbinary', hasCharMaxLength: true },
|
||||
{ name: 'tinyblob', id: 'tinyblob' },
|
||||
{ name: 'blob', id: 'blob' },
|
||||
{ name: 'mediumblob', id: 'mediumblob' },
|
||||
|
||||
@@ -3,12 +3,7 @@ import type { DataTypeData } from './data-types';
|
||||
export const mysqlDataTypes: readonly DataTypeData[] = [
|
||||
// Level 1 - Most commonly used types
|
||||
{ name: 'int', id: 'int', usageLevel: 1 },
|
||||
{
|
||||
name: 'varchar',
|
||||
id: 'varchar',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
usageLevel: 1,
|
||||
},
|
||||
{ name: 'varchar', id: 'varchar', hasCharMaxLength: true, usageLevel: 1 },
|
||||
{ name: 'text', id: 'text', usageLevel: 1 },
|
||||
{ name: 'boolean', id: 'boolean', usageLevel: 1 },
|
||||
{ name: 'timestamp', id: 'timestamp', usageLevel: 1 },
|
||||
@@ -16,23 +11,7 @@ export const mysqlDataTypes: readonly DataTypeData[] = [
|
||||
|
||||
// Level 2 - Second most common types
|
||||
{ name: 'bigint', id: 'bigint', usageLevel: 2 },
|
||||
{
|
||||
name: 'decimal',
|
||||
id: 'decimal',
|
||||
usageLevel: 2,
|
||||
fieldAttributes: {
|
||||
precision: {
|
||||
max: 65,
|
||||
min: 1,
|
||||
default: 10,
|
||||
},
|
||||
scale: {
|
||||
max: 30,
|
||||
min: 0,
|
||||
default: 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
{ name: 'decimal', id: 'decimal', usageLevel: 2 },
|
||||
{ name: 'datetime', id: 'datetime', usageLevel: 2 },
|
||||
{ name: 'json', id: 'json', usageLevel: 2 },
|
||||
|
||||
@@ -43,7 +22,7 @@ export const mysqlDataTypes: readonly DataTypeData[] = [
|
||||
{ name: 'float', id: 'float' },
|
||||
{ name: 'double', id: 'double' },
|
||||
{ name: 'bit', id: 'bit' },
|
||||
{ name: 'char', id: 'char', fieldAttributes: { hasCharMaxLength: true } },
|
||||
{ name: 'char', id: 'char', hasCharMaxLength: true },
|
||||
{ name: 'tinytext', id: 'tinytext' },
|
||||
{ name: 'mediumtext', id: 'mediumtext' },
|
||||
{ name: 'longtext', id: 'longtext' },
|
||||
|
||||
@@ -2,30 +2,15 @@ import type { DataTypeData } from './data-types';
|
||||
|
||||
export const oracleDataTypes: readonly DataTypeData[] = [
|
||||
// Character types
|
||||
{
|
||||
name: 'VARCHAR2',
|
||||
id: 'varchar2',
|
||||
usageLevel: 1,
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{ name: 'VARCHAR2', id: 'varchar2', usageLevel: 1, hasCharMaxLength: true },
|
||||
{
|
||||
name: 'NVARCHAR2',
|
||||
id: 'nvarchar2',
|
||||
usageLevel: 1,
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{
|
||||
name: 'CHAR',
|
||||
id: 'char',
|
||||
usageLevel: 2,
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{
|
||||
name: 'NCHAR',
|
||||
id: 'nchar',
|
||||
usageLevel: 2,
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
hasCharMaxLength: true,
|
||||
},
|
||||
{ name: 'CHAR', id: 'char', usageLevel: 2, hasCharMaxLength: true },
|
||||
{ name: 'NCHAR', id: 'nchar', usageLevel: 2, hasCharMaxLength: true },
|
||||
{ name: 'CLOB', id: 'clob', usageLevel: 2 },
|
||||
{ name: 'NCLOB', id: 'nclob', usageLevel: 2 },
|
||||
|
||||
@@ -64,12 +49,7 @@ export const oracleDataTypes: readonly DataTypeData[] = [
|
||||
{ name: 'BFILE', id: 'bfile', usageLevel: 2 },
|
||||
|
||||
// Other types
|
||||
{
|
||||
name: 'RAW',
|
||||
id: 'raw',
|
||||
usageLevel: 2,
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{ name: 'RAW', id: 'raw', usageLevel: 2, hasCharMaxLength: true },
|
||||
{ name: 'LONG RAW', id: 'long_raw', usageLevel: 2 },
|
||||
{ name: 'ROWID', id: 'rowid', usageLevel: 2 },
|
||||
{ name: 'UROWID', id: 'urowid', usageLevel: 2 },
|
||||
|
||||
@@ -3,12 +3,7 @@ import type { DataTypeData } from './data-types';
|
||||
export const postgresDataTypes: readonly DataTypeData[] = [
|
||||
// Level 1 - Most commonly used types
|
||||
{ name: 'integer', id: 'integer', usageLevel: 1 },
|
||||
{
|
||||
name: 'varchar',
|
||||
id: 'varchar',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
usageLevel: 1,
|
||||
},
|
||||
{ name: 'varchar', id: 'varchar', hasCharMaxLength: true, usageLevel: 1 },
|
||||
{ name: 'text', id: 'text', usageLevel: 1 },
|
||||
{ name: 'boolean', id: 'boolean', usageLevel: 1 },
|
||||
{ name: 'timestamp', id: 'timestamp', usageLevel: 1 },
|
||||
@@ -16,23 +11,7 @@ export const postgresDataTypes: readonly DataTypeData[] = [
|
||||
|
||||
// Level 2 - Second most common types
|
||||
{ name: 'bigint', id: 'bigint', usageLevel: 2 },
|
||||
{
|
||||
name: 'decimal',
|
||||
id: 'decimal',
|
||||
usageLevel: 2,
|
||||
fieldAttributes: {
|
||||
precision: {
|
||||
max: 131072,
|
||||
min: 0,
|
||||
default: 10,
|
||||
},
|
||||
scale: {
|
||||
max: 16383,
|
||||
min: 0,
|
||||
default: 2,
|
||||
},
|
||||
},
|
||||
},
|
||||
{ name: 'decimal', id: 'decimal', usageLevel: 2 },
|
||||
{ name: 'serial', id: 'serial', usageLevel: 2 },
|
||||
{ name: 'json', id: 'json', usageLevel: 2 },
|
||||
{ name: 'jsonb', id: 'jsonb', usageLevel: 2 },
|
||||
@@ -44,33 +23,18 @@ export const postgresDataTypes: readonly DataTypeData[] = [
|
||||
},
|
||||
|
||||
// Less common types
|
||||
{
|
||||
name: 'numeric',
|
||||
id: 'numeric',
|
||||
fieldAttributes: {
|
||||
precision: {
|
||||
max: 131072,
|
||||
min: 0,
|
||||
default: 10,
|
||||
},
|
||||
scale: {
|
||||
max: 16383,
|
||||
min: 0,
|
||||
default: 2,
|
||||
},
|
||||
},
|
||||
},
|
||||
{ name: 'numeric', id: 'numeric' },
|
||||
{ name: 'real', id: 'real' },
|
||||
{ name: 'double precision', id: 'double_precision' },
|
||||
{ name: 'smallserial', id: 'smallserial' },
|
||||
{ name: 'bigserial', id: 'bigserial' },
|
||||
{ name: 'money', id: 'money' },
|
||||
{ name: 'smallint', id: 'smallint' },
|
||||
{ name: 'char', id: 'char', fieldAttributes: { hasCharMaxLength: true } },
|
||||
{ name: 'char', id: 'char', hasCharMaxLength: true },
|
||||
{
|
||||
name: 'character varying',
|
||||
id: 'character_varying',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
hasCharMaxLength: true,
|
||||
},
|
||||
{ name: 'time', id: 'time' },
|
||||
{ name: 'timestamp without time zone', id: 'timestamp_without_time_zone' },
|
||||
|
||||
@@ -4,93 +4,32 @@ export const sqlServerDataTypes: readonly DataTypeData[] = [
|
||||
// Level 1 - Most commonly used types
|
||||
{ name: 'int', id: 'int', usageLevel: 1 },
|
||||
{ name: 'bit', id: 'bit', usageLevel: 1 },
|
||||
{
|
||||
name: 'varchar',
|
||||
id: 'varchar',
|
||||
fieldAttributes: {
|
||||
hasCharMaxLength: true,
|
||||
hasCharMaxLengthOption: true,
|
||||
maxLength: 8000,
|
||||
},
|
||||
usageLevel: 1,
|
||||
},
|
||||
{
|
||||
name: 'nvarchar',
|
||||
id: 'nvarchar',
|
||||
fieldAttributes: {
|
||||
hasCharMaxLength: true,
|
||||
hasCharMaxLengthOption: true,
|
||||
maxLength: 4000,
|
||||
},
|
||||
usageLevel: 1,
|
||||
},
|
||||
{ name: 'varchar', id: 'varchar', hasCharMaxLength: true, usageLevel: 1 },
|
||||
{ name: 'nvarchar', id: 'nvarchar', hasCharMaxLength: true, usageLevel: 1 },
|
||||
{ name: 'text', id: 'text', usageLevel: 1 },
|
||||
{ name: 'datetime', id: 'datetime', usageLevel: 1 },
|
||||
{ name: 'date', id: 'date', usageLevel: 1 },
|
||||
|
||||
// Level 2 - Second most common types
|
||||
{ name: 'bigint', id: 'bigint', usageLevel: 2 },
|
||||
{
|
||||
name: 'decimal',
|
||||
id: 'decimal',
|
||||
usageLevel: 2,
|
||||
fieldAttributes: {
|
||||
precision: {
|
||||
max: 38,
|
||||
min: 1,
|
||||
default: 18,
|
||||
},
|
||||
scale: {
|
||||
max: 38,
|
||||
min: 0,
|
||||
default: 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
{ name: 'decimal', id: 'decimal', usageLevel: 2 },
|
||||
{ name: 'datetime2', id: 'datetime2', usageLevel: 2 },
|
||||
{ name: 'uniqueidentifier', id: 'uniqueidentifier', usageLevel: 2 },
|
||||
{ name: 'json', id: 'json', usageLevel: 2 },
|
||||
|
||||
// Less common types
|
||||
{
|
||||
name: 'numeric',
|
||||
id: 'numeric',
|
||||
fieldAttributes: {
|
||||
precision: {
|
||||
max: 38,
|
||||
min: 1,
|
||||
default: 18,
|
||||
},
|
||||
scale: {
|
||||
max: 38,
|
||||
min: 0,
|
||||
default: 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
{ name: 'numeric', id: 'numeric' },
|
||||
{ name: 'smallint', id: 'smallint' },
|
||||
{ name: 'smallmoney', id: 'smallmoney' },
|
||||
{ name: 'tinyint', id: 'tinyint' },
|
||||
{ name: 'money', id: 'money' },
|
||||
{ name: 'float', id: 'float' },
|
||||
{ name: 'real', id: 'real' },
|
||||
{ name: 'char', id: 'char', fieldAttributes: { hasCharMaxLength: true } },
|
||||
{ name: 'nchar', id: 'nchar', fieldAttributes: { hasCharMaxLength: true } },
|
||||
{ name: 'char', id: 'char', hasCharMaxLength: true },
|
||||
{ name: 'nchar', id: 'nchar', hasCharMaxLength: true },
|
||||
{ name: 'ntext', id: 'ntext' },
|
||||
{
|
||||
name: 'binary',
|
||||
id: 'binary',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{
|
||||
name: 'varbinary',
|
||||
id: 'varbinary',
|
||||
fieldAttributes: {
|
||||
hasCharMaxLength: true,
|
||||
hasCharMaxLengthOption: true,
|
||||
maxLength: 8000,
|
||||
},
|
||||
},
|
||||
{ name: 'binary', id: 'binary', hasCharMaxLength: true },
|
||||
{ name: 'varbinary', id: 'varbinary', hasCharMaxLength: true },
|
||||
{ name: 'image', id: 'image' },
|
||||
{ name: 'datetimeoffset', id: 'datetimeoffset' },
|
||||
{ name: 'smalldatetime', id: 'smalldatetime' },
|
||||
|
||||
@@ -10,41 +10,21 @@ export const sqliteDataTypes: readonly DataTypeData[] = [
|
||||
|
||||
// SQLite type aliases and common types
|
||||
{ name: 'int', id: 'int', usageLevel: 1 },
|
||||
{
|
||||
name: 'varchar',
|
||||
id: 'varchar',
|
||||
fieldAttributes: {
|
||||
hasCharMaxLength: true,
|
||||
},
|
||||
usageLevel: 1,
|
||||
},
|
||||
{
|
||||
name: 'timestamp',
|
||||
id: 'timestamp',
|
||||
usageLevel: 1,
|
||||
},
|
||||
{ name: 'varchar', id: 'varchar', hasCharMaxLength: true, usageLevel: 1 },
|
||||
{ name: 'timestamp', id: 'timestamp', usageLevel: 1 },
|
||||
{ name: 'date', id: 'date', usageLevel: 1 },
|
||||
{ name: 'datetime', id: 'datetime', usageLevel: 1 },
|
||||
{ name: 'boolean', id: 'boolean', usageLevel: 1 },
|
||||
|
||||
// Level 2 - Second most common types
|
||||
{ name: 'numeric', id: 'numeric', usageLevel: 2 },
|
||||
{ name: 'decimal', id: 'decimal', usageLevel: 2 },
|
||||
{ name: 'float', id: 'float', usageLevel: 2 },
|
||||
{
|
||||
name: 'decimal',
|
||||
id: 'decimal',
|
||||
usageLevel: 2,
|
||||
},
|
||||
{ name: 'double', id: 'double', usageLevel: 2 },
|
||||
{ name: 'json', id: 'json', usageLevel: 2 },
|
||||
|
||||
// Less common types (all map to SQLite storage classes)
|
||||
{
|
||||
name: 'char',
|
||||
id: 'char',
|
||||
fieldAttributes: {
|
||||
hasCharMaxLength: true,
|
||||
},
|
||||
usageLevel: 2,
|
||||
},
|
||||
{ name: 'char', id: 'char', hasCharMaxLength: true },
|
||||
{ name: 'binary', id: 'binary' },
|
||||
{ name: 'varbinary', id: 'varbinary' },
|
||||
{ name: 'smallint', id: 'smallint' },
|
||||
|
||||
@@ -4,5 +4,4 @@ export const defaultSchemas: { [key in DatabaseType]?: string } = {
|
||||
[DatabaseType.POSTGRESQL]: 'public',
|
||||
[DatabaseType.SQL_SERVER]: 'dbo',
|
||||
[DatabaseType.CLICKHOUSE]: 'default',
|
||||
[DatabaseType.COCKROACHDB]: 'public',
|
||||
};
|
||||
|
||||
@@ -73,13 +73,7 @@ function parseMSSQLDefault(field: DBField): string {
|
||||
return `'${defaultValue}'`;
|
||||
}
|
||||
|
||||
export function exportMSSQL({
|
||||
diagram,
|
||||
onlyRelationships = false,
|
||||
}: {
|
||||
diagram: Diagram;
|
||||
onlyRelationships?: boolean;
|
||||
}): string {
|
||||
export function exportMSSQL(diagram: Diagram): string {
|
||||
if (!diagram.tables || !diagram.relationships) {
|
||||
return '';
|
||||
}
|
||||
@@ -89,254 +83,202 @@ export function exportMSSQL({
|
||||
|
||||
// Create CREATE SCHEMA statements for all schemas
|
||||
let sqlScript = '';
|
||||
const schemas = new Set<string>();
|
||||
|
||||
if (!onlyRelationships) {
|
||||
const schemas = new Set<string>();
|
||||
tables.forEach((table) => {
|
||||
if (table.schema) {
|
||||
schemas.add(table.schema);
|
||||
}
|
||||
});
|
||||
|
||||
tables.forEach((table) => {
|
||||
if (table.schema) {
|
||||
schemas.add(table.schema);
|
||||
// Add schema creation statements
|
||||
schemas.forEach((schema) => {
|
||||
sqlScript += `IF NOT EXISTS (SELECT * FROM sys.schemas WHERE name = '${schema}')\nBEGIN\n EXEC('CREATE SCHEMA [${schema}]');\nEND;\n\n`;
|
||||
});
|
||||
|
||||
// Generate table creation SQL
|
||||
sqlScript += tables
|
||||
.map((table: DBTable) => {
|
||||
// Skip views
|
||||
if (table.isView) {
|
||||
return '';
|
||||
}
|
||||
});
|
||||
|
||||
// Add schema creation statements
|
||||
schemas.forEach((schema) => {
|
||||
sqlScript += `IF NOT EXISTS (SELECT * FROM sys.schemas WHERE name = '${schema}')\nBEGIN\n EXEC('CREATE SCHEMA [${schema}]');\nEND;\n`;
|
||||
});
|
||||
const tableName = table.schema
|
||||
? `[${table.schema}].[${table.name}]`
|
||||
: `[${table.name}]`;
|
||||
|
||||
// Generate table creation SQL
|
||||
sqlScript += tables
|
||||
.map((table: DBTable) => {
|
||||
// Skip views
|
||||
if (table.isView) {
|
||||
return '';
|
||||
}
|
||||
return `${
|
||||
table.comments ? formatMSSQLTableComment(table.comments) : ''
|
||||
}CREATE TABLE ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `[${field.name}]`;
|
||||
const typeName = field.type.name;
|
||||
|
||||
const tableName = table.schema
|
||||
? `[${table.schema}].[${table.name}]`
|
||||
: `[${table.name}]`;
|
||||
|
||||
return `${
|
||||
table.comments
|
||||
? formatMSSQLTableComment(table.comments)
|
||||
: ''
|
||||
}CREATE TABLE ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `[${field.name}]`;
|
||||
const typeName = field.type.name;
|
||||
|
||||
// Handle SQL Server specific type formatting
|
||||
let typeWithSize = typeName;
|
||||
if (field.characterMaximumLength) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'varchar' ||
|
||||
typeName.toLowerCase() === 'nvarchar' ||
|
||||
typeName.toLowerCase() === 'char' ||
|
||||
typeName.toLowerCase() === 'nchar'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.characterMaximumLength})`;
|
||||
}
|
||||
// Handle SQL Server specific type formatting
|
||||
let typeWithSize = typeName;
|
||||
if (field.characterMaximumLength) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'varchar' ||
|
||||
typeName.toLowerCase() === 'nvarchar' ||
|
||||
typeName.toLowerCase() === 'char' ||
|
||||
typeName.toLowerCase() === 'nchar'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.characterMaximumLength})`;
|
||||
}
|
||||
if (field.precision && field.scale) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision}, ${field.scale})`;
|
||||
}
|
||||
} else if (field.precision) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision})`;
|
||||
}
|
||||
} else if (field.precision && field.scale) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision}, ${field.scale})`;
|
||||
}
|
||||
} else if (field.precision) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision})`;
|
||||
}
|
||||
}
|
||||
|
||||
const notNull = field.nullable ? '' : ' NOT NULL';
|
||||
const notNull = field.nullable ? '' : ' NOT NULL';
|
||||
|
||||
// Check if identity column
|
||||
const identity = field.default
|
||||
?.toLowerCase()
|
||||
.includes('identity')
|
||||
? ' IDENTITY(1,1)'
|
||||
// Check if identity column
|
||||
const identity = field.default
|
||||
?.toLowerCase()
|
||||
.includes('identity')
|
||||
? ' IDENTITY(1,1)'
|
||||
: '';
|
||||
|
||||
const unique =
|
||||
!field.primaryKey && field.unique ? ' UNIQUE' : '';
|
||||
|
||||
// Handle default value using SQL Server specific parser
|
||||
const defaultValue =
|
||||
field.default &&
|
||||
!field.default.toLowerCase().includes('identity')
|
||||
? ` DEFAULT ${parseMSSQLDefault(field)}`
|
||||
: '';
|
||||
|
||||
const unique =
|
||||
!field.primaryKey && field.unique ? ' UNIQUE' : '';
|
||||
|
||||
// Handle default value using SQL Server specific parser
|
||||
const defaultValue =
|
||||
field.default &&
|
||||
!field.default.toLowerCase().includes('identity')
|
||||
? ` DEFAULT ${parseMSSQLDefault(field)}`
|
||||
: '';
|
||||
|
||||
// Do not add PRIMARY KEY as a column constraint - will add as table constraint
|
||||
return `${exportFieldComment(field.comments ?? '')} ${fieldName} ${typeWithSize}${notNull}${identity}${unique}${defaultValue}`;
|
||||
})
|
||||
.join(',\n')}${
|
||||
table.fields.filter((f) => f.primaryKey).length > 0
|
||||
? `,\n PRIMARY KEY (${table.fields
|
||||
.filter((f) => f.primaryKey)
|
||||
.map((f) => `[${f.name}]`)
|
||||
.join(', ')})`
|
||||
: ''
|
||||
}\n);\n${(() => {
|
||||
const validIndexes = table.indexes
|
||||
.map((index) => {
|
||||
const indexName = table.schema
|
||||
? `[${table.schema}_${index.name}]`
|
||||
: `[${index.name}]`;
|
||||
const indexFields = index.fieldIds
|
||||
.map((fieldId) => {
|
||||
const field = table.fields.find(
|
||||
(f) => f.id === fieldId
|
||||
);
|
||||
return field ? `[${field.name}]` : '';
|
||||
})
|
||||
.filter(Boolean);
|
||||
|
||||
// SQL Server has a limit of 32 columns in an index
|
||||
if (indexFields.length > 32) {
|
||||
const warningComment = `/* WARNING: This index originally had ${indexFields.length} columns. It has been truncated to 32 columns due to SQL Server's index column limit. */\n`;
|
||||
console.warn(
|
||||
`Warning: Index ${indexName} on table ${tableName} has ${indexFields.length} columns. SQL Server limits indexes to 32 columns. The index will be truncated.`
|
||||
);
|
||||
indexFields.length = 32;
|
||||
return indexFields.length > 0
|
||||
? `${warningComment}CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName}\nON ${tableName} (${indexFields.join(', ')});`
|
||||
: '';
|
||||
}
|
||||
|
||||
return indexFields.length > 0
|
||||
? `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName}\nON ${tableName} (${indexFields.join(', ')});`
|
||||
: '';
|
||||
// Do not add PRIMARY KEY as a column constraint - will add as table constraint
|
||||
return `${exportFieldComment(field.comments ?? '')} ${fieldName} ${typeWithSize}${notNull}${identity}${unique}${defaultValue}`;
|
||||
})
|
||||
.join(',\n')}${
|
||||
table.fields.filter((f) => f.primaryKey).length > 0
|
||||
? `,\n PRIMARY KEY (${table.fields
|
||||
.filter((f) => f.primaryKey)
|
||||
.map((f) => `[${f.name}]`)
|
||||
.join(', ')})`
|
||||
: ''
|
||||
}\n);\n\n${table.indexes
|
||||
.map((index) => {
|
||||
const indexName = table.schema
|
||||
? `[${table.schema}_${index.name}]`
|
||||
: `[${index.name}]`;
|
||||
const indexFields = index.fieldIds
|
||||
.map((fieldId) => {
|
||||
const field = table.fields.find(
|
||||
(f) => f.id === fieldId
|
||||
);
|
||||
return field ? `[${field.name}]` : '';
|
||||
})
|
||||
.filter(Boolean);
|
||||
|
||||
return validIndexes.length > 0
|
||||
? `\n-- Indexes\n${validIndexes.join('\n')}`
|
||||
// SQL Server has a limit of 32 columns in an index
|
||||
if (indexFields.length > 32) {
|
||||
const warningComment = `/* WARNING: This index originally had ${indexFields.length} columns. It has been truncated to 32 columns due to SQL Server's index column limit. */\n`;
|
||||
console.warn(
|
||||
`Warning: Index ${indexName} on table ${tableName} has ${indexFields.length} columns. SQL Server limits indexes to 32 columns. The index will be truncated.`
|
||||
);
|
||||
indexFields.length = 32;
|
||||
return indexFields.length > 0
|
||||
? `${warningComment}CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName}\nON ${tableName} (${indexFields.join(', ')});\n\n`
|
||||
: '';
|
||||
}
|
||||
|
||||
return indexFields.length > 0
|
||||
? `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName}\nON ${tableName} (${indexFields.join(', ')});\n\n`
|
||||
: '';
|
||||
})()}\n`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings (views)
|
||||
.join('\n');
|
||||
}
|
||||
})
|
||||
.join('')}`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings (views)
|
||||
.join('\n');
|
||||
|
||||
// Generate foreign keys
|
||||
if (relationships.length > 0) {
|
||||
sqlScript += '\n-- Foreign key constraints\n';
|
||||
sqlScript += `\n${relationships
|
||||
.map((r: DBRelationship) => {
|
||||
const sourceTable = tables.find((t) => t.id === r.sourceTableId);
|
||||
const targetTable = tables.find((t) => t.id === r.targetTableId);
|
||||
|
||||
// Process all relationships and create FK objects with schema info
|
||||
const foreignKeys = relationships
|
||||
.map((r: DBRelationship) => {
|
||||
const sourceTable = tables.find(
|
||||
(t) => t.id === r.sourceTableId
|
||||
);
|
||||
const targetTable = tables.find(
|
||||
(t) => t.id === r.targetTableId
|
||||
);
|
||||
if (
    !sourceTable ||
    !targetTable ||
    sourceTable.isView ||
    targetTable.isView
) {
    return '';
}
|
||||
const sourceField = sourceTable.fields.find(
    (f) => f.id === r.sourceFieldId
);
const targetField = targetTable.fields.find(
    (f) => f.id === r.targetFieldId
);

if (!sourceField || !targetField) {
    return '';
}
|
||||
// Determine which table should have the foreign key based on cardinality
let fkTable, fkField, refTable, refField;

if (
    r.sourceCardinality === 'one' &&
    r.targetCardinality === 'many'
) {
    // FK goes on target table
    fkTable = targetTable;
    fkField = targetField;
    refTable = sourceTable;
    refField = sourceField;
} else if (
    r.sourceCardinality === 'many' &&
    r.targetCardinality === 'one'
) {
    // FK goes on source table
    fkTable = sourceTable;
    fkField = sourceField;
    refTable = targetTable;
    refField = targetField;
} else if (
    r.sourceCardinality === 'one' &&
    r.targetCardinality === 'one'
) {
    // For 1:1, FK can go on either side, but typically goes on the table that references the other
    // We'll keep the current behavior for 1:1
    fkTable = sourceTable;
    fkField = sourceField;
    refTable = targetTable;
    refField = targetField;
} else {
    // Many-to-many relationships need a junction table, skip for now
    return '';
}
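// Editor's sketch (not part of this commit): the cardinality rule above as a
// standalone helper. It returns which side of the relationship receives the FK
// column; many-to-many returns null because it needs a junction table instead.
type Cardinality = 'one' | 'many';

const fkSide = (
    sourceCardinality: Cardinality,
    targetCardinality: Cardinality
): 'source' | 'target' | null => {
    if (sourceCardinality === 'one' && targetCardinality === 'many') return 'target';
    if (sourceCardinality === 'many' && targetCardinality === 'one') return 'source';
    if (sourceCardinality === 'one' && targetCardinality === 'one') return 'source'; // keep 1:1 on the source side
    return null; // many-to-many: junction table required
};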
|
||||
const fkTableName = fkTable.schema
    ? `[${fkTable.schema}].[${fkTable.name}]`
    : `[${fkTable.name}]`;
const refTableName = refTable.schema
    ? `[${refTable.schema}].[${refTable.name}]`
    : `[${refTable.name}]`;
|
||||
|
||||
return {
|
||||
schema: fkTable.schema || 'dbo',
|
||||
sql: `ALTER TABLE ${fkTableName} ADD CONSTRAINT [${r.name}] FOREIGN KEY([${fkField.name}]) REFERENCES ${refTableName}([${refField.name}]);`,
|
||||
};
|
||||
})
|
||||
.filter(Boolean); // Remove empty objects
|
||||
|
||||
// Group foreign keys by schema
|
||||
const fksBySchema = foreignKeys.reduce(
|
||||
(acc, fk) => {
|
||||
if (!fk) return acc;
|
||||
const schema = fk.schema;
|
||||
if (!acc[schema]) {
|
||||
acc[schema] = [];
|
||||
}
|
||||
acc[schema].push(fk.sql);
|
||||
return acc;
|
||||
},
|
||||
{} as Record<string, string[]>
|
||||
);
|
||||
|
||||
// Sort schemas and generate SQL with separators
|
||||
const sortedSchemas = Object.keys(fksBySchema).sort();
|
||||
const fkSql = sortedSchemas
|
||||
.map((schema, index) => {
|
||||
const schemaFks = fksBySchema[schema].join('\n');
|
||||
if (index === 0) {
|
||||
return `-- Schema: ${schema}\n${schemaFks}`;
|
||||
} else {
|
||||
return `\n-- Schema: ${schema}\n${schemaFks}`;
|
||||
}
|
||||
})
|
||||
.join('\n');
|
||||
|
||||
sqlScript += fkSql;
|
||||
}
|
||||
return `ALTER TABLE ${fkTableName}\nADD CONSTRAINT [${r.name}] FOREIGN KEY([${fkField.name}]) REFERENCES ${refTableName}([${refField.name}]);\n`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings
|
||||
.join('\n')}`;
|
||||
|
||||
return sqlScript;
|
||||
}
|
||||
|
||||
@@ -170,13 +170,7 @@ function mapMySQLType(typeName: string): string {
|
||||
return typeName;
|
||||
}
|
||||
|
||||
export function exportMySQL({
|
||||
diagram,
|
||||
onlyRelationships = false,
|
||||
}: {
|
||||
diagram: Diagram;
|
||||
onlyRelationships?: boolean;
|
||||
}): string {
|
||||
export function exportMySQL(diagram: Diagram): string {
|
||||
if (!diagram.tables || !diagram.relationships) {
|
||||
return '';
|
||||
}
|
||||
@@ -185,245 +179,224 @@ export function exportMySQL({
|
||||
const relationships = diagram.relationships;
|
||||
|
||||
// Start SQL script
|
||||
let sqlScript = '-- MySQL database export\n';
|
||||
let sqlScript = '-- MySQL database export\n\n';
|
||||
|
||||
if (!onlyRelationships) {
|
||||
// MySQL doesn't really use transactions for DDL statements but we'll add it for consistency
|
||||
sqlScript += 'START TRANSACTION;\n';
|
||||
// MySQL doesn't really use transactions for DDL statements but we'll add it for consistency
|
||||
sqlScript += 'START TRANSACTION;\n\n';
|
||||
|
||||
// Create databases (schemas) if they don't exist
|
||||
const schemas = new Set<string>();
|
||||
tables.forEach((table) => {
|
||||
if (table.schema) {
|
||||
schemas.add(table.schema);
|
||||
}
|
||||
});
|
||||
|
||||
schemas.forEach((schema) => {
|
||||
sqlScript += `CREATE DATABASE IF NOT EXISTS \`${schema}\`;\n`;
|
||||
});
|
||||
|
||||
if (schemas.size > 0) {
|
||||
sqlScript += '\n';
|
||||
// Create databases (schemas) if they don't exist
|
||||
const schemas = new Set<string>();
|
||||
tables.forEach((table) => {
|
||||
if (table.schema) {
|
||||
schemas.add(table.schema);
|
||||
}
|
||||
});
|
||||
|
||||
// Generate table creation SQL
|
||||
sqlScript += tables
|
||||
.map((table: DBTable) => {
|
||||
// Skip views
|
||||
if (table.isView) {
|
||||
return '';
|
||||
}
|
||||
schemas.forEach((schema) => {
|
||||
sqlScript += `CREATE DATABASE IF NOT EXISTS \`${schema}\`;\n`;
|
||||
});
|
||||
|
||||
// Use schema prefix if available
|
||||
const tableName = table.schema
|
||||
? `\`${table.schema}\`.\`${table.name}\``
|
||||
: `\`${table.name}\``;
|
||||
if (schemas.size > 0) {
|
||||
sqlScript += '\n';
|
||||
}
|
||||
|
||||
// Get primary key fields
|
||||
const primaryKeyFields = table.fields.filter(
|
||||
(f) => f.primaryKey
|
||||
);
|
||||
// Generate table creation SQL
|
||||
sqlScript += tables
|
||||
.map((table: DBTable) => {
|
||||
// Skip views
|
||||
if (table.isView) {
|
||||
return '';
|
||||
}
|
||||
|
||||
return `${
|
||||
table.comments ? formatTableComment(table.comments) : ''
|
||||
}\nCREATE TABLE IF NOT EXISTS ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `\`${field.name}\``;
|
||||
// Use schema prefix if available
|
||||
const tableName = table.schema
|
||||
? `\`${table.schema}\`.\`${table.name}\``
|
||||
: `\`${table.name}\``;
|
||||
|
||||
// Handle type name - map to MySQL compatible types
|
||||
const typeName = mapMySQLType(field.type.name);
|
||||
// Get primary key fields
|
||||
const primaryKeyFields = table.fields.filter((f) => f.primaryKey);
|
||||
|
||||
// Handle MySQL specific type formatting
|
||||
let typeWithSize = typeName;
|
||||
if (field.characterMaximumLength) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'varchar' ||
|
||||
typeName.toLowerCase() === 'char' ||
|
||||
typeName.toLowerCase() === 'varbinary'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.characterMaximumLength})`;
|
||||
}
|
||||
}
|
||||
if (field.precision && field.scale) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision}, ${field.scale})`;
|
||||
}
|
||||
} else if (field.precision) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision})`;
|
||||
}
|
||||
}
|
||||
return `${
|
||||
table.comments ? formatTableComment(table.comments) : ''
|
||||
}CREATE TABLE IF NOT EXISTS ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `\`${field.name}\``;
|
||||
|
||||
// Set a default size for VARCHAR columns if not specified
|
||||
// Handle type name - map to MySQL compatible types
|
||||
const typeName = mapMySQLType(field.type.name);
|
||||
|
||||
// Handle MySQL specific type formatting
|
||||
let typeWithSize = typeName;
|
||||
if (field.characterMaximumLength) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'varchar' &&
|
||||
!field.characterMaximumLength
|
||||
typeName.toLowerCase() === 'varchar' ||
|
||||
typeName.toLowerCase() === 'char' ||
|
||||
typeName.toLowerCase() === 'varbinary'
|
||||
) {
|
||||
typeWithSize = `${typeName}(255)`;
|
||||
typeWithSize = `${typeName}(${field.characterMaximumLength})`;
|
||||
}
|
||||
|
||||
const notNull = field.nullable ? '' : ' NOT NULL';
|
||||
|
||||
// Handle auto_increment - MySQL uses AUTO_INCREMENT keyword
|
||||
let autoIncrement = '';
|
||||
} else if (field.precision && field.scale) {
|
||||
if (
|
||||
field.primaryKey &&
|
||||
(field.default
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision}, ${field.scale})`;
|
||||
}
|
||||
} else if (field.precision) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision})`;
|
||||
}
|
||||
}
|
||||
|
||||
// Set a default size for VARCHAR columns if not specified
|
||||
if (
|
||||
typeName.toLowerCase() === 'varchar' &&
|
||||
!field.characterMaximumLength
|
||||
) {
|
||||
typeWithSize = `${typeName}(255)`;
|
||||
}
|
||||
|
||||
const notNull = field.nullable ? '' : ' NOT NULL';
|
||||
|
||||
// Handle auto_increment - MySQL uses AUTO_INCREMENT keyword
|
||||
let autoIncrement = '';
|
||||
if (
|
||||
field.primaryKey &&
|
||||
(field.default?.toLowerCase().includes('identity') ||
|
||||
field.default
|
||||
?.toLowerCase()
|
||||
.includes('identity') ||
|
||||
field.default
|
||||
?.toLowerCase()
|
||||
.includes('autoincrement') ||
|
||||
field.default?.includes('nextval'))
|
||||
) {
|
||||
autoIncrement = ' AUTO_INCREMENT';
|
||||
}
|
||||
.includes('autoincrement') ||
|
||||
field.default?.includes('nextval'))
|
||||
) {
|
||||
autoIncrement = ' AUTO_INCREMENT';
|
||||
}
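// Editor's sketch (not part of this commit): the default-value heuristic above,
// isolated. Imported schemas encode auto-increment in different ways (SQL Server
// IDENTITY, SQLite AUTOINCREMENT, PostgreSQL nextval(...)), so the generator
// re-expresses any of them as MySQL's AUTO_INCREMENT on primary key columns.
const looksAutoIncrement = (defaultValue?: string | null): boolean => {
    if (!defaultValue) return false;
    const d = defaultValue.toLowerCase();
    return (
        d.includes('identity') ||
        d.includes('autoincrement') ||
        d.includes('nextval')
    );
};

// looksAutoIncrement("IDENTITY(1,1)")           -> true
// looksAutoIncrement("nextval('users_id_seq')") -> true
// looksAutoIncrement("CURRENT_TIMESTAMP")       -> false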
|
||||
|
||||
// Only add UNIQUE constraint if the field is not part of the primary key
const unique =
    !field.primaryKey && field.unique ? ' UNIQUE' : '';
|
||||
|
||||
// Handle default value
|
||||
const defaultValue =
|
||||
field.default &&
|
||||
!field.default.toLowerCase().includes('identity') &&
|
||||
!field.default
|
||||
.toLowerCase()
|
||||
.includes('autoincrement') &&
|
||||
!field.default.includes('nextval')
|
||||
? ` DEFAULT ${parseMySQLDefault(field)}`
|
||||
: '';
|
||||
|
||||
// MySQL supports inline comments
|
||||
const comment = field.comments
|
||||
? ` COMMENT '${escapeSQLComment(field.comments)}'`
|
||||
// Handle default value
|
||||
const defaultValue =
|
||||
field.default &&
|
||||
!field.default.toLowerCase().includes('identity') &&
|
||||
!field.default
|
||||
.toLowerCase()
|
||||
.includes('autoincrement') &&
|
||||
!field.default.includes('nextval')
|
||||
? ` DEFAULT ${parseMySQLDefault(field)}`
|
||||
: '';
|
||||
|
||||
return `${exportFieldComment(field.comments ?? '')} ${fieldName} ${typeWithSize}${notNull}${autoIncrement}${unique}${defaultValue}${comment}`;
|
||||
})
|
||||
.join(',\n')}${
|
||||
// Add PRIMARY KEY as table constraint
|
||||
primaryKeyFields.length > 0
|
||||
? `,\n PRIMARY KEY (${primaryKeyFields
|
||||
.map((f) => `\`${f.name}\``)
|
||||
.join(', ')})`
|
||||
: ''
|
||||
}\n)${
|
||||
// MySQL supports table comments
|
||||
table.comments
|
||||
? ` COMMENT='${escapeSQLComment(table.comments)}'`
|
||||
: ''
|
||||
};\n${
|
||||
// Add indexes - MySQL creates them separately from the table definition
|
||||
(() => {
|
||||
const validIndexes = table.indexes
|
||||
.map((index) => {
|
||||
// Get the list of fields for this index
|
||||
const indexFields = index.fieldIds
|
||||
.map((fieldId) => {
|
||||
const field = table.fields.find(
|
||||
(f) => f.id === fieldId
|
||||
);
|
||||
return field ? field : null;
|
||||
})
|
||||
.filter(Boolean);
|
||||
// MySQL supports inline comments
|
||||
const comment = field.comments
|
||||
? ` COMMENT '${escapeSQLComment(field.comments)}'`
|
||||
: '';
|
||||
|
||||
// Skip if this index exactly matches the primary key fields
|
||||
if (
|
||||
primaryKeyFields.length ===
|
||||
indexFields.length &&
|
||||
primaryKeyFields.every((pk) =>
|
||||
indexFields.some(
|
||||
(field) =>
|
||||
field && field.id === pk.id
|
||||
)
|
||||
)
|
||||
) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Create a unique index name by combining table name, field names, and a unique/non-unique indicator
|
||||
const fieldNamesForIndex = indexFields
|
||||
.map((field) => field?.name || '')
|
||||
.join('_');
|
||||
const uniqueIndicator = index.unique
|
||||
? '_unique'
|
||||
: '';
|
||||
const indexName = `\`idx_${table.name}_${fieldNamesForIndex}${uniqueIndicator}\``;
|
||||
|
||||
// Get the properly quoted field names
|
||||
const indexFieldNames = indexFields
|
||||
.map((field) =>
|
||||
field ? `\`${field.name}\`` : ''
|
||||
)
|
||||
.filter(Boolean);
|
||||
|
||||
// Check for text/blob fields that need special handling
|
||||
const hasTextOrBlob = indexFields.some(
|
||||
(field) => {
|
||||
const typeName =
|
||||
field?.type.name.toLowerCase() ||
|
||||
'';
|
||||
return (
|
||||
typeName === 'text' ||
|
||||
typeName === 'mediumtext' ||
|
||||
typeName === 'longtext' ||
|
||||
typeName === 'blob'
|
||||
);
|
||||
}
|
||||
return `${exportFieldComment(field.comments ?? '')} ${fieldName} ${typeWithSize}${notNull}${autoIncrement}${unique}${defaultValue}${comment}`;
|
||||
})
|
||||
.join(',\n')}${
|
||||
// Add PRIMARY KEY as table constraint
|
||||
primaryKeyFields.length > 0
|
||||
? `,\n PRIMARY KEY (${primaryKeyFields
|
||||
.map((f) => `\`${f.name}\``)
|
||||
.join(', ')})`
|
||||
: ''
|
||||
}\n)${
|
||||
// MySQL supports table comments
|
||||
table.comments
|
||||
? ` COMMENT='${escapeSQLComment(table.comments)}'`
|
||||
: ''
|
||||
};\n\n${
|
||||
// Add indexes - MySQL creates them separately from the table definition
|
||||
table.indexes
|
||||
.map((index) => {
|
||||
// Get the list of fields for this index
|
||||
const indexFields = index.fieldIds
|
||||
.map((fieldId) => {
|
||||
const field = table.fields.find(
|
||||
(f) => f.id === fieldId
|
||||
);
|
||||
|
||||
// If there are TEXT/BLOB fields, need to add prefix length
|
||||
const indexFieldsWithPrefix = hasTextOrBlob
|
||||
? indexFieldNames.map((name) => {
|
||||
const field = indexFields.find(
|
||||
(f) => `\`${f?.name}\`` === name
|
||||
);
|
||||
if (!field) return name;
|
||||
|
||||
const typeName =
|
||||
field.type.name.toLowerCase();
|
||||
if (
|
||||
typeName === 'text' ||
|
||||
typeName === 'mediumtext' ||
|
||||
typeName === 'longtext' ||
|
||||
typeName === 'blob'
|
||||
) {
|
||||
// Add a prefix length for TEXT/BLOB fields (required in MySQL)
|
||||
return `${name}(255)`;
|
||||
}
|
||||
return name;
|
||||
})
|
||||
: indexFieldNames;
|
||||
|
||||
return indexFieldNames.length > 0
|
||||
? `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName} ON ${tableName} (${indexFieldsWithPrefix.join(', ')});`
|
||||
: '';
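// Editor's sketch (not part of this commit): the prefix rule above, isolated.
// MySQL requires an explicit key length when indexing TEXT/BLOB columns, so those
// key parts are emitted as `col`(255) while other types keep the plain quoted name.
const needsPrefixLength = (typeName: string): boolean =>
    ['text', 'mediumtext', 'longtext', 'blob'].includes(typeName.toLowerCase());

const indexKeyPart = (columnName: string, typeName: string): string =>
    needsPrefixLength(typeName)
        ? `\`${columnName}\`(255)`
        : `\`${columnName}\``;

// indexKeyPart('body', 'TEXT')     -> '`body`(255)'
// indexKeyPart('title', 'varchar') -> '`title`'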
|
||||
return field ? field : null;
|
||||
})
|
||||
.filter(Boolean);
|
||||
|
||||
return validIndexes.length > 0
|
||||
? `\n-- Indexes\n${validIndexes.join('\n')}`
|
||||
// Skip if this index exactly matches the primary key fields
|
||||
if (
|
||||
primaryKeyFields.length === indexFields.length &&
|
||||
primaryKeyFields.every((pk) =>
|
||||
indexFields.some(
|
||||
(field) => field && field.id === pk.id
|
||||
)
|
||||
)
|
||||
) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Create a unique index name by combining table name, field names, and a unique/non-unique indicator
|
||||
const fieldNamesForIndex = indexFields
|
||||
.map((field) => field?.name || '')
|
||||
.join('_');
|
||||
const uniqueIndicator = index.unique ? '_unique' : '';
|
||||
const indexName = `\`idx_${table.name}_${fieldNamesForIndex}${uniqueIndicator}\``;
|
||||
|
||||
// Get the properly quoted field names
|
||||
const indexFieldNames = indexFields
|
||||
.map((field) => (field ? `\`${field.name}\`` : ''))
|
||||
.filter(Boolean);
|
||||
|
||||
// Check for text/blob fields that need special handling
|
||||
const hasTextOrBlob = indexFields.some((field) => {
|
||||
const typeName =
|
||||
field?.type.name.toLowerCase() || '';
|
||||
return (
|
||||
typeName === 'text' ||
|
||||
typeName === 'mediumtext' ||
|
||||
typeName === 'longtext' ||
|
||||
typeName === 'blob'
|
||||
);
|
||||
});
|
||||
|
||||
// If there are TEXT/BLOB fields, need to add prefix length
|
||||
const indexFieldsWithPrefix = hasTextOrBlob
|
||||
? indexFieldNames.map((name) => {
|
||||
const field = indexFields.find(
|
||||
(f) => `\`${f?.name}\`` === name
|
||||
);
|
||||
if (!field) return name;
|
||||
|
||||
const typeName =
|
||||
field.type.name.toLowerCase();
|
||||
if (
|
||||
typeName === 'text' ||
|
||||
typeName === 'mediumtext' ||
|
||||
typeName === 'longtext' ||
|
||||
typeName === 'blob'
|
||||
) {
|
||||
// Add a prefix length for TEXT/BLOB fields (required in MySQL)
|
||||
return `${name}(255)`;
|
||||
}
|
||||
return name;
|
||||
})
|
||||
: indexFieldNames;
|
||||
|
||||
return indexFieldNames.length > 0
|
||||
? `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName}\nON ${tableName} (${indexFieldsWithPrefix.join(', ')});\n`
|
||||
: '';
|
||||
})()
|
||||
}\n`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings (views)
|
||||
.join('\n');
|
||||
}
|
||||
})
|
||||
.filter(Boolean)
|
||||
.join('\n')
|
||||
}`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings (views)
|
||||
.join('\n');
|
||||
|
||||
// Generate foreign keys
|
||||
if (relationships.length > 0) {
|
||||
sqlScript += '\n-- Foreign key constraints\n';
|
||||
sqlScript += '\n-- Foreign key constraints\n\n';
|
||||
|
||||
const foreignKeys = relationships
|
||||
sqlScript += relationships
|
||||
.map((r: DBRelationship) => {
|
||||
const sourceTable = tables.find(
|
||||
(t) => t.id === r.sourceTableId
|
||||
@@ -499,15 +472,14 @@ export function exportMySQL({
|
||||
const constraintName = `\`fk_${fkTable.name}_${fkField.name}\``;
|
||||
|
||||
// MySQL supports ON DELETE and ON UPDATE actions
|
||||
return `ALTER TABLE ${fkTableName} ADD CONSTRAINT ${constraintName} FOREIGN KEY(\`${fkField.name}\`) REFERENCES ${refTableName}(\`${refField.name}\`);`;
|
||||
return `ALTER TABLE ${fkTableName}\nADD CONSTRAINT ${constraintName} FOREIGN KEY(\`${fkField.name}\`) REFERENCES ${refTableName}(\`${refField.name}\`)\nON UPDATE CASCADE ON DELETE RESTRICT;\n`;
|
||||
})
|
||||
.filter(Boolean); // Remove empty strings
|
||||
|
||||
sqlScript += foreignKeys.join('\n');
|
||||
.filter(Boolean) // Remove empty strings
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
// Commit transaction
|
||||
sqlScript += '\n\nCOMMIT;\n';
|
||||
sqlScript += '\nCOMMIT;\n';
|
||||
|
||||
return sqlScript;
|
||||
}
|
||||
|
||||
@@ -142,16 +142,10 @@ function exportCustomTypes(customTypes: DBCustomType[]): string {
|
||||
}
|
||||
});
|
||||
|
||||
return typesSql ? typesSql + '\n' : '';
|
||||
return typesSql + '\n';
|
||||
}
|
||||
|
||||
export function exportPostgreSQL({
|
||||
diagram,
|
||||
onlyRelationships = false,
|
||||
}: {
|
||||
diagram: Diagram;
|
||||
onlyRelationships?: boolean;
|
||||
}): string {
|
||||
export function exportPostgreSQL(diagram: Diagram): string {
|
||||
if (!diagram.tables || !diagram.relationships) {
|
||||
return '';
|
||||
}
|
||||
@@ -162,391 +156,326 @@ export function exportPostgreSQL({
|
||||
|
||||
// Create CREATE SCHEMA statements for all schemas
|
||||
let sqlScript = '';
|
||||
if (!onlyRelationships) {
|
||||
const schemas = new Set<string>();
|
||||
const schemas = new Set<string>();
|
||||
|
||||
tables.forEach((table) => {
|
||||
if (table.schema) {
|
||||
schemas.add(table.schema);
|
||||
}
|
||||
});
|
||||
|
||||
// Also collect schemas from custom types
|
||||
customTypes.forEach((customType) => {
|
||||
if (customType.schema) {
|
||||
schemas.add(customType.schema);
|
||||
}
|
||||
});
|
||||
|
||||
// Add schema creation statements
|
||||
schemas.forEach((schema) => {
|
||||
sqlScript += `CREATE SCHEMA IF NOT EXISTS "${schema}";\n`;
|
||||
});
|
||||
if (schemas.size > 0) {
|
||||
sqlScript += '\n';
|
||||
tables.forEach((table) => {
|
||||
if (table.schema) {
|
||||
schemas.add(table.schema);
|
||||
}
|
||||
});
|
||||
|
||||
// Add custom types (enums and composite types)
|
||||
sqlScript += exportCustomTypes(customTypes);
|
||||
|
||||
// Add sequence creation statements
|
||||
const sequences = new Set<string>();
|
||||
|
||||
tables.forEach((table) => {
|
||||
table.fields.forEach((field) => {
|
||||
if (field.default) {
|
||||
// Match nextval('schema.sequence_name') or nextval('sequence_name')
|
||||
const match = field.default.match(
|
||||
/nextval\('([^']+)'(?:::[^)]+)?\)/
|
||||
);
|
||||
if (match) {
|
||||
sequences.add(match[1]);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
sequences.forEach((sequence) => {
|
||||
sqlScript += `CREATE SEQUENCE IF NOT EXISTS ${sequence};\n`;
|
||||
});
|
||||
if (sequences.size > 0) {
|
||||
sqlScript += '\n';
|
||||
// Also collect schemas from custom types
|
||||
customTypes.forEach((customType) => {
|
||||
if (customType.schema) {
|
||||
schemas.add(customType.schema);
|
||||
}
|
||||
});
|
||||
|
||||
// Generate table creation SQL
|
||||
sqlScript += tables
|
||||
.map((table: DBTable) => {
|
||||
// Skip views
|
||||
if (table.isView) {
|
||||
return '';
|
||||
}
|
||||
// Add schema creation statements
|
||||
schemas.forEach((schema) => {
|
||||
sqlScript += `CREATE SCHEMA IF NOT EXISTS "${schema}";\n`;
|
||||
});
|
||||
sqlScript += '\n';
|
||||
|
||||
const tableName = table.schema
|
||||
? `"${table.schema}"."${table.name}"`
|
||||
: `"${table.name}"`;
|
||||
// Add custom types (enums and composite types)
|
||||
sqlScript += exportCustomTypes(customTypes);
|
||||
|
||||
// Get primary key fields
|
||||
const primaryKeyFields = table.fields.filter(
|
||||
(f) => f.primaryKey
|
||||
// Add sequence creation statements
|
||||
const sequences = new Set<string>();
|
||||
|
||||
tables.forEach((table) => {
|
||||
table.fields.forEach((field) => {
|
||||
if (field.default) {
|
||||
// Match nextval('schema.sequence_name') or nextval('sequence_name')
|
||||
const match = field.default.match(
|
||||
/nextval\('([^']+)'(?:::[^)]+)?\)/
|
||||
);
|
||||
if (match) {
|
||||
sequences.add(match[1]);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
return `${
|
||||
table.comments ? formatTableComment(table.comments) : ''
|
||||
}CREATE TABLE ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `"${field.name}"`;
|
||||
sequences.forEach((sequence) => {
|
||||
sqlScript += `CREATE SEQUENCE IF NOT EXISTS ${sequence};\n`;
|
||||
});
|
||||
sqlScript += '\n';
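// Editor's note (not part of this commit): what the nextval() pattern used above
// extracts from typical PostgreSQL column defaults.
const nextvalPattern = /nextval\('([^']+)'(?:::[^)]+)?\)/;

// "nextval('users_id_seq')"                  -> captures "users_id_seq"
// "nextval('public.users_id_seq'::regclass)" -> captures "public.users_id_seq"
const match = "nextval('public.users_id_seq'::regclass)".match(nextvalPattern);
console.log(match?.[1]); // "public.users_id_seq"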
|
||||
|
||||
// Handle type name - map problematic types to PostgreSQL compatible types
|
||||
const typeName = mapPostgresType(
|
||||
field.type.name,
|
||||
field.name
|
||||
);
|
||||
// Generate table creation SQL
|
||||
sqlScript += tables
|
||||
.map((table: DBTable) => {
|
||||
// Skip views
|
||||
if (table.isView) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Handle PostgreSQL specific type formatting
|
||||
let typeWithSize = typeName;
|
||||
let serialType = null;
|
||||
const tableName = table.schema
|
||||
? `"${table.schema}"."${table.name}"`
|
||||
: `"${table.name}"`;
|
||||
|
||||
if (field.increment && !field.nullable) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'integer' ||
|
||||
typeName.toLowerCase() === 'int'
|
||||
) {
|
||||
serialType = 'SERIAL';
|
||||
} else if (typeName.toLowerCase() === 'bigint') {
|
||||
serialType = 'BIGSERIAL';
|
||||
} else if (typeName.toLowerCase() === 'smallint') {
|
||||
serialType = 'SMALLSERIAL';
|
||||
}
|
||||
}
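// Editor's sketch (not part of this commit): the SERIAL mapping above as a lookup.
// PostgreSQL's SERIAL pseudo-types bundle the integer type with an owned sequence
// and a DEFAULT nextval(...), so auto-incremented NOT NULL integer columns can be
// emitted as SERIAL/BIGSERIAL/SMALLSERIAL instead of an explicit sequence setup.
const serialTypeFor = (typeName: string): string | null => {
    const map: Record<string, string> = {
        integer: 'SERIAL',
        int: 'SERIAL',
        bigint: 'BIGSERIAL',
        smallint: 'SMALLSERIAL',
    };
    return map[typeName.toLowerCase()] ?? null;
};

// serialTypeFor('BIGINT') -> 'BIGSERIAL'; serialTypeFor('uuid') -> null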
|
||||
// Get primary key fields
|
||||
const primaryKeyFields = table.fields.filter((f) => f.primaryKey);
|
||||
|
||||
if (field.characterMaximumLength) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'varchar' ||
|
||||
typeName.toLowerCase() ===
|
||||
'character varying' ||
|
||||
typeName.toLowerCase() === 'char' ||
|
||||
typeName.toLowerCase() === 'character'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.characterMaximumLength})`;
|
||||
}
|
||||
}
|
||||
if (field.precision && field.scale) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision}, ${field.scale})`;
|
||||
}
|
||||
} else if (field.precision) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision})`;
|
||||
}
|
||||
}
|
||||
return `${
|
||||
table.comments ? formatTableComment(table.comments) : ''
|
||||
}CREATE TABLE ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `"${field.name}"`;
|
||||
|
||||
// Handle array types (check if the type name ends with '[]')
|
||||
if (typeName.endsWith('[]')) {
|
||||
typeWithSize =
|
||||
typeWithSize.replace('[]', '') + '[]';
|
||||
}
|
||||
// Handle type name - map problematic types to PostgreSQL compatible types
|
||||
const typeName = mapPostgresType(
|
||||
field.type.name,
|
||||
field.name
|
||||
);
|
||||
|
||||
const notNull = field.nullable ? '' : ' NOT NULL';
|
||||
// Handle PostgreSQL specific type formatting
|
||||
let typeWithSize = typeName;
|
||||
let serialType = null;
|
||||
|
||||
// Handle identity generation
|
||||
let identity = '';
|
||||
if (field.increment && !field.nullable) {
|
||||
if (
|
||||
field.default &&
|
||||
field.default.includes('nextval')
|
||||
typeName.toLowerCase() === 'integer' ||
|
||||
typeName.toLowerCase() === 'int'
|
||||
) {
|
||||
// PostgreSQL already handles this with DEFAULT nextval()
|
||||
} else if (
|
||||
field.default &&
|
||||
field.default.toLowerCase().includes('identity')
|
||||
) {
|
||||
identity = ' GENERATED BY DEFAULT AS IDENTITY';
|
||||
serialType = 'SERIAL';
|
||||
} else if (typeName.toLowerCase() === 'bigint') {
|
||||
serialType = 'BIGSERIAL';
|
||||
} else if (typeName.toLowerCase() === 'smallint') {
|
||||
serialType = 'SMALLSERIAL';
|
||||
}
|
||||
}
|
||||
|
||||
// Only add UNIQUE constraint if the field is not part of the primary key
|
||||
// This avoids redundant uniqueness constraints
|
||||
const unique =
|
||||
!field.primaryKey && field.unique ? ' UNIQUE' : '';
|
||||
if (field.characterMaximumLength) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'varchar' ||
|
||||
typeName.toLowerCase() === 'character varying' ||
|
||||
typeName.toLowerCase() === 'char' ||
|
||||
typeName.toLowerCase() === 'character'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.characterMaximumLength})`;
|
||||
}
|
||||
} else if (field.precision && field.scale) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision}, ${field.scale})`;
|
||||
}
|
||||
} else if (field.precision) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision})`;
|
||||
}
|
||||
}
|
||||
|
||||
// Handle default value using PostgreSQL specific parser
|
||||
const defaultValue =
|
||||
field.default &&
|
||||
!field.default.toLowerCase().includes('identity')
|
||||
? ` DEFAULT ${parsePostgresDefault(field)}`
|
||||
: '';
|
||||
// Handle array types (check if the type name ends with '[]')
|
||||
if (typeName.endsWith('[]')) {
|
||||
typeWithSize = typeWithSize.replace('[]', '') + '[]';
|
||||
}
|
||||
|
||||
// Do not add PRIMARY KEY as a column constraint - will add as table constraint
|
||||
return `${exportFieldComment(field.comments ?? '')} ${fieldName} ${serialType || typeWithSize}${serialType ? '' : notNull}${identity}${unique}${defaultValue}`;
|
||||
})
|
||||
.join(',\n')}${
|
||||
primaryKeyFields.length > 0
|
||||
? `,\n PRIMARY KEY (${primaryKeyFields
|
||||
.map((f) => `"${f.name}"`)
|
||||
.join(', ')})`
|
||||
: ''
|
||||
}\n);${
|
||||
// Add table comments
|
||||
table.comments
|
||||
? `\nCOMMENT ON TABLE ${tableName} IS '${escapeSQLComment(table.comments)}';`
|
||||
: ''
|
||||
}${
|
||||
// Add column comments
|
||||
table.fields
|
||||
.filter((f) => f.comments)
|
||||
.map(
|
||||
(f) =>
|
||||
`\nCOMMENT ON COLUMN ${tableName}."${f.name}" IS '${escapeSQLComment(f.comments || '')}';`
|
||||
)
|
||||
.join('')
|
||||
}${
|
||||
// Add indexes only for non-primary key fields or composite indexes
|
||||
// This avoids duplicate indexes on primary key columns
|
||||
(() => {
|
||||
const validIndexes = table.indexes
|
||||
.map((index) => {
|
||||
// Get the list of fields for this index
|
||||
const indexFields = index.fieldIds
|
||||
.map((fieldId) => {
|
||||
const field = table.fields.find(
|
||||
(f) => f.id === fieldId
|
||||
);
|
||||
return field ? field : null;
|
||||
})
|
||||
.filter(Boolean);
|
||||
const notNull = field.nullable ? '' : ' NOT NULL';
|
||||
|
||||
// Skip if this index exactly matches the primary key fields
|
||||
// This prevents creating redundant indexes
|
||||
if (
|
||||
primaryKeyFields.length ===
|
||||
indexFields.length &&
|
||||
primaryKeyFields.every((pk) =>
|
||||
indexFields.some(
|
||||
(field) =>
|
||||
field && field.id === pk.id
|
||||
)
|
||||
)
|
||||
) {
|
||||
return '';
|
||||
}
|
||||
// Handle identity generation
|
||||
let identity = '';
|
||||
if (field.default && field.default.includes('nextval')) {
|
||||
// PostgreSQL already handles this with DEFAULT nextval()
|
||||
} else if (
|
||||
field.default &&
|
||||
field.default.toLowerCase().includes('identity')
|
||||
) {
|
||||
identity = ' GENERATED BY DEFAULT AS IDENTITY';
|
||||
}
|
||||
|
||||
// Create unique index name using table name and index name
|
||||
// This ensures index names are unique across the database
|
||||
const safeTableName = table.name.replace(
|
||||
/[^a-zA-Z0-9_]/g,
|
||||
'_'
|
||||
// Only add UNIQUE constraint if the field is not part of the primary key
|
||||
// This avoids redundant uniqueness constraints
|
||||
const unique =
|
||||
!field.primaryKey && field.unique ? ' UNIQUE' : '';
|
||||
|
||||
// Handle default value using PostgreSQL specific parser
|
||||
const defaultValue =
|
||||
field.default &&
|
||||
!field.default.toLowerCase().includes('identity')
|
||||
? ` DEFAULT ${parsePostgresDefault(field)}`
|
||||
: '';
|
||||
|
||||
// Do not add PRIMARY KEY as a column constraint - will add as table constraint
|
||||
return `${exportFieldComment(field.comments ?? '')} ${fieldName} ${serialType || typeWithSize}${serialType ? '' : notNull}${identity}${unique}${defaultValue}`;
|
||||
})
|
||||
.join(',\n')}${
|
||||
primaryKeyFields.length > 0
|
||||
? `,\n PRIMARY KEY (${primaryKeyFields
|
||||
.map((f) => `"${f.name}"`)
|
||||
.join(', ')})`
|
||||
: ''
|
||||
}\n);\n\n${
|
||||
// Add table comments
|
||||
table.comments
|
||||
? `COMMENT ON TABLE ${tableName} IS '${escapeSQLComment(table.comments)}';\n\n`
|
||||
: ''
|
||||
}${
|
||||
// Add column comments
|
||||
table.fields
|
||||
.filter((f) => f.comments)
|
||||
.map(
|
||||
(f) =>
|
||||
`COMMENT ON COLUMN ${tableName}."${f.name}" IS '${escapeSQLComment(f.comments || '')}';\n`
|
||||
)
|
||||
.join('')
|
||||
}\n${
|
||||
// Add indexes only for non-primary key fields or composite indexes
|
||||
// This avoids duplicate indexes on primary key columns
|
||||
table.indexes
|
||||
.map((index) => {
|
||||
// Get the list of fields for this index
|
||||
const indexFields = index.fieldIds
|
||||
.map((fieldId) => {
|
||||
const field = table.fields.find(
|
||||
(f) => f.id === fieldId
|
||||
);
|
||||
const safeIndexName = index.name.replace(
|
||||
/[^a-zA-Z0-9_]/g,
|
||||
'_'
|
||||
);
|
||||
|
||||
// Limit index name length to avoid PostgreSQL's 63-character identifier limit
|
||||
let combinedName = `${safeTableName}_${safeIndexName}`;
|
||||
if (combinedName.length > 60) {
|
||||
// If too long, use just the index name or a truncated version
|
||||
combinedName =
|
||||
safeIndexName.length > 60
|
||||
? safeIndexName.substring(0, 60)
|
||||
: safeIndexName;
|
||||
}
|
||||
|
||||
const indexName = `"${combinedName}"`;
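// Editor's sketch (not part of this commit): PostgreSQL truncates identifiers
// longer than 63 bytes (NAMEDATALEN - 1), so generated index names are sanitised
// and capped at 60 characters before quoting, as done above.
const safePgIdentifier = (name: string, maxLength = 60): string =>
    name.replace(/[^a-zA-Z0-9_]/g, '_').substring(0, maxLength);

// safePgIdentifier('orders_idx_customer-id_created-at')
//   -> 'orders_idx_customer_id_created_at'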
|
||||
|
||||
// Get the properly quoted field names
|
||||
const indexFieldNames = indexFields
|
||||
.map((field) =>
|
||||
field ? `"${field.name}"` : ''
|
||||
)
|
||||
.filter(Boolean);
|
||||
|
||||
return indexFieldNames.length > 0
|
||||
? `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName} ON ${tableName} (${indexFieldNames.join(', ')});`
|
||||
: '';
|
||||
return field ? field : null;
|
||||
})
|
||||
.filter(Boolean);
|
||||
|
||||
return validIndexes.length > 0
|
||||
? `\n-- Indexes\n${validIndexes.join('\n')}`
|
||||
// Skip if this index exactly matches the primary key fields
|
||||
// This prevents creating redundant indexes
|
||||
if (
|
||||
primaryKeyFields.length === indexFields.length &&
|
||||
primaryKeyFields.every((pk) =>
|
||||
indexFields.some(
|
||||
(field) => field && field.id === pk.id
|
||||
)
|
||||
)
|
||||
) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Create unique index name using table name and index name
|
||||
// This ensures index names are unique across the database
|
||||
const safeTableName = table.name.replace(
|
||||
/[^a-zA-Z0-9_]/g,
|
||||
'_'
|
||||
);
|
||||
const safeIndexName = index.name.replace(
|
||||
/[^a-zA-Z0-9_]/g,
|
||||
'_'
|
||||
);
|
||||
|
||||
// Limit index name length to avoid PostgreSQL's 63-character identifier limit
|
||||
let combinedName = `${safeTableName}_${safeIndexName}`;
|
||||
if (combinedName.length > 60) {
|
||||
// If too long, use just the index name or a truncated version
|
||||
combinedName =
|
||||
safeIndexName.length > 60
|
||||
? safeIndexName.substring(0, 60)
|
||||
: safeIndexName;
|
||||
}
|
||||
|
||||
const indexName = `"${combinedName}"`;
|
||||
|
||||
// Get the properly quoted field names
|
||||
const indexFieldNames = indexFields
|
||||
.map((field) => (field ? `"${field.name}"` : ''))
|
||||
.filter(Boolean);
|
||||
|
||||
return indexFieldNames.length > 0
|
||||
? `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName}\nON ${tableName} (${indexFieldNames.join(', ')});\n\n`
|
||||
: '';
|
||||
})()
|
||||
}\n`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings (views)
|
||||
.join('\n');
|
||||
}
|
||||
})
|
||||
.filter(Boolean)
|
||||
.join('')
|
||||
}`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings (views)
|
||||
.join('\n');
|
||||
|
||||
// Generate foreign keys
|
||||
if (relationships.length > 0) {
|
||||
sqlScript += '\n-- Foreign key constraints\n';
|
||||
sqlScript += `\n${relationships
|
||||
.map((r: DBRelationship) => {
|
||||
const sourceTable = tables.find((t) => t.id === r.sourceTableId);
|
||||
const targetTable = tables.find((t) => t.id === r.targetTableId);
|
||||
|
||||
// Process all relationships and create FK objects with schema info
|
||||
const foreignKeys = relationships
|
||||
.map((r: DBRelationship) => {
|
||||
const sourceTable = tables.find(
|
||||
(t) => t.id === r.sourceTableId
|
||||
);
|
||||
const targetTable = tables.find(
|
||||
(t) => t.id === r.targetTableId
|
||||
);
|
||||
if (
    !sourceTable ||
    !targetTable ||
    sourceTable.isView ||
    targetTable.isView
) {
    return '';
}
|
||||
const sourceField = sourceTable.fields.find(
    (f) => f.id === r.sourceFieldId
);
const targetField = targetTable.fields.find(
    (f) => f.id === r.targetFieldId
);

if (!sourceField || !targetField) {
    return '';
}
|
||||
// Determine which table should have the foreign key based on cardinality
let fkTable, fkField, refTable, refField;

if (
    r.sourceCardinality === 'one' &&
    r.targetCardinality === 'many'
) {
    // FK goes on target table
    fkTable = targetTable;
    fkField = targetField;
    refTable = sourceTable;
    refField = sourceField;
} else if (
    r.sourceCardinality === 'many' &&
    r.targetCardinality === 'one'
) {
    // FK goes on source table
    fkTable = sourceTable;
    fkField = sourceField;
    refTable = targetTable;
    refField = targetField;
} else if (
    r.sourceCardinality === 'one' &&
    r.targetCardinality === 'one'
) {
    // For 1:1, FK can go on either side, but typically goes on the table that references the other
    // We'll keep the current behavior for 1:1
    fkTable = sourceTable;
    fkField = sourceField;
    refTable = targetTable;
    refField = targetField;
} else {
    // Many-to-many relationships need a junction table, skip for now
    return '';
}
|
||||
const fkTableName = fkTable.schema
    ? `"${fkTable.schema}"."${fkTable.name}"`
    : `"${fkTable.name}"`;
const refTableName = refTable.schema
    ? `"${refTable.schema}"."${refTable.name}"`
    : `"${refTable.name}"`;
|
||||
// Create a unique constraint name by combining table and field names
// Ensure it stays within PostgreSQL's 63-character limit for identifiers
// and doesn't get truncated in a way that breaks SQL syntax
const baseName = `fk_${fkTable.name}_${fkField.name}_${refTable.name}_${refField.name}`;
// Limit to 60 chars (63 minus quotes) to ensure the whole identifier stays within limits
const safeConstraintName =
    baseName.length > 60
        ? baseName.substring(0, 60).replace(/[^a-zA-Z0-9_]/g, '_')
        : baseName.replace(/[^a-zA-Z0-9_]/g, '_');
const constraintName = `"${safeConstraintName}"`;
|
||||
|
||||
return {
|
||||
schema: fkTable.schema || 'public',
|
||||
sql: `ALTER TABLE ${fkTableName} ADD CONSTRAINT ${constraintName} FOREIGN KEY("${fkField.name}") REFERENCES ${refTableName}("${refField.name}");`,
|
||||
};
|
||||
})
|
||||
.filter(Boolean); // Remove empty objects
|
||||
|
||||
// Group foreign keys by schema
|
||||
const fksBySchema = foreignKeys.reduce(
|
||||
(acc, fk) => {
|
||||
if (!fk) return acc;
|
||||
const schema = fk.schema;
|
||||
if (!acc[schema]) {
|
||||
acc[schema] = [];
|
||||
}
|
||||
acc[schema].push(fk.sql);
|
||||
return acc;
|
||||
},
|
||||
{} as Record<string, string[]>
|
||||
);
|
||||
|
||||
// Sort schemas and generate SQL with separators
|
||||
const sortedSchemas = Object.keys(fksBySchema).sort();
|
||||
const fkSql = sortedSchemas
|
||||
.map((schema, index) => {
|
||||
const schemaFks = fksBySchema[schema].join('\n');
|
||||
if (index === 0) {
|
||||
return `-- Schema: ${schema}\n${schemaFks}`;
|
||||
} else {
|
||||
return `\n-- Schema: ${schema}\n${schemaFks}`;
|
||||
}
|
||||
})
|
||||
.join('\n');
|
||||
|
||||
sqlScript += fkSql;
|
||||
}
|
||||
return `ALTER TABLE ${fkTableName}\nADD CONSTRAINT ${constraintName} FOREIGN KEY("${fkField.name}") REFERENCES ${refTableName}("${refField.name}");\n`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings
|
||||
.join('\n')}`;
|
||||
|
||||
return sqlScript;
|
||||
}
|
||||
|
||||
@@ -140,13 +140,7 @@ function mapSQLiteType(typeName: string, isPrimaryKey: boolean): string {
|
||||
return typeName;
|
||||
}
|
||||
|
||||
export function exportSQLite({
|
||||
diagram,
|
||||
onlyRelationships = false,
|
||||
}: {
|
||||
diagram: Diagram;
|
||||
onlyRelationships?: boolean;
|
||||
}): string {
|
||||
export function exportSQLite(diagram: Diagram): string {
|
||||
if (!diagram.tables || !diagram.relationships) {
|
||||
return '';
|
||||
}
|
||||
@@ -155,10 +149,10 @@ export function exportSQLite({
|
||||
const relationships = diagram.relationships;
|
||||
|
||||
// Start SQL script - SQLite doesn't use schemas, so we skip schema creation
|
||||
let sqlScript = '-- SQLite database export\n';
|
||||
let sqlScript = '-- SQLite database export\n\n';
|
||||
|
||||
// Begin transaction for faster import
|
||||
sqlScript += 'BEGIN TRANSACTION;\n';
|
||||
sqlScript += 'BEGIN TRANSACTION;\n\n';
|
||||
|
||||
// SQLite doesn't have sequences, so we skip sequence creation
|
||||
|
||||
@@ -172,167 +166,151 @@ export function exportSQLite({
|
||||
'sqlite_master',
|
||||
];
|
||||
|
||||
if (!onlyRelationships) {
|
||||
// Generate table creation SQL
|
||||
sqlScript += tables
|
||||
.map((table: DBTable) => {
|
||||
// Skip views
|
||||
if (table.isView) {
|
||||
return '';
|
||||
}
|
||||
// Generate table creation SQL
|
||||
sqlScript += tables
|
||||
.map((table: DBTable) => {
|
||||
// Skip views
|
||||
if (table.isView) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Skip SQLite system tables
if (sqliteSystemTables.includes(table.name.toLowerCase())) {
    return `-- Skipping SQLite system table: "${table.name}"\n`;
}
|
||||
|
||||
// SQLite doesn't use schema prefixes, so we use just the table name
// Include the schema in a comment if it exists
const schemaComment = table.schema
    ? `-- Original schema: ${table.schema}\n`
    : '';
const tableName = `"${table.name}"`;
|
||||
|
||||
// Get primary key fields
const primaryKeyFields = table.fields.filter((f) => f.primaryKey);
|
||||
|
||||
// Check if this is a single-column INTEGER PRIMARY KEY (for AUTOINCREMENT)
const singleIntegerPrimaryKey =
    primaryKeyFields.length === 1 &&
    (primaryKeyFields[0].type.name.toLowerCase() === 'integer' ||
        primaryKeyFields[0].type.name.toLowerCase() === 'int');
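// Editor's sketch (not part of this commit): why the single INTEGER PK matters.
// SQLite only honours AUTOINCREMENT on a column declared exactly as
// `INTEGER PRIMARY KEY` (the rowid alias); composite or non-INTEGER keys fall
// through to the table-level PRIMARY KEY (...) constraint emitted further down.
const inlinePrimaryKeyClause = (
    isPrimaryKey: boolean,
    singleIntegerPk: boolean,
    autoIncrement: boolean
): string =>
    isPrimaryKey && singleIntegerPk
        ? ` PRIMARY KEY${autoIncrement ? ' AUTOINCREMENT' : ''}`
        : '';

// inlinePrimaryKeyClause(true, true, true)  -> ' PRIMARY KEY AUTOINCREMENT'
// inlinePrimaryKeyClause(true, false, true) -> ''  (handled by the table constraint)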
|
||||
|
||||
return `${schemaComment}${
|
||||
table.comments ? formatTableComment(table.comments) : ''
|
||||
}CREATE TABLE IF NOT EXISTS ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `"${field.name}"`;
|
||||
return `${schemaComment}${
|
||||
table.comments ? formatTableComment(table.comments) : ''
|
||||
}CREATE TABLE IF NOT EXISTS ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `"${field.name}"`;
|
||||
|
||||
// Handle type name - map to SQLite compatible types
|
||||
const typeName = mapSQLiteType(
|
||||
field.type.name,
|
||||
field.primaryKey
|
||||
);
|
||||
// Handle type name - map to SQLite compatible types
|
||||
const typeName = mapSQLiteType(
|
||||
field.type.name,
|
||||
field.primaryKey
|
||||
);
|
||||
|
||||
// SQLite ignores length specifiers, so we don't add them
|
||||
// We'll keep this simple without size info
|
||||
const typeWithoutSize = typeName;
|
||||
// SQLite ignores length specifiers, so we don't add them
|
||||
// We'll keep this simple without size info
|
||||
const typeWithoutSize = typeName;
|
||||
|
||||
const notNull = field.nullable ? '' : ' NOT NULL';
|
||||
const notNull = field.nullable ? '' : ' NOT NULL';
|
||||
|
||||
// Handle autoincrement - only works with INTEGER PRIMARY KEY
|
||||
let autoIncrement = '';
|
||||
if (
|
||||
field.primaryKey &&
|
||||
singleIntegerPrimaryKey &&
|
||||
(field.default
|
||||
// Handle autoincrement - only works with INTEGER PRIMARY KEY
|
||||
let autoIncrement = '';
|
||||
if (
|
||||
field.primaryKey &&
|
||||
singleIntegerPrimaryKey &&
|
||||
(field.default?.toLowerCase().includes('identity') ||
|
||||
field.default
|
||||
?.toLowerCase()
|
||||
.includes('identity') ||
|
||||
field.default
|
||||
?.toLowerCase()
|
||||
.includes('autoincrement') ||
|
||||
field.default?.includes('nextval'))
|
||||
) {
|
||||
autoIncrement = ' AUTOINCREMENT';
|
||||
.includes('autoincrement') ||
|
||||
field.default?.includes('nextval'))
|
||||
) {
|
||||
autoIncrement = ' AUTOINCREMENT';
|
||||
}
|
||||
|
||||
// Only add UNIQUE constraint if the field is not part of the primary key
|
||||
const unique =
|
||||
!field.primaryKey && field.unique ? ' UNIQUE' : '';
|
||||
|
||||
// Handle default value - Special handling for datetime() function
|
||||
let defaultValue = '';
|
||||
if (
|
||||
field.default &&
|
||||
!field.default.toLowerCase().includes('identity') &&
|
||||
!field.default
|
||||
.toLowerCase()
|
||||
.includes('autoincrement') &&
|
||||
!field.default.includes('nextval')
|
||||
) {
|
||||
// Special handling for quoted functions like 'datetime(\'\'now\'\')' - remove extra quotes
|
||||
if (field.default.includes("datetime(''now'')")) {
|
||||
defaultValue = ' DEFAULT CURRENT_TIMESTAMP';
|
||||
} else {
|
||||
defaultValue = ` DEFAULT ${parseSQLiteDefault(field)}`;
|
||||
}
|
||||
}
|
||||
|
||||
// Only add UNIQUE constraint if the field is not part of the primary key
|
||||
const unique =
|
||||
!field.primaryKey && field.unique ? ' UNIQUE' : '';
|
||||
// Add PRIMARY KEY inline only for single INTEGER primary key
|
||||
const primaryKey =
|
||||
field.primaryKey && singleIntegerPrimaryKey
|
||||
? ' PRIMARY KEY' + autoIncrement
|
||||
: '';
|
||||
|
||||
// Handle default value - Special handling for datetime() function
|
||||
let defaultValue = '';
|
||||
if (
|
||||
field.default &&
|
||||
!field.default.toLowerCase().includes('identity') &&
|
||||
!field.default
|
||||
.toLowerCase()
|
||||
.includes('autoincrement') &&
|
||||
!field.default.includes('nextval')
|
||||
) {
|
||||
// Special handling for quoted functions like 'datetime(\'\'now\'\')' - remove extra quotes
|
||||
if (field.default.includes("datetime(''now'')")) {
|
||||
defaultValue = ' DEFAULT CURRENT_TIMESTAMP';
|
||||
} else {
|
||||
defaultValue = ` DEFAULT ${parseSQLiteDefault(field)}`;
|
||||
}
|
||||
}
|
||||
|
||||
// Add PRIMARY KEY inline only for single INTEGER primary key
|
||||
const primaryKey =
|
||||
field.primaryKey && singleIntegerPrimaryKey
|
||||
? ' PRIMARY KEY' + autoIncrement
|
||||
: '';
|
||||
|
||||
return `${exportFieldComment(field.comments ?? '')} ${fieldName} ${typeWithoutSize}${primaryKey}${notNull}${unique}${defaultValue}`;
|
||||
})
|
||||
.join(',\n')}${
|
||||
// Add PRIMARY KEY as table constraint for composite primary keys or non-INTEGER primary keys
|
||||
primaryKeyFields.length > 0 && !singleIntegerPrimaryKey
|
||||
? `,\n PRIMARY KEY (${primaryKeyFields
|
||||
.map((f) => `"${f.name}"`)
|
||||
.join(', ')})`
|
||||
: ''
|
||||
}\n);\n${
|
||||
// Add indexes - SQLite doesn't support indexes in CREATE TABLE
|
||||
(() => {
|
||||
const validIndexes = table.indexes
|
||||
.map((index) => {
|
||||
// Skip indexes that exactly match the primary key
|
||||
const indexFields = index.fieldIds
|
||||
.map((fieldId) => {
|
||||
const field = table.fields.find(
|
||||
(f) => f.id === fieldId
|
||||
);
|
||||
return field ? field : null;
|
||||
})
|
||||
.filter(Boolean);
|
||||
|
||||
// Get the properly quoted field names
|
||||
const indexFieldNames = indexFields
|
||||
.map((field) =>
|
||||
field ? `"${field.name}"` : ''
|
||||
)
|
||||
.filter(Boolean);
|
||||
|
||||
// Skip if this index exactly matches the primary key fields
|
||||
if (
|
||||
primaryKeyFields.length ===
|
||||
indexFields.length &&
|
||||
primaryKeyFields.every((pk) =>
|
||||
indexFields.some(
|
||||
(field) =>
|
||||
field && field.id === pk.id
|
||||
)
|
||||
)
|
||||
) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Create safe index name
|
||||
const safeIndexName =
|
||||
`${table.name}_${index.name}`
|
||||
.replace(/[^a-zA-Z0-9_]/g, '_')
|
||||
.substring(0, 60);
|
||||
|
||||
return indexFieldNames.length > 0
|
||||
? `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX IF NOT EXISTS "${safeIndexName}"\nON ${tableName} (${indexFieldNames.join(', ')});`
|
||||
: '';
|
||||
return `${exportFieldComment(field.comments ?? '')} ${fieldName} ${typeWithoutSize}${primaryKey}${notNull}${unique}${defaultValue}`;
|
||||
})
|
||||
.join(',\n')}${
|
||||
// Add PRIMARY KEY as table constraint for composite primary keys or non-INTEGER primary keys
|
||||
primaryKeyFields.length > 0 && !singleIntegerPrimaryKey
|
||||
? `,\n PRIMARY KEY (${primaryKeyFields
|
||||
.map((f) => `"${f.name}"`)
|
||||
.join(', ')})`
|
||||
: ''
|
||||
}\n);\n\n${
|
||||
// Add indexes - SQLite doesn't support indexes in CREATE TABLE
|
||||
table.indexes
|
||||
.map((index) => {
|
||||
// Skip indexes that exactly match the primary key
|
||||
const indexFields = index.fieldIds
|
||||
.map((fieldId) => {
|
||||
const field = table.fields.find(
|
||||
(f) => f.id === fieldId
|
||||
);
|
||||
return field ? field : null;
|
||||
})
|
||||
.filter(Boolean);
|
||||
|
||||
return validIndexes.length > 0
|
||||
? `\n-- Indexes\n${validIndexes.join('\n')}`
|
||||
// Get the properly quoted field names
|
||||
const indexFieldNames = indexFields
|
||||
.map((field) => (field ? `"${field.name}"` : ''))
|
||||
.filter(Boolean);
|
||||
|
||||
// Skip if this index exactly matches the primary key fields
|
||||
if (
|
||||
primaryKeyFields.length === indexFields.length &&
|
||||
primaryKeyFields.every((pk) =>
|
||||
indexFields.some(
|
||||
(field) => field && field.id === pk.id
|
||||
)
|
||||
)
|
||||
) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Create safe index name
|
||||
const safeIndexName = `${table.name}_${index.name}`
|
||||
.replace(/[^a-zA-Z0-9_]/g, '_')
|
||||
.substring(0, 60);
|
||||
|
||||
return indexFieldNames.length > 0
|
||||
? `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX IF NOT EXISTS "${safeIndexName}"\nON ${tableName} (${indexFieldNames.join(', ')});\n`
|
||||
: '';
|
||||
})()
|
||||
}\n`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings (views)
|
||||
.join('\n');
|
||||
}
|
||||
})
|
||||
.filter(Boolean)
|
||||
.join('\n')
|
||||
}`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings (views)
|
||||
.join('\n');
|
||||
|
||||
// Generate table constraints and triggers for foreign keys
|
||||
// SQLite handles foreign keys differently - we'll add them with CREATE TABLE statements
|
||||
// But we'll also provide individual ALTER TABLE statements as comments for reference
|
||||
@@ -341,7 +319,7 @@ export function exportSQLite({
|
||||
sqlScript += '\n-- Foreign key constraints\n';
|
||||
sqlScript +=
|
||||
'-- Note: SQLite requires foreign_keys pragma to be enabled:\n';
|
||||
sqlScript += '-- PRAGMA foreign_keys = ON;\n';
|
||||
sqlScript += '-- PRAGMA foreign_keys = ON;\n\n';
|
||||
|
||||
relationships.forEach((r: DBRelationship) => {
|
||||
const sourceTable = tables.find((t) => t.id === r.sourceTableId);
|
||||
|
||||
@@ -36,12 +36,10 @@ export const exportBaseSQL = ({
diagram,
targetDatabaseType,
isDBMLFlow = false,
onlyRelationships = false,
}: {
diagram: Diagram;
targetDatabaseType: DatabaseType;
isDBMLFlow?: boolean;
onlyRelationships?: boolean;
}): string => {
const { tables, relationships } = diagram;

@@ -52,16 +50,16 @@ export const exportBaseSQL = ({
if (!isDBMLFlow && diagram.databaseType === targetDatabaseType) {
switch (diagram.databaseType) {
case DatabaseType.SQL_SERVER:
return exportMSSQL({ diagram, onlyRelationships });
return exportMSSQL(diagram);
case DatabaseType.POSTGRESQL:
return exportPostgreSQL({ diagram, onlyRelationships });
return exportPostgreSQL(diagram);
case DatabaseType.SQLITE:
return exportSQLite({ diagram, onlyRelationships });
return exportSQLite(diagram);
case DatabaseType.MYSQL:
case DatabaseType.MARIADB:
return exportMySQL({ diagram, onlyRelationships });
return exportMySQL(diagram);
default:
return exportPostgreSQL({ diagram, onlyRelationships });
return exportPostgreSQL(diagram);
}
}

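The hunk above swaps bare `exportX(diagram)` calls for an options object so `onlyRelationships` can be threaded through each dialect exporter. A hedged, self-contained sketch of that call-shape change (types simplified; not the project's actual signatures):

// Simplified sketch: every dialect exporter accepts the same options bag.
type ExportOptionsSketch<TDiagram> = {
    diagram: TDiagram;
    onlyRelationships?: boolean;
};
type ExporterSketch<TDiagram> = (options: ExportOptionsSketch<TDiagram>) => string;

const pickExporter = <TDiagram>(
    databaseType: string,
    exporters: Record<string, ExporterSketch<TDiagram>>,
    fallback: ExporterSketch<TDiagram>
): ExporterSketch<TDiagram> => exporters[databaseType] ?? fallback;

// Usage idea: pickExporter(diagram.databaseType, { sqlite: exportSQLite }, exportPostgreSQL)
//             ({ diagram, onlyRelationships: true });  // names assumed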
@@ -133,23 +131,7 @@ export const exportBaseSQL = ({
}
}
});
if (
diagram.customTypes.some(
(ct) =>
(ct.kind === 'enum' &&
ct.values &&
ct.values.length > 0 &&
targetDatabaseType === DatabaseType.POSTGRESQL &&
!isDBMLFlow) ||
(ct.kind === 'composite' &&
ct.fields &&
ct.fields.length > 0 &&
(targetDatabaseType === DatabaseType.POSTGRESQL ||
isDBMLFlow))
)
) {
sqlScript += '\n';
}
sqlScript += '\n'; // Add a newline if custom types were processed
}

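The condition above adds a separating newline only when at least one custom type is actually emitted: enums for a PostgreSQL target outside the DBML flow, or composites for PostgreSQL or the DBML flow. A compact predicate capturing the same rule, with an assumed minimal type shape:

// Assumed minimal shape of a custom type, for illustration only.
interface CustomTypeSketch {
    kind: 'enum' | 'composite';
    values?: string[];
    fields?: unknown[];
}

const emitsCustomTypeSQL = (
    ct: CustomTypeSketch,
    targetIsPostgres: boolean,
    isDBMLFlow: boolean
): boolean =>
    (ct.kind === 'enum' &&
        (ct.values?.length ?? 0) > 0 &&
        targetIsPostgres &&
        !isDBMLFlow) ||
    (ct.kind === 'composite' &&
        (ct.fields?.length ?? 0) > 0 &&
        (targetIsPostgres || isDBMLFlow));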
// Add CREATE SEQUENCE statements
@@ -172,9 +154,7 @@ export const exportBaseSQL = ({
sequences.forEach((sequence) => {
sqlScript += `CREATE SEQUENCE IF NOT EXISTS ${sequence};\n`;
});
if (sequences.size > 0) {
sqlScript += '\n';
}
sqlScript += '\n';

// Loop through each non-view table to generate the SQL statements
nonViewTables.forEach((table) => {
@@ -254,8 +234,7 @@ export const exportBaseSQL = ({
// Add size for character types
if (
field.characterMaximumLength &&
parseInt(field.characterMaximumLength) > 0 &&
field.type.name.toLowerCase() !== 'decimal'
parseInt(field.characterMaximumLength) > 0
) {
sqlScript += `(${field.characterMaximumLength})`;
} else if (field.type.name.toLowerCase().includes('varchar')) {
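The extra `!== 'decimal'` check above keeps a stray `characterMaximumLength` from being rendered as a size on DECIMAL columns, whose precision and scale are formatted elsewhere. A self-contained sketch of the guard (helper name assumed):

// Illustrative only: size suffix for character types, never for decimal.
const characterSizeSuffix = (
    typeName: string,
    characterMaximumLength?: string
): string =>
    characterMaximumLength &&
    parseInt(characterMaximumLength, 10) > 0 &&
    typeName.toLowerCase() !== 'decimal'
        ? `(${characterMaximumLength})`
        : '';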
@@ -337,7 +316,7 @@ export const exportBaseSQL = ({
sqlScript += `\n PRIMARY KEY (${pkFieldNames})`;
}

sqlScript += '\n);\n';
sqlScript += '\n);\n\n';

// Add table comment
if (table.comments) {
@@ -362,18 +341,15 @@ export const exportBaseSQL = ({
.join(', ');

if (fieldNames) {
const indexName =
table.schema && !isDBMLFlow
? `${table.schema}_${index.name}`
: index.name;
const indexName = table.schema
? `${table.schema}_${index.name}`
: index.name;
sqlScript += `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName} ON ${tableName} (${fieldNames});\n`;
}
});
});

if (nonViewTables.length > 0 && (relationships?.length ?? 0) > 0) {
sqlScript += '\n';
}
});

// Handle relationships (foreign keys)
relationships?.forEach((relationship) => {

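The index-name change above prefixes the schema only outside the DBML flow, so DBML output keeps the plain index name. A tiny standalone sketch of that rule (names assumed):

// Standalone sketch of the naming rule implied by the hunk above.
const buildIndexName = (
    indexName: string,
    schema?: string,
    isDBMLFlow = false
): string => (schema && !isDBMLFlow ? `${schema}_${indexName}` : indexName);

// buildIndexName('idx_users_email', 'public')       -> 'public_idx_users_email'
// buildIndexName('idx_users_email', 'public', true) -> 'idx_users_email'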
@@ -2,8 +2,7 @@ const withExtras = false;
const withDefault = `IFNULL(REPLACE(REPLACE(cols.column_default, '\\\\', ''), '"', 'ֿֿֿ\\"'), '')`;
const withoutDefault = `""`;

export const mariaDBQuery = `SET SESSION group_concat_max_len = 10000000;
SELECT CAST(CONCAT(
export const mariaDBQuery = `SELECT CAST(CONCAT(
'{"fk_info": [',
IFNULL((SELECT GROUP_CONCAT(
CONCAT('{"schema":"', cast(fk.table_schema as CHAR),

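The `SET SESSION group_concat_max_len` prefix matters because MariaDB truncates `GROUP_CONCAT` output at 1024 bytes by default, which would silently cut off the JSON this query assembles for larger schemas. A hedged sketch of keeping that prefix separate from the query body (constant and helper names assumed):

// Assumed names for illustration; the real query text is in the hunk above.
const GROUP_CONCAT_MAX_LEN_STATEMENT =
    'SET SESSION group_concat_max_len = 10000000;';

const withGroupConcatLimitRaised = (query: string): string =>
    `${GROUP_CONCAT_MAX_LEN_STATEMENT}\n${query}`;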
@@ -18,14 +18,11 @@ export interface SQLColumn {
nullable: boolean;
primaryKey: boolean;
unique: boolean;
typeArgs?:
| {
length?: number;
precision?: number;
scale?: number;
}
| number[]
| string;
typeArgs?: {
length?: number;
precision?: number;
scale?: number;
};
comment?: string;
default?: string;
increment?: boolean;
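For reference, the widened `typeArgs` union accepts three shapes. The values below are illustrative examples only, not taken from any parser output:

// Illustrative values for each accepted shape of the widened union.
type TypeArgsSketch =
    | { length?: number; precision?: number; scale?: number }
    | number[]
    | string;

const varcharMaxArgs: TypeArgsSketch = 'max';            // NVARCHAR(MAX) / VARCHAR(MAX)
const decimalArgs: TypeArgsSketch = [18, 2];             // DECIMAL(18, 2)
const objectStyleArgs: TypeArgsSketch = { length: 255 }; // VARCHAR(255)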
@@ -562,38 +559,6 @@ export function convertToChartDBDiagram(
id: column.type.toLowerCase(),
name: column.type,
};
}
// Handle SQL Server types specifically
else if (
sourceDatabaseType === DatabaseType.SQL_SERVER &&
targetDatabaseType === DatabaseType.SQL_SERVER
) {
const normalizedType = column.type.toLowerCase();

// Preserve SQL Server specific types when target is also SQL Server
if (
normalizedType === 'nvarchar' ||
normalizedType === 'nchar' ||
normalizedType === 'ntext' ||
normalizedType === 'uniqueidentifier' ||
normalizedType === 'datetime2' ||
normalizedType === 'datetimeoffset' ||
normalizedType === 'money' ||
normalizedType === 'smallmoney' ||
normalizedType === 'bit' ||
normalizedType === 'xml' ||
normalizedType === 'hierarchyid' ||
normalizedType === 'geography' ||
normalizedType === 'geometry'
) {
mappedType = { id: normalizedType, name: normalizedType };
} else {
// Use the standard mapping for other types
mappedType = mapSQLTypeToGenericType(
column.type,
sourceDatabaseType
);
}
} else {
// Use the standard mapping for other types
mappedType = mapSQLTypeToGenericType(
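A compact, self-contained version of the preserved-type branch shown above: when both source and target are SQL Server, dialect-specific types pass through unchanged instead of being flattened by the generic mapping. The fallback mapper is injected here because the real helper lives outside this hunk:

const SQL_SERVER_ONLY_TYPES = new Set([
    'nvarchar', 'nchar', 'ntext', 'uniqueidentifier', 'datetime2',
    'datetimeoffset', 'money', 'smallmoney', 'bit', 'xml',
    'hierarchyid', 'geography', 'geometry',
]);

// Sketch only: preserve SQL Server-specific types on a same-dialect conversion.
const mapSqlServerType = (
    columnType: string,
    bothSqlServer: boolean,
    fallback: (type: string) => { id: string; name: string }
): { id: string; name: string } => {
    const normalized = columnType.toLowerCase();
    return bothSqlServer && SQL_SERVER_ONLY_TYPES.has(normalized)
        ? { id: normalized, name: normalized }
        : fallback(columnType);
};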
@@ -616,68 +581,22 @@ export function convertToChartDBDiagram(

// Add type arguments if present
if (column.typeArgs) {
// Handle string typeArgs (e.g., 'max' for varchar(max))
if (typeof column.typeArgs === 'string') {
if (
(field.type.id === 'varchar' ||
field.type.id === 'nvarchar') &&
column.typeArgs === 'max'
) {
field.characterMaximumLength = 'max';
}
}
// Handle array typeArgs (SQL Server format)
else if (
Array.isArray(column.typeArgs) &&
column.typeArgs.length > 0
// Transfer length for varchar/char types
if (
column.typeArgs.length !== undefined &&
(field.type.id === 'varchar' || field.type.id === 'char')
) {
if (
field.type.id === 'varchar' ||
field.type.id === 'nvarchar' ||
field.type.id === 'char' ||
field.type.id === 'nchar'
) {
field.characterMaximumLength =
column.typeArgs[0].toString();
} else if (
(field.type.id === 'numeric' ||
field.type.id === 'decimal') &&
column.typeArgs.length >= 2
) {
field.precision = column.typeArgs[0];
field.scale = column.typeArgs[1];
}
field.characterMaximumLength =
column.typeArgs.length.toString();
}
// Handle object typeArgs (standard format)
else if (
typeof column.typeArgs === 'object' &&
!Array.isArray(column.typeArgs)

// Transfer precision/scale for numeric types
if (
column.typeArgs.precision !== undefined &&
(field.type.id === 'numeric' || field.type.id === 'decimal')
) {
const typeArgsObj = column.typeArgs as {
length?: number;
precision?: number;
scale?: number;
};

// Transfer length for varchar/char types
if (
typeArgsObj.length !== undefined &&
(field.type.id === 'varchar' ||
field.type.id === 'char')
) {
field.characterMaximumLength =
typeArgsObj.length.toString();
}

// Transfer precision/scale for numeric types
if (
typeArgsObj.precision !== undefined &&
(field.type.id === 'numeric' ||
field.type.id === 'decimal')
) {
field.precision = typeArgsObj.precision;
field.scale = typeArgsObj.scale;
}
field.precision = column.typeArgs.precision;
field.scale = column.typeArgs.scale;
}
}

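Pulling the three branches above together, here is a self-contained sketch of how the restored handling maps each `typeArgs` shape onto field metadata. The field shape is simplified and the names are assumptions, not the project's actual `DBField`:

interface FieldMetaSketch {
    typeId: string;
    characterMaximumLength?: string;
    precision?: number;
    scale?: number;
}

type TypeArgsInput =
    | string
    | number[]
    | { length?: number; precision?: number; scale?: number };

const applyTypeArgs = (field: FieldMetaSketch, typeArgs: TypeArgsInput): void => {
    const charTypes = ['varchar', 'nvarchar', 'char', 'nchar'];
    const numericTypes = ['numeric', 'decimal'];
    if (typeof typeArgs === 'string') {
        // String form, e.g. NVARCHAR(MAX)
        if (typeArgs === 'max' && charTypes.includes(field.typeId)) {
            field.characterMaximumLength = 'max';
        }
    } else if (Array.isArray(typeArgs) && typeArgs.length > 0) {
        // Array form (SQL Server style): [length] or [precision, scale]
        if (charTypes.includes(field.typeId)) {
            field.characterMaximumLength = typeArgs[0].toString();
        } else if (numericTypes.includes(field.typeId) && typeArgs.length >= 2) {
            field.precision = typeArgs[0];
            field.scale = typeArgs[1];
        }
    } else {
        // Object form (standard style): { length, precision, scale }
        if (typeArgs.length !== undefined && charTypes.includes(field.typeId)) {
            field.characterMaximumLength = typeArgs.length.toString();
        }
        if (typeArgs.precision !== undefined && numericTypes.includes(field.typeId)) {
            field.precision = typeArgs.precision;
            field.scale = typeArgs.scale;
        }
    }
};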
@@ -1,350 +0,0 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromSQLServer } from '../sqlserver';
|
||||
|
||||
describe('SQL Server Core Parser Tests', () => {
|
||||
it('should parse basic tables', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (
|
||||
id INT PRIMARY KEY,
|
||||
name NVARCHAR(255) NOT NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('wizards');
|
||||
expect(result.tables[0].columns).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should parse tables with schemas', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [magic].[spells] (
|
||||
id UNIQUEIDENTIFIER PRIMARY KEY DEFAULT NEWID(),
|
||||
name NVARCHAR(100) NOT NULL,
|
||||
level INT NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[wizards] (
|
||||
id INT IDENTITY(1,1) PRIMARY KEY,
|
||||
name NVARCHAR(255) NOT NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.tables.find((t) => t.name === 'spells')).toBeDefined();
|
||||
expect(result.tables.find((t) => t.name === 'spells')?.schema).toBe(
|
||||
'magic'
|
||||
);
|
||||
expect(result.tables.find((t) => t.name === 'wizards')?.schema).toBe(
|
||||
'dbo'
|
||||
);
|
||||
});
|
||||
|
||||
it('should parse foreign key relationships', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE guilds (id INT PRIMARY KEY);
|
||||
CREATE TABLE mages (
|
||||
id INT PRIMARY KEY,
|
||||
guild_id INT FOREIGN KEY REFERENCES guilds(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
expect(result.relationships[0].sourceTable).toBe('mages');
|
||||
expect(result.relationships[0].targetTable).toBe('guilds');
|
||||
expect(result.relationships[0].sourceColumn).toBe('guild_id');
|
||||
expect(result.relationships[0].targetColumn).toBe('id');
|
||||
});
|
||||
|
||||
it('should parse foreign keys with schema references', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [magic].[schools] (
|
||||
id UNIQUEIDENTIFIER PRIMARY KEY DEFAULT NEWID(),
|
||||
name NVARCHAR(100) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE [magic].[towers] (
|
||||
id UNIQUEIDENTIFIER PRIMARY KEY DEFAULT NEWID(),
|
||||
school_id UNIQUEIDENTIFIER NOT NULL,
|
||||
name NVARCHAR(100) NOT NULL,
|
||||
CONSTRAINT FK_towers_schools FOREIGN KEY (school_id) REFERENCES [magic].[schools](id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
expect(result.relationships[0].sourceTable).toBe('towers');
|
||||
expect(result.relationships[0].targetTable).toBe('schools');
|
||||
expect(result.relationships[0].sourceSchema).toBe('magic');
|
||||
expect(result.relationships[0].targetSchema).toBe('magic');
|
||||
});
|
||||
|
||||
it('should handle GO statements and SQL Server specific syntax', async () => {
|
||||
const sql = `
|
||||
USE [MagicalRealm]
|
||||
GO
|
||||
|
||||
SET ANSI_NULLS ON
|
||||
GO
|
||||
|
||||
SET QUOTED_IDENTIFIER ON
|
||||
GO
|
||||
|
||||
CREATE TABLE [dbo].[enchantments] (
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[Name] [nvarchar](max) NOT NULL,
|
||||
[Power] [decimal](18, 2) NOT NULL,
|
||||
[CreatedAt] [datetime2](7) NOT NULL,
|
||||
CONSTRAINT [PK_enchantments] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
GO
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('enchantments');
|
||||
expect(result.tables[0].columns).toHaveLength(4);
|
||||
expect(
|
||||
result.tables[0].columns.find((c) => c.name === 'Power')?.type
|
||||
).toBe('decimal');
|
||||
});
|
||||
|
||||
it('should parse ALTER TABLE ADD CONSTRAINT for foreign keys', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [calibration].[Calibration] (
|
||||
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
|
||||
[Average] [decimal](18, 2) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE [calibration].[CalibrationProcess] (
|
||||
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
|
||||
[CalibrationId] [uniqueidentifier] NOT NULL
|
||||
);
|
||||
|
||||
ALTER TABLE [calibration].[CalibrationProcess]
|
||||
ADD CONSTRAINT [FK_CalibrationProcess_Calibration]
|
||||
FOREIGN KEY ([CalibrationId])
|
||||
REFERENCES [calibration].[Calibration]([Id]);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
expect(result.relationships[0].sourceTable).toBe('CalibrationProcess');
|
||||
expect(result.relationships[0].targetTable).toBe('Calibration');
|
||||
expect(result.relationships[0].name).toBe(
|
||||
'FK_CalibrationProcess_Calibration'
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle multiple schemas from the test file', async () => {
|
||||
const sql = `
|
||||
CREATE SCHEMA [magic]
|
||||
GO
|
||||
CREATE SCHEMA [artifacts]
|
||||
GO
|
||||
|
||||
CREATE TABLE [magic].[wizards] (
|
||||
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
|
||||
[Name] [nvarchar](255) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE [artifacts].[wands] (
|
||||
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
|
||||
[WizardId] [uniqueidentifier] NOT NULL,
|
||||
[WoodType] [nvarchar](50) NOT NULL,
|
||||
CONSTRAINT [FK_wands_wizards] FOREIGN KEY ([WizardId]) REFERENCES [magic].[wizards]([Id])
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.tables.find((t) => t.schema === 'magic')).toBeDefined();
|
||||
expect(
|
||||
result.tables.find((t) => t.schema === 'artifacts')
|
||||
).toBeDefined();
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
expect(result.relationships[0].sourceSchema).toBe('artifacts');
|
||||
expect(result.relationships[0].targetSchema).toBe('magic');
|
||||
});
|
||||
|
||||
it('should handle SQL Server data types correctly', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [magic].[spell_components] (
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[Name] [nvarchar](255) NOT NULL,
|
||||
[Quantity] [int] NOT NULL,
|
||||
[Weight] [decimal](10, 2) NOT NULL,
|
||||
[IsPowerful] [bit] NOT NULL,
|
||||
[DiscoveredAt] [datetime2](7) NOT NULL,
|
||||
[Description] [nvarchar](max) NULL,
|
||||
[RarityLevel] [tinyint] NOT NULL,
|
||||
[MarketValue] [money] NOT NULL,
|
||||
[AlchemicalFormula] [xml] NULL,
|
||||
PRIMARY KEY ([Id])
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const columns = result.tables[0].columns;
|
||||
|
||||
expect(columns.find((c) => c.name === 'Id')?.type).toBe(
|
||||
'uniqueidentifier'
|
||||
);
|
||||
expect(columns.find((c) => c.name === 'Name')?.type).toBe('nvarchar');
|
||||
expect(columns.find((c) => c.name === 'Quantity')?.type).toBe('int');
|
||||
expect(columns.find((c) => c.name === 'Weight')?.type).toBe('decimal');
|
||||
expect(columns.find((c) => c.name === 'IsPowerful')?.type).toBe('bit');
|
||||
expect(columns.find((c) => c.name === 'DiscoveredAt')?.type).toBe(
|
||||
'datetime2'
|
||||
);
|
||||
expect(columns.find((c) => c.name === 'Description')?.type).toBe(
|
||||
'nvarchar'
|
||||
);
|
||||
expect(columns.find((c) => c.name === 'RarityLevel')?.type).toBe(
|
||||
'tinyint'
|
||||
);
|
||||
expect(columns.find((c) => c.name === 'MarketValue')?.type).toBe(
|
||||
'money'
|
||||
);
|
||||
expect(columns.find((c) => c.name === 'AlchemicalFormula')?.type).toBe(
|
||||
'xml'
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle IDENTITY columns', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [dbo].[magical_creatures] (
|
||||
[Id] [int] IDENTITY(1,1) NOT NULL PRIMARY KEY,
|
||||
[Name] [nvarchar](100) NOT NULL,
|
||||
[PowerLevel] [int] NOT NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const idColumn = result.tables[0].columns.find((c) => c.name === 'Id');
|
||||
expect(idColumn?.increment).toBe(true);
|
||||
});
|
||||
|
||||
it('should parse composite primary keys', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [magic].[spell_ingredients] (
|
||||
[SpellId] [uniqueidentifier] NOT NULL,
|
||||
[IngredientId] [uniqueidentifier] NOT NULL,
|
||||
[Quantity] [int] NOT NULL,
|
||||
CONSTRAINT [PK_spell_ingredients] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[SpellId] ASC,
|
||||
[IngredientId] ASC
|
||||
)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const table = result.tables[0];
|
||||
expect(table.columns.filter((c) => c.primaryKey)).toHaveLength(2);
|
||||
expect(
|
||||
table.columns.find((c) => c.name === 'SpellId')?.primaryKey
|
||||
).toBe(true);
|
||||
expect(
|
||||
table.columns.find((c) => c.name === 'IngredientId')?.primaryKey
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle unique constraints', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [dbo].[arcane_libraries] (
|
||||
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
|
||||
[Code] [nvarchar](50) NOT NULL,
|
||||
[Name] [nvarchar](255) NOT NULL,
|
||||
CONSTRAINT [UQ_arcane_libraries_code] UNIQUE ([Code])
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].indexes).toHaveLength(1);
|
||||
expect(result.tables[0].indexes[0].name).toBe(
|
||||
'UQ_arcane_libraries_code'
|
||||
);
|
||||
expect(result.tables[0].indexes[0].unique).toBe(true);
|
||||
expect(result.tables[0].indexes[0].columns).toContain('Code');
|
||||
});
|
||||
|
||||
it('should handle default values', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [dbo].[potion_recipes] (
|
||||
[Id] [uniqueidentifier] NOT NULL DEFAULT NEWID(),
|
||||
[Name] [nvarchar](255) NOT NULL,
|
||||
[IsActive] [bit] NOT NULL DEFAULT 1,
|
||||
[CreatedAt] [datetime2](7) NOT NULL DEFAULT GETDATE(),
|
||||
[Difficulty] [int] NOT NULL DEFAULT 5,
|
||||
PRIMARY KEY ([Id])
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const columns = result.tables[0].columns;
|
||||
|
||||
expect(columns.find((c) => c.name === 'Id')?.default).toBeDefined();
|
||||
expect(columns.find((c) => c.name === 'IsActive')?.default).toBe('1');
|
||||
expect(
|
||||
columns.find((c) => c.name === 'CreatedAt')?.default
|
||||
).toBeDefined();
|
||||
expect(columns.find((c) => c.name === 'Difficulty')?.default).toBe('5');
|
||||
});
|
||||
|
||||
it('should parse indexes created separately', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [dbo].[spell_books] (
|
||||
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
|
||||
[Title] [nvarchar](255) NOT NULL,
|
||||
[Author] [nvarchar](255) NOT NULL,
|
||||
[PublishedYear] [int] NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX [IX_spell_books_author] ON [dbo].[spell_books] ([Author]);
|
||||
CREATE UNIQUE INDEX [UIX_spell_books_title] ON [dbo].[spell_books] ([Title]);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].indexes).toHaveLength(2);
|
||||
|
||||
const authorIndex = result.tables[0].indexes.find(
|
||||
(i) => i.name === 'IX_spell_books_author'
|
||||
);
|
||||
expect(authorIndex?.unique).toBe(false);
|
||||
expect(authorIndex?.columns).toContain('Author');
|
||||
|
||||
const titleIndex = result.tables[0].indexes.find(
|
||||
(i) => i.name === 'UIX_spell_books_title'
|
||||
);
|
||||
expect(titleIndex?.unique).toBe(true);
|
||||
expect(titleIndex?.columns).toContain('Title');
|
||||
});
|
||||
});
|
||||
@@ -1,478 +0,0 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromSQLServer } from '../sqlserver';
|
||||
|
||||
describe('SQL Server Real-World Examples', () => {
|
||||
describe('Magical Academy Example', () => {
|
||||
it('should parse the magical academy example with all 16 tables', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [dbo].[schools](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[name] [nvarchar](255) NOT NULL,
|
||||
[created_at] [datetime2](7) NOT NULL DEFAULT GETDATE()
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[towers](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[school_id] [uniqueidentifier] NOT NULL,
|
||||
[name] [nvarchar](255) NOT NULL,
|
||||
CONSTRAINT [FK_towers_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[ranks](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[school_id] [uniqueidentifier] NOT NULL,
|
||||
[name] [nvarchar](255) NOT NULL,
|
||||
CONSTRAINT [FK_ranks_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[spell_permissions](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[spell_type] [nvarchar](255) NOT NULL,
|
||||
[casting_level] [nvarchar](255) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[rank_spell_permissions](
|
||||
[rank_id] [uniqueidentifier] NOT NULL,
|
||||
[spell_permission_id] [uniqueidentifier] NOT NULL,
|
||||
PRIMARY KEY ([rank_id], [spell_permission_id]),
|
||||
CONSTRAINT [FK_rsp_ranks] FOREIGN KEY ([rank_id]) REFERENCES [dbo].[ranks]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_rsp_permissions] FOREIGN KEY ([spell_permission_id]) REFERENCES [dbo].[spell_permissions]([id]) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[grimoire_types](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[school_id] [uniqueidentifier] NOT NULL,
|
||||
[name] [nvarchar](255) NOT NULL,
|
||||
CONSTRAINT [FK_grimoire_types_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[wizards](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[school_id] [uniqueidentifier] NOT NULL,
|
||||
[tower_id] [uniqueidentifier] NOT NULL,
|
||||
[wizard_name] [nvarchar](255) NOT NULL,
|
||||
[email] [nvarchar](255) NOT NULL,
|
||||
CONSTRAINT [FK_wizards_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_wizards_towers] FOREIGN KEY ([tower_id]) REFERENCES [dbo].[towers]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [UQ_wizards_school_name] UNIQUE ([school_id], [wizard_name])
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[wizard_ranks](
|
||||
[wizard_id] [uniqueidentifier] NOT NULL,
|
||||
[rank_id] [uniqueidentifier] NOT NULL,
|
||||
[tower_id] [uniqueidentifier] NOT NULL,
|
||||
[assigned_at] [datetime2](7) NOT NULL DEFAULT GETDATE(),
|
||||
PRIMARY KEY ([wizard_id], [rank_id], [tower_id]),
|
||||
CONSTRAINT [FK_wr_wizards] FOREIGN KEY ([wizard_id]) REFERENCES [dbo].[wizards]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_wr_ranks] FOREIGN KEY ([rank_id]) REFERENCES [dbo].[ranks]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_wr_towers] FOREIGN KEY ([tower_id]) REFERENCES [dbo].[towers]([id]) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[apprentices](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[school_id] [uniqueidentifier] NOT NULL,
|
||||
[tower_id] [uniqueidentifier] NOT NULL,
|
||||
[first_name] [nvarchar](255) NOT NULL,
|
||||
[last_name] [nvarchar](255) NOT NULL,
|
||||
[enrollment_date] [date] NOT NULL,
|
||||
[primary_mentor] [uniqueidentifier] NULL,
|
||||
[sponsoring_wizard] [uniqueidentifier] NULL,
|
||||
CONSTRAINT [FK_apprentices_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_apprentices_towers] FOREIGN KEY ([tower_id]) REFERENCES [dbo].[towers]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_apprentices_mentor] FOREIGN KEY ([primary_mentor]) REFERENCES [dbo].[wizards]([id]),
|
||||
CONSTRAINT [FK_apprentices_sponsor] FOREIGN KEY ([sponsoring_wizard]) REFERENCES [dbo].[wizards]([id])
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[spell_lessons](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[school_id] [uniqueidentifier] NOT NULL,
|
||||
[tower_id] [uniqueidentifier] NOT NULL,
|
||||
[apprentice_id] [uniqueidentifier] NOT NULL,
|
||||
[instructor_id] [uniqueidentifier] NOT NULL,
|
||||
[lesson_date] [datetime2](7) NOT NULL,
|
||||
CONSTRAINT [FK_sl_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_sl_towers] FOREIGN KEY ([tower_id]) REFERENCES [dbo].[towers]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_sl_apprentices] FOREIGN KEY ([apprentice_id]) REFERENCES [dbo].[apprentices]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_sl_instructors] FOREIGN KEY ([instructor_id]) REFERENCES [dbo].[wizards]([id])
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[grimoires](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[school_id] [uniqueidentifier] NOT NULL,
|
||||
[tower_id] [uniqueidentifier] NOT NULL,
|
||||
[apprentice_id] [uniqueidentifier] NOT NULL,
|
||||
[grimoire_type_id] [uniqueidentifier] NOT NULL,
|
||||
[author_wizard_id] [uniqueidentifier] NOT NULL,
|
||||
[content] [nvarchar](max) NOT NULL,
|
||||
CONSTRAINT [FK_g_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_g_towers] FOREIGN KEY ([tower_id]) REFERENCES [dbo].[towers]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_g_apprentices] FOREIGN KEY ([apprentice_id]) REFERENCES [dbo].[apprentices]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_g_types] FOREIGN KEY ([grimoire_type_id]) REFERENCES [dbo].[grimoire_types]([id]),
|
||||
CONSTRAINT [FK_g_authors] FOREIGN KEY ([author_wizard_id]) REFERENCES [dbo].[wizards]([id])
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[tuition_scrolls](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[school_id] [uniqueidentifier] NOT NULL,
|
||||
[tower_id] [uniqueidentifier] NOT NULL,
|
||||
[apprentice_id] [uniqueidentifier] NOT NULL,
|
||||
[total_amount] [decimal](10,2) NOT NULL,
|
||||
[status] [nvarchar](50) NOT NULL,
|
||||
CONSTRAINT [FK_ts_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_ts_towers] FOREIGN KEY ([tower_id]) REFERENCES [dbo].[towers]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_ts_apprentices] FOREIGN KEY ([apprentice_id]) REFERENCES [dbo].[apprentices]([id]) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[tuition_items](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[tuition_scroll_id] [uniqueidentifier] NOT NULL,
|
||||
[description] [nvarchar](max) NOT NULL,
|
||||
[amount] [decimal](10,2) NOT NULL,
|
||||
CONSTRAINT [FK_ti_scrolls] FOREIGN KEY ([tuition_scroll_id]) REFERENCES [dbo].[tuition_scrolls]([id]) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[patron_sponsorships](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[tuition_scroll_id] [uniqueidentifier] NOT NULL,
|
||||
[patron_house] [nvarchar](255) NOT NULL,
|
||||
[sponsorship_code] [nvarchar](50) NOT NULL,
|
||||
[status] [nvarchar](50) NOT NULL,
|
||||
CONSTRAINT [FK_ps_scrolls] FOREIGN KEY ([tuition_scroll_id]) REFERENCES [dbo].[tuition_scrolls]([id]) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[gold_payments](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[tuition_scroll_id] [uniqueidentifier] NOT NULL,
|
||||
[amount] [decimal](10,2) NOT NULL,
|
||||
[payment_date] [datetime2](7) NOT NULL DEFAULT GETDATE(),
|
||||
CONSTRAINT [FK_gp_scrolls] FOREIGN KEY ([tuition_scroll_id]) REFERENCES [dbo].[tuition_scrolls]([id]) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[arcane_logs](
|
||||
[id] [bigint] IDENTITY(1,1) PRIMARY KEY,
|
||||
[school_id] [uniqueidentifier] NULL,
|
||||
[wizard_id] [uniqueidentifier] NULL,
|
||||
[tower_id] [uniqueidentifier] NULL,
|
||||
[table_name] [nvarchar](255) NOT NULL,
|
||||
[operation] [nvarchar](50) NOT NULL,
|
||||
[record_id] [uniqueidentifier] NULL,
|
||||
[changes] [nvarchar](max) NULL,
|
||||
[created_at] [datetime2](7) NOT NULL DEFAULT GETDATE(),
|
||||
CONSTRAINT [FK_al_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE SET NULL,
|
||||
CONSTRAINT [FK_al_wizards] FOREIGN KEY ([wizard_id]) REFERENCES [dbo].[wizards]([id]) ON DELETE SET NULL,
|
||||
CONSTRAINT [FK_al_towers] FOREIGN KEY ([tower_id]) REFERENCES [dbo].[towers]([id]) ON DELETE SET NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
// Should find all 16 tables
|
||||
const expectedTables = [
|
||||
'apprentices',
|
||||
'arcane_logs',
|
||||
'gold_payments',
|
||||
'grimoire_types',
|
||||
'grimoires',
|
||||
'patron_sponsorships',
|
||||
'rank_spell_permissions',
|
||||
'ranks',
|
||||
'schools',
|
||||
'spell_lessons',
|
||||
'spell_permissions',
|
||||
'towers',
|
||||
'tuition_items',
|
||||
'tuition_scrolls',
|
||||
'wizard_ranks',
|
||||
'wizards',
|
||||
];
|
||||
|
||||
expect(result.tables).toHaveLength(16);
|
||||
expect(result.tables.map((t) => t.name).sort()).toEqual(
|
||||
expectedTables
|
||||
);
|
||||
|
||||
// Verify key relationships exist
|
||||
const relationships = result.relationships;
|
||||
|
||||
// Check some critical relationships
|
||||
expect(
|
||||
relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'wizards' &&
|
||||
r.targetTable === 'schools' &&
|
||||
r.sourceColumn === 'school_id'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'wizard_ranks' &&
|
||||
r.targetTable === 'wizards' &&
|
||||
r.sourceColumn === 'wizard_id'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'apprentices' &&
|
||||
r.targetTable === 'wizards' &&
|
||||
r.sourceColumn === 'primary_mentor'
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Enchanted Bazaar Example', () => {
|
||||
it('should parse the enchanted bazaar example with complex features', async () => {
|
||||
const sql = `
|
||||
-- Enchanted Bazaar tables with complex features
|
||||
CREATE TABLE [dbo].[merchants](
|
||||
[id] [int] IDENTITY(1,1) PRIMARY KEY,
|
||||
[name] [nvarchar](255) NOT NULL,
|
||||
[email] [nvarchar](255) NOT NULL,
|
||||
[created_at] [datetime] DEFAULT GETDATE(),
|
||||
CONSTRAINT [UQ_merchants_email] UNIQUE ([email])
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[artifacts](
|
||||
[id] [int] IDENTITY(1,1) PRIMARY KEY,
|
||||
[merchant_id] [int] NOT NULL,
|
||||
[name] [nvarchar](255) NOT NULL,
|
||||
[price] [decimal](10, 2) NOT NULL CHECK ([price] >= 0),
|
||||
[enchantment_charges] [int] DEFAULT 0 CHECK ([enchantment_charges] >= 0),
|
||||
CONSTRAINT [FK_artifacts_merchants] FOREIGN KEY ([merchant_id]) REFERENCES [dbo].[merchants]([id]) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[trades](
|
||||
[id] [int] IDENTITY(1,1) PRIMARY KEY,
|
||||
[created_at] [datetime] DEFAULT GETDATE(),
|
||||
[status] [varchar](50) DEFAULT 'negotiating'
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[trade_items](
|
||||
[trade_id] [int] NOT NULL,
|
||||
[artifact_id] [int] NOT NULL,
|
||||
[quantity] [int] NOT NULL CHECK ([quantity] > 0),
|
||||
[agreed_price] [decimal](10, 2) NOT NULL,
|
||||
PRIMARY KEY ([trade_id], [artifact_id]),
|
||||
CONSTRAINT [FK_ti_trades] FOREIGN KEY ([trade_id]) REFERENCES [dbo].[trades]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_ti_artifacts] FOREIGN KEY ([artifact_id]) REFERENCES [dbo].[artifacts]([id])
|
||||
);
|
||||
|
||||
-- Create indexes
|
||||
CREATE INDEX [IX_artifacts_merchant_id] ON [dbo].[artifacts] ([merchant_id]);
|
||||
CREATE INDEX [IX_artifacts_price] ON [dbo].[artifacts] ([price] DESC);
|
||||
CREATE UNIQUE INDEX [UIX_artifacts_name_merchant] ON [dbo].[artifacts] ([name], [merchant_id]);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
// Should parse all tables
|
||||
expect(result.tables.length).toBeGreaterThanOrEqual(4);
|
||||
|
||||
// Check for specific tables
|
||||
const tableNames = result.tables.map((t) => t.name);
|
||||
expect(tableNames).toContain('merchants');
|
||||
expect(tableNames).toContain('artifacts');
|
||||
expect(tableNames).toContain('trades');
|
||||
expect(tableNames).toContain('trade_items');
|
||||
|
||||
// Check relationships
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'artifacts' &&
|
||||
r.targetTable === 'merchants'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'trade_items' &&
|
||||
r.targetTable === 'trades'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
// Check indexes were created
|
||||
const artifactsTable = result.tables.find(
|
||||
(t) => t.name === 'artifacts'
|
||||
);
|
||||
expect(artifactsTable?.indexes.length).toBeGreaterThanOrEqual(2);
|
||||
expect(
|
||||
artifactsTable?.indexes.some(
|
||||
(i) => i.name === 'IX_artifacts_merchant_id'
|
||||
)
|
||||
).toBe(true);
|
||||
expect(
|
||||
artifactsTable?.indexes.some(
|
||||
(i) => i.unique && i.name === 'UIX_artifacts_name_merchant'
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Complex SQL Server Schema Example', () => {
|
||||
it('should parse complex multi-schema database with various SQL Server features', async () => {
|
||||
const sql = `
|
||||
CREATE SCHEMA [magic];
|
||||
GO
|
||||
CREATE SCHEMA [inventory];
|
||||
GO
|
||||
CREATE SCHEMA [academy];
|
||||
GO
|
||||
|
||||
-- Magic schema tables
|
||||
CREATE TABLE [magic].[spell_categories](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWSEQUENTIALID(),
|
||||
[name] [nvarchar](100) NOT NULL,
|
||||
[description] [nvarchar](max) NULL,
|
||||
[is_forbidden] [bit] NOT NULL DEFAULT 0,
|
||||
[created_at] [datetime2](7) NOT NULL DEFAULT SYSDATETIME()
|
||||
);
|
||||
|
||||
CREATE TABLE [magic].[spells](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWSEQUENTIALID(),
|
||||
[category_id] [uniqueidentifier] NOT NULL,
|
||||
[name] [nvarchar](200) NOT NULL,
|
||||
[mana_cost] [smallint] NOT NULL CHECK ([mana_cost] > 0),
|
||||
[damage_output] [decimal](10,2) NULL,
|
||||
[cast_time_ms] [int] NOT NULL DEFAULT 1000,
|
||||
[is_active] [bit] NOT NULL DEFAULT 1,
|
||||
[metadata] [xml] NULL,
|
||||
CONSTRAINT [FK_spells_categories] FOREIGN KEY ([category_id])
|
||||
REFERENCES [magic].[spell_categories]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [UQ_spells_name] UNIQUE ([name])
|
||||
);
|
||||
|
||||
-- Inventory schema tables
|
||||
CREATE TABLE [inventory].[item_types](
|
||||
[id] [int] IDENTITY(1,1) PRIMARY KEY,
|
||||
[type_code] [char](3) NOT NULL UNIQUE,
|
||||
[type_name] [varchar](50) NOT NULL,
|
||||
[max_stack_size] [tinyint] NOT NULL DEFAULT 99
|
||||
);
|
||||
|
||||
CREATE TABLE [inventory].[magical_items](
|
||||
[id] [bigint] IDENTITY(1,1) PRIMARY KEY,
|
||||
[item_type_id] [int] NOT NULL,
|
||||
[item_name] [nvarchar](255) NOT NULL,
|
||||
[rarity] [varchar](20) NOT NULL,
|
||||
[weight_kg] [float] NOT NULL,
|
||||
[base_value] [money] NOT NULL,
|
||||
[enchantment_level] [tinyint] NULL CHECK ([enchantment_level] BETWEEN 0 AND 10),
|
||||
[discovered_date] [date] NULL,
|
||||
[discovered_time] [time](7) NULL,
|
||||
[full_discovered_at] [datetimeoffset](7) NULL,
|
||||
CONSTRAINT [FK_items_types] FOREIGN KEY ([item_type_id])
|
||||
REFERENCES [inventory].[item_types]([id])
|
||||
);
|
||||
|
||||
-- Academy schema tables
|
||||
CREATE TABLE [academy].[courses](
|
||||
[course_id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[course_code] [nvarchar](10) NOT NULL UNIQUE,
|
||||
[course_name] [nvarchar](200) NOT NULL,
|
||||
[credits] [decimal](3,1) NOT NULL,
|
||||
[prerequisite_spell_id] [uniqueidentifier] NULL,
|
||||
CONSTRAINT [FK_courses_spells] FOREIGN KEY ([prerequisite_spell_id])
|
||||
REFERENCES [magic].[spells]([id])
|
||||
);
|
||||
|
||||
CREATE TABLE [academy].[enrollments](
|
||||
[enrollment_id] [bigint] IDENTITY(1,1) PRIMARY KEY,
|
||||
[student_id] [uniqueidentifier] NOT NULL,
|
||||
[course_id] [uniqueidentifier] NOT NULL,
|
||||
[enrollment_date] [datetime2](0) NOT NULL DEFAULT GETDATE(),
|
||||
[grade] [decimal](4,2) NULL CHECK ([grade] >= 0 AND [grade] <= 100),
|
||||
[completion_status] [nvarchar](20) NOT NULL DEFAULT 'enrolled',
|
||||
CONSTRAINT [FK_enrollments_courses] FOREIGN KEY ([course_id])
|
||||
REFERENCES [academy].[courses]([course_id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [UQ_enrollment] UNIQUE ([student_id], [course_id])
|
||||
);
|
||||
|
||||
-- Cross-schema relationships
|
||||
CREATE TABLE [inventory].[spell_reagents](
|
||||
[spell_id] [uniqueidentifier] NOT NULL,
|
||||
[item_id] [bigint] NOT NULL,
|
||||
[quantity_required] [smallint] NOT NULL DEFAULT 1,
|
||||
PRIMARY KEY ([spell_id], [item_id]),
|
||||
CONSTRAINT [FK_reagents_spells] FOREIGN KEY ([spell_id])
|
||||
REFERENCES [magic].[spells]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_reagents_items] FOREIGN KEY ([item_id])
|
||||
REFERENCES [inventory].[magical_items]([id]) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
-- Additional indexes
|
||||
CREATE INDEX [IX_spells_category] ON [magic].[spells] ([category_id]);
|
||||
CREATE INDEX [IX_items_type_rarity] ON [inventory].[magical_items] ([item_type_id], [rarity]);
|
||||
CREATE UNIQUE INDEX [UIX_items_name_type] ON [inventory].[magical_items] ([item_name], [item_type_id]);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
// Verify all tables are parsed
|
||||
expect(result.tables).toHaveLength(7);
|
||||
|
||||
// Check schema assignment
|
||||
expect(
|
||||
result.tables.filter((t) => t.schema === 'magic')
|
||||
).toHaveLength(2);
|
||||
expect(
|
||||
result.tables.filter((t) => t.schema === 'inventory')
|
||||
).toHaveLength(3);
|
||||
expect(
|
||||
result.tables.filter((t) => t.schema === 'academy')
|
||||
).toHaveLength(2);
|
||||
|
||||
// Verify cross-schema relationships
|
||||
const crossSchemaRel = result.relationships.find(
|
||||
(r) => r.sourceTable === 'courses' && r.targetTable === 'spells'
|
||||
);
|
||||
expect(crossSchemaRel).toBeDefined();
|
||||
expect(crossSchemaRel?.sourceSchema).toBe('academy');
|
||||
expect(crossSchemaRel?.targetSchema).toBe('magic');
|
||||
|
||||
// Check various SQL Server data types
|
||||
const spellsTable = result.tables.find((t) => t.name === 'spells');
|
||||
expect(
|
||||
spellsTable?.columns.find((c) => c.name === 'mana_cost')?.type
|
||||
).toBe('smallint');
|
||||
expect(
|
||||
spellsTable?.columns.find((c) => c.name === 'metadata')?.type
|
||||
).toBe('xml');
|
||||
|
||||
const itemsTable = result.tables.find(
|
||||
(t) => t.name === 'magical_items'
|
||||
);
|
||||
expect(
|
||||
itemsTable?.columns.find((c) => c.name === 'weight_kg')?.type
|
||||
).toBe('float');
|
||||
expect(
|
||||
itemsTable?.columns.find((c) => c.name === 'base_value')?.type
|
||||
).toBe('money');
|
||||
expect(
|
||||
itemsTable?.columns.find((c) => c.name === 'discovered_date')
|
||||
?.type
|
||||
).toBe('date');
|
||||
expect(
|
||||
itemsTable?.columns.find((c) => c.name === 'discovered_time')
|
||||
?.type
|
||||
).toBe('time');
|
||||
expect(
|
||||
itemsTable?.columns.find((c) => c.name === 'full_discovered_at')
|
||||
?.type
|
||||
).toBe('datetimeoffset');
|
||||
|
||||
// Verify IDENTITY columns
|
||||
const itemTypesTable = result.tables.find(
|
||||
(t) => t.name === 'item_types'
|
||||
);
|
||||
expect(
|
||||
itemTypesTable?.columns.find((c) => c.name === 'id')?.increment
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,675 +0,0 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromSQLServer } from '../sqlserver';
|
||||
|
||||
describe('SQL Server Fantasy Database Import Tests', () => {
|
||||
it('should parse the magical realm database correctly', async () => {
|
||||
// Fantasy-themed SQL Server database with multiple schemas
|
||||
const sql = `
|
||||
USE [MagicalRealmDB]
|
||||
GO
|
||||
/****** Object: Schema [spellcasting] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
CREATE SCHEMA [spellcasting]
|
||||
GO
|
||||
/****** Object: Schema [enchantments] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
CREATE SCHEMA [enchantments]
|
||||
GO
|
||||
/****** Object: Schema [artifacts] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
CREATE SCHEMA [artifacts]
|
||||
GO
|
||||
/****** Object: Schema [wizards] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
CREATE SCHEMA [wizards]
|
||||
GO
|
||||
|
||||
/****** Object: Table [spellcasting].[Spell] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
SET ANSI_NULLS ON
|
||||
GO
|
||||
SET QUOTED_IDENTIFIER ON
|
||||
GO
|
||||
CREATE TABLE [spellcasting].[Spell](
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[HelpId] [uniqueidentifier] NOT NULL,
|
||||
[RealmId] [uniqueidentifier] NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[CreatedAt] [datetime2](7) NOT NULL,
|
||||
[DeletedAt] [datetime2](7) NULL,
|
||||
[DeletedById] [uniqueidentifier] NULL,
|
||||
[DeletedByFullName] [nvarchar](max) NULL,
|
||||
[DeletedByEmail] [nvarchar](max) NULL,
|
||||
[CreatedById] [uniqueidentifier] NULL,
|
||||
[CreatedByUsername] [nvarchar](max) NOT NULL,
|
||||
[UpdatedBy] [uniqueidentifier] NULL,
|
||||
[UpdatedAt] [datetime2](7) NULL,
|
||||
[PowerLevel] [decimal](18, 2) NOT NULL,
|
||||
[Incantation] [nvarchar](max) NULL,
|
||||
[ParentId] [uniqueidentifier] NULL,
|
||||
[Name] [nvarchar](255) NOT NULL,
|
||||
[Description] [nvarchar](max) NOT NULL,
|
||||
[RunicInscription] [varchar](max) NULL,
|
||||
CONSTRAINT [PK_Spell] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
GO
|
||||
|
||||
/****** Object: Table [spellcasting].[SpellCasting] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
SET ANSI_NULLS ON
|
||||
GO
|
||||
SET QUOTED_IDENTIFIER ON
|
||||
GO
|
||||
CREATE TABLE [spellcasting].[SpellCasting](
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[HelpId] [uniqueidentifier] NOT NULL,
|
||||
[RealmId] [uniqueidentifier] NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[CreatedAt] [datetime2](7) NOT NULL,
|
||||
[DeletedAt] [datetime2](7) NULL,
|
||||
[DeletedById] [uniqueidentifier] NULL,
|
||||
[DeletedByFullName] [nvarchar](max) NULL,
|
||||
[DeletedByEmail] [nvarchar](max) NULL,
|
||||
[CreatedById] [uniqueidentifier] NULL,
|
||||
[CreatedByUsername] [nvarchar](max) NOT NULL,
|
||||
[UpdatedBy] [uniqueidentifier] NULL,
|
||||
[UpdatedAt] [datetime2](7) NULL,
|
||||
[WizardLevel] [int] NOT NULL,
|
||||
[ManaCost] [decimal](18, 2) NOT NULL,
|
||||
[CastingTime] [decimal](18, 2) NULL,
|
||||
[Components] [nvarchar](max) NULL,
|
||||
[CastingNumber] [int] NULL,
|
||||
[SuccessRate] [decimal](18, 2) NULL,
|
||||
[CriticalChance] [decimal](18, 2) NULL,
|
||||
[ExtendedDuration] [decimal](18, 2) NULL,
|
||||
[Status] [int] NULL,
|
||||
[SpellId] [uniqueidentifier] NOT NULL,
|
||||
[CastingNotes] [nvarchar](max) NULL,
|
||||
[ParentId] [uniqueidentifier] NULL,
|
||||
CONSTRAINT [PK_SpellCasting] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
GO
|
||||
|
||||
/****** Object: Table [enchantments].[MagicalItem] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
SET ANSI_NULLS ON
|
||||
GO
|
||||
SET QUOTED_IDENTIFIER ON
|
||||
GO
|
||||
CREATE TABLE [enchantments].[MagicalItem](
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[HelpId] [uniqueidentifier] NOT NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[CreatedAt] [datetime2](7) NOT NULL,
|
||||
[CreatedById] [uniqueidentifier] NULL,
|
||||
[CreatedByUsername] [nvarchar](max) NOT NULL,
|
||||
[WandId] [uniqueidentifier] NOT NULL,
|
||||
[EnchanterId] [uniqueidentifier] NOT NULL,
|
||||
[OrderNumber] [nvarchar](max) NOT NULL,
|
||||
[EnchantmentDate] [datetime2](7) NOT NULL,
|
||||
[IsCertified] [bit] NOT NULL,
|
||||
[CertificationCode] [nvarchar](max) NOT NULL,
|
||||
[DeletedAt] [datetime2](7) NULL,
|
||||
[DeletedById] [uniqueidentifier] NULL,
|
||||
[DeletedByFullName] [nvarchar](max) NULL,
|
||||
[DeletedByEmail] [nvarchar](max) NULL,
|
||||
[ParentId] [uniqueidentifier] NULL,
|
||||
[ReasonForAction] [nvarchar](max) NULL,
|
||||
[EnchantmentLevel] [int] NOT NULL,
|
||||
CONSTRAINT [PK_MagicalItem] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
GO
|
||||
|
||||
/****** Object: Table [enchantments].[EnchantmentFormula] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
SET ANSI_NULLS ON
|
||||
GO
|
||||
SET QUOTED_IDENTIFIER ON
|
||||
GO
|
||||
CREATE TABLE [enchantments].[EnchantmentFormula](
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[RealmId] [uniqueidentifier] NOT NULL,
|
||||
[ParentId] [uniqueidentifier] NULL,
|
||||
[DeletedAt] [datetime2](7) NULL,
|
||||
[DeletedById] [uniqueidentifier] NULL,
|
||||
[DeletedByFullName] [nvarchar](max) NULL,
|
||||
[DeletedByEmail] [nvarchar](max) NULL,
|
||||
[ReasonForAction] [nvarchar](max) NULL,
|
||||
[HelpId] [uniqueidentifier] NOT NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[CreatedAt] [datetime2](7) NOT NULL,
|
||||
[CreatedById] [uniqueidentifier] NULL,
|
||||
[CreatedByUsername] [nvarchar](max) NOT NULL,
|
||||
[FormulaTypeId] [int] NOT NULL,
|
||||
[Definition] [nvarchar](max) NOT NULL,
|
||||
[Name] [nvarchar](max) NOT NULL,
|
||||
[HasMultipleApplications] [bit] NOT NULL,
|
||||
[StepNumber] [int] NOT NULL,
|
||||
[Identifier] [int] NOT NULL,
|
||||
CONSTRAINT [PK_EnchantmentFormula] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
GO
|
||||
|
||||
/****** Object: Table [wizards].[Wizard] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
SET ANSI_NULLS ON
|
||||
GO
|
||||
SET QUOTED_IDENTIFIER ON
|
||||
GO
|
||||
CREATE TABLE [wizards].[Wizard](
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[HelpId] [uniqueidentifier] NOT NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[CreatedAt] [datetime2](7) NOT NULL,
|
||||
[CreatedById] [uniqueidentifier] NULL,
|
||||
[DeletedAt] [datetime2](7) NULL,
|
||||
[DeletedByEmail] [nvarchar](max) NULL,
|
||||
[DeletedByFullName] [nvarchar](max) NULL,
|
||||
[DeletedById] [uniqueidentifier] NULL,
|
||||
[ParentId] [uniqueidentifier] NULL,
|
||||
[CreatedByUsername] [nvarchar](max) NOT NULL,
|
||||
[Name] [nvarchar](255) NOT NULL,
|
||||
[Title] [nvarchar](255) NULL,
|
||||
[Biography] [nvarchar](max) NULL,
|
||||
[SpecialtySchool] [nvarchar](100) NULL,
|
||||
[PowerLevel] [int] NOT NULL,
|
||||
[JoinedGuildDate] [datetime2](7) NOT NULL,
|
||||
[IsActive] [bit] NOT NULL,
|
||||
[MagicalSignature] [nvarchar](max) NOT NULL,
|
||||
[TowerId] [uniqueidentifier] NOT NULL,
|
||||
[MentorId] [uniqueidentifier] NULL,
|
||||
[SpellbookNotes] [varchar](max) NULL,
|
||||
CONSTRAINT [PK_Wizard] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY],
|
||||
CONSTRAINT [AK_Wizard_HelpId] UNIQUE NONCLUSTERED
|
||||
(
|
||||
[HelpId] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
GO
|
||||
|
||||
/****** Object: Table [wizards].[WizardSpellbook] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
SET ANSI_NULLS ON
|
||||
GO
|
||||
SET QUOTED_IDENTIFIER ON
|
||||
GO
|
||||
CREATE TABLE [wizards].[WizardSpellbook](
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[HelpId] [uniqueidentifier] NOT NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[CreatedAt] [datetime2](7) NOT NULL,
|
||||
[CreatedById] [uniqueidentifier] NULL,
|
||||
[CreatedByUsername] [nvarchar](max) NOT NULL,
|
||||
[DeletedAt] [datetime2](7) NULL,
|
||||
[DeletedByEmail] [nvarchar](max) NULL,
|
||||
[DeletedByFullName] [nvarchar](max) NULL,
|
||||
[DeletedById] [uniqueidentifier] NULL,
|
||||
[SuccessRate] [decimal](18, 2) NOT NULL,
|
||||
[ManaCostReduction] [decimal](18, 2) NOT NULL,
|
||||
[CriticalBonus] [decimal](18, 2) NOT NULL,
|
||||
[PageNumber] [int] NOT NULL,
|
||||
[WizardId] [uniqueidentifier] NOT NULL,
|
||||
[TowerId] [uniqueidentifier] NOT NULL,
|
||||
[ParentId] [uniqueidentifier] NULL,
|
||||
[ReasonForAction] [nvarchar](max) NULL,
|
||||
[SpellId] [uniqueidentifier] NOT NULL,
|
||||
[EnchanterId] [uniqueidentifier] NOT NULL,
|
||||
[OrderNumber] [nvarchar](max) NOT NULL,
|
||||
[LearnedDate] [datetime2](7) NOT NULL,
|
||||
[IsMastered] [bit] NOT NULL,
|
||||
[MasteryCertificate] [nvarchar](max) NOT NULL,
|
||||
CONSTRAINT [PK_WizardSpellbook] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
GO
|
||||
|
||||
/****** Object: Table [artifacts].[MagicSchool] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
SET ANSI_NULLS ON
|
||||
GO
|
||||
SET QUOTED_IDENTIFIER ON
|
||||
GO
|
||||
CREATE TABLE [artifacts].[MagicSchool](
|
||||
[Id] [int] IDENTITY(1,1) NOT NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[DeletedAt] [datetime2](7) NULL,
|
||||
[Value] [nvarchar](max) NOT NULL,
|
||||
CONSTRAINT [PK_MagicSchool] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
GO
|
||||
|
||||
/****** Object: Table [artifacts].[ArtifactType] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
SET ANSI_NULLS ON
|
||||
GO
|
||||
SET QUOTED_IDENTIFIER ON
|
||||
GO
|
||||
CREATE TABLE [artifacts].[ArtifactType](
|
||||
[Id] [int] IDENTITY(1,1) NOT NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[DeletedAt] [datetime2](7) NULL,
|
||||
[Name] [nvarchar](max) NOT NULL,
|
||||
[Key] [nvarchar](max) NOT NULL,
|
||||
[ItemCategoryId] [int] NOT NULL,
|
||||
CONSTRAINT [PK_ArtifactType] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
GO
|
||||
|
||||
/****** Object: Table [artifacts].[AncientRelic] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
SET ANSI_NULLS ON
|
||||
GO
|
||||
SET QUOTED_IDENTIFIER ON
|
||||
GO
|
||||
CREATE TABLE [artifacts].[AncientRelic](
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[HelpId] [uniqueidentifier] NOT NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[CreatedAt] [datetime2](7) NOT NULL,
|
||||
[CreatedById] [uniqueidentifier] NULL,
|
||||
[CreatedByUsername] [nvarchar](max) NOT NULL,
|
||||
[DiscoveryDate] [datetime2](7) NULL,
|
||||
[VaultId] [uniqueidentifier] NULL,
|
||||
[AppraiserId] [uniqueidentifier] NULL,
|
||||
[NumberOfRunes] [int] NULL,
|
||||
[MagicalAura] [decimal](18, 2) NULL,
|
||||
[AuraReadingDeviceId] [uniqueidentifier] NULL,
|
||||
[PowerOutput] [decimal](18, 2) NULL,
|
||||
[PowerGaugeTypeId] [int] NULL,
|
||||
[AgeInCenturies] [decimal](18, 2) NULL,
|
||||
[CarbonDatingDeviceId] [uniqueidentifier] NULL,
|
||||
[HistoricalEra] [nvarchar](max) NULL,
|
||||
[EraVerificationMethod] [int] NULL,
|
||||
[Curse] [nvarchar](max) NULL,
|
||||
[CurseDetectorId] [uniqueidentifier] NULL,
|
||||
[CurseStrength] [decimal](18, 2) NULL,
|
||||
[ProtectionLevel] [int] NULL,
|
||||
[MagicalResonance] [decimal](18, 2) NULL,
|
||||
[ResonanceWithAdjustment] [decimal](18, 2) NULL,
|
||||
[AuthenticityVerified] [bit] NOT NULL,
|
||||
[VerificationWizardId] [uniqueidentifier] NULL,
|
||||
[RestorationNeeded] [bit] NOT NULL,
|
||||
[RestorationCost] [decimal](18, 2) NULL,
|
||||
[EstimatedValue] [decimal](18, 2) NULL,
|
||||
[MarketDemand] [decimal](18, 2) NULL,
|
||||
[ArtifactCatalogId] [uniqueidentifier] NULL,
|
||||
[OriginRealm] [nvarchar](max) NULL,
|
||||
[CreatorWizard] [nvarchar](max) NULL,
|
||||
[LegendaryStatus] [bit] NOT NULL,
|
||||
[ParentId] [uniqueidentifier] NULL,
|
||||
[IsSealed] [bit] NOT NULL,
|
||||
CONSTRAINT [PK_AncientRelic] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY],
|
||||
CONSTRAINT [AK_AncientRelic_HelpId] UNIQUE NONCLUSTERED
|
||||
(
|
||||
[HelpId] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
GO
|
||||
|
||||
/****** Object: Table [artifacts].[RelicPowerMeasurements] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
SET ANSI_NULLS ON
|
||||
GO
|
||||
SET QUOTED_IDENTIFIER ON
|
||||
GO
|
||||
CREATE TABLE [artifacts].[RelicPowerMeasurements](
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[HelpId] [uniqueidentifier] NOT NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[CreatedAt] [datetime2](7) NOT NULL,
|
||||
[CreatedById] [uniqueidentifier] NULL,
|
||||
[ParentId] [uniqueidentifier] NULL,
|
||||
[CreatedByUsername] [nvarchar](max) NOT NULL,
|
||||
[MagicalEnergyMeasured] [decimal](31, 15) NOT NULL,
|
||||
[AuraIntensityMeasured] [decimal](31, 15) NOT NULL,
|
||||
[ResonanceFrequencyMeasured] [decimal](31, 15) NOT NULL,
|
||||
[DimensionalFluxMeasured] [decimal](31, 15) NOT NULL,
|
||||
[MagicalEnergyCorrection] [decimal](31, 15) NULL,
|
||||
[AuraIntensityCorrection] [decimal](31, 15) NULL,
|
||||
[ResonanceFrequencyCorrection] [decimal](31, 15) NULL,
|
||||
[DimensionalFluxCorrection] [decimal](31, 15) NULL,
|
||||
[MagicalEnergyCalculated] [decimal](31, 15) NULL,
|
||||
[AuraIntensityCalculated] [decimal](31, 15) NULL,
|
||||
[ResonanceFrequencyCalculated] [decimal](31, 15) NULL,
|
||||
[DimensionalFluxCalculated] [decimal](31, 15) NULL,
|
||||
[MagicalEnergyUncertainty] [decimal](31, 15) NULL,
|
||||
[AuraIntensityUncertainty] [decimal](31, 15) NULL,
|
||||
[ResonanceFrequencyUncertainty] [decimal](31, 15) NULL,
|
||||
[DimensionalFluxUncertainty] [decimal](31, 15) NULL,
|
||||
[MagicalEnergyDrift] [decimal](31, 15) NULL,
|
||||
[AuraIntensityDrift] [decimal](31, 15) NULL,
|
||||
[ResonanceFrequencyDrift] [decimal](31, 15) NULL,
|
||||
[DimensionalFluxDrift] [decimal](31, 15) NULL,
|
||||
[AncientRelicId] [uniqueidentifier] NULL,
|
||||
CONSTRAINT [PK_RelicPowerMeasurements] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
GO
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
// Get unique schemas from parsed tables
|
||||
const foundSchemas = [
|
||||
...new Set(result.tables.map((t) => t.schema || 'dbo')),
|
||||
];
|
||||
|
||||
// Verify we found tables in multiple schemas
|
||||
expect(foundSchemas.length).toBeGreaterThan(1);
|
||||
expect(foundSchemas).toContain('spellcasting');
|
||||
expect(foundSchemas).toContain('enchantments');
|
||||
expect(foundSchemas).toContain('wizards');
|
||||
expect(foundSchemas).toContain('artifacts');
|
||||
|
||||
// Check for some specific tables we know should exist
|
||||
expect(
|
||||
result.tables.some(
|
||||
(t) => t.name === 'Spell' && t.schema === 'spellcasting'
|
||||
)
|
||||
).toBe(true);
|
||||
expect(
|
||||
result.tables.some(
|
||||
(t) => t.name === 'SpellCasting' && t.schema === 'spellcasting'
|
||||
)
|
||||
).toBe(true);
|
||||
expect(
|
||||
result.tables.some(
|
||||
(t) => t.name === 'Wizard' && t.schema === 'wizards'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
// Check data types are handled correctly
|
||||
const spellTable = result.tables.find(
|
||||
(t) => t.name === 'Spell' && t.schema === 'spellcasting'
|
||||
);
|
||||
expect(spellTable).toBeDefined();
|
||||
|
||||
if (spellTable) {
|
||||
expect(spellTable.columns.find((c) => c.name === 'Id')?.type).toBe(
|
||||
'uniqueidentifier'
|
||||
);
|
||||
expect(
|
||||
spellTable.columns.find((c) => c.name === 'PowerLevel')?.type
|
||||
).toBe('decimal');
|
||||
expect(
|
||||
spellTable.columns.find((c) => c.name === 'IsDeleted')?.type
|
||||
).toBe('bit');
|
||||
expect(
|
||||
spellTable.columns.find((c) => c.name === 'CreatedAt')?.type
|
||||
).toBe('datetime2');
|
||||
|
||||
// Check nvarchar(max) fields
|
||||
const incantationField = spellTable.columns.find(
|
||||
(c) => c.name === 'Incantation'
|
||||
);
|
||||
expect(incantationField?.type).toBe('nvarchar');
|
||||
expect(incantationField?.typeArgs).toBe('max');
|
||||
|
||||
// Check varchar(max) fields
|
||||
const runicField = spellTable.columns.find(
|
||||
(c) => c.name === 'RunicInscription'
|
||||
);
|
||||
expect(runicField?.type).toBe('varchar');
|
||||
expect(runicField?.typeArgs).toBe('max');
|
||||
}
|
||||
|
||||
// Check IDENTITY columns
|
||||
const magicSchoolTable = result.tables.find(
|
||||
(t) => t.name === 'MagicSchool' && t.schema === 'artifacts'
|
||||
);
|
||||
expect(magicSchoolTable).toBeDefined();
|
||||
if (magicSchoolTable) {
|
||||
const idColumn = magicSchoolTable.columns.find(
|
||||
(c) => c.name === 'Id'
|
||||
);
|
||||
expect(idColumn?.increment).toBe(true);
|
||||
expect(idColumn?.type).toBe('int');
|
||||
}
|
||||
|
||||
// Check unique constraints converted to indexes
|
||||
const wizardTable = result.tables.find(
|
||||
(t) => t.name === 'Wizard' && t.schema === 'wizards'
|
||||
);
|
||||
expect(wizardTable).toBeDefined();
|
||||
if (wizardTable) {
|
||||
expect(wizardTable.indexes).toHaveLength(1);
|
||||
expect(wizardTable.indexes[0].unique).toBe(true);
|
||||
expect(wizardTable.indexes[0].columns).toContain('HelpId');
|
||||
expect(wizardTable.indexes[0].name).toBe('AK_Wizard_HelpId');
|
||||
}
|
||||
});
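// Illustrative helper (not part of the suite): the assertions above depend on typeArgs
// being either a numeric tuple such as [18, 2] or the literal string 'max'. Rendering
// that union back to T-SQL could look like this hypothetical sketch.
const renderSqlServerType = (type: string, typeArgs?: number[] | string): string => {
    if (typeArgs === 'max') return `${type}(max)`;
    if (Array.isArray(typeArgs) && typeArgs.length > 0) return `${type}(${typeArgs.join(', ')})`;
    return type;
};
// renderSqlServerType('nvarchar', 'max') -> 'nvarchar(max)'
// renderSqlServerType('decimal', [18, 2]) -> 'decimal(18, 2)'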
|
||||
|
||||
it('should handle ALTER TABLE ADD CONSTRAINT statements for magical artifacts', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [artifacts].[MagicalArtifact] (
|
||||
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
|
||||
[Name] [nvarchar](255) NOT NULL,
|
||||
[PowerLevel] [int] NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE [enchantments].[ArtifactEnchantment] (
|
||||
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
|
||||
[PrimaryArtifactId] [uniqueidentifier] NOT NULL,
|
||||
[SecondaryArtifactId] [uniqueidentifier] NOT NULL,
|
||||
[EnchantmentStrength] [decimal](18, 2) NOT NULL
|
||||
);
|
||||
|
||||
ALTER TABLE [enchantments].[ArtifactEnchantment]
|
||||
ADD CONSTRAINT [FK_ArtifactEnchantment_Primary]
|
||||
FOREIGN KEY ([PrimaryArtifactId])
|
||||
REFERENCES [artifacts].[MagicalArtifact]([Id]);
|
||||
|
||||
ALTER TABLE [enchantments].[ArtifactEnchantment]
|
||||
ADD CONSTRAINT [FK_ArtifactEnchantment_Secondary]
|
||||
FOREIGN KEY ([SecondaryArtifactId])
|
||||
REFERENCES [artifacts].[MagicalArtifact]([Id]);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(2);
|
||||
|
||||
// Check both foreign keys were parsed
|
||||
const primaryRel = result.relationships.find(
|
||||
(r) =>
|
||||
r.sourceColumn === 'PrimaryArtifactId' &&
|
||||
r.name === 'FK_ArtifactEnchantment_Primary'
|
||||
);
|
||||
expect(primaryRel).toBeDefined();
|
||||
expect(primaryRel?.sourceTable).toBe('ArtifactEnchantment');
|
||||
expect(primaryRel?.targetTable).toBe('MagicalArtifact');
|
||||
|
||||
const secondaryRel = result.relationships.find(
|
||||
(r) =>
|
||||
r.sourceColumn === 'SecondaryArtifactId' &&
|
||||
r.name === 'FK_ArtifactEnchantment_Secondary'
|
||||
);
|
||||
expect(secondaryRel).toBeDefined();
|
||||
expect(secondaryRel?.sourceTable).toBe('ArtifactEnchantment');
|
||||
expect(secondaryRel?.targetTable).toBe('MagicalArtifact');
|
||||
});
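// For reference (values taken from the DDL and assertions above; the full interface
// also carries table IDs): the first ALTER TABLE is expected to surface as a
// relationship roughly shaped like this.
const expectedPrimaryRelationship = {
    name: 'FK_ArtifactEnchantment_Primary',
    sourceSchema: 'enchantments',
    sourceTable: 'ArtifactEnchantment',
    sourceColumn: 'PrimaryArtifactId',
    targetSchema: 'artifacts',
    targetTable: 'MagicalArtifact',
    targetColumn: 'Id',
};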
|
||||
|
||||
it('should handle tables with many columns including nvarchar(max)', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [wizards].[SpellResearchEnvironment](
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[HelpId] [uniqueidentifier] NOT NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[CreatedAt] [datetime2](7) NOT NULL,
|
||||
[CreatedById] [uniqueidentifier] NULL,
|
||||
[CreatedByUsername] [nvarchar](max) NOT NULL,
|
||||
[ResearchDate] [datetime2](7) NULL,
|
||||
[LaboratoryId] [uniqueidentifier] NULL,
|
||||
[EvaluationCriteriaId] [uniqueidentifier] NULL,
|
||||
[NumberOfExperiments] [int] NULL,
|
||||
[ManaLevelStart] [decimal](18, 2) NULL,
|
||||
[ManaGaugeId] [uniqueidentifier] NULL,
|
||||
[ManaLevelEnd] [decimal](18, 2) NULL,
|
||||
[ManaGaugeTypeId] [int] NULL,
|
||||
[AetherDensityStart] [decimal](18, 2) NULL,
|
||||
[AetherGaugeId] [uniqueidentifier] NULL,
|
||||
[AetherDensityEnd] [decimal](18, 2) NULL,
|
||||
[AetherGaugeTypeId] [int] NULL,
|
||||
[MagicalFieldStart] [decimal](18, 2) NULL,
|
||||
[MagicalFieldGaugeId] [uniqueidentifier] NULL,
|
||||
[MagicalFieldEnd] [decimal](18, 2) NULL,
|
||||
[MagicalFieldGaugeTypeId] [int] NULL,
|
||||
[MagicalFieldWithCorrection] [decimal](18, 2) NULL,
|
||||
[AetherDensityWithCorrection] [decimal](18, 2) NULL,
|
||||
[ElementalBalanceStart] [decimal](18, 2) NULL,
|
||||
[ElementalBalanceGaugeId] [uniqueidentifier] NULL,
|
||||
[ElementalBalanceEnd] [decimal](18, 2) NULL,
|
||||
[ElementalBalanceGaugeTypeId] [int] NULL,
|
||||
[ManaLevelWithCorrection] [decimal](18, 2) NULL,
|
||||
[ElementalBalanceWithCorrection] [decimal](18, 2) NULL,
|
||||
[SpellResearchId] [uniqueidentifier] NULL,
|
||||
[AetherDensityValue] [decimal](18, 2) NULL,
|
||||
[MagicalFieldValue] [decimal](18, 2) NULL,
|
||||
[ManaLevelValue] [decimal](18, 2) NULL,
|
||||
[ElementalBalanceValue] [decimal](18, 2) NULL,
|
||||
[ParentId] [uniqueidentifier] NULL,
|
||||
[IsLocked] [bit] NOT NULL,
|
||||
CONSTRAINT [PK_SpellResearchEnvironment] PRIMARY KEY CLUSTERED ([Id] ASC),
|
||||
CONSTRAINT [AK_SpellResearchEnvironment_HelpId] UNIQUE NONCLUSTERED ([HelpId] ASC)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const table = result.tables[0];
|
||||
|
||||
// Should have all columns
|
||||
expect(table.columns.length).toBeGreaterThan(30);
|
||||
|
||||
// Check nvarchar(max) handling
|
||||
expect(
|
||||
table.columns.find((c) => c.name === 'CreatedByUsername')?.type
|
||||
).toBe('nvarchar');
|
||||
|
||||
// Check decimal precision handling
|
||||
const decimalColumn = table.columns.find(
|
||||
(c) => c.name === 'ManaLevelStart'
|
||||
);
|
||||
expect(decimalColumn?.type).toBe('decimal');
|
||||
expect(decimalColumn?.typeArgs).toEqual([18, 2]);
|
||||
|
||||
// Check unique constraint was converted to index
|
||||
expect(table.indexes).toHaveLength(1);
|
||||
expect(table.indexes[0].name).toBe(
|
||||
'AK_SpellResearchEnvironment_HelpId'
|
||||
);
|
||||
expect(table.indexes[0].unique).toBe(true);
|
||||
expect(table.indexes[0].columns).toContain('HelpId');
|
||||
});
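// Sketch of the index record the last three assertions describe once the UNIQUE
// constraint is converted (field names follow the SQLIndex shape used in these tests).
const expectedUniqueIndex = {
    name: 'AK_SpellResearchEnvironment_HelpId',
    columns: ['HelpId'],
    unique: true,
};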

it('should handle complex decimal types like decimal(31, 15)', async () => {
const sql = `
CREATE TABLE [artifacts].[RelicPowerCalculatedValues](
[Id] [uniqueidentifier] NOT NULL,
[MagicalEnergyMeasured] [decimal](31, 15) NOT NULL,
[AuraIntensityMeasured] [decimal](31, 15) NOT NULL,
[ResonanceFrequencyMeasured] [decimal](31, 15) NOT NULL,
[DimensionalFluxMeasured] [decimal](31, 15) NOT NULL,
[MagicalEnergyCorrection] [decimal](31, 15) NULL,
[AuraIntensityCorrection] [decimal](31, 15) NULL,
[ResonanceFrequencyCorrection] [decimal](31, 15) NULL,
[DimensionalFluxCorrection] [decimal](31, 15) NULL,
CONSTRAINT [PK_RelicPowerCalculatedValues] PRIMARY KEY CLUSTERED ([Id] ASC)
);
`;

const result = await fromSQLServer(sql);

expect(result.tables).toHaveLength(1);
const table = result.tables[0];

// Check high precision decimal handling
const magicalEnergyColumn = table.columns.find(
(c) => c.name === 'MagicalEnergyMeasured'
);
expect(magicalEnergyColumn?.type).toBe('decimal');
expect(magicalEnergyColumn?.typeArgs).toEqual([31, 15]);
});

it('should handle IDENTITY columns in artifact lookup tables', async () => {
const sql = `
CREATE TABLE [artifacts].[SpellComponent](
[Id] [int] IDENTITY(1,1) NOT NULL,
[IsDeleted] [bit] NOT NULL,
[DeletedAt] [datetime2](7) NULL,
[ComponentName] [nvarchar](max) NOT NULL,
CONSTRAINT [PK_SpellComponent] PRIMARY KEY CLUSTERED ([Id] ASC)
);

CREATE TABLE [artifacts].[RuneType](
[Id] [int] IDENTITY(1,1) NOT NULL,
[IsDeleted] [bit] NOT NULL,
[DeletedAt] [datetime2](7) NULL,
[Name] [nvarchar](max) NOT NULL,
[Symbol] [nvarchar](max) NOT NULL,
[MagicSchoolId] [int] NOT NULL,
CONSTRAINT [PK_RuneType] PRIMARY KEY CLUSTERED ([Id] ASC)
);
`;

const result = await fromSQLServer(sql);

expect(result.tables).toHaveLength(2);

// Both tables should have IDENTITY columns
result.tables.forEach((table) => {
const idColumn = table.columns.find((c) => c.name === 'Id');
expect(idColumn?.increment).toBe(true);
expect(idColumn?.type).toBe('int');
});
});
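// Rough sketch of the IDENTITY handling these assertions rely on: a definition like
// "[Id] [int] IDENTITY(1,1) NOT NULL" should surface as { type: 'int', increment: true }.
// The check below mirrors the kind of regex the parser uses (illustrative only).
const looksLikeIdentityColumn = (definition: string): boolean =>
    /IDENTITY(?:\s*\(\s*\d+\s*,\s*\d+\s*\))?/i.test(definition);
// looksLikeIdentityColumn('[Id] [int] IDENTITY(1,1) NOT NULL') -> true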
|
||||
|
||||
it('should parse all table constraints with complex WITH options', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [dbo].[MagicalRegistry](
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[RegistrationCode] [nvarchar](50) NOT NULL,
|
||||
[PowerLevel] [int] NOT NULL,
|
||||
CONSTRAINT [PK_MagicalRegistry] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY],
|
||||
CONSTRAINT [UQ_MagicalRegistry_Code] UNIQUE NONCLUSTERED
|
||||
(
|
||||
[RegistrationCode] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const table = result.tables[0];
|
||||
|
||||
// Primary key should be set
|
||||
expect(table.columns.find((c) => c.name === 'Id')?.primaryKey).toBe(
|
||||
true
|
||||
);
|
||||
|
||||
// Unique constraint should be converted to index
|
||||
expect(table.indexes).toHaveLength(1);
|
||||
expect(table.indexes[0].unique).toBe(true);
|
||||
expect(table.indexes[0].columns).toContain('RegistrationCode');
|
||||
});
|
||||
});
|
||||
@@ -1,253 +0,0 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromSQLServer } from '../sqlserver';
|
||||
|
||||
describe('SQL Server Foreign Key Relationship Tests', () => {
|
||||
it('should properly link foreign key relationships with correct table IDs', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [magic].[schools] (
|
||||
[id] [uniqueidentifier] PRIMARY KEY,
|
||||
[name] [nvarchar](100) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE [magic].[wizards] (
|
||||
[id] [uniqueidentifier] PRIMARY KEY,
|
||||
[school_id] [uniqueidentifier] NOT NULL,
|
||||
[name] [nvarchar](100) NOT NULL
|
||||
);
|
||||
|
||||
ALTER TABLE [magic].[wizards] WITH CHECK ADD CONSTRAINT [FK_wizards_schools]
|
||||
FOREIGN KEY ([school_id]) REFERENCES [magic].[schools]([id]);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
// Check tables are parsed
|
||||
expect(result.tables).toHaveLength(2);
|
||||
const schoolsTable = result.tables.find((t) => t.name === 'schools');
|
||||
const wizardsTable = result.tables.find((t) => t.name === 'wizards');
|
||||
expect(schoolsTable).toBeDefined();
|
||||
expect(wizardsTable).toBeDefined();
|
||||
|
||||
// Check relationship is parsed
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
const rel = result.relationships[0];
|
||||
|
||||
// Verify the relationship has proper table IDs
|
||||
expect(rel.sourceTableId).toBe(wizardsTable!.id);
|
||||
expect(rel.targetTableId).toBe(schoolsTable!.id);
|
||||
|
||||
// Verify other relationship properties
|
||||
expect(rel.sourceTable).toBe('wizards');
|
||||
expect(rel.targetTable).toBe('schools');
|
||||
expect(rel.sourceColumn).toBe('school_id');
|
||||
expect(rel.targetColumn).toBe('id');
|
||||
expect(rel.sourceSchema).toBe('magic');
|
||||
expect(rel.targetSchema).toBe('magic');
|
||||
});
|
||||
|
||||
it('should handle cross-schema foreign key relationships', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [users].[accounts] (
|
||||
[id] [int] PRIMARY KEY,
|
||||
[username] [nvarchar](50) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE [orders].[purchases] (
|
||||
[id] [int] PRIMARY KEY,
|
||||
[account_id] [int] NOT NULL
|
||||
);
|
||||
|
||||
ALTER TABLE [orders].[purchases] ADD CONSTRAINT [FK_purchases_accounts]
|
||||
FOREIGN KEY ([account_id]) REFERENCES [users].[accounts]([id]);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const rel = result.relationships[0];
|
||||
const accountsTable = result.tables.find(
|
||||
(t) => t.name === 'accounts' && t.schema === 'users'
|
||||
);
|
||||
const purchasesTable = result.tables.find(
|
||||
(t) => t.name === 'purchases' && t.schema === 'orders'
|
||||
);
|
||||
|
||||
// Verify cross-schema relationship IDs are properly linked
|
||||
expect(rel.sourceTableId).toBe(purchasesTable!.id);
|
||||
expect(rel.targetTableId).toBe(accountsTable!.id);
|
||||
});
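// Sketch of the schema-qualified lookup this test exercises: table IDs are tracked
// under "schema.table" keys, so the cross-schema reference [orders].[purchases] ->
// [users].[accounts] resolves through two distinct entries (IDs below are placeholders).
const exampleTableMap: Record<string, string> = {
    'users.accounts': 'accounts-id',
    'orders.purchases': 'purchases-id',
};
const resolvedTargetTableId = exampleTableMap['users.accounts']; // -> 'accounts-id'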
|
||||
|
||||
it('should parse complex foreign keys from magical realm database with proper table IDs', async () => {
|
||||
// Fantasy-themed SQL with multiple schemas and relationships
|
||||
const sql = `
|
||||
-- Spell casting schema
|
||||
CREATE SCHEMA [spellcasting];
|
||||
GO
|
||||
|
||||
-- Create spell table
|
||||
CREATE TABLE [spellcasting].[Spell] (
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[Name] [nvarchar](255) NOT NULL,
|
||||
[School] [nvarchar](100) NOT NULL,
|
||||
[Level] [int] NOT NULL,
|
||||
[Description] [nvarchar](max) NOT NULL,
|
||||
CONSTRAINT [PK_Spell] PRIMARY KEY CLUSTERED ([Id] ASC)
|
||||
);
|
||||
GO
|
||||
|
||||
-- Create spell casting process table
|
||||
CREATE TABLE [spellcasting].[SpellCastingProcess] (
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[SpellId] [uniqueidentifier] NOT NULL,
|
||||
[WizardId] [uniqueidentifier] NOT NULL,
|
||||
[CastingDate] [datetime2](7) NOT NULL,
|
||||
[SuccessRate] [decimal](18, 2) NOT NULL,
|
||||
[ManaCost] [int] NOT NULL,
|
||||
[Notes] [nvarchar](max) NULL,
|
||||
CONSTRAINT [PK_SpellCastingProcess] PRIMARY KEY CLUSTERED ([Id] ASC)
|
||||
);
|
||||
GO
|
||||
|
||||
-- Wizards schema
|
||||
CREATE SCHEMA [wizards];
|
||||
GO
|
||||
|
||||
-- Create wizard table
|
||||
CREATE TABLE [wizards].[Wizard] (
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[Name] [nvarchar](255) NOT NULL,
|
||||
[Title] [nvarchar](100) NULL,
|
||||
[Level] [int] NOT NULL,
|
||||
[Specialization] [nvarchar](100) NULL,
|
||||
CONSTRAINT [PK_Wizard] PRIMARY KEY CLUSTERED ([Id] ASC)
|
||||
);
|
||||
GO
|
||||
|
||||
-- Create wizard apprentice table
|
||||
CREATE TABLE [wizards].[Apprentice] (
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[WizardId] [uniqueidentifier] NOT NULL,
|
||||
[MentorId] [uniqueidentifier] NOT NULL,
|
||||
[StartDate] [datetime2](7) NOT NULL,
|
||||
[EndDate] [datetime2](7) NULL,
|
||||
CONSTRAINT [PK_Apprentice] PRIMARY KEY CLUSTERED ([Id] ASC)
|
||||
);
|
||||
GO
|
||||
|
||||
-- Add foreign key constraints
|
||||
ALTER TABLE [spellcasting].[SpellCastingProcess]
|
||||
ADD CONSTRAINT [FK_SpellCastingProcess_Spell]
|
||||
FOREIGN KEY ([SpellId])
|
||||
REFERENCES [spellcasting].[Spell]([Id]);
|
||||
GO
|
||||
|
||||
ALTER TABLE [spellcasting].[SpellCastingProcess]
|
||||
ADD CONSTRAINT [FK_SpellCastingProcess_Wizard]
|
||||
FOREIGN KEY ([WizardId])
|
||||
REFERENCES [wizards].[Wizard]([Id]);
|
||||
GO
|
||||
|
||||
ALTER TABLE [wizards].[Apprentice]
|
||||
ADD CONSTRAINT [FK_Apprentice_Wizard]
|
||||
FOREIGN KEY ([WizardId])
|
||||
REFERENCES [wizards].[Wizard]([Id]);
|
||||
GO
|
||||
|
||||
ALTER TABLE [wizards].[Apprentice]
|
||||
ADD CONSTRAINT [FK_Apprentice_Mentor]
|
||||
FOREIGN KEY ([MentorId])
|
||||
REFERENCES [wizards].[Wizard]([Id]);
|
||||
GO
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
// Debug output
|
||||
console.log('Total tables:', result.tables.length);
|
||||
console.log('Total relationships:', result.relationships.length);
|
||||
|
||||
// Check if we have the expected number of tables and relationships
|
||||
expect(result.tables).toHaveLength(4);
|
||||
expect(result.relationships).toHaveLength(4);
|
||||
|
||||
// Check a specific relationship we know should exist
|
||||
const spellCastingRel = result.relationships.find(
|
||||
(r) =>
|
||||
r.sourceTable === 'SpellCastingProcess' &&
|
||||
r.targetTable === 'Spell' &&
|
||||
r.sourceColumn === 'SpellId'
|
||||
);
|
||||
|
||||
expect(spellCastingRel).toBeDefined();
|
||||
|
||||
if (spellCastingRel) {
|
||||
// Find the corresponding tables
|
||||
const spellTable = result.tables.find(
|
||||
(t) => t.name === 'Spell' && t.schema === 'spellcasting'
|
||||
);
|
||||
const spellCastingProcessTable = result.tables.find(
|
||||
(t) =>
|
||||
t.name === 'SpellCastingProcess' &&
|
||||
t.schema === 'spellcasting'
|
||||
);
|
||||
|
||||
console.log('SpellCastingProcess relationship:', {
|
||||
sourceTableId: spellCastingRel.sourceTableId,
|
||||
targetTableId: spellCastingRel.targetTableId,
|
||||
spellCastingProcessTableId: spellCastingProcessTable?.id,
|
||||
spellTableId: spellTable?.id,
|
||||
isSourceIdValid:
|
||||
spellCastingRel.sourceTableId ===
|
||||
spellCastingProcessTable?.id,
|
||||
isTargetIdValid:
|
||||
spellCastingRel.targetTableId === spellTable?.id,
|
||||
});
|
||||
|
||||
// Verify the IDs are properly linked
|
||||
expect(spellCastingRel.sourceTableId).toBeTruthy();
|
||||
expect(spellCastingRel.targetTableId).toBeTruthy();
|
||||
expect(spellCastingRel.sourceTableId).toBe(
|
||||
spellCastingProcessTable!.id
|
||||
);
|
||||
expect(spellCastingRel.targetTableId).toBe(spellTable!.id);
|
||||
}
|
||||
|
||||
// Check the apprentice self-referencing relationships
|
||||
const apprenticeWizardRel = result.relationships.find(
|
||||
(r) =>
|
||||
r.sourceTable === 'Apprentice' &&
|
||||
r.targetTable === 'Wizard' &&
|
||||
r.sourceColumn === 'WizardId'
|
||||
);
|
||||
|
||||
const apprenticeMentorRel = result.relationships.find(
|
||||
(r) =>
|
||||
r.sourceTable === 'Apprentice' &&
|
||||
r.targetTable === 'Wizard' &&
|
||||
r.sourceColumn === 'MentorId'
|
||||
);
|
||||
|
||||
expect(apprenticeWizardRel).toBeDefined();
|
||||
expect(apprenticeMentorRel).toBeDefined();
|
||||
|
||||
// Check that all relationships have valid table IDs
|
||||
const relationshipsWithMissingIds = result.relationships.filter(
|
||||
(r) =>
|
||||
!r.sourceTableId ||
|
||||
!r.targetTableId ||
|
||||
r.sourceTableId === '' ||
|
||||
r.targetTableId === ''
|
||||
);
|
||||
|
||||
if (relationshipsWithMissingIds.length > 0) {
|
||||
console.log(
|
||||
'Relationships with missing IDs:',
|
||||
relationshipsWithMissingIds.slice(0, 5)
|
||||
);
|
||||
}
|
||||
|
||||
expect(relationshipsWithMissingIds).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
@@ -1,198 +0,0 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromSQLServer } from '../sqlserver';
|
||||
import { convertToChartDBDiagram } from '../../../common';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
|
||||
describe('SQL Server varchar(max) and nvarchar(max) preservation', () => {
|
||||
it('should preserve varchar(max) and nvarchar(max) in column definitions', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [dbo].[magical_texts] (
|
||||
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
|
||||
[Title] [nvarchar](255) NOT NULL,
|
||||
[Description] [nvarchar](max) NULL,
|
||||
[Content] [varchar](max) NOT NULL,
|
||||
[ShortNote] [varchar](100) NULL,
|
||||
[Metadata] [nvarchar](4000) NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const table = result.tables[0];
|
||||
expect(table.columns).toHaveLength(6);
|
||||
|
||||
// Check that max is preserved in typeArgs
|
||||
const descriptionCol = table.columns.find(
|
||||
(c) => c.name === 'Description'
|
||||
);
|
||||
expect(descriptionCol).toBeDefined();
|
||||
expect(descriptionCol?.type).toBe('nvarchar');
|
||||
expect(descriptionCol?.typeArgs).toBe('max');
|
||||
|
||||
const contentCol = table.columns.find((c) => c.name === 'Content');
|
||||
expect(contentCol).toBeDefined();
|
||||
expect(contentCol?.type).toBe('varchar');
|
||||
expect(contentCol?.typeArgs).toBe('max');
|
||||
|
||||
// Check that numeric lengths are preserved as arrays
|
||||
const titleCol = table.columns.find((c) => c.name === 'Title');
|
||||
expect(titleCol).toBeDefined();
|
||||
expect(titleCol?.type).toBe('nvarchar');
|
||||
expect(titleCol?.typeArgs).toEqual([255]);
|
||||
|
||||
const shortNoteCol = table.columns.find((c) => c.name === 'ShortNote');
|
||||
expect(shortNoteCol).toBeDefined();
|
||||
expect(shortNoteCol?.type).toBe('varchar');
|
||||
expect(shortNoteCol?.typeArgs).toEqual([100]);
|
||||
});
|
||||
|
||||
it('should convert varchar(max) to characterMaximumLength field in diagram', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [dbo].[spell_scrolls] (
|
||||
[Id] [int] IDENTITY(1,1) PRIMARY KEY,
|
||||
[SpellName] [nvarchar](50) NOT NULL,
|
||||
[Incantation] [nvarchar](max) NOT NULL,
|
||||
[Instructions] [varchar](max) NULL,
|
||||
[PowerLevel] [decimal](10, 2) NOT NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
const diagram = convertToChartDBDiagram(
|
||||
result,
|
||||
DatabaseType.SQL_SERVER,
|
||||
DatabaseType.SQL_SERVER
|
||||
);
|
||||
|
||||
expect(diagram.tables).toBeDefined();
|
||||
expect(diagram.tables).toHaveLength(1);
|
||||
const table = diagram.tables![0];
|
||||
|
||||
// Check that 'max' is preserved in characterMaximumLength
|
||||
const incantationField = table.fields.find(
|
||||
(f) => f.name === 'Incantation'
|
||||
);
|
||||
expect(incantationField).toBeDefined();
|
||||
expect(incantationField?.characterMaximumLength).toBe('max');
|
||||
|
||||
const instructionsField = table.fields.find(
|
||||
(f) => f.name === 'Instructions'
|
||||
);
|
||||
expect(instructionsField).toBeDefined();
|
||||
expect(instructionsField?.characterMaximumLength).toBe('max');
|
||||
|
||||
// Check that numeric lengths are preserved
|
||||
const spellNameField = table.fields.find((f) => f.name === 'SpellName');
|
||||
expect(spellNameField).toBeDefined();
|
||||
expect(spellNameField?.characterMaximumLength).toBe('50');
|
||||
|
||||
// Check decimal precision/scale
|
||||
const powerLevelField = table.fields.find(
|
||||
(f) => f.name === 'PowerLevel'
|
||||
);
|
||||
expect(powerLevelField).toBeDefined();
|
||||
expect(powerLevelField?.precision).toBe(10);
|
||||
expect(powerLevelField?.scale).toBe(2);
|
||||
});
|
||||
|
||||
it('should handle mixed varchar types with schema and relationships', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [content].[authors] (
|
||||
[Id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[Name] [nvarchar](100) NOT NULL,
|
||||
[Bio] [nvarchar](max) NULL
|
||||
);
|
||||
|
||||
CREATE TABLE [content].[books] (
|
||||
[Id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[AuthorId] [uniqueidentifier] NOT NULL,
|
||||
[Title] [nvarchar](500) NOT NULL,
|
||||
[Summary] [nvarchar](max) NULL,
|
||||
[FullText] [varchar](max) NOT NULL,
|
||||
[ISBN] [varchar](13) NULL,
|
||||
CONSTRAINT [FK_books_authors] FOREIGN KEY ([AuthorId]) REFERENCES [content].[authors]([Id])
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
|
||||
// Check authors table
|
||||
const authorsTable = result.tables.find((t) => t.name === 'authors');
|
||||
expect(authorsTable).toBeDefined();
|
||||
|
||||
const bioCol = authorsTable?.columns.find((c) => c.name === 'Bio');
|
||||
expect(bioCol?.typeArgs).toBe('max');
|
||||
|
||||
// Check books table
|
||||
const booksTable = result.tables.find((t) => t.name === 'books');
|
||||
expect(booksTable).toBeDefined();
|
||||
|
||||
const summaryCol = booksTable?.columns.find(
|
||||
(c) => c.name === 'Summary'
|
||||
);
|
||||
expect(summaryCol?.typeArgs).toBe('max');
|
||||
|
||||
const fullTextCol = booksTable?.columns.find(
|
||||
(c) => c.name === 'FullText'
|
||||
);
|
||||
expect(fullTextCol?.typeArgs).toBe('max');
|
||||
|
||||
const isbnCol = booksTable?.columns.find((c) => c.name === 'ISBN');
|
||||
expect(isbnCol?.typeArgs).toEqual([13]);
|
||||
|
||||
// Verify relationship is preserved
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
expect(result.relationships[0].sourceTable).toBe('books');
|
||||
expect(result.relationships[0].targetTable).toBe('authors');
|
||||
});
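// Sketch of what the inline FOREIGN KEY above should produce (constraint name and
// columns come from the DDL, tables from the assertions; schemas and IDs omitted).
const expectedBooksAuthorsRelationship = {
    name: 'FK_books_authors',
    sourceTable: 'books',
    sourceColumn: 'AuthorId',
    targetTable: 'authors',
    targetColumn: 'Id',
};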
|
||||
|
||||
it('should handle complex table with various SQL Server features including varchar(max)', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [reporting].[wizard_performance](\
|
||||
[Id] [bigint] IDENTITY(1,1) NOT NULL,
|
||||
[WizardId] [uniqueidentifier] NOT NULL,
|
||||
[EvaluationDate] [datetime2](7) NOT NULL,
|
||||
[PerformanceScore] [decimal](5, 2) NOT NULL,
|
||||
[Comments] [nvarchar](max) NULL,
|
||||
[DetailedReport] [varchar](max) NULL,
|
||||
[Signature] [varbinary](max) NULL,
|
||||
[ReviewerNotes] [text] NULL,
|
||||
[IsActive] [bit] NOT NULL DEFAULT 1,
|
||||
CONSTRAINT [PK_wizard_performance] PRIMARY KEY CLUSTERED ([Id] ASC)
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY];
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const table = result.tables[0];
|
||||
|
||||
// Check varchar(max) columns
|
||||
const commentsCol = table.columns.find((c) => c.name === 'Comments');
|
||||
expect(commentsCol?.type).toBe('nvarchar');
|
||||
expect(commentsCol?.typeArgs).toBe('max');
|
||||
|
||||
const reportCol = table.columns.find(
|
||||
(c) => c.name === 'DetailedReport'
|
||||
);
|
||||
expect(reportCol?.type).toBe('varchar');
|
||||
expect(reportCol?.typeArgs).toBe('max');
|
||||
|
||||
// Note: varbinary(max) should also be preserved but might need special handling
|
||||
const signatureCol = table.columns.find((c) => c.name === 'Signature');
|
||||
expect(signatureCol?.type).toBe('varbinary');
|
||||
// varbinary(max) handling might differ
|
||||
|
||||
// Check other column types
|
||||
const scoreCol = table.columns.find(
|
||||
(c) => c.name === 'PerformanceScore'
|
||||
);
|
||||
expect(scoreCol?.typeArgs).toEqual([5, 2]);
|
||||
|
||||
const idCol = table.columns.find((c) => c.name === 'Id');
|
||||
expect(idCol?.increment).toBe(true);
|
||||
});
|
||||
});
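// Illustration only: a downstream consumer can branch on the preserved 'max' value
// these tests check for; characterMaximumLength is a string here, matching the
// '50' / 'max' values asserted above.
const renderCharacterLength = (characterMaximumLength?: string): string =>
    characterMaximumLength ? `(${characterMaximumLength})` : '';
// renderCharacterLength('max') -> '(max)', renderCharacterLength('50') -> '(50)'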
|
||||
@@ -7,126 +7,111 @@ import type {
|
||||
SQLForeignKey,
|
||||
SQLASTNode,
|
||||
} from '../../common';
|
||||
import { buildSQLFromAST } from '../../common';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import type {
|
||||
TableReference,
|
||||
ColumnReference,
|
||||
ColumnDefinition,
|
||||
ConstraintDefinition,
|
||||
CreateTableStatement,
|
||||
CreateIndexStatement,
|
||||
AlterTableStatement,
|
||||
} from './sqlserver-common';
|
||||
import {
|
||||
parserOpts,
|
||||
extractColumnName,
|
||||
getTypeArgs,
|
||||
findTableWithSchemaSupport,
|
||||
} from './sqlserver-common';

/**
* Helper function to safely build SQL from AST nodes, handling null/undefined/invalid cases
*/
function safelyBuildSQLFromAST(ast: unknown): string | undefined {
if (!ast) return undefined;

// Make sure it's a valid AST node with a 'type' property
if (typeof ast === 'object' && ast !== null && 'type' in ast) {
return buildSQLFromAST(ast as SQLASTNode, DatabaseType.SQL_SERVER);
}

// Return string representation for non-AST objects
if (ast !== null && (typeof ast === 'string' || typeof ast === 'number')) {
return String(ast);
}

return undefined;
}
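// Usage sketch (illustrative values): default_val coming out of node-sql-parser can
// be an AST node, a bare string/number, or missing; safelyBuildSQLFromAST handles
// all three without throwing.
const defaultValExamples: unknown[] = ['getdate', 42, null, undefined];
const renderedDefaults = defaultValExamples.map((v) => safelyBuildSQLFromAST(v));
// -> ['getdate', '42', undefined, undefined]; objects carrying a 'type' field are
//    delegated to buildSQLFromAST instead of being stringified.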
|
||||
|
||||
/**
|
||||
* Preprocess SQL Server script to remove or modify parts that the parser can't handle
|
||||
*/
|
||||
function preprocessSQLServerScript(sqlContent: string): string {
|
||||
// 1. Remove USE statements
|
||||
sqlContent = sqlContent.replace(/USE\s+\[[^\]]+\]\s*;?/gi, '');
|
||||
|
||||
// 2. Remove SET statements
|
||||
sqlContent = sqlContent.replace(/SET\s+\w+\s+\w+\s*;?/gi, '');
|
||||
|
||||
// 3. Remove GO statements (batch separators)
|
||||
sqlContent = sqlContent.replace(/\bGO\b/gi, ';');
|
||||
|
||||
// 4. Remove CREATE SCHEMA statements
|
||||
sqlContent = sqlContent.replace(/CREATE\s+SCHEMA\s+\[[^\]]+\]\s*;?/gi, '');
|
||||
|
||||
// 5. Remove IF NOT EXISTS ... BEGIN ... END blocks
|
||||
// 1. Remove IF NOT EXISTS ... BEGIN ... END blocks (typically used for schema creation)
|
||||
sqlContent = sqlContent.replace(
|
||||
/IF\s+NOT\s+EXISTS\s*\([^)]+\)\s*BEGIN\s+[^;]+;\s*END;?/gi,
|
||||
''
|
||||
);
|
||||
|
||||
// 6. Remove any EXEC statements
|
||||
// 2. Remove any GO statements (batch separators)
|
||||
sqlContent = sqlContent.replace(/\bGO\b/gi, ';');
|
||||
|
||||
// 3. Remove any EXEC statements
|
||||
sqlContent = sqlContent.replace(/EXEC\s*\([^)]+\)\s*;?/gi, '');
|
||||
sqlContent = sqlContent.replace(/EXEC\s+[^;]+;/gi, '');
|
||||
|
||||
// 7. Replace any remaining procedural code blocks
|
||||
// 4. Replace any remaining procedural code blocks that might cause issues
|
||||
sqlContent = sqlContent.replace(
|
||||
/BEGIN\s+TRANSACTION|COMMIT\s+TRANSACTION|ROLLBACK\s+TRANSACTION/gi,
|
||||
'-- $&'
|
||||
);
|
||||
|
||||
// 8. Remove square brackets (SQL Server specific)
|
||||
sqlContent = sqlContent.replace(/\[/g, '');
|
||||
sqlContent = sqlContent.replace(/\]/g, '');
|
||||
// 5. Special handling for CREATE TABLE with reserved keywords as column names
|
||||
// Find CREATE TABLE statements
|
||||
const createTablePattern =
|
||||
/CREATE\s+TABLE\s+\[?([^\]]*)\]?\.?\[?([^\]]*)\]?\s*\(([^;]*)\)/gi;
|
||||
|
||||
// 9. Remove ON PRIMARY and TEXTIMAGE_ON PRIMARY clauses
|
||||
sqlContent = sqlContent.replace(
|
||||
/ON\s+PRIMARY(\s+TEXTIMAGE_ON\s+PRIMARY)?/gi,
|
||||
''
|
||||
createTablePattern,
|
||||
(_, schema, tableName, columnDefs) => {
|
||||
// Process column definitions to rename problematic columns
|
||||
let processedColumnDefs = columnDefs;
|
||||
|
||||
// Replace any column named "column" with "column_name"
|
||||
processedColumnDefs = processedColumnDefs.replace(
|
||||
/\[column\]/gi,
|
||||
'[column_name]'
|
||||
);
|
||||
|
||||
// Replace any column named "int" with "int_col"
|
||||
processedColumnDefs = processedColumnDefs.replace(
|
||||
/\[int\]/gi,
|
||||
'[int_col]'
|
||||
);
|
||||
|
||||
// Replace any column named "time" with "time_col"
|
||||
processedColumnDefs = processedColumnDefs.replace(
|
||||
/\[time\]/gi,
|
||||
'[time_col]'
|
||||
);
|
||||
|
||||
// Replace any column named "order" with "order_column"
|
||||
processedColumnDefs = processedColumnDefs.replace(
|
||||
/\[order\]/gi,
|
||||
'[order_column]'
|
||||
);
|
||||
|
||||
// Rebuild the CREATE TABLE statement
|
||||
return `CREATE TABLE [${schema || 'dbo'}].[${tableName}] (${processedColumnDefs})`;
|
||||
}
|
||||
);
|
||||
|
||||
// 10. Remove WITH options from constraints
|
||||
sqlContent = sqlContent.replace(/WITH\s*\([^)]+\)/gi, '');
|
||||
|
||||
// 11. Handle default value expressions with functions
|
||||
sqlContent = sqlContent.replace(/DEFAULT\s+NEWID\(\)/gi, "DEFAULT 'newid'");
|
||||
sqlContent = sqlContent.replace(
|
||||
/DEFAULT\s+NEWSEQUENTIALID\(\)/gi,
|
||||
"DEFAULT 'newsequentialid'"
|
||||
);
|
||||
sqlContent = sqlContent.replace(
|
||||
/DEFAULT\s+GETDATE\(\)/gi,
|
||||
"DEFAULT 'getdate'"
|
||||
);
|
||||
sqlContent = sqlContent.replace(
|
||||
/DEFAULT\s+SYSDATETIME\(\)/gi,
|
||||
"DEFAULT 'sysdatetime'"
|
||||
);
|
||||
// Don't replace numeric defaults or simple values
|
||||
// 6. Handle default value expressions with functions - replace with simpler defaults
|
||||
sqlContent = sqlContent.replace(/DEFAULT\s+'\([^)]+\)'/gi, "DEFAULT '0'");
|
||||
// Only replace function calls in DEFAULT, not numeric literals
|
||||
sqlContent = sqlContent.replace(
|
||||
/DEFAULT\s+(\w+)\s*\([^)]*\)/gi,
|
||||
"DEFAULT '0'"
|
||||
);
|
||||
sqlContent = sqlContent.replace(/DEFAULT\s+\([^)]+\)/gi, 'DEFAULT 0');
|
||||
|
||||
// 12. Replace SQL Server specific data types with standard types
|
||||
// Note: We preserve varchar(max) and nvarchar(max) for accurate export
|
||||
sqlContent = sqlContent.replace(/\buniqueid\b/gi, 'uniqueidentifier'); // Fix common typo
|
||||
sqlContent = sqlContent.replace(
|
||||
/\bdatetime2\s*\(\s*\d+\s*\)/gi,
|
||||
'datetime2'
|
||||
);
|
||||
sqlContent = sqlContent.replace(/\btime\s*\(\s*\d+\s*\)/gi, 'time');
|
||||
sqlContent = sqlContent.replace(
|
||||
/\bdatetimeoffset\s*\(\s*\d+\s*\)/gi,
|
||||
'datetimeoffset'
|
||||
);
|
||||
|
||||
// 13. Handle IDENTITY columns - convert to a simpler format
|
||||
sqlContent = sqlContent.replace(
|
||||
/IDENTITY\s*\(\s*\d+\s*,\s*\d+\s*\)/gi,
|
||||
'AUTO_INCREMENT'
|
||||
);
|
||||
sqlContent = sqlContent.replace(/IDENTITY/gi, 'AUTO_INCREMENT');
|
||||
|
||||
// 14. Replace CHECK constraints with comments (parser doesn't handle well)
|
||||
sqlContent = sqlContent.replace(
|
||||
/CHECK\s*\([^)]+\)/gi,
|
||||
'/* CHECK CONSTRAINT */'
|
||||
);
|
||||
|
||||
// 15. Handle FOREIGN KEY constraints within CREATE TABLE
|
||||
// Convert inline foreign key syntax to be more parser-friendly
|
||||
sqlContent = sqlContent.replace(
|
||||
/(\w+)\s+(\w+(?:\s*\(\s*\d+(?:\s*,\s*\d+)?\s*\))?)\s+(?:NOT\s+NULL\s+)?FOREIGN\s+KEY\s+REFERENCES\s+(\w+)\.?(\w+)\s*\((\w+)\)/gi,
|
||||
'$1 $2 /* FK TO $3.$4($5) */'
|
||||
);
|
||||
|
||||
// Handle standalone FOREIGN KEY constraints
|
||||
sqlContent = sqlContent.replace(
|
||||
/CONSTRAINT\s+(\w+)\s+FOREIGN\s+KEY\s*\((\w+)\)\s+REFERENCES\s+(\w+)\.?(\w+)?\s*\((\w+)\)(?:\s+ON\s+DELETE\s+(\w+))?(?:\s+ON\s+UPDATE\s+(\w+))?/gi,
|
||||
'/* CONSTRAINT $1 FK($2) REF $3.$4($5) */'
|
||||
);
|
||||
|
||||
// 16. Split into individual statements to handle them separately
|
||||
// 7. Split into individual statements to handle them separately
|
||||
const statements = sqlContent
|
||||
.split(';')
|
||||
.filter((stmt) => stmt.trim().length > 0);
|
||||
@@ -135,27 +120,30 @@ function preprocessSQLServerScript(sqlContent: string): string {
|
||||
const filteredStatements = statements.filter((stmt) => {
|
||||
const trimmedStmt = stmt.trim().toUpperCase();
|
||||
return (
|
||||
trimmedStmt.includes('CREATE TABLE') ||
|
||||
trimmedStmt.includes('CREATE UNIQUE INDEX') ||
|
||||
trimmedStmt.includes('CREATE INDEX') ||
|
||||
trimmedStmt.includes('ALTER TABLE')
|
||||
trimmedStmt.startsWith('CREATE TABLE') ||
|
||||
trimmedStmt.startsWith('CREATE UNIQUE INDEX') ||
|
||||
trimmedStmt.startsWith('CREATE INDEX') ||
|
||||
trimmedStmt.startsWith('ALTER TABLE')
|
||||
);
|
||||
});
|
||||
|
||||
return filteredStatements.join(';\n') + ';';
|
||||
return filteredStatements.join(';') + ';';
|
||||
}
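// Rough before/after illustration of this preprocessing pass (abridged; the exact
// output depends on which of the replacements above remain in the final version):
const rawSqlServerScript = [
    'SET ANSI_NULLS ON',
    'GO',
    'CREATE TABLE [dbo].[Spell] ([Id] [uniqueidentifier] NOT NULL);',
    'GO',
].join('\n');
// preprocessSQLServerScript(rawSqlServerScript) is expected to drop the SET statement,
// turn GO batch separators into ';', and keep only CREATE TABLE / CREATE INDEX /
// ALTER TABLE statements, yielding roughly:
// 'CREATE TABLE [dbo].[Spell] ([Id] [uniqueidentifier] NOT NULL);'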
|
||||

/**
* Manual parsing of ALTER TABLE ADD CONSTRAINT statements
* This is a fallback for when the node-sql-parser fails to properly parse the constraints
*/
function parseAlterTableAddConstraint(statements: string[]): SQLForeignKey[] {
function parseAlterTableAddConstraint(statements: string[]): {
fkData: SQLForeignKey[];
tableMap: Record<string, string>;
} {
const fkData: SQLForeignKey[] = [];
const tableMap: Record<string, string> = {};

// Regular expressions to extract information from ALTER TABLE statements
// Handle multi-line ALTER TABLE statements
const alterTableRegex =
/ALTER\s+TABLE\s+\[?([^\]]*)\]?\.?\[?([^\]]*)\]?\s+(?:WITH\s+CHECK\s+)?ADD\s+CONSTRAINT\s+\[?([^\]]*)\]?\s+FOREIGN\s+KEY\s*\(\[?([^\]]*)\]?\)\s*REFERENCES\s+\[?([^\]]*)\]?\.?\[?([^\]]*)\]?\s*\(\[?([^\]]*)\]?\)/is;
/ALTER\s+TABLE\s+\[?([^\]]*)\]?\.?\[?([^\]]*)\]?\s+ADD\s+CONSTRAINT\s+\[?([^\]]*)\]?\s+FOREIGN\s+KEY\s*\(\[?([^\]]*)\]?\)\s+REFERENCES\s+\[?([^\]]*)\]?\.?\[?([^\]]*)\]?\s*\(\[?([^\]]*)\]?\)/i;

for (const stmt of statements) {
const match = stmt.match(alterTableRegex);
@@ -171,6 +159,18 @@ function parseAlterTableAddConstraint(statements: string[]): SQLForeignKey[] {
targetColumn,
] = match;

// Generate IDs for tables if they don't already exist
const sourceTableKey = `${sourceSchema}.${sourceTable}`;
const targetTableKey = `${targetSchema}.${targetTable}`;

if (!tableMap[sourceTableKey]) {
tableMap[sourceTableKey] = generateId();
}

if (!tableMap[targetTableKey]) {
tableMap[targetTableKey] = generateId();
}

fkData.push({
name: constraintName,
sourceTable: sourceTable,
@@ -179,13 +179,13 @@ function parseAlterTableAddConstraint(statements: string[]): SQLForeignKey[] {
targetTable: targetTable,
targetSchema: targetSchema,
targetColumn: targetColumn,
sourceTableId: '', // Will be filled by linkRelationships
targetTableId: '', // Will be filled by linkRelationships
sourceTableId: tableMap[sourceTableKey],
targetTableId: tableMap[targetTableKey],
});
}
}

return fkData;
return { fkData, tableMap };
}
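// Worked example (illustrative; taken from the relationship tests): for the statement
// below, alterTableRegex captures schema, table, constraint, and column names, and the
// function returns the FK record together with generated IDs keyed by "schema.table".
const sampleAlterStatement =
    'ALTER TABLE [magic].[wizards] ADD CONSTRAINT [FK_wizards_schools] ' +
    'FOREIGN KEY ([school_id]) REFERENCES [magic].[schools]([id])';
// parseAlterTableAddConstraint([sampleAlterStatement]) is expected to yield roughly:
// {
//     fkData: [{ name: 'FK_wizards_schools', sourceTable: 'wizards', sourceColumn: 'school_id',
//                targetTable: 'schools', targetColumn: 'id', ... }],
//     tableMap: { 'magic.wizards': '<generated id>', 'magic.schools': '<generated id>' },
// }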
|
||||
|
||||
/**
|
||||
@@ -267,239 +267,6 @@ function normalizeSQLServerDataType(dataType: string): string {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Manual parsing of CREATE TABLE statements when node-sql-parser fails
|
||||
*/
|
||||
function parseCreateTableManually(
|
||||
statement: string,
|
||||
tables: SQLTable[],
|
||||
tableMap: Record<string, string>,
|
||||
relationships: SQLForeignKey[]
|
||||
): void {
|
||||
// Extract table name and schema (handling square brackets)
|
||||
const tableMatch = statement.match(
|
||||
/CREATE\s+TABLE\s+(?:\[?(\w+)\]?\.)??\[?(\w+)\]?\s*\(/i
|
||||
);
|
||||
if (!tableMatch) return;
|
||||
|
||||
const [, schema = 'dbo', tableName] = tableMatch;
|
||||
|
||||
// Generate table ID
|
||||
const tableId = generateId();
|
||||
const tableKey = `${schema}.${tableName}`;
|
||||
tableMap[tableKey] = tableId;
|
||||
|
||||
// Extract column definitions
|
||||
const columns: SQLColumn[] = [];
|
||||
const indexes: SQLIndex[] = [];
|
||||
|
||||
// Find the content between the parentheses
|
||||
const tableContentMatch = statement.match(
|
||||
/CREATE\s+TABLE\s+[^(]+\(([\s\S]*)\)\s*(?:ON\s+|$)/i
|
||||
);
|
||||
if (!tableContentMatch) return;
|
||||
|
||||
const tableContent = tableContentMatch[1];
|
||||
|
||||
// Split table content by commas but not within parentheses
|
||||
const parts = [];
|
||||
let current = '';
|
||||
let parenDepth = 0;
|
||||
|
||||
for (let i = 0; i < tableContent.length; i++) {
|
||||
const char = tableContent[i];
|
||||
if (char === '(') parenDepth++;
|
||||
else if (char === ')') parenDepth--;
|
||||
else if (char === ',' && parenDepth === 0) {
|
||||
parts.push(current.trim());
|
||||
current = '';
|
||||
continue;
|
||||
}
|
||||
current += char;
|
||||
}
|
||||
if (current.trim()) parts.push(current.trim());
|
||||
|
||||
// Process each part (column or constraint)
|
||||
for (const part of parts) {
|
||||
// Handle constraint definitions
|
||||
if (part.match(/^\s*CONSTRAINT/i)) {
|
||||
// Parse constraints
|
||||
const constraintMatch = part.match(
|
||||
/CONSTRAINT\s+\[?(\w+)\]?\s+(PRIMARY\s+KEY|UNIQUE|FOREIGN\s+KEY)/i
|
||||
);
|
||||
if (constraintMatch) {
|
||||
const [, constraintName, constraintType] = constraintMatch;
|
||||
|
||||
if (constraintType.match(/PRIMARY\s+KEY/i)) {
|
||||
// Extract columns from PRIMARY KEY constraint - handle multi-line format
|
||||
const pkColumnsMatch = part.match(
|
||||
/PRIMARY\s+KEY(?:\s+CLUSTERED)?\s*\(([\s\S]+?)\)/i
|
||||
);
|
||||
if (pkColumnsMatch) {
|
||||
const pkColumns = pkColumnsMatch[1]
|
||||
.split(',')
|
||||
.map((c) =>
|
||||
c
|
||||
.trim()
|
||||
.replace(/\[|\]|\s+(ASC|DESC)/gi, '')
|
||||
.trim()
|
||||
);
|
||||
pkColumns.forEach((col) => {
|
||||
const column = columns.find((c) => c.name === col);
|
||||
if (column) column.primaryKey = true;
|
||||
});
|
||||
}
|
||||
} else if (constraintType === 'UNIQUE') {
|
||||
// Extract columns from UNIQUE constraint
|
||||
const uniqueColumnsMatch = part.match(
|
||||
/UNIQUE(?:\s+NONCLUSTERED)?\s*\(([\s\S]+?)\)/i
|
||||
);
|
||||
if (uniqueColumnsMatch) {
|
||||
const uniqueColumns = uniqueColumnsMatch[1]
|
||||
.split(',')
|
||||
.map((c) =>
|
||||
c
|
||||
.trim()
|
||||
.replace(/\[|\]|\s+(ASC|DESC)/gi, '')
|
||||
.trim()
|
||||
);
|
||||
indexes.push({
|
||||
name: constraintName,
|
||||
columns: uniqueColumns,
|
||||
unique: true,
|
||||
});
|
||||
}
|
||||
} else if (constraintType.match(/FOREIGN\s+KEY/i)) {
|
||||
// Parse foreign key constraint
|
||||
const fkMatch = part.match(
|
||||
/FOREIGN\s+KEY\s*\(([^)]+)\)\s+REFERENCES\s+(?:\[?(\w+)\]?\.)??\[?(\w+)\]?\s*\(([^)]+)\)/i
|
||||
);
|
||||
if (fkMatch) {
|
||||
const [
|
||||
,
|
||||
sourceCol,
|
||||
targetSchema = 'dbo',
|
||||
targetTable,
|
||||
targetCol,
|
||||
] = fkMatch;
|
||||
relationships.push({
|
||||
name: constraintName,
|
||||
sourceTable: tableName,
|
||||
sourceSchema: schema,
|
||||
sourceColumn: sourceCol
|
||||
.trim()
|
||||
.replace(/\[|\]/g, ''),
|
||||
targetTable: targetTable,
|
||||
targetSchema: targetSchema,
|
||||
targetColumn: targetCol
|
||||
.trim()
|
||||
.replace(/\[|\]/g, ''),
|
||||
sourceTableId: tableId,
|
||||
targetTableId: '', // Will be filled later
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// Parse column definition - handle both numeric args and 'max'
|
||||
// Handle brackets around column names and types
|
||||
let columnMatch = part.match(
|
||||
/^\s*\[?(\w+)\]?\s+\[?(\w+)\]?(?:\s*\(\s*([\d,\s]+|max)\s*\))?(.*)$/i
|
||||
);
|
||||
|
||||
// If no match, try pattern for preprocessed types without parentheses
|
||||
if (!columnMatch) {
|
||||
columnMatch = part.match(/^\s*(\w+)\s+(\w+)\s+([\d,\s]+)\s+(.*)$/i);
|
||||
}
|
||||
|
||||
if (columnMatch) {
|
||||
const [, colName, baseType, typeArgs, rest] = columnMatch;
|
||||
|
||||
if (
|
||||
colName &&
|
||||
!colName.match(/^(PRIMARY|FOREIGN|UNIQUE|CHECK)$/i)
|
||||
) {
|
||||
// Check for inline foreign key
|
||||
const inlineFkMatch = rest.match(
|
||||
/FOREIGN\s+KEY\s+REFERENCES\s+(?:\[?(\w+)\]?\.)??\[?(\w+)\]?\s*\(([^)]+)\)/i
|
||||
);
|
||||
if (inlineFkMatch) {
|
||||
const [, targetSchema = 'dbo', targetTable, targetCol] =
|
||||
inlineFkMatch;
|
||||
relationships.push({
|
||||
name: `FK_${tableName}_${colName}`,
|
||||
sourceTable: tableName,
|
||||
sourceSchema: schema,
|
||||
sourceColumn: colName,
|
||||
targetTable: targetTable,
|
||||
targetSchema: targetSchema,
|
||||
targetColumn: targetCol.trim().replace(/\[|\]/g, ''),
|
||||
sourceTableId: tableId,
|
||||
targetTableId: '', // Will be filled later
|
||||
});
|
||||
}
|
||||
|
||||
const isPrimaryKey = !!rest.match(/PRIMARY\s+KEY/i);
|
||||
const isNotNull = !!rest.match(/NOT\s+NULL/i);
|
||||
const isIdentity = !!rest.match(
|
||||
/IDENTITY(?:\s*\(\s*\d+\s*,\s*\d+\s*\))?/i
|
||||
);
|
||||
const isUnique = !!rest.match(/UNIQUE/i);
|
||||
const defaultMatch = rest.match(/DEFAULT\s+([^,]+)/i);
|
||||
|
||||
// Parse type arguments
|
||||
let parsedTypeArgs: number[] | string | undefined;
|
||||
if (typeArgs) {
|
||||
if (typeArgs.toLowerCase() === 'max') {
|
||||
// Preserve 'max' keyword for varchar/nvarchar types
|
||||
parsedTypeArgs = 'max';
|
||||
} else {
|
||||
// Parse numeric args
|
||||
parsedTypeArgs = typeArgs
|
||||
.split(',')
|
||||
.map((arg) => parseInt(arg.trim()));
|
||||
}
|
||||
}
|
||||
|
||||
const column: SQLColumn = {
|
||||
name: colName,
|
||||
type: normalizeSQLServerDataType(baseType.trim()),
|
||||
nullable: !isNotNull && !isPrimaryKey,
|
||||
primaryKey: isPrimaryKey,
|
||||
unique: isUnique,
|
||||
increment: isIdentity,
|
||||
default: defaultMatch ? defaultMatch[1].trim() : undefined,
|
||||
};
|
||||
|
||||
// Add type arguments if present
|
||||
if (parsedTypeArgs) {
|
||||
if (typeof parsedTypeArgs === 'string') {
|
||||
// For 'max' keyword
|
||||
column.typeArgs = parsedTypeArgs;
|
||||
} else if (parsedTypeArgs.length > 0) {
|
||||
// For numeric arguments
|
||||
column.typeArgs = parsedTypeArgs;
|
||||
}
|
||||
}
|
||||
|
||||
columns.push(column);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add the table
|
||||
tables.push({
|
||||
id: tableId,
|
||||
name: tableName,
|
||||
schema: schema,
|
||||
columns,
|
||||
indexes,
|
||||
order: tables.length,
|
||||
});
|
||||
}
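// Quick illustration of the column pattern used above: bracketed definitions with a
// numeric length and with the 'max' keyword both decompose into name, base type,
// type arguments, and the trailing modifiers (capture groups 1-4).
const columnDefinitionPattern =
    /^\s*\[?(\w+)\]?\s+\[?(\w+)\]?(?:\s*\(\s*([\d,\s]+|max)\s*\))?(.*)$/i;
// '[Title] [nvarchar](255) NOT NULL'  -> ['Title', 'nvarchar', '255', ' NOT NULL']
// '[Content] [varchar](max) NOT NULL' -> ['Content', 'varchar', 'max', ' NOT NULL']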
|
||||
|
||||
/**
|
||||
* Parse SQL Server DDL scripts and extract database structure
|
||||
* @param sqlContent SQL Server DDL content as string
|
||||
@@ -513,130 +280,83 @@ export async function fromSQLServer(
const tableMap: Record<string, string> = {}; // Maps table name to its ID

try {
// First, handle ALTER TABLE statements for foreign keys
// Split by GO or semicolon for SQL Server
const statements = sqlContent
.split(/(?:GO\s*$|;\s*$)/im)
.filter((stmt) => stmt.trim().length > 0);
// Preprocess the SQL content to handle T-SQL specific syntax
const preprocessedSQL = preprocessSQLServerScript(sqlContent);

const statements = sqlContent
.split(';')
.filter((stmt) => stmt.trim().length > 0);
const alterTableStatements = statements.filter(
(stmt) =>
stmt.trim().toUpperCase().includes('ALTER TABLE') &&
stmt.trim().toUpperCase().startsWith('ALTER TABLE') &&
stmt.includes('FOREIGN KEY')
);

if (alterTableStatements.length > 0) {
const fkData = parseAlterTableAddConstraint(alterTableStatements);
const { fkData, tableMap: fkTableMap } =
parseAlterTableAddConstraint(alterTableStatements);

// Store table IDs from alter statements
Object.assign(tableMap, fkTableMap);

// Store foreign key relationships for later processing
relationships.push(...fkData);
}

// Parse CREATE TABLE statements manually first
const createTableStatements = statements.filter((stmt) =>
stmt.trim().toUpperCase().includes('CREATE TABLE')
);

for (const stmt of createTableStatements) {
parseCreateTableManually(stmt, tables, tableMap, relationships);
}
|
||||
|
||||
// Preprocess the SQL content for node-sql-parser
|
||||
const preprocessedSQL = preprocessSQLServerScript(sqlContent);
|
||||
|
||||
// Try to use node-sql-parser for additional parsing
|
||||
const { Parser } = await import('node-sql-parser');
|
||||
const parser = new Parser();
|
||||
let ast;
|
||||
try {
|
||||
const { Parser } = await import('node-sql-parser');
|
||||
const parser = new Parser();
|
||||
let ast;
|
||||
try {
|
||||
ast = parser.astify(preprocessedSQL, parserOpts);
|
||||
} catch {
|
||||
// Fallback: Try to parse each statement individually
|
||||
const statements = preprocessedSQL
|
||||
.split(';')
|
||||
.filter((stmt) => stmt.trim().length > 0);
|
||||
ast = [];
|
||||
ast = parser.astify(preprocessedSQL, parserOpts);
|
||||
} catch {
|
||||
// Fallback: Try to parse each statement individually
|
||||
const statements = preprocessedSQL
|
||||
.split(';')
|
||||
.filter((stmt) => stmt.trim().length > 0);
|
||||
ast = [];
|
||||
|
||||
for (const stmt of statements) {
|
||||
try {
|
||||
const stmtAst = parser.astify(stmt + ';', parserOpts);
|
||||
if (Array.isArray(stmtAst)) {
|
||||
ast.push(...stmtAst);
|
||||
} else if (stmtAst) {
|
||||
ast.push(stmtAst);
|
||||
}
|
||||
} catch {
|
||||
// Skip statements that can't be parsed
|
||||
for (const stmt of statements) {
|
||||
try {
|
||||
const stmtAst = parser.astify(stmt + ';', parserOpts);
|
||||
if (Array.isArray(stmtAst)) {
|
||||
ast.push(...stmtAst);
|
||||
} else if (stmtAst) {
|
||||
ast.push(stmtAst);
|
||||
}
|
||||
} catch {
|
||||
// Skip statements that can't be parsed
|
||||
}
|
||||
}
|
||||
|
||||
if (Array.isArray(ast) && ast.length > 0) {
|
||||
// Process each statement
|
||||
(ast as unknown as SQLASTNode[]).forEach((stmt) => {
|
||||
// Process CREATE INDEX statements
|
||||
if (stmt.type === 'create' && stmt.keyword === 'index') {
|
||||
processCreateIndex(
|
||||
stmt as CreateIndexStatement,
|
||||
tables
|
||||
);
|
||||
}
|
||||
// Process ALTER TABLE statements for non-FK constraints
|
||||
else if (
|
||||
stmt.type === 'alter' &&
|
||||
stmt.keyword === 'table'
|
||||
) {
|
||||
processAlterTable(
|
||||
stmt as AlterTableStatement,
|
||||
tables,
|
||||
relationships
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
} catch (parserError) {
|
||||
// If parser fails completely, continue with manual parsing results
|
||||
console.warn(
|
||||
'node-sql-parser failed, using manual parsing only:',
|
||||
parserError
|
||||
);
|
||||
}
|
||||
|
||||
// Parse CREATE INDEX statements manually
|
||||
const createIndexStatements = statements.filter(
|
||||
(stmt) =>
|
||||
stmt.trim().toUpperCase().includes('CREATE') &&
|
||||
stmt.trim().toUpperCase().includes('INDEX')
|
||||
);
|
||||
if (!Array.isArray(ast) || ast.length === 0) {
|
||||
throw new Error('Failed to parse SQL DDL - Empty or invalid AST');
|
||||
}
|
||||
|
||||
for (const stmt of createIndexStatements) {
|
||||
const indexMatch = stmt.match(
|
||||
/CREATE\s+(UNIQUE\s+)?INDEX\s+\[?(\w+)\]?\s+ON\s+(?:\[?(\w+)\]?\.)??\[?(\w+)\]?\s*\(([^)]+)\)/i
|
||||
);
|
||||
if (indexMatch) {
|
||||
const [
|
||||
,
|
||||
unique,
|
||||
indexName,
|
||||
schema = 'dbo',
|
||||
tableName,
|
||||
columnsStr,
|
||||
] = indexMatch;
|
||||
const table = tables.find(
|
||||
(t) => t.name === tableName && t.schema === schema
|
||||
// Process each statement
|
||||
(ast as unknown as SQLASTNode[]).forEach((stmt) => {
|
||||
// Process CREATE TABLE statements
|
||||
if (stmt.type === 'create' && stmt.keyword === 'table') {
|
||||
processCreateTable(
|
||||
stmt as CreateTableStatement,
|
||||
tables,
|
||||
tableMap,
|
||||
relationships
|
||||
);
|
||||
if (table) {
|
||||
const columns = columnsStr
|
||||
.split(',')
|
||||
.map((c) => c.trim().replace(/\[|\]/g, ''));
|
||||
table.indexes.push({
|
||||
name: indexName,
|
||||
columns,
|
||||
unique: !!unique,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
// Process CREATE INDEX statements
|
||||
else if (stmt.type === 'create' && stmt.keyword === 'index') {
|
||||
processCreateIndex(stmt as CreateIndexStatement, tables);
|
||||
}
|
||||
// Process ALTER TABLE statements
|
||||
else if (stmt.type === 'alter' && stmt.keyword === 'table') {
|
||||
processAlterTable(
|
||||
stmt as AlterTableStatement,
|
||||
tables,
|
||||
relationships
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
// Link relationships to ensure all targetTableId and sourceTableId fields are filled
|
||||
const validRelationships = linkRelationships(
|
||||
@@ -659,6 +379,233 @@ export async function fromSQLServer(
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Process a CREATE TABLE statement
|
||||
*/
|
||||
function processCreateTable(
|
||||
stmt: CreateTableStatement,
|
||||
tables: SQLTable[],
|
||||
tableMap: Record<string, string>,
|
||||
relationships: SQLForeignKey[]
|
||||
): void {
|
||||
let tableName = '';
|
||||
let schemaName = '';
|
||||
|
||||
// Extract table name and schema
|
||||
if (stmt.table && typeof stmt.table === 'object') {
|
||||
// Handle array of tables if needed
|
||||
if (Array.isArray(stmt.table) && stmt.table.length > 0) {
|
||||
const tableObj = stmt.table[0];
|
||||
tableName = tableObj.table || '';
|
||||
// SQL Server uses 'schema' or 'db' field
|
||||
schemaName = tableObj.schema || tableObj.db || '';
|
||||
} else {
|
||||
// Direct object reference
|
||||
const tableObj = stmt.table as TableReference;
|
||||
tableName = tableObj.table || '';
|
||||
schemaName = tableObj.schema || tableObj.db || '';
|
||||
}
|
||||
}
|
||||
|
||||
if (!tableName) {
|
||||
return;
|
||||
}
|
||||
|
||||
// If no schema specified, use default 'dbo' schema for SQL Server
|
||||
if (!schemaName) {
|
||||
schemaName = 'dbo';
|
||||
}
|
||||
|
||||
// Generate a unique ID for the table
|
||||
const tableId = generateId();
|
||||
const tableKey = `${schemaName ? schemaName + '.' : ''}${tableName}`;
|
||||
tableMap[tableKey] = tableId;
|
||||
|
||||
// Process table columns
|
||||
const columns: SQLColumn[] = [];
|
||||
const indexes: SQLIndex[] = [];
|
||||
|
||||
if (stmt.create_definitions && Array.isArray(stmt.create_definitions)) {
|
||||
stmt.create_definitions.forEach(
|
||||
(def: ColumnDefinition | ConstraintDefinition) => {
|
||||
if (def.resource === 'column') {
|
||||
// Process column definition
|
||||
const columnDef = def as ColumnDefinition;
|
||||
const columnName = extractColumnName(columnDef.column);
|
||||
const rawDataType = columnDef.definition?.dataType || '';
|
||||
const normalizedDataType =
|
||||
normalizeSQLServerDataType(rawDataType);
|
||||
|
||||
if (columnName) {
|
||||
// Check for SQL Server specific column properties
|
||||
const isPrimaryKey =
|
||||
columnDef.primary_key === 'primary key';
|
||||
|
||||
// For SQL Server, check for IDENTITY property in suffixes
|
||||
const hasIdentity = columnDef.definition?.suffix?.some(
|
||||
(suffix) =>
|
||||
suffix.toLowerCase().includes('identity')
|
||||
);
|
||||
|
||||
columns.push({
|
||||
name: columnName,
|
||||
type: normalizedDataType,
|
||||
nullable: columnDef.nullable?.type !== 'not null',
|
||||
primaryKey: isPrimaryKey,
|
||||
unique: columnDef.unique === 'unique',
|
||||
typeArgs: getTypeArgs(columnDef.definition),
|
||||
default: columnDef.default_val
|
||||
? safelyBuildSQLFromAST(columnDef.default_val)
|
||||
: undefined,
|
||||
increment: hasIdentity,
|
||||
});
|
||||
}
|
||||
} else if (def.resource === 'constraint') {
|
||||
// Handle constraint definitions
|
||||
const constraintDef = def as ConstraintDefinition;
|
||||
|
||||
// Handle PRIMARY KEY constraints
|
||||
if (constraintDef.constraint_type === 'primary key') {
|
||||
if (Array.isArray(constraintDef.definition)) {
|
||||
// Extract column names from primary key constraint
|
||||
for (const colDef of constraintDef.definition) {
|
||||
if (
|
||||
colDef &&
|
||||
typeof colDef === 'object' &&
|
||||
'type' in colDef &&
|
||||
colDef.type === 'column_ref' &&
|
||||
'column' in colDef
|
||||
) {
|
||||
const pkColumnName = extractColumnName(
|
||||
colDef as ColumnReference
|
||||
);
|
||||
// Find and mark the column as primary key
|
||||
const column = columns.find(
|
||||
(col) => col.name === pkColumnName
|
||||
);
|
||||
if (column) {
|
||||
column.primaryKey = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// Handle UNIQUE constraints
|
||||
else if (constraintDef.constraint_type === 'unique') {
|
||||
if (Array.isArray(constraintDef.definition)) {
|
||||
const uniqueColumns: string[] = [];
|
||||
// Extract column names from unique constraint
|
||||
for (const colDef of constraintDef.definition) {
|
||||
if (
|
||||
colDef &&
|
||||
typeof colDef === 'object' &&
|
||||
'type' in colDef &&
|
||||
colDef.type === 'column_ref' &&
|
||||
'column' in colDef
|
||||
) {
|
||||
const uniqueColumnName = extractColumnName(
|
||||
colDef as ColumnReference
|
||||
);
|
||||
uniqueColumns.push(uniqueColumnName);
|
||||
}
|
||||
}
|
||||
|
||||
// Add as an index
|
||||
if (uniqueColumns.length > 0) {
|
||||
indexes.push({
|
||||
name:
|
||||
constraintDef.constraint ||
|
||||
`unique_${tableName}_${uniqueColumns.join('_')}`,
|
||||
columns: uniqueColumns,
|
||||
unique: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
// Handle FOREIGN KEY constraints
|
||||
else if (
|
||||
constraintDef.constraint_type === 'foreign key' &&
|
||||
constraintDef.reference
|
||||
) {
|
||||
const reference = constraintDef.reference;
|
||||
if (
|
||||
reference &&
|
||||
reference.table &&
|
||||
reference.columns &&
|
||||
reference.columns.length > 0
|
||||
) {
|
||||
// Extract target table info
|
||||
const targetTable =
|
||||
reference.table as TableReference;
|
||||
const targetTableName = targetTable.table;
|
||||
const targetSchemaName =
|
||||
targetTable.schema || targetTable.db || 'dbo';
|
||||
|
||||
// Extract source column
|
||||
let sourceColumnName = '';
|
||||
if (
|
||||
Array.isArray(constraintDef.definition) &&
|
||||
constraintDef.definition.length > 0
|
||||
) {
|
||||
const sourceColDef =
|
||||
constraintDef.definition[0];
|
||||
if (
|
||||
sourceColDef &&
|
||||
typeof sourceColDef === 'object' &&
|
||||
'type' in sourceColDef &&
|
||||
sourceColDef.type === 'column_ref'
|
||||
) {
|
||||
sourceColumnName = extractColumnName(
|
||||
sourceColDef as ColumnReference
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Extract target column
|
||||
const targetColumnName = extractColumnName(
|
||||
reference.columns[0]
|
||||
);
|
||||
|
||||
if (
|
||||
sourceColumnName &&
|
||||
targetTableName &&
|
||||
targetColumnName
|
||||
) {
|
||||
// Create a foreign key relationship
|
||||
relationships.push({
|
||||
name:
|
||||
constraintDef.constraint ||
|
||||
`fk_${tableName}_${sourceColumnName}`,
|
||||
sourceTable: tableName,
|
||||
sourceSchema: schemaName,
|
||||
sourceColumn: sourceColumnName,
|
||||
targetTable: targetTableName,
|
||||
targetSchema: targetSchemaName,
|
||||
targetColumn: targetColumnName,
|
||||
sourceTableId: tableId,
|
||||
targetTableId: '', // Will be filled later
|
||||
updateAction: reference.on_update,
|
||||
deleteAction: reference.on_delete,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// Create the table object
|
||||
tables.push({
|
||||
id: tableId,
|
||||
name: tableName,
|
||||
schema: schemaName,
|
||||
columns,
|
||||
indexes,
|
||||
order: tables.length,
|
||||
});
|
||||
}
/**
* Process a CREATE INDEX statement
*/

File diff suppressed because it is too large
@@ -6,7 +6,6 @@ import type { DBTable } from '@/lib/domain/db-table';
|
||||
import { type DBField } from '@/lib/domain/db-field';
|
||||
import type { DBCustomType } from '@/lib/domain/db-custom-type';
|
||||
import { DBCustomTypeKind } from '@/lib/domain/db-custom-type';
|
||||
import { defaultSchemas } from '@/lib/data/default-schemas';
|
||||
|
||||
// Use DBCustomType for generating Enum DBML
|
||||
const generateEnumsDBML = (customTypes: DBCustomType[] | undefined): string => {
|
||||
@@ -211,17 +210,14 @@ export const sanitizeSQLforDBML = (sql: string): string => {
|
||||
|
||||
// Post-process DBML to convert separate Ref statements to inline refs
|
||||
const convertToInlineRefs = (dbml: string): string => {
|
||||
// Extract all Ref statements - Updated pattern to handle schema.table.field format
|
||||
// Matches both "table"."field" and "schema"."table"."field" formats
|
||||
// Extract all Ref statements - Corrected pattern
|
||||
const refPattern =
|
||||
/Ref\s+"([^"]+)"\s*:\s*(?:"([^"]+)"\.)?"([^"]+)"\."([^"]+)"\s*([<>*])\s*(?:"([^"]+)"\.)?"([^"]+)"\."([^"]+)"/g;
|
||||
/Ref\s+"([^"]+)"\s*:\s*"([^"]+)"\."([^"]+)"\s*([<>*])\s*"([^"]+)"\."([^"]+)"/g;
|
||||
const refs: Array<{
|
||||
refName: string;
|
||||
sourceSchema?: string;
|
||||
sourceTable: string;
|
||||
sourceField: string;
|
||||
direction: string;
|
||||
targetSchema?: string;
|
||||
targetTable: string;
|
||||
targetField: string;
|
||||
}> = [];
|
||||
@@ -230,52 +226,27 @@ const convertToInlineRefs = (dbml: string): string => {
|
||||
while ((match = refPattern.exec(dbml)) !== null) {
|
||||
refs.push({
|
||||
refName: match[1], // Reference name
|
||||
sourceSchema: match[2] || undefined, // Source schema (optional)
|
||||
sourceTable: match[3], // Source table
|
||||
sourceField: match[4], // Source field
|
||||
direction: match[5], // Direction (<, >)
|
||||
targetSchema: match[6] || undefined, // Target schema (optional)
|
||||
targetTable: match[7], // Target table
|
||||
targetField: match[8], // Target field
|
||||
sourceTable: match[2], // Source table
|
||||
sourceField: match[3], // Source field
|
||||
direction: match[4], // Direction (<, >)
|
||||
targetTable: match[5], // Target table
|
||||
targetField: match[6], // Target field
|
||||
});
|
||||
}
|
||||
|
||||
// Extract all table definitions - Support both quoted and bracketed table names
|
||||
// Extract all table definitions - Corrected pattern and handling
|
||||
const tables: {
|
||||
[key: string]: {
|
||||
start: number;
|
||||
end: number;
|
||||
content: string;
|
||||
fullMatch: string;
|
||||
};
|
||||
[key: string]: { start: number; end: number; content: string };
|
||||
} = {};
|
||||
// Updated pattern to handle various table name formats including schema.table
|
||||
const tablePattern =
|
||||
/Table\s+(?:"([^"]+)"(?:\."([^"]+)")?|(\[?[^\s[]+\]?\.\[?[^\s\]]+\]?)|(\[?[^\s[{]+\]?))\s*{([^}]*)}/g;
|
||||
const tablePattern = /Table\s+"([^"]+)"\s*{([^}]*)}/g; // Simpler pattern, assuming content doesn't have {}
|
||||
|
||||
let tableMatch;
|
||||
while ((tableMatch = tablePattern.exec(dbml)) !== null) {
|
||||
// Extract table name - handle schema.table format
|
||||
let tableName;
|
||||
if (tableMatch[1] && tableMatch[2]) {
|
||||
// Format: "schema"."table"
|
||||
tableName = `${tableMatch[1]}.${tableMatch[2]}`;
|
||||
} else if (tableMatch[1]) {
|
||||
// Format: "table" (no schema)
|
||||
tableName = tableMatch[1];
|
||||
} else {
|
||||
// Other formats
|
||||
tableName = tableMatch[3] || tableMatch[4];
|
||||
}
|
||||
|
||||
// Clean up any bracket syntax from table names
|
||||
const cleanTableName = tableName.replace(/\[([^\]]+)\]/g, '$1');
|
||||
|
||||
tables[cleanTableName] = {
|
||||
const tableName = tableMatch[1];
|
||||
tables[tableName] = {
|
||||
start: tableMatch.index,
|
||||
end: tableMatch.index + tableMatch[0].length,
|
||||
content: tableMatch[5],
|
||||
fullMatch: tableMatch[0],
|
||||
content: tableMatch[2],
|
||||
};
|
||||
}
|
||||
|
||||
@@ -286,102 +257,50 @@ const convertToInlineRefs = (dbml: string): string => {
|
||||
// Create a map for faster table lookup
|
||||
const tableMap = new Map(Object.entries(tables));
|
||||
|
||||
// 1. First, collect all refs per field
|
||||
const fieldRefs = new Map<
|
||||
string,
|
||||
{ table: string; refs: string[]; relatedTables: string[] }
|
||||
>();
|
||||
|
||||
// 1. Add inline refs to table contents
|
||||
refs.forEach((ref) => {
|
||||
let targetTableName, fieldNameToModify, inlineRefSyntax, relatedTable;
|
||||
let targetTableName, fieldNameToModify, inlineRefSyntax;
|
||||
|
||||
if (ref.direction === '<') {
|
||||
targetTableName = ref.targetSchema
|
||||
? `${ref.targetSchema}.${ref.targetTable}`
|
||||
: ref.targetTable;
|
||||
targetTableName = ref.targetTable;
|
||||
fieldNameToModify = ref.targetField;
|
||||
const sourceRef = ref.sourceSchema
|
||||
? `"${ref.sourceSchema}"."${ref.sourceTable}"."${ref.sourceField}"`
|
||||
: `"${ref.sourceTable}"."${ref.sourceField}"`;
|
||||
inlineRefSyntax = `ref: < ${sourceRef}`;
|
||||
relatedTable = ref.sourceTable;
|
||||
inlineRefSyntax = `[ref: < "${ref.sourceTable}"."${ref.sourceField}"]`;
|
||||
} else {
|
||||
targetTableName = ref.sourceSchema
|
||||
? `${ref.sourceSchema}.${ref.sourceTable}`
|
||||
: ref.sourceTable;
|
||||
targetTableName = ref.sourceTable;
|
||||
fieldNameToModify = ref.sourceField;
|
||||
const targetRef = ref.targetSchema
|
||||
? `"${ref.targetSchema}"."${ref.targetTable}"."${ref.targetField}"`
|
||||
: `"${ref.targetTable}"."${ref.targetField}"`;
|
||||
inlineRefSyntax = `ref: > ${targetRef}`;
|
||||
relatedTable = ref.targetTable;
|
||||
inlineRefSyntax = `[ref: > "${ref.targetTable}"."${ref.targetField}"]`;
|
||||
}
|
||||
|
||||
const fieldKey = `${targetTableName}.${fieldNameToModify}`;
|
||||
const existing = fieldRefs.get(fieldKey) || {
|
||||
table: targetTableName,
|
||||
refs: [],
|
||||
relatedTables: [],
|
||||
};
|
||||
existing.refs.push(inlineRefSyntax);
|
||||
existing.relatedTables.push(relatedTable);
|
||||
fieldRefs.set(fieldKey, existing);
|
||||
});
|
||||
|
||||
// 2. Apply all refs to fields
|
||||
fieldRefs.forEach((fieldData, fieldKey) => {
|
||||
// fieldKey might be "schema.table.field" or just "table.field"
|
||||
const lastDotIndex = fieldKey.lastIndexOf('.');
|
||||
const tableName = fieldKey.substring(0, lastDotIndex);
|
||||
const fieldName = fieldKey.substring(lastDotIndex + 1);
|
||||
const tableData = tableMap.get(tableName);
|
||||
|
||||
const tableData = tableMap.get(targetTableName);
|
||||
if (tableData) {
|
||||
// Updated pattern to capture field definition and all existing attributes in brackets
|
||||
const fieldPattern = new RegExp(
|
||||
`^([ \t]*"${fieldName}"[^\\n]*?)(?:\\s*(\\[[^\\]]*\\]))*\\s*(//.*)?$`,
|
||||
'gm'
|
||||
`("(${fieldNameToModify})"[^\n]*?)([ \t]*[[].*?[]])?([ \t]*//.*)?$`,
|
||||
'm'
|
||||
);
|
||||
let newContent = tableData.content;
|
||||
|
||||
newContent = newContent.replace(
|
||||
fieldPattern,
|
||||
(lineMatch, fieldPart, existingBrackets, commentPart) => {
|
||||
// Collect all attributes from existing brackets
|
||||
const allAttributes: string[] = [];
|
||||
if (existingBrackets) {
|
||||
// Extract all bracket contents
|
||||
const bracketPattern = /\[([^\]]*)\]/g;
|
||||
let bracketMatch;
|
||||
while (
|
||||
(bracketMatch = bracketPattern.exec(lineMatch)) !==
|
||||
null
|
||||
) {
|
||||
const content = bracketMatch[1].trim();
|
||||
if (content) {
|
||||
allAttributes.push(content);
|
||||
}
|
||||
}
|
||||
(
|
||||
lineMatch,
|
||||
fieldPart,
|
||||
_fieldName,
|
||||
existingAttributes,
|
||||
commentPart
|
||||
) => {
|
||||
// Avoid adding duplicate refs
|
||||
if (lineMatch.includes('[ref:')) {
|
||||
return lineMatch;
|
||||
}
|
||||
|
||||
// Add all refs for this field
|
||||
allAttributes.push(...fieldData.refs);
|
||||
|
||||
// Combine all attributes into a single bracket
|
||||
const combinedAttributes = allAttributes.join(', ');
|
||||
|
||||
// Preserve original spacing from fieldPart
|
||||
const leadingSpaces = fieldPart.match(/^(\s*)/)?.[1] || '';
|
||||
const fieldDefWithoutSpaces = fieldPart.trim();
|
||||
|
||||
return `${leadingSpaces}${fieldDefWithoutSpaces} [${combinedAttributes}]${commentPart || ''}`;
|
||||
return `${fieldPart.trim()} ${inlineRefSyntax}${existingAttributes || ''}${commentPart || ''}`;
|
||||
}
|
||||
);
|
||||
|
||||
// Update the table content if modified
|
||||
if (newContent !== tableData.content) {
|
||||
tableData.content = newContent;
|
||||
tableMap.set(tableName, tableData);
|
||||
tableMap.set(targetTableName, tableData);
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -393,24 +312,9 @@ const convertToInlineRefs = (dbml: string): string => {
|
||||
([, a], [, b]) => a.start - b.start
|
||||
);
|
||||
|
||||
for (const [, tableData] of sortedTables) {
|
||||
for (const [tableName, tableData] of sortedTables) {
|
||||
reconstructedDbml += dbml.substring(lastIndex, tableData.start);
|
||||
// Preserve the original table definition format but with updated content
|
||||
const originalTableDef = tableData.fullMatch;
|
||||
|
||||
// Ensure the content ends with proper whitespace before the closing brace
|
||||
let content = tableData.content;
|
||||
// Check if content ends with a field that has inline refs
|
||||
if (content.match(/\[.*ref:.*\]\s*$/)) {
|
||||
// Ensure there's a newline before the closing brace
|
||||
content = content.trimEnd() + '\n';
|
||||
}
|
||||
|
||||
const updatedTableDef = originalTableDef.replace(
|
||||
/{[^}]*}/,
|
||||
`{${content}}`
|
||||
);
|
||||
reconstructedDbml += updatedTableDef;
|
||||
reconstructedDbml += `Table "${tableName}" {${tableData.content}}`;
|
||||
lastIndex = tableData.end;
|
||||
}
|
||||
reconstructedDbml += dbml.substring(lastIndex);
|
||||
@@ -421,10 +325,7 @@ const convertToInlineRefs = (dbml: string): string => {
|
||||
.filter((line) => !line.trim().startsWith('Ref '));
|
||||
const finalDbml = finalLines.join('\n').trim();
|
||||
|
||||
// Clean up excessive empty lines - replace multiple consecutive empty lines with just one
|
||||
const cleanedDbml = finalDbml.replace(/\n\s*\n\s*\n/g, '\n\n');
|
||||
|
||||
return cleanedDbml;
|
||||
return finalDbml;
|
||||
};
|
||||
|
||||
// Function to check for SQL keywords (add more if needed)
|
||||
@@ -509,131 +410,6 @@ const normalizeCharTypeFormat = (dbml: string): string => {
|
||||
.replace(/character \(([0-9]+)\)/g, 'character($1)');
|
||||
};
|
||||
|
||||
// Fix table definitions with incorrect bracket syntax
|
||||
const fixTableBracketSyntax = (dbml: string): string => {
|
||||
// Fix patterns like Table [schema].[table] to Table "schema"."table"
|
||||
return dbml.replace(
|
||||
/Table\s+\[([^\]]+)\]\.\[([^\]]+)\]/g,
|
||||
'Table "$1"."$2"'
|
||||
);
|
||||
};
|
||||
|
||||
// Restore schema information that may have been stripped by the DBML importer
|
||||
const restoreTableSchemas = (dbml: string, diagram: Diagram): string => {
|
||||
if (!diagram.tables) return dbml;
|
||||
|
||||
// Group tables by name to handle duplicates
|
||||
const tablesByName = new Map<
|
||||
string,
|
||||
Array<{ table: (typeof diagram.tables)[0]; index: number }>
|
||||
>();
|
||||
diagram.tables.forEach((table, index) => {
|
||||
const existing = tablesByName.get(table.name) || [];
|
||||
existing.push({ table, index });
|
||||
tablesByName.set(table.name, existing);
|
||||
});
|
||||
|
||||
let result = dbml;
|
||||
|
||||
// Process each group of tables with the same name
|
||||
tablesByName.forEach((tablesGroup, tableName) => {
|
||||
if (tablesGroup.length === 1) {
|
||||
// Single table with this name - simple case
|
||||
const table = tablesGroup[0].table;
|
||||
if (table.schema) {
|
||||
// Match table definition without schema (e.g., Table "users" {)
|
||||
const tablePattern = new RegExp(
|
||||
`Table\\s+"${table.name.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}"\\s*{`,
|
||||
'g'
|
||||
);
|
||||
const schemaTableName = `Table "${table.schema}"."${table.name}" {`;
|
||||
result = result.replace(tablePattern, schemaTableName);
|
||||
|
||||
// Update references in Ref statements
|
||||
const escapedTableName = table.name.replace(
|
||||
/[.*+?^${}()|[\]\\]/g,
|
||||
'\\$&'
|
||||
);
|
||||
|
||||
// Pattern 1: In Ref definitions - :"tablename"."field"
|
||||
const refDefPattern = new RegExp(
|
||||
`(Ref\\s+"[^"]+")\\s*:\\s*"${escapedTableName}"\\."([^"]+)"`,
|
||||
'g'
|
||||
);
|
||||
result = result.replace(
|
||||
refDefPattern,
|
||||
`$1:"${table.schema}"."${table.name}"."$2"`
|
||||
);
|
||||
|
||||
// Pattern 2: In Ref targets - [<>] "tablename"."field"
|
||||
const refTargetPattern = new RegExp(
|
||||
`([<>])\\s*"${escapedTableName}"\\."([^"]+)"`,
|
||||
'g'
|
||||
);
|
||||
result = result.replace(
|
||||
refTargetPattern,
|
||||
`$1 "${table.schema}"."${table.name}"."$2"`
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// Multiple tables with the same name - need to be more careful
|
||||
const defaultSchema = defaultSchemas[diagram.databaseType];
|
||||
|
||||
// Separate tables by whether they have the default schema or not
|
||||
const defaultSchemaTable = tablesGroup.find(
|
||||
({ table }) => table.schema === defaultSchema
|
||||
);
|
||||
const nonDefaultSchemaTables = tablesGroup.filter(
|
||||
({ table }) => table.schema && table.schema !== defaultSchema
|
||||
);
|
||||
|
||||
// Find all table definitions for this name
|
||||
const escapedTableName = tableName.replace(
|
||||
/[.*+?^${}()|[\]\\]/g,
|
||||
'\\$&'
|
||||
);
|
||||
|
||||
// First, handle tables that already have schema in DBML
|
||||
const schemaTablePattern = new RegExp(
|
||||
`Table\\s+"[^"]+"\\.\\s*"${escapedTableName}"\\s*{`,
|
||||
'g'
|
||||
);
|
||||
result = result.replace(schemaTablePattern, (match) => {
|
||||
// This table already has a schema, keep it as is
|
||||
return match;
|
||||
});
|
||||
|
||||
// Then handle tables without schema in DBML
|
||||
const noSchemaTablePattern = new RegExp(
|
||||
`Table\\s+"${escapedTableName}"\\s*{`,
|
||||
'g'
|
||||
);
|
||||
|
||||
let noSchemaMatchIndex = 0;
|
||||
result = result.replace(noSchemaTablePattern, (match) => {
|
||||
// If we have a table with the default schema and this is the first match without schema,
|
||||
// it should be the default schema table
|
||||
if (noSchemaMatchIndex === 0 && defaultSchemaTable) {
|
||||
noSchemaMatchIndex++;
|
||||
return `Table "${defaultSchema}"."${tableName}" {`;
|
||||
}
|
||||
// Otherwise, try to match with non-default schema tables
|
||||
const remainingNonDefault =
|
||||
nonDefaultSchemaTables[
|
||||
noSchemaMatchIndex - (defaultSchemaTable ? 1 : 0)
|
||||
];
|
||||
if (remainingNonDefault) {
|
||||
noSchemaMatchIndex++;
|
||||
return `Table "${remainingNonDefault.table.schema}"."${tableName}" {`;
|
||||
}
|
||||
return match;
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
export interface DBMLExportResult {
|
||||
standardDbml: string;
|
||||
inlineDbml: string;
|
||||
@@ -653,18 +429,13 @@ export function generateDBMLFromDiagram(diagram: Diagram): DBMLExportResult {
|
||||
};
|
||||
}) ?? [];
|
||||
|
||||
// Remove duplicate tables (consider both schema and table name)
|
||||
const seenTableIdentifiers = new Set<string>();
|
||||
// Remove duplicate tables (keep first occurrence by table name)
|
||||
const seenTableNames = new Set<string>();
|
||||
const uniqueTables = sanitizedTables.filter((table) => {
|
||||
// Create a unique identifier combining schema and table name
|
||||
const tableIdentifier = table.schema
|
||||
? `${table.schema}.${table.name}`
|
||||
: table.name;
|
||||
|
||||
if (seenTableIdentifiers.has(tableIdentifier)) {
|
||||
if (seenTableNames.has(table.name)) {
|
||||
return false; // Skip duplicate
|
||||
}
|
||||
seenTableIdentifiers.add(tableIdentifier);
|
||||
seenTableNames.add(table.name);
|
||||
return true; // Keep unique table
|
||||
});
|
||||
|
||||
@@ -712,58 +483,43 @@ export function generateDBMLFromDiagram(diagram: Diagram): DBMLExportResult {
|
||||
|
||||
const processTable = (table: DBTable) => {
|
||||
const originalName = table.name;
|
||||
let safeTableName = originalName;
|
||||
|
||||
// If name contains spaces or special characters, wrap in quotes
|
||||
if (/[^\w]/.test(originalName)) {
|
||||
safeTableName = `"${originalName.replace(/"/g, '\\"')}"`;
|
||||
}
|
||||
let safeTableName = originalName.replace(/[^\w]/g, '_');
|
||||
|
||||
// Rename table if SQL keyword (PostgreSQL only)
|
||||
if (shouldRenameKeywords && isSQLKeyword(originalName)) {
|
||||
const newName = `${originalName}_table`;
|
||||
if (shouldRenameKeywords && isSQLKeyword(safeTableName)) {
|
||||
const newName = `${safeTableName}_table`;
|
||||
sqlRenamedTables.set(newName, originalName);
|
||||
safeTableName = /[^\w]/.test(newName)
|
||||
? `"${newName.replace(/"/g, '\\"')}"`
|
||||
: newName;
|
||||
safeTableName = newName;
|
||||
}
|
||||
|
||||
const fieldNameCounts = new Map<string, number>();
|
||||
const processedFields = table.fields.map((field) => {
|
||||
let finalSafeName = field.name;
|
||||
|
||||
// If field name contains spaces or special characters, wrap in quotes
|
||||
if (/[^\w]/.test(field.name)) {
|
||||
finalSafeName = `"${field.name.replace(/"/g, '\\"')}"`;
|
||||
}
|
||||
const originalSafeName = field.name.replace(/[^\w]/g, '_');
|
||||
let finalSafeName = originalSafeName;
|
||||
|
||||
// Handle duplicate field names
|
||||
const count = fieldNameCounts.get(field.name) || 0;
|
||||
const count = fieldNameCounts.get(originalSafeName) || 0;
|
||||
if (count > 0) {
|
||||
const newName = `${field.name}_${count + 1}`;
|
||||
finalSafeName = /[^\w]/.test(newName)
|
||||
? `"${newName.replace(/"/g, '\\"')}"`
|
||||
: newName;
|
||||
finalSafeName = `${originalSafeName}_${count + 1}`;
|
||||
}
|
||||
fieldNameCounts.set(field.name, count + 1);
|
||||
fieldNameCounts.set(originalSafeName, count + 1);
|
||||
|
||||
// Create sanitized field
|
||||
const sanitizedField: DBField = {
|
||||
...field,
|
||||
name: finalSafeName,
|
||||
};
|
||||
delete sanitizedField.comments;
|
||||
|
||||
// Rename field if SQL keyword (PostgreSQL only)
|
||||
if (shouldRenameKeywords && isSQLKeyword(field.name)) {
|
||||
const newFieldName = `${field.name}_field`;
|
||||
if (shouldRenameKeywords && isSQLKeyword(finalSafeName)) {
|
||||
const newFieldName = `${finalSafeName}_field`;
|
||||
fieldRenames.push({
|
||||
table: safeTableName,
|
||||
originalName: field.name,
|
||||
originalName: finalSafeName,
|
||||
newName: newFieldName,
|
||||
});
|
||||
sanitizedField.name = /[^\w]/.test(newFieldName)
|
||||
? `"${newFieldName.replace(/"/g, '\\"')}"`
|
||||
: newFieldName;
|
||||
sanitizedField.name = newFieldName;
|
||||
}
|
||||
|
||||
return sanitizedField;
|
||||
@@ -776,9 +532,7 @@ export function generateDBMLFromDiagram(diagram: Diagram): DBMLExportResult {
|
||||
indexes: (table.indexes || []).map((index) => ({
|
||||
...index,
|
||||
name: index.name
|
||||
? /[^\w]/.test(index.name)
|
||||
? `"${index.name.replace(/"/g, '\\"')}"`
|
||||
: index.name
|
||||
? index.name.replace(/[^\w]/g, '_')
|
||||
: `idx_${Math.random().toString(36).substring(2, 8)}`,
|
||||
})),
|
||||
};
|
||||
@@ -788,15 +542,10 @@ export function generateDBMLFromDiagram(diagram: Diagram): DBMLExportResult {
|
||||
...cleanDiagram,
|
||||
tables: cleanDiagram.tables?.map(processTable) ?? [],
|
||||
relationships:
|
||||
cleanDiagram.relationships?.map((rel, index) => {
|
||||
const safeName = rel.name
|
||||
? rel.name.replace(/[^\w]/g, '_')
|
||||
: Math.random().toString(36).substring(2, 8);
|
||||
return {
|
||||
...rel,
|
||||
name: `fk_${index}_${safeName}`,
|
||||
};
|
||||
}) ?? [],
|
||||
cleanDiagram.relationships?.map((rel, index) => ({
|
||||
...rel,
|
||||
name: `fk_${index}_${rel.name ? rel.name.replace(/[^\w]/g, '_') : Math.random().toString(36).substring(2, 8)}`,
|
||||
})) ?? [],
|
||||
} as Diagram);
|
||||
|
||||
let standard = '';
|
||||
@@ -828,27 +577,16 @@ export function generateDBMLFromDiagram(diagram: Diagram): DBMLExportResult {
|
||||
}
|
||||
|
||||
standard = normalizeCharTypeFormat(
|
||||
fixTableBracketSyntax(
|
||||
importer.import(
|
||||
baseScript,
|
||||
databaseTypeToImportFormat(diagram.databaseType)
|
||||
)
|
||||
importer.import(
|
||||
baseScript,
|
||||
databaseTypeToImportFormat(diagram.databaseType)
|
||||
)
|
||||
);
|
||||
|
||||
// Restore schema information that may have been stripped by DBML importer
|
||||
standard = restoreTableSchemas(standard, diagram);
|
||||
|
||||
// Prepend Enum DBML to the standard output
|
||||
if (enumsDBML) {
|
||||
standard = enumsDBML + '\n\n' + standard;
|
||||
}
|
||||
standard = enumsDBML + '\n' + standard;
|
||||
|
||||
inline = normalizeCharTypeFormat(convertToInlineRefs(standard));
|
||||
|
||||
// Clean up excessive empty lines in both outputs
|
||||
standard = standard.replace(/\n\s*\n\s*\n/g, '\n\n');
|
||||
inline = inline.replace(/\n\s*\n\s*\n/g, '\n\n');
|
||||
} catch (error: unknown) {
|
||||
console.error(
|
||||
'Error during DBML generation process:',
|
||||
@@ -864,11 +602,11 @@ export function generateDBMLFromDiagram(diagram: Diagram): DBMLExportResult {
|
||||
|
||||
// If an error occurred, still prepend enums if they exist, or they'll be lost.
|
||||
// The error message will then follow.
|
||||
if (standard.startsWith('// Error generating DBML:') && enumsDBML) {
|
||||
standard = enumsDBML + '\n\n' + standard;
|
||||
if (standard.startsWith('// Error generating DBML:')) {
|
||||
standard = enumsDBML + standard;
|
||||
}
|
||||
if (inline.startsWith('// Error generating DBML:') && enumsDBML) {
|
||||
inline = enumsDBML + '\n\n' + inline;
|
||||
if (inline.startsWith('// Error generating DBML:')) {
|
||||
inline = enumsDBML + inline;
|
||||
}
|
||||
}
@@ -695,34 +695,13 @@ Note dragon_note {
|
||||
// Check that table header color was removed
|
||||
expect(hoardsTable).toBeDefined();
|
||||
|
||||
// Verify all indexes are imported correctly
|
||||
expect(hoardsTable?.indexes).toHaveLength(3); // Should have 3 indexes as defined in DBML
|
||||
|
||||
// Verify named indexes
|
||||
const uniqueDragonIndex = hoardsTable?.indexes.find(
|
||||
(idx) => idx.name === 'idx_unique_dragon'
|
||||
);
|
||||
expect(uniqueDragonIndex).toBeDefined();
|
||||
expect(uniqueDragonIndex?.name).toBe('idx_unique_dragon'); // Verify exact name from DBML
|
||||
expect(uniqueDragonIndex?.unique).toBe(true);
|
||||
expect(uniqueDragonIndex?.fieldIds).toHaveLength(1);
|
||||
|
||||
const hoardValueIndex = hoardsTable?.indexes.find(
|
||||
(idx) => idx.name === 'idx_hoard_value'
|
||||
);
|
||||
expect(hoardValueIndex).toBeDefined();
|
||||
expect(hoardValueIndex?.name).toBe('idx_hoard_value'); // Verify exact name from DBML
|
||||
expect(hoardValueIndex?.unique).toBe(false);
|
||||
expect(hoardValueIndex?.fieldIds).toHaveLength(1);
|
||||
|
||||
const dragonActiveIndex = hoardsTable?.indexes.find(
|
||||
(idx) => idx.name === 'idx_dragon_active'
|
||||
);
|
||||
expect(dragonActiveIndex).toBeDefined();
|
||||
expect(dragonActiveIndex?.name).toBe('idx_dragon_active'); // Verify exact name from DBML
|
||||
expect(dragonActiveIndex?.unique).toBe(false);
|
||||
expect(dragonActiveIndex?.fieldIds).toHaveLength(2);
|
||||
|
||||
// Check relationship
|
||||
expect(diagram.relationships).toHaveLength(1);
|
||||
const relationship = diagram.relationships?.[0];
|
||||
@@ -762,263 +741,5 @@ Table empty_table {
|
||||
expect(diagram.tables?.[0]?.fields).toHaveLength(1);
|
||||
expect(diagram.tables?.[0]?.name).toBe('empty_table');
|
||||
});
|
||||
|
||||
it('should import tables with same name but different schemas', async () => {
|
||||
const dbml = `
|
||||
Table "aa"."users" {
|
||||
id integer [primary key]
|
||||
}
|
||||
|
||||
Table "bb"."users" {
|
||||
id integer [primary key]
|
||||
}`;
|
||||
const diagram = await importDBMLToDiagram(dbml);
|
||||
|
||||
expect(diagram.tables).toHaveLength(2);
|
||||
|
||||
const aaUsersTable = diagram.tables?.find(
|
||||
(t) => t.name === 'users' && t.schema === 'aa'
|
||||
);
|
||||
const bbUsersTable = diagram.tables?.find(
|
||||
(t) => t.name === 'users' && t.schema === 'bb'
|
||||
);
|
||||
|
||||
expect(aaUsersTable).toBeDefined();
|
||||
expect(bbUsersTable).toBeDefined();
|
||||
|
||||
expect(aaUsersTable?.schema).toBe('aa');
|
||||
expect(bbUsersTable?.schema).toBe('bb');
|
||||
|
||||
expect(aaUsersTable?.fields).toHaveLength(1);
|
||||
expect(bbUsersTable?.fields).toHaveLength(1);
|
||||
|
||||
expect(aaUsersTable?.fields[0].name).toBe('id');
|
||||
expect(aaUsersTable?.fields[0].type.id).toBe('int');
|
||||
expect(aaUsersTable?.fields[0].primaryKey).toBe(true);
|
||||
|
||||
expect(bbUsersTable?.fields[0].name).toBe('id');
|
||||
expect(bbUsersTable?.fields[0].type.id).toBe('int');
|
||||
expect(bbUsersTable?.fields[0].primaryKey).toBe(true);
|
||||
});
|
||||
|
||||
it('should import complex multi-schema DBML with inline refs and various indexes', async () => {
|
||||
// This test validates:
|
||||
// - 3 tables across different schemas (public, public_2, public_3)
|
||||
// - Table-level notes (Note: 'my comment' on users table)
|
||||
// - 3 indexes:
|
||||
// * Composite unique index: (content, user_id) on posts table
|
||||
// * Single non-unique index: created_at on posts table
|
||||
// * Single unique index: id on comments table
|
||||
// - 3 inline foreign key relationships:
|
||||
// * posts.user_id -> users.id
|
||||
// * comments.post_id -> posts.id
|
||||
// * comments.user_id -> users.id
|
||||
// - Quoted identifiers for all table and field names
|
||||
|
||||
const dbml = `
|
||||
Table "public"."users" {
|
||||
"id" varchar(500) [pk]
|
||||
"name" varchar(500)
|
||||
"email" varchar(500)
|
||||
Note: 'my comment'
|
||||
}
|
||||
|
||||
Table "public_2"."posts" {
|
||||
"id" varchar(500) [pk]
|
||||
"title" varchar(500)
|
||||
"content" text
|
||||
"user_id" varchar(500) [ref: < "public"."users"."id"]
|
||||
"created_at" timestamp
|
||||
|
||||
Indexes {
|
||||
(content, user_id) [unique, name: "public_2_content_user_id_idx"]
|
||||
created_at [name: "public_2_index_2"]
|
||||
}
|
||||
}
|
||||
|
||||
Table "public_3"."comments" {
|
||||
"id" varchar(500) [pk]
|
||||
"content" text
|
||||
"post_id" varchar(500) [ref: < "public_2"."posts"."id"]
|
||||
"user_id" varchar(500) [ref: < "public"."users"."id"]
|
||||
"created_at" timestamp
|
||||
|
||||
Indexes {
|
||||
id [unique, name: "public_3_index_1"]
|
||||
}
|
||||
}`;
|
||||
const diagram = await importDBMLToDiagram(dbml);
|
||||
|
||||
// Verify tables
|
||||
expect(diagram.tables).toHaveLength(3);
|
||||
|
||||
const usersTable = diagram.tables?.find(
|
||||
(t) => t.name === 'users' && t.schema === 'public'
|
||||
);
|
||||
const postsTable = diagram.tables?.find(
|
||||
(t) => t.name === 'posts' && t.schema === 'public_2'
|
||||
);
|
||||
const commentsTable = diagram.tables?.find(
|
||||
(t) => t.name === 'comments' && t.schema === 'public_3'
|
||||
);
|
||||
|
||||
expect(usersTable).toBeDefined();
|
||||
expect(postsTable).toBeDefined();
|
||||
expect(commentsTable).toBeDefined();
|
||||
|
||||
// Check users table
|
||||
expect(usersTable?.fields).toHaveLength(3);
|
||||
expect(
|
||||
usersTable?.fields.find((f) => f.name === 'id')?.primaryKey
|
||||
).toBe(true);
|
||||
expect(
|
||||
usersTable?.fields.find((f) => f.name === 'id')?.type.id
|
||||
).toBe('varchar');
|
||||
expect(
|
||||
usersTable?.fields.find((f) => f.name === 'name')?.type.id
|
||||
).toBe('varchar');
|
||||
expect(
|
||||
usersTable?.fields.find((f) => f.name === 'email')?.type.id
|
||||
).toBe('varchar');
|
||||
|
||||
// Check if table note is preserved
|
||||
expect(usersTable?.comments).toBe('my comment');
|
||||
|
||||
// Check posts table
|
||||
expect(postsTable?.fields).toHaveLength(5);
|
||||
expect(
|
||||
postsTable?.fields.find((f) => f.name === 'content')?.type.id
|
||||
).toBe('text');
|
||||
expect(
|
||||
postsTable?.fields.find((f) => f.name === 'created_at')?.type.id
|
||||
).toBe('timestamp');
|
||||
|
||||
// Check posts indexes thoroughly
|
||||
expect(postsTable?.indexes).toHaveLength(2);
|
||||
|
||||
// Index 1: Composite unique index on (content, user_id)
|
||||
const compositeIndex = postsTable?.indexes.find(
|
||||
(idx) => idx.name === 'public_2_content_user_id_idx'
|
||||
);
|
||||
expect(compositeIndex).toBeDefined();
|
||||
expect(compositeIndex?.name).toBe('public_2_content_user_id_idx'); // Verify exact name from DBML
|
||||
expect(compositeIndex?.unique).toBe(true);
|
||||
expect(compositeIndex?.fieldIds).toHaveLength(2);
|
||||
// Verify it includes the correct fields
|
||||
const contentFieldId = postsTable?.fields.find(
|
||||
(f) => f.name === 'content'
|
||||
)?.id;
|
||||
const userIdFieldId = postsTable?.fields.find(
|
||||
(f) => f.name === 'user_id'
|
||||
)?.id;
|
||||
expect(compositeIndex?.fieldIds).toContain(contentFieldId);
|
||||
expect(compositeIndex?.fieldIds).toContain(userIdFieldId);
|
||||
|
||||
// Index 2: Non-unique index on created_at
|
||||
const singleIndex = postsTable?.indexes.find(
|
||||
(idx) => idx.name === 'public_2_index_2'
|
||||
);
|
||||
expect(singleIndex).toBeDefined();
|
||||
expect(singleIndex?.name).toBe('public_2_index_2'); // Verify exact name from DBML
|
||||
expect(singleIndex?.unique).toBe(false);
|
||||
expect(singleIndex?.fieldIds).toHaveLength(1);
|
||||
const createdAtFieldId = postsTable?.fields.find(
|
||||
(f) => f.name === 'created_at'
|
||||
)?.id;
|
||||
expect(singleIndex?.fieldIds[0]).toBe(createdAtFieldId);
|
||||
|
||||
// Check comments table
|
||||
expect(commentsTable?.fields).toHaveLength(5);
|
||||
expect(commentsTable?.indexes).toHaveLength(1);
|
||||
|
||||
// Index: Unique index on id
|
||||
const idIndex = commentsTable?.indexes.find(
|
||||
(idx) => idx.name === 'public_3_index_1'
|
||||
);
|
||||
expect(idIndex).toBeDefined();
|
||||
expect(idIndex?.name).toBe('public_3_index_1'); // Verify exact name from DBML
|
||||
expect(idIndex?.unique).toBe(true);
|
||||
expect(idIndex?.fieldIds).toHaveLength(1);
|
||||
const idFieldId = commentsTable?.fields.find(
|
||||
(f) => f.name === 'id'
|
||||
)?.id;
|
||||
expect(idIndex?.fieldIds[0]).toBe(idFieldId);
|
||||
|
||||
// Verify relationships (inline refs should create relationships)
|
||||
// From DBML:
|
||||
// 1. posts.user_id -> users.id
|
||||
// 2. comments.post_id -> posts.id
|
||||
// 3. comments.user_id -> users.id
|
||||
expect(diagram.relationships).toHaveLength(3);
|
||||
|
||||
// Find relationships - check the actual field references
|
||||
const findRelationshipByFields = (
|
||||
sourceTableId: string,
|
||||
sourceFieldName: string,
|
||||
targetTableId: string,
|
||||
targetFieldName: string
|
||||
) => {
|
||||
const sourceField = diagram.tables
|
||||
?.find((t) => t.id === sourceTableId)
|
||||
?.fields.find((f) => f.name === sourceFieldName);
|
||||
const targetField = diagram.tables
|
||||
?.find((t) => t.id === targetTableId)
|
||||
?.fields.find((f) => f.name === targetFieldName);
|
||||
|
||||
return diagram.relationships?.find(
|
||||
(r) =>
|
||||
(r.sourceFieldId === sourceField?.id &&
|
||||
r.targetFieldId === targetField?.id) ||
|
||||
(r.sourceFieldId === targetField?.id &&
|
||||
r.targetFieldId === sourceField?.id)
|
||||
);
|
||||
};
|
||||
|
||||
// Relationship 1: posts.user_id -> users.id
|
||||
const postsUsersRel = findRelationshipByFields(
|
||||
postsTable!.id,
|
||||
'user_id',
|
||||
usersTable!.id,
|
||||
'id'
|
||||
);
|
||||
expect(postsUsersRel).toBeDefined();
|
||||
expect(postsUsersRel?.sourceSchema).toBeDefined();
|
||||
expect(postsUsersRel?.targetSchema).toBeDefined();
|
||||
|
||||
// Relationship 2: comments.post_id -> posts.id
|
||||
const commentsPostsRel = findRelationshipByFields(
|
||||
commentsTable!.id,
|
||||
'post_id',
|
||||
postsTable!.id,
|
||||
'id'
|
||||
);
|
||||
expect(commentsPostsRel).toBeDefined();
|
||||
|
||||
// Relationship 3: comments.user_id -> users.id
|
||||
const commentsUsersRel = findRelationshipByFields(
|
||||
commentsTable!.id,
|
||||
'user_id',
|
||||
usersTable!.id,
|
||||
'id'
|
||||
);
|
||||
expect(commentsUsersRel).toBeDefined();
|
||||
|
||||
// Verify all relationships have the expected cardinality
|
||||
// In DBML, inline refs create relationships where the referenced table (with PK)
|
||||
// is the "one" side and the referencing table (with FK) is the "many" side
|
||||
const allOneToMany = diagram.relationships?.every(
|
||||
(r) =>
|
||||
r.sourceCardinality === 'one' &&
|
||||
r.targetCardinality === 'many'
|
||||
);
|
||||
expect(allOneToMany).toBe(true);
|
||||
|
||||
// Verify schemas are preserved in relationships
|
||||
const relationshipsHaveSchemas = diagram.relationships?.every(
|
||||
(r) =>
|
||||
r.sourceSchema !== undefined && r.targetSchema !== undefined
|
||||
);
|
||||
expect(relationshipsHaveSchemas).toBe(true);
|
||||
});
|
||||
});
|
||||
});
@@ -32,9 +32,8 @@ export const preprocessDBML = (content: string): string => {
|
||||
);
|
||||
|
||||
// Handle Table headers with color attributes
|
||||
// This regex handles both simple table names and schema.table patterns with quotes
|
||||
processed = processed.replace(
|
||||
/Table\s+((?:"[^"]+"\."[^"]+")|(?:\w+))\s*\[[^\]]*\]\s*\{/g,
|
||||
/Table\s+(\w+)\s*\[[^\]]*\]\s*\{/g,
|
||||
'Table $1 {'
|
||||
);
|
||||
|
||||
@@ -87,7 +86,7 @@ interface DBMLIndexColumn {
|
||||
}
|
||||
|
||||
interface DBMLIndex {
|
||||
columns: (string | DBMLIndexColumn)[];
|
||||
columns: string | (string | DBMLIndexColumn)[];
|
||||
unique?: boolean;
|
||||
name?: string;
|
||||
}
|
||||
@@ -97,7 +96,6 @@ interface DBMLTable {
|
||||
schema?: string | { name: string };
|
||||
fields: DBMLField[];
|
||||
indexes?: DBMLIndex[];
|
||||
note?: string | { value: string } | null;
|
||||
}
|
||||
|
||||
interface DBMLEndpoint {
|
||||
@@ -115,11 +113,9 @@ const mapDBMLTypeToGenericType = (dbmlType: string): DataType => {
|
||||
const matchedType = genericDataTypes.find((t) => t.id === normalizedType);
|
||||
if (matchedType) return matchedType;
|
||||
const typeMap: Record<string, string> = {
|
||||
int: 'int',
|
||||
integer: 'int',
|
||||
int: 'integer',
|
||||
varchar: 'varchar',
|
||||
bool: 'boolean',
|
||||
boolean: 'boolean',
|
||||
number: 'numeric',
|
||||
string: 'varchar',
|
||||
text: 'text',
|
||||
@@ -137,12 +133,7 @@ const mapDBMLTypeToGenericType = (dbmlType: string): DataType => {
|
||||
const foundType = genericDataTypes.find((t) => t.id === mappedType);
|
||||
if (foundType) return foundType;
|
||||
}
|
||||
const type = genericDataTypes.find((t) => t.id === 'varchar')!;
|
||||
|
||||
return {
|
||||
id: type.id,
|
||||
name: type.name,
|
||||
};
|
||||
return genericDataTypes.find((t) => t.id === 'varchar')!;
|
||||
};
|
||||
|
||||
const determineCardinality = (
|
||||
@@ -198,9 +189,10 @@ export const importDBMLToDiagram = async (
|
||||
}
|
||||
|
||||
const parsedData = parser.parse(sanitizedContent, 'dbml');
|
||||
const dbmlData = parsedData.schemas[0];
|
||||
|
||||
// Handle case where no schemas are found
|
||||
if (!parsedData.schemas || parsedData.schemas.length === 0) {
|
||||
// Handle case where no schema is found
|
||||
if (!dbmlData || !dbmlData.tables) {
|
||||
return {
|
||||
id: generateDiagramId(),
|
||||
name: 'DBML Import',
|
||||
@@ -212,117 +204,71 @@ export const importDBMLToDiagram = async (
|
||||
};
|
||||
}
|
||||
|
||||
// Process all schemas, not just the first one
|
||||
const allTables: DBMLTable[] = [];
|
||||
const allRefs: DBMLRef[] = [];
|
||||
|
||||
parsedData.schemas.forEach((schema) => {
|
||||
if (schema.tables) {
|
||||
schema.tables.forEach((table) => {
|
||||
// For tables with explicit schema, use the schema name
|
||||
// For tables without explicit schema, use empty string
|
||||
const schemaName =
|
||||
typeof table.schema === 'string'
|
||||
? table.schema
|
||||
: table.schema?.name || '';
|
||||
|
||||
allTables.push({
|
||||
name: table.name,
|
||||
schema: schemaName,
|
||||
note: table.note,
|
||||
fields: table.fields.map(
|
||||
(field) =>
|
||||
({
|
||||
name: field.name,
|
||||
type: field.type,
|
||||
unique: field.unique,
|
||||
pk: field.pk,
|
||||
not_null: field.not_null,
|
||||
increment: field.increment,
|
||||
}) satisfies DBMLField
|
||||
),
|
||||
indexes:
|
||||
table.indexes?.map((dbmlIndex) => {
|
||||
let indexColumns: string[];
|
||||
|
||||
// Handle both string and array formats
|
||||
if (typeof dbmlIndex.columns === 'string') {
|
||||
// Handle composite index case "(col1, col2)"
|
||||
// @ts-expect-error "columns" can be a string in some DBML versions
|
||||
if (dbmlIndex.columns.includes('(')) {
|
||||
const columnsStr: string =
|
||||
// @ts-expect-error "columns" can be a string in some DBML versions
|
||||
dbmlIndex.columns.replace(
|
||||
/[()]/g,
|
||||
''
|
||||
);
|
||||
indexColumns = columnsStr
|
||||
.split(',')
|
||||
.map((c) => c.trim());
|
||||
} else {
|
||||
// Single column as string
|
||||
|
||||
indexColumns = [
|
||||
// @ts-expect-error "columns" can be a string in some DBML versions
|
||||
dbmlIndex.columns.trim(),
|
||||
];
|
||||
}
|
||||
} else {
|
||||
// Handle array of columns
|
||||
indexColumns = dbmlIndex.columns.map(
|
||||
(col) => {
|
||||
if (typeof col === 'string') {
|
||||
// @ts-expect-error "columns" can be a string in some DBML versions
|
||||
return col.trim();
|
||||
} else if (
|
||||
typeof col === 'object' &&
|
||||
'value' in col
|
||||
) {
|
||||
return col.value.trim();
|
||||
} else {
|
||||
return String(col).trim();
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// Generate a consistent index name
|
||||
const indexName =
|
||||
dbmlIndex.name ||
|
||||
`idx_${table.name}_${indexColumns.join('_')}`;
|
||||
|
||||
return {
|
||||
columns: indexColumns,
|
||||
unique: dbmlIndex.unique || false,
|
||||
name: indexName,
|
||||
};
|
||||
}) || [],
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
if (schema.refs) {
|
||||
schema.refs.forEach((ref) => {
|
||||
// Convert the ref to ensure it has exactly two endpoints
|
||||
if (ref.endpoints && ref.endpoints.length >= 2) {
|
||||
allRefs.push({
|
||||
endpoints: [ref.endpoints[0], ref.endpoints[1]] as [
|
||||
DBMLEndpoint,
|
||||
DBMLEndpoint,
|
||||
],
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Extract only the necessary data from the parsed DBML
|
||||
const extractedData: {
|
||||
tables: DBMLTable[];
|
||||
refs: DBMLRef[];
|
||||
} = {
|
||||
tables: allTables,
|
||||
refs: allRefs,
|
||||
const extractedData = {
|
||||
tables: (dbmlData.tables as unknown as DBMLTable[]).map(
|
||||
(table) => ({
|
||||
name: table.name,
|
||||
schema: table.schema,
|
||||
fields: table.fields.map((field: DBMLField) => ({
|
||||
name: field.name,
|
||||
type: field.type,
|
||||
unique: field.unique,
|
||||
pk: field.pk,
|
||||
not_null: field.not_null,
|
||||
increment: field.increment,
|
||||
})),
|
||||
indexes:
|
||||
table.indexes?.map((dbmlIndex) => {
|
||||
let indexColumns: string[];
|
||||
|
||||
// Handle composite index case "(col1, col2)"
|
||||
if (typeof dbmlIndex.columns === 'string') {
|
||||
if (dbmlIndex.columns.includes('(')) {
|
||||
// Composite index
|
||||
const columnsStr =
|
||||
dbmlIndex.columns.replace(/[()]/g, '');
|
||||
indexColumns = columnsStr
|
||||
.split(',')
|
||||
.map((c) => c.trim());
|
||||
} else {
|
||||
// Single column
|
||||
indexColumns = [dbmlIndex.columns.trim()];
|
||||
}
|
||||
} else {
|
||||
// Handle array of columns
|
||||
indexColumns = Array.isArray(dbmlIndex.columns)
|
||||
? dbmlIndex.columns.map((col) =>
|
||||
typeof col === 'object' &&
|
||||
'value' in col
|
||||
? (col.value as string).trim()
|
||||
: (col as string).trim()
|
||||
)
|
||||
: [String(dbmlIndex.columns).trim()];
|
||||
}
|
||||
|
||||
// Generate a consistent index name
|
||||
const indexName =
|
||||
dbmlIndex.name ||
|
||||
`idx_${table.name}_${indexColumns.join('_')}`;
|
||||
|
||||
return {
|
||||
columns: indexColumns,
|
||||
unique: dbmlIndex.unique || false,
|
||||
name: indexName,
|
||||
};
|
||||
}) || [],
|
||||
})
|
||||
),
|
||||
refs: (dbmlData.refs as unknown as DBMLRef[]).map((ref) => ({
|
||||
endpoints: (ref.endpoints as [DBMLEndpoint, DBMLEndpoint]).map(
|
||||
(endpoint) => ({
|
||||
tableName: endpoint.tableName,
|
||||
fieldNames: endpoint.fieldNames,
|
||||
relation: endpoint.relation,
|
||||
})
|
||||
),
|
||||
})),
|
||||
};
|
||||
|
||||
// Convert DBML tables to ChartDB table objects
|
||||
@@ -359,26 +305,13 @@ export const importDBMLToDiagram = async (
|
||||
id: generateId(),
|
||||
name:
|
||||
dbmlIndex.name ||
|
||||
`idx_${table.name}_${(dbmlIndex.columns as string[]).join('_')}`,
|
||||
`idx_${table.name}_${dbmlIndex.columns.join('_')}`,
|
||||
fieldIds,
|
||||
unique: dbmlIndex.unique || false,
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
}) || [];
|
||||
|
||||
// Extract table note/comment
|
||||
let tableComment: string | undefined;
|
||||
if (table.note) {
|
||||
if (typeof table.note === 'string') {
|
||||
tableComment = table.note;
|
||||
} else if (
|
||||
typeof table.note === 'object' &&
|
||||
'value' in table.note
|
||||
) {
|
||||
tableComment = table.note.value;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
id: generateId(),
|
||||
name: table.name.replace(/['"]/g, ''),
|
||||
@@ -394,7 +327,6 @@ export const importDBMLToDiagram = async (
|
||||
color: randomColor(),
|
||||
isView: false,
|
||||
createdAt: Date.now(),
|
||||
comments: tableComment,
|
||||
};
|
||||
});
@@ -1,15 +1,10 @@
|
||||
import { z } from 'zod';
|
||||
import {
|
||||
dataTypeSchema,
|
||||
findDataTypeDataById,
|
||||
type DataType,
|
||||
} from '../data/data-types/data-types';
|
||||
import { dataTypeSchema, type DataType } from '../data/data-types/data-types';
|
||||
import type { ColumnInfo } from '../data/import-metadata/metadata-types/column-info';
|
||||
import type { AggregatedIndexInfo } from '../data/import-metadata/metadata-types/index-info';
|
||||
import type { PrimaryKeyInfo } from '../data/import-metadata/metadata-types/primary-key-info';
|
||||
import type { TableInfo } from '../data/import-metadata/metadata-types/table-info';
|
||||
import { generateId } from '../utils';
|
||||
import type { DatabaseType } from './database-type';
|
||||
|
||||
export interface DBField {
|
||||
id: string;
|
||||
@@ -102,80 +97,3 @@ export const createFieldsFromMetadata = ({
|
||||
})
|
||||
);
|
||||
};
|
||||
|
||||
export const generateDBFieldSuffix = (
|
||||
field: DBField,
|
||||
{
|
||||
databaseType,
|
||||
forceExtended = false,
|
||||
typeId,
|
||||
}: {
|
||||
databaseType?: DatabaseType;
|
||||
forceExtended?: boolean;
|
||||
typeId?: string;
|
||||
} = {}
|
||||
): string => {
|
||||
if (databaseType && forceExtended && typeId) {
|
||||
return generateExtendedSuffix(field, databaseType, typeId);
|
||||
}
|
||||
|
||||
return generateStandardSuffix(field);
|
||||
};
|
||||
|
||||
const generateExtendedSuffix = (
|
||||
field: DBField,
|
||||
databaseType: DatabaseType,
|
||||
typeId: string
|
||||
): string => {
|
||||
const type = findDataTypeDataById(typeId, databaseType);
|
||||
|
||||
if (!type?.fieldAttributes) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const { fieldAttributes } = type;
|
||||
|
||||
// Character maximum length types (e.g., VARCHAR)
|
||||
if (fieldAttributes.hasCharMaxLength) {
|
||||
const maxLength = field.characterMaximumLength ?? 'n';
|
||||
return `(${maxLength})`;
|
||||
}
|
||||
|
||||
// Precision and scale types (e.g., DECIMAL)
|
||||
if (fieldAttributes.precision && fieldAttributes.scale) {
|
||||
return formatPrecisionAndScale(field.precision, field.scale, '(p, s)');
|
||||
}
|
||||
|
||||
// Precision only types (e.g., FLOAT)
|
||||
if (fieldAttributes.precision) {
|
||||
const precision = field.precision ?? 'p';
|
||||
return `(${precision})`;
|
||||
}
|
||||
|
||||
return '';
|
||||
};
|
||||
|
||||
const generateStandardSuffix = (field: DBField): string => {
|
||||
// Character maximum length
|
||||
if (field.characterMaximumLength) {
|
||||
return `(${field.characterMaximumLength})`;
|
||||
}
|
||||
|
||||
return formatPrecisionAndScale(field.precision, field.scale, '');
|
||||
};
|
||||
|
||||
const formatPrecisionAndScale = (
|
||||
precision: number | null | undefined,
|
||||
scale: number | null | undefined,
|
||||
fallback: string
|
||||
): string => {
|
||||
if (precision && scale) {
|
||||
return `(${precision}, ${scale})`;
|
||||
}
|
||||
|
||||
if (precision) {
|
||||
return `(${precision})`;
|
||||
}
|
||||
|
||||
return fallback;
|
||||
};
@@ -108,7 +108,7 @@ export const loadFromDatabaseMetadata = async ({
|
||||
return a.isView ? 1 : -1;
|
||||
});
|
||||
|
||||
const diagram: Diagram = {
|
||||
const diagram = {
|
||||
id: generateDiagramId(),
|
||||
name: databaseMetadata.database_name
|
||||
? `${databaseMetadata.database_name}-db`
@@ -82,15 +82,13 @@ export const CanvasContextMenu: React.FC<React.PropsWithChildren> = ({
|
||||
openCreateRelationshipDialog();
|
||||
}, [openCreateRelationshipDialog]);
|
||||
|
||||
if (!isDesktop) {
|
||||
if (!isDesktop || readonly) {
|
||||
return <>{children}</>;
|
||||
}
|
||||
|
||||
return (
|
||||
<ContextMenu>
|
||||
<ContextMenuTrigger disabled={readonly}>
|
||||
{children}
|
||||
</ContextMenuTrigger>
|
||||
<ContextMenuTrigger>{children}</ContextMenuTrigger>
|
||||
<ContextMenuContent>
|
||||
<ContextMenuItem
|
||||
onClick={createTableHandler}
@@ -16,7 +16,6 @@ import { defaultSchemas } from '@/lib/data/default-schemas';
|
||||
import { useReactFlow } from '@xyflow/react';
|
||||
import { TreeView } from '@/components/tree-view/tree-view';
|
||||
import type { TreeNode } from '@/components/tree-view/tree';
|
||||
import { ScrollArea } from '@/components/scroll-area/scroll-area';
|
||||
|
||||
export interface CanvasFilterProps {
|
||||
onClose: () => void;
|
||||
@@ -406,7 +405,7 @@ export const CanvasFilter: React.FC<CanvasFilterProps> = ({ onClose }) => {
|
||||
</div>
|
||||
|
||||
{/* Table Tree */}
|
||||
<ScrollArea className="flex-1 rounded-b-lg" type="auto">
|
||||
<div className="flex-1 overflow-y-auto rounded-b-lg">
|
||||
<TreeView
|
||||
data={filteredTreeData}
|
||||
onNodeClick={handleNodeClick}
|
||||
@@ -417,7 +416,7 @@ export const CanvasFilter: React.FC<CanvasFilterProps> = ({ onClose }) => {
|
||||
setExpanded={setExpanded}
|
||||
className="py-2"
|
||||
/>
|
||||
</ScrollArea>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
@@ -40,13 +40,7 @@ import {
|
||||
} from './table-node/table-node-field';
|
||||
import { Toolbar } from './toolbar/toolbar';
|
||||
import { useToast } from '@/components/toast/use-toast';
|
||||
import {
|
||||
Pencil,
|
||||
LayoutGrid,
|
||||
AlertTriangle,
|
||||
Magnet,
|
||||
Highlighter,
|
||||
} from 'lucide-react';
|
||||
import { Pencil, LayoutGrid, AlertTriangle, Magnet } from 'lucide-react';
|
||||
import { Button } from '@/components/button/button';
|
||||
import { useLayout } from '@/hooks/use-layout';
|
||||
import { useBreakpoint } from '@/hooks/use-breakpoint';
|
||||
@@ -92,8 +86,6 @@ import type { Area } from '@/lib/domain/area';
|
||||
import { updateTablesParentAreas, getTablesInArea } from './area-utils';
|
||||
import { CanvasFilter } from './canvas-filter/canvas-filter';
|
||||
import { useHotkeys } from 'react-hotkeys-hook';
|
||||
import { ShowAllButton } from './show-all-button';
|
||||
import { useIsLostInCanvas } from './hooks/use-is-lost-in-canvas';
|
||||
|
||||
const HIGHLIGHTED_EDGE_Z_INDEX = 1;
|
||||
const DEFAULT_EDGE_Z_INDEX = 0;
|
||||
@@ -166,7 +158,6 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
>([]);
|
||||
const { toast } = useToast();
|
||||
const { t } = useTranslation();
|
||||
const { isLostInCanvas } = useIsLostInCanvas();
|
||||
const {
|
||||
tables,
|
||||
areas,
|
||||
@@ -184,8 +175,6 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
readonly,
|
||||
removeArea,
|
||||
updateArea,
|
||||
highlightedCustomType,
|
||||
highlightCustomTypeId,
|
||||
hiddenTableIds,
|
||||
} = useChartDB();
|
||||
const { showSidePanel } = useLayout();
|
||||
@@ -294,7 +283,6 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
setEdges,
|
||||
showDependenciesOnCanvas,
|
||||
databaseType,
|
||||
tables, // Add tables to force edge recreation when table properties change
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
@@ -397,22 +385,12 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
hiddenTableIds,
|
||||
});
|
||||
|
||||
// Check if table uses the highlighted custom type
|
||||
let hasHighlightedCustomType = false;
|
||||
if (highlightedCustomType) {
|
||||
hasHighlightedCustomType = table.fields.some(
|
||||
(field) =>
|
||||
field.type.name === highlightedCustomType.name
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
...node,
|
||||
data: {
|
||||
...node.data,
|
||||
isOverlapping,
|
||||
highlightOverlappingTables,
|
||||
hasHighlightedCustomType,
|
||||
},
|
||||
};
|
||||
}),
|
||||
@@ -435,7 +413,6 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
overlapGraph.lastUpdated,
|
||||
overlapGraph.graph,
|
||||
highlightOverlappingTables,
|
||||
highlightedCustomType,
|
||||
]);
|
||||
|
||||
const prevFilteredSchemas = useRef<string[] | undefined>(undefined);
|
||||
@@ -998,19 +975,6 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
width: event.data.table.width,
|
||||
};
|
||||
|
||||
// Trigger a dimension change to force React Flow to update the node
|
||||
onNodesChangeHandler([
|
||||
{
|
||||
id: event.data.id,
|
||||
type: 'dimensions',
|
||||
dimensions: {
|
||||
width: event.data.table.width,
|
||||
height: node.measured?.height || 0,
|
||||
},
|
||||
resizing: true, // Set resizing flag to ensure the change is processed
|
||||
} as NodeDimensionChange,
|
||||
]);
|
||||
|
||||
newOverlappingGraph = findTableOverlapping(
|
||||
{
|
||||
node: {
|
||||
@@ -1065,14 +1029,7 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
setOverlapGraph(overlappingTablesInDiagram);
|
||||
}
|
||||
},
|
||||
[
|
||||
overlapGraph,
|
||||
setOverlapGraph,
|
||||
getNode,
|
||||
nodes,
|
||||
filteredSchemas,
|
||||
onNodesChangeHandler,
|
||||
]
|
||||
[overlapGraph, setOverlapGraph, getNode, nodes, filteredSchemas]
|
||||
);
|
||||
|
||||
events.useSubscription(eventConsumer);
|
||||
@@ -1205,34 +1162,6 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
})}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
{highlightedCustomType ? (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<span>
|
||||
<Button
|
||||
variant="secondary"
|
||||
className="size-8 border border-yellow-400 bg-yellow-200 p-1 shadow-none hover:bg-yellow-300 dark:border-yellow-700 dark:bg-yellow-800 dark:hover:bg-yellow-700"
|
||||
onClick={() =>
|
||||
highlightCustomTypeId(
|
||||
undefined
|
||||
)
|
||||
}
|
||||
>
|
||||
<Highlighter className="size-4" />
|
||||
</Button>
|
||||
</span>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
{t(
|
||||
'toolbar.custom_type_highlight_tooltip',
|
||||
{
|
||||
typeName:
|
||||
highlightedCustomType.name,
|
||||
}
|
||||
)}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
) : null}
|
||||
</>
|
||||
) : null}
|
||||
|
||||
@@ -1299,25 +1228,6 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
</Button>
|
||||
</Controls>
|
||||
) : null}
|
||||
{isLostInCanvas ? (
|
||||
<Controls
|
||||
position={
|
||||
isDesktop ? 'bottom-center' : 'top-center'
|
||||
}
|
||||
orientation="horizontal"
|
||||
showZoom={false}
|
||||
showFitView={false}
|
||||
showInteractive={false}
|
||||
className="!shadow-none"
|
||||
style={{
|
||||
[isDesktop ? 'bottom' : 'top']: isDesktop
|
||||
? '70px'
|
||||
: '70px',
|
||||
}}
|
||||
>
|
||||
<ShowAllButton />
|
||||
</Controls>
|
||||
) : null}
|
||||
<Controls
|
||||
position={isDesktop ? 'bottom-center' : 'top-center'}
|
||||
orientation="horizontal"
@@ -1,65 +0,0 @@
|
||||
import React, { useCallback, useEffect, useState } from 'react';
|
||||
import { Button } from '@/components/button/button';
|
||||
import { Info } from 'lucide-react';
|
||||
import { cn } from '@/lib/utils';
|
||||
import { useCanvas } from '@/hooks/use-canvas';
|
||||
|
||||
export interface ShowAllButtonProps {}
|
||||
|
||||
export const ShowAllButton: React.FC<ShowAllButtonProps> = () => {
|
||||
const { fitView } = useCanvas();
|
||||
const [visible, setVisible] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
const timer = setTimeout(() => {
|
||||
setVisible(true);
|
||||
}, 300);
|
||||
|
||||
return () => clearTimeout(timer);
|
||||
}, []);
|
||||
|
||||
const showAll = useCallback(() => {
|
||||
fitView({
|
||||
duration: 500,
|
||||
padding: 0.1,
|
||||
maxZoom: 0.8,
|
||||
});
|
||||
}, [fitView]);
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
'transition-all duration-300 ease-in-out',
|
||||
visible
|
||||
? 'translate-y-0 opacity-100'
|
||||
: 'pointer-events-none translate-y-4 opacity-0'
|
||||
)}
|
||||
>
|
||||
<div className="sm:hidden">
|
||||
<Button
|
||||
onClick={showAll}
|
||||
size="sm"
|
||||
className="h-fit rounded-lg bg-slate-900 px-4 py-1.5 text-xs text-white shadow-lg hover:bg-slate-800 dark:bg-slate-700 dark:hover:bg-slate-600"
|
||||
>
|
||||
Show All
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
<div className="hidden items-center gap-2 rounded-lg bg-slate-900 px-3 py-2 shadow-lg sm:flex">
|
||||
<div className="flex size-6 items-center justify-center rounded-full bg-pink-600">
|
||||
<Info className="size-4 text-white" />
|
||||
</div>
|
||||
<span className="text-sm text-white">
|
||||
Your content is out of view
|
||||
</span>
|
||||
<Button
|
||||
onClick={showAll}
|
||||
size="sm"
|
||||
className="ml-2 h-fit rounded-lg bg-slate-700 px-4 py-1.5 text-xs text-white hover:bg-slate-600 dark:hover:bg-slate-800"
|
||||
>
|
||||
Show All
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -21,7 +21,7 @@ import {
|
||||
SquarePlus,
|
||||
Trash2,
|
||||
} from 'lucide-react';
|
||||
import { generateDBFieldSuffix, type DBField } from '@/lib/domain/db-field';
|
||||
import type { DBField } from '@/lib/domain/db-field';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import { cn } from '@/lib/utils';
|
||||
import {
|
||||
@@ -32,7 +32,6 @@ import {
|
||||
import { useClickAway, useKeyPressEvent } from 'react-use';
|
||||
import { Input } from '@/components/input/input';
|
||||
import { useDiff } from '@/context/diff-context/use-diff';
|
||||
import { useLocalConfig } from '@/hooks/use-local-config';
|
||||
|
||||
export const LEFT_HANDLE_ID_PREFIX = 'left_rel_';
|
||||
export const RIGHT_HANDLE_ID_PREFIX = 'right_rel_';
|
||||
@@ -60,10 +59,6 @@ const arePropsEqual = (
|
||||
prevProps.field.unique === nextProps.field.unique &&
|
||||
prevProps.field.type.id === nextProps.field.type.id &&
|
||||
prevProps.field.type.name === nextProps.field.type.name &&
|
||||
prevProps.field.characterMaximumLength ===
|
||||
nextProps.field.characterMaximumLength &&
|
||||
prevProps.field.precision === nextProps.field.precision &&
|
||||
prevProps.field.scale === nextProps.field.scale &&
|
||||
prevProps.focused === nextProps.focused &&
|
||||
prevProps.highlighted === nextProps.highlighted &&
|
||||
prevProps.visible === nextProps.visible &&
|
||||
@@ -74,13 +69,8 @@ const arePropsEqual = (
|
||||
|
||||
export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
({ field, focused, tableNodeId, highlighted, visible, isConnectable }) => {
|
||||
const {
|
||||
removeField,
|
||||
relationships,
|
||||
readonly,
|
||||
updateField,
|
||||
highlightedCustomType,
|
||||
} = useChartDB();
|
||||
const { removeField, relationships, readonly, updateField } =
|
||||
useChartDB();
|
||||
const [editMode, setEditMode] = useState(false);
|
||||
const [fieldName, setFieldName] = useState(field.name);
|
||||
const inputRef = React.useRef<HTMLInputElement>(null);
|
||||
@@ -213,22 +203,13 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
setEditMode(true);
|
||||
}, []);
|
||||
|
||||
const isCustomTypeHighlighted = useMemo(() => {
|
||||
if (!highlightedCustomType) return false;
|
||||
return field.type.name === highlightedCustomType.name;
|
||||
}, [highlightedCustomType, field.type.name]);
|
||||
const { showFieldAttributes } = useLocalConfig();
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
'group relative flex h-8 items-center justify-between gap-1 border-t px-3 text-sm last:rounded-b-[6px] hover:bg-slate-100 dark:hover:bg-slate-800',
|
||||
'transition-all duration-200 ease-in-out',
|
||||
{
|
||||
'bg-pink-100 dark:bg-pink-900':
|
||||
highlighted && !isCustomTypeHighlighted,
|
||||
'bg-yellow-100 dark:bg-yellow-900':
|
||||
isCustomTypeHighlighted,
|
||||
'bg-pink-100 dark:bg-pink-900': highlighted,
|
||||
'max-h-8 opacity-100': visible,
|
||||
'z-0 max-h-0 overflow-hidden opacity-0': !visible,
|
||||
'bg-sky-200 dark:bg-sky-800 hover:bg-sky-100 dark:hover:bg-sky-900 border-sky-300 dark:border-sky-700':
|
||||
@@ -286,7 +267,7 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
)}
|
||||
<div
|
||||
className={cn(
|
||||
'flex items-center gap-1 min-w-0 flex-1 text-left',
|
||||
'flex items-center gap-1 truncate text-left',
|
||||
{
|
||||
'font-semibold': field.primaryKey || field.unique,
|
||||
'w-full': editMode,
|
||||
@@ -322,8 +303,14 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
</Button>
|
||||
</>
|
||||
) : (
|
||||
// <span
|
||||
// className="truncate"
|
||||
// onClick={readonly ? undefined : enterEditMode}
|
||||
// >
|
||||
// {field.name}
|
||||
// </span>
|
||||
<span
|
||||
className={cn('truncate min-w-0', {
|
||||
className={cn('truncate', {
|
||||
'text-red-800 font-normal dark:text-red-200':
|
||||
isDiffFieldRemoved,
|
||||
'text-green-800 font-normal dark:text-green-200':
|
||||
@@ -346,6 +333,7 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
)}
|
||||
</span>
|
||||
)}
|
||||
{/* <span className="truncate">{field.name}</span> */}
|
||||
{field.comments && !editMode ? (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
@@ -358,7 +346,7 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
) : null}
|
||||
</div>
|
||||
{editMode ? null : (
|
||||
<div className="ml-2 flex shrink-0 items-center justify-end gap-1.5">
|
||||
<div className="flex max-w-[35%] justify-end gap-1.5 truncate hover:shrink-0">
|
||||
{field.primaryKey ? (
|
||||
<div
|
||||
className={cn(
|
||||
@@ -383,8 +371,7 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
|
||||
<div
|
||||
className={cn(
|
||||
'content-center text-right text-xs text-muted-foreground overflow-hidden max-w-[8rem]',
|
||||
field.primaryKey ? 'min-w-0' : 'min-w-[3rem]',
|
||||
'content-center truncate text-right text-xs text-muted-foreground',
|
||||
!readonly ? 'group-hover:hidden' : '',
|
||||
isDiffFieldRemoved
|
||||
? 'text-red-800 dark:text-red-200'
|
||||
@@ -399,23 +386,17 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
: ''
|
||||
)}
|
||||
>
|
||||
<span className="block truncate">
|
||||
{fieldDiffChangedType ? (
|
||||
<>
|
||||
<span className="line-through">
|
||||
{field.type.name.split(' ')[0]}
|
||||
</span>{' '}
|
||||
{
|
||||
fieldDiffChangedType.name.split(
|
||||
' '
|
||||
)[0]
|
||||
}
|
||||
</>
|
||||
) : (
|
||||
`${field.type.name.split(' ')[0]}${showFieldAttributes ? generateDBFieldSuffix(field) : ''}`
|
||||
)}
|
||||
{field.nullable ? '?' : ''}
|
||||
</span>
|
||||
{fieldDiffChangedType ? (
|
||||
<>
|
||||
<span className="line-through">
|
||||
{field.type.name.split(' ')[0]}
|
||||
</span>{' '}
|
||||
{fieldDiffChangedType.name.split(' ')[0]}
|
||||
</>
|
||||
) : (
|
||||
field.type.name.split(' ')[0]
|
||||
)}
|
||||
{field.nullable ? '?' : ''}
|
||||
</div>
|
||||
{readonly ? null : (
|
||||
<div className="hidden flex-row group-hover:flex">
|
||||
|
||||
@@ -53,7 +53,6 @@ export type TableNodeType = Node<
|
||||
table: DBTable;
|
||||
isOverlapping: boolean;
|
||||
highlightOverlappingTables?: boolean;
|
||||
hasHighlightedCustomType?: boolean;
|
||||
},
|
||||
'table'
|
||||
>;
|
||||
@@ -63,12 +62,7 @@ export const TableNode: React.FC<NodeProps<TableNodeType>> = React.memo(
|
||||
selected,
|
||||
dragging,
|
||||
id,
|
||||
data: {
|
||||
table,
|
||||
isOverlapping,
|
||||
highlightOverlappingTables,
|
||||
hasHighlightedCustomType,
|
||||
},
|
||||
data: { table, isOverlapping, highlightOverlappingTables },
|
||||
}) => {
|
||||
const { updateTable, relationships, readonly } = useChartDB();
|
||||
const edges = useStore((store) => store.edges) as EdgeType[];
|
||||
@@ -309,9 +303,6 @@ export const TableNode: React.FC<NodeProps<TableNodeType>> = React.memo(
|
||||
highlightOverlappingTables && isOverlapping
|
||||
? 'animate-scale-2'
|
||||
: '',
|
||||
hasHighlightedCustomType
|
||||
? 'ring-2 ring-offset-slate-50 dark:ring-offset-slate-900 ring-yellow-500 ring-offset-2 animate-scale'
|
||||
: '',
|
||||
isDiffTableChanged && !isDiffNewTable && !isDiffTableRemoved
|
||||
? 'outline outline-[3px] outline-sky-500 dark:outline-sky-900 outline-offset-[5px]'
|
||||
: '',
|
||||
@@ -326,8 +317,6 @@ export const TableNode: React.FC<NodeProps<TableNodeType>> = React.memo(
|
||||
selected,
|
||||
isOverlapping,
|
||||
highlightOverlappingTables,
|
||||
hasHighlightedCustomType,
|
||||
|
||||
isDiffTableChanged,
|
||||
isDiffNewTable,
|
||||
isDiffTableRemoved,
|
||||
|
||||
@@ -14,6 +14,7 @@ import { useTranslation } from 'react-i18next';
|
||||
import { Button } from '@/components/button/button';
|
||||
import { keyboardShortcutsForOS } from '@/context/keyboard-shortcuts-context/keyboard-shortcuts';
|
||||
import { KeyboardShortcutAction } from '@/context/keyboard-shortcuts-context/keyboard-shortcuts';
|
||||
import { useIsLostInCanvas } from '../hooks/use-is-lost-in-canvas';
|
||||
import { useCanvas } from '@/hooks/use-canvas';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import { cn } from '@/lib/utils';
|
||||
@@ -29,6 +30,7 @@ export const Toolbar: React.FC<ToolbarProps> = () => {
|
||||
const { redo, undo, hasRedo, hasUndo } = useHistory();
|
||||
const { getZoom, zoomIn, zoomOut, fitView } = useReactFlow();
|
||||
const [zoom, setZoom] = useState<string>(convertToPercentage(getZoom()));
|
||||
const { isLostInCanvas } = useIsLostInCanvas();
|
||||
const { setShowFilter } = useCanvas();
|
||||
const { hiddenTableIds } = useChartDB();
|
||||
|
||||
@@ -104,7 +106,14 @@ export const Toolbar: React.FC<ToolbarProps> = () => {
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<span>
|
||||
<ToolbarButton onClick={showAll}>
|
||||
<ToolbarButton
|
||||
onClick={showAll}
|
||||
className={
|
||||
isLostInCanvas
|
||||
? 'bg-pink-500 text-white hover:bg-pink-600 hover:text-white'
|
||||
: ''
|
||||
}
|
||||
>
|
||||
<Scan />
|
||||
</ToolbarButton>
|
||||
</span>
|
||||
|
||||
@@ -16,17 +16,11 @@ import {
|
||||
customTypeKindToLabel,
|
||||
DBCustomTypeKind,
|
||||
} from '@/lib/domain/db-custom-type';
|
||||
import { Trash2, Braces, Highlighter } from 'lucide-react';
|
||||
import React, { useCallback, useMemo } from 'react';
|
||||
import { Trash2, Braces } from 'lucide-react';
|
||||
import React, { useCallback } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { CustomTypeEnumValues } from './enum-values/enum-values';
|
||||
import { CustomTypeCompositeFields } from './composite-fields/composite-fields';
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
TooltipTrigger,
|
||||
} from '@/components/tooltip/tooltip';
|
||||
import { checkIfCustomTypeUsed } from '../utils';
|
||||
|
||||
export interface CustomTypeListItemContentProps {
|
||||
customType: DBCustomType;
|
||||
@@ -35,13 +29,7 @@ export interface CustomTypeListItemContentProps {
|
||||
export const CustomTypeListItemContent: React.FC<
|
||||
CustomTypeListItemContentProps
|
||||
> = ({ customType }) => {
|
||||
const {
|
||||
removeCustomType,
|
||||
updateCustomType,
|
||||
highlightedCustomType,
|
||||
highlightCustomTypeId,
|
||||
tables,
|
||||
} = useChartDB();
|
||||
const { removeCustomType, updateCustomType } = useChartDB();
|
||||
const { t } = useTranslation();
|
||||
|
||||
const deleteCustomTypeHandler = useCallback(() => {
|
||||
@@ -104,43 +92,6 @@ export const CustomTypeListItemContent: React.FC<
|
||||
[customType.id, updateCustomType]
|
||||
);
|
||||
|
||||
const toggleHighlightCustomType = useCallback(() => {
|
||||
if (highlightedCustomType?.id === customType.id) {
|
||||
highlightCustomTypeId(undefined);
|
||||
} else {
|
||||
highlightCustomTypeId(customType.id);
|
||||
}
|
||||
}, [customType.id, highlightCustomTypeId, highlightedCustomType?.id]);
|
||||
|
||||
const canHighlight = useMemo(
|
||||
() => checkIfCustomTypeUsed({ customType, tables }),
|
||||
[customType, tables]
|
||||
);
|
||||
|
||||
const isHighlighted = useMemo(
|
||||
() => highlightedCustomType?.id === customType.id,
|
||||
[highlightedCustomType, customType.id]
|
||||
);
|
||||
|
||||
const renderHighlightButton = useCallback(
|
||||
() => (
|
||||
<Button
|
||||
variant="ghost"
|
||||
disabled={!canHighlight}
|
||||
className="flex h-8 w-full items-center justify-center p-2 text-xs"
|
||||
onClick={toggleHighlightCustomType}
|
||||
>
|
||||
<Highlighter className="mr-1 size-3.5" />
|
||||
{t(
|
||||
isHighlighted
|
||||
? 'side_panel.custom_types_section.custom_type.custom_type_actions.clear_field_highlight'
|
||||
: 'side_panel.custom_types_section.custom_type.custom_type_actions.highlight_fields'
|
||||
)}
|
||||
</Button>
|
||||
),
|
||||
[isHighlighted, canHighlight, toggleHighlightCustomType, t]
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="my-1 flex flex-col rounded-b-md px-1">
|
||||
<div className="flex flex-col gap-6">
|
||||
@@ -197,24 +148,10 @@ export const CustomTypeListItemContent: React.FC<
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
<div className="flex flex-col items-center justify-center pt-2">
|
||||
{!canHighlight ? (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<span>{renderHighlightButton()}</span>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
{t(
|
||||
'side_panel.custom_types_section.custom_type.no_fields_tooltip'
|
||||
)}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
) : (
|
||||
renderHighlightButton()
|
||||
)}
|
||||
<div className="flex flex-1 items-center justify-center pt-2">
|
||||
<Button
|
||||
variant="ghost"
|
||||
className="flex h-8 w-full items-center justify-center p-2 text-xs"
|
||||
className="h-8 p-2 text-xs"
|
||||
onClick={deleteCustomTypeHandler}
|
||||
>
|
||||
<Trash2 className="mr-1 size-3.5 text-red-700" />
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
import React, { useCallback, useMemo } from 'react';
|
||||
import React, { useCallback } from 'react';
|
||||
import {
|
||||
GripVertical,
|
||||
Pencil,
|
||||
EllipsisVertical,
|
||||
Trash2,
|
||||
Check,
|
||||
Highlighter,
|
||||
} from 'lucide-react';
|
||||
import { ListItemHeaderButton } from '@/pages/editor-page/side-panel/list-item-header-button/list-item-header-button';
|
||||
import { Input } from '@/components/input/input';
|
||||
@@ -33,7 +32,6 @@ import {
|
||||
type DBCustomType,
|
||||
} from '@/lib/domain/db-custom-type';
|
||||
import { Badge } from '@/components/badge/badge';
|
||||
import { checkIfCustomTypeUsed } from '../utils';
|
||||
|
||||
export interface CustomTypeListItemHeaderProps {
|
||||
customType: DBCustomType;
|
||||
@@ -42,15 +40,8 @@ export interface CustomTypeListItemHeaderProps {
|
||||
export const CustomTypeListItemHeader: React.FC<
|
||||
CustomTypeListItemHeaderProps
|
||||
> = ({ customType }) => {
|
||||
const {
|
||||
updateCustomType,
|
||||
removeCustomType,
|
||||
schemas,
|
||||
filteredSchemas,
|
||||
highlightedCustomType,
|
||||
highlightCustomTypeId,
|
||||
tables,
|
||||
} = useChartDB();
|
||||
const { updateCustomType, removeCustomType, schemas, filteredSchemas } =
|
||||
useChartDB();
|
||||
const { t } = useTranslation();
|
||||
const [editMode, setEditMode] = React.useState(false);
|
||||
const [customTypeName, setCustomTypeName] = React.useState(customType.name);
|
||||
@@ -80,40 +71,12 @@ export const CustomTypeListItemHeader: React.FC<
|
||||
setEditMode(true);
|
||||
};
|
||||
|
||||
const deleteCustomTypeHandler = useCallback(
|
||||
(e: React.MouseEvent<HTMLDivElement, MouseEvent>) => {
|
||||
e.stopPropagation();
|
||||
const deleteCustomTypeHandler = useCallback(() => {
|
||||
removeCustomType(customType.id);
|
||||
}, [customType.id, removeCustomType]);
|
||||
|
||||
removeCustomType(customType.id);
|
||||
},
|
||||
[customType.id, removeCustomType]
|
||||
);
|
||||
|
||||
const isHighlighted = useMemo(
|
||||
() => highlightedCustomType?.id === customType.id,
|
||||
[highlightedCustomType, customType.id]
|
||||
);
|
||||
|
||||
const toggleHighlightCustomType = useCallback(
|
||||
(e: React.MouseEvent<HTMLDivElement, MouseEvent>) => {
|
||||
e.stopPropagation();
|
||||
|
||||
if (isHighlighted) {
|
||||
highlightCustomTypeId(undefined);
|
||||
} else {
|
||||
highlightCustomTypeId(customType.id);
|
||||
}
|
||||
},
|
||||
[customType.id, highlightCustomTypeId, isHighlighted]
|
||||
);
|
||||
|
||||
const canHighlight = useMemo(
|
||||
() => checkIfCustomTypeUsed({ customType, tables }),
|
||||
[customType, tables]
|
||||
);
|
||||
|
||||
const renderDropDownMenu = useCallback(() => {
|
||||
return (
|
||||
const renderDropDownMenu = useCallback(
|
||||
() => (
|
||||
<DropdownMenu>
|
||||
<DropdownMenuTrigger>
|
||||
<ListItemHeaderButton>
|
||||
@@ -128,18 +91,6 @@ export const CustomTypeListItemHeader: React.FC<
|
||||
</DropdownMenuLabel>
|
||||
<DropdownMenuSeparator />
|
||||
<DropdownMenuGroup>
|
||||
<DropdownMenuItem
|
||||
onClick={toggleHighlightCustomType}
|
||||
disabled={!canHighlight}
|
||||
className="flex justify-between"
|
||||
>
|
||||
{t(
|
||||
isHighlighted
|
||||
? 'side_panel.custom_types_section.custom_type.custom_type_actions.clear_field_highlight'
|
||||
: 'side_panel.custom_types_section.custom_type.custom_type_actions.highlight_fields'
|
||||
)}
|
||||
<Highlighter className="size-3.5" />
|
||||
</DropdownMenuItem>
|
||||
<DropdownMenuItem
|
||||
onClick={deleteCustomTypeHandler}
|
||||
className="flex justify-between !text-red-700"
|
||||
@@ -152,14 +103,9 @@ export const CustomTypeListItemHeader: React.FC<
|
||||
</DropdownMenuGroup>
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
);
|
||||
}, [
|
||||
deleteCustomTypeHandler,
|
||||
t,
|
||||
toggleHighlightCustomType,
|
||||
canHighlight,
|
||||
isHighlighted,
|
||||
]);
|
||||
),
|
||||
[deleteCustomTypeHandler, t]
|
||||
);
|
||||
|
||||
let schemaToDisplay;
|
||||
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
import type { DBCustomType, DBTable } from '@/lib/domain';
|
||||
|
||||
export const checkIfCustomTypeUsed = ({
|
||||
customType,
|
||||
tables,
|
||||
}: {
|
||||
customType: DBCustomType;
|
||||
tables: DBTable[];
|
||||
}): boolean => {
|
||||
const typeNameToFind = customType.name;
|
||||
|
||||
for (const table of tables) {
|
||||
for (const field of table.fields) {
|
||||
if (field.type.name === typeNameToFind) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
@@ -1,4 +1,4 @@
|
||||
import React, { useMemo, useState, useEffect, useCallback } from 'react';
|
||||
import React, { useMemo, useState, useEffect } from 'react';
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import { useTheme } from '@/hooks/use-theme';
|
||||
@@ -76,20 +76,16 @@ export const TableDBML: React.FC<TableDBMLProps> = ({ filteredTables }) => {
|
||||
}, [currentDiagram, filteredTables, toast]);
|
||||
|
||||
// Determine which DBML string to display
|
||||
const dbmlToDisplay = useMemo(
|
||||
() => (dbmlFormat === 'inline' ? inlineDbml : standardDbml),
|
||||
[dbmlFormat, inlineDbml, standardDbml]
|
||||
);
|
||||
const dbmlToDisplay = dbmlFormat === 'inline' ? inlineDbml : standardDbml;
|
||||
|
||||
// Toggle function
|
||||
const toggleFormat = useCallback(() => {
|
||||
const toggleFormat = () => {
|
||||
setDbmlFormat((prev) => (prev === 'inline' ? 'standard' : 'inline'));
|
||||
}, []);
|
||||
};
|
||||
|
||||
return (
|
||||
<CodeSnippet
|
||||
code={dbmlToDisplay}
|
||||
actionsTooltipSide="right"
|
||||
className="my-0.5"
|
||||
actions={[
|
||||
{
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
import React, { useEffect, useRef, useCallback, useMemo } from 'react';
|
||||
import React, { useEffect, useRef, useCallback } from 'react';
|
||||
import { Ellipsis, Trash2 } from 'lucide-react';
|
||||
import { Input } from '@/components/input/input';
|
||||
import { Button } from '@/components/button/button';
|
||||
import { Separator } from '@/components/separator/separator';
|
||||
import type { DBField } from '@/lib/domain/db-field';
|
||||
import type { FieldAttributeRange } from '@/lib/data/data-types/data-types';
|
||||
import { findDataTypeDataById } from '@/lib/data/data-types/data-types';
|
||||
import {
|
||||
Popover,
|
||||
@@ -17,28 +16,15 @@ import { useTranslation } from 'react-i18next';
|
||||
import { Textarea } from '@/components/textarea/textarea';
|
||||
import { useDebounce } from '@/hooks/use-debounce';
|
||||
import equal from 'fast-deep-equal';
|
||||
import type { DatabaseType, DBTable } from '@/lib/domain';
|
||||
|
||||
import {
|
||||
Select,
|
||||
SelectContent,
|
||||
SelectItem,
|
||||
SelectTrigger,
|
||||
SelectValue,
|
||||
} from '@/components/select/select';
|
||||
|
||||
export interface TableFieldPopoverProps {
|
||||
field: DBField;
|
||||
table: DBTable;
|
||||
databaseType: DatabaseType;
|
||||
updateField: (attrs: Partial<DBField>) => void;
|
||||
removeField: () => void;
|
||||
}
|
||||
|
||||
export const TableFieldPopover: React.FC<TableFieldPopoverProps> = ({
|
||||
field,
|
||||
table,
|
||||
databaseType,
|
||||
updateField,
|
||||
removeField,
|
||||
}) => {
|
||||
@@ -46,19 +32,6 @@ export const TableFieldPopover: React.FC<TableFieldPopoverProps> = ({
|
||||
const [localField, setLocalField] = React.useState<DBField>(field);
|
||||
const [isOpen, setIsOpen] = React.useState(false);
|
||||
|
||||
// Check if this field is the only primary key in the table
|
||||
const isOnlyPrimaryKey = React.useMemo(() => {
|
||||
if (!field.primaryKey) return false;
|
||||
|
||||
// Early exit if we find another primary key
|
||||
for (const f of table.fields) {
|
||||
if (f.id !== field.id && f.primaryKey) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}, [table.fields, field.primaryKey, field.id]);
|
||||
|
||||
useEffect(() => {
|
||||
setLocalField(field);
|
||||
}, [field]);
|
||||
@@ -79,8 +52,6 @@ export const TableFieldPopover: React.FC<TableFieldPopoverProps> = ({
|
||||
debouncedUpdateField({
|
||||
comments: localField.comments,
|
||||
characterMaximumLength: localField.characterMaximumLength,
|
||||
precision: localField.precision,
|
||||
scale: localField.scale,
|
||||
unique: localField.unique,
|
||||
default: localField.default,
|
||||
});
|
||||
@@ -88,11 +59,6 @@ export const TableFieldPopover: React.FC<TableFieldPopoverProps> = ({
|
||||
prevFieldRef.current = localField;
|
||||
}, [localField, debouncedUpdateField, isOpen]);
|
||||
|
||||
const dataFieldType = useMemo(
|
||||
() => findDataTypeDataById(field.type.id, databaseType),
|
||||
[field.type.id, databaseType]
|
||||
);
|
||||
|
||||
return (
|
||||
<Popover
|
||||
open={isOpen}
|
||||
@@ -128,7 +94,7 @@ export const TableFieldPopover: React.FC<TableFieldPopoverProps> = ({
|
||||
</Label>
|
||||
<Checkbox
|
||||
checked={localField.unique}
|
||||
disabled={isOnlyPrimaryKey}
|
||||
disabled={field.primaryKey}
|
||||
onCheckedChange={(value) =>
|
||||
setLocalField((current) => ({
|
||||
...current,
|
||||
@@ -157,7 +123,8 @@ export const TableFieldPopover: React.FC<TableFieldPopoverProps> = ({
|
||||
className="w-full rounded-md bg-muted text-sm"
|
||||
/>
|
||||
</div>
|
||||
{dataFieldType?.fieldAttributes?.hasCharMaxLength ? (
|
||||
{findDataTypeDataById(field.type.id)
|
||||
?.hasCharMaxLength ? (
|
||||
<div className="flex flex-col gap-2">
|
||||
<Label
|
||||
htmlFor="width"
|
||||
@@ -167,209 +134,20 @@ export const TableFieldPopover: React.FC<TableFieldPopoverProps> = ({
|
||||
'side_panel.tables_section.table.field_actions.character_length'
|
||||
)}
|
||||
</Label>
|
||||
{dataFieldType?.fieldAttributes
|
||||
?.hasCharMaxLengthOption ? (
|
||||
<div className="flex gap-2">
|
||||
<Select
|
||||
value={
|
||||
localField.characterMaximumLength ===
|
||||
'max'
|
||||
? 'max'
|
||||
: localField.characterMaximumLength
|
||||
? 'custom'
|
||||
: 'none'
|
||||
}
|
||||
onValueChange={(value) => {
|
||||
if (value === 'max') {
|
||||
setLocalField(
|
||||
(current) => ({
|
||||
...current,
|
||||
characterMaximumLength:
|
||||
'max',
|
||||
})
|
||||
);
|
||||
} else if (value === 'custom') {
|
||||
setLocalField(
|
||||
(current) => ({
|
||||
...current,
|
||||
characterMaximumLength:
|
||||
'255',
|
||||
})
|
||||
);
|
||||
} else {
|
||||
setLocalField(
|
||||
(current) => ({
|
||||
...current,
|
||||
characterMaximumLength:
|
||||
null,
|
||||
})
|
||||
);
|
||||
}
|
||||
}}
|
||||
>
|
||||
<SelectTrigger className="w-full bg-muted">
|
||||
<SelectValue placeholder="Select length" />
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
<SelectItem value="none">
|
||||
No length
|
||||
</SelectItem>
|
||||
<SelectItem value="max">
|
||||
MAX
|
||||
</SelectItem>
|
||||
<SelectItem value="custom">
|
||||
Custom
|
||||
</SelectItem>
|
||||
</SelectContent>
|
||||
</Select>
|
||||
{localField.characterMaximumLength &&
|
||||
localField.characterMaximumLength !==
|
||||
'max' ? (
|
||||
<Input
|
||||
value={
|
||||
localField.characterMaximumLength
|
||||
}
|
||||
type="number"
|
||||
min="1"
|
||||
max={
|
||||
dataFieldType
|
||||
?.fieldAttributes
|
||||
?.maxLength || undefined
|
||||
}
|
||||
onChange={(e) =>
|
||||
setLocalField(
|
||||
(current) => ({
|
||||
...current,
|
||||
characterMaximumLength:
|
||||
e.target.value,
|
||||
})
|
||||
)
|
||||
}
|
||||
className="w-24 rounded-md bg-muted text-sm"
|
||||
/>
|
||||
) : null}
|
||||
</div>
|
||||
) : (
|
||||
<Input
|
||||
value={
|
||||
localField.characterMaximumLength ??
|
||||
''
|
||||
}
|
||||
type="number"
|
||||
onChange={(e) =>
|
||||
setLocalField((current) => ({
|
||||
...current,
|
||||
characterMaximumLength:
|
||||
e.target.value,
|
||||
}))
|
||||
}
|
||||
className="w-full rounded-md bg-muted text-sm"
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
) : null}
|
||||
{dataFieldType?.fieldAttributes?.precision ||
|
||||
dataFieldType?.fieldAttributes?.scale ? (
|
||||
<div className="flex gap-2">
|
||||
<div className="flex flex-1 flex-col gap-2">
|
||||
<Label
|
||||
htmlFor="width"
|
||||
className="text-subtitle"
|
||||
>
|
||||
{t(
|
||||
'side_panel.tables_section.table.field_actions.precision'
|
||||
)}
|
||||
</Label>
|
||||
<Input
|
||||
value={localField.precision ?? ''}
|
||||
type="number"
|
||||
max={
|
||||
dataFieldType?.fieldAttributes
|
||||
?.precision
|
||||
? (
|
||||
dataFieldType
|
||||
?.fieldAttributes
|
||||
?.precision as FieldAttributeRange
|
||||
).max
|
||||
: undefined
|
||||
}
|
||||
min={
|
||||
dataFieldType?.fieldAttributes
|
||||
?.precision
|
||||
? (
|
||||
dataFieldType
|
||||
?.fieldAttributes
|
||||
?.precision as FieldAttributeRange
|
||||
).min
|
||||
: undefined
|
||||
}
|
||||
placeholder={
|
||||
dataFieldType?.fieldAttributes
|
||||
?.precision
|
||||
? `${(dataFieldType?.fieldAttributes?.precision as FieldAttributeRange).default}`
|
||||
: 'Optional'
|
||||
}
|
||||
onChange={(e) =>
|
||||
setLocalField((current) => ({
|
||||
...current,
|
||||
precision: e.target.value
|
||||
? parseInt(e.target.value)
|
||||
: undefined,
|
||||
}))
|
||||
}
|
||||
className="w-full rounded-md bg-muted text-sm"
|
||||
/>
|
||||
</div>
|
||||
<div className="flex flex-1 flex-col gap-2">
|
||||
<Label
|
||||
htmlFor="width"
|
||||
className="text-subtitle"
|
||||
>
|
||||
{t(
|
||||
'side_panel.tables_section.table.field_actions.scale'
|
||||
)}
|
||||
</Label>
|
||||
<Input
|
||||
value={localField.scale ?? ''}
|
||||
max={
|
||||
dataFieldType?.fieldAttributes
|
||||
?.scale
|
||||
? (
|
||||
dataFieldType
|
||||
?.fieldAttributes
|
||||
?.scale as FieldAttributeRange
|
||||
).max
|
||||
: undefined
|
||||
}
|
||||
min={
|
||||
dataFieldType?.fieldAttributes
|
||||
?.scale
|
||||
? (
|
||||
findDataTypeDataById(
|
||||
field.type.id
|
||||
)?.fieldAttributes
|
||||
?.scale as FieldAttributeRange
|
||||
).min
|
||||
: undefined
|
||||
}
|
||||
placeholder={
|
||||
dataFieldType?.fieldAttributes
|
||||
?.scale
|
||||
? `${(dataFieldType?.fieldAttributes?.scale as FieldAttributeRange).default}`
|
||||
: 'Optional'
|
||||
}
|
||||
type="number"
|
||||
onChange={(e) =>
|
||||
setLocalField((current) => ({
|
||||
...current,
|
||||
scale: e.target.value
|
||||
? parseInt(e.target.value)
|
||||
: undefined,
|
||||
}))
|
||||
}
|
||||
className="w-full rounded-md bg-muted text-sm"
|
||||
/>
|
||||
</div>
|
||||
<Input
|
||||
value={
|
||||
localField.characterMaximumLength ?? ''
|
||||
}
|
||||
type="number"
|
||||
onChange={(e) =>
|
||||
setLocalField((current) => ({
|
||||
...current,
|
||||
characterMaximumLength:
|
||||
e.target.value,
|
||||
}))
|
||||
}
|
||||
className="w-full rounded-md bg-muted text-sm"
|
||||
/>
|
||||
</div>
|
||||
) : null}
|
||||
<div className="flex flex-col gap-2">
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
import React, { useCallback, useMemo } from 'react';
|
||||
import { GripVertical, KeyRound } from 'lucide-react';
|
||||
import { Input } from '@/components/input/input';
|
||||
import { generateDBFieldSuffix, type DBField } from '@/lib/domain/db-field';
|
||||
import type { DBField } from '@/lib/domain/db-field';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import type { DataTypeData } from '@/lib/data/data-types/data-types';
|
||||
import {
|
||||
dataTypeDataToDataType,
|
||||
sortedDataTypeMap,
|
||||
@@ -23,62 +22,14 @@ import type {
|
||||
} from '@/components/select-box/select-box';
|
||||
import { SelectBox } from '@/components/select-box/select-box';
|
||||
import { TableFieldPopover } from './table-field-modal/table-field-modal';
|
||||
import type { DBTable } from '@/lib/domain';
|
||||
|
||||
export interface TableFieldProps {
|
||||
table: DBTable;
|
||||
field: DBField;
|
||||
updateField: (attrs: Partial<DBField>) => void;
|
||||
removeField: () => void;
|
||||
}
|
||||
|
||||
const generateFieldRegexPatterns = (
|
||||
dataType: DataTypeData
|
||||
): {
|
||||
regex?: string;
|
||||
extractRegex?: RegExp;
|
||||
} => {
|
||||
if (!dataType.fieldAttributes) {
|
||||
return { regex: undefined, extractRegex: undefined };
|
||||
}
|
||||
|
||||
const typeName = dataType.name;
|
||||
const fieldAttributes = dataType.fieldAttributes;
|
||||
|
||||
if (fieldAttributes.hasCharMaxLength) {
|
||||
if (fieldAttributes.hasCharMaxLengthOption) {
|
||||
return {
|
||||
regex: `^${typeName}\\((\\d+|[mM][aA][xX])\\)$`,
|
||||
extractRegex: /\((\d+|max)\)/i,
|
||||
};
|
||||
}
|
||||
return {
|
||||
regex: `^${typeName}\\(\\d+\\)$`,
|
||||
extractRegex: /\((\d+)\)/,
|
||||
};
|
||||
}
|
||||
|
||||
if (fieldAttributes.precision && fieldAttributes.scale) {
|
||||
return {
|
||||
regex: `^${typeName}\\s*\\(\\s*\\d+\\s*(?:,\\s*\\d+\\s*)?\\)$`,
|
||||
extractRegex: new RegExp(
|
||||
`${typeName}\\s*\\(\\s*(\\d+)\\s*(?:,\\s*(\\d+)\\s*)?\\)`
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (fieldAttributes.precision) {
|
||||
return {
|
||||
regex: `^${typeName}\\s*\\(\\s*\\d+\\s*\\)$`,
|
||||
extractRegex: /\((\d+)\)/,
|
||||
};
|
||||
}
|
||||
|
||||
return { regex: undefined, extractRegex: undefined };
|
||||
};
|
||||
|
||||
export const TableField: React.FC<TableFieldProps> = ({
|
||||
table,
|
||||
field,
|
||||
updateField,
|
||||
removeField,
|
||||
@@ -86,30 +37,21 @@ export const TableField: React.FC<TableFieldProps> = ({
|
||||
const { databaseType, customTypes } = useChartDB();
|
||||
const { t } = useTranslation();
|
||||
|
||||
// Only calculate primary key fields, not just count
|
||||
const primaryKeyFields = useMemo(() => {
|
||||
return table.fields.filter((f) => f.primaryKey);
|
||||
}, [table.fields]);
|
||||
|
||||
const primaryKeyCount = primaryKeyFields.length;
|
||||
|
||||
const { attributes, listeners, setNodeRef, transform, transition } =
|
||||
useSortable({ id: field.id });
|
||||
|
||||
const dataFieldOptions = useMemo(() => {
|
||||
const standardTypes: SelectBoxOption[] = sortedDataTypeMap[
|
||||
databaseType
|
||||
].map((type) => {
|
||||
const regexPatterns = generateFieldRegexPatterns(type);
|
||||
|
||||
return {
|
||||
label: type.name,
|
||||
value: type.id,
|
||||
regex: regexPatterns.regex,
|
||||
extractRegex: regexPatterns.extractRegex,
|
||||
group: customTypes?.length ? 'Standard Types' : undefined,
|
||||
};
|
||||
});
|
||||
].map((type) => ({
|
||||
label: type.name,
|
||||
value: type.id,
|
||||
regex: type.hasCharMaxLength
|
||||
? `^${type.name}\\(\\d+\\)$`
|
||||
: undefined,
|
||||
extractRegex: type.hasCharMaxLength ? /\((\d+)\)/ : undefined,
|
||||
group: customTypes?.length ? 'Standard Types' : undefined,
|
||||
}));
|
||||
|
||||
if (!customTypes?.length) {
|
||||
return standardTypes;
|
||||
@@ -141,44 +83,18 @@ export const TableField: React.FC<TableFieldProps> = ({
|
||||
};
|
||||
|
||||
let characterMaximumLength: string | undefined = undefined;
|
||||
let precision: number | undefined = undefined;
|
||||
let scale: number | undefined = undefined;
|
||||
|
||||
if (regexMatches?.length) {
|
||||
if (dataType?.fieldAttributes?.hasCharMaxLength) {
|
||||
characterMaximumLength = regexMatches[1]?.toLowerCase();
|
||||
} else if (
|
||||
dataType?.fieldAttributes?.precision &&
|
||||
dataType?.fieldAttributes?.scale
|
||||
) {
|
||||
precision = parseInt(regexMatches[1]);
|
||||
scale = regexMatches[2]
|
||||
? parseInt(regexMatches[2])
|
||||
: undefined;
|
||||
} else if (dataType?.fieldAttributes?.precision) {
|
||||
precision = parseInt(regexMatches[1]);
|
||||
}
|
||||
} else {
|
||||
if (
|
||||
dataType?.fieldAttributes?.hasCharMaxLength &&
|
||||
field.characterMaximumLength
|
||||
) {
|
||||
characterMaximumLength = field.characterMaximumLength;
|
||||
}
|
||||
|
||||
if (dataType?.fieldAttributes?.precision && field.precision) {
|
||||
precision = field.precision;
|
||||
}
|
||||
|
||||
if (dataType?.fieldAttributes?.scale && field.scale) {
|
||||
scale = field.scale;
|
||||
}
|
||||
if (regexMatches?.length && dataType?.hasCharMaxLength) {
|
||||
characterMaximumLength = regexMatches[1];
|
||||
} else if (
|
||||
field.characterMaximumLength &&
|
||||
dataType?.hasCharMaxLength
|
||||
) {
|
||||
characterMaximumLength = field.characterMaximumLength;
|
||||
}
|
||||
|
||||
updateField({
|
||||
characterMaximumLength,
|
||||
precision,
|
||||
scale,
|
||||
type: dataTypeDataToDataType(
|
||||
dataType ?? {
|
||||
id: value as string,
|
||||
@@ -187,13 +103,7 @@ export const TableField: React.FC<TableFieldProps> = ({
|
||||
),
|
||||
});
|
||||
},
|
||||
[
|
||||
updateField,
|
||||
databaseType,
|
||||
field.characterMaximumLength,
|
||||
field.precision,
|
||||
field.scale,
|
||||
]
|
||||
[updateField, databaseType, field.characterMaximumLength]
|
||||
);
|
||||
|
||||
const style = {
|
||||
@@ -201,50 +111,14 @@ export const TableField: React.FC<TableFieldProps> = ({
|
||||
transition,
|
||||
};
|
||||
|
||||
const handlePrimaryKeyToggle = useCallback(
|
||||
(value: boolean) => {
|
||||
if (value) {
|
||||
// When setting as primary key
|
||||
const updates: Partial<DBField> = {
|
||||
primaryKey: true,
|
||||
};
|
||||
// Only auto-set unique if this will be the only primary key
|
||||
if (primaryKeyCount === 0) {
|
||||
updates.unique = true;
|
||||
}
|
||||
updateField(updates);
|
||||
} else {
|
||||
// When removing primary key
|
||||
updateField({
|
||||
primaryKey: false,
|
||||
});
|
||||
}
|
||||
},
|
||||
[primaryKeyCount, updateField]
|
||||
);
|
||||
|
||||
const handleNullableToggle = useCallback(
|
||||
(value: boolean) => {
|
||||
updateField({ nullable: value });
|
||||
},
|
||||
[updateField]
|
||||
);
|
||||
|
||||
const handleNameChange = useCallback(
|
||||
(e: React.ChangeEvent<HTMLInputElement>) => {
|
||||
updateField({ name: e.target.value });
|
||||
},
|
||||
[updateField]
|
||||
);
|
||||
|
||||
return (
|
||||
<div
|
||||
className="flex flex-1 touch-none flex-row justify-between gap-2 p-1"
|
||||
className="flex flex-1 touch-none flex-row justify-between p-1"
|
||||
ref={setNodeRef}
|
||||
style={style}
|
||||
{...attributes}
|
||||
>
|
||||
<div className="flex flex-1 items-center justify-start gap-1 overflow-hidden">
|
||||
<div className="flex w-8/12 items-center justify-start gap-1 overflow-hidden">
|
||||
<div
|
||||
className="flex w-4 shrink-0 cursor-move items-center justify-center"
|
||||
{...listeners}
|
||||
@@ -253,7 +127,7 @@ export const TableField: React.FC<TableFieldProps> = ({
|
||||
</div>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<span className="min-w-0 flex-1">
|
||||
<span className="w-5/12">
|
||||
<Input
|
||||
className="h-8 w-full !truncate focus-visible:ring-0"
|
||||
type="text"
|
||||
@@ -261,14 +135,18 @@ export const TableField: React.FC<TableFieldProps> = ({
|
||||
'side_panel.tables_section.table.field_name'
|
||||
)}
|
||||
value={field.name}
|
||||
onChange={handleNameChange}
|
||||
onChange={(e) =>
|
||||
updateField({
|
||||
name: e.target.value,
|
||||
})
|
||||
}
|
||||
/>
|
||||
</span>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>{field.name}</TooltipContent>
|
||||
</Tooltip>
|
||||
<Tooltip>
|
||||
<TooltipTrigger className="flex h-8 min-w-0 flex-1" asChild>
|
||||
<TooltipTrigger className="flex h-8 !w-5/12" asChild>
|
||||
<span>
|
||||
<SelectBox
|
||||
className="flex h-8 min-h-8 w-full"
|
||||
@@ -278,14 +156,26 @@ export const TableField: React.FC<TableFieldProps> = ({
|
||||
'side_panel.tables_section.table.field_type'
|
||||
)}
|
||||
value={field.type.id}
|
||||
valueSuffix={generateDBFieldSuffix(field)}
|
||||
optionSuffix={(option) =>
|
||||
generateDBFieldSuffix(field, {
|
||||
databaseType,
|
||||
forceExtended: true,
|
||||
typeId: option.value,
|
||||
})
|
||||
valueSuffix={
|
||||
field.characterMaximumLength
|
||||
? `(${field.characterMaximumLength})`
|
||||
: ''
|
||||
}
|
||||
optionSuffix={(option) => {
|
||||
const type = sortedDataTypeMap[
|
||||
databaseType
|
||||
].find((v) => v.id === option.value);
|
||||
|
||||
if (!type) {
|
||||
return '';
|
||||
}
|
||||
|
||||
if (type.hasCharMaxLength) {
|
||||
return `(${!field.characterMaximumLength ? 'n' : field.characterMaximumLength})`;
|
||||
}
|
||||
|
||||
return '';
|
||||
}}
|
||||
onChange={onChangeDataType}
|
||||
emptyPlaceholder={t(
|
||||
'side_panel.tables_section.table.no_types_found'
|
||||
@@ -301,13 +191,17 @@ export const TableField: React.FC<TableFieldProps> = ({
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</div>
|
||||
<div className="flex shrink-0 items-center justify-end gap-1">
|
||||
<div className="flex w-4/12 justify-end gap-1 overflow-hidden">
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<span>
|
||||
<TableFieldToggle
|
||||
pressed={field.nullable}
|
||||
onPressedChange={handleNullableToggle}
|
||||
onPressedChange={(value) =>
|
||||
updateField({
|
||||
nullable: value,
|
||||
})
|
||||
}
|
||||
>
|
||||
N
|
||||
</TableFieldToggle>
|
||||
@@ -322,7 +216,12 @@ export const TableField: React.FC<TableFieldProps> = ({
|
||||
<span>
|
||||
<TableFieldToggle
|
||||
pressed={field.primaryKey}
|
||||
onPressedChange={handlePrimaryKeyToggle}
|
||||
onPressedChange={(value) =>
|
||||
updateField({
|
||||
unique: value,
|
||||
primaryKey: value,
|
||||
})
|
||||
}
|
||||
>
|
||||
<KeyRound className="h-3.5" />
|
||||
</TableFieldToggle>
|
||||
@@ -334,10 +233,8 @@ export const TableField: React.FC<TableFieldProps> = ({
|
||||
</Tooltip>
|
||||
<TableFieldPopover
|
||||
field={field}
|
||||
table={table}
|
||||
updateField={updateField}
|
||||
removeField={removeField}
|
||||
databaseType={databaseType}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -56,32 +56,6 @@ export const TableListItemContent: React.FC<TableListItemContentProps> = ({
|
||||
>(['fields']);
|
||||
const sensors = useSensors(useSensor(PointerSensor));
|
||||
|
||||
// Create a memoized version of the field updater that handles primary key logic
|
||||
const handleFieldUpdate = useCallback(
|
||||
(fieldId: string, attrs: Partial<DBField>) => {
|
||||
updateField(table.id, fieldId, attrs);
|
||||
|
||||
// Handle the case when removing a primary key and only one remains
|
||||
if (attrs.primaryKey === false) {
|
||||
const remainingPrimaryKeys = table.fields.filter(
|
||||
(f) => f.id !== fieldId && f.primaryKey
|
||||
);
|
||||
if (remainingPrimaryKeys.length === 1) {
|
||||
// Set the remaining primary key field as unique
|
||||
updateField(
|
||||
table.id,
|
||||
remainingPrimaryKeys[0].id,
|
||||
{
|
||||
unique: true,
|
||||
},
|
||||
{ updateHistory: false }
|
||||
);
|
||||
}
|
||||
}
|
||||
},
|
||||
[table.id, table.fields, updateField]
|
||||
);
|
||||
|
||||
const handleDragEnd = (event: DragEndEvent) => {
|
||||
const { active, over } = event;
|
||||
|
||||
@@ -173,9 +147,14 @@ export const TableListItemContent: React.FC<TableListItemContentProps> = ({
|
||||
<TableField
|
||||
key={field.id}
|
||||
field={field}
|
||||
table={table}
|
||||
updateField={(attrs) =>
|
||||
handleFieldUpdate(field.id, attrs)
|
||||
updateField={(
|
||||
attrs: Partial<DBField>
|
||||
) =>
|
||||
updateField(
|
||||
table.id,
|
||||
field.id,
|
||||
attrs
|
||||
)
|
||||
}
|
||||
removeField={() =>
|
||||
removeField(table.id, field.id)
|
||||
|
||||
@@ -38,7 +38,6 @@ import {
|
||||
} from '@/components/tooltip/tooltip';
|
||||
import { cloneTable } from '@/lib/clone';
|
||||
import type { DBSchema } from '@/lib/domain';
|
||||
import { defaultSchemas } from '@/lib/data/default-schemas';
|
||||
|
||||
export interface TableListItemHeaderProps {
|
||||
table: DBTable;
|
||||
@@ -49,14 +48,12 @@ export const TableListItemHeader: React.FC<TableListItemHeaderProps> = ({
|
||||
}) => {
|
||||
const {
|
||||
updateTable,
|
||||
updateTablesState,
|
||||
removeTable,
|
||||
createIndex,
|
||||
createField,
|
||||
createTable,
|
||||
schemas,
|
||||
filteredSchemas,
|
||||
databaseType,
|
||||
} = useChartDB();
|
||||
const { openTableSchemaDialog } = useDialog();
|
||||
const { t } = useTranslation();
|
||||
@@ -131,15 +128,9 @@ export const TableListItemHeader: React.FC<TableListItemHeaderProps> = ({
|
||||
|
||||
const updateTableSchema = useCallback(
|
||||
({ schema }: { schema: DBSchema }) => {
|
||||
updateTablesState((currentTables) =>
|
||||
currentTables.map((t) =>
|
||||
t.id === table.id || !t.schema
|
||||
? { ...t, schema: schema.name }
|
||||
: t
|
||||
)
|
||||
);
|
||||
updateTable(table.id, { schema: schema.name });
|
||||
},
|
||||
[table.id, updateTablesState]
|
||||
[table.id, updateTable]
|
||||
);
|
||||
|
||||
const changeSchema = useCallback(() => {
|
||||
@@ -147,7 +138,6 @@ export const TableListItemHeader: React.FC<TableListItemHeaderProps> = ({
|
||||
table,
|
||||
schemas,
|
||||
onConfirm: updateTableSchema,
|
||||
allowSchemaCreation: true,
|
||||
});
|
||||
}, [openTableSchemaDialog, table, schemas, updateTableSchema]);
|
||||
|
||||
@@ -180,7 +170,7 @@ export const TableListItemHeader: React.FC<TableListItemHeaderProps> = ({
|
||||
)}
|
||||
</DropdownMenuLabel>
|
||||
<DropdownMenuSeparator />
|
||||
{schemas.length > 0 || defaultSchemas?.[databaseType] ? (
|
||||
{schemas.length > 0 ? (
|
||||
<>
|
||||
<DropdownMenuGroup>
|
||||
<DropdownMenuItem
|
||||
@@ -261,7 +251,6 @@ export const TableListItemHeader: React.FC<TableListItemHeaderProps> = ({
|
||||
t,
|
||||
changeSchema,
|
||||
schemas.length,
|
||||
databaseType,
|
||||
]
|
||||
);
|
||||
|
||||
|
||||
@@ -57,8 +57,6 @@ export const Menu: React.FC<MenuProps> = () => {
|
||||
setScrollAction,
|
||||
setShowCardinality,
|
||||
showCardinality,
|
||||
setShowFieldAttributes,
|
||||
showFieldAttributes,
|
||||
setShowDependenciesOnCanvas,
|
||||
showDependenciesOnCanvas,
|
||||
setShowMiniMapOnCanvas,
|
||||
@@ -139,10 +137,6 @@ export const Menu: React.FC<MenuProps> = () => {
|
||||
setShowCardinality(!showCardinality);
|
||||
}, [showCardinality, setShowCardinality]);
|
||||
|
||||
const showOrHideFieldAttributes = useCallback(() => {
|
||||
setShowFieldAttributes(!showFieldAttributes);
|
||||
}, [showFieldAttributes, setShowFieldAttributes]);
|
||||
|
||||
const showOrHideDependencies = useCallback(() => {
|
||||
setShowDependenciesOnCanvas(!showDependenciesOnCanvas);
|
||||
}, [showDependenciesOnCanvas, setShowDependenciesOnCanvas]);
|
||||
@@ -430,11 +424,6 @@ export const Menu: React.FC<MenuProps> = () => {
|
||||
? t('menu.view.hide_cardinality')
|
||||
: t('menu.view.show_cardinality')}
|
||||
</MenubarItem>
|
||||
<MenubarItem onClick={showOrHideFieldAttributes}>
|
||||
{showFieldAttributes
|
||||
? t('menu.view.hide_field_attributes')
|
||||
: t('menu.view.show_field_attributes')}
|
||||
</MenubarItem>
|
||||
{databaseType !== DatabaseType.CLICKHOUSE &&
|
||||
dependencies &&
|
||||
dependencies.length > 0 ? (
|
||||
|
||||
Reference in New Issue
Block a user