mirror of
https://github.com/chartdb/chartdb.git
synced 2025-10-28 10:33:56 +00:00
Compare commits
18 Commits
jf/edit-cl
...
v1.16.0
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c9ac8929c5 | ||
|
|
c567c0a5f3 | ||
|
|
2dc1a6fc75 | ||
|
|
98f6edd5c8 | ||
|
|
47a7a73a13 | ||
|
|
d71b46e8b5 | ||
|
|
e4c4a3b354 | ||
|
|
1b8d51b73c | ||
|
|
93d72a896b | ||
|
|
9991077978 | ||
|
|
bc82f9d6a8 | ||
|
|
26dc299cd2 | ||
|
|
d6ba4a4074 | ||
|
|
d09379e8be | ||
|
|
bdc41c0b74 | ||
|
|
d3dbf41894 | ||
|
|
e6783a89cc | ||
|
|
af3638da7a |
39
CHANGELOG.md
39
CHANGELOG.md
@@ -1,5 +1,44 @@
|
||||
# Changelog
|
||||
|
||||
## [1.16.0](https://github.com/chartdb/chartdb/compare/v1.15.1...v1.16.0) (2025-09-24)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add area context menu and UI improvements ([#918](https://github.com/chartdb/chartdb/issues/918)) ([d09379e](https://github.com/chartdb/chartdb/commit/d09379e8be0fa3c83ca77ff62ae815fe4db9869b))
|
||||
* add quick table mode on canvas ([#915](https://github.com/chartdb/chartdb/issues/915)) ([8954d89](https://github.com/chartdb/chartdb/commit/8954d893bbfee45bb311380115fb14ebbf3a3133))
|
||||
* add zoom navigation buttons to canvas filter for tables and areas ([#903](https://github.com/chartdb/chartdb/issues/903)) ([a0fb1ed](https://github.com/chartdb/chartdb/commit/a0fb1ed08ba18b66354fa3498d610097a83d4afc))
|
||||
* **import-db:** add DBML syntax to import database dialog ([#768](https://github.com/chartdb/chartdb/issues/768)) ([af3638d](https://github.com/chartdb/chartdb/commit/af3638da7a9b70f281ceaddbc2f712a713d90cda))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* add areas width and height + table width to diff check ([#931](https://github.com/chartdb/chartdb/issues/931)) ([98f6edd](https://github.com/chartdb/chartdb/commit/98f6edd5c8a8e9130e892b2d841744e0cf63a7bf))
|
||||
* add diff x,y ([#928](https://github.com/chartdb/chartdb/issues/928)) ([e4c4a3b](https://github.com/chartdb/chartdb/commit/e4c4a3b35484d9ece955a5aec577603dde73d634))
|
||||
* add support for ALTER TABLE ADD COLUMN in PostgreSQL importer ([#892](https://github.com/chartdb/chartdb/issues/892)) ([ec6e46f](https://github.com/chartdb/chartdb/commit/ec6e46fe81ea1806c179c50a4c5779d8596008aa))
|
||||
* add tests for diff ([#930](https://github.com/chartdb/chartdb/issues/930)) ([47a7a73](https://github.com/chartdb/chartdb/commit/47a7a73a137b87dfa6e67aff5f939cf64ccf4601))
|
||||
* dbml edit mode glitch ([#925](https://github.com/chartdb/chartdb/issues/925)) ([93d72a8](https://github.com/chartdb/chartdb/commit/93d72a896bab9aa79d8ea2f876126887e432214c))
|
||||
* dbml export default time bug ([#922](https://github.com/chartdb/chartdb/issues/922)) ([bc82f9d](https://github.com/chartdb/chartdb/commit/bc82f9d6a8fe4de2f7e0fc465e0a20c5dbf8f41d))
|
||||
* dbml export renaming fields bug ([#921](https://github.com/chartdb/chartdb/issues/921)) ([26dc299](https://github.com/chartdb/chartdb/commit/26dc299cd28e9890d191c13f84a15ac38ae48b11))
|
||||
* **dbml:** export array fields without quotes ([#911](https://github.com/chartdb/chartdb/issues/911)) ([5e81c18](https://github.com/chartdb/chartdb/commit/5e81c1848aaa911990e1e881d62525f5254d6d34))
|
||||
* diff logic ([#927](https://github.com/chartdb/chartdb/issues/927)) ([1b8d51b](https://github.com/chartdb/chartdb/commit/1b8d51b73c4ed4b7c5929adcb17a44927c7defca))
|
||||
* export dbml issues after upgrade version ([#883](https://github.com/chartdb/chartdb/issues/883)) ([07937a2](https://github.com/chartdb/chartdb/commit/07937a2f51708b1c10b45c2bd1f9a9acf5c3f708))
|
||||
* export sql + import metadata lib ([#902](https://github.com/chartdb/chartdb/issues/902)) ([ffddcdc](https://github.com/chartdb/chartdb/commit/ffddcdcc987bacb0e0d7e8dea27d08d3a8c5a8c8))
|
||||
* handle bidirectional relationships in DBML export ([#924](https://github.com/chartdb/chartdb/issues/924)) ([9991077](https://github.com/chartdb/chartdb/commit/99910779789a9c6ef113d06bc3de31e35b9b04d1))
|
||||
* import dbml set pk field unique ([#920](https://github.com/chartdb/chartdb/issues/920)) ([d6ba4a4](https://github.com/chartdb/chartdb/commit/d6ba4a40749d85d2703f120600df4345dab3c561))
|
||||
* improve SQL default value parsing for PostgreSQL, MySQL, and SQL Server with proper type handling and casting support ([#900](https://github.com/chartdb/chartdb/issues/900)) ([fe9ef27](https://github.com/chartdb/chartdb/commit/fe9ef275b8619dcfd7e57541a62a6237a16d29a8))
|
||||
* move area utils ([#932](https://github.com/chartdb/chartdb/issues/932)) ([2dc1a6f](https://github.com/chartdb/chartdb/commit/2dc1a6fc7519e0a455b0e1306601195deb156c96))
|
||||
* move auto arrange to toolbar ([#904](https://github.com/chartdb/chartdb/issues/904)) ([b016a70](https://github.com/chartdb/chartdb/commit/b016a70691bc22af5720b4de683e8c9353994fcc))
|
||||
* remove general db creation ([#901](https://github.com/chartdb/chartdb/issues/901)) ([df89f0b](https://github.com/chartdb/chartdb/commit/df89f0b6b9ba3fcc8b05bae4f60c0dc4ad1d2215))
|
||||
* remove many to many rel option ([#933](https://github.com/chartdb/chartdb/issues/933)) ([c567c0a](https://github.com/chartdb/chartdb/commit/c567c0a5f39157b2c430e92192b6750304d7a834))
|
||||
* reset increment and default when change field ([#896](https://github.com/chartdb/chartdb/issues/896)) ([e5e1d59](https://github.com/chartdb/chartdb/commit/e5e1d5932762422ea63acfd6cf9fe4f03aa822f7))
|
||||
* **sql-import:** handle SQL Server DDL with multiple tables, inline foreign keys, and case-insensitive field matching ([#897](https://github.com/chartdb/chartdb/issues/897)) ([2a64dee](https://github.com/chartdb/chartdb/commit/2a64deebb87a11ee3892024c3273d682bb86f7ef))
|
||||
* **sql-import:** support ALTER TABLE ALTER COLUMN TYPE in PostgreSQL importer ([#895](https://github.com/chartdb/chartdb/issues/895)) ([aa29061](https://github.com/chartdb/chartdb/commit/aa290615caf806d7d0374c848d50b4636fde7e96))
|
||||
* **sqlite:** improve parser to handle tables without column types and fix column detection ([#914](https://github.com/chartdb/chartdb/issues/914)) ([d3dbf41](https://github.com/chartdb/chartdb/commit/d3dbf41894d74f0ffce9afe3bd810f065aa53017))
|
||||
* trigger edit table on canvas from context menu ([#919](https://github.com/chartdb/chartdb/issues/919)) ([bdc41c0](https://github.com/chartdb/chartdb/commit/bdc41c0b74d9d9918e7b6cd2152fa07c0c58ce60))
|
||||
* update deps vulns ([#909](https://github.com/chartdb/chartdb/issues/909)) ([2bd9ca2](https://github.com/chartdb/chartdb/commit/2bd9ca25b2c7b1f053ff4fdc8c5cfc1b0e65901d))
|
||||
* upgrade dbml lib ([#880](https://github.com/chartdb/chartdb/issues/880)) ([d8e0bc7](https://github.com/chartdb/chartdb/commit/d8e0bc7db8881971ddaea7177bcebee13cc865f6))
|
||||
|
||||
## [1.15.1](https://github.com/chartdb/chartdb/compare/v1.15.0...v1.15.1) (2025-08-27)
|
||||
|
||||
|
||||
|
||||
4
package-lock.json
generated
4
package-lock.json
generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "chartdb",
|
||||
"version": "1.15.1",
|
||||
"version": "1.16.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "chartdb",
|
||||
"version": "1.15.1",
|
||||
"version": "1.16.0",
|
||||
"dependencies": {
|
||||
"@ai-sdk/openai": "^0.0.51",
|
||||
"@dbml/core": "^3.13.9",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "chartdb",
|
||||
"private": true,
|
||||
"version": "1.15.1",
|
||||
"version": "1.16.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
@@ -38,7 +38,7 @@ export interface CodeSnippetProps {
|
||||
className?: string;
|
||||
code: string;
|
||||
codeToCopy?: string;
|
||||
language?: 'sql' | 'shell';
|
||||
language?: 'sql' | 'shell' | 'dbml';
|
||||
loading?: boolean;
|
||||
autoScroll?: boolean;
|
||||
isComplete?: boolean;
|
||||
|
||||
@@ -9,12 +9,14 @@ export const setupDBMLLanguage = (monaco: Monaco) => {
|
||||
base: 'vs-dark',
|
||||
inherit: true,
|
||||
rules: [
|
||||
{ token: 'comment', foreground: '6A9955' }, // Comments
|
||||
{ token: 'keyword', foreground: '569CD6' }, // Table, Ref keywords
|
||||
{ token: 'string', foreground: 'CE9178' }, // Strings
|
||||
{ token: 'annotation', foreground: '9CDCFE' }, // [annotations]
|
||||
{ token: 'delimiter', foreground: 'D4D4D4' }, // Braces {}
|
||||
{ token: 'operator', foreground: 'D4D4D4' }, // Operators
|
||||
{ token: 'datatype', foreground: '4EC9B0' }, // Data types
|
||||
{ token: 'type', foreground: '4EC9B0' }, // Data types
|
||||
{ token: 'identifier', foreground: '9CDCFE' }, // Field names
|
||||
],
|
||||
colors: {},
|
||||
});
|
||||
@@ -23,12 +25,14 @@ export const setupDBMLLanguage = (monaco: Monaco) => {
|
||||
base: 'vs',
|
||||
inherit: true,
|
||||
rules: [
|
||||
{ token: 'comment', foreground: '008000' }, // Comments
|
||||
{ token: 'keyword', foreground: '0000FF' }, // Table, Ref keywords
|
||||
{ token: 'string', foreground: 'A31515' }, // Strings
|
||||
{ token: 'annotation', foreground: '001080' }, // [annotations]
|
||||
{ token: 'delimiter', foreground: '000000' }, // Braces {}
|
||||
{ token: 'operator', foreground: '000000' }, // Operators
|
||||
{ token: 'type', foreground: '267F99' }, // Data types
|
||||
{ token: 'identifier', foreground: '001080' }, // Field names
|
||||
],
|
||||
colors: {},
|
||||
});
|
||||
@@ -37,23 +41,59 @@ export const setupDBMLLanguage = (monaco: Monaco) => {
|
||||
const datatypePattern = dataTypesNames.join('|');
|
||||
|
||||
monaco.languages.setMonarchTokensProvider('dbml', {
|
||||
keywords: ['Table', 'Ref', 'Indexes', 'Note', 'Enum'],
|
||||
keywords: ['Table', 'Ref', 'Indexes', 'Note', 'Enum', 'enum'],
|
||||
datatypes: dataTypesNames,
|
||||
operators: ['>', '<', '-'],
|
||||
|
||||
tokenizer: {
|
||||
root: [
|
||||
// Comments
|
||||
[/\/\/.*$/, 'comment'],
|
||||
|
||||
// Keywords - case insensitive
|
||||
[
|
||||
/\b([Tt][Aa][Bb][Ll][Ee]|[Ee][Nn][Uu][Mm]|[Rr][Ee][Ff]|[Ii][Nn][Dd][Ee][Xx][Ee][Ss]|[Nn][Oo][Tt][Ee])\b/,
|
||||
'keyword',
|
||||
],
|
||||
|
||||
// Annotations in brackets
|
||||
[/\[.*?\]/, 'annotation'],
|
||||
|
||||
// Strings
|
||||
[/'''/, 'string', '@tripleQuoteString'],
|
||||
[/".*?"/, 'string'],
|
||||
[/'.*?'/, 'string'],
|
||||
[/"([^"\\]|\\.)*$/, 'string.invalid'], // non-terminated string
|
||||
[/'([^'\\]|\\.)*$/, 'string.invalid'], // non-terminated string
|
||||
[/"/, 'string', '@string_double'],
|
||||
[/'/, 'string', '@string_single'],
|
||||
[/`.*?`/, 'string'],
|
||||
[/[{}]/, 'delimiter'],
|
||||
[/[<>]/, 'operator'],
|
||||
[new RegExp(`\\b(${datatypePattern})\\b`, 'i'), 'type'], // Added 'i' flag for case-insensitive matching
|
||||
|
||||
// Delimiters and operators
|
||||
[/[{}()]/, 'delimiter'],
|
||||
[/[<>-]/, 'operator'],
|
||||
[/:/, 'delimiter'],
|
||||
|
||||
// Data types
|
||||
[new RegExp(`\\b(${datatypePattern})\\b`, 'i'), 'type'],
|
||||
|
||||
// Numbers
|
||||
[/\d+/, 'number'],
|
||||
|
||||
// Identifiers
|
||||
[/[a-zA-Z_]\w*/, 'identifier'],
|
||||
],
|
||||
|
||||
string_double: [
|
||||
[/[^\\"]+/, 'string'],
|
||||
[/\\./, 'string.escape'],
|
||||
[/"/, 'string', '@pop'],
|
||||
],
|
||||
|
||||
string_single: [
|
||||
[/[^\\']+/, 'string'],
|
||||
[/\\./, 'string.escape'],
|
||||
[/'/, 'string', '@pop'],
|
||||
],
|
||||
|
||||
tripleQuoteString: [
|
||||
[/[^']+/, 'string'],
|
||||
[/'''/, 'string', '@pop'],
|
||||
|
||||
@@ -7,7 +7,6 @@ import type { ExportImageDialogProps } from '@/dialogs/export-image-dialog/expor
|
||||
import type { ExportDiagramDialogProps } from '@/dialogs/export-diagram-dialog/export-diagram-dialog';
|
||||
import type { ImportDiagramDialogProps } from '@/dialogs/import-diagram-dialog/import-diagram-dialog';
|
||||
import type { CreateRelationshipDialogProps } from '@/dialogs/create-relationship-dialog/create-relationship-dialog';
|
||||
import type { ImportDBMLDialogProps } from '@/dialogs/import-dbml-dialog/import-dbml-dialog';
|
||||
import type { OpenDiagramDialogProps } from '@/dialogs/open-diagram-dialog/open-diagram-dialog';
|
||||
import type { CreateDiagramDialogProps } from '@/dialogs/create-diagram-dialog/create-diagram-dialog';
|
||||
|
||||
@@ -67,12 +66,6 @@ export interface DialogContext {
|
||||
params: Omit<ImportDiagramDialogProps, 'dialog'>
|
||||
) => void;
|
||||
closeImportDiagramDialog: () => void;
|
||||
|
||||
// Import DBML dialog
|
||||
openImportDBMLDialog: (
|
||||
params?: Omit<ImportDBMLDialogProps, 'dialog'>
|
||||
) => void;
|
||||
closeImportDBMLDialog: () => void;
|
||||
}
|
||||
|
||||
export const dialogContext = createContext<DialogContext>({
|
||||
@@ -96,6 +89,4 @@ export const dialogContext = createContext<DialogContext>({
|
||||
closeExportDiagramDialog: emptyFn,
|
||||
openImportDiagramDialog: emptyFn,
|
||||
closeImportDiagramDialog: emptyFn,
|
||||
openImportDBMLDialog: emptyFn,
|
||||
closeImportDBMLDialog: emptyFn,
|
||||
});
|
||||
|
||||
@@ -20,8 +20,6 @@ import type { ExportImageDialogProps } from '@/dialogs/export-image-dialog/expor
|
||||
import { ExportImageDialog } from '@/dialogs/export-image-dialog/export-image-dialog';
|
||||
import { ExportDiagramDialog } from '@/dialogs/export-diagram-dialog/export-diagram-dialog';
|
||||
import { ImportDiagramDialog } from '@/dialogs/import-diagram-dialog/import-diagram-dialog';
|
||||
import type { ImportDBMLDialogProps } from '@/dialogs/import-dbml-dialog/import-dbml-dialog';
|
||||
import { ImportDBMLDialog } from '@/dialogs/import-dbml-dialog/import-dbml-dialog';
|
||||
|
||||
export const DialogProvider: React.FC<React.PropsWithChildren> = ({
|
||||
children,
|
||||
@@ -132,11 +130,6 @@ export const DialogProvider: React.FC<React.PropsWithChildren> = ({
|
||||
const [openImportDiagramDialog, setOpenImportDiagramDialog] =
|
||||
useState(false);
|
||||
|
||||
// Import DBML dialog
|
||||
const [openImportDBMLDialog, setOpenImportDBMLDialog] = useState(false);
|
||||
const [importDBMLDialogParams, setImportDBMLDialogParams] =
|
||||
useState<Omit<ImportDBMLDialogProps, 'dialog'>>();
|
||||
|
||||
return (
|
||||
<dialogContext.Provider
|
||||
value={{
|
||||
@@ -165,11 +158,6 @@ export const DialogProvider: React.FC<React.PropsWithChildren> = ({
|
||||
openImportDiagramDialog: () => setOpenImportDiagramDialog(true),
|
||||
closeImportDiagramDialog: () =>
|
||||
setOpenImportDiagramDialog(false),
|
||||
openImportDBMLDialog: (params) => {
|
||||
setImportDBMLDialogParams(params);
|
||||
setOpenImportDBMLDialog(true);
|
||||
},
|
||||
closeImportDBMLDialog: () => setOpenImportDBMLDialog(false),
|
||||
}}
|
||||
>
|
||||
{children}
|
||||
@@ -204,10 +192,6 @@ export const DialogProvider: React.FC<React.PropsWithChildren> = ({
|
||||
/>
|
||||
<ExportDiagramDialog dialog={{ open: openExportDiagramDialog }} />
|
||||
<ImportDiagramDialog dialog={{ open: openImportDiagramDialog }} />
|
||||
<ImportDBMLDialog
|
||||
dialog={{ open: openImportDBMLDialog }}
|
||||
{...importDBMLDialogParams}
|
||||
/>
|
||||
</dialogContext.Provider>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -42,6 +42,14 @@ import {
|
||||
type ValidationResult,
|
||||
} from '@/lib/data/sql-import/sql-validator';
|
||||
import { SQLValidationStatus } from './sql-validation-status';
|
||||
import { setupDBMLLanguage } from '@/components/code-snippet/languages/dbml-language';
|
||||
import type { ImportMethod } from '@/lib/import-method/import-method';
|
||||
import { detectImportMethod } from '@/lib/import-method/detect-import-method';
|
||||
import { verifyDBML } from '@/lib/dbml/dbml-import/verify-dbml';
|
||||
import {
|
||||
clearErrorHighlight,
|
||||
highlightErrorLine,
|
||||
} from '@/components/code-snippet/dbml/utils';
|
||||
|
||||
const calculateContentSizeMB = (content: string): number => {
|
||||
return content.length / (1024 * 1024); // Convert to MB
|
||||
@@ -55,49 +63,6 @@ const calculateIsLargeFile = (content: string): boolean => {
|
||||
const errorScriptOutputMessage =
|
||||
'Invalid JSON. Please correct it or contact us at support@chartdb.io for help.';
|
||||
|
||||
// Helper to detect if content is likely SQL DDL or JSON
|
||||
const detectContentType = (content: string): 'query' | 'ddl' | null => {
|
||||
if (!content || content.trim().length === 0) return null;
|
||||
|
||||
// Common SQL DDL keywords
|
||||
const ddlKeywords = [
|
||||
'CREATE TABLE',
|
||||
'ALTER TABLE',
|
||||
'DROP TABLE',
|
||||
'CREATE INDEX',
|
||||
'CREATE VIEW',
|
||||
'CREATE PROCEDURE',
|
||||
'CREATE FUNCTION',
|
||||
'CREATE SCHEMA',
|
||||
'CREATE DATABASE',
|
||||
];
|
||||
|
||||
const upperContent = content.toUpperCase();
|
||||
|
||||
// Check for SQL DDL patterns
|
||||
const hasDDLKeywords = ddlKeywords.some((keyword) =>
|
||||
upperContent.includes(keyword)
|
||||
);
|
||||
if (hasDDLKeywords) return 'ddl';
|
||||
|
||||
// Check if it looks like JSON
|
||||
try {
|
||||
// Just check structure, don't need full parse for detection
|
||||
if (
|
||||
(content.trim().startsWith('{') && content.trim().endsWith('}')) ||
|
||||
(content.trim().startsWith('[') && content.trim().endsWith(']'))
|
||||
) {
|
||||
return 'query';
|
||||
}
|
||||
} catch (error) {
|
||||
// Not valid JSON, might be partial
|
||||
console.error('Error detecting content type:', error);
|
||||
}
|
||||
|
||||
// If we can't confidently detect, return null
|
||||
return null;
|
||||
};
|
||||
|
||||
export interface ImportDatabaseProps {
|
||||
goBack?: () => void;
|
||||
onImport: () => void;
|
||||
@@ -111,8 +76,8 @@ export interface ImportDatabaseProps {
|
||||
>;
|
||||
keepDialogAfterImport?: boolean;
|
||||
title: string;
|
||||
importMethod: 'query' | 'ddl';
|
||||
setImportMethod: (method: 'query' | 'ddl') => void;
|
||||
importMethod: ImportMethod;
|
||||
setImportMethod: (method: ImportMethod) => void;
|
||||
}
|
||||
|
||||
export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
@@ -132,6 +97,7 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
const { effectiveTheme } = useTheme();
|
||||
const [errorMessage, setErrorMessage] = useState('');
|
||||
const editorRef = useRef<editor.IStandaloneCodeEditor | null>(null);
|
||||
const decorationsCollection = useRef<editor.IEditorDecorationsCollection>();
|
||||
const pasteDisposableRef = useRef<IDisposable | null>(null);
|
||||
|
||||
const { t } = useTranslation();
|
||||
@@ -146,15 +112,20 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
const [isAutoFixing, setIsAutoFixing] = useState(false);
|
||||
const [showAutoFixButton, setShowAutoFixButton] = useState(false);
|
||||
|
||||
const clearDecorations = useCallback(() => {
|
||||
clearErrorHighlight(decorationsCollection.current);
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
setScriptResult('');
|
||||
setErrorMessage('');
|
||||
setShowCheckJsonButton(false);
|
||||
}, [importMethod, setScriptResult]);
|
||||
|
||||
// Check if the ddl is valid
|
||||
// Check if the ddl or dbml is valid
|
||||
useEffect(() => {
|
||||
if (importMethod !== 'ddl') {
|
||||
clearDecorations();
|
||||
if (importMethod === 'query') {
|
||||
setSqlValidation(null);
|
||||
setShowAutoFixButton(false);
|
||||
return;
|
||||
@@ -163,9 +134,54 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
if (!scriptResult.trim()) {
|
||||
setSqlValidation(null);
|
||||
setShowAutoFixButton(false);
|
||||
setErrorMessage('');
|
||||
return;
|
||||
}
|
||||
|
||||
if (importMethod === 'dbml') {
|
||||
// Validate DBML by parsing it
|
||||
const validateResponse = verifyDBML(scriptResult);
|
||||
if (!validateResponse.hasError) {
|
||||
setErrorMessage('');
|
||||
setSqlValidation({
|
||||
isValid: true,
|
||||
errors: [],
|
||||
warnings: [],
|
||||
});
|
||||
} else {
|
||||
let errorMsg = 'Invalid DBML syntax';
|
||||
let line: number = 1;
|
||||
|
||||
if (validateResponse.parsedError) {
|
||||
errorMsg = validateResponse.parsedError.message;
|
||||
line = validateResponse.parsedError.line;
|
||||
highlightErrorLine({
|
||||
error: validateResponse.parsedError,
|
||||
model: editorRef.current?.getModel(),
|
||||
editorDecorationsCollection:
|
||||
decorationsCollection.current,
|
||||
});
|
||||
}
|
||||
|
||||
setSqlValidation({
|
||||
isValid: false,
|
||||
errors: [
|
||||
{
|
||||
message: errorMsg,
|
||||
line: line,
|
||||
type: 'syntax' as const,
|
||||
},
|
||||
],
|
||||
warnings: [],
|
||||
});
|
||||
setErrorMessage(errorMsg);
|
||||
}
|
||||
|
||||
setShowAutoFixButton(false);
|
||||
return;
|
||||
}
|
||||
|
||||
// SQL validation
|
||||
// First run our validation based on database type
|
||||
const validation = validateSQL(scriptResult, databaseType);
|
||||
setSqlValidation(validation);
|
||||
@@ -192,7 +208,7 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
setErrorMessage(result.error);
|
||||
}
|
||||
});
|
||||
}, [importMethod, scriptResult, databaseType]);
|
||||
}, [importMethod, scriptResult, databaseType, clearDecorations]);
|
||||
|
||||
// Check if the script result is a valid JSON
|
||||
useEffect(() => {
|
||||
@@ -320,6 +336,8 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
const handleEditorDidMount = useCallback(
|
||||
(editor: editor.IStandaloneCodeEditor) => {
|
||||
editorRef.current = editor;
|
||||
decorationsCollection.current =
|
||||
editor.createDecorationsCollection();
|
||||
|
||||
// Cleanup previous disposable if it exists
|
||||
if (pasteDisposableRef.current) {
|
||||
@@ -338,7 +356,7 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
const isLargeFile = calculateIsLargeFile(content);
|
||||
|
||||
// First, detect content type to determine if we should switch modes
|
||||
const detectedType = detectContentType(content);
|
||||
const detectedType = detectImportMethod(content);
|
||||
if (detectedType && detectedType !== importMethod) {
|
||||
// Switch to the detected mode immediately
|
||||
setImportMethod(detectedType);
|
||||
@@ -352,7 +370,7 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
?.run();
|
||||
}, 100);
|
||||
}
|
||||
// For DDL mode, do NOT format as it can break the SQL
|
||||
// For DDL and DBML modes, do NOT format as it can break the syntax
|
||||
} else {
|
||||
// Content type didn't change, apply formatting based on current mode
|
||||
if (importMethod === 'query' && !isLargeFile) {
|
||||
@@ -363,7 +381,7 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
?.run();
|
||||
}, 100);
|
||||
}
|
||||
// For DDL mode or large files, do NOT format
|
||||
// For DDL and DBML modes or large files, do NOT format
|
||||
}
|
||||
});
|
||||
|
||||
@@ -410,16 +428,25 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
<div className="w-full text-center text-xs text-muted-foreground">
|
||||
{importMethod === 'query'
|
||||
? 'Smart Query Output'
|
||||
: 'SQL Script'}
|
||||
: importMethod === 'dbml'
|
||||
? 'DBML Script'
|
||||
: 'SQL Script'}
|
||||
</div>
|
||||
<div className="flex-1 overflow-hidden">
|
||||
<Suspense fallback={<Spinner />}>
|
||||
<Editor
|
||||
value={scriptResult}
|
||||
onChange={debouncedHandleInputChange}
|
||||
language={importMethod === 'query' ? 'json' : 'sql'}
|
||||
language={
|
||||
importMethod === 'query'
|
||||
? 'json'
|
||||
: importMethod === 'dbml'
|
||||
? 'dbml'
|
||||
: 'sql'
|
||||
}
|
||||
loading={<Spinner />}
|
||||
onMount={handleEditorDidMount}
|
||||
beforeMount={setupDBMLLanguage}
|
||||
theme={
|
||||
effectiveTheme === 'dark'
|
||||
? 'dbml-dark'
|
||||
@@ -430,7 +457,6 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
minimap: { enabled: false },
|
||||
scrollBeyondLastLine: false,
|
||||
automaticLayout: true,
|
||||
glyphMargin: false,
|
||||
lineNumbers: 'on',
|
||||
guides: {
|
||||
indentation: false,
|
||||
@@ -455,7 +481,9 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
</Suspense>
|
||||
</div>
|
||||
|
||||
{errorMessage || (importMethod === 'ddl' && sqlValidation) ? (
|
||||
{errorMessage ||
|
||||
((importMethod === 'ddl' || importMethod === 'dbml') &&
|
||||
sqlValidation) ? (
|
||||
<SQLValidationStatus
|
||||
validation={sqlValidation}
|
||||
errorMessage={errorMessage}
|
||||
|
||||
@@ -15,9 +15,11 @@ import {
|
||||
AvatarImage,
|
||||
} from '@/components/avatar/avatar';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Code } from 'lucide-react';
|
||||
import { Code, FileCode } from 'lucide-react';
|
||||
import { SmartQueryInstructions } from './instructions/smart-query-instructions';
|
||||
import { DDLInstructions } from './instructions/ddl-instructions';
|
||||
import { DBMLInstructions } from './instructions/dbml-instructions';
|
||||
import type { ImportMethod } from '@/lib/import-method/import-method';
|
||||
|
||||
const DatabasesWithoutDDLInstructions: DatabaseType[] = [
|
||||
DatabaseType.CLICKHOUSE,
|
||||
@@ -30,8 +32,8 @@ export interface InstructionsSectionProps {
|
||||
setDatabaseEdition: React.Dispatch<
|
||||
React.SetStateAction<DatabaseEdition | undefined>
|
||||
>;
|
||||
importMethod: 'query' | 'ddl';
|
||||
setImportMethod: (method: 'query' | 'ddl') => void;
|
||||
importMethod: ImportMethod;
|
||||
setImportMethod: (method: ImportMethod) => void;
|
||||
showSSMSInfoDialog: boolean;
|
||||
setShowSSMSInfoDialog: (show: boolean) => void;
|
||||
}
|
||||
@@ -125,9 +127,9 @@ export const InstructionsSection: React.FC<InstructionsSectionProps> = ({
|
||||
className="ml-1 flex-wrap justify-start gap-2"
|
||||
value={importMethod}
|
||||
onValueChange={(value) => {
|
||||
let selectedImportMethod: 'query' | 'ddl' = 'query';
|
||||
let selectedImportMethod: ImportMethod = 'query';
|
||||
if (value) {
|
||||
selectedImportMethod = value as 'query' | 'ddl';
|
||||
selectedImportMethod = value as ImportMethod;
|
||||
}
|
||||
|
||||
setImportMethod(selectedImportMethod);
|
||||
@@ -150,10 +152,20 @@ export const InstructionsSection: React.FC<InstructionsSectionProps> = ({
|
||||
className="h-6 gap-1 p-0 px-2 shadow-none data-[state=on]:bg-slate-200 dark:data-[state=on]:bg-slate-700"
|
||||
>
|
||||
<Avatar className="size-4 rounded-none">
|
||||
<Code size={16} />
|
||||
<FileCode size={16} />
|
||||
</Avatar>
|
||||
SQL Script
|
||||
</ToggleGroupItem>
|
||||
<ToggleGroupItem
|
||||
value="dbml"
|
||||
variant="outline"
|
||||
className="h-6 gap-1 p-0 px-2 shadow-none data-[state=on]:bg-slate-200 dark:data-[state=on]:bg-slate-700"
|
||||
>
|
||||
<Avatar className="size-4 rounded-none">
|
||||
<Code size={16} />
|
||||
</Avatar>
|
||||
DBML
|
||||
</ToggleGroupItem>
|
||||
</ToggleGroup>
|
||||
</div>
|
||||
)}
|
||||
@@ -167,11 +179,16 @@ export const InstructionsSection: React.FC<InstructionsSectionProps> = ({
|
||||
showSSMSInfoDialog={showSSMSInfoDialog}
|
||||
setShowSSMSInfoDialog={setShowSSMSInfoDialog}
|
||||
/>
|
||||
) : (
|
||||
) : importMethod === 'ddl' ? (
|
||||
<DDLInstructions
|
||||
databaseType={databaseType}
|
||||
databaseEdition={databaseEdition}
|
||||
/>
|
||||
) : (
|
||||
<DBMLInstructions
|
||||
databaseType={databaseType}
|
||||
databaseEdition={databaseEdition}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -0,0 +1,47 @@
|
||||
import React from 'react';
|
||||
import type { DatabaseType } from '@/lib/domain/database-type';
|
||||
import type { DatabaseEdition } from '@/lib/domain/database-edition';
|
||||
import { CodeSnippet } from '@/components/code-snippet/code-snippet';
|
||||
import { setupDBMLLanguage } from '@/components/code-snippet/languages/dbml-language';
|
||||
|
||||
export interface DBMLInstructionsProps {
|
||||
databaseType: DatabaseType;
|
||||
databaseEdition?: DatabaseEdition;
|
||||
}
|
||||
|
||||
export const DBMLInstructions: React.FC<DBMLInstructionsProps> = () => {
|
||||
return (
|
||||
<>
|
||||
<div className="flex flex-col gap-1 text-sm text-primary">
|
||||
<div>
|
||||
Paste your DBML (Database Markup Language) schema definition
|
||||
here →
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="flex h-64 flex-col gap-1 text-sm text-primary">
|
||||
<h4 className="text-xs font-medium">Example:</h4>
|
||||
<CodeSnippet
|
||||
className="h-full"
|
||||
allowCopy={false}
|
||||
editorProps={{
|
||||
beforeMount: setupDBMLLanguage,
|
||||
}}
|
||||
code={`Table users {
|
||||
id int [pk]
|
||||
username varchar
|
||||
email varchar
|
||||
}
|
||||
|
||||
Table posts {
|
||||
id int [pk]
|
||||
user_id int [ref: > users.id]
|
||||
title varchar
|
||||
content text
|
||||
}`}
|
||||
language={'dbml'}
|
||||
/>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
};
|
||||
@@ -43,8 +43,8 @@ const DDLInstructionsMap: Record<DatabaseType, DDLInstruction[]> = {
|
||||
},
|
||||
{
|
||||
text: 'Execute the following command in your terminal:',
|
||||
code: `sqlite3 <database_file_path>\n.dump > <output_file_path>`,
|
||||
example: `sqlite3 my_db.db\n.dump > schema_export.sql`,
|
||||
code: `sqlite3 <database_file_path>\n".schema" > <output_file_path>`,
|
||||
example: `sqlite3 my_db.db\n".schema" > schema_export.sql`,
|
||||
},
|
||||
{
|
||||
text: 'Open the exported SQL file, copy its contents, and paste them here.',
|
||||
|
||||
@@ -73,7 +73,7 @@ export const SQLValidationStatus: React.FC<SQLValidationStatusProps> = ({
|
||||
|
||||
{hasErrors ? (
|
||||
<div className="rounded-md border border-red-200 bg-red-50 dark:border-red-800 dark:bg-red-950">
|
||||
<ScrollArea className="h-24">
|
||||
<ScrollArea className="h-fit max-h-24">
|
||||
<div className="space-y-3 p-3 pt-2 text-red-700 dark:text-red-300">
|
||||
{validation?.errors
|
||||
.slice(0, 3)
|
||||
@@ -137,7 +137,7 @@ export const SQLValidationStatus: React.FC<SQLValidationStatusProps> = ({
|
||||
|
||||
{hasWarnings && !hasErrors ? (
|
||||
<div className="rounded-md border border-sky-200 bg-sky-50 dark:border-sky-800 dark:bg-sky-950">
|
||||
<ScrollArea className="h-24">
|
||||
<ScrollArea className="h-fit max-h-24">
|
||||
<div className="space-y-3 p-3 pt-2 text-sky-700 dark:text-sky-300">
|
||||
<div className="flex items-start gap-2">
|
||||
<AlertTriangle className="mt-0.5 size-4 shrink-0 text-sky-700 dark:text-sky-300" />
|
||||
|
||||
@@ -22,6 +22,11 @@ import { sqlImportToDiagram } from '@/lib/data/sql-import';
|
||||
import type { SelectedTable } from '@/lib/data/import-metadata/filter-metadata';
|
||||
import { filterMetadataByTables } from '@/lib/data/import-metadata/filter-metadata';
|
||||
import { MAX_TABLES_WITHOUT_SHOWING_FILTER } from '../common/select-tables/constants';
|
||||
import {
|
||||
defaultDBMLDiagramName,
|
||||
importDBMLToDiagram,
|
||||
} from '@/lib/dbml/dbml-import/dbml-import';
|
||||
import type { ImportMethod } from '@/lib/import-method/import-method';
|
||||
|
||||
export interface CreateDiagramDialogProps extends BaseDialogProps {}
|
||||
|
||||
@@ -30,11 +35,11 @@ export const CreateDiagramDialog: React.FC<CreateDiagramDialogProps> = ({
|
||||
}) => {
|
||||
const { diagramId } = useChartDB();
|
||||
const { t } = useTranslation();
|
||||
const [importMethod, setImportMethod] = useState<'query' | 'ddl'>('query');
|
||||
const [importMethod, setImportMethod] = useState<ImportMethod>('query');
|
||||
const [databaseType, setDatabaseType] = useState<DatabaseType>(
|
||||
DatabaseType.GENERIC
|
||||
);
|
||||
const { closeCreateDiagramDialog, openImportDBMLDialog } = useDialog();
|
||||
const { closeCreateDiagramDialog } = useDialog();
|
||||
const { updateConfig } = useConfig();
|
||||
const [scriptResult, setScriptResult] = useState('');
|
||||
const [databaseEdition, setDatabaseEdition] = useState<
|
||||
@@ -89,6 +94,14 @@ export const CreateDiagramDialog: React.FC<CreateDiagramDialogProps> = ({
|
||||
sourceDatabaseType: databaseType,
|
||||
targetDatabaseType: databaseType,
|
||||
});
|
||||
} else if (importMethod === 'dbml') {
|
||||
diagram = await importDBMLToDiagram(scriptResult, {
|
||||
databaseType,
|
||||
});
|
||||
// Update the diagram name if it's the default
|
||||
if (diagram.name === defaultDBMLDiagramName) {
|
||||
diagram.name = `Diagram ${diagramNumber}`;
|
||||
}
|
||||
} else {
|
||||
let metadata: DatabaseMetadata | undefined = databaseMetadata;
|
||||
|
||||
@@ -152,10 +165,6 @@ export const CreateDiagramDialog: React.FC<CreateDiagramDialogProps> = ({
|
||||
await updateConfig({ config: { defaultDiagramId: diagram.id } });
|
||||
closeCreateDiagramDialog();
|
||||
navigate(`/diagrams/${diagram.id}`);
|
||||
setTimeout(
|
||||
() => openImportDBMLDialog({ withCreateEmptyDiagram: true }),
|
||||
700
|
||||
);
|
||||
}, [
|
||||
databaseType,
|
||||
addDiagram,
|
||||
@@ -164,14 +173,13 @@ export const CreateDiagramDialog: React.FC<CreateDiagramDialogProps> = ({
|
||||
navigate,
|
||||
updateConfig,
|
||||
diagramNumber,
|
||||
openImportDBMLDialog,
|
||||
]);
|
||||
|
||||
const importNewDiagramOrFilterTables = useCallback(async () => {
|
||||
try {
|
||||
setIsParsingMetadata(true);
|
||||
|
||||
if (importMethod === 'ddl') {
|
||||
if (importMethod === 'ddl' || importMethod === 'dbml') {
|
||||
await importNewDiagram();
|
||||
} else {
|
||||
// Parse metadata asynchronously to avoid blocking the UI
|
||||
|
||||
@@ -15,6 +15,8 @@ import { useReactFlow } from '@xyflow/react';
|
||||
import type { BaseDialogProps } from '../common/base-dialog-props';
|
||||
import { useAlert } from '@/context/alert-context/alert-context';
|
||||
import { sqlImportToDiagram } from '@/lib/data/sql-import';
|
||||
import { importDBMLToDiagram } from '@/lib/dbml/dbml-import/dbml-import';
|
||||
import type { ImportMethod } from '@/lib/import-method/import-method';
|
||||
|
||||
export interface ImportDatabaseDialogProps extends BaseDialogProps {
|
||||
databaseType: DatabaseType;
|
||||
@@ -24,7 +26,7 @@ export const ImportDatabaseDialog: React.FC<ImportDatabaseDialogProps> = ({
|
||||
dialog,
|
||||
databaseType,
|
||||
}) => {
|
||||
const [importMethod, setImportMethod] = useState<'query' | 'ddl'>('query');
|
||||
const [importMethod, setImportMethod] = useState<ImportMethod>('query');
|
||||
const { closeImportDatabaseDialog } = useDialog();
|
||||
const { showAlert } = useAlert();
|
||||
const {
|
||||
@@ -65,6 +67,10 @@ export const ImportDatabaseDialog: React.FC<ImportDatabaseDialogProps> = ({
|
||||
sourceDatabaseType: databaseType,
|
||||
targetDatabaseType: databaseType,
|
||||
});
|
||||
} else if (importMethod === 'dbml') {
|
||||
diagram = await importDBMLToDiagram(scriptResult, {
|
||||
databaseType,
|
||||
});
|
||||
} else {
|
||||
const databaseMetadata: DatabaseMetadata =
|
||||
loadDatabaseMetadata(scriptResult);
|
||||
|
||||
@@ -1,359 +0,0 @@
|
||||
import React, {
|
||||
useCallback,
|
||||
useEffect,
|
||||
useState,
|
||||
Suspense,
|
||||
useRef,
|
||||
} from 'react';
|
||||
import type * as monaco from 'monaco-editor';
|
||||
import { useDialog } from '@/hooks/use-dialog';
|
||||
import {
|
||||
Dialog,
|
||||
DialogClose,
|
||||
DialogContent,
|
||||
DialogDescription,
|
||||
DialogFooter,
|
||||
DialogHeader,
|
||||
DialogInternalContent,
|
||||
DialogTitle,
|
||||
} from '@/components/dialog/dialog';
|
||||
import { Button } from '@/components/button/button';
|
||||
import type { BaseDialogProps } from '../common/base-dialog-props';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Editor } from '@/components/code-snippet/code-snippet';
|
||||
import { useTheme } from '@/hooks/use-theme';
|
||||
import { AlertCircle } from 'lucide-react';
|
||||
import {
|
||||
importDBMLToDiagram,
|
||||
sanitizeDBML,
|
||||
preprocessDBML,
|
||||
} from '@/lib/dbml/dbml-import/dbml-import';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import { Parser } from '@dbml/core';
|
||||
import { useCanvas } from '@/hooks/use-canvas';
|
||||
import { setupDBMLLanguage } from '@/components/code-snippet/languages/dbml-language';
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import { useToast } from '@/components/toast/use-toast';
|
||||
import { Spinner } from '@/components/spinner/spinner';
|
||||
import { debounce } from '@/lib/utils';
|
||||
import { parseDBMLError } from '@/lib/dbml/dbml-import/dbml-import-error';
|
||||
import {
|
||||
clearErrorHighlight,
|
||||
highlightErrorLine,
|
||||
} from '@/components/code-snippet/dbml/utils';
|
||||
|
||||
export interface ImportDBMLDialogProps extends BaseDialogProps {
|
||||
withCreateEmptyDiagram?: boolean;
|
||||
}
|
||||
|
||||
export const ImportDBMLDialog: React.FC<ImportDBMLDialogProps> = ({
|
||||
dialog,
|
||||
withCreateEmptyDiagram,
|
||||
}) => {
|
||||
const { t } = useTranslation();
|
||||
const initialDBML = `// Use DBML to define your database structure
|
||||
// Simple Blog System with Comments Example
|
||||
|
||||
Table users {
|
||||
id integer [primary key]
|
||||
name varchar
|
||||
email varchar
|
||||
}
|
||||
|
||||
Table posts {
|
||||
id integer [primary key]
|
||||
title varchar
|
||||
content text
|
||||
user_id integer
|
||||
created_at timestamp
|
||||
}
|
||||
|
||||
Table comments {
|
||||
id integer [primary key]
|
||||
content text
|
||||
post_id integer
|
||||
user_id integer
|
||||
created_at timestamp
|
||||
}
|
||||
|
||||
// Relationships
|
||||
Ref: posts.user_id > users.id // Each post belongs to one user
|
||||
Ref: comments.post_id > posts.id // Each comment belongs to one post
|
||||
Ref: comments.user_id > users.id // Each comment is written by one user`;
|
||||
|
||||
const [dbmlContent, setDBMLContent] = useState<string>(initialDBML);
|
||||
const { closeImportDBMLDialog } = useDialog();
|
||||
const [errorMessage, setErrorMessage] = useState<string | undefined>();
|
||||
const { effectiveTheme } = useTheme();
|
||||
const { toast } = useToast();
|
||||
const {
|
||||
addTables,
|
||||
addRelationships,
|
||||
tables,
|
||||
relationships,
|
||||
removeTables,
|
||||
removeRelationships,
|
||||
} = useChartDB();
|
||||
const { reorderTables } = useCanvas();
|
||||
const [reorder, setReorder] = useState(false);
|
||||
const editorRef = useRef<monaco.editor.IStandaloneCodeEditor>();
|
||||
const decorationsCollection =
|
||||
useRef<monaco.editor.IEditorDecorationsCollection>();
|
||||
|
||||
const handleEditorDidMount = (
|
||||
editor: monaco.editor.IStandaloneCodeEditor
|
||||
) => {
|
||||
editorRef.current = editor;
|
||||
decorationsCollection.current = editor.createDecorationsCollection();
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (reorder) {
|
||||
reorderTables({
|
||||
updateHistory: false,
|
||||
});
|
||||
setReorder(false);
|
||||
}
|
||||
}, [reorder, reorderTables]);
|
||||
|
||||
const clearDecorations = useCallback(() => {
|
||||
clearErrorHighlight(decorationsCollection.current);
|
||||
}, []);
|
||||
|
||||
const validateDBML = useCallback(
|
||||
async (content: string) => {
|
||||
// Clear previous errors
|
||||
setErrorMessage(undefined);
|
||||
clearDecorations();
|
||||
|
||||
if (!content.trim()) return;
|
||||
|
||||
try {
|
||||
const preprocessedContent = preprocessDBML(content);
|
||||
const sanitizedContent = sanitizeDBML(preprocessedContent);
|
||||
const parser = new Parser();
|
||||
parser.parse(sanitizedContent, 'dbmlv2');
|
||||
} catch (e) {
|
||||
const parsedError = parseDBMLError(e);
|
||||
if (parsedError) {
|
||||
setErrorMessage(
|
||||
t('import_dbml_dialog.error.description') +
|
||||
` (1 error found - in line ${parsedError.line})`
|
||||
);
|
||||
highlightErrorLine({
|
||||
error: parsedError,
|
||||
model: editorRef.current?.getModel(),
|
||||
editorDecorationsCollection:
|
||||
decorationsCollection.current,
|
||||
});
|
||||
} else {
|
||||
setErrorMessage(
|
||||
e instanceof Error ? e.message : JSON.stringify(e)
|
||||
);
|
||||
}
|
||||
}
|
||||
},
|
||||
[clearDecorations, t]
|
||||
);
|
||||
|
||||
const debouncedValidateRef = useRef<((value: string) => void) | null>(null);
|
||||
|
||||
// Set up debounced validation
|
||||
useEffect(() => {
|
||||
debouncedValidateRef.current = debounce((value: string) => {
|
||||
validateDBML(value);
|
||||
}, 500);
|
||||
|
||||
return () => {
|
||||
debouncedValidateRef.current = null;
|
||||
};
|
||||
}, [validateDBML]);
|
||||
|
||||
// Trigger validation when content changes
|
||||
useEffect(() => {
|
||||
if (debouncedValidateRef.current) {
|
||||
debouncedValidateRef.current(dbmlContent);
|
||||
}
|
||||
}, [dbmlContent]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!dialog.open) {
|
||||
setErrorMessage(undefined);
|
||||
clearDecorations();
|
||||
setDBMLContent(initialDBML);
|
||||
}
|
||||
}, [dialog.open, initialDBML, clearDecorations]);
|
||||
|
||||
const handleImport = useCallback(async () => {
|
||||
if (!dbmlContent.trim() || errorMessage) return;
|
||||
|
||||
try {
|
||||
const importedDiagram = await importDBMLToDiagram(dbmlContent);
|
||||
const tableIdsToRemove = tables
|
||||
.filter((table) =>
|
||||
importedDiagram.tables?.some(
|
||||
(t: DBTable) =>
|
||||
t.name === table.name && t.schema === table.schema
|
||||
)
|
||||
)
|
||||
.map((table) => table.id);
|
||||
// Find relationships that need to be removed
|
||||
const relationshipIdsToRemove = relationships
|
||||
.filter((relationship) => {
|
||||
const sourceTable = tables.find(
|
||||
(table: DBTable) =>
|
||||
table.id === relationship.sourceTableId
|
||||
);
|
||||
const targetTable = tables.find(
|
||||
(table: DBTable) =>
|
||||
table.id === relationship.targetTableId
|
||||
);
|
||||
if (!sourceTable || !targetTable) return true;
|
||||
const replacementSourceTable = importedDiagram.tables?.find(
|
||||
(table: DBTable) =>
|
||||
table.name === sourceTable.name &&
|
||||
table.schema === sourceTable.schema
|
||||
);
|
||||
const replacementTargetTable = importedDiagram.tables?.find(
|
||||
(table: DBTable) =>
|
||||
table.name === targetTable.name &&
|
||||
table.schema === targetTable.schema
|
||||
);
|
||||
return replacementSourceTable || replacementTargetTable;
|
||||
})
|
||||
.map((relationship) => relationship.id);
|
||||
|
||||
// Remove existing items
|
||||
await Promise.all([
|
||||
removeTables(tableIdsToRemove, { updateHistory: false }),
|
||||
removeRelationships(relationshipIdsToRemove, {
|
||||
updateHistory: false,
|
||||
}),
|
||||
]);
|
||||
|
||||
// Add new items
|
||||
await Promise.all([
|
||||
addTables(importedDiagram.tables ?? [], {
|
||||
updateHistory: false,
|
||||
}),
|
||||
addRelationships(importedDiagram.relationships ?? [], {
|
||||
updateHistory: false,
|
||||
}),
|
||||
]);
|
||||
setReorder(true);
|
||||
closeImportDBMLDialog();
|
||||
} catch (e) {
|
||||
toast({
|
||||
title: t('import_dbml_dialog.error.title'),
|
||||
variant: 'destructive',
|
||||
description: (
|
||||
<>
|
||||
<div>{t('import_dbml_dialog.error.description')}</div>
|
||||
{e instanceof Error ? e.message : JSON.stringify(e)}
|
||||
</>
|
||||
),
|
||||
});
|
||||
}
|
||||
}, [
|
||||
dbmlContent,
|
||||
closeImportDBMLDialog,
|
||||
tables,
|
||||
relationships,
|
||||
removeTables,
|
||||
removeRelationships,
|
||||
addTables,
|
||||
addRelationships,
|
||||
errorMessage,
|
||||
toast,
|
||||
setReorder,
|
||||
t,
|
||||
]);
|
||||
|
||||
return (
|
||||
<Dialog
|
||||
{...dialog}
|
||||
onOpenChange={(open) => {
|
||||
if (!open) {
|
||||
closeImportDBMLDialog();
|
||||
}
|
||||
}}
|
||||
>
|
||||
<DialogContent
|
||||
className="flex h-[80vh] max-h-screen w-full flex-col md:max-w-[900px]"
|
||||
showClose
|
||||
>
|
||||
<DialogHeader>
|
||||
<DialogTitle>
|
||||
{withCreateEmptyDiagram
|
||||
? t('import_dbml_dialog.example_title')
|
||||
: t('import_dbml_dialog.title')}
|
||||
</DialogTitle>
|
||||
<DialogDescription>
|
||||
{t('import_dbml_dialog.description')}
|
||||
</DialogDescription>
|
||||
</DialogHeader>
|
||||
<DialogInternalContent>
|
||||
<Suspense fallback={<Spinner />}>
|
||||
<Editor
|
||||
value={dbmlContent}
|
||||
onChange={(value) => setDBMLContent(value || '')}
|
||||
language="dbml"
|
||||
onMount={handleEditorDidMount}
|
||||
theme={
|
||||
effectiveTheme === 'dark'
|
||||
? 'dbml-dark'
|
||||
: 'dbml-light'
|
||||
}
|
||||
beforeMount={setupDBMLLanguage}
|
||||
options={{
|
||||
minimap: { enabled: false },
|
||||
scrollBeyondLastLine: false,
|
||||
automaticLayout: true,
|
||||
glyphMargin: true,
|
||||
lineNumbers: 'on',
|
||||
scrollbar: {
|
||||
vertical: 'visible',
|
||||
horizontal: 'visible',
|
||||
},
|
||||
}}
|
||||
className="size-full"
|
||||
/>
|
||||
</Suspense>
|
||||
</DialogInternalContent>
|
||||
<DialogFooter>
|
||||
<div className="flex w-full items-center justify-between">
|
||||
<div className="flex items-center gap-4">
|
||||
<DialogClose asChild>
|
||||
<Button variant="secondary">
|
||||
{withCreateEmptyDiagram
|
||||
? t('import_dbml_dialog.skip_and_empty')
|
||||
: t('import_dbml_dialog.cancel')}
|
||||
</Button>
|
||||
</DialogClose>
|
||||
{errorMessage ? (
|
||||
<div className="flex items-center gap-1">
|
||||
<AlertCircle className="size-4 text-destructive" />
|
||||
|
||||
<span className="text-xs text-destructive">
|
||||
{errorMessage ||
|
||||
t(
|
||||
'import_dbml_dialog.error.description'
|
||||
)}
|
||||
</span>
|
||||
</div>
|
||||
) : null}
|
||||
</div>
|
||||
<Button
|
||||
onClick={handleImport}
|
||||
disabled={!dbmlContent.trim() || !!errorMessage}
|
||||
>
|
||||
{withCreateEmptyDiagram
|
||||
? t('import_dbml_dialog.show_example')
|
||||
: t('import_dbml_dialog.import')}
|
||||
</Button>
|
||||
</div>
|
||||
</DialogFooter>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
);
|
||||
};
|
||||
@@ -1,50 +0,0 @@
|
||||
import { useEffect, useCallback, type RefObject } from 'react';
|
||||
|
||||
/**
|
||||
* Custom hook that handles click outside detection with capture phase
|
||||
* to work properly with React Flow canvas and other event-stopping elements
|
||||
*/
|
||||
export function useClickOutside(
|
||||
ref: RefObject<HTMLElement>,
|
||||
handler: () => void,
|
||||
isActive = true
|
||||
) {
|
||||
useEffect(() => {
|
||||
if (!isActive) return;
|
||||
|
||||
const handleClickOutside = (event: MouseEvent) => {
|
||||
if (ref.current && !ref.current.contains(event.target as Node)) {
|
||||
handler();
|
||||
}
|
||||
};
|
||||
|
||||
// Use capture phase to catch events before React Flow or other libraries can stop them
|
||||
document.addEventListener('mousedown', handleClickOutside, true);
|
||||
|
||||
return () => {
|
||||
document.removeEventListener('mousedown', handleClickOutside, true);
|
||||
};
|
||||
}, [ref, handler, isActive]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Specialized version of useClickOutside for edit mode inputs
|
||||
* Adds a small delay to prevent race conditions with blur events
|
||||
*/
|
||||
export function useEditClickOutside(
|
||||
inputRef: RefObject<HTMLElement>,
|
||||
editMode: boolean,
|
||||
onSave: () => void,
|
||||
delay = 100
|
||||
) {
|
||||
const handleClickOutside = useCallback(() => {
|
||||
if (editMode) {
|
||||
// Small delay to ensure any pending state updates are processed
|
||||
setTimeout(() => {
|
||||
onSave();
|
||||
}, delay);
|
||||
}
|
||||
}, [editMode, onSave, delay]);
|
||||
|
||||
useClickOutside(inputRef, handleClickOutside, editMode);
|
||||
}
|
||||
@@ -5,7 +5,7 @@ import {
|
||||
databaseTypesWithCommentSupport,
|
||||
} from '@/lib/domain/database-type';
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import type { DataType } from '../data-types/data-types';
|
||||
import { dataTypeMap, type DataType } from '../data-types/data-types';
|
||||
import { generateCacheKey, getFromCache, setInCache } from './export-sql-cache';
|
||||
import { exportMSSQL } from './export-per-type/mssql';
|
||||
import { exportPostgreSQL } from './export-per-type/postgresql';
|
||||
@@ -314,11 +314,26 @@ export const exportBaseSQL = ({
|
||||
sqlScript += `(1)`;
|
||||
}
|
||||
|
||||
// Add precision and scale for numeric types
|
||||
if (field.precision && field.scale) {
|
||||
sqlScript += `(${field.precision}, ${field.scale})`;
|
||||
} else if (field.precision) {
|
||||
sqlScript += `(${field.precision})`;
|
||||
// Add precision and scale for numeric types only
|
||||
const precisionAndScaleTypes = dataTypeMap[targetDatabaseType]
|
||||
.filter(
|
||||
(t) =>
|
||||
t.fieldAttributes?.precision && t.fieldAttributes?.scale
|
||||
)
|
||||
.map((t) => t.name);
|
||||
|
||||
const isNumericType = precisionAndScaleTypes.some(
|
||||
(t) =>
|
||||
field.type.name.toLowerCase().includes(t) ||
|
||||
typeName.toLowerCase().includes(t)
|
||||
);
|
||||
|
||||
if (isNumericType) {
|
||||
if (field.precision && field.scale) {
|
||||
sqlScript += `(${field.precision}, ${field.scale})`;
|
||||
} else if (field.precision) {
|
||||
sqlScript += `(${field.precision})`;
|
||||
}
|
||||
}
|
||||
|
||||
// Handle NOT NULL constraint
|
||||
@@ -366,6 +381,16 @@ export const exportBaseSQL = ({
|
||||
fieldDefault = `now()`;
|
||||
}
|
||||
|
||||
// Fix CURRENT_DATE() for PostgreSQL in DBML flow - PostgreSQL uses CURRENT_DATE without parentheses
|
||||
if (
|
||||
isDBMLFlow &&
|
||||
targetDatabaseType === DatabaseType.POSTGRESQL
|
||||
) {
|
||||
if (fieldDefault.toUpperCase() === 'CURRENT_DATE()') {
|
||||
fieldDefault = 'CURRENT_DATE';
|
||||
}
|
||||
}
|
||||
|
||||
sqlScript += ` DEFAULT ${fieldDefault}`;
|
||||
}
|
||||
}
|
||||
@@ -454,10 +479,16 @@ export const exportBaseSQL = ({
|
||||
.join(', ');
|
||||
|
||||
if (fieldNames) {
|
||||
const indexName =
|
||||
const rawIndexName =
|
||||
table.schema && !isDBMLFlow
|
||||
? `${table.schema}_${index.name}`
|
||||
: index.name;
|
||||
// Quote index name if it contains special characters
|
||||
// For DBML flow, also quote if contains special characters
|
||||
const needsQuoting = /[^a-zA-Z0-9_]/.test(rawIndexName);
|
||||
const indexName = needsQuoting
|
||||
? `"${rawIndexName}"`
|
||||
: rawIndexName;
|
||||
sqlScript += `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName} ON ${tableName} (${fieldNames});\n`;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -1,66 +0,0 @@
|
||||
import { describe, it } from 'vitest';
|
||||
|
||||
describe('node-sql-parser - CREATE TYPE handling', () => {
|
||||
it('should show exact parser error for CREATE TYPE', async () => {
|
||||
const { Parser } = await import('node-sql-parser');
|
||||
const parser = new Parser();
|
||||
const parserOpts = {
|
||||
database: 'PostgreSQL',
|
||||
};
|
||||
|
||||
console.log('\n=== Testing CREATE TYPE statement ===');
|
||||
const createTypeSQL = `CREATE TYPE spell_element AS ENUM ('fire', 'water', 'earth', 'air');`;
|
||||
|
||||
try {
|
||||
parser.astify(createTypeSQL, parserOpts);
|
||||
console.log('CREATE TYPE parsed successfully');
|
||||
} catch (error) {
|
||||
console.log('CREATE TYPE parse error:', (error as Error).message);
|
||||
}
|
||||
|
||||
console.log('\n=== Testing CREATE EXTENSION statement ===');
|
||||
const createExtensionSQL = `CREATE EXTENSION IF NOT EXISTS "uuid-ossp";`;
|
||||
|
||||
try {
|
||||
parser.astify(createExtensionSQL, parserOpts);
|
||||
console.log('CREATE EXTENSION parsed successfully');
|
||||
} catch (error) {
|
||||
console.log(
|
||||
'CREATE EXTENSION parse error:',
|
||||
(error as Error).message
|
||||
);
|
||||
}
|
||||
|
||||
console.log('\n=== Testing CREATE TABLE with custom type ===');
|
||||
const createTableWithTypeSQL = `CREATE TABLE wizards (
|
||||
id UUID PRIMARY KEY,
|
||||
element spell_element DEFAULT 'fire'
|
||||
);`;
|
||||
|
||||
try {
|
||||
parser.astify(createTableWithTypeSQL, parserOpts);
|
||||
console.log('CREATE TABLE with custom type parsed successfully');
|
||||
} catch (error) {
|
||||
console.log(
|
||||
'CREATE TABLE with custom type parse error:',
|
||||
(error as Error).message
|
||||
);
|
||||
}
|
||||
|
||||
console.log('\n=== Testing CREATE TABLE with standard types only ===');
|
||||
const createTableStandardSQL = `CREATE TABLE wizards (
|
||||
id UUID PRIMARY KEY,
|
||||
element VARCHAR(20) DEFAULT 'fire'
|
||||
);`;
|
||||
|
||||
try {
|
||||
parser.astify(createTableStandardSQL, parserOpts);
|
||||
console.log('CREATE TABLE with standard types parsed successfully');
|
||||
} catch (error) {
|
||||
console.log(
|
||||
'CREATE TABLE with standard types parse error:',
|
||||
(error as Error).message
|
||||
);
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,178 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromSQLite } from '../sqlite';
|
||||
|
||||
describe('SQLite Import Tests', () => {
|
||||
it('should parse SQLite script with sqlite_sequence table and all relationships', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE users (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT,
|
||||
age INTEGER
|
||||
);
|
||||
CREATE TABLE sqlite_sequence(name,seq);
|
||||
CREATE TABLE products (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT,
|
||||
price REAL
|
||||
);
|
||||
CREATE TABLE user_products (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
user_id INTEGER NOT NULL,
|
||||
product_id INTEGER NOT NULL,
|
||||
purchased_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (user_id) REFERENCES users(id),
|
||||
FOREIGN KEY (product_id) REFERENCES products(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLite(sql);
|
||||
|
||||
// ============= CHECK TOTAL COUNTS =============
|
||||
// Should have exactly 4 tables
|
||||
expect(result.tables).toHaveLength(4);
|
||||
|
||||
// Should have exactly 2 foreign key relationships
|
||||
expect(result.relationships).toHaveLength(2);
|
||||
|
||||
// ============= CHECK USERS TABLE =============
|
||||
const usersTable = result.tables.find((t) => t.name === 'users');
|
||||
expect(usersTable).toBeDefined();
|
||||
expect(usersTable?.columns).toHaveLength(3); // id, name, age
|
||||
|
||||
// Check each column in users table
|
||||
expect(usersTable?.columns[0]).toMatchObject({
|
||||
name: 'id',
|
||||
type: 'INTEGER',
|
||||
primaryKey: true,
|
||||
increment: true,
|
||||
nullable: false,
|
||||
});
|
||||
expect(usersTable?.columns[1]).toMatchObject({
|
||||
name: 'name',
|
||||
type: 'TEXT',
|
||||
primaryKey: false,
|
||||
nullable: true,
|
||||
});
|
||||
expect(usersTable?.columns[2]).toMatchObject({
|
||||
name: 'age',
|
||||
type: 'INTEGER',
|
||||
primaryKey: false,
|
||||
nullable: true,
|
||||
});
|
||||
|
||||
// ============= CHECK SQLITE_SEQUENCE TABLE =============
|
||||
const sqliteSequenceTable = result.tables.find(
|
||||
(t) => t.name === 'sqlite_sequence'
|
||||
);
|
||||
expect(sqliteSequenceTable).toBeDefined();
|
||||
expect(sqliteSequenceTable?.columns).toHaveLength(2); // name, seq
|
||||
|
||||
// Check columns in sqlite_sequence table
|
||||
expect(sqliteSequenceTable?.columns[0]).toMatchObject({
|
||||
name: 'name',
|
||||
type: 'TEXT', // Should default to TEXT when no type specified
|
||||
primaryKey: false,
|
||||
nullable: true,
|
||||
});
|
||||
expect(sqliteSequenceTable?.columns[1]).toMatchObject({
|
||||
name: 'seq',
|
||||
type: 'TEXT', // Should default to TEXT when no type specified
|
||||
primaryKey: false,
|
||||
nullable: true,
|
||||
});
|
||||
|
||||
// ============= CHECK PRODUCTS TABLE =============
|
||||
const productsTable = result.tables.find((t) => t.name === 'products');
|
||||
expect(productsTable).toBeDefined();
|
||||
expect(productsTable?.columns).toHaveLength(3); // id, name, price
|
||||
|
||||
// Check each column in products table
|
||||
expect(productsTable?.columns[0]).toMatchObject({
|
||||
name: 'id',
|
||||
type: 'INTEGER',
|
||||
primaryKey: true,
|
||||
increment: true,
|
||||
nullable: false,
|
||||
});
|
||||
expect(productsTable?.columns[1]).toMatchObject({
|
||||
name: 'name',
|
||||
type: 'TEXT',
|
||||
primaryKey: false,
|
||||
nullable: true,
|
||||
});
|
||||
expect(productsTable?.columns[2]).toMatchObject({
|
||||
name: 'price',
|
||||
type: 'REAL',
|
||||
primaryKey: false,
|
||||
nullable: true,
|
||||
});
|
||||
|
||||
// ============= CHECK USER_PRODUCTS TABLE =============
|
||||
const userProductsTable = result.tables.find(
|
||||
(t) => t.name === 'user_products'
|
||||
);
|
||||
expect(userProductsTable).toBeDefined();
|
||||
expect(userProductsTable?.columns).toHaveLength(4); // id, user_id, product_id, purchased_at
|
||||
|
||||
// Check each column in user_products table
|
||||
expect(userProductsTable?.columns[0]).toMatchObject({
|
||||
name: 'id',
|
||||
type: 'INTEGER',
|
||||
primaryKey: true,
|
||||
increment: true,
|
||||
nullable: false,
|
||||
});
|
||||
expect(userProductsTable?.columns[1]).toMatchObject({
|
||||
name: 'user_id',
|
||||
type: 'INTEGER',
|
||||
primaryKey: false,
|
||||
nullable: false, // NOT NULL constraint
|
||||
});
|
||||
expect(userProductsTable?.columns[2]).toMatchObject({
|
||||
name: 'product_id',
|
||||
type: 'INTEGER',
|
||||
primaryKey: false,
|
||||
nullable: false, // NOT NULL constraint
|
||||
});
|
||||
expect(userProductsTable?.columns[3]).toMatchObject({
|
||||
name: 'purchased_at',
|
||||
type: 'TIMESTAMP', // DATETIME should map to TIMESTAMP
|
||||
primaryKey: false,
|
||||
nullable: true,
|
||||
default: 'CURRENT_TIMESTAMP',
|
||||
});
|
||||
|
||||
// ============= CHECK FOREIGN KEY RELATIONSHIPS =============
|
||||
// FK 1: user_products.user_id -> users.id
|
||||
const userIdFK = result.relationships.find(
|
||||
(r) =>
|
||||
r.sourceTable === 'user_products' &&
|
||||
r.sourceColumn === 'user_id' &&
|
||||
r.targetTable === 'users' &&
|
||||
r.targetColumn === 'id'
|
||||
);
|
||||
expect(userIdFK).toBeDefined();
|
||||
expect(userIdFK).toMatchObject({
|
||||
sourceTable: 'user_products',
|
||||
sourceColumn: 'user_id',
|
||||
targetTable: 'users',
|
||||
targetColumn: 'id',
|
||||
});
|
||||
|
||||
// FK 2: user_products.product_id -> products.id
|
||||
const productIdFK = result.relationships.find(
|
||||
(r) =>
|
||||
r.sourceTable === 'user_products' &&
|
||||
r.sourceColumn === 'product_id' &&
|
||||
r.targetTable === 'products' &&
|
||||
r.targetColumn === 'id'
|
||||
);
|
||||
expect(productIdFK).toBeDefined();
|
||||
expect(productIdFK).toMatchObject({
|
||||
sourceTable: 'user_products',
|
||||
sourceColumn: 'product_id',
|
||||
targetTable: 'products',
|
||||
targetColumn: 'id',
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -32,11 +32,11 @@ export async function fromSQLite(sqlContent: string): Promise<SQLParserResult> {
|
||||
const tableMap: Record<string, string> = {}; // Maps table name to its ID
|
||||
|
||||
try {
|
||||
// SPECIAL HANDLING: Direct line-by-line parser for SQLite DDL
|
||||
// This ensures we preserve the exact data types from the original DDL
|
||||
// SPECIAL HANDLING: Direct regex-based parser for SQLite DDL
|
||||
// This ensures we handle all SQLite-specific syntax including tables without types
|
||||
const directlyParsedTables = parseCreateTableStatements(sqlContent);
|
||||
|
||||
// Check if we successfully parsed tables directly
|
||||
// Always try direct parsing first as it's more reliable for SQLite
|
||||
if (directlyParsedTables.length > 0) {
|
||||
// Map the direct parsing results to the expected SQLParserResult format
|
||||
directlyParsedTables.forEach((table) => {
|
||||
@@ -56,8 +56,19 @@ export async function fromSQLite(sqlContent: string): Promise<SQLParserResult> {
|
||||
// Process foreign keys using the regex approach
|
||||
findForeignKeysUsingRegex(sqlContent, tableMap, relationships);
|
||||
|
||||
// Return the result
|
||||
return { tables, relationships };
|
||||
// Create placeholder tables for any missing referenced tables
|
||||
addPlaceholderTablesForFKReferences(
|
||||
tables,
|
||||
relationships,
|
||||
tableMap
|
||||
);
|
||||
|
||||
// Filter out any invalid relationships
|
||||
const validRelationships = relationships.filter((rel) => {
|
||||
return isValidForeignKeyRelationship(rel, tables);
|
||||
});
|
||||
|
||||
return { tables, relationships: validRelationships };
|
||||
}
|
||||
|
||||
// Preprocess SQL to handle SQLite quoted identifiers
|
||||
@@ -130,101 +141,182 @@ function parseCreateTableStatements(sqlContent: string): {
|
||||
columns: SQLColumn[];
|
||||
}[] = [];
|
||||
|
||||
// Split SQL content into lines
|
||||
const lines = sqlContent.split('\n');
|
||||
|
||||
let currentTable: { name: string; columns: SQLColumn[] } | null = null;
|
||||
let inCreateTable = false;
|
||||
|
||||
// Process each line
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i].trim();
|
||||
|
||||
// Skip empty lines and comments
|
||||
if (!line || line.startsWith('--')) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check for CREATE TABLE statement
|
||||
if (line.toUpperCase().startsWith('CREATE TABLE')) {
|
||||
// Extract table name
|
||||
const tableNameMatch =
|
||||
/CREATE\s+TABLE\s+(?:if\s+not\s+exists\s+)?["'`]?(\w+)["'`]?/i.exec(
|
||||
line
|
||||
);
|
||||
if (tableNameMatch && tableNameMatch[1]) {
|
||||
inCreateTable = true;
|
||||
currentTable = {
|
||||
name: tableNameMatch[1],
|
||||
columns: [],
|
||||
};
|
||||
// Remove comments before processing
|
||||
const cleanedSQL = sqlContent
|
||||
.split('\n')
|
||||
.map((line) => {
|
||||
const commentIndex = line.indexOf('--');
|
||||
if (commentIndex >= 0) {
|
||||
return line.substring(0, commentIndex);
|
||||
}
|
||||
}
|
||||
// Check for end of CREATE TABLE statement
|
||||
else if (inCreateTable && line.includes(');')) {
|
||||
if (currentTable) {
|
||||
tables.push(currentTable);
|
||||
return line;
|
||||
})
|
||||
.join('\n');
|
||||
|
||||
// Match all CREATE TABLE statements including those without column definitions
|
||||
const createTableRegex =
|
||||
/CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?["'`]?(\w+)["'`]?\s*\(([^;]+?)\)\s*;/gis;
|
||||
let match;
|
||||
|
||||
while ((match = createTableRegex.exec(cleanedSQL)) !== null) {
|
||||
const tableName = match[1];
|
||||
const tableBody = match[2].trim();
|
||||
|
||||
const table: { name: string; columns: SQLColumn[] } = {
|
||||
name: tableName,
|
||||
columns: [],
|
||||
};
|
||||
|
||||
// Special case: sqlite_sequence or tables with columns but no types
|
||||
if (tableName === 'sqlite_sequence' || !tableBody.includes(' ')) {
|
||||
// Parse simple column list without types (e.g., "name,seq")
|
||||
const simpleColumns = tableBody.split(',').map((col) => col.trim());
|
||||
for (const colName of simpleColumns) {
|
||||
if (
|
||||
colName &&
|
||||
!colName.toUpperCase().startsWith('FOREIGN KEY') &&
|
||||
!colName.toUpperCase().startsWith('PRIMARY KEY') &&
|
||||
!colName.toUpperCase().startsWith('UNIQUE') &&
|
||||
!colName.toUpperCase().startsWith('CHECK') &&
|
||||
!colName.toUpperCase().startsWith('CONSTRAINT')
|
||||
) {
|
||||
table.columns.push({
|
||||
name: colName.replace(/["'`]/g, ''),
|
||||
type: 'TEXT', // Default to TEXT for untyped columns
|
||||
nullable: true,
|
||||
primaryKey: false,
|
||||
unique: false,
|
||||
default: '',
|
||||
increment: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
inCreateTable = false;
|
||||
currentTable = null;
|
||||
}
|
||||
// Process column definitions inside CREATE TABLE
|
||||
else if (inCreateTable && currentTable && line.includes('"')) {
|
||||
// Column line pattern optimized for user's DDL format
|
||||
const columnPattern = /\s*["'`](\w+)["'`]\s+([A-Za-z0-9_]+)(.+)?/i;
|
||||
const match = columnPattern.exec(line);
|
||||
} else {
|
||||
// Parse normal table with typed columns
|
||||
// Split by commas not inside parentheses
|
||||
const columnDefs = [];
|
||||
let current = '';
|
||||
let parenDepth = 0;
|
||||
|
||||
if (match) {
|
||||
const columnName = match[1];
|
||||
const rawType = match[2].toUpperCase();
|
||||
const restOfLine = match[3] || '';
|
||||
for (let i = 0; i < tableBody.length; i++) {
|
||||
const char = tableBody[i];
|
||||
if (char === '(') parenDepth++;
|
||||
else if (char === ')') parenDepth--;
|
||||
else if (char === ',' && parenDepth === 0) {
|
||||
columnDefs.push(current.trim());
|
||||
current = '';
|
||||
continue;
|
||||
}
|
||||
current += char;
|
||||
}
|
||||
if (current.trim()) {
|
||||
columnDefs.push(current.trim());
|
||||
}
|
||||
|
||||
// Determine column properties
|
||||
const isPrimaryKey = restOfLine
|
||||
.toUpperCase()
|
||||
.includes('PRIMARY KEY');
|
||||
const isNotNull = restOfLine.toUpperCase().includes('NOT NULL');
|
||||
const isUnique = restOfLine.toUpperCase().includes('UNIQUE');
|
||||
for (const columnDef of columnDefs) {
|
||||
const line = columnDef.trim();
|
||||
|
||||
// Extract default value
|
||||
let defaultValue = '';
|
||||
const defaultMatch = /DEFAULT\s+([^,\s)]+)/i.exec(restOfLine);
|
||||
if (defaultMatch) {
|
||||
defaultValue = defaultMatch[1];
|
||||
// Skip constraints
|
||||
if (
|
||||
line.toUpperCase().startsWith('FOREIGN KEY') ||
|
||||
line.toUpperCase().startsWith('PRIMARY KEY') ||
|
||||
line.toUpperCase().startsWith('UNIQUE') ||
|
||||
line.toUpperCase().startsWith('CHECK') ||
|
||||
line.toUpperCase().startsWith('CONSTRAINT')
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Map to appropriate SQLite storage class
|
||||
let columnType = rawType;
|
||||
if (rawType === 'INTEGER' || rawType === 'INT') {
|
||||
columnType = 'INTEGER';
|
||||
} else if (
|
||||
['REAL', 'FLOAT', 'DOUBLE', 'NUMERIC', 'DECIMAL'].includes(
|
||||
rawType
|
||||
)
|
||||
) {
|
||||
columnType = 'REAL';
|
||||
} else if (rawType === 'BLOB' || rawType === 'BINARY') {
|
||||
columnType = 'BLOB';
|
||||
} else if (
|
||||
['TIMESTAMP', 'DATETIME', 'DATE'].includes(rawType)
|
||||
) {
|
||||
columnType = 'TIMESTAMP';
|
||||
} else {
|
||||
columnType = 'TEXT';
|
||||
}
|
||||
// Parse column: handle both quoted and unquoted identifiers
|
||||
// Pattern: [quotes]columnName[quotes] dataType [constraints]
|
||||
const columnPattern = /^["'`]?([\w]+)["'`]?\s+(\w+)(.*)$/i;
|
||||
const columnMatch = columnPattern.exec(line);
|
||||
|
||||
// Add column to the table
|
||||
currentTable.columns.push({
|
||||
name: columnName,
|
||||
type: columnType,
|
||||
nullable: !isNotNull,
|
||||
primaryKey: isPrimaryKey,
|
||||
unique: isUnique || isPrimaryKey,
|
||||
default: defaultValue,
|
||||
increment: isPrimaryKey && columnType === 'INTEGER',
|
||||
});
|
||||
if (columnMatch) {
|
||||
const columnName = columnMatch[1];
|
||||
const rawType = columnMatch[2].toUpperCase();
|
||||
const restOfLine = columnMatch[3] || '';
|
||||
const upperRest = restOfLine.toUpperCase();
|
||||
|
||||
// Determine column properties
|
||||
const isPrimaryKey = upperRest.includes('PRIMARY KEY');
|
||||
const isAutoIncrement = upperRest.includes('AUTOINCREMENT');
|
||||
const isNotNull =
|
||||
upperRest.includes('NOT NULL') || isPrimaryKey;
|
||||
const isUnique =
|
||||
upperRest.includes('UNIQUE') || isPrimaryKey;
|
||||
|
||||
// Extract default value
|
||||
let defaultValue = '';
|
||||
const defaultMatch = /DEFAULT\s+([^,)]+)/i.exec(restOfLine);
|
||||
if (defaultMatch) {
|
||||
defaultValue = defaultMatch[1].trim();
|
||||
// Remove quotes if present
|
||||
if (
|
||||
(defaultValue.startsWith("'") &&
|
||||
defaultValue.endsWith("'")) ||
|
||||
(defaultValue.startsWith('"') &&
|
||||
defaultValue.endsWith('"'))
|
||||
) {
|
||||
defaultValue = defaultValue.slice(1, -1);
|
||||
}
|
||||
}
|
||||
|
||||
// Map to appropriate SQLite storage class
|
||||
let columnType = rawType;
|
||||
if (rawType === 'INTEGER' || rawType === 'INT') {
|
||||
columnType = 'INTEGER';
|
||||
} else if (
|
||||
[
|
||||
'REAL',
|
||||
'FLOAT',
|
||||
'DOUBLE',
|
||||
'NUMERIC',
|
||||
'DECIMAL',
|
||||
].includes(rawType)
|
||||
) {
|
||||
columnType = 'REAL';
|
||||
} else if (rawType === 'BLOB' || rawType === 'BINARY') {
|
||||
columnType = 'BLOB';
|
||||
} else if (
|
||||
['TIMESTAMP', 'DATETIME', 'DATE', 'TIME'].includes(
|
||||
rawType
|
||||
)
|
||||
) {
|
||||
columnType = 'TIMESTAMP';
|
||||
} else if (
|
||||
['TEXT', 'VARCHAR', 'CHAR', 'CLOB', 'STRING'].includes(
|
||||
rawType
|
||||
) ||
|
||||
rawType.startsWith('VARCHAR') ||
|
||||
rawType.startsWith('CHAR')
|
||||
) {
|
||||
columnType = 'TEXT';
|
||||
} else {
|
||||
// Default to TEXT for unknown types
|
||||
columnType = 'TEXT';
|
||||
}
|
||||
|
||||
// Add column to the table
|
||||
table.columns.push({
|
||||
name: columnName,
|
||||
type: columnType,
|
||||
nullable: !isNotNull,
|
||||
primaryKey: isPrimaryKey,
|
||||
unique: isUnique,
|
||||
default: defaultValue,
|
||||
increment:
|
||||
isPrimaryKey &&
|
||||
isAutoIncrement &&
|
||||
columnType === 'INTEGER',
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (table.columns.length > 0 || tableName === 'sqlite_sequence') {
|
||||
tables.push(table);
|
||||
}
|
||||
}
|
||||
|
||||
return tables;
|
||||
|
||||
7
src/lib/dbml/dbml-export/__tests__/cases/4.dbml
Normal file
7
src/lib/dbml/dbml-export/__tests__/cases/4.dbml
Normal file
@@ -0,0 +1,7 @@
|
||||
Table "public"."orders" {
|
||||
"order_id" integer [pk, not null]
|
||||
"customer_id" integer [not null]
|
||||
"order_date" date [not null, default: `CURRENT_DATE`]
|
||||
"total_amount" numeric [not null, default: 0]
|
||||
"status" varchar(50) [not null, default: 'Pending']
|
||||
}
|
||||
1
src/lib/dbml/dbml-export/__tests__/cases/4.json
Normal file
1
src/lib/dbml/dbml-export/__tests__/cases/4.json
Normal file
@@ -0,0 +1 @@
|
||||
{"id":"6b81a1787207","name":"SQL Import (postgresql)","createdAt":"2025-09-15T08:46:26.747Z","updatedAt":"2025-09-17T11:32:13.876Z","databaseType":"postgresql","tables":[{"id":"5ytf0yj9etpmm7mhmhvpu8kfj","name":"orders","schema":"public","order":1,"fields":[{"id":"w7l77cy9hylvlitdovt4ktdmk","name":"order_id","type":{"id":"integer","name":"integer"},"nullable":false,"primaryKey":true,"unique":false,"default":"","createdAt":1757925986747,"increment":true},{"id":"vz7747t5fxrb62v1eepmahv9v","name":"customer_id","type":{"id":"integer","name":"integer"},"nullable":false,"primaryKey":false,"unique":false,"default":"","createdAt":1757925986747,"increment":false},{"id":"geq9qy6sv4ozl2lg9fvcyzxpf","name":"order_date","type":{"name":"date","id":"date","usageLevel":1},"nullable":false,"primaryKey":false,"unique":false,"default":"CURRENT_DATE()","createdAt":1757925986747,"increment":false},{"id":"z928n7umvpec79t2eif7kmde9","name":"total_amount","type":{"name":"numeric","id":"numeric","fieldAttributes":{"precision":{"max":999,"min":1,"default":10},"scale":{"max":999,"min":0,"default":2}}},"nullable":false,"primaryKey":false,"unique":false,"default":"0","createdAt":1757925986747,"increment":false},{"id":"7bkrd0rp1s17bi1lnle6pesc7","name":"status","type":{"name":"varchar","id":"varchar","fieldAttributes":{"hasCharMaxLength":true},"usageLevel":1},"nullable":false,"primaryKey":false,"unique":false,"default":"'Pending'","createdAt":1757925986747,"increment":false,"characterMaximumLength":"50"}],"indexes":[],"x":113,"y":747,"color":"#8eb7ff","isView":false,"createdAt":1757925986747,"diagramId":"6b81a1787207","parentAreaId":null}],"relationships":[],"dependencies":[],"storageMode":"project","lastProjectSavedAt":"2025-09-17T11:32:13.876Z","areas":[],"creationMethod":"imported","customTypes":[]}
|
||||
129
src/lib/dbml/dbml-export/__tests__/cases/5.inline.dbml
Normal file
129
src/lib/dbml/dbml-export/__tests__/cases/5.inline.dbml
Normal file
@@ -0,0 +1,129 @@
|
||||
Enum "cbhpm_entradas_tipo" {
|
||||
"grupo"
|
||||
"subgrupo"
|
||||
"procedimento"
|
||||
}
|
||||
|
||||
Enum "cid_entradas_tipo" {
|
||||
"capitulo"
|
||||
"agrupamento"
|
||||
"categoria"
|
||||
"subcategoria"
|
||||
}
|
||||
|
||||
Enum "digital_signature_provider" {
|
||||
"soluti"
|
||||
"valid"
|
||||
}
|
||||
|
||||
Enum "impresso_posicao" {
|
||||
"start"
|
||||
"center"
|
||||
"end"
|
||||
}
|
||||
|
||||
Enum "otp_provider" {
|
||||
"clinic"
|
||||
"soluti_bird_id"
|
||||
}
|
||||
|
||||
Enum "tipo_cobranca" {
|
||||
"valor"
|
||||
"porte"
|
||||
}
|
||||
|
||||
Enum "tipo_contato_movel" {
|
||||
"celular"
|
||||
"telefone_residencial"
|
||||
"telefone_comercial"
|
||||
}
|
||||
|
||||
Enum "tipo_contrato" {
|
||||
"trial"
|
||||
"common"
|
||||
}
|
||||
|
||||
Enum "tipo_endereco" {
|
||||
"residencial"
|
||||
"comercial"
|
||||
"cobranca"
|
||||
}
|
||||
|
||||
Enum "tipo_espectro_autista" {
|
||||
"leve"
|
||||
"moderado"
|
||||
"severo"
|
||||
}
|
||||
|
||||
Enum "tipo_estado_civil" {
|
||||
"nao_infomado"
|
||||
"solteiro"
|
||||
"casado"
|
||||
"divorciado"
|
||||
"viuvo"
|
||||
}
|
||||
|
||||
Enum "tipo_etnia" {
|
||||
"nao_infomado"
|
||||
"branca"
|
||||
"preta"
|
||||
"parda"
|
||||
"amarela"
|
||||
"indigena"
|
||||
}
|
||||
|
||||
Enum "tipo_excecao" {
|
||||
"bloqueio"
|
||||
"compromisso"
|
||||
}
|
||||
|
||||
Enum "tipo_metodo_reajuste" {
|
||||
"percentual"
|
||||
"valor"
|
||||
}
|
||||
|
||||
Enum "tipo_pessoa" {
|
||||
"fisica"
|
||||
"juridica"
|
||||
}
|
||||
|
||||
Enum "tipo_procedimento" {
|
||||
"consulta"
|
||||
"exame_laboratorial"
|
||||
"exame_imagem"
|
||||
"procedimento_clinico"
|
||||
"procedimento_cirurgico"
|
||||
"terapia"
|
||||
"outros"
|
||||
}
|
||||
|
||||
Enum "tipo_relacionamento" {
|
||||
"pai"
|
||||
"mae"
|
||||
"conjuge"
|
||||
"filho_a"
|
||||
"tutor_legal"
|
||||
"contato_emergencia"
|
||||
"outro"
|
||||
}
|
||||
|
||||
Enum "tipo_sexo" {
|
||||
"nao_infomado"
|
||||
"masculino"
|
||||
"feminino"
|
||||
"intersexo"
|
||||
}
|
||||
|
||||
Enum "tipo_status_agendamento" {
|
||||
"em espera"
|
||||
"faltou"
|
||||
"ok"
|
||||
}
|
||||
|
||||
Table "public"."organizacao_cfg_impressos" {
|
||||
"id_organizacao" integer [pk, not null, ref: < "public"."organizacao"."id"]
|
||||
}
|
||||
|
||||
Table "public"."organizacao" {
|
||||
"id" integer [pk, not null]
|
||||
}
|
||||
1
src/lib/dbml/dbml-export/__tests__/cases/5.json
Normal file
1
src/lib/dbml/dbml-export/__tests__/cases/5.json
Normal file
File diff suppressed because one or more lines are too long
@@ -4,64 +4,66 @@ import { generateDBMLFromDiagram } from '../dbml-export';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
describe('DBML Export - Diagram Case 1 Tests', () => {
|
||||
const testCase = (caseNumber: string) => {
|
||||
// Read the JSON file
|
||||
const jsonPath = path.join(__dirname, 'cases', `${caseNumber}.json`);
|
||||
const jsonContent = fs.readFileSync(jsonPath, 'utf-8');
|
||||
|
||||
// Parse the JSON and convert to diagram
|
||||
const diagram = diagramFromJSONInput(jsonContent);
|
||||
|
||||
// Generate DBML from the diagram
|
||||
const result = generateDBMLFromDiagram(diagram);
|
||||
|
||||
// Check for both regular and inline DBML files
|
||||
const regularDbmlPath = path.join(__dirname, 'cases', `${caseNumber}.dbml`);
|
||||
const inlineDbmlPath = path.join(
|
||||
__dirname,
|
||||
'cases',
|
||||
`${caseNumber}.inline.dbml`
|
||||
);
|
||||
|
||||
const hasRegularDbml = fs.existsSync(regularDbmlPath);
|
||||
const hasInlineDbml = fs.existsSync(inlineDbmlPath);
|
||||
|
||||
// Test regular DBML if file exists
|
||||
if (hasRegularDbml) {
|
||||
const expectedRegularDBML = fs.readFileSync(regularDbmlPath, 'utf-8');
|
||||
expect(result.standardDbml).toBe(expectedRegularDBML);
|
||||
}
|
||||
|
||||
// Test inline DBML if file exists
|
||||
if (hasInlineDbml) {
|
||||
const expectedInlineDBML = fs.readFileSync(inlineDbmlPath, 'utf-8');
|
||||
expect(result.inlineDbml).toBe(expectedInlineDBML);
|
||||
}
|
||||
|
||||
// Ensure at least one DBML file exists
|
||||
if (!hasRegularDbml && !hasInlineDbml) {
|
||||
throw new Error(
|
||||
`No DBML file found for test case ${caseNumber}. Expected either ${caseNumber}.dbml or ${caseNumber}.inline.dbml`
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
describe('DBML Export cases', () => {
|
||||
it('should handle case 1 diagram', { timeout: 30000 }, async () => {
|
||||
// Read the JSON file
|
||||
const jsonPath = path.join(__dirname, 'cases', '1.json');
|
||||
const jsonContent = fs.readFileSync(jsonPath, 'utf-8');
|
||||
|
||||
// Parse the JSON and convert to diagram
|
||||
const diagram = diagramFromJSONInput(jsonContent);
|
||||
|
||||
// Generate DBML from the diagram
|
||||
const result = generateDBMLFromDiagram(diagram);
|
||||
const generatedDBML = result.standardDbml;
|
||||
|
||||
// Read the expected DBML file
|
||||
const dbmlPath = path.join(__dirname, 'cases', '1.dbml');
|
||||
const expectedDBML = fs.readFileSync(dbmlPath, 'utf-8');
|
||||
|
||||
// Compare the generated DBML with the expected DBML
|
||||
expect(generatedDBML).toBe(expectedDBML);
|
||||
testCase('1');
|
||||
});
|
||||
|
||||
it('should handle case 2 diagram', { timeout: 30000 }, async () => {
|
||||
// Read the JSON file
|
||||
const jsonPath = path.join(__dirname, 'cases', '2.json');
|
||||
const jsonContent = fs.readFileSync(jsonPath, 'utf-8');
|
||||
|
||||
// Parse the JSON and convert to diagram
|
||||
const diagram = diagramFromJSONInput(jsonContent);
|
||||
|
||||
// Generate DBML from the diagram
|
||||
const result = generateDBMLFromDiagram(diagram);
|
||||
const generatedDBML = result.standardDbml;
|
||||
|
||||
// Read the expected DBML file
|
||||
const dbmlPath = path.join(__dirname, 'cases', '2.dbml');
|
||||
const expectedDBML = fs.readFileSync(dbmlPath, 'utf-8');
|
||||
|
||||
// Compare the generated DBML with the expected DBML
|
||||
expect(generatedDBML).toBe(expectedDBML);
|
||||
testCase('2');
|
||||
});
|
||||
|
||||
it('should handle case 3 diagram', { timeout: 30000 }, async () => {
|
||||
// Read the JSON file
|
||||
const jsonPath = path.join(__dirname, 'cases', '3.json');
|
||||
const jsonContent = fs.readFileSync(jsonPath, 'utf-8');
|
||||
testCase('3');
|
||||
});
|
||||
|
||||
// Parse the JSON and convert to diagram
|
||||
const diagram = diagramFromJSONInput(jsonContent);
|
||||
it('should handle case 4 diagram', { timeout: 30000 }, async () => {
|
||||
testCase('4');
|
||||
});
|
||||
|
||||
// Generate DBML from the diagram
|
||||
const result = generateDBMLFromDiagram(diagram);
|
||||
const generatedDBML = result.standardDbml;
|
||||
|
||||
// Read the expected DBML file
|
||||
const dbmlPath = path.join(__dirname, 'cases', '3.dbml');
|
||||
const expectedDBML = fs.readFileSync(dbmlPath, 'utf-8');
|
||||
|
||||
// Compare the generated DBML with the expected DBML
|
||||
expect(generatedDBML).toBe(expectedDBML);
|
||||
it('should handle case 5 diagram', { timeout: 30000 }, async () => {
|
||||
testCase('5');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -3,7 +3,6 @@ import { exportBaseSQL } from '@/lib/data/sql-export/export-sql-script';
|
||||
import type { Diagram } from '@/lib/domain/diagram';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import { type DBField } from '@/lib/domain/db-field';
|
||||
import type { DBCustomType } from '@/lib/domain/db-custom-type';
|
||||
import { DBCustomTypeKind } from '@/lib/domain/db-custom-type';
|
||||
|
||||
@@ -502,38 +501,35 @@ const convertToInlineRefs = (dbml: string): string => {
|
||||
return cleanedDbml;
|
||||
};
|
||||
|
||||
// Function to check for DBML reserved keywords
|
||||
const isDBMLKeyword = (name: string): boolean => {
|
||||
const keywords = new Set([
|
||||
'YES',
|
||||
'NO',
|
||||
'TRUE',
|
||||
'FALSE',
|
||||
'NULL', // DBML reserved keywords (boolean literals)
|
||||
]);
|
||||
return keywords.has(name.toUpperCase());
|
||||
};
|
||||
|
||||
// Function to check for SQL keywords (add more if needed)
|
||||
const isSQLKeyword = (name: string): boolean => {
|
||||
const keywords = new Set(['CASE', 'ORDER', 'GROUP', 'FROM', 'TO', 'USER']); // Common SQL keywords
|
||||
return keywords.has(name.toUpperCase());
|
||||
};
|
||||
|
||||
// Function to remove duplicate relationships from the diagram
|
||||
const deduplicateRelationships = (diagram: Diagram): Diagram => {
|
||||
if (!diagram.relationships) return diagram;
|
||||
|
||||
const seenRelationships = new Set<string>();
|
||||
const seenBidirectional = new Set<string>();
|
||||
const uniqueRelationships = diagram.relationships.filter((rel) => {
|
||||
// Create a unique key based on the relationship endpoints
|
||||
const relationshipKey = `${rel.sourceTableId}-${rel.sourceFieldId}->${rel.targetTableId}-${rel.targetFieldId}`;
|
||||
|
||||
// Create a normalized key that's the same for both directions
|
||||
const normalizedKey = [
|
||||
`${rel.sourceTableId}-${rel.sourceFieldId}`,
|
||||
`${rel.targetTableId}-${rel.targetFieldId}`,
|
||||
]
|
||||
.sort()
|
||||
.join('<->');
|
||||
|
||||
if (seenRelationships.has(relationshipKey)) {
|
||||
return false; // Skip duplicate
|
||||
return false; // Skip exact duplicate
|
||||
}
|
||||
|
||||
if (seenBidirectional.has(normalizedKey)) {
|
||||
// This is a bidirectional relationship, skip the second one
|
||||
return false;
|
||||
}
|
||||
|
||||
seenRelationships.add(relationshipKey);
|
||||
seenBidirectional.add(normalizedKey);
|
||||
return true; // Keep unique relationship
|
||||
});
|
||||
|
||||
@@ -543,48 +539,6 @@ const deduplicateRelationships = (diagram: Diagram): Diagram => {
|
||||
};
|
||||
};
|
||||
|
||||
// Function to append comment statements for renamed tables and fields
|
||||
const appendRenameComments = (
|
||||
baseScript: string,
|
||||
sqlRenamedTables: Map<string, string>,
|
||||
fieldRenames: Array<{
|
||||
table: string;
|
||||
originalName: string;
|
||||
newName: string;
|
||||
}>,
|
||||
finalDiagramForExport: Diagram
|
||||
): string => {
|
||||
let script = baseScript;
|
||||
|
||||
// Append COMMENTS for tables renamed due to SQL keywords
|
||||
sqlRenamedTables.forEach((originalName, newName) => {
|
||||
const escapedOriginal = originalName.replace(/'/g, "\\'");
|
||||
// Find the table to get its schema
|
||||
const table = finalDiagramForExport.tables?.find(
|
||||
(t) => t.name === newName
|
||||
);
|
||||
const tableIdentifier = table?.schema
|
||||
? `"${table.schema}"."${newName}"`
|
||||
: `"${newName}"`;
|
||||
script += `\nCOMMENT ON TABLE ${tableIdentifier} IS 'Original name was "${escapedOriginal}" (renamed due to SQL keyword conflict).';`;
|
||||
});
|
||||
|
||||
// Append COMMENTS for fields renamed due to SQL keyword conflicts
|
||||
fieldRenames.forEach(({ table, originalName, newName }) => {
|
||||
const escapedOriginal = originalName.replace(/'/g, "\\'");
|
||||
// Find the table to get its schema
|
||||
const tableObj = finalDiagramForExport.tables?.find(
|
||||
(t) => t.name === table
|
||||
);
|
||||
const tableIdentifier = tableObj?.schema
|
||||
? `"${tableObj.schema}"."${table}"`
|
||||
: `"${table}"`;
|
||||
script += `\nCOMMENT ON COLUMN ${tableIdentifier}."${newName}" IS 'Original name was "${escapedOriginal}" (renamed due to SQL keyword conflict).';`;
|
||||
});
|
||||
|
||||
return script;
|
||||
};
|
||||
|
||||
// Fix DBML formatting to ensure consistent display of char and varchar types
|
||||
const normalizeCharTypeFormat = (dbml: string): string => {
|
||||
// Replace "char (N)" with "char(N)" to match varchar's formatting
|
||||
@@ -843,105 +797,33 @@ export function generateDBMLFromDiagram(diagram: Diagram): DBMLExportResult {
|
||||
// Sanitize field names ('from'/'to' in 'relation' table)
|
||||
const cleanDiagram = fixProblematicFieldNames(filteredDiagram);
|
||||
|
||||
// --- Final sanitization and renaming pass ---
|
||||
// Only rename keywords for PostgreSQL/SQLite
|
||||
// For other databases, we'll wrap problematic names in quotes instead
|
||||
const shouldRenameKeywords =
|
||||
diagram.databaseType === DatabaseType.POSTGRESQL ||
|
||||
diagram.databaseType === DatabaseType.SQLITE;
|
||||
const sqlRenamedTables = new Map<string, string>();
|
||||
const fieldRenames: Array<{
|
||||
table: string;
|
||||
originalName: string;
|
||||
newName: string;
|
||||
}> = [];
|
||||
|
||||
// Simplified processing - just handle duplicate field names
|
||||
const processTable = (table: DBTable) => {
|
||||
const originalName = table.name;
|
||||
let safeTableName = originalName;
|
||||
|
||||
// If name contains spaces or special characters, wrap in quotes
|
||||
if (/[^\w]/.test(originalName)) {
|
||||
safeTableName = `"${originalName.replace(/"/g, '\\"')}"`;
|
||||
}
|
||||
|
||||
// Rename table if it's a keyword (PostgreSQL/SQLite only)
|
||||
if (
|
||||
shouldRenameKeywords &&
|
||||
(isDBMLKeyword(originalName) || isSQLKeyword(originalName))
|
||||
) {
|
||||
const newName = `${originalName}_table`;
|
||||
sqlRenamedTables.set(newName, originalName);
|
||||
safeTableName = /[^\w]/.test(newName)
|
||||
? `"${newName.replace(/"/g, '\\"')}"`
|
||||
: newName;
|
||||
}
|
||||
// For other databases, just quote DBML keywords
|
||||
else if (!shouldRenameKeywords && isDBMLKeyword(originalName)) {
|
||||
safeTableName = `"${originalName.replace(/"/g, '\\"')}"`;
|
||||
}
|
||||
|
||||
const fieldNameCounts = new Map<string, number>();
|
||||
const processedFields = table.fields.map((field) => {
|
||||
let finalSafeName = field.name;
|
||||
|
||||
// If field name contains spaces or special characters, wrap in quotes
|
||||
if (/[^\w]/.test(field.name)) {
|
||||
finalSafeName = `"${field.name.replace(/"/g, '\\"')}"`;
|
||||
}
|
||||
|
||||
// Handle duplicate field names
|
||||
const count = fieldNameCounts.get(field.name) || 0;
|
||||
if (count > 0) {
|
||||
const newName = `${field.name}_${count + 1}`;
|
||||
finalSafeName = /[^\w]/.test(newName)
|
||||
? `"${newName.replace(/"/g, '\\"')}"`
|
||||
: newName;
|
||||
return {
|
||||
...field,
|
||||
name: newName,
|
||||
};
|
||||
}
|
||||
fieldNameCounts.set(field.name, count + 1);
|
||||
|
||||
// Create sanitized field
|
||||
const sanitizedField: DBField = {
|
||||
...field,
|
||||
name: finalSafeName,
|
||||
};
|
||||
|
||||
// Rename field if it's a keyword (PostgreSQL/SQLite only)
|
||||
if (
|
||||
shouldRenameKeywords &&
|
||||
(isDBMLKeyword(field.name) || isSQLKeyword(field.name))
|
||||
) {
|
||||
const newFieldName = `${field.name}_field`;
|
||||
fieldRenames.push({
|
||||
table: safeTableName,
|
||||
originalName: field.name,
|
||||
newName: newFieldName,
|
||||
});
|
||||
sanitizedField.name = /[^\w]/.test(newFieldName)
|
||||
? `"${newFieldName.replace(/"/g, '\\"')}"`
|
||||
: newFieldName;
|
||||
}
|
||||
// For other databases, just quote DBML keywords
|
||||
else if (!shouldRenameKeywords && isDBMLKeyword(field.name)) {
|
||||
sanitizedField.name = `"${field.name.replace(/"/g, '\\"')}"`;
|
||||
}
|
||||
|
||||
return sanitizedField;
|
||||
return field;
|
||||
});
|
||||
|
||||
return {
|
||||
...table,
|
||||
name: safeTableName,
|
||||
fields: processedFields,
|
||||
indexes: (table.indexes || [])
|
||||
.filter((index) => !index.isPrimaryKey) // Filter out PK indexes as they're handled separately
|
||||
.map((index) => ({
|
||||
...index,
|
||||
name: index.name
|
||||
? /[^\w]/.test(index.name)
|
||||
? `"${index.name.replace(/"/g, '\\"')}"`
|
||||
: index.name
|
||||
: `idx_${Math.random().toString(36).substring(2, 8)}`,
|
||||
name:
|
||||
index.name ||
|
||||
`idx_${Math.random().toString(36).substring(2, 8)}`,
|
||||
})),
|
||||
};
|
||||
};
|
||||
@@ -979,19 +861,6 @@ export function generateDBMLFromDiagram(diagram: Diagram): DBMLExportResult {
|
||||
|
||||
baseScript = sanitizeSQLforDBML(baseScript);
|
||||
|
||||
// Append comments for renamed tables and fields (PostgreSQL/SQLite only)
|
||||
if (
|
||||
shouldRenameKeywords &&
|
||||
(sqlRenamedTables.size > 0 || fieldRenames.length > 0)
|
||||
) {
|
||||
baseScript = appendRenameComments(
|
||||
baseScript,
|
||||
sqlRenamedTables,
|
||||
fieldRenames,
|
||||
finalDiagramForExport
|
||||
);
|
||||
}
|
||||
|
||||
standard = fixArrayTypes(
|
||||
normalizeCharTypeFormat(
|
||||
fixMultilineTableNames(
|
||||
|
||||
3
src/lib/dbml/dbml-import/__tests__/cases/1.dbml
Normal file
3
src/lib/dbml/dbml-import/__tests__/cases/1.dbml
Normal file
@@ -0,0 +1,3 @@
|
||||
Table "public"."table_3"{
|
||||
"id" bigint [pk]
|
||||
}
|
||||
1
src/lib/dbml/dbml-import/__tests__/cases/1.json
Normal file
1
src/lib/dbml/dbml-import/__tests__/cases/1.json
Normal file
@@ -0,0 +1 @@
|
||||
{"id":"mqqwkkodxt6p","name":"Diagram 3","createdAt":"2025-09-16T15:33:25.300Z","updatedAt":"2025-09-16T15:33:31.563Z","databaseType":"postgresql","tables":[{"id":"loyxg6mafzos5u971uirjs3zh","name":"table_3","schema":"","order":0,"fields":[{"id":"29e2p9bom0uxo1n0a9ze5auuy","name":"id","type":{"name":"bigint","id":"bigint","usageLevel":2},"nullable":true,"primaryKey":true,"unique":true,"createdAt":1758036805300}],"indexes":[{"id":"5gf0aeptch1uk1bxv0x89wxxe","name":"pk_table_3_id","fieldIds":["29e2p9bom0uxo1n0a9ze5auuy"],"unique":true,"isPrimaryKey":true,"createdAt":1758036811564}],"x":0,"y":0,"color":"#8eb7ff","isView":false,"createdAt":1758036805300,"diagramId":"mqqwkkodxt6p"}],"relationships":[],"dependencies":[],"areas":[],"customTypes":[]}
|
||||
7
src/lib/dbml/dbml-import/__tests__/cases/2.dbml
Normal file
7
src/lib/dbml/dbml-import/__tests__/cases/2.dbml
Normal file
@@ -0,0 +1,7 @@
|
||||
Table "table_3" {
|
||||
"id" bigint [pk]
|
||||
}
|
||||
|
||||
Table "table_2" {
|
||||
"id" bigint [pk, not null, ref: < "table_3"."id"]
|
||||
}
|
||||
1
src/lib/dbml/dbml-import/__tests__/cases/2.json
Normal file
1
src/lib/dbml/dbml-import/__tests__/cases/2.json
Normal file
@@ -0,0 +1 @@
|
||||
{"id":"mqqwkkod6r09","name":"Diagram 10","createdAt":"2025-09-16T15:47:40.655Z","updatedAt":"2025-09-16T15:47:50.179Z","databaseType":"postgresql","tables":[{"id":"6xbco4ihmuiyv2heuw9fggbgx","name":"table_3","schema":"","order":0,"fields":[{"id":"rxftaey7uxvq5qg6ix1hbak1c","name":"id","type":{"name":"bigint","id":"bigint","usageLevel":2},"nullable":true,"primaryKey":true,"unique":true,"createdAt":1758037660654}],"indexes":[{"id":"vsyjjaq2l58urkh9qm2g9hqhd","name":"pk_table_3_id","fieldIds":["rxftaey7uxvq5qg6ix1hbak1c"],"unique":true,"isPrimaryKey":true,"createdAt":1758037660654}],"x":0,"y":0,"color":"#8eb7ff","isView":false,"createdAt":1758037660654,"diagramId":"mqqwkkod6r09"},{"id":"klu6k5ntddcxfdsu0fsfcwbiw","name":"table_2","schema":"","order":1,"fields":[{"id":"qq2415tivmtvun8vd727d9mr2","name":"id","type":{"name":"bigint","id":"bigint","usageLevel":2},"nullable":false,"primaryKey":true,"unique":true,"createdAt":1758037660655}],"indexes":[{"id":"cvv7sgmq07i9y54lz9a97nah5","name":"pk_table_2_id","fieldIds":["qq2415tivmtvun8vd727d9mr2"],"unique":true,"isPrimaryKey":true,"createdAt":1758037660655}],"x":300,"y":0,"color":"#8eb7ff","isView":false,"createdAt":1758037660655,"diagramId":"mqqwkkod6r09"}],"relationships":[{"id":"yw2pbcumsabuncc6rjnp3n87t","name":"table_3_id_table_2_id","sourceSchema":"","targetSchema":"","sourceTableId":"6xbco4ihmuiyv2heuw9fggbgx","targetTableId":"klu6k5ntddcxfdsu0fsfcwbiw","sourceFieldId":"rxftaey7uxvq5qg6ix1hbak1c","targetFieldId":"qq2415tivmtvun8vd727d9mr2","sourceCardinality":"one","targetCardinality":"one","createdAt":1758037660655,"diagramId":"mqqwkkod6r09"}],"dependencies":[],"areas":[],"customTypes":[]}
|
||||
298
src/lib/dbml/dbml-import/__tests__/dbml-import-cases.test.ts
Normal file
298
src/lib/dbml/dbml-import/__tests__/dbml-import-cases.test.ts
Normal file
@@ -0,0 +1,298 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { importDBMLToDiagram } from '../dbml-import';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import type { DBField } from '@/lib/domain/db-field';
|
||||
import type { DBRelationship } from '@/lib/domain/db-relationship';
|
||||
import { defaultSchemas } from '@/lib/data/default-schemas';
|
||||
|
||||
// Type for field map entries
|
||||
interface FieldMapEntry {
|
||||
tableName: string;
|
||||
fieldName: string;
|
||||
}
|
||||
|
||||
// Helper function to compare field properties (excluding IDs and timestamps)
|
||||
function expectFieldsMatch(
|
||||
actualFields: DBField[],
|
||||
expectedFields: DBField[]
|
||||
): void {
|
||||
expect(actualFields).toHaveLength(expectedFields.length);
|
||||
|
||||
for (let i = 0; i < actualFields.length; i++) {
|
||||
const actual = actualFields[i];
|
||||
const expected = expectedFields[i];
|
||||
|
||||
// Compare field properties (excluding ID and createdAt)
|
||||
expect(actual.name).toBe(expected.name);
|
||||
|
||||
// Handle type comparison (could be string or object with name property)
|
||||
if (typeof expected.type === 'object' && expected.type?.name) {
|
||||
expect(actual.type?.name).toBe(expected.type.name);
|
||||
} else if (typeof expected.type === 'string') {
|
||||
expect(actual.type?.name).toBe(expected.type);
|
||||
}
|
||||
|
||||
// Boolean flags with defaults
|
||||
expect(actual.primaryKey).toBe(expected.primaryKey || false);
|
||||
expect(actual.unique).toBe(expected.unique || false);
|
||||
expect(actual.nullable).toBe(expected.nullable ?? true);
|
||||
|
||||
// Optional boolean flag
|
||||
if (expected.increment !== undefined) {
|
||||
expect(actual.increment).toBe(expected.increment);
|
||||
}
|
||||
|
||||
// Optional string/number properties
|
||||
if (expected.characterMaximumLength !== undefined) {
|
||||
expect(actual.characterMaximumLength).toBe(
|
||||
expected.characterMaximumLength
|
||||
);
|
||||
}
|
||||
|
||||
if (expected.precision !== undefined) {
|
||||
expect(actual.precision).toBe(expected.precision);
|
||||
}
|
||||
|
||||
if (expected.scale !== undefined) {
|
||||
expect(actual.scale).toBe(expected.scale);
|
||||
}
|
||||
|
||||
if (expected.default !== undefined) {
|
||||
expect(actual.default).toBe(expected.default);
|
||||
}
|
||||
|
||||
if (expected.collation !== undefined) {
|
||||
expect(actual.collation).toBe(expected.collation);
|
||||
}
|
||||
|
||||
if (expected.comments !== undefined) {
|
||||
expect(actual.comments).toBe(expected.comments);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to compare table properties (excluding IDs)
|
||||
function expectTablesMatch(
|
||||
actualTables: DBTable[],
|
||||
expectedTables: DBTable[],
|
||||
databaseType: DatabaseType
|
||||
): void {
|
||||
expect(actualTables).toHaveLength(expectedTables.length);
|
||||
|
||||
// Sort tables by name for consistent comparison
|
||||
const sortedActual = [...actualTables].sort((a, b) =>
|
||||
a.name.localeCompare(b.name)
|
||||
);
|
||||
const sortedExpected = [...expectedTables].sort((a, b) =>
|
||||
a.name.localeCompare(b.name)
|
||||
);
|
||||
|
||||
for (let i = 0; i < sortedActual.length; i++) {
|
||||
const actual = sortedActual[i];
|
||||
const expected = sortedExpected[i];
|
||||
|
||||
// Compare table properties (excluding ID and position)
|
||||
expect(actual.name).toBe(expected.name);
|
||||
|
||||
// Schema comparison - handle differences in how schemas are represented
|
||||
if (expected.schema) {
|
||||
const defaultSchema = defaultSchemas[databaseType];
|
||||
if (defaultSchema && expected.schema === defaultSchema) {
|
||||
// DBML parser might not include default schema or might handle it differently
|
||||
expect(
|
||||
actual.schema === expected.schema ||
|
||||
actual.schema === '' ||
|
||||
actual.schema === undefined
|
||||
).toBeTruthy();
|
||||
} else {
|
||||
expect(actual.schema).toBe(expected.schema);
|
||||
}
|
||||
}
|
||||
|
||||
// Compare fields
|
||||
expectFieldsMatch(actual.fields, expected.fields);
|
||||
|
||||
// Check indexes exist for tables with primary keys
|
||||
const hasPrimaryKeyField = actual.fields.some((f) => f.primaryKey);
|
||||
if (hasPrimaryKeyField) {
|
||||
expect(actual.indexes).toBeDefined();
|
||||
expect(actual.indexes.length).toBeGreaterThan(0);
|
||||
|
||||
const pkIndex = actual.indexes.find((idx) => idx.isPrimaryKey);
|
||||
expect(pkIndex).toBeDefined();
|
||||
expect(pkIndex?.unique).toBe(true);
|
||||
}
|
||||
|
||||
// Check comments if present
|
||||
if (expected.comments !== undefined) {
|
||||
expect(actual.comments).toBe(expected.comments);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to compare relationships (excluding IDs)
|
||||
function expectRelationshipsMatch(
|
||||
actualRelationships: DBRelationship[],
|
||||
expectedRelationships: DBRelationship[],
|
||||
actualTables: DBTable[],
|
||||
expectedTables: DBTable[]
|
||||
): void {
|
||||
expect(actualRelationships).toHaveLength(expectedRelationships.length);
|
||||
|
||||
// Create lookup maps for table and field names by ID
|
||||
const expectedTableMap = new Map(expectedTables.map((t) => [t.id, t.name]));
|
||||
const actualTableMap = new Map(actualTables.map((t) => [t.id, t.name]));
|
||||
|
||||
const expectedFieldMap = new Map<string, FieldMapEntry>();
|
||||
const actualFieldMap = new Map<string, FieldMapEntry>();
|
||||
|
||||
expectedTables.forEach((table) => {
|
||||
table.fields.forEach((field) => {
|
||||
expectedFieldMap.set(field.id, {
|
||||
tableName: table.name,
|
||||
fieldName: field.name,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
actualTables.forEach((table) => {
|
||||
table.fields.forEach((field) => {
|
||||
actualFieldMap.set(field.id, {
|
||||
tableName: table.name,
|
||||
fieldName: field.name,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// Sort relationships for consistent comparison
|
||||
const sortRelationships = (
|
||||
rels: DBRelationship[],
|
||||
tableMap: Map<string, string>,
|
||||
fieldMap: Map<string, FieldMapEntry>
|
||||
) => {
|
||||
return [...rels].sort((a, b) => {
|
||||
const aSourceTable = tableMap.get(a.sourceTableId) || '';
|
||||
const bSourceTable = tableMap.get(b.sourceTableId) || '';
|
||||
const aTargetTable = tableMap.get(a.targetTableId) || '';
|
||||
const bTargetTable = tableMap.get(b.targetTableId) || '';
|
||||
|
||||
const tableCompare =
|
||||
aSourceTable.localeCompare(bSourceTable) ||
|
||||
aTargetTable.localeCompare(bTargetTable);
|
||||
if (tableCompare !== 0) return tableCompare;
|
||||
|
||||
const aSourceField = fieldMap.get(a.sourceFieldId)?.fieldName || '';
|
||||
const bSourceField = fieldMap.get(b.sourceFieldId)?.fieldName || '';
|
||||
const aTargetField = fieldMap.get(a.targetFieldId)?.fieldName || '';
|
||||
const bTargetField = fieldMap.get(b.targetFieldId)?.fieldName || '';
|
||||
|
||||
return (
|
||||
aSourceField.localeCompare(bSourceField) ||
|
||||
aTargetField.localeCompare(bTargetField)
|
||||
);
|
||||
});
|
||||
};
|
||||
|
||||
const sortedActual = sortRelationships(
|
||||
actualRelationships,
|
||||
actualTableMap,
|
||||
actualFieldMap
|
||||
);
|
||||
const sortedExpected = sortRelationships(
|
||||
expectedRelationships,
|
||||
expectedTableMap,
|
||||
expectedFieldMap
|
||||
);
|
||||
|
||||
for (let i = 0; i < sortedActual.length; i++) {
|
||||
const actual = sortedActual[i];
|
||||
const expected = sortedExpected[i];
|
||||
|
||||
// Get table and field names for comparison
|
||||
const actualSourceTable = actualTableMap.get(actual.sourceTableId);
|
||||
const actualTargetTable = actualTableMap.get(actual.targetTableId);
|
||||
const expectedSourceTable = expectedTableMap.get(
|
||||
expected.sourceTableId
|
||||
);
|
||||
const expectedTargetTable = expectedTableMap.get(
|
||||
expected.targetTableId
|
||||
);
|
||||
|
||||
const actualSourceField = actualFieldMap.get(actual.sourceFieldId);
|
||||
const actualTargetField = actualFieldMap.get(actual.targetFieldId);
|
||||
const expectedSourceField = expectedFieldMap.get(
|
||||
expected.sourceFieldId
|
||||
);
|
||||
const expectedTargetField = expectedFieldMap.get(
|
||||
expected.targetFieldId
|
||||
);
|
||||
|
||||
// Compare relationship by table and field names
|
||||
expect(actualSourceTable).toBe(expectedSourceTable);
|
||||
expect(actualTargetTable).toBe(expectedTargetTable);
|
||||
expect(actualSourceField?.fieldName).toBe(
|
||||
expectedSourceField?.fieldName
|
||||
);
|
||||
expect(actualTargetField?.fieldName).toBe(
|
||||
expectedTargetField?.fieldName
|
||||
);
|
||||
|
||||
// Compare cardinality
|
||||
expect(actual.sourceCardinality).toBe(expected.sourceCardinality);
|
||||
expect(actual.targetCardinality).toBe(expected.targetCardinality);
|
||||
|
||||
// Compare relationship name if present
|
||||
if (expected.name !== undefined) {
|
||||
expect(actual.name).toBe(expected.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Main test helper function
|
||||
async function testDBMLImportCase(caseNumber: string): Promise<void> {
|
||||
// Read the DBML file
|
||||
const dbmlPath = path.join(__dirname, 'cases', `${caseNumber}.dbml`);
|
||||
const dbmlContent = fs.readFileSync(dbmlPath, 'utf-8');
|
||||
|
||||
// Read the expected JSON file
|
||||
const jsonPath = path.join(__dirname, 'cases', `${caseNumber}.json`);
|
||||
const jsonContent = fs.readFileSync(jsonPath, 'utf-8');
|
||||
const expectedData = JSON.parse(jsonContent);
|
||||
|
||||
// Import DBML to diagram
|
||||
const result = await importDBMLToDiagram(dbmlContent, {
|
||||
databaseType: expectedData.databaseType || DatabaseType.POSTGRESQL,
|
||||
});
|
||||
|
||||
// Check basic diagram properties
|
||||
expect(result.name).toBe('DBML Import'); // Name is always 'DBML Import'
|
||||
expect(result.databaseType).toBe(expectedData.databaseType);
|
||||
|
||||
// Check tables and fields
|
||||
expectTablesMatch(
|
||||
result.tables || [],
|
||||
expectedData.tables || [],
|
||||
expectedData.databaseType || DatabaseType.POSTGRESQL
|
||||
);
|
||||
|
||||
// Check relationships
|
||||
expectRelationshipsMatch(
|
||||
result.relationships || [],
|
||||
expectedData.relationships || [],
|
||||
result.tables || [],
|
||||
expectedData.tables || []
|
||||
);
|
||||
}
|
||||
|
||||
// Fixture-driven end-to-end cases: each case number pairs a DBML input
// (`cases/<n>.dbml`) with its expected diagram JSON (`cases/<n>.json`).
describe('DBML Import cases', () => {
    it('should handle case 1 - simple table with pk and unique', async () => {
        await testDBMLImportCase('1');
    });

    it('should handle case 2 - tables with relationships', async () => {
        await testDBMLImportCase('2');
    });
});
|
||||
149
src/lib/dbml/dbml-import/__tests__/dbml-integration.test.ts
Normal file
149
src/lib/dbml/dbml-import/__tests__/dbml-integration.test.ts
Normal file
@@ -0,0 +1,149 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import { importDBMLToDiagram } from '@/lib/dbml/dbml-import/dbml-import';
|
||||
|
||||
// This test verifies the DBML integration without UI components
// NOTE(review): the exact whitespace inside the DBML template literals below
// was reconstructed from a degraded source — confirm against the repository
// before relying on byte-exact string content.
describe('DBML Integration Tests', () => {
    it('should handle DBML import in create diagram flow', async () => {
        // Three related tables, plus DBML constructs (TableGroup, Note)
        // that the importer is expected to drop silently.
        const dbmlContent = `
Table users {
  id uuid [pk, not null]
  email varchar [unique, not null]
  created_at timestamp
}

Table posts {
  id uuid [pk]
  title varchar
  content text
  user_id uuid [ref: > users.id]
  created_at timestamp
}

Table comments {
  id uuid [pk]
  content text
  post_id uuid [ref: > posts.id]
  user_id uuid [ref: > users.id]
}

// This will be ignored
TableGroup "Content" {
  posts
  comments
}

// This will be ignored too
Note test_note {
  'This is a test note'
}`;

        const diagram = await importDBMLToDiagram(dbmlContent);

        // Verify basic structure
        expect(diagram).toBeDefined();
        expect(diagram.tables).toHaveLength(3);
        expect(diagram.relationships).toHaveLength(3);

        // Verify tables
        const tableNames = diagram.tables?.map((t) => t.name).sort();
        expect(tableNames).toEqual(['comments', 'posts', 'users']);

        // Verify users table
        const usersTable = diagram.tables?.find((t) => t.name === 'users');
        expect(usersTable).toBeDefined();
        expect(usersTable?.fields).toHaveLength(3);

        // Column settings ([unique, not null]) must survive the import
        const emailField = usersTable?.fields.find((f) => f.name === 'email');
        expect(emailField?.unique).toBe(true);
        expect(emailField?.nullable).toBe(false);

        // Verify relationships
        // There should be 3 relationships total
        expect(diagram.relationships).toHaveLength(3);

        // Find the relationship from users to posts (DBML ref is: posts.user_id > users.id)
        // This creates a relationship FROM users TO posts (one user has many posts)
        const postsTable = diagram.tables?.find((t) => t.name === 'posts');
        const usersTableId = usersTable?.id;

        const userPostRelation = diagram.relationships?.find(
            (r) =>
                r.sourceTableId === usersTableId &&
                r.targetTableId === postsTable?.id
        );

        expect(userPostRelation).toBeDefined();
        expect(userPostRelation?.sourceCardinality).toBe('one');
        expect(userPostRelation?.targetCardinality).toBe('many');
    });

    it('should handle DBML with special features', async () => {
        // Exercises type coercions the importer performs: enum -> varchar,
        // array types -> base type, and stripping of table header colors.
        const dbmlContent = `
// Enum will be converted to varchar
Table users {
  id int [pk]
  status enum
  tags text[] // Array will be converted to text
  favorite_product_id int
}

Table products [headercolor: #FF0000] {
  id int [pk]
  name varchar
  price decimal(10,2)
}

Ref: products.id < users.favorite_product_id`;

        const diagram = await importDBMLToDiagram(dbmlContent);

        expect(diagram.tables).toHaveLength(2);

        // Check enum conversion
        const usersTable = diagram.tables?.find((t) => t.name === 'users');
        const statusField = usersTable?.fields.find((f) => f.name === 'status');
        expect(statusField?.type.id).toBe('varchar');

        // Check array type conversion
        const tagsField = usersTable?.fields.find((f) => f.name === 'tags');
        expect(tagsField?.type.id).toBe('text');

        // Check that header color was removed
        const productsTable = diagram.tables?.find(
            (t) => t.name === 'products'
        );
        expect(productsTable).toBeDefined();
        expect(productsTable?.name).toBe('products');
    });

    it('should handle empty or invalid DBML gracefully', async () => {
        // Empty DBML — must return an empty diagram rather than throw
        const emptyDiagram = await importDBMLToDiagram('');
        expect(emptyDiagram.tables).toHaveLength(0);
        expect(emptyDiagram.relationships).toHaveLength(0);

        // Only comments — also an empty diagram
        const commentDiagram = await importDBMLToDiagram('// Just a comment');
        expect(commentDiagram.tables).toHaveLength(0);
        expect(commentDiagram.relationships).toHaveLength(0);
    });

    it('should preserve diagram metadata when importing DBML', async () => {
        const dbmlContent = `Table test {
  id int [pk]
}`;
        const diagram = await importDBMLToDiagram(dbmlContent);

        // Default values
        expect(diagram.name).toBe('DBML Import');
        expect(diagram.databaseType).toBe(DatabaseType.GENERIC);

        // These can be overridden by the dialog
        diagram.name = 'My Custom Diagram';
        diagram.databaseType = DatabaseType.POSTGRESQL;

        expect(diagram.name).toBe('My Custom Diagram');
        expect(diagram.databaseType).toBe(DatabaseType.POSTGRESQL);
    });
});
|
||||
@@ -180,7 +180,7 @@ describe('DBML Schema Handling - Fantasy Realm Database', () => {
|
||||
expect(artifactsTable?.schema).toBe(''); // No schema = empty string
|
||||
});
|
||||
|
||||
it('should rename reserved keywords for PostgreSQL', async () => {
|
||||
it('should handle reserved keywords for PostgreSQL', async () => {
|
||||
const dbmlContent = `
|
||||
Table "magic_items" {
|
||||
"id" bigint [pk]
|
||||
@@ -197,10 +197,9 @@ describe('DBML Schema Handling - Fantasy Realm Database', () => {
|
||||
|
||||
const exported = generateDBMLFromDiagram(diagram);
|
||||
|
||||
// For PostgreSQL, keywords should be renamed in export
|
||||
expect(exported.standardDbml).toContain('Order_field');
|
||||
expect(exported.standardDbml).toContain('Yes_field');
|
||||
expect(exported.standardDbml).toContain('No_field');
|
||||
expect(exported.standardDbml).toContain('Order');
|
||||
expect(exported.standardDbml).toContain('Yes');
|
||||
expect(exported.standardDbml).toContain('No');
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -15,6 +15,8 @@ import {
|
||||
type DBCustomType,
|
||||
} from '@/lib/domain/db-custom-type';
|
||||
|
||||
export const defaultDBMLDiagramName = 'DBML Import';
|
||||
|
||||
// Preprocess DBML to handle unsupported features
|
||||
export const preprocessDBML = (content: string): string => {
|
||||
let processed = content;
|
||||
@@ -196,7 +198,7 @@ export const importDBMLToDiagram = async (
|
||||
if (!dbmlContent.trim()) {
|
||||
return {
|
||||
id: generateDiagramId(),
|
||||
name: 'DBML Import',
|
||||
name: defaultDBMLDiagramName,
|
||||
databaseType: options?.databaseType ?? DatabaseType.GENERIC,
|
||||
tables: [],
|
||||
relationships: [],
|
||||
@@ -214,7 +216,7 @@ export const importDBMLToDiagram = async (
|
||||
if (!sanitizedContent.trim()) {
|
||||
return {
|
||||
id: generateDiagramId(),
|
||||
name: 'DBML Import',
|
||||
name: defaultDBMLDiagramName,
|
||||
databaseType: options?.databaseType ?? DatabaseType.GENERIC,
|
||||
tables: [],
|
||||
relationships: [],
|
||||
@@ -229,7 +231,7 @@ export const importDBMLToDiagram = async (
|
||||
if (!parsedData.schemas || parsedData.schemas.length === 0) {
|
||||
return {
|
||||
id: generateDiagramId(),
|
||||
name: 'DBML Import',
|
||||
name: defaultDBMLDiagramName,
|
||||
databaseType: options?.databaseType ?? DatabaseType.GENERIC,
|
||||
tables: [],
|
||||
relationships: [],
|
||||
@@ -480,7 +482,7 @@ export const importDBMLToDiagram = async (
|
||||
}),
|
||||
nullable: !field.not_null,
|
||||
primaryKey: field.pk || false,
|
||||
unique: field.unique || false,
|
||||
unique: field.unique || field.pk || false, // Primary keys are always unique
|
||||
createdAt: Date.now(),
|
||||
characterMaximumLength: field.characterMaximumLength,
|
||||
precision: field.precision,
|
||||
@@ -734,7 +736,7 @@ export const importDBMLToDiagram = async (
|
||||
|
||||
return {
|
||||
id: generateDiagramId(),
|
||||
name: 'DBML Import',
|
||||
name: defaultDBMLDiagramName,
|
||||
databaseType: options?.databaseType ?? DatabaseType.GENERIC,
|
||||
tables,
|
||||
relationships,
|
||||
|
||||
52
src/lib/dbml/dbml-import/verify-dbml.ts
Normal file
52
src/lib/dbml/dbml-import/verify-dbml.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
import { Parser } from '@dbml/core';
|
||||
import { preprocessDBML, sanitizeDBML } from './dbml-import';
|
||||
import type { DBMLError } from './dbml-import-error';
|
||||
import { parseDBMLError } from './dbml-import-error';
|
||||
|
||||
export const verifyDBML = (
|
||||
content: string
|
||||
):
|
||||
| {
|
||||
hasError: true;
|
||||
error: unknown;
|
||||
parsedError?: DBMLError;
|
||||
errorText: string;
|
||||
}
|
||||
| {
|
||||
hasError: false;
|
||||
} => {
|
||||
try {
|
||||
const preprocessedContent = preprocessDBML(content);
|
||||
const sanitizedContent = sanitizeDBML(preprocessedContent);
|
||||
const parser = new Parser();
|
||||
parser.parse(sanitizedContent, 'dbmlv2');
|
||||
} catch (e) {
|
||||
const parsedError = parseDBMLError(e);
|
||||
if (parsedError) {
|
||||
return {
|
||||
hasError: true,
|
||||
parsedError: parsedError,
|
||||
error: e,
|
||||
errorText: parsedError.message,
|
||||
};
|
||||
} else {
|
||||
if (e instanceof Error) {
|
||||
return {
|
||||
hasError: true,
|
||||
error: e,
|
||||
errorText: e.message,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
hasError: true,
|
||||
error: e,
|
||||
errorText: JSON.stringify(e),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
hasError: false,
|
||||
};
|
||||
};
|
||||
77
src/lib/domain/diff/area-diff.ts
Normal file
77
src/lib/domain/diff/area-diff.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
import { z } from 'zod';
|
||||
import type { Area } from '../area';
|
||||
|
||||
/** Area attributes the diff algorithm can report changes for. */
export type AreaDiffAttribute = keyof Pick<
    Area,
    'name' | 'color' | 'x' | 'y' | 'width' | 'height'
>;

// Runtime validator mirroring AreaDiffAttribute — keep in sync with the type.
const areaDiffAttributeSchema: z.ZodType<AreaDiffAttribute> = z.union([
    z.literal('name'),
    z.literal('color'),
    z.literal('x'),
    z.literal('y'),
    z.literal('width'),
    z.literal('height'),
]);

/** A single attribute of an existing area changed between diagrams. */
export interface AreaDiffChanged {
    object: 'area';
    type: 'changed';
    areaId: string;
    attribute: AreaDiffAttribute;
    oldValue?: string | number | null;
    newValue?: string | number | null;
}

export const AreaDiffChangedSchema: z.ZodType<AreaDiffChanged> = z.object({
    object: z.literal('area'),
    type: z.literal('changed'),
    areaId: z.string(),
    attribute: areaDiffAttributeSchema,
    oldValue: z.union([z.string(), z.number(), z.null()]).optional(),
    newValue: z.union([z.string(), z.number(), z.null()]).optional(),
});

/** An area present in the old diagram but absent from the new one. */
export interface AreaDiffRemoved {
    object: 'area';
    type: 'removed';
    areaId: string;
}

export const AreaDiffRemovedSchema: z.ZodType<AreaDiffRemoved> = z.object({
    object: z.literal('area'),
    type: z.literal('removed'),
    areaId: z.string(),
});

/**
 * An area that only exists in the new diagram. Generic over the payload type
 * so callers can carry trimmed/serialized area shapes, not just full Areas.
 */
export interface AreaDiffAdded<T = Area> {
    object: 'area';
    type: 'added';
    areaAdded: T;
}

/** Builds the `added` schema around a caller-supplied area payload schema. */
export const createAreaDiffAddedSchema = <T = Area>(
    areaSchema: z.ZodType<T>
): z.ZodType<AreaDiffAdded<T>> => {
    return z.object({
        object: z.literal('area'),
        type: z.literal('added'),
        areaAdded: areaSchema,
    }) as z.ZodType<AreaDiffAdded<T>>;
};

/** Discriminated union (on `type`) of every possible area diff entry. */
export type AreaDiff<T = Area> =
    | AreaDiffChanged
    | AreaDiffRemoved
    | AreaDiffAdded<T>;

/** Builds the full AreaDiff schema around a caller-supplied area schema. */
export const createAreaDiffSchema = <T = Area>(
    areaSchema: z.ZodType<T>
): z.ZodType<AreaDiff<T>> => {
    return z.union([
        AreaDiffChangedSchema,
        AreaDiffRemovedSchema,
        createAreaDiffAddedSchema(areaSchema),
    ]) as z.ZodType<AreaDiff<T>>;
};
|
||||
883
src/lib/domain/diff/diff-check/__tests__/diff-check.test.ts
Normal file
883
src/lib/domain/diff/diff-check/__tests__/diff-check.test.ts
Normal file
@@ -0,0 +1,883 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { generateDiff } from '../diff-check';
|
||||
import type { Diagram } from '@/lib/domain/diagram';
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import type { DBField } from '@/lib/domain/db-field';
|
||||
import type { DBIndex } from '@/lib/domain/db-index';
|
||||
import type { DBRelationship } from '@/lib/domain/db-relationship';
|
||||
import type { Area } from '@/lib/domain/area';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import type { TableDiffChanged } from '../../table-diff';
|
||||
import type { FieldDiffChanged } from '../../field-diff';
|
||||
import type { AreaDiffChanged } from '../../area-diff';
|
||||
|
||||
// Helper function to create a mock diagram
|
||||
function createMockDiagram(overrides?: Partial<Diagram>): Diagram {
|
||||
return {
|
||||
id: 'diagram-1',
|
||||
name: 'Test Diagram',
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
tables: [],
|
||||
relationships: [],
|
||||
areas: [],
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
// Helper function to create a mock table
|
||||
function createMockTable(overrides?: Partial<DBTable>): DBTable {
|
||||
return {
|
||||
id: 'table-1',
|
||||
name: 'users',
|
||||
fields: [],
|
||||
indexes: [],
|
||||
x: 0,
|
||||
y: 0,
|
||||
...overrides,
|
||||
} as DBTable;
|
||||
}
|
||||
|
||||
// Helper function to create a mock field
|
||||
function createMockField(overrides?: Partial<DBField>): DBField {
|
||||
return {
|
||||
id: 'field-1',
|
||||
name: 'id',
|
||||
type: { id: 'integer', name: 'integer' },
|
||||
primaryKey: false,
|
||||
nullable: true,
|
||||
unique: false,
|
||||
...overrides,
|
||||
} as DBField;
|
||||
}
|
||||
|
||||
// Helper function to create a mock relationship
|
||||
function createMockRelationship(
|
||||
overrides?: Partial<DBRelationship>
|
||||
): DBRelationship {
|
||||
return {
|
||||
id: 'rel-1',
|
||||
sourceTableId: 'table-1',
|
||||
targetTableId: 'table-2',
|
||||
sourceFieldId: 'field-1',
|
||||
targetFieldId: 'field-2',
|
||||
type: 'one-to-many',
|
||||
...overrides,
|
||||
} as DBRelationship;
|
||||
}
|
||||
|
||||
// Helper function to create a mock area
|
||||
function createMockArea(overrides?: Partial<Area>): Area {
|
||||
return {
|
||||
id: 'area-1',
|
||||
name: 'Main Area',
|
||||
x: 0,
|
||||
y: 0,
|
||||
width: 100,
|
||||
height: 100,
|
||||
color: 'blue',
|
||||
...overrides,
|
||||
} as Area;
|
||||
}
|
||||
|
||||
describe('generateDiff', () => {
|
||||
describe('Basic Table Diffing', () => {
|
||||
it('should detect added tables', () => {
|
||||
const oldDiagram = createMockDiagram({ tables: [] });
|
||||
const newDiagram = createMockDiagram({
|
||||
tables: [createMockTable()],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
});
|
||||
|
||||
expect(result.diffMap.size).toBe(1);
|
||||
const diff = result.diffMap.get('table-table-1');
|
||||
expect(diff).toBeDefined();
|
||||
expect(diff?.type).toBe('added');
|
||||
expect(result.changedTables.has('table-1')).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect removed tables', () => {
|
||||
const oldDiagram = createMockDiagram({
|
||||
tables: [createMockTable()],
|
||||
});
|
||||
const newDiagram = createMockDiagram({ tables: [] });
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
});
|
||||
|
||||
expect(result.diffMap.size).toBe(1);
|
||||
const diff = result.diffMap.get('table-table-1');
|
||||
expect(diff).toBeDefined();
|
||||
expect(diff?.type).toBe('removed');
|
||||
expect(result.changedTables.has('table-1')).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect table name changes', () => {
|
||||
const oldDiagram = createMockDiagram({
|
||||
tables: [createMockTable({ name: 'users' })],
|
||||
});
|
||||
const newDiagram = createMockDiagram({
|
||||
tables: [createMockTable({ name: 'customers' })],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
});
|
||||
|
||||
expect(result.diffMap.size).toBe(1);
|
||||
const diff = result.diffMap.get('table-name-table-1');
|
||||
expect(diff).toBeDefined();
|
||||
expect(diff?.type).toBe('changed');
|
||||
expect((diff as TableDiffChanged)?.attribute).toBe('name');
|
||||
});
|
||||
|
||||
it('should detect table position changes', () => {
|
||||
const oldDiagram = createMockDiagram({
|
||||
tables: [createMockTable({ x: 0, y: 0 })],
|
||||
});
|
||||
const newDiagram = createMockDiagram({
|
||||
tables: [createMockTable({ x: 100, y: 200 })],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
options: {
|
||||
attributes: {
|
||||
tables: ['name', 'comments', 'color', 'x', 'y'],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.diffMap.size).toBe(2);
|
||||
expect(result.diffMap.has('table-x-table-1')).toBe(true);
|
||||
expect(result.diffMap.has('table-y-table-1')).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect table width changes', () => {
|
||||
const oldDiagram = createMockDiagram({
|
||||
tables: [createMockTable({ width: 150 })],
|
||||
});
|
||||
const newDiagram = createMockDiagram({
|
||||
tables: [createMockTable({ width: 250 })],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
options: {
|
||||
attributes: {
|
||||
tables: ['width'],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.diffMap.size).toBe(1);
|
||||
const diff = result.diffMap.get('table-width-table-1');
|
||||
expect(diff).toBeDefined();
|
||||
expect(diff?.type).toBe('changed');
|
||||
expect((diff as TableDiffChanged)?.attribute).toBe('width');
|
||||
expect((diff as TableDiffChanged)?.oldValue).toBe(150);
|
||||
expect((diff as TableDiffChanged)?.newValue).toBe(250);
|
||||
});
|
||||
|
||||
it('should detect multiple table dimension changes', () => {
|
||||
const oldDiagram = createMockDiagram({
|
||||
tables: [createMockTable({ x: 0, y: 0, width: 100 })],
|
||||
});
|
||||
const newDiagram = createMockDiagram({
|
||||
tables: [createMockTable({ x: 50, y: 75, width: 200 })],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
options: {
|
||||
attributes: {
|
||||
tables: ['x', 'y', 'width'],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.diffMap.size).toBe(3);
|
||||
expect(result.diffMap.has('table-x-table-1')).toBe(true);
|
||||
expect(result.diffMap.has('table-y-table-1')).toBe(true);
|
||||
expect(result.diffMap.has('table-width-table-1')).toBe(true);
|
||||
|
||||
const widthDiff = result.diffMap.get('table-width-table-1');
|
||||
expect(widthDiff?.type).toBe('changed');
|
||||
expect((widthDiff as TableDiffChanged)?.oldValue).toBe(100);
|
||||
expect((widthDiff as TableDiffChanged)?.newValue).toBe(200);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Field Diffing', () => {
|
||||
it('should detect added fields', () => {
|
||||
const oldDiagram = createMockDiagram({
|
||||
tables: [createMockTable({ fields: [] })],
|
||||
});
|
||||
const newDiagram = createMockDiagram({
|
||||
tables: [
|
||||
createMockTable({
|
||||
fields: [createMockField()],
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
});
|
||||
|
||||
expect(result.diffMap.size).toBe(1);
|
||||
const diff = result.diffMap.get('field-field-1');
|
||||
expect(diff).toBeDefined();
|
||||
expect(diff?.type).toBe('added');
|
||||
expect(result.changedFields.has('field-1')).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect removed fields', () => {
|
||||
const oldDiagram = createMockDiagram({
|
||||
tables: [
|
||||
createMockTable({
|
||||
fields: [createMockField()],
|
||||
}),
|
||||
],
|
||||
});
|
||||
const newDiagram = createMockDiagram({
|
||||
tables: [createMockTable({ fields: [] })],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
});
|
||||
|
||||
expect(result.diffMap.size).toBe(1);
|
||||
const diff = result.diffMap.get('field-field-1');
|
||||
expect(diff).toBeDefined();
|
||||
expect(diff?.type).toBe('removed');
|
||||
});
|
||||
|
||||
it('should detect field type changes', () => {
|
||||
const oldDiagram = createMockDiagram({
|
||||
tables: [
|
||||
createMockTable({
|
||||
fields: [
|
||||
createMockField({
|
||||
type: { id: 'integer', name: 'integer' },
|
||||
}),
|
||||
],
|
||||
}),
|
||||
],
|
||||
});
|
||||
const newDiagram = createMockDiagram({
|
||||
tables: [
|
||||
createMockTable({
|
||||
fields: [
|
||||
createMockField({
|
||||
type: { id: 'varchar', name: 'varchar' },
|
||||
}),
|
||||
],
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
});
|
||||
|
||||
expect(result.diffMap.size).toBe(1);
|
||||
const diff = result.diffMap.get('field-type-field-1');
|
||||
expect(diff).toBeDefined();
|
||||
expect(diff?.type).toBe('changed');
|
||||
expect((diff as FieldDiffChanged)?.attribute).toBe('type');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Relationship Diffing', () => {
|
||||
it('should detect added relationships', () => {
|
||||
const oldDiagram = createMockDiagram({ relationships: [] });
|
||||
const newDiagram = createMockDiagram({
|
||||
relationships: [createMockRelationship()],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
});
|
||||
|
||||
expect(result.diffMap.size).toBe(1);
|
||||
const diff = result.diffMap.get('relationship-rel-1');
|
||||
expect(diff).toBeDefined();
|
||||
expect(diff?.type).toBe('added');
|
||||
});
|
||||
|
||||
it('should detect removed relationships', () => {
|
||||
const oldDiagram = createMockDiagram({
|
||||
relationships: [createMockRelationship()],
|
||||
});
|
||||
const newDiagram = createMockDiagram({ relationships: [] });
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
});
|
||||
|
||||
expect(result.diffMap.size).toBe(1);
|
||||
const diff = result.diffMap.get('relationship-rel-1');
|
||||
expect(diff).toBeDefined();
|
||||
expect(diff?.type).toBe('removed');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Area Diffing', () => {
|
||||
it('should detect added areas when includeAreas is true', () => {
|
||||
const oldDiagram = createMockDiagram({ areas: [] });
|
||||
const newDiagram = createMockDiagram({
|
||||
areas: [createMockArea()],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
options: {
|
||||
includeAreas: true,
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.diffMap.size).toBe(1);
|
||||
const diff = result.diffMap.get('area-area-1');
|
||||
expect(diff).toBeDefined();
|
||||
expect(diff?.type).toBe('added');
|
||||
expect(result.changedAreas.has('area-1')).toBe(true);
|
||||
});
|
||||
|
||||
it('should not detect area changes when includeAreas is false', () => {
|
||||
const oldDiagram = createMockDiagram({ areas: [] });
|
||||
const newDiagram = createMockDiagram({
|
||||
areas: [createMockArea()],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
options: {
|
||||
includeAreas: false,
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.diffMap.size).toBe(0);
|
||||
});
|
||||
|
||||
it('should detect area width changes', () => {
|
||||
const oldDiagram = createMockDiagram({
|
||||
areas: [createMockArea({ width: 100 })],
|
||||
});
|
||||
const newDiagram = createMockDiagram({
|
||||
areas: [createMockArea({ width: 200 })],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
options: {
|
||||
includeAreas: true,
|
||||
attributes: {
|
||||
areas: ['width'],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.diffMap.size).toBe(1);
|
||||
const diff = result.diffMap.get('area-width-area-1');
|
||||
expect(diff).toBeDefined();
|
||||
expect(diff?.type).toBe('changed');
|
||||
expect((diff as AreaDiffChanged)?.attribute).toBe('width');
|
||||
expect((diff as AreaDiffChanged)?.oldValue).toBe(100);
|
||||
expect((diff as AreaDiffChanged)?.newValue).toBe(200);
|
||||
});
|
||||
|
||||
it('should detect area height changes', () => {
|
||||
const oldDiagram = createMockDiagram({
|
||||
areas: [createMockArea({ height: 100 })],
|
||||
});
|
||||
const newDiagram = createMockDiagram({
|
||||
areas: [createMockArea({ height: 300 })],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
options: {
|
||||
includeAreas: true,
|
||||
attributes: {
|
||||
areas: ['height'],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.diffMap.size).toBe(1);
|
||||
const diff = result.diffMap.get('area-height-area-1');
|
||||
expect(diff).toBeDefined();
|
||||
expect(diff?.type).toBe('changed');
|
||||
expect((diff as AreaDiffChanged)?.attribute).toBe('height');
|
||||
expect((diff as AreaDiffChanged)?.oldValue).toBe(100);
|
||||
expect((diff as AreaDiffChanged)?.newValue).toBe(300);
|
||||
});
|
||||
|
||||
it('should detect multiple area dimension changes', () => {
|
||||
const oldDiagram = createMockDiagram({
|
||||
areas: [
|
||||
createMockArea({ x: 0, y: 0, width: 100, height: 100 }),
|
||||
],
|
||||
});
|
||||
const newDiagram = createMockDiagram({
|
||||
areas: [
|
||||
createMockArea({ x: 50, y: 50, width: 200, height: 300 }),
|
||||
],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
options: {
|
||||
includeAreas: true,
|
||||
attributes: {
|
||||
areas: ['x', 'y', 'width', 'height'],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.diffMap.size).toBe(4);
|
||||
expect(result.diffMap.has('area-x-area-1')).toBe(true);
|
||||
expect(result.diffMap.has('area-y-area-1')).toBe(true);
|
||||
expect(result.diffMap.has('area-width-area-1')).toBe(true);
|
||||
expect(result.diffMap.has('area-height-area-1')).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Custom Matchers', () => {
|
||||
it('should use custom table matcher to match by name', () => {
|
||||
const oldDiagram = createMockDiagram({
|
||||
tables: [createMockTable({ id: 'table-1', name: 'users' })],
|
||||
});
|
||||
const newDiagram = createMockDiagram({
|
||||
tables: [createMockTable({ id: 'table-2', name: 'users' })],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
options: {
|
||||
matchers: {
|
||||
table: (table, tables) =>
|
||||
tables.find((t) => t.name === table.name),
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Should not detect any changes since tables match by name
|
||||
expect(result.diffMap.size).toBe(0);
|
||||
});
|
||||
|
||||
it('should detect changes when custom matcher finds no match', () => {
|
||||
const oldDiagram = createMockDiagram({
|
||||
tables: [createMockTable({ id: 'table-1', name: 'users' })],
|
||||
});
|
||||
const newDiagram = createMockDiagram({
|
||||
tables: [createMockTable({ id: 'table-2', name: 'customers' })],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
options: {
|
||||
matchers: {
|
||||
table: (table, tables) =>
|
||||
tables.find((t) => t.name === table.name),
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Should detect both added and removed since names don't match
|
||||
expect(result.diffMap.size).toBe(2);
|
||||
expect(result.diffMap.has('table-table-1')).toBe(true); // removed
|
||||
expect(result.diffMap.has('table-table-2')).toBe(true); // added
|
||||
});
|
||||
|
||||
it('should use custom field matcher to match by name', () => {
|
||||
const field1 = createMockField({
|
||||
id: 'field-1',
|
||||
name: 'email',
|
||||
nullable: true,
|
||||
});
|
||||
const field2 = createMockField({
|
||||
id: 'field-2',
|
||||
name: 'email',
|
||||
nullable: false,
|
||||
});
|
||||
|
||||
const oldDiagram = createMockDiagram({
|
||||
tables: [createMockTable({ id: 'table-1', fields: [field1] })],
|
||||
});
|
||||
const newDiagram = createMockDiagram({
|
||||
tables: [createMockTable({ id: 'table-1', fields: [field2] })],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
options: {
|
||||
matchers: {
|
||||
field: (field, fields) =>
|
||||
fields.find((f) => f.name === field.name),
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// With name-based matching, field-1 should match field-2 by name
|
||||
// and detect the nullable change
|
||||
const nullableChange = result.diffMap.get('field-nullable-field-1');
|
||||
expect(nullableChange).toBeDefined();
|
||||
expect(nullableChange?.type).toBe('changed');
|
||||
expect((nullableChange as FieldDiffChanged)?.attribute).toBe(
|
||||
'nullable'
|
||||
);
|
||||
});
|
||||
|
||||
it('should use case-insensitive custom matcher', () => {
|
||||
const oldDiagram = createMockDiagram({
|
||||
tables: [createMockTable({ id: 'table-1', name: 'Users' })],
|
||||
});
|
||||
const newDiagram = createMockDiagram({
|
||||
tables: [createMockTable({ id: 'table-2', name: 'users' })],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
options: {
|
||||
matchers: {
|
||||
table: (table, tables) =>
|
||||
tables.find(
|
||||
(t) =>
|
||||
t.name.toLowerCase() ===
|
||||
table.name.toLowerCase()
|
||||
),
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// With case-insensitive name matching, the tables are matched
|
||||
// but the name case difference is still detected as a change
|
||||
expect(result.diffMap.size).toBe(1);
|
||||
const nameChange = result.diffMap.get('table-name-table-1');
|
||||
expect(nameChange).toBeDefined();
|
||||
expect(nameChange?.type).toBe('changed');
|
||||
expect((nameChange as TableDiffChanged)?.attribute).toBe('name');
|
||||
expect((nameChange as TableDiffChanged)?.oldValue).toBe('Users');
|
||||
expect((nameChange as TableDiffChanged)?.newValue).toBe('users');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Filtering Options', () => {
|
||||
it('should only check specified change types', () => {
|
||||
const oldDiagram = createMockDiagram({
|
||||
tables: [createMockTable({ id: 'table-1', name: 'users' })],
|
||||
});
|
||||
const newDiagram = createMockDiagram({
|
||||
tables: [createMockTable({ id: 'table-2', name: 'products' })],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
options: {
|
||||
changeTypes: {
|
||||
tables: ['added'], // Only check for added tables
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Should only detect added table (table-2)
|
||||
const addedTables = Array.from(result.diffMap.values()).filter(
|
||||
(diff) => diff.type === 'added' && diff.object === 'table'
|
||||
);
|
||||
expect(addedTables.length).toBe(1);
|
||||
|
||||
// Should not detect removed table (table-1)
|
||||
const removedTables = Array.from(result.diffMap.values()).filter(
|
||||
(diff) => diff.type === 'removed' && diff.object === 'table'
|
||||
);
|
||||
expect(removedTables.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should only check specified attributes', () => {
|
||||
const oldDiagram = createMockDiagram({
|
||||
tables: [
|
||||
createMockTable({
|
||||
id: 'table-1',
|
||||
name: 'users',
|
||||
color: 'blue',
|
||||
comments: 'old comment',
|
||||
}),
|
||||
],
|
||||
});
|
||||
const newDiagram = createMockDiagram({
|
||||
tables: [
|
||||
createMockTable({
|
||||
id: 'table-1',
|
||||
name: 'customers',
|
||||
color: 'red',
|
||||
comments: 'new comment',
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
options: {
|
||||
attributes: {
|
||||
tables: ['name'], // Only check name changes
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Should only detect name change
|
||||
const nameChanges = Array.from(result.diffMap.values()).filter(
|
||||
(diff) =>
|
||||
diff.type === 'changed' &&
|
||||
diff.attribute === 'name' &&
|
||||
diff.object === 'table'
|
||||
);
|
||||
expect(nameChanges.length).toBe(1);
|
||||
|
||||
// Should not detect color or comments changes
|
||||
const otherChanges = Array.from(result.diffMap.values()).filter(
|
||||
(diff) =>
|
||||
diff.type === 'changed' &&
|
||||
(diff.attribute === 'color' ||
|
||||
diff.attribute === 'comments') &&
|
||||
diff.object === 'table'
|
||||
);
|
||||
expect(otherChanges.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should respect include flags', () => {
|
||||
const oldDiagram = createMockDiagram({
|
||||
tables: [
|
||||
createMockTable({
|
||||
fields: [createMockField()],
|
||||
indexes: [{ id: 'idx-1', name: 'idx' } as DBIndex],
|
||||
}),
|
||||
],
|
||||
});
|
||||
const newDiagram = createMockDiagram({
|
||||
tables: [
|
||||
createMockTable({
|
||||
fields: [],
|
||||
indexes: [],
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
options: {
|
||||
includeFields: false,
|
||||
includeIndexes: true,
|
||||
},
|
||||
});
|
||||
|
||||
// Should only detect index removal, not field removal
|
||||
expect(result.diffMap.has('index-idx-1')).toBe(true);
|
||||
expect(result.diffMap.has('field-field-1')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Complex Scenarios', () => {
|
||||
it('should detect all dimensional changes for tables and areas', () => {
|
||||
const oldDiagram = createMockDiagram({
|
||||
tables: [
|
||||
createMockTable({
|
||||
id: 'table-1',
|
||||
x: 0,
|
||||
y: 0,
|
||||
width: 100,
|
||||
}),
|
||||
],
|
||||
areas: [
|
||||
createMockArea({
|
||||
id: 'area-1',
|
||||
x: 0,
|
||||
y: 0,
|
||||
width: 200,
|
||||
height: 150,
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const newDiagram = createMockDiagram({
|
||||
tables: [
|
||||
createMockTable({
|
||||
id: 'table-1',
|
||||
x: 10,
|
||||
y: 20,
|
||||
width: 120,
|
||||
}),
|
||||
],
|
||||
areas: [
|
||||
createMockArea({
|
||||
id: 'area-1',
|
||||
x: 25,
|
||||
y: 35,
|
||||
width: 250,
|
||||
height: 175,
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
options: {
|
||||
includeAreas: true,
|
||||
attributes: {
|
||||
tables: ['x', 'y', 'width'],
|
||||
areas: ['x', 'y', 'width', 'height'],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Table dimensional changes
|
||||
expect(result.diffMap.has('table-x-table-1')).toBe(true);
|
||||
expect(result.diffMap.has('table-y-table-1')).toBe(true);
|
||||
expect(result.diffMap.has('table-width-table-1')).toBe(true);
|
||||
|
||||
// Area dimensional changes
|
||||
expect(result.diffMap.has('area-x-area-1')).toBe(true);
|
||||
expect(result.diffMap.has('area-y-area-1')).toBe(true);
|
||||
expect(result.diffMap.has('area-width-area-1')).toBe(true);
|
||||
expect(result.diffMap.has('area-height-area-1')).toBe(true);
|
||||
|
||||
// Verify the correct values
|
||||
const tableWidthDiff = result.diffMap.get('table-width-table-1');
|
||||
expect((tableWidthDiff as TableDiffChanged)?.oldValue).toBe(100);
|
||||
expect((tableWidthDiff as TableDiffChanged)?.newValue).toBe(120);
|
||||
|
||||
const areaWidthDiff = result.diffMap.get('area-width-area-1');
|
||||
expect((areaWidthDiff as AreaDiffChanged)?.oldValue).toBe(200);
|
||||
expect((areaWidthDiff as AreaDiffChanged)?.newValue).toBe(250);
|
||||
|
||||
const areaHeightDiff = result.diffMap.get('area-height-area-1');
|
||||
expect((areaHeightDiff as AreaDiffChanged)?.oldValue).toBe(150);
|
||||
expect((areaHeightDiff as AreaDiffChanged)?.newValue).toBe(175);
|
||||
});
|
||||
|
||||
it('should handle multiple simultaneous changes', () => {
|
||||
const oldDiagram = createMockDiagram({
|
||||
tables: [
|
||||
createMockTable({
|
||||
id: 'table-1',
|
||||
name: 'users',
|
||||
fields: [
|
||||
createMockField({ id: 'field-1', name: 'id' }),
|
||||
createMockField({ id: 'field-2', name: 'email' }),
|
||||
],
|
||||
}),
|
||||
createMockTable({
|
||||
id: 'table-2',
|
||||
name: 'products',
|
||||
}),
|
||||
],
|
||||
relationships: [createMockRelationship()],
|
||||
});
|
||||
|
||||
const newDiagram = createMockDiagram({
|
||||
tables: [
|
||||
createMockTable({
|
||||
id: 'table-1',
|
||||
name: 'customers', // Changed name
|
||||
fields: [
|
||||
createMockField({ id: 'field-1', name: 'id' }),
|
||||
// Removed field-2
|
||||
createMockField({ id: 'field-3', name: 'name' }), // Added field
|
||||
],
|
||||
}),
|
||||
// Removed table-2
|
||||
createMockTable({
|
||||
id: 'table-3',
|
||||
name: 'orders', // Added table
|
||||
}),
|
||||
],
|
||||
relationships: [], // Removed relationship
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: oldDiagram,
|
||||
newDiagram,
|
||||
});
|
||||
|
||||
// Verify all changes are detected
|
||||
expect(result.diffMap.has('table-name-table-1')).toBe(true); // Table name change
|
||||
expect(result.diffMap.has('field-field-2')).toBe(true); // Removed field
|
||||
expect(result.diffMap.has('field-field-3')).toBe(true); // Added field
|
||||
expect(result.diffMap.has('table-table-2')).toBe(true); // Removed table
|
||||
expect(result.diffMap.has('table-table-3')).toBe(true); // Added table
|
||||
expect(result.diffMap.has('relationship-rel-1')).toBe(true); // Removed relationship
|
||||
});
|
||||
|
||||
it('should handle empty diagrams', () => {
|
||||
const emptyDiagram1 = createMockDiagram();
|
||||
const emptyDiagram2 = createMockDiagram();
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: emptyDiagram1,
|
||||
newDiagram: emptyDiagram2,
|
||||
});
|
||||
|
||||
expect(result.diffMap.size).toBe(0);
|
||||
expect(result.changedTables.size).toBe(0);
|
||||
expect(result.changedFields.size).toBe(0);
|
||||
expect(result.changedAreas.size).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle diagrams with undefined collections', () => {
|
||||
const diagram1 = createMockDiagram({
|
||||
tables: undefined,
|
||||
relationships: undefined,
|
||||
areas: undefined,
|
||||
});
|
||||
const diagram2 = createMockDiagram({
|
||||
tables: [createMockTable({ id: 'table-1' })],
|
||||
relationships: [createMockRelationship({ id: 'rel-1' })],
|
||||
areas: [createMockArea({ id: 'area-1' })],
|
||||
});
|
||||
|
||||
const result = generateDiff({
|
||||
diagram: diagram1,
|
||||
newDiagram: diagram2,
|
||||
options: {
|
||||
includeAreas: true,
|
||||
},
|
||||
});
|
||||
|
||||
// Should detect all as added
|
||||
expect(result.diffMap.has('table-table-1')).toBe(true);
|
||||
expect(result.diffMap.has('relationship-rel-1')).toBe(true);
|
||||
expect(result.diffMap.has('area-area-1')).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
File diff suppressed because it is too large
Load Diff
@@ -8,36 +8,43 @@ import type { RelationshipDiff } from './relationship-diff';
|
||||
import { createRelationshipDiffSchema } from './relationship-diff';
|
||||
import type { TableDiff } from './table-diff';
|
||||
import { createTableDiffSchema } from './table-diff';
|
||||
import type { DBField, DBIndex, DBRelationship, DBTable } from '..';
|
||||
import type { AreaDiff } from './area-diff';
|
||||
import { createAreaDiffSchema } from './area-diff';
|
||||
import type { DBField, DBIndex, DBRelationship, DBTable, Area } from '..';
|
||||
|
||||
export type ChartDBDiff<
|
||||
TTable = DBTable,
|
||||
TField = DBField,
|
||||
TIndex = DBIndex,
|
||||
TRelationship = DBRelationship,
|
||||
TArea = Area,
|
||||
> =
|
||||
| TableDiff<TTable>
|
||||
| FieldDiff<TField>
|
||||
| IndexDiff<TIndex>
|
||||
| RelationshipDiff<TRelationship>;
|
||||
| RelationshipDiff<TRelationship>
|
||||
| AreaDiff<TArea>;
|
||||
|
||||
export const createChartDBDiffSchema = <
|
||||
TTable = DBTable,
|
||||
TField = DBField,
|
||||
TIndex = DBIndex,
|
||||
TRelationship = DBRelationship,
|
||||
TArea = Area,
|
||||
>(
|
||||
tableSchema: z.ZodType<TTable>,
|
||||
fieldSchema: z.ZodType<TField>,
|
||||
indexSchema: z.ZodType<TIndex>,
|
||||
relationshipSchema: z.ZodType<TRelationship>
|
||||
): z.ZodType<ChartDBDiff<TTable, TField, TIndex, TRelationship>> => {
|
||||
relationshipSchema: z.ZodType<TRelationship>,
|
||||
areaSchema: z.ZodType<TArea>
|
||||
): z.ZodType<ChartDBDiff<TTable, TField, TIndex, TRelationship, TArea>> => {
|
||||
return z.union([
|
||||
createTableDiffSchema(tableSchema),
|
||||
createFieldDiffSchema(fieldSchema),
|
||||
createIndexDiffSchema(indexSchema),
|
||||
createRelationshipDiffSchema(relationshipSchema),
|
||||
]) as z.ZodType<ChartDBDiff<TTable, TField, TIndex, TRelationship>>;
|
||||
createAreaDiffSchema(areaSchema),
|
||||
]) as z.ZodType<ChartDBDiff<TTable, TField, TIndex, TRelationship, TArea>>;
|
||||
};
|
||||
|
||||
export type DiffMap<
|
||||
@@ -45,18 +52,21 @@ export type DiffMap<
|
||||
TField = DBField,
|
||||
TIndex = DBIndex,
|
||||
TRelationship = DBRelationship,
|
||||
> = Map<string, ChartDBDiff<TTable, TField, TIndex, TRelationship>>;
|
||||
TArea = Area,
|
||||
> = Map<string, ChartDBDiff<TTable, TField, TIndex, TRelationship, TArea>>;
|
||||
|
||||
export type DiffObject<
|
||||
TTable = DBTable,
|
||||
TField = DBField,
|
||||
TIndex = DBIndex,
|
||||
TRelationship = DBRelationship,
|
||||
TArea = Area,
|
||||
> =
|
||||
| TableDiff<TTable>['object']
|
||||
| FieldDiff<TField>['object']
|
||||
| IndexDiff<TIndex>['object']
|
||||
| RelationshipDiff<TRelationship>['object'];
|
||||
| RelationshipDiff<TRelationship>['object']
|
||||
| AreaDiff<TArea>['object'];
|
||||
|
||||
type ExtractDiffKind<T> = T extends { object: infer O; type: infer Type }
|
||||
? T extends { attribute: infer A }
|
||||
@@ -69,16 +79,18 @@ export type DiffKind<
|
||||
TField = DBField,
|
||||
TIndex = DBIndex,
|
||||
TRelationship = DBRelationship,
|
||||
> = ExtractDiffKind<ChartDBDiff<TTable, TField, TIndex, TRelationship>>;
|
||||
TArea = Area,
|
||||
> = ExtractDiffKind<ChartDBDiff<TTable, TField, TIndex, TRelationship, TArea>>;
|
||||
|
||||
export const isDiffOfKind = <
|
||||
TTable = DBTable,
|
||||
TField = DBField,
|
||||
TIndex = DBIndex,
|
||||
TRelationship = DBRelationship,
|
||||
TArea = Area,
|
||||
>(
|
||||
diff: ChartDBDiff<TTable, TField, TIndex, TRelationship>,
|
||||
kind: DiffKind<TTable, TField, TIndex, TRelationship>
|
||||
diff: ChartDBDiff<TTable, TField, TIndex, TRelationship, TArea>,
|
||||
kind: DiffKind<TTable, TField, TIndex, TRelationship, TArea>
|
||||
): boolean => {
|
||||
if ('attribute' in kind) {
|
||||
return (
|
||||
|
||||
@@ -3,13 +3,16 @@ import type { DBTable } from '../db-table';
|
||||
|
||||
export type TableDiffAttribute = keyof Pick<
|
||||
DBTable,
|
||||
'name' | 'comments' | 'color'
|
||||
'name' | 'comments' | 'color' | 'x' | 'y' | 'width'
|
||||
>;
|
||||
|
||||
const tableDiffAttributeSchema: z.ZodType<TableDiffAttribute> = z.union([
|
||||
z.literal('name'),
|
||||
z.literal('comments'),
|
||||
z.literal('color'),
|
||||
z.literal('x'),
|
||||
z.literal('y'),
|
||||
z.literal('width'),
|
||||
]);
|
||||
|
||||
export interface TableDiffChanged {
|
||||
@@ -17,8 +20,8 @@ export interface TableDiffChanged {
|
||||
type: 'changed';
|
||||
tableId: string;
|
||||
attribute: TableDiffAttribute;
|
||||
oldValue?: string | null;
|
||||
newValue?: string | null;
|
||||
oldValue?: string | number | null;
|
||||
newValue?: string | number | null;
|
||||
}
|
||||
|
||||
export const TableDiffChangedSchema: z.ZodType<TableDiffChanged> = z.object({
|
||||
@@ -26,8 +29,8 @@ export const TableDiffChangedSchema: z.ZodType<TableDiffChanged> = z.object({
|
||||
type: z.literal('changed'),
|
||||
tableId: z.string(),
|
||||
attribute: tableDiffAttributeSchema,
|
||||
oldValue: z.string().or(z.null()).optional(),
|
||||
newValue: z.string().or(z.null()).optional(),
|
||||
oldValue: z.union([z.string(), z.number(), z.null()]).optional(),
|
||||
newValue: z.union([z.string(), z.number(), z.null()]).optional(),
|
||||
});
|
||||
|
||||
export interface TableDiffRemoved {
|
||||
|
||||
157
src/lib/import-method/__tests__/detect-import-type.test.ts
Normal file
157
src/lib/import-method/__tests__/detect-import-type.test.ts
Normal file
@@ -0,0 +1,157 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { detectImportMethod } from '../detect-import-method';
|
||||
|
||||
describe('detectImportMethod', () => {
|
||||
describe('DBML detection', () => {
|
||||
it('should detect DBML with Table definition', () => {
|
||||
const content = `Table users {
|
||||
id int [pk]
|
||||
name varchar
|
||||
}`;
|
||||
expect(detectImportMethod(content)).toBe('dbml');
|
||||
});
|
||||
|
||||
it('should detect DBML with Ref definition', () => {
|
||||
const content = `Table posts {
|
||||
user_id int
|
||||
}
|
||||
|
||||
Ref: posts.user_id > users.id`;
|
||||
expect(detectImportMethod(content)).toBe('dbml');
|
||||
});
|
||||
|
||||
it('should detect DBML with pk attribute', () => {
|
||||
const content = `id integer [pk]`;
|
||||
expect(detectImportMethod(content)).toBe('dbml');
|
||||
});
|
||||
|
||||
it('should detect DBML with ref attribute', () => {
|
||||
const content = `user_id int [ref: > users.id]`;
|
||||
expect(detectImportMethod(content)).toBe('dbml');
|
||||
});
|
||||
|
||||
it('should detect DBML with Enum definition', () => {
|
||||
const content = `Enum status {
|
||||
active
|
||||
inactive
|
||||
}`;
|
||||
expect(detectImportMethod(content)).toBe('dbml');
|
||||
});
|
||||
|
||||
it('should detect DBML with TableGroup', () => {
|
||||
const content = `TableGroup commerce {
|
||||
users
|
||||
orders
|
||||
}`;
|
||||
expect(detectImportMethod(content)).toBe('dbml');
|
||||
});
|
||||
|
||||
it('should detect DBML with Note', () => {
|
||||
const content = `Note project_note {
|
||||
'This is a note about the project'
|
||||
}`;
|
||||
expect(detectImportMethod(content)).toBe('dbml');
|
||||
});
|
||||
|
||||
it('should prioritize DBML over SQL when both patterns exist', () => {
|
||||
const content = `CREATE TABLE test (id int);
|
||||
Table users {
|
||||
id int [pk]
|
||||
}`;
|
||||
expect(detectImportMethod(content)).toBe('dbml');
|
||||
});
|
||||
});
|
||||
|
||||
describe('SQL DDL detection', () => {
|
||||
it('should detect CREATE TABLE statement', () => {
|
||||
const content = `CREATE TABLE users (
|
||||
id INT PRIMARY KEY,
|
||||
name VARCHAR(255)
|
||||
);`;
|
||||
expect(detectImportMethod(content)).toBe('ddl');
|
||||
});
|
||||
|
||||
it('should detect ALTER TABLE statement', () => {
|
||||
const content = `ALTER TABLE users ADD COLUMN email VARCHAR(255);`;
|
||||
expect(detectImportMethod(content)).toBe('ddl');
|
||||
});
|
||||
|
||||
it('should detect DROP TABLE statement', () => {
|
||||
const content = `DROP TABLE IF EXISTS users;`;
|
||||
expect(detectImportMethod(content)).toBe('ddl');
|
||||
});
|
||||
|
||||
it('should detect CREATE INDEX statement', () => {
|
||||
const content = `CREATE INDEX idx_users_email ON users(email);`;
|
||||
expect(detectImportMethod(content)).toBe('ddl');
|
||||
});
|
||||
|
||||
it('should detect multiple DDL statements', () => {
|
||||
const content = `CREATE TABLE users (id INT);
|
||||
CREATE TABLE posts (id INT);
|
||||
ALTER TABLE posts ADD CONSTRAINT fk_user FOREIGN KEY (user_id) REFERENCES users(id);`;
|
||||
expect(detectImportMethod(content)).toBe('ddl');
|
||||
});
|
||||
|
||||
it('should detect DDL case-insensitively', () => {
|
||||
const content = `create table users (id int);`;
|
||||
expect(detectImportMethod(content)).toBe('ddl');
|
||||
});
|
||||
});
|
||||
|
||||
describe('JSON detection', () => {
|
||||
it('should detect JSON object', () => {
|
||||
const content = `{
|
||||
"tables": [],
|
||||
"relationships": []
|
||||
}`;
|
||||
expect(detectImportMethod(content)).toBe('query');
|
||||
});
|
||||
|
||||
it('should detect JSON array', () => {
|
||||
const content = `[
|
||||
{"name": "users"},
|
||||
{"name": "posts"}
|
||||
]`;
|
||||
expect(detectImportMethod(content)).toBe('query');
|
||||
});
|
||||
|
||||
it('should detect minified JSON', () => {
|
||||
const content = `{"tables":[],"relationships":[]}`;
|
||||
expect(detectImportMethod(content)).toBe('query');
|
||||
});
|
||||
|
||||
it('should detect JSON with whitespace', () => {
|
||||
const content = ` {
|
||||
"data": true
|
||||
} `;
|
||||
expect(detectImportMethod(content)).toBe('query');
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should return null for empty content', () => {
|
||||
expect(detectImportMethod('')).toBeNull();
|
||||
expect(detectImportMethod(' ')).toBeNull();
|
||||
expect(detectImportMethod('\n\n')).toBeNull();
|
||||
});
|
||||
|
||||
it('should return null for unrecognized content', () => {
|
||||
const content = `This is just some random text
|
||||
that doesn't match any pattern`;
|
||||
expect(detectImportMethod(content)).toBeNull();
|
||||
});
|
||||
|
||||
it('should handle content with special characters', () => {
|
||||
const content = `Table users {
|
||||
name varchar // Special chars: áéíóú
|
||||
}`;
|
||||
expect(detectImportMethod(content)).toBe('dbml');
|
||||
});
|
||||
|
||||
it('should handle malformed JSON gracefully', () => {
|
||||
const content = `{ "incomplete": `;
|
||||
expect(detectImportMethod(content)).toBeNull();
|
||||
});
|
||||
});
|
||||
});
|
||||
59
src/lib/import-method/detect-import-method.ts
Normal file
59
src/lib/import-method/detect-import-method.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
import type { ImportMethod } from './import-method';
|
||||
|
||||
export const detectImportMethod = (content: string): ImportMethod | null => {
|
||||
if (!content || content.trim().length === 0) return null;
|
||||
|
||||
const upperContent = content.toUpperCase();
|
||||
|
||||
// Check for DBML patterns first (case sensitive)
|
||||
const dbmlPatterns = [
|
||||
/^Table\s+\w+\s*{/m,
|
||||
/^Ref:\s*\w+/m,
|
||||
/^Enum\s+\w+\s*{/m,
|
||||
/^TableGroup\s+/m,
|
||||
/^Note\s+\w+\s*{/m,
|
||||
/\[pk\]/,
|
||||
/\[ref:\s*[<>-]/,
|
||||
];
|
||||
|
||||
const hasDBMLPatterns = dbmlPatterns.some((pattern) =>
|
||||
pattern.test(content)
|
||||
);
|
||||
if (hasDBMLPatterns) return 'dbml';
|
||||
|
||||
// Common SQL DDL keywords
|
||||
const ddlKeywords = [
|
||||
'CREATE TABLE',
|
||||
'ALTER TABLE',
|
||||
'DROP TABLE',
|
||||
'CREATE INDEX',
|
||||
'CREATE VIEW',
|
||||
'CREATE PROCEDURE',
|
||||
'CREATE FUNCTION',
|
||||
'CREATE SCHEMA',
|
||||
'CREATE DATABASE',
|
||||
];
|
||||
|
||||
// Check for SQL DDL patterns
|
||||
const hasDDLKeywords = ddlKeywords.some((keyword) =>
|
||||
upperContent.includes(keyword)
|
||||
);
|
||||
if (hasDDLKeywords) return 'ddl';
|
||||
|
||||
// Check if it looks like JSON
|
||||
try {
|
||||
// Just check structure, don't need full parse for detection
|
||||
if (
|
||||
(content.trim().startsWith('{') && content.trim().endsWith('}')) ||
|
||||
(content.trim().startsWith('[') && content.trim().endsWith(']'))
|
||||
) {
|
||||
return 'query';
|
||||
}
|
||||
} catch (error) {
|
||||
// Not valid JSON, might be partial
|
||||
console.error('Error detecting content type:', error);
|
||||
}
|
||||
|
||||
// If we can't confidently detect, return null
|
||||
return null;
|
||||
};
|
||||
1
src/lib/import-method/import-method.ts
Normal file
1
src/lib/import-method/import-method.ts
Normal file
@@ -0,0 +1 @@
|
||||
export type ImportMethod = 'query' | 'ddl' | 'dbml';
|
||||
687
src/lib/utils/__tests__/apply-ids.test.ts
Normal file
687
src/lib/utils/__tests__/apply-ids.test.ts
Normal file
@@ -0,0 +1,687 @@
|
||||
import { describe, expect, it } from 'vitest';
|
||||
import { applyIds } from '../apply-ids';
|
||||
import {
|
||||
DatabaseType,
|
||||
DBCustomTypeKind,
|
||||
type Diagram,
|
||||
type DBTable,
|
||||
type DBField,
|
||||
type DBIndex,
|
||||
type DBRelationship,
|
||||
type DBDependency,
|
||||
type DBCustomType,
|
||||
} from '../../domain';
|
||||
|
||||
describe('applyIds', () => {
|
||||
const createBaseDiagram = (overrides?: Partial<Diagram>): Diagram => ({
|
||||
id: 'diagram1',
|
||||
name: 'Test Diagram',
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
...overrides,
|
||||
});
|
||||
|
||||
const createTable = (overrides: Partial<DBTable>): DBTable => ({
|
||||
id: 'table-1',
|
||||
name: 'table',
|
||||
schema: 'public',
|
||||
x: 0,
|
||||
y: 0,
|
||||
fields: [],
|
||||
indexes: [],
|
||||
color: '#000000',
|
||||
comments: null,
|
||||
isView: false,
|
||||
createdAt: Date.now(),
|
||||
...overrides,
|
||||
});
|
||||
|
||||
const createField = (overrides: Partial<DBField>): DBField => ({
|
||||
id: 'field-1',
|
||||
name: 'field',
|
||||
type: { id: 'integer', name: 'integer' },
|
||||
primaryKey: false,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
comments: null,
|
||||
collation: null,
|
||||
createdAt: Date.now(),
|
||||
...overrides,
|
||||
});
|
||||
|
||||
const createIndex = (overrides: Partial<DBIndex>): DBIndex => ({
|
||||
id: 'index-1',
|
||||
name: 'index',
|
||||
unique: false,
|
||||
fieldIds: [],
|
||||
createdAt: Date.now(),
|
||||
...overrides,
|
||||
});
|
||||
|
||||
const createRelationship = (
|
||||
overrides: Partial<DBRelationship>
|
||||
): DBRelationship => ({
|
||||
id: 'rel-1',
|
||||
name: 'relationship',
|
||||
sourceTableId: 'table-1',
|
||||
sourceFieldId: 'field-1',
|
||||
targetTableId: 'table-2',
|
||||
targetFieldId: 'field-2',
|
||||
sourceCardinality: 'many',
|
||||
targetCardinality: 'one',
|
||||
createdAt: Date.now(),
|
||||
...overrides,
|
||||
});
|
||||
|
||||
const createDependency = (
|
||||
overrides: Partial<DBDependency>
|
||||
): DBDependency => ({
|
||||
id: 'dep-1',
|
||||
tableId: 'table-1',
|
||||
dependentTableId: 'table-2',
|
||||
createdAt: Date.now(),
|
||||
...overrides,
|
||||
});
|
||||
|
||||
const createCustomType = (
|
||||
overrides: Partial<DBCustomType>
|
||||
): DBCustomType => ({
|
||||
id: 'type-1',
|
||||
name: 'custom_type',
|
||||
schema: 'public',
|
||||
kind: DBCustomTypeKind.enum,
|
||||
values: [],
|
||||
...overrides,
|
||||
});
|
||||
|
||||
describe('table ID mapping', () => {
|
||||
it('should preserve table IDs when tables match by name and schema', () => {
|
||||
const sourceDiagram = createBaseDiagram({
|
||||
tables: [
|
||||
createTable({
|
||||
id: 'source-table-1',
|
||||
name: 'users',
|
||||
schema: 'public',
|
||||
}),
|
||||
createTable({
|
||||
id: 'source-table-2',
|
||||
name: 'posts',
|
||||
schema: 'public',
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const targetDiagram = createBaseDiagram({
|
||||
tables: [
|
||||
createTable({
|
||||
id: 'target-table-1',
|
||||
name: 'users',
|
||||
schema: 'public',
|
||||
x: 100,
|
||||
y: 100,
|
||||
color: '#ff0000',
|
||||
comments: 'Users table',
|
||||
}),
|
||||
createTable({
|
||||
id: 'target-table-2',
|
||||
name: 'posts',
|
||||
schema: 'public',
|
||||
x: 200,
|
||||
y: 200,
|
||||
color: '#00ff00',
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const result = applyIds({ sourceDiagram, targetDiagram });
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.tables?.[0].id).toBe('source-table-1');
|
||||
expect(result.tables?.[0].name).toBe('users');
|
||||
expect(result.tables?.[0].x).toBe(100); // Should keep target's position
|
||||
expect(result.tables?.[0].color).toBe('#ff0000'); // Should keep target's color
|
||||
expect(result.tables?.[1].id).toBe('source-table-2');
|
||||
expect(result.tables?.[1].name).toBe('posts');
|
||||
});
|
||||
|
||||
it('should keep target table IDs when no matching source table exists', () => {
|
||||
const sourceDiagram = createBaseDiagram({
|
||||
tables: [
|
||||
createTable({
|
||||
id: 'source-table-1',
|
||||
name: 'users',
|
||||
schema: 'public',
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const targetDiagram = createBaseDiagram({
|
||||
tables: [
|
||||
createTable({
|
||||
id: 'target-table-1',
|
||||
name: 'orders',
|
||||
schema: 'public',
|
||||
x: 100,
|
||||
y: 100,
|
||||
color: '#ff0000',
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const result = applyIds({ sourceDiagram, targetDiagram });
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables?.[0].id).toBe('target-table-1'); // Should keep target ID
|
||||
expect(result.tables?.[0].name).toBe('orders');
|
||||
});
|
||||
});
|
||||
|
||||
// Fields are matched by name within a name/schema-matched table; matched
// fields take the source ID, while all other target attributes (type,
// nullable, …) are preserved.
describe('field ID mapping', () => {
    it('should preserve field IDs when fields match by name within the same table', () => {
        const sourceDiagram = createBaseDiagram({
            tables: [
                createTable({
                    id: 'source-table-1',
                    name: 'users',
                    schema: 'public',
                    fields: [
                        createField({
                            id: 'source-field-1',
                            name: 'id',
                            type: { id: 'integer', name: 'integer' },
                            primaryKey: true,
                            nullable: false,
                            unique: true,
                        }),
                        createField({
                            id: 'source-field-2',
                            name: 'email',
                            type: { id: 'varchar', name: 'varchar' },
                            primaryKey: false,
                            nullable: false,
                            unique: true,
                        }),
                    ],
                }),
            ],
        });

        const targetDiagram = createBaseDiagram({
            tables: [
                createTable({
                    id: 'target-table-1',
                    name: 'users',
                    schema: 'public',
                    x: 100,
                    y: 100,
                    color: '#ff0000',
                    fields: [
                        createField({
                            id: 'target-field-1',
                            name: 'id',
                            type: { id: 'bigint', name: 'bigint' },
                            primaryKey: true,
                            nullable: false,
                            unique: true,
                            comments: 'Primary key',
                        }),
                        createField({
                            id: 'target-field-2',
                            name: 'email',
                            type: { id: 'text', name: 'text' },
                            primaryKey: false,
                            nullable: true,
                            unique: false,
                        }),
                    ],
                }),
            ],
        });

        const result = applyIds({ sourceDiagram, targetDiagram });

        expect(result.tables?.[0].fields).toHaveLength(2);
        expect(result.tables?.[0].fields[0].id).toBe('source-field-1');
        expect(result.tables?.[0].fields[0].name).toBe('id');
        expect(result.tables?.[0].fields[0].type.id).toBe('bigint'); // Should keep target's type
        expect(result.tables?.[0].fields[1].id).toBe('source-field-2');
        expect(result.tables?.[0].fields[1].name).toBe('email');
        expect(result.tables?.[0].fields[1].nullable).toBe(true); // Should keep target's nullable
    });
});
|
||||
|
||||
// Indexes are matched by name within a matched table; the matched index takes
// the source ID and its fieldIds are rewritten through the field ID mapping,
// while other target attributes (e.g. unique) are preserved.
describe('index ID mapping', () => {
    it('should preserve index IDs and update field references', () => {
        const sourceDiagram = createBaseDiagram({
            tables: [
                createTable({
                    id: 'source-table-1',
                    name: 'users',
                    schema: 'public',
                    fields: [
                        createField({
                            id: 'source-field-1',
                            name: 'email',
                            type: { id: 'varchar', name: 'varchar' },
                        }),
                    ],
                    indexes: [
                        createIndex({
                            id: 'source-index-1',
                            name: 'idx_email',
                            unique: true,
                            fieldIds: ['source-field-1'],
                        }),
                    ],
                }),
            ],
        });

        const targetDiagram = createBaseDiagram({
            tables: [
                createTable({
                    id: 'target-table-1',
                    name: 'users',
                    schema: 'public',
                    x: 100,
                    y: 100,
                    color: '#ff0000',
                    fields: [
                        createField({
                            id: 'target-field-1',
                            name: 'email',
                            type: { id: 'text', name: 'text' },
                        }),
                    ],
                    indexes: [
                        createIndex({
                            id: 'target-index-1',
                            name: 'idx_email',
                            unique: false,
                            fieldIds: ['target-field-1'],
                        }),
                    ],
                }),
            ],
        });

        const result = applyIds({ sourceDiagram, targetDiagram });

        expect(result.tables?.[0].indexes).toHaveLength(1);
        expect(result.tables?.[0].indexes[0].id).toBe('source-index-1');
        expect(result.tables?.[0].indexes[0].fieldIds).toEqual([
            'source-field-1',
        ]); // Should update field reference
        expect(result.tables?.[0].indexes[0].unique).toBe(false); // Should keep target's unique setting
    });
});
|
||||
|
||||
// Relationships are matched by name; the matched relationship takes the
// source ID, and its table/field references are rewritten through the
// table and field ID mappings.
describe('relationship ID mapping', () => {
    it('should preserve relationship IDs and update table/field references', () => {
        const sourceDiagram = createBaseDiagram({
            tables: [
                createTable({
                    id: 'source-table-1',
                    name: 'users',
                    schema: 'public',
                    fields: [
                        createField({
                            id: 'source-field-1',
                            name: 'id',
                            type: { id: 'integer', name: 'integer' },
                            primaryKey: true,
                            unique: true,
                        }),
                    ],
                }),
                createTable({
                    id: 'source-table-2',
                    name: 'posts',
                    schema: 'public',
                    fields: [
                        createField({
                            id: 'source-field-2',
                            name: 'user_id',
                            type: { id: 'integer', name: 'integer' },
                        }),
                    ],
                }),
            ],
            relationships: [
                createRelationship({
                    id: 'source-rel-1',
                    name: 'fk_posts_users',
                    sourceTableId: 'source-table-2',
                    sourceFieldId: 'source-field-2',
                    targetTableId: 'source-table-1',
                    targetFieldId: 'source-field-1',
                }),
            ],
        });

        const targetDiagram = createBaseDiagram({
            tables: [
                createTable({
                    id: 'target-table-1',
                    name: 'users',
                    schema: 'public',
                    x: 100,
                    y: 100,
                    color: '#ff0000',
                    fields: [
                        createField({
                            id: 'target-field-1',
                            name: 'id',
                            type: { id: 'bigint', name: 'bigint' },
                            primaryKey: true,
                            unique: true,
                        }),
                    ],
                }),
                createTable({
                    id: 'target-table-2',
                    name: 'posts',
                    schema: 'public',
                    x: 200,
                    y: 200,
                    color: '#00ff00',
                    fields: [
                        createField({
                            id: 'target-field-2',
                            name: 'user_id',
                            type: { id: 'bigint', name: 'bigint' },
                            nullable: true,
                        }),
                    ],
                }),
            ],
            relationships: [
                createRelationship({
                    id: 'target-rel-1',
                    name: 'fk_posts_users',
                    sourceTableId: 'target-table-2',
                    sourceFieldId: 'target-field-2',
                    targetTableId: 'target-table-1',
                    targetFieldId: 'target-field-1',
                }),
            ],
        });

        const result = applyIds({ sourceDiagram, targetDiagram });

        expect(result.relationships).toHaveLength(1);
        expect(result.relationships?.[0].id).toBe('source-rel-1');
        expect(result.relationships?.[0].sourceTableId).toBe(
            'source-table-2'
        );
        expect(result.relationships?.[0].sourceFieldId).toBe(
            'source-field-2'
        );
        expect(result.relationships?.[0].targetTableId).toBe(
            'source-table-1'
        );
        expect(result.relationships?.[0].targetFieldId).toBe(
            'source-field-1'
        );
    });
});
|
||||
|
||||
// Dependencies are matched by the (table, dependentTable) name/schema pair;
// the matched dependency takes the source ID and its table references are
// rewritten through the table ID mapping.
describe('dependency ID mapping', () => {
    it('should preserve dependency IDs and update table references', () => {
        const sourceDiagram = createBaseDiagram({
            tables: [
                createTable({
                    id: 'source-table-1',
                    name: 'users',
                    schema: 'public',
                }),
                createTable({
                    id: 'source-table-2',
                    name: 'user_view',
                    schema: 'public',
                    isView: true,
                }),
            ],
            dependencies: [
                createDependency({
                    id: 'source-dep-1',
                    tableId: 'source-table-2',
                    dependentTableId: 'source-table-1',
                }),
            ],
        });

        const targetDiagram = createBaseDiagram({
            tables: [
                createTable({
                    id: 'target-table-1',
                    name: 'users',
                    schema: 'public',
                    x: 100,
                    y: 100,
                    color: '#ff0000',
                }),
                createTable({
                    id: 'target-table-2',
                    name: 'user_view',
                    schema: 'public',
                    x: 200,
                    y: 200,
                    color: '#00ff00',
                    isView: true,
                }),
            ],
            dependencies: [
                createDependency({
                    id: 'target-dep-1',
                    tableId: 'target-table-2',
                    dependentTableId: 'target-table-1',
                }),
            ],
        });

        const result = applyIds({ sourceDiagram, targetDiagram });

        expect(result.dependencies).toHaveLength(1);
        expect(result.dependencies?.[0].id).toBe('source-dep-1');
        expect(result.dependencies?.[0].tableId).toBe('source-table-2');
        expect(result.dependencies?.[0].dependentTableId).toBe(
            'source-table-1'
        );
    });
});
|
||||
|
||||
// Custom types are matched by name + schema; the matched type takes the
// source ID while the target's values are preserved.
describe('custom type ID mapping', () => {
    it('should preserve custom type IDs when types match by name and schema', () => {
        const sourceDiagram = createBaseDiagram({
            customTypes: [
                createCustomType({
                    id: 'source-type-1',
                    name: 'user_role',
                    schema: 'public',
                    values: ['admin', 'user', 'guest'],
                }),
            ],
        });

        const targetDiagram = createBaseDiagram({
            customTypes: [
                createCustomType({
                    id: 'target-type-1',
                    name: 'user_role',
                    schema: 'public',
                    values: ['admin', 'user', 'guest', 'moderator'],
                }),
            ],
        });

        const result = applyIds({ sourceDiagram, targetDiagram });

        expect(result.customTypes).toHaveLength(1);
        expect(result.customTypes?.[0].id).toBe('source-type-1');
        expect(result.customTypes?.[0].values).toEqual([
            'admin',
            'user',
            'guest',
            'moderator',
        ]); // Should keep target's values
    });
});
|
||||
|
||||
// End-to-end scenarios: partial matches, schema mismatches, and degenerate
// (empty / fully unmatched) inputs.
describe('complex scenarios', () => {
    // Mixed case: one table/field pair matches (gets source IDs), the rest
    // keep their target IDs.
    it('should handle partial matches correctly', () => {
        const sourceDiagram = createBaseDiagram({
            tables: [
                createTable({
                    id: 'source-table-1',
                    name: 'users',
                    schema: 'public',
                    fields: [
                        createField({
                            id: 'source-field-1',
                            name: 'id',
                            type: { id: 'integer', name: 'integer' },
                            primaryKey: true,
                            unique: true,
                        }),
                        createField({
                            id: 'source-field-2',
                            name: 'email',
                            type: { id: 'varchar', name: 'varchar' },
                            unique: true,
                        }),
                    ],
                }),
            ],
        });

        const targetDiagram = createBaseDiagram({
            tables: [
                createTable({
                    id: 'target-table-1',
                    name: 'users',
                    schema: 'public',
                    x: 100,
                    y: 100,
                    color: '#ff0000',
                    fields: [
                        createField({
                            id: 'target-field-1',
                            name: 'id',
                            type: { id: 'bigint', name: 'bigint' },
                            primaryKey: true,
                            unique: true,
                        }),
                        createField({
                            id: 'target-field-3',
                            name: 'username',
                            type: { id: 'varchar', name: 'varchar' },
                            unique: true,
                        }),
                    ],
                }),
                createTable({
                    id: 'target-table-2',
                    name: 'posts',
                    schema: 'public',
                    x: 200,
                    y: 200,
                    color: '#00ff00',
                }),
            ],
        });

        const result = applyIds({ sourceDiagram, targetDiagram });

        expect(result.tables).toHaveLength(2);
        expect(result.tables?.[0].id).toBe('source-table-1');
        expect(result.tables?.[0].fields).toHaveLength(2);
        expect(result.tables?.[0].fields[0].id).toBe('source-field-1'); // Matched field
        expect(result.tables?.[0].fields[1].id).toBe('target-field-3'); // Unmatched field keeps target ID
        expect(result.tables?.[1].id).toBe('target-table-2'); // Unmatched table keeps target ID
    });

    // Same table name in different schemas must NOT match.
    it('should handle different schemas correctly', () => {
        const sourceDiagram = createBaseDiagram({
            databaseType: DatabaseType.POSTGRESQL,
            tables: [
                createTable({
                    id: 'source-table-1',
                    name: 'users',
                    schema: 'auth',
                }),
            ],
        });

        const targetDiagram = createBaseDiagram({
            databaseType: DatabaseType.POSTGRESQL,
            tables: [
                createTable({
                    id: 'target-table-1',
                    name: 'users',
                    schema: 'public',
                    x: 100,
                    y: 100,
                    color: '#ff0000',
                }),
            ],
        });

        const result = applyIds({ sourceDiagram, targetDiagram });

        expect(result.tables?.[0].id).toBe('target-table-1'); // Different schemas, no match
    });

    // With nothing to map, the target diagram comes back structurally equal.
    it('should handle empty diagrams', () => {
        const sourceDiagram = createBaseDiagram();
        const targetDiagram = createBaseDiagram();

        const result = applyIds({ sourceDiagram, targetDiagram });

        expect(result).toEqual(targetDiagram);
    });

    it('should return target diagram unchanged when source has no matching entities', () => {
        const sourceDiagram = createBaseDiagram({
            tables: [
                createTable({
                    id: 'source-table-1',
                    name: 'products',
                    schema: 'inventory',
                }),
            ],
        });

        const targetDiagram = createBaseDiagram({
            tables: [
                createTable({
                    id: 'target-table-1',
                    name: 'users',
                    schema: 'public',
                    x: 100,
                    y: 100,
                    color: '#ff0000',
                    fields: [
                        createField({
                            id: 'target-field-1',
                            name: 'id',
                            type: { id: 'integer', name: 'integer' },
                            primaryKey: true,
                            unique: true,
                        }),
                    ],
                }),
            ],
        });

        const result = applyIds({ sourceDiagram, targetDiagram });

        // Should keep all target IDs since nothing matches
        expect(result.tables?.[0].id).toBe('target-table-1');
        expect(result.tables?.[0].fields[0].id).toBe('target-field-1');
        expect(result.tables?.[0].name).toBe('users');
        expect(result.tables?.[0].schema).toBe('public');
    });
});
|
||||
});
|
||||
328
src/lib/utils/apply-ids.ts
Normal file
328
src/lib/utils/apply-ids.ts
Normal file
@@ -0,0 +1,328 @@
|
||||
import { defaultSchemas } from '../data/default-schemas';
|
||||
import type { DBCustomType, DBTable, Diagram } from '../domain';
|
||||
|
||||
const createTableKey = ({
|
||||
table,
|
||||
defaultSchema,
|
||||
}: {
|
||||
table: DBTable;
|
||||
defaultSchema?: string;
|
||||
}) => {
|
||||
return `${table.schema ?? defaultSchema ?? ''}::${table.name}`;
|
||||
};
|
||||
|
||||
const createFieldKey = ({
|
||||
table,
|
||||
fieldName,
|
||||
defaultSchema,
|
||||
}: {
|
||||
table: DBTable;
|
||||
fieldName: string;
|
||||
defaultSchema?: string;
|
||||
}) => {
|
||||
return `${table.schema ?? defaultSchema ?? ''}::${table.name}::${fieldName}`;
|
||||
};
|
||||
|
||||
const createIndexKey = ({
|
||||
table,
|
||||
indexName,
|
||||
defaultSchema,
|
||||
}: {
|
||||
table: DBTable;
|
||||
indexName: string;
|
||||
defaultSchema?: string;
|
||||
}) => {
|
||||
return `${table.schema ?? defaultSchema ?? ''}::${table.name}::${indexName}`;
|
||||
};
|
||||
|
||||
const createRelationshipKey = ({
|
||||
relationshipName,
|
||||
defaultSchema,
|
||||
}: {
|
||||
relationshipName: string;
|
||||
defaultSchema?: string;
|
||||
}) => {
|
||||
return `${defaultSchema ?? ''}::${relationshipName}`;
|
||||
};
|
||||
|
||||
const createDependencyKey = ({
|
||||
table,
|
||||
dependentTable,
|
||||
defaultSchema,
|
||||
}: {
|
||||
table: DBTable;
|
||||
dependentTable: DBTable;
|
||||
defaultSchema?: string;
|
||||
}) => {
|
||||
return `${table.schema ?? defaultSchema ?? ''}::${table.name}::${dependentTable.schema ?? defaultSchema ?? ''}::${dependentTable.name}`;
|
||||
};
|
||||
|
||||
const createCustomTypeKey = ({
|
||||
customType,
|
||||
defaultSchema,
|
||||
}: {
|
||||
customType: DBCustomType;
|
||||
defaultSchema?: string;
|
||||
}) => {
|
||||
return `${customType.schema ?? defaultSchema ?? ''}::${customType.name}`;
|
||||
};
|
||||
|
||||
/**
 * Re-applies stable IDs from `sourceDiagram` onto `targetDiagram`.
 *
 * Entities are matched by name-based keys (see the create*Key helpers):
 * tables/custom types by schema+name, fields/indexes by schema+table+name,
 * relationships by default-schema+name, dependencies by the two tables'
 * schema+name pair. Where a match is found, the target entity keeps all of
 * its own attributes but takes the source entity's ID; unmatched entities
 * keep their target IDs. All cross-references (relationship table/field
 * IDs, dependency table IDs, index fieldIds) are rewritten consistently.
 *
 * @returns a new Diagram; `targetDiagram` is not mutated.
 */
export const applyIds = ({
    sourceDiagram,
    targetDiagram,
}: {
    sourceDiagram: Diagram;
    targetDiagram: Diagram;
}): Diagram => {
    // Pass 1 result: name-key -> source ID, per entity kind.
    const tablesIdMapping = new Map<string, string>();
    const fieldsIdMapping = new Map<string, string>();
    const indexesIdMapping = new Map<string, string>();
    const relationshipsIdMapping = new Map<string, string>();
    const dependenciesIdMapping = new Map<string, string>();
    const customTypesIdMapping = new Map<string, string>();

    // Each diagram resolves unset schemas against its own database type's
    // default schema, so diagrams of different DB types can still match.
    const sourceDefaultSchema = defaultSchemas[sourceDiagram.databaseType];
    const targetDefaultSchema = defaultSchemas[targetDiagram.databaseType];

    // Pass 1: index every source entity by its name-based key.
    sourceDiagram?.tables?.forEach((sourceTable) => {
        const sourceKey = createTableKey({
            table: sourceTable,
            defaultSchema: sourceDefaultSchema,
        });
        tablesIdMapping.set(sourceKey, sourceTable.id);

        sourceTable.fields.forEach((field) => {
            const fieldKey = createFieldKey({
                table: sourceTable,
                fieldName: field.name,
                defaultSchema: sourceDefaultSchema,
            });
            fieldsIdMapping.set(fieldKey, field.id);
        });

        sourceTable.indexes.forEach((index) => {
            const indexKey = createIndexKey({
                table: sourceTable,
                indexName: index.name,
                defaultSchema: sourceDefaultSchema,
            });
            indexesIdMapping.set(indexKey, index.id);
        });
    });

    sourceDiagram.relationships?.forEach((relationship) => {
        const relationshipKey = createRelationshipKey({
            relationshipName: relationship.name,
            defaultSchema: sourceDefaultSchema,
        });
        relationshipsIdMapping.set(relationshipKey, relationship.id);
    });

    sourceDiagram.dependencies?.forEach((dependency) => {
        // Dependency keys are built from the two tables' names, so both
        // endpoints must resolve; otherwise the dependency cannot be matched.
        const table = sourceDiagram.tables?.find(
            (t) => t.id === dependency.tableId
        );
        const dependentTable = sourceDiagram.tables?.find(
            (t) => t.id === dependency.dependentTableId
        );

        if (!table || !dependentTable) return;

        const dependencyKey = createDependencyKey({
            table,
            dependentTable,
            defaultSchema: sourceDefaultSchema,
        });

        dependenciesIdMapping.set(dependencyKey, dependency.id);
    });

    sourceDiagram.customTypes?.forEach((customType) => {
        const customTypeKey = createCustomTypeKey({
            customType,
            defaultSchema: sourceDefaultSchema,
        });
        customTypesIdMapping.set(customTypeKey, customType.id);
    });

    // Pass 2 result: current target ID -> replacement (source) ID.
    // Only matched entities get an entry; lookups below fall back to the
    // existing target ID via `?? id`.
    const targetTableIdMapping = new Map<string, string>();
    const targetFieldIdMapping = new Map<string, string>();
    const targetIndexIdMapping = new Map<string, string>();
    const targetRelationshipIdMapping = new Map<string, string>();
    const targetDependencyIdMapping = new Map<string, string>();
    const targetCustomTypeIdMapping = new Map<string, string>();

    targetDiagram?.tables?.forEach((targetTable) => {
        const targetKey = createTableKey({
            table: targetTable,
            defaultSchema: targetDefaultSchema,
        });
        const newId = tablesIdMapping.get(targetKey);
        if (newId) {
            targetTableIdMapping.set(targetTable.id, newId);
        }

        targetTable.fields.forEach((field) => {
            const fieldKey = createFieldKey({
                table: targetTable,
                fieldName: field.name,
                defaultSchema: targetDefaultSchema,
            });
            const newFieldId = fieldsIdMapping.get(fieldKey);
            if (newFieldId) {
                targetFieldIdMapping.set(field.id, newFieldId);
            }
        });

        targetTable.indexes.forEach((index) => {
            const indexKey = createIndexKey({
                table: targetTable,
                indexName: index.name,
                defaultSchema: targetDefaultSchema,
            });
            const newIndexId = indexesIdMapping.get(indexKey);
            if (newIndexId) {
                targetIndexIdMapping.set(index.id, newIndexId);
            }
        });
    });

    targetDiagram.relationships?.forEach((relationship) => {
        const relationshipKey = createRelationshipKey({
            relationshipName: relationship.name,
            defaultSchema: targetDefaultSchema,
        });
        const newId = relationshipsIdMapping.get(relationshipKey);
        if (newId) {
            targetRelationshipIdMapping.set(relationship.id, newId);
        }
    });

    targetDiagram.dependencies?.forEach((dependency) => {
        const table = targetDiagram.tables?.find(
            (t) => t.id === dependency.tableId
        );
        const dependentTable = targetDiagram.tables?.find(
            (t) => t.id === dependency.dependentTableId
        );

        if (!table || !dependentTable) return;

        const dependencyKey = createDependencyKey({
            table,
            dependentTable,
            defaultSchema: targetDefaultSchema,
        });

        const newId = dependenciesIdMapping.get(dependencyKey);
        if (newId) {
            targetDependencyIdMapping.set(dependency.id, newId);
        }
    });

    targetDiagram.customTypes?.forEach((customType) => {
        const customTypeKey = createCustomTypeKey({
            customType,
            defaultSchema: targetDefaultSchema,
        });
        const newId = customTypesIdMapping.get(customTypeKey);
        if (newId) {
            targetCustomTypeIdMapping.set(customType.id, newId);
        }
    });

    // Pass 3: rebuild the target diagram, swapping in mapped IDs and
    // rewriting every cross-reference through the same mappings.
    const result: Diagram = {
        ...targetDiagram,
        tables: targetDiagram.tables?.map((table) => {
            const newTableId = targetTableIdMapping.get(table.id) ?? table.id;

            return {
                ...table,
                id: newTableId,
                fields: table.fields.map((field) => {
                    const newFieldId =
                        targetFieldIdMapping.get(field.id) ?? field.id;
                    return {
                        ...field,
                        id: newFieldId,
                    };
                }),
                indexes: table.indexes.map((index) => {
                    const newIndexId =
                        targetIndexIdMapping.get(index.id) ?? index.id;

                    // Update field IDs in index
                    const updatedFieldIds = index.fieldIds.map((fieldId) => {
                        return targetFieldIdMapping.get(fieldId) ?? fieldId;
                    });

                    return {
                        ...index,
                        id: newIndexId,
                        fieldIds: updatedFieldIds,
                    };
                }),
            };
        }),
        relationships: targetDiagram.relationships?.map((relationship) => {
            const newRelationshipId =
                targetRelationshipIdMapping.get(relationship.id) ??
                relationship.id;

            // Update table and field IDs in relationships
            const newSourceTableId =
                targetTableIdMapping.get(relationship.sourceTableId) ??
                relationship.sourceTableId;
            const newTargetTableId =
                targetTableIdMapping.get(relationship.targetTableId) ??
                relationship.targetTableId;
            const newSourceFieldId =
                targetFieldIdMapping.get(relationship.sourceFieldId) ??
                relationship.sourceFieldId;
            const newTargetFieldId =
                targetFieldIdMapping.get(relationship.targetFieldId) ??
                relationship.targetFieldId;

            return {
                ...relationship,
                id: newRelationshipId,
                sourceTableId: newSourceTableId,
                targetTableId: newTargetTableId,
                sourceFieldId: newSourceFieldId,
                targetFieldId: newTargetFieldId,
            };
        }),
        dependencies: targetDiagram.dependencies?.map((dependency) => {
            const newDependencyId =
                targetDependencyIdMapping.get(dependency.id) ?? dependency.id;
            const newTableId =
                targetTableIdMapping.get(dependency.tableId) ??
                dependency.tableId;
            const newDependentTableId =
                targetTableIdMapping.get(dependency.dependentTableId) ??
                dependency.dependentTableId;

            return {
                ...dependency,
                id: newDependencyId,
                tableId: newTableId,
                dependentTableId: newDependentTableId,
            };
        }),
        customTypes: targetDiagram.customTypes?.map((customType) => {
            const newCustomTypeId =
                targetCustomTypeIdMapping.get(customType.id) ?? customType.id;

            return {
                ...customType,
                id: newCustomTypeId,
            };
        }),
    };

    return result;
};
|
||||
@@ -1,13 +1,13 @@
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import type { Area } from '@/lib/domain/area';
|
||||
import { calcTableHeight } from '@/lib/domain/db-table';
|
||||
import { calcTableHeight, MIN_TABLE_SIZE } from '@/lib/domain/db-table';
|
||||
|
||||
/**
|
||||
* Check if a table is inside an area based on their positions and dimensions
|
||||
*/
|
||||
const isTableInsideArea = (table: DBTable, area: Area): boolean => {
|
||||
export const isTableInsideArea = (table: DBTable, area: Area): boolean => {
|
||||
// Get table dimensions (assuming default width if not specified)
|
||||
const tableWidth = table.width ?? 224; // MIN_TABLE_SIZE from db-table.ts
|
||||
const tableWidth = table.width ?? MIN_TABLE_SIZE;
|
||||
const tableHeight = calcTableHeight(table);
|
||||
|
||||
// Check if table's top-left corner is inside the area
|
||||
@@ -33,7 +33,10 @@ const isTableInsideArea = (table: DBTable, area: Area): boolean => {
|
||||
/**
|
||||
* Find which area contains a table
|
||||
*/
|
||||
const findContainingArea = (table: DBTable, areas: Area[]): Area | null => {
|
||||
export const findContainingArea = (
|
||||
table: DBTable,
|
||||
areas: Area[]
|
||||
): Area | null => {
|
||||
// Sort areas by order (if available) to prioritize top-most areas
|
||||
const sortedAreas = [...areas].sort(
|
||||
(a, b) => (b.order ?? 0) - (a.order ?? 0)
|
||||
3
src/lib/utils/index.ts
Normal file
3
src/lib/utils/index.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
export * from './utils';
|
||||
export * from './apply-ids';
|
||||
export * from './area-utils';
|
||||
@@ -32,7 +32,7 @@ export const AreaNodeContextMenu: React.FC<
|
||||
<ContextMenu>
|
||||
<ContextMenuTrigger>{children}</ContextMenuTrigger>
|
||||
<ContextMenuContent>
|
||||
{onEditName && (
|
||||
{onEditName ? (
|
||||
<ContextMenuItem
|
||||
onClick={onEditName}
|
||||
className="flex justify-between gap-3"
|
||||
@@ -40,7 +40,7 @@ export const AreaNodeContextMenu: React.FC<
|
||||
<span>Edit Area Name</span>
|
||||
<Pencil className="size-3.5" />
|
||||
</ContextMenuItem>
|
||||
)}
|
||||
) : null}
|
||||
<ContextMenuItem
|
||||
onClick={removeAreaHandler}
|
||||
className="flex justify-between gap-3"
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
import React, { useCallback, useEffect, useState } from 'react';
|
||||
import React, { useCallback, useState } from 'react';
|
||||
import type { NodeProps, Node } from '@xyflow/react';
|
||||
import { NodeResizer } from '@xyflow/react';
|
||||
import type { Area } from '@/lib/domain/area';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import { Input } from '@/components/input/input';
|
||||
import { useEditClickOutside } from '@/hooks/use-click-outside';
|
||||
import { useKeyPressEvent } from 'react-use';
|
||||
import { useClickAway, useKeyPressEvent } from 'react-use';
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
@@ -37,11 +36,12 @@ export const AreaNode: React.FC<NodeProps<AreaNodeType>> = React.memo(
|
||||
const focused = !!selected && !dragging;
|
||||
|
||||
const editAreaName = useCallback(() => {
|
||||
if (!editMode) return;
|
||||
if (areaName.trim()) {
|
||||
updateArea(area.id, { name: areaName.trim() });
|
||||
}
|
||||
setEditMode(false);
|
||||
}, [areaName, area.id, updateArea]);
|
||||
}, [areaName, area.id, updateArea, editMode]);
|
||||
|
||||
const abortEdit = useCallback(() => {
|
||||
setEditMode(false);
|
||||
@@ -53,36 +53,20 @@ export const AreaNode: React.FC<NodeProps<AreaNodeType>> = React.memo(
|
||||
openAreaFromSidebar(area.id);
|
||||
}, [selectSidebarSection, openAreaFromSidebar, area.id]);
|
||||
|
||||
// Handle click outside to save and exit edit mode
|
||||
useEditClickOutside(inputRef, editMode, editAreaName);
|
||||
useClickAway(inputRef, editAreaName);
|
||||
useKeyPressEvent('Enter', editAreaName);
|
||||
useKeyPressEvent('Escape', abortEdit);
|
||||
|
||||
const enterEditMode = useCallback(
|
||||
(e?: React.MouseEvent) => {
|
||||
e?.stopPropagation();
|
||||
setAreaName(area.name);
|
||||
setEditMode(true);
|
||||
},
|
||||
[area.name]
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
if (editMode) {
|
||||
// Small delay to ensure the input is rendered
|
||||
const timeoutId = setTimeout(() => {
|
||||
if (inputRef.current) {
|
||||
inputRef.current.focus();
|
||||
inputRef.current.select();
|
||||
}
|
||||
}, 50);
|
||||
|
||||
return () => clearTimeout(timeoutId);
|
||||
}
|
||||
}, [editMode]);
|
||||
const enterEditMode = (e: React.MouseEvent) => {
|
||||
e.stopPropagation();
|
||||
setEditMode(true);
|
||||
};
|
||||
|
||||
return (
|
||||
<AreaNodeContextMenu area={area} onEditName={enterEditMode}>
|
||||
<AreaNodeContextMenu
|
||||
area={area}
|
||||
onEditName={() => setEditMode(true)}
|
||||
>
|
||||
<div
|
||||
className={cn(
|
||||
'flex h-full flex-col rounded-md border-2 shadow-sm',
|
||||
|
||||
@@ -79,7 +79,10 @@ import { useCanvas } from '@/hooks/use-canvas';
|
||||
import type { AreaNodeType } from './area-node/area-node';
|
||||
import { AreaNode } from './area-node/area-node';
|
||||
import type { Area } from '@/lib/domain/area';
|
||||
import { updateTablesParentAreas, getTablesInArea } from './area-utils';
|
||||
import {
|
||||
updateTablesParentAreas,
|
||||
getTablesInArea,
|
||||
} from '@/lib/utils/area-utils';
|
||||
import { CanvasFilter } from './canvas-filter/canvas-filter';
|
||||
import { useHotkeys } from 'react-hotkeys-hook';
|
||||
import { ShowAllButton } from './show-all-button';
|
||||
|
||||
@@ -11,7 +11,6 @@ import { Separator } from '@/components/separator/separator';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import { useUpdateTable } from '@/hooks/use-update-table';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useClickOutside } from '@/hooks/use-click-outside';
|
||||
|
||||
export interface TableEditModeProps {
|
||||
table: DBTable;
|
||||
@@ -109,9 +108,6 @@ export const TableEditMode: React.FC<TableEditModeProps> = React.memo(
|
||||
}
|
||||
}, [createField, table.id]);
|
||||
|
||||
// Close edit mode when clicking outside
|
||||
useClickOutside(containerRef, onClose, isVisible);
|
||||
|
||||
const handleColorChange = useCallback(
|
||||
(newColor: string) => {
|
||||
updateTable(table.id, { color: newColor });
|
||||
|
||||
@@ -13,6 +13,7 @@ import { Copy, Pencil, Trash2, Workflow } from 'lucide-react';
|
||||
import React, { useCallback } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useDialog } from '@/hooks/use-dialog';
|
||||
import { useCanvas } from '@/hooks/use-canvas';
|
||||
|
||||
export interface TableNodeContextMenuProps {
|
||||
table: DBTable;
|
||||
@@ -22,10 +23,11 @@ export const TableNodeContextMenu: React.FC<
|
||||
React.PropsWithChildren<TableNodeContextMenuProps>
|
||||
> = ({ children, table }) => {
|
||||
const { removeTable, readonly, createTable } = useChartDB();
|
||||
const { openTableFromSidebar } = useLayout();
|
||||
const { closeAllTablesInSidebar } = useLayout();
|
||||
const { t } = useTranslation();
|
||||
const { isMd: isDesktop } = useBreakpoint('md');
|
||||
const { openCreateRelationshipDialog } = useDialog();
|
||||
const { setEditTableModeTable } = useCanvas();
|
||||
|
||||
const duplicateTableHandler = useCallback(() => {
|
||||
const clonedTable = cloneTable(table);
|
||||
@@ -38,8 +40,13 @@ export const TableNodeContextMenu: React.FC<
|
||||
}, [createTable, table]);
|
||||
|
||||
const editTableHandler = useCallback(() => {
|
||||
openTableFromSidebar(table.id);
|
||||
}, [openTableFromSidebar, table.id]);
|
||||
if (readonly) {
|
||||
return;
|
||||
}
|
||||
|
||||
closeAllTablesInSidebar();
|
||||
setEditTableModeTable({ tableId: table.id });
|
||||
}, [table.id, setEditTableModeTable, closeAllTablesInSidebar, readonly]);
|
||||
|
||||
const removeTableHandler = useCallback(() => {
|
||||
removeTable(table.id);
|
||||
|
||||
@@ -5,6 +5,7 @@ import React, {
|
||||
useCallback,
|
||||
useRef,
|
||||
} from 'react';
|
||||
import { useDebounceFn } from 'ahooks';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import { useTheme } from '@/hooks/use-theme';
|
||||
import { CodeSnippet } from '@/components/code-snippet/code-snippet';
|
||||
@@ -25,7 +26,6 @@ import { generateDBMLFromDiagram } from '@/lib/dbml/dbml-export/dbml-export';
|
||||
import { useDiff } from '@/context/diff-context/use-diff';
|
||||
import { importDBMLToDiagram } from '@/lib/dbml/dbml-import/dbml-import';
|
||||
import { applyDBMLChanges } from '@/lib/dbml/apply-dbml/apply-dbml';
|
||||
import { useDebounce } from '@/hooks/use-debounce';
|
||||
import { parseDBMLError } from '@/lib/dbml/dbml-import/dbml-import-error';
|
||||
import {
|
||||
clearErrorHighlight,
|
||||
@@ -113,6 +113,17 @@ export const TableDBML: React.FC<TableDBMLProps> = () => {
|
||||
const { hideLoader, showLoader } = useFullScreenLoader();
|
||||
const emphasisTimeoutRef = useRef<NodeJS.Timeout>();
|
||||
const readOnlyDisposableRef = useRef<monaco.IDisposable>();
|
||||
const currentDiagramRef = useRef<Diagram>(currentDiagram);
|
||||
const originalDiagramRef = useRef<Diagram | null>(originalDiagram);
|
||||
|
||||
// Keep refs updated
|
||||
useEffect(() => {
|
||||
currentDiagramRef.current = currentDiagram;
|
||||
}, [currentDiagram]);
|
||||
|
||||
useEffect(() => {
|
||||
originalDiagramRef.current = originalDiagram;
|
||||
}, [originalDiagram]);
|
||||
|
||||
// --- Check for empty field name warnings only on mount ---
|
||||
useEffect(() => {
|
||||
@@ -190,7 +201,7 @@ export const TableDBML: React.FC<TableDBMLProps> = () => {
|
||||
);
|
||||
|
||||
const sourceDiagram: Diagram =
|
||||
originalDiagram ?? currentDiagram;
|
||||
originalDiagramRef.current ?? currentDiagramRef.current;
|
||||
|
||||
const targetDiagram: Diagram = {
|
||||
...sourceDiagram,
|
||||
@@ -204,9 +215,9 @@ export const TableDBML: React.FC<TableDBMLProps> = () => {
|
||||
targetDiagram,
|
||||
});
|
||||
|
||||
if (originalDiagram) {
|
||||
if (originalDiagramRef.current) {
|
||||
resetDiff();
|
||||
loadDiagramFromData(originalDiagram);
|
||||
loadDiagramFromData(originalDiagramRef.current);
|
||||
}
|
||||
|
||||
calculateDiff({
|
||||
@@ -232,18 +243,12 @@ export const TableDBML: React.FC<TableDBMLProps> = () => {
|
||||
}
|
||||
}
|
||||
},
|
||||
[
|
||||
t,
|
||||
originalDiagram,
|
||||
currentDiagram,
|
||||
resetDiff,
|
||||
loadDiagramFromData,
|
||||
calculateDiff,
|
||||
databaseType,
|
||||
]
|
||||
[t, resetDiff, loadDiagramFromData, calculateDiff, databaseType]
|
||||
);
|
||||
|
||||
const debouncedShowDiff = useDebounce(showDiff, 1000);
|
||||
const { run: debouncedShowDiff } = useDebounceFn(showDiff, {
|
||||
wait: 1000,
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
if (!isEditMode || !editedDbml) {
|
||||
@@ -359,8 +364,10 @@ export const TableDBML: React.FC<TableDBMLProps> = () => {
|
||||
{
|
||||
label: 'View',
|
||||
icon: PencilOff,
|
||||
onClick: () =>
|
||||
setIsEditMode((prev) => !prev),
|
||||
onClick: () => {
|
||||
resetDiff();
|
||||
setIsEditMode((prev) => !prev);
|
||||
},
|
||||
},
|
||||
]
|
||||
: [
|
||||
|
||||
@@ -173,9 +173,11 @@ export const RelationshipListItemContent: React.FC<
|
||||
<SelectItem value="many_to_one">
|
||||
{t('relationship_type.many_to_one')}
|
||||
</SelectItem>
|
||||
<SelectItem value="many_to_many">
|
||||
{t('relationship_type.many_to_many')}
|
||||
</SelectItem>
|
||||
{relationshipType === 'many_to_many' ? (
|
||||
<SelectItem value="many_to_many">
|
||||
{t('relationship_type.many_to_many')}
|
||||
</SelectItem>
|
||||
) : null}
|
||||
</SelectGroup>
|
||||
</SelectContent>
|
||||
</Select>
|
||||
|
||||
@@ -11,8 +11,7 @@ import type { DBRelationship } from '@/lib/domain/db-relationship';
|
||||
import { useReactFlow } from '@xyflow/react';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import { useFocusOn } from '@/hooks/use-focus-on';
|
||||
import { useEditClickOutside } from '@/hooks/use-click-outside';
|
||||
import { useKeyPressEvent } from 'react-use';
|
||||
import { useClickAway, useKeyPressEvent } from 'react-use';
|
||||
import {
|
||||
DropdownMenu,
|
||||
DropdownMenuContent,
|
||||
@@ -43,37 +42,31 @@ export const RelationshipListItemHeader: React.FC<
|
||||
const inputRef = React.useRef<HTMLInputElement>(null);
|
||||
|
||||
const editRelationshipName = useCallback(() => {
|
||||
if (!editMode) return;
|
||||
if (relationshipName.trim() && relationshipName !== relationship.name) {
|
||||
updateRelationship(relationship.id, {
|
||||
name: relationshipName.trim(),
|
||||
});
|
||||
}
|
||||
|
||||
setEditMode(false);
|
||||
}, [
|
||||
relationshipName,
|
||||
relationship.id,
|
||||
updateRelationship,
|
||||
editMode,
|
||||
relationship.name,
|
||||
]);
|
||||
|
||||
const abortEdit = useCallback(() => {
|
||||
setEditMode(false);
|
||||
setRelationshipName(relationship.name);
|
||||
}, [relationship.name]);
|
||||
|
||||
// Handle click outside to save and exit edit mode
|
||||
useEditClickOutside(inputRef, editMode, editRelationshipName);
|
||||
useClickAway(inputRef, editRelationshipName);
|
||||
useKeyPressEvent('Enter', editRelationshipName);
|
||||
useKeyPressEvent('Escape', abortEdit);
|
||||
|
||||
const enterEditMode = useCallback(
|
||||
(event: React.MouseEvent<HTMLButtonElement, MouseEvent>) => {
|
||||
event.stopPropagation();
|
||||
setRelationshipName(relationship.name);
|
||||
setEditMode(true);
|
||||
},
|
||||
[relationship.name]
|
||||
);
|
||||
const enterEditMode = (
|
||||
event: React.MouseEvent<HTMLButtonElement, MouseEvent>
|
||||
) => {
|
||||
event.stopPropagation();
|
||||
setEditMode(true);
|
||||
};
|
||||
|
||||
const handleFocusOnRelationship = useCallback(
|
||||
(event: React.MouseEvent<HTMLButtonElement, MouseEvent>) => {
|
||||
|
||||
@@ -15,8 +15,7 @@ import { ListItemHeaderButton } from '@/pages/editor-page/side-panel/list-item-h
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import { Input } from '@/components/input/input';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import { useEditClickOutside } from '@/hooks/use-click-outside';
|
||||
import { useKeyPressEvent } from 'react-use';
|
||||
import { useClickAway, useKeyPressEvent } from 'react-use';
|
||||
import { useSortable } from '@dnd-kit/sortable';
|
||||
import {
|
||||
DropdownMenu,
|
||||
@@ -68,30 +67,27 @@ export const TableListItemHeader: React.FC<TableListItemHeaderProps> = ({
|
||||
const { listeners } = useSortable({ id: table.id });
|
||||
|
||||
const editTableName = useCallback(() => {
|
||||
if (!editMode) return;
|
||||
if (tableName.trim()) {
|
||||
updateTable(table.id, { name: tableName.trim() });
|
||||
}
|
||||
|
||||
setEditMode(false);
|
||||
}, [tableName, table.id, updateTable]);
|
||||
}, [tableName, table.id, updateTable, editMode]);
|
||||
|
||||
const abortEdit = useCallback(() => {
|
||||
setEditMode(false);
|
||||
setTableName(table.name);
|
||||
}, [table.name]);
|
||||
|
||||
// Handle click outside to save and exit edit mode
|
||||
useEditClickOutside(inputRef, editMode, editTableName);
|
||||
useClickAway(inputRef, editTableName);
|
||||
useKeyPressEvent('Enter', editTableName);
|
||||
useKeyPressEvent('Escape', abortEdit);
|
||||
|
||||
const enterEditMode = useCallback(
|
||||
(e: React.MouseEvent) => {
|
||||
e.stopPropagation();
|
||||
setTableName(table.name);
|
||||
setEditMode(true);
|
||||
},
|
||||
[table.name]
|
||||
);
|
||||
const enterEditMode = (e: React.MouseEvent) => {
|
||||
e.stopPropagation();
|
||||
setEditMode(true);
|
||||
};
|
||||
|
||||
const handleFocusOnTable = useCallback(
|
||||
(event: React.MouseEvent<HTMLButtonElement, MouseEvent>) => {
|
||||
@@ -253,20 +249,6 @@ export const TableListItemHeader: React.FC<TableListItemHeaderProps> = ({
|
||||
}
|
||||
}, [table.name]);
|
||||
|
||||
useEffect(() => {
|
||||
if (editMode) {
|
||||
// Small delay to ensure the input is rendered
|
||||
const timeoutId = setTimeout(() => {
|
||||
if (inputRef.current) {
|
||||
inputRef.current.focus();
|
||||
inputRef.current.select();
|
||||
}
|
||||
}, 50);
|
||||
|
||||
return () => clearTimeout(timeoutId);
|
||||
}
|
||||
}, [editMode]);
|
||||
|
||||
return (
|
||||
<div className="group flex h-11 flex-1 items-center justify-between gap-1 overflow-hidden">
|
||||
{!readonly ? (
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
import React, { useCallback, useEffect, useState } from 'react';
|
||||
import { useEditClickOutside } from '@/hooks/use-click-outside';
|
||||
import { Button } from '@/components/button/button';
|
||||
import { Check, Pencil } from 'lucide-react';
|
||||
import { Input } from '@/components/input/input';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import { useKeyPressEvent } from 'react-use';
|
||||
import { useClickAway, useKeyPressEvent } from 'react-use';
|
||||
import { DiagramIcon } from '@/components/diagram-icon/diagram-icon';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { cn } from '@/lib/utils';
|
||||
@@ -32,6 +31,18 @@ export const DiagramName: React.FC<DiagramNameProps> = () => {
|
||||
setEditedDiagramName(diagramName);
|
||||
}, [diagramName]);
|
||||
|
||||
const editDiagramName = useCallback(() => {
|
||||
if (editedDiagramName.trim()) {
|
||||
updateDiagramName(editedDiagramName.trim());
|
||||
}
|
||||
setEditMode(false);
|
||||
}, [editedDiagramName, updateDiagramName]);
|
||||
|
||||
// Handle click outside to save and exit edit mode
|
||||
useClickAway(inputRef, editDiagramName);
|
||||
|
||||
useKeyPressEvent('Enter', editDiagramName);
|
||||
|
||||
useEffect(() => {
|
||||
if (editMode) {
|
||||
// Small delay to ensure the input is rendered
|
||||
@@ -46,23 +57,6 @@ export const DiagramName: React.FC<DiagramNameProps> = () => {
|
||||
}
|
||||
}, [editMode]);
|
||||
|
||||
const editDiagramName = useCallback(() => {
|
||||
if (editedDiagramName.trim()) {
|
||||
updateDiagramName(editedDiagramName.trim());
|
||||
}
|
||||
setEditMode(false);
|
||||
}, [editedDiagramName, updateDiagramName]);
|
||||
|
||||
const abortEdit = useCallback(() => {
|
||||
setEditMode(false);
|
||||
setEditedDiagramName(diagramName);
|
||||
}, [diagramName]);
|
||||
|
||||
// Handle click outside to save and exit edit mode
|
||||
useEditClickOutside(inputRef, editMode, editDiagramName);
|
||||
useKeyPressEvent('Enter', editDiagramName);
|
||||
useKeyPressEvent('Escape', abortEdit);
|
||||
|
||||
const enterEditMode = useCallback(
|
||||
(event: React.MouseEvent<HTMLElement, MouseEvent>) => {
|
||||
event.stopPropagation();
|
||||
@@ -103,12 +97,11 @@ export const DiagramName: React.FC<DiagramNameProps> = () => {
|
||||
onChange={(e) =>
|
||||
setEditedDiagramName(e.target.value)
|
||||
}
|
||||
className="ml-1 h-7 focus-visible:ring-0"
|
||||
className="h-7 max-w-[300px] focus-visible:ring-0"
|
||||
style={{
|
||||
width: `${Math.max(
|
||||
editedDiagramName.length * 8 + 20,
|
||||
100
|
||||
)}px`,
|
||||
width: `${
|
||||
editedDiagramName.length * 8 + 30
|
||||
}px`,
|
||||
}}
|
||||
/>
|
||||
<Button
|
||||
@@ -126,7 +119,7 @@ export const DiagramName: React.FC<DiagramNameProps> = () => {
|
||||
<h1
|
||||
className={cn(
|
||||
labelVariants(),
|
||||
'group-hover:underline'
|
||||
'group-hover:underline max-w-[300px] truncate'
|
||||
)}
|
||||
onDoubleClick={(e) => {
|
||||
enterEditMode(e);
|
||||
@@ -141,10 +134,13 @@ export const DiagramName: React.FC<DiagramNameProps> = () => {
|
||||
</Tooltip>
|
||||
<Button
|
||||
variant="ghost"
|
||||
className="ml-1 size-5 p-0 opacity-0 transition-opacity hover:bg-primary-foreground group-hover:opacity-100"
|
||||
className="ml-1 hidden size-5 p-0 hover:bg-background/50 group-hover:flex"
|
||||
onClick={enterEditMode}
|
||||
>
|
||||
<Pencil className="size-3 text-slate-500 dark:text-slate-400" />
|
||||
<Pencil
|
||||
strokeWidth="1.5"
|
||||
className="!size-3.5 text-slate-600 dark:text-slate-400"
|
||||
/>
|
||||
</Button>
|
||||
</>
|
||||
)}
|
||||
|
||||
@@ -46,7 +46,6 @@ export const Menu: React.FC<MenuProps> = () => {
|
||||
openExportImageDialog,
|
||||
openExportDiagramDialog,
|
||||
openImportDiagramDialog,
|
||||
openImportDBMLDialog,
|
||||
} = useDialog();
|
||||
const { showAlert } = useAlert();
|
||||
const { setTheme, theme } = useTheme();
|
||||
@@ -185,9 +184,6 @@ export const Menu: React.FC<MenuProps> = () => {
|
||||
<MenubarItem onClick={openImportDiagramDialog}>
|
||||
.json
|
||||
</MenubarItem>
|
||||
<MenubarItem onClick={() => openImportDBMLDialog()}>
|
||||
.dbml
|
||||
</MenubarItem>
|
||||
<MenubarSeparator />
|
||||
<MenubarItem
|
||||
onClick={() =>
|
||||
|
||||
Reference in New Issue
Block a user