Mirror of https://github.com/chartdb/chartdb.git (synced 2025-10-24 08:33:44 +00:00)

Compare commits: v1.15.1...jf/edit-cl (18 commits)
| SHA1 |
|---|
| 4fdc2ccd91 |
| 8954d893bb |
| 1a6688e85e |
| 5e81c1848a |
| 2bd9ca25b2 |
| b016a70691 |
| a0fb1ed08b |
| ffddcdcc98 |
| fe9ef275b8 |
| df89f0b6b9 |
| 534d2858af |
| 2a64deebb8 |
| e5e1d59327 |
| aa290615ca |
| ec6e46fe81 |
| ac128d67de |
| 07937a2f51 |
| d8e0bc7db8 |
277  package-lock.json  generated

@@ -17,7 +17,7 @@
},
"dependencies": {
"@ai-sdk/openai": "^0.0.51",
"@dbml/core": "^3.9.5",
"@dbml/core": "^3.13.9",
"@dnd-kit/sortable": "^8.0.0",
"@monaco-editor/react": "^4.6.0",
"@radix-ui/react-accordion": "^1.2.0",

@@ -11,18 +11,26 @@ import {
DropdownMenuItem,
DropdownMenuTrigger,
} from '@/components/dropdown-menu/dropdown-menu';
import {
Tooltip,
TooltipContent,
TooltipTrigger,
} from '@/components/tooltip/tooltip';

export interface ButtonAlternative {
label: string;
onClick: () => void;
disabled?: boolean;
icon?: React.ReactNode;
className?: string;
tooltip?: string;
}

export interface ButtonWithAlternativesProps
extends React.ButtonHTMLAttributes<HTMLButtonElement>,
VariantProps<typeof buttonVariants> {
asChild?: boolean;
alternatives: Array<{
label: string;
onClick: () => void;
disabled?: boolean;
icon?: React.ReactNode;
className?: string;
}>;
alternatives: Array<ButtonAlternative>;
dropdownTriggerClassName?: string;
chevronDownIconClassName?: string;
}

@@ -87,19 +95,36 @@ const ButtonWithAlternatives = React.forwardRef<
</button>
</DropdownMenuTrigger>
<DropdownMenuContent align="end">
{alternatives.map((alternative, index) => (
<DropdownMenuItem
key={index}
onClick={alternative.onClick}
disabled={alternative.disabled}
className={cn(alternative.className)}
>
<span className="flex w-full items-center justify-between gap-2">
{alternative.label}
{alternative.icon}
</span>
</DropdownMenuItem>
))}
{alternatives.map((alternative, index) => {
const menuItem = (
<DropdownMenuItem
key={index}
onClick={alternative.onClick}
disabled={alternative.disabled}
className={cn(alternative.className)}
>
<span className="flex w-full items-center justify-between gap-2">
{alternative.label}
{alternative.icon}
</span>
</DropdownMenuItem>
);

if (alternative.tooltip) {
return (
<Tooltip key={index}>
<TooltipTrigger asChild>
{menuItem}
</TooltipTrigger>
<TooltipContent side="left">
{alternative.tooltip}
</TooltipContent>
</Tooltip>
);
}

return menuItem;
})}
</DropdownMenuContent>
</DropdownMenu>
) : null}

@@ -5,27 +5,45 @@ import {
PopoverTrigger,
} from '@/components/popover/popover';
import { colorOptions } from '@/lib/colors';
import { cn } from '@/lib/utils';

export interface ColorPickerProps {
color: string;
onChange: (color: string) => void;
disabled?: boolean;
popoverOnMouseDown?: (e: React.MouseEvent) => void;
popoverOnClick?: (e: React.MouseEvent) => void;
}

export const ColorPicker = React.forwardRef<
React.ElementRef<typeof PopoverTrigger>,
ColorPickerProps
>(({ color, onChange }, ref) => {
>(({ color, onChange, disabled, popoverOnMouseDown, popoverOnClick }, ref) => {
return (
<Popover>
<PopoverTrigger asChild ref={ref}>
<PopoverTrigger
asChild
ref={ref}
disabled={disabled}
{...(disabled ? { onClick: (e) => e.preventDefault() } : {})}
>
<div
className="h-6 w-8 cursor-pointer rounded-md border-2 border-muted transition-shadow hover:shadow-md"
className={cn(
'h-6 w-8 cursor-pointer rounded-md border-2 border-muted transition-shadow hover:shadow-md',
{
'hover:shadow-none cursor-default': disabled,
}
)}
style={{
backgroundColor: color,
}}
/>
</PopoverTrigger>
<PopoverContent className="w-fit">
<PopoverContent
className="w-fit"
onMouseDown={popoverOnMouseDown}
onClick={popoverOnClick}
>
<div className="grid grid-cols-4 gap-2">
{colorOptions.map((option) => (
<div

@@ -27,6 +27,7 @@ export interface SelectBoxOption {
regex?: string;
extractRegex?: RegExp;
group?: string;
icon?: React.ReactNode;
}

export interface SelectBoxProps {

@@ -53,6 +54,10 @@ export interface SelectBoxProps {
open?: boolean;
onOpenChange?: (open: boolean) => void;
popoverClassName?: string;
readonly?: boolean;
footerButtons?: React.ReactNode;
commandOnMouseDown?: (e: React.MouseEvent) => void;
commandOnClick?: (e: React.MouseEvent) => void;
}

export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(

@@ -78,6 +83,10 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
open,
onOpenChange: setOpen,
popoverClassName,
readonly,
footerButtons,
commandOnMouseDown,
commandOnClick,
},
ref
) => {

@@ -152,18 +161,20 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
className={`inline-flex min-w-0 shrink-0 items-center gap-1 rounded-md border py-0.5 pl-2 pr-1 text-xs font-medium text-foreground transition-colors focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 ${oneLine ? 'mx-0.5' : ''}`}
>
<span>{option.label}</span>
<span
onClick={(e) => {
e.preventDefault();
handleSelect(option.value);
}}
className="flex items-center rounded-sm px-px text-muted-foreground/60 hover:bg-accent hover:text-muted-foreground"
>
<Cross2Icon />
</span>
{!readonly ? (
<span
onClick={(e) => {
e.preventDefault();
handleSelect(option.value);
}}
className="flex items-center rounded-sm px-px text-muted-foreground/60 hover:bg-accent hover:text-muted-foreground"
>
<Cross2Icon />
</span>
) : null}
</span>
)),
[options, value, handleSelect, oneLine, keepOrder]
[options, value, handleSelect, oneLine, keepOrder, readonly]
);

const isAllSelected = React.useMemo(

@@ -236,6 +247,8 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
matches?.map((match) => match?.toString())
)
}
onMouseDown={commandOnMouseDown}
onClick={commandOnClick}
>
{multiple && (
<div

@@ -250,6 +263,11 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
</div>
)}
<div className="flex flex-1 items-center truncate">
{option.icon ? (
<span className="mr-2 shrink-0">
{option.icon}
</span>
) : null}
<span>
{isRegexMatch ? searchTerm : option.label}
{!isRegexMatch && optionSuffix

@@ -276,7 +294,15 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
</CommandItem>
);
},
[value, multiple, searchTerm, handleSelect, optionSuffix]
[
value,
multiple,
searchTerm,
handleSelect,
optionSuffix,
commandOnClick,
commandOnMouseDown,
]
);

return (

@@ -284,7 +310,7 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
<PopoverTrigger asChild tabIndex={0} onKeyDown={handleKeyDown}>
<div
className={cn(
`flex min-h-[36px] cursor-pointer items-center justify-between rounded-md border px-3 py-1 data-[state=open]:border-ring ${disabled ? 'bg-muted pointer-events-none' : ''}`,
`flex min-h-[36px] cursor-pointer items-center justify-between rounded-md border px-3 py-1 data-[state=open]:border-ring ${disabled ? 'bg-muted pointer-events-none' : ''} ${readonly ? 'pointer-events-none' : ''}`,
className
)}
>

@@ -354,6 +380,8 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
popoverClassName
)}
align="center"
onMouseDown={(e) => e.stopPropagation()}
onClick={(e) => e.stopPropagation()}
>
<Command
filter={(value, search, keywords) => {

@@ -443,6 +471,9 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
</div>
</ScrollArea>
</Command>
{footerButtons ? (
<div className="border-t">{footerButtons}</div>
) : null}
</PopoverContent>
</Popover>
);

@@ -14,6 +14,16 @@ export interface CanvasContext {
overlapGraph: Graph<string>;
setShowFilter: React.Dispatch<React.SetStateAction<boolean>>;
showFilter: boolean;
editTableModeTable: {
tableId: string;
fieldId?: string;
} | null;
setEditTableModeTable: React.Dispatch<
React.SetStateAction<{
tableId: string;
fieldId?: string;
} | null>
>;
}

export const canvasContext = createContext<CanvasContext>({

@@ -23,4 +33,6 @@ export const canvasContext = createContext<CanvasContext>({
overlapGraph: createGraph(),
setShowFilter: emptyFn,
showFilter: false,
editTableModeTable: null,
setEditTableModeTable: emptyFn,
});

@@ -33,6 +33,10 @@ export const CanvasProvider = ({ children }: CanvasProviderProps) => {
const { fitView } = useReactFlow();
const [overlapGraph, setOverlapGraph] =
useState<Graph<string>>(createGraph());
const [editTableModeTable, setEditTableModeTable] = useState<{
tableId: string;
fieldId?: string;
} | null>(null);

const [showFilter, setShowFilter] = useState(false);
const diagramIdActiveFilterRef = useRef<string>();

@@ -127,6 +131,8 @@ export const CanvasProvider = ({ children }: CanvasProviderProps) => {
overlapGraph,
setShowFilter,
showFilter,
editTableModeTable,
setEditTableModeTable,
}}
>
{children}

@@ -41,8 +41,7 @@ export const ChartDBProvider: React.FC<
React.PropsWithChildren<ChartDBProviderProps>
> = ({ children, diagram, readonly: readonlyProp }) => {
const { hasDiff } = useDiff();
const dbStorage = useStorage();
let db = dbStorage;
const storageDB = useStorage();
const events = useEventEmitter<ChartDBEvent>();
const { addUndoAction, resetRedoStack, resetUndoStack } =
useRedoUndoStack();

@@ -102,10 +101,6 @@ export const ChartDBProvider: React.FC<
[readonlyProp, hasDiff]
);

if (readonly) {
db = storageInitialValue;
}

const schemas = useMemo(
() =>
databasesWithSchemas.includes(databaseType)

@@ -134,6 +129,11 @@ export const ChartDBProvider: React.FC<
[tables, defaultSchemaName, databaseType]
);

const db = useMemo(
() => (readonly ? storageInitialValue : storageDB),
[storageDB, readonly]
);

const currentDiagram: Diagram = useMemo(
() => ({
id: diagramId,

@@ -1580,17 +1580,17 @@ export const ChartDBProvider: React.FC<

const updateDiagramData: ChartDBContext['updateDiagramData'] = useCallback(
async (diagram, options) => {
const st = options?.forceUpdateStorage ? dbStorage : db;
const st = options?.forceUpdateStorage ? storageDB : db;
await st.deleteDiagram(diagram.id);
await st.addDiagram({ diagram });
loadDiagramFromData(diagram);
},
[db, dbStorage, loadDiagramFromData]
[db, storageDB, loadDiagramFromData]
);

const loadDiagram: ChartDBContext['loadDiagram'] = useCallback(
async (diagramId: string) => {
const diagram = await db.getDiagram(diagramId, {
const diagram = await storageDB.getDiagram(diagramId, {
includeRelationships: true,
includeTables: true,
includeDependencies: true,

@@ -1604,7 +1604,7 @@ export const ChartDBProvider: React.FC<

return diagram;
},
[db, loadDiagramFromData]
[storageDB, loadDiagramFromData]
);

// Custom type operations

@@ -3,7 +3,7 @@ import { Dialog, DialogContent } from '@/components/dialog/dialog';
import { DatabaseType } from '@/lib/domain/database-type';
import { useStorage } from '@/hooks/use-storage';
import type { Diagram } from '@/lib/domain/diagram';
import { loadFromDatabaseMetadata } from '@/lib/domain/diagram';
import { loadFromDatabaseMetadata } from '@/lib/data/import-metadata/import';
import { useNavigate } from 'react-router-dom';
import { useConfig } from '@/hooks/use-config';
import type { DatabaseMetadata } from '@/lib/data/import-metadata/metadata-types/database-metadata';

@@ -69,6 +69,7 @@ export const SelectDatabase: React.FC<SelectDatabaseProps> = ({
type="button"
variant="outline"
onClick={createNewDiagram}
disabled={databaseType === DatabaseType.GENERIC}
>
{t('new_diagram_dialog.empty_diagram')}
</Button>

@@ -17,7 +17,7 @@ import { useDialog } from '@/hooks/use-dialog';
import {
exportBaseSQL,
exportSQL,
} from '@/lib/data/export-metadata/export-sql-script';
} from '@/lib/data/sql-export/export-sql-script';
import { databaseTypeToLabelMap } from '@/lib/databases';
import { DatabaseType } from '@/lib/domain/database-type';
import { Annoyed, Sparkles } from 'lucide-react';

@@ -7,7 +7,7 @@ import type { DatabaseEdition } from '@/lib/domain/database-edition';
import type { DatabaseMetadata } from '@/lib/data/import-metadata/metadata-types/database-metadata';
import { loadDatabaseMetadata } from '@/lib/data/import-metadata/metadata-types/database-metadata';
import type { Diagram } from '@/lib/domain/diagram';
import { loadFromDatabaseMetadata } from '@/lib/domain/diagram';
import { loadFromDatabaseMetadata } from '@/lib/data/import-metadata/import';
import { useChartDB } from '@/hooks/use-chartdb';
import { useRedoUndoStack } from '@/hooks/use-redo-undo-stack';
import { Trans, useTranslation } from 'react-i18next';

@@ -132,7 +132,7 @@ Ref: comments.user_id > users.id // Each comment is written by one user`;
const preprocessedContent = preprocessDBML(content);
const sanitizedContent = sanitizeDBML(preprocessedContent);
const parser = new Parser();
parser.parse(sanitizedContent, 'dbml');
parser.parse(sanitizedContent, 'dbmlv2');
} catch (e) {
const parsedError = parseDBMLError(e);
if (parsedError) {

50  src/hooks/use-click-outside.ts  Normal file
@@ -0,0 +1,50 @@
import { useEffect, useCallback, type RefObject } from 'react';

/**
* Custom hook that handles click outside detection with capture phase
* to work properly with React Flow canvas and other event-stopping elements
*/
export function useClickOutside(
ref: RefObject<HTMLElement>,
handler: () => void,
isActive = true
) {
useEffect(() => {
if (!isActive) return;

const handleClickOutside = (event: MouseEvent) => {
if (ref.current && !ref.current.contains(event.target as Node)) {
handler();
}
};

// Use capture phase to catch events before React Flow or other libraries can stop them
document.addEventListener('mousedown', handleClickOutside, true);

return () => {
document.removeEventListener('mousedown', handleClickOutside, true);
};
}, [ref, handler, isActive]);
}

/**
* Specialized version of useClickOutside for edit mode inputs
* Adds a small delay to prevent race conditions with blur events
*/
export function useEditClickOutside(
inputRef: RefObject<HTMLElement>,
editMode: boolean,
onSave: () => void,
delay = 100
) {
const handleClickOutside = useCallback(() => {
if (editMode) {
// Small delay to ensure any pending state updates are processed
setTimeout(() => {
onSave();
}, delay);
}
}, [editMode, onSave, delay]);

useClickOutside(inputRef, handleClickOutside, editMode);
}
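A minimal, hypothetical usage sketch of the hook above (the component, ref, and `@/hooks/use-click-outside` import path are illustrative assumptions, not part of the diff):

```tsx
import React, { useRef, useState } from 'react';
import { useClickOutside } from '@/hooks/use-click-outside';

// Closes an inline editor when the user mouses down anywhere outside it.
export const InlineEditor: React.FC = () => {
    const [open, setOpen] = useState(true);
    const containerRef = useRef<HTMLDivElement>(null);

    // Handler fires on mousedown (capture phase) outside the ref, only while `open` is true.
    useClickOutside(containerRef, () => setOpen(false), open);

    return open ? <div ref={containerRef}>Editing…</div> : null;
};
```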

142  src/hooks/use-focus-on.ts  Normal file
@@ -0,0 +1,142 @@
import { useCallback } from 'react';
import { useReactFlow } from '@xyflow/react';
import { useLayout } from '@/hooks/use-layout';
import { useBreakpoint } from '@/hooks/use-breakpoint';

interface FocusOptions {
select?: boolean;
}

export const useFocusOn = () => {
const { fitView, setNodes, setEdges } = useReactFlow();
const { hideSidePanel } = useLayout();
const { isMd: isDesktop } = useBreakpoint('md');

const focusOnArea = useCallback(
(areaId: string, options: FocusOptions = {}) => {
const { select = true } = options;

if (select) {
setNodes((nodes) =>
nodes.map((node) =>
node.id === areaId
? {
...node,
selected: true,
}
: {
...node,
selected: false,
}
)
);
}

fitView({
duration: 500,
maxZoom: 1,
minZoom: 1,
nodes: [
{
id: areaId,
},
],
});

if (!isDesktop) {
hideSidePanel();
}
},
[fitView, setNodes, hideSidePanel, isDesktop]
);

const focusOnTable = useCallback(
(tableId: string, options: FocusOptions = {}) => {
const { select = true } = options;

if (select) {
setNodes((nodes) =>
nodes.map((node) =>
node.id === tableId
? {
...node,
selected: true,
}
: {
...node,
selected: false,
}
)
);
}

fitView({
duration: 500,
maxZoom: 1,
minZoom: 1,
nodes: [
{
id: tableId,
},
],
});

if (!isDesktop) {
hideSidePanel();
}
},
[fitView, setNodes, hideSidePanel, isDesktop]
);

const focusOnRelationship = useCallback(
(
relationshipId: string,
sourceTableId: string,
targetTableId: string,
options: FocusOptions = {}
) => {
const { select = true } = options;

if (select) {
setEdges((edges) =>
edges.map((edge) =>
edge.id === relationshipId
? {
...edge,
selected: true,
}
: {
...edge,
selected: false,
}
)
);
}

fitView({
duration: 500,
maxZoom: 1,
minZoom: 1,
nodes: [
{
id: sourceTableId,
},
{
id: targetTableId,
},
],
});

if (!isDesktop) {
hideSidePanel();
}
},
[fitView, setEdges, hideSidePanel, isDesktop]
);

return {
focusOnArea,
focusOnTable,
focusOnRelationship,
};
};
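A sketch of how this hook might be consumed (the toolbar component below is hypothetical and assumes it renders inside the React Flow and layout providers):

```tsx
import React from 'react';
import { useFocusOn } from '@/hooks/use-focus-on';

// Pans/zooms the canvas to a table node and selects it, matching focusOnTable's signature.
export const FocusTableButton: React.FC<{ tableId: string }> = ({ tableId }) => {
    const { focusOnTable } = useFocusOn();

    return (
        <button onClick={() => focusOnTable(tableId, { select: true })}>
            Focus table
        </button>
    );
};
```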

320  src/hooks/use-update-table-field.ts  Normal file
@@ -0,0 +1,320 @@
import { useCallback, useMemo, useState, useEffect } from 'react';
import { useChartDB } from './use-chartdb';
import { useDebounce } from './use-debounce-v2';
import type { DBField, DBTable } from '@/lib/domain';
import type {
SelectBoxOption,
SelectBoxProps,
} from '@/components/select-box/select-box';
import {
dataTypeDataToDataType,
sortedDataTypeMap,
} from '@/lib/data/data-types/data-types';
import { generateDBFieldSuffix } from '@/lib/domain/db-field';
import type { DataTypeData } from '@/lib/data/data-types/data-types';

const generateFieldRegexPatterns = (
dataType: DataTypeData
): {
regex?: string;
extractRegex?: RegExp;
} => {
if (!dataType.fieldAttributes) {
return { regex: undefined, extractRegex: undefined };
}

const typeName = dataType.name;
const fieldAttributes = dataType.fieldAttributes;

if (fieldAttributes.hasCharMaxLength) {
if (fieldAttributes.hasCharMaxLengthOption) {
return {
regex: `^${typeName}\\((\\d+|[mM][aA][xX])\\)$`,
extractRegex: /\((\d+|max)\)/i,
};
}
return {
regex: `^${typeName}\\(\\d+\\)$`,
extractRegex: /\((\d+)\)/,
};
}

if (fieldAttributes.precision && fieldAttributes.scale) {
return {
regex: `^${typeName}\\s*\\(\\s*\\d+\\s*(?:,\\s*\\d+\\s*)?\\)$`,
extractRegex: new RegExp(
`${typeName}\\s*\\(\\s*(\\d+)\\s*(?:,\\s*(\\d+)\\s*)?\\)`
),
};
}

if (fieldAttributes.precision) {
return {
regex: `^${typeName}\\s*\\(\\s*\\d+\\s*\\)$`,
extractRegex: /\((\d+)\)/,
};
}

return { regex: undefined, extractRegex: undefined };
};

export const useUpdateTableField = (
table: DBTable,
field: DBField,
customUpdateField?: (attrs: Partial<DBField>) => void
) => {
const {
databaseType,
customTypes,
updateField: chartDBUpdateField,
removeField: chartDBRemoveField,
} = useChartDB();

// Local state for responsive UI
const [localFieldName, setLocalFieldName] = useState(field.name);
const [localNullable, setLocalNullable] = useState(field.nullable);
const [localPrimaryKey, setLocalPrimaryKey] = useState(field.primaryKey);

// Update local state when field properties change externally
useEffect(() => {
setLocalFieldName(field.name);
setLocalNullable(field.nullable);
setLocalPrimaryKey(field.primaryKey);
}, [field.name, field.nullable, field.primaryKey]);

// Use custom updateField if provided, otherwise use the chartDB one
const updateField = useMemo(
() =>
customUpdateField
? (
_tableId: string,
_fieldId: string,
attrs: Partial<DBField>
) => customUpdateField(attrs)
: chartDBUpdateField,
[customUpdateField, chartDBUpdateField]
);

// Calculate primary key fields for validation
const primaryKeyFields = useMemo(() => {
return table.fields.filter((f) => f.primaryKey);
}, [table.fields]);

const primaryKeyCount = useMemo(
() => primaryKeyFields.length,
[primaryKeyFields.length]
);

// Generate data type options for select box
const dataFieldOptions = useMemo(() => {
const standardTypes: SelectBoxOption[] = sortedDataTypeMap[
databaseType
].map((type) => {
const regexPatterns = generateFieldRegexPatterns(type);

return {
label: type.name,
value: type.id,
regex: regexPatterns.regex,
extractRegex: regexPatterns.extractRegex,
group: customTypes?.length ? 'Standard Types' : undefined,
};
});

if (!customTypes?.length) {
return standardTypes;
}

// Add custom types as options
const customTypeOptions: SelectBoxOption[] = customTypes.map(
(type) => ({
label: type.name,
value: type.name,
description:
type.kind === 'enum' ? `${type.values?.join(' | ')}` : '',
group: 'Custom Types',
})
);

return [...standardTypes, ...customTypeOptions];
}, [databaseType, customTypes]);

// Handle data type change
const handleDataTypeChange = useCallback<
NonNullable<SelectBoxProps['onChange']>
>(
(value, regexMatches) => {
const dataType = sortedDataTypeMap[databaseType].find(
(v) => v.id === value
) ?? {
id: value as string,
name: value as string,
};

let characterMaximumLength: string | undefined = undefined;
let precision: number | undefined = undefined;
let scale: number | undefined = undefined;

if (regexMatches?.length) {
if (dataType?.fieldAttributes?.hasCharMaxLength) {
characterMaximumLength = regexMatches[1]?.toLowerCase();
} else if (
dataType?.fieldAttributes?.precision &&
dataType?.fieldAttributes?.scale
) {
precision = parseInt(regexMatches[1]);
scale = regexMatches[2]
? parseInt(regexMatches[2])
: undefined;
} else if (dataType?.fieldAttributes?.precision) {
precision = parseInt(regexMatches[1]);
}
} else {
if (
dataType?.fieldAttributes?.hasCharMaxLength &&
field.characterMaximumLength
) {
characterMaximumLength = field.characterMaximumLength;
}

if (dataType?.fieldAttributes?.precision && field.precision) {
precision = field.precision;
}

if (dataType?.fieldAttributes?.scale && field.scale) {
scale = field.scale;
}
}

updateField(table.id, field.id, {
characterMaximumLength,
precision,
scale,
increment: undefined,
default: undefined,
type: dataTypeDataToDataType(
dataType ?? {
id: value as string,
name: value as string,
}
),
});
},
[
updateField,
databaseType,
field.characterMaximumLength,
field.precision,
field.scale,
field.id,
table.id,
]
);

// Debounced update for field name
const debouncedNameUpdate = useDebounce(
useCallback(
(value: string) => {
if (value.trim() !== field.name) {
updateField(table.id, field.id, { name: value });
}
},
[updateField, table.id, field.id, field.name]
),
300 // 300ms debounce for text input
);

// Debounced update for nullable toggle
const debouncedNullableUpdate = useDebounce(
useCallback(
(value: boolean) => {
updateField(table.id, field.id, { nullable: value });
},
[updateField, table.id, field.id]
),
100 // 100ms debounce for toggle
);

// Debounced update for primary key toggle
const debouncedPrimaryKeyUpdate = useDebounce(
useCallback(
(value: boolean, primaryKeyCount: number) => {
if (value) {
// When setting as primary key
const updates: Partial<DBField> = {
primaryKey: true,
};
// Only auto-set unique if this will be the only primary key
if (primaryKeyCount === 0) {
updates.unique = true;
}
updateField(table.id, field.id, updates);
} else {
// When removing primary key
updateField(table.id, field.id, {
primaryKey: false,
});
}
},
[updateField, table.id, field.id]
),
100 // 100ms debounce for toggle
);

// Handle primary key toggle with optimistic update
const handlePrimaryKeyToggle = useCallback(
(value: boolean) => {
setLocalPrimaryKey(value);
debouncedPrimaryKeyUpdate(value, primaryKeyCount);
},
[primaryKeyCount, debouncedPrimaryKeyUpdate]
);

// Handle nullable toggle with optimistic update
const handleNullableToggle = useCallback(
(value: boolean) => {
setLocalNullable(value);
debouncedNullableUpdate(value);
},
[debouncedNullableUpdate]
);

// Handle name change with optimistic update
const handleNameChange = useCallback(
(value: string) => {
setLocalFieldName(value);
debouncedNameUpdate(value);
},
[debouncedNameUpdate]
);

// Utility function to generate field suffix for display
const generateFieldSuffix = useCallback(
(typeId?: string) => {
return generateDBFieldSuffix(field, {
databaseType,
forceExtended: true,
typeId,
});
},
[field, databaseType]
);

const removeField = useCallback(() => {
chartDBRemoveField(table.id, field.id);
}, [chartDBRemoveField, table.id, field.id]);

return {
dataFieldOptions,
handleDataTypeChange,
handlePrimaryKeyToggle,
handleNullableToggle,
handleNameChange,
generateFieldSuffix,
primaryKeyCount,
fieldName: localFieldName,
nullable: localNullable,
primaryKey: localPrimaryKey,
removeField,
};
};
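A sketch of how a field-row editor might wire up the returned handlers (the component markup is an illustrative assumption):

```tsx
import React from 'react';
import type { DBField, DBTable } from '@/lib/domain';
import { useUpdateTableField } from '@/hooks/use-update-table-field';

// Local state keeps typing responsive while the debounced handlers push changes into ChartDB.
export const FieldRow: React.FC<{ table: DBTable; field: DBField }> = ({ table, field }) => {
    const { fieldName, nullable, handleNameChange, handleNullableToggle, removeField } =
        useUpdateTableField(table, field);

    return (
        <div>
            <input value={fieldName} onChange={(e) => handleNameChange(e.target.value)} />
            <input
                type="checkbox"
                checked={nullable}
                onChange={(e) => handleNullableToggle(e.target.checked)}
            />
            <button onClick={removeField}>Remove</button>
        </div>
    );
};
```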
42
src/hooks/use-update-table.ts
Normal file
@@ -0,0 +1,42 @@
import { useCallback, useState, useEffect } from 'react';
import { useChartDB } from './use-chartdb';
import { useDebounce } from './use-debounce-v2';
import type { DBTable } from '@/lib/domain';

// Hook for updating table properties with debouncing for performance
export const useUpdateTable = (table: DBTable) => {
    const { updateTable: chartDBUpdateTable } = useChartDB();
    const [localTableName, setLocalTableName] = useState(table.name);

    // Debounced update function
    const debouncedUpdate = useDebounce(
        useCallback(
            (value: string) => {
                if (value.trim() && value.trim() !== table.name) {
                    chartDBUpdateTable(table.id, { name: value.trim() });
                }
            },
            [chartDBUpdateTable, table.id, table.name]
        ),
        1000 // 1000ms debounce
    );

    // Update local state immediately for responsive UI
    const handleTableNameChange = useCallback(
        (value: string) => {
            setLocalTableName(value);
            debouncedUpdate(value);
        },
        [debouncedUpdate]
    );

    // Update local state when table name changes externally
    useEffect(() => {
        setLocalTableName(table.name);
    }, [table.name]);

    return {
        tableName: localTableName,
        handleTableNameChange,
    };
};
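As a quick illustration of how this hook might be consumed, here is a hypothetical table-name input; the component name and markup are invented for the example and are not taken from the ChartDB UI.

import React from 'react';
import { useUpdateTable } from '@/hooks/use-update-table';
import type { DBTable } from '@/lib/domain';

// Hypothetical consumer: typing stays responsive via local state while the
// debounced hook writes the trimmed name back to the diagram store.
export const TableNameInput: React.FC<{ table: DBTable }> = ({ table }) => {
    const { tableName, handleTableNameChange } = useUpdateTable(table);

    return (
        <input
            value={tableName}
            onChange={(e) => handleTableNameChange(e.target.value)}
            placeholder="Table name"
        />
    );
};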
@@ -14,13 +14,13 @@ export const ar: LanguageTranslation = {
|
||||
menu: {
|
||||
actions: {
|
||||
actions: 'الإجراءات',
|
||||
new: 'مخطط جديد',
|
||||
new: 'جديد...',
|
||||
browse: 'تصفح...',
|
||||
save: 'حفظ',
|
||||
import: 'استيراد قاعدة بيانات',
|
||||
export_sql: 'SQL تصدير',
|
||||
export_as: 'تصدير كـ',
|
||||
delete_diagram: 'حذف الرسم البياني',
|
||||
delete_diagram: 'حذف',
|
||||
},
|
||||
edit: {
|
||||
edit: 'تحرير',
|
||||
@@ -74,10 +74,10 @@ export const ar: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'إعادة ترتيب الرسم البياني',
|
||||
title: 'ترتيب تلقائي للرسم البياني',
|
||||
description:
|
||||
'هذا الإجراء سيقوم بإعادة ترتيب الجداول في المخطط بشكل تلقائي. هل تريد المتابعة؟',
|
||||
reorder: 'إعادة ترتيب',
|
||||
reorder: 'ترتيب تلقائي',
|
||||
cancel: 'إلغاء',
|
||||
},
|
||||
|
||||
@@ -248,6 +248,7 @@ export const ar: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'لم يتم تحديد قيم التعداد',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -270,7 +271,7 @@ export const ar: LanguageTranslation = {
|
||||
show_all: 'عرض الكل',
|
||||
undo: 'تراجع',
|
||||
redo: 'إعادة',
|
||||
reorder_diagram: 'إعادة ترتيب الرسم البياني',
|
||||
reorder_diagram: 'ترتيب تلقائي للرسم البياني',
|
||||
highlight_overlapping_tables: 'تمييز الجداول المتداخلة',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -313,7 +314,7 @@ export const ar: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'فتح مخطط',
|
||||
title: 'فتح قاعدة بيانات',
|
||||
description: 'اختر مخططًا لفتحه من القائمة ادناه',
|
||||
table_columns: {
|
||||
name: 'الإسم',
|
||||
@@ -327,7 +328,7 @@ export const ar: LanguageTranslation = {
|
||||
diagram_actions: {
|
||||
open: 'فتح',
|
||||
duplicate: 'تكرار',
|
||||
delete: 'حذف الرسم التخطيطي',
|
||||
delete: 'حذف',
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -14,13 +14,13 @@ export const bn: LanguageTranslation = {
|
||||
menu: {
|
||||
actions: {
|
||||
actions: 'কার্য',
|
||||
new: 'নতুন ডায়াগ্রাম',
|
||||
new: 'নতুন...',
|
||||
browse: 'ব্রাউজ করুন...',
|
||||
save: 'সংরক্ষণ করুন',
|
||||
import: 'ডাটাবেস আমদানি করুন',
|
||||
export_sql: 'SQL রপ্তানি করুন',
|
||||
export_as: 'রূপে রপ্তানি করুন',
|
||||
delete_diagram: 'ডায়াগ্রাম মুছুন',
|
||||
delete_diagram: 'মুছুন',
|
||||
},
|
||||
edit: {
|
||||
edit: 'সম্পাদনা',
|
||||
@@ -75,10 +75,10 @@ export const bn: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'ডায়াগ্রাম পুনর্বিন্যাস করুন',
|
||||
title: 'স্বয়ংক্রিয় ডায়াগ্রাম সাজান',
|
||||
description:
|
||||
'এই কাজটি ডায়াগ্রামের সমস্ত টেবিল পুনর্বিন্যাস করবে। আপনি কি চালিয়ে যেতে চান?',
|
||||
reorder: 'পুনর্বিন্যাস করুন',
|
||||
reorder: 'স্বয়ংক্রিয় সাজান',
|
||||
cancel: 'বাতিল করুন',
|
||||
},
|
||||
|
||||
@@ -249,6 +249,7 @@ export const bn: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'কোন enum মান সংজ্ঞায়িত নেই',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -271,7 +272,7 @@ export const bn: LanguageTranslation = {
|
||||
show_all: 'সব দেখান',
|
||||
undo: 'পূর্বাবস্থায় ফিরুন',
|
||||
redo: 'পুনরায় করুন',
|
||||
reorder_diagram: 'ডায়াগ্রাম পুনর্বিন্যাস করুন',
|
||||
reorder_diagram: 'স্বয়ংক্রিয় ডায়াগ্রাম সাজান',
|
||||
highlight_overlapping_tables: 'ওভারল্যাপিং টেবিল হাইলাইট করুন',
|
||||
|
||||
// TODO: Translate
|
||||
@@ -315,7 +316,7 @@ export const bn: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'চিত্র খুলুন',
|
||||
title: 'ডেটাবেস খুলুন',
|
||||
description: 'নিচের তালিকা থেকে একটি চিত্র নির্বাচন করুন।',
|
||||
table_columns: {
|
||||
name: 'নাম',
|
||||
@@ -329,7 +330,7 @@ export const bn: LanguageTranslation = {
|
||||
diagram_actions: {
|
||||
open: 'খুলুন',
|
||||
duplicate: 'ডুপ্লিকেট',
|
||||
delete: 'ডায়াগ্রাম মুছুন',
|
||||
delete: 'মুছুন',
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -14,13 +14,13 @@ export const de: LanguageTranslation = {
|
||||
menu: {
|
||||
actions: {
|
||||
actions: 'Aktionen',
|
||||
new: 'Neues Diagramm',
|
||||
new: 'Neu...',
|
||||
browse: 'Durchsuchen...',
|
||||
save: 'Speichern',
|
||||
import: 'Datenbank importieren',
|
||||
export_sql: 'SQL exportieren',
|
||||
export_as: 'Exportieren als',
|
||||
delete_diagram: 'Diagramm löschen',
|
||||
delete_diagram: 'Löschen',
|
||||
},
|
||||
edit: {
|
||||
edit: 'Bearbeiten',
|
||||
@@ -75,10 +75,10 @@ export const de: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'Diagramm neu anordnen',
|
||||
title: 'Diagramm automatisch anordnen',
|
||||
description:
|
||||
'Diese Aktion wird alle Tabellen im Diagramm neu anordnen. Möchten Sie fortfahren?',
|
||||
reorder: 'Neu anordnen',
|
||||
reorder: 'Automatisch anordnen',
|
||||
cancel: 'Abbrechen',
|
||||
},
|
||||
|
||||
@@ -250,6 +250,7 @@ export const de: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'Keine Enum-Werte definiert',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -272,7 +273,7 @@ export const de: LanguageTranslation = {
|
||||
show_all: 'Alle anzeigen',
|
||||
undo: 'Rückgängig',
|
||||
redo: 'Wiederholen',
|
||||
reorder_diagram: 'Diagramm neu anordnen',
|
||||
reorder_diagram: 'Diagramm automatisch anordnen',
|
||||
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
@@ -318,7 +319,7 @@ export const de: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'Diagramm öffnen',
|
||||
title: 'Datenbank öffnen',
|
||||
description: 'Wählen Sie ein Diagramm aus der Liste unten aus.',
|
||||
table_columns: {
|
||||
name: 'Name',
|
||||
@@ -332,7 +333,7 @@ export const de: LanguageTranslation = {
|
||||
diagram_actions: {
|
||||
open: 'Öffnen',
|
||||
duplicate: 'Duplizieren',
|
||||
delete: 'Diagramm löschen',
|
||||
delete: 'Löschen',
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -14,13 +14,13 @@ export const en = {
    menu: {
        actions: {
            actions: 'Actions',
            new: 'New Diagram',
            new: 'New...',
            browse: 'Browse...',
            save: 'Save',
            import: 'Import',
            export_sql: 'Export SQL',
            export_as: 'Export as',
            delete_diagram: 'Delete Diagram',
            delete_diagram: 'Delete',
        },
        edit: {
            edit: 'Edit',
@@ -73,10 +73,10 @@ export const en = {
    },

    reorder_diagram_alert: {
        title: 'Reorder Diagram',
        title: 'Auto Arrange Diagram',
        description:
            'This action will rearrange all tables in the diagram. Do you want to continue?',
        reorder: 'Reorder',
        reorder: 'Auto Arrange',
        cancel: 'Cancel',
    },

@@ -242,6 +242,7 @@ export const en = {
        enum_values: 'Enum Values',
        composite_fields: 'Fields',
        no_fields: 'No fields defined',
        no_values: 'No enum values defined',
        field_name_placeholder: 'Field name',
        field_type_placeholder: 'Select type',
        add_field: 'Add Field',
@@ -264,7 +265,7 @@ export const en = {
        show_all: 'Show All',
        undo: 'Undo',
        redo: 'Redo',
        reorder_diagram: 'Reorder Diagram',
        reorder_diagram: 'Auto Arrange Diagram',
        highlight_overlapping_tables: 'Highlight Overlapping Tables',
        clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
        custom_type_highlight_tooltip:
@@ -306,7 +307,7 @@ export const en = {
    },

    open_diagram_dialog: {
        title: 'Open Diagram',
        title: 'Open Database',
        description: 'Select a diagram to open from the list below.',
        table_columns: {
            name: 'Name',
@@ -320,7 +321,7 @@ export const en = {
        diagram_actions: {
            open: 'Open',
            duplicate: 'Duplicate',
            delete: 'Delete Diagram',
            delete: 'Delete',
        },
    },

@@ -14,13 +14,13 @@ export const es: LanguageTranslation = {
|
||||
menu: {
|
||||
actions: {
|
||||
actions: 'Acciones',
|
||||
new: 'Nuevo Diagrama',
|
||||
new: 'Nuevo...',
|
||||
browse: 'Examinar...',
|
||||
save: 'Guardar',
|
||||
import: 'Importar Base de Datos',
|
||||
export_sql: 'Exportar SQL',
|
||||
export_as: 'Exportar como',
|
||||
delete_diagram: 'Eliminar Diagrama',
|
||||
delete_diagram: 'Eliminar',
|
||||
},
|
||||
edit: {
|
||||
edit: 'Editar',
|
||||
@@ -74,10 +74,10 @@ export const es: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'Reordenar Diagrama',
|
||||
title: 'Organizar Diagrama Automáticamente',
|
||||
description:
|
||||
'Esta acción reorganizará todas las tablas en el diagrama. ¿Deseas continuar?',
|
||||
reorder: 'Reordenar',
|
||||
reorder: 'Organizar Automáticamente',
|
||||
cancel: 'Cancelar',
|
||||
},
|
||||
|
||||
@@ -248,6 +248,7 @@ export const es: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'No hay valores de enum definidos',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -270,7 +271,7 @@ export const es: LanguageTranslation = {
|
||||
show_all: 'Mostrar Todo',
|
||||
undo: 'Deshacer',
|
||||
redo: 'Rehacer',
|
||||
reorder_diagram: 'Reordenar Diagrama',
|
||||
reorder_diagram: 'Organizar Diagrama Automáticamente',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -315,7 +316,7 @@ export const es: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'Abrir Diagrama',
|
||||
title: 'Abrir Base de Datos',
|
||||
description:
|
||||
'Selecciona un diagrama para abrir de la lista a continuación.',
|
||||
table_columns: {
|
||||
@@ -330,7 +331,7 @@ export const es: LanguageTranslation = {
|
||||
diagram_actions: {
|
||||
open: 'Abrir',
|
||||
duplicate: 'Duplicar',
|
||||
delete: 'Eliminar Diagrama',
|
||||
delete: 'Eliminar',
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -14,13 +14,13 @@ export const fr: LanguageTranslation = {
|
||||
menu: {
|
||||
actions: {
|
||||
actions: 'Actions',
|
||||
new: 'Nouveau Diagramme',
|
||||
new: 'Nouveau...',
|
||||
browse: 'Parcourir...',
|
||||
save: 'Enregistrer',
|
||||
import: 'Importer Base de Données',
|
||||
export_sql: 'Exporter SQL',
|
||||
export_as: 'Exporter en tant que',
|
||||
delete_diagram: 'Supprimer le Diagramme',
|
||||
delete_diagram: 'Supprimer',
|
||||
},
|
||||
edit: {
|
||||
edit: 'Édition',
|
||||
@@ -73,10 +73,10 @@ export const fr: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'Réorganiser le Diagramme',
|
||||
title: 'Organiser Automatiquement le Diagramme',
|
||||
description:
|
||||
'Cette action réorganisera toutes les tables dans le diagramme. Voulez-vous continuer ?',
|
||||
reorder: 'Réorganiser',
|
||||
reorder: 'Organiser Automatiquement',
|
||||
cancel: 'Annuler',
|
||||
},
|
||||
|
||||
@@ -246,6 +246,7 @@ export const fr: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: "Aucune valeur d'énumération définie",
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -268,7 +269,7 @@ export const fr: LanguageTranslation = {
|
||||
show_all: 'Afficher Tout',
|
||||
undo: 'Annuler',
|
||||
redo: 'Rétablir',
|
||||
reorder_diagram: 'Réorganiser le Diagramme',
|
||||
reorder_diagram: 'Organiser Automatiquement le Diagramme',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -312,7 +313,7 @@ export const fr: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'Ouvrir Diagramme',
|
||||
title: 'Ouvrir Base de Données',
|
||||
description:
|
||||
'Sélectionnez un diagramme à ouvrir dans la liste ci-dessous.',
|
||||
table_columns: {
|
||||
@@ -327,7 +328,7 @@ export const fr: LanguageTranslation = {
|
||||
diagram_actions: {
|
||||
open: 'Ouvrir',
|
||||
duplicate: 'Dupliquer',
|
||||
delete: 'Supprimer le diagramme',
|
||||
delete: 'Supprimer',
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -14,13 +14,13 @@ export const gu: LanguageTranslation = {
|
||||
menu: {
|
||||
actions: {
|
||||
actions: 'ક્રિયાઓ',
|
||||
new: 'નવું ડાયાગ્રામ',
|
||||
new: 'નવું...',
|
||||
browse: 'બ્રાઉજ કરો...',
|
||||
save: 'સાચવો',
|
||||
import: 'ડેટાબેસ આયાત કરો',
|
||||
export_sql: 'SQL નિકાસ કરો',
|
||||
export_as: 'રૂપે નિકાસ કરો',
|
||||
delete_diagram: 'ડાયાગ્રામ કાઢી નાખો',
|
||||
delete_diagram: 'કાઢી નાખો',
|
||||
},
|
||||
edit: {
|
||||
edit: 'ફેરફાર',
|
||||
@@ -75,10 +75,10 @@ export const gu: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'ડાયાગ્રામ ફરી વ્યવસ્થિત કરો',
|
||||
title: 'ડાયાગ્રામ ઑટોમેટિક ગોઠવો',
|
||||
description:
|
||||
'આ ક્રિયા ડાયાગ્રામમાં બધી ટેબલ્સને ફરીથી વ્યવસ્થિત કરશે. શું તમે ચાલુ રાખવા માંગો છો?',
|
||||
reorder: 'ફરી વ્યવસ્થિત કરો',
|
||||
reorder: 'ઑટોમેટિક ગોઠવો',
|
||||
cancel: 'રદ કરો',
|
||||
},
|
||||
|
||||
@@ -250,6 +250,7 @@ export const gu: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'કોઈ enum મૂલ્યો વ્યાખ્યાયિત નથી',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -272,7 +273,7 @@ export const gu: LanguageTranslation = {
|
||||
show_all: 'બધું બતાવો',
|
||||
undo: 'અનડુ',
|
||||
redo: 'રીડુ',
|
||||
reorder_diagram: 'ડાયાગ્રામ ફરીથી વ્યવસ્થિત કરો',
|
||||
reorder_diagram: 'ડાયાગ્રામ ઑટોમેટિક ગોઠવો',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -315,7 +316,7 @@ export const gu: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'ડાયાગ્રામ ખોલો',
|
||||
title: 'ડેટાબેસ ખોલો',
|
||||
description: 'નીચેની યાદીમાંથી એક ડાયાગ્રામ પસંદ કરો.',
|
||||
table_columns: {
|
||||
name: 'નામ',
|
||||
@@ -329,7 +330,7 @@ export const gu: LanguageTranslation = {
|
||||
diagram_actions: {
|
||||
open: 'ખોલો',
|
||||
duplicate: 'ડુપ્લિકેટ',
|
||||
delete: 'ડાયાગ્રામ કાઢી નાખો',
|
||||
delete: 'કાઢી નાખો',
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -14,13 +14,13 @@ export const hi: LanguageTranslation = {
|
||||
menu: {
|
||||
actions: {
|
||||
actions: 'कार्य',
|
||||
new: 'नया आरेख',
|
||||
new: 'नया...',
|
||||
browse: 'ब्राउज़ करें...',
|
||||
save: 'सहेजें',
|
||||
import: 'डेटाबेस आयात करें',
|
||||
export_sql: 'SQL निर्यात करें',
|
||||
export_as: 'के रूप में निर्यात करें',
|
||||
delete_diagram: 'आरेख हटाएँ',
|
||||
delete_diagram: 'हटाएँ',
|
||||
},
|
||||
edit: {
|
||||
edit: 'संपादित करें',
|
||||
@@ -74,10 +74,10 @@ export const hi: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'आरेख पुनः व्यवस्थित करें',
|
||||
title: 'आरेख स्वचालित व्यवस्थित करें',
|
||||
description:
|
||||
'यह क्रिया आरेख में सभी तालिकाओं को पुनः व्यवस्थित कर देगी। क्या आप जारी रखना चाहते हैं?',
|
||||
reorder: 'पुनः व्यवस्थित करें',
|
||||
reorder: 'स्वचालित व्यवस्थित करें',
|
||||
cancel: 'रद्द करें',
|
||||
},
|
||||
|
||||
@@ -249,6 +249,7 @@ export const hi: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'कोई enum मान परिभाषित नहीं',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -271,7 +272,7 @@ export const hi: LanguageTranslation = {
|
||||
show_all: 'सभी दिखाएँ',
|
||||
undo: 'पूर्ववत करें',
|
||||
redo: 'पुनः करें',
|
||||
reorder_diagram: 'आरेख पुनः व्यवस्थित करें',
|
||||
reorder_diagram: 'आरेख स्वचालित व्यवस्थित करें',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -317,7 +318,7 @@ export const hi: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'आरेख खोलें',
|
||||
title: 'डेटाबेस खोलें',
|
||||
description: 'नीचे दी गई सूची से एक आरेख चुनें।',
|
||||
table_columns: {
|
||||
name: 'नाम',
|
||||
@@ -331,7 +332,7 @@ export const hi: LanguageTranslation = {
|
||||
diagram_actions: {
|
||||
open: 'खोलें',
|
||||
duplicate: 'डुप्लिकेट',
|
||||
delete: 'डायग्राम हटाएं',
|
||||
delete: 'हटाएं',
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -14,13 +14,13 @@ export const hr: LanguageTranslation = {
|
||||
menu: {
|
||||
actions: {
|
||||
actions: 'Akcije',
|
||||
new: 'Novi Dijagram',
|
||||
new: 'Novi...',
|
||||
browse: 'Pregledaj...',
|
||||
save: 'Spremi',
|
||||
import: 'Uvezi',
|
||||
export_sql: 'Izvezi SQL',
|
||||
export_as: 'Izvezi kao',
|
||||
delete_diagram: 'Izbriši dijagram',
|
||||
delete_diagram: 'Izbriši',
|
||||
},
|
||||
edit: {
|
||||
edit: 'Uredi',
|
||||
@@ -73,10 +73,10 @@ export const hr: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'Preuredi dijagram',
|
||||
title: 'Automatski preuredi dijagram',
|
||||
description:
|
||||
'Ova radnja će preurediti sve tablice u dijagramu. Želite li nastaviti?',
|
||||
reorder: 'Preuredi',
|
||||
reorder: 'Automatski preuredi',
|
||||
cancel: 'Odustani',
|
||||
},
|
||||
|
||||
@@ -245,6 +245,7 @@ export const hr: LanguageTranslation = {
|
||||
enum_values: 'Enum vrijednosti',
|
||||
composite_fields: 'Polja',
|
||||
no_fields: 'Nema definiranih polja',
|
||||
no_values: 'Nema definiranih enum vrijednosti',
|
||||
field_name_placeholder: 'Naziv polja',
|
||||
field_type_placeholder: 'Odaberi tip',
|
||||
add_field: 'Dodaj polje',
|
||||
@@ -268,7 +269,7 @@ export const hr: LanguageTranslation = {
|
||||
show_all: 'Prikaži sve',
|
||||
undo: 'Poništi',
|
||||
redo: 'Ponovi',
|
||||
reorder_diagram: 'Preuredi dijagram',
|
||||
reorder_diagram: 'Automatski preuredi dijagram',
|
||||
highlight_overlapping_tables: 'Istakni preklapajuće tablice',
|
||||
clear_custom_type_highlight: 'Ukloni isticanje za "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -310,7 +311,7 @@ export const hr: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'Otvori dijagram',
|
||||
title: 'Otvori bazu podataka',
|
||||
description: 'Odaberite dijagram za otvaranje iz popisa ispod.',
|
||||
table_columns: {
|
||||
name: 'Naziv',
|
||||
@@ -324,7 +325,7 @@ export const hr: LanguageTranslation = {
|
||||
diagram_actions: {
|
||||
open: 'Otvori',
|
||||
duplicate: 'Dupliciraj',
|
||||
delete: 'Obriši dijagram',
|
||||
delete: 'Obriši',
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -14,13 +14,13 @@ export const id_ID: LanguageTranslation = {
|
||||
menu: {
|
||||
actions: {
|
||||
actions: 'Aksi',
|
||||
new: 'Diagram Baru',
|
||||
new: 'Baru...',
|
||||
browse: 'Jelajahi...',
|
||||
save: 'Simpan',
|
||||
import: 'Impor Database',
|
||||
export_sql: 'Ekspor SQL',
|
||||
export_as: 'Ekspor Sebagai',
|
||||
delete_diagram: 'Hapus Diagram',
|
||||
delete_diagram: 'Hapus',
|
||||
},
|
||||
edit: {
|
||||
edit: 'Ubah',
|
||||
@@ -74,10 +74,10 @@ export const id_ID: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'Atur Ulang Diagram',
|
||||
title: 'Atur Otomatis Diagram',
|
||||
description:
|
||||
'Tindakan ini akan mengatur ulang semua tabel di diagram. Apakah Anda ingin melanjutkan?',
|
||||
reorder: 'Atur Ulang',
|
||||
reorder: 'Atur Otomatis',
|
||||
cancel: 'Batal',
|
||||
},
|
||||
|
||||
@@ -248,6 +248,7 @@ export const id_ID: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'Tidak ada nilai enum yang ditentukan',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -270,7 +271,7 @@ export const id_ID: LanguageTranslation = {
|
||||
show_all: 'Tampilkan Semua',
|
||||
undo: 'Undo',
|
||||
redo: 'Redo',
|
||||
reorder_diagram: 'Atur Ulang Diagram',
|
||||
reorder_diagram: 'Atur Otomatis Diagram',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -314,7 +315,7 @@ export const id_ID: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'Buka Diagram',
|
||||
title: 'Buka Database',
|
||||
description: 'Pilih diagram untuk dibuka dari daftar di bawah.',
|
||||
table_columns: {
|
||||
name: 'Name',
|
||||
@@ -328,7 +329,7 @@ export const id_ID: LanguageTranslation = {
|
||||
diagram_actions: {
|
||||
open: 'Buka',
|
||||
duplicate: 'Duplikat',
|
||||
delete: 'Hapus Diagram',
|
||||
delete: 'Hapus',
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -14,13 +14,13 @@ export const ja: LanguageTranslation = {
|
||||
menu: {
|
||||
actions: {
|
||||
actions: 'アクション',
|
||||
new: '新しいダイアグラム',
|
||||
new: '新規...',
|
||||
browse: '参照...',
|
||||
save: '保存',
|
||||
import: 'データベースをインポート',
|
||||
export_sql: 'SQLをエクスポート',
|
||||
export_as: '形式を指定してエクスポート',
|
||||
delete_diagram: 'ダイアグラムを削除',
|
||||
delete_diagram: '削除',
|
||||
},
|
||||
edit: {
|
||||
edit: '編集',
|
||||
@@ -76,10 +76,10 @@ export const ja: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'ダイアグラムを並べ替え',
|
||||
title: 'ダイアグラムを自動配置',
|
||||
description:
|
||||
'この操作によりダイアグラム内のすべてのテーブルが再配置されます。続行しますか?',
|
||||
reorder: '並べ替え',
|
||||
reorder: '自動配置',
|
||||
cancel: 'キャンセル',
|
||||
},
|
||||
|
||||
@@ -253,6 +253,7 @@ export const ja: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: '列挙値が定義されていません',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -275,7 +276,7 @@ export const ja: LanguageTranslation = {
|
||||
show_all: 'すべて表示',
|
||||
undo: '元に戻す',
|
||||
redo: 'やり直し',
|
||||
reorder_diagram: 'ダイアグラムを並べ替え',
|
||||
reorder_diagram: 'ダイアグラムを自動配置',
|
||||
// TODO: Translate
|
||||
highlight_overlapping_tables: 'Highlight Overlapping Tables',
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
@@ -319,7 +320,7 @@ export const ja: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'ダイアグラムを開く',
|
||||
title: 'データベースを開く',
|
||||
description: '以下のリストからダイアグラムを選択してください。',
|
||||
table_columns: {
|
||||
name: '名前',
|
||||
@@ -333,7 +334,7 @@ export const ja: LanguageTranslation = {
|
||||
diagram_actions: {
|
||||
open: '開く',
|
||||
duplicate: '複製',
|
||||
delete: 'ダイアグラムを削除',
|
||||
delete: '削除',
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -14,13 +14,13 @@ export const ko_KR: LanguageTranslation = {
|
||||
menu: {
|
||||
actions: {
|
||||
actions: '작업',
|
||||
new: '새 다이어그램',
|
||||
new: '새로 만들기...',
|
||||
browse: '찾아보기...',
|
||||
save: '저장',
|
||||
import: '데이터베이스 가져오기',
|
||||
export_sql: 'SQL로 저장',
|
||||
export_as: '다른 형식으로 저장',
|
||||
delete_diagram: '다이어그램 삭제',
|
||||
delete_diagram: '삭제',
|
||||
},
|
||||
edit: {
|
||||
edit: '편집',
|
||||
@@ -74,10 +74,10 @@ export const ko_KR: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: '다이어그램 재정렬',
|
||||
title: '다이어그램 자동 정렬',
|
||||
description:
|
||||
'이 작업은 모든 다이어그램이 재정렬됩니다. 계속하시겠습니까?',
|
||||
reorder: '재정렬',
|
||||
reorder: '자동 정렬',
|
||||
cancel: '취소',
|
||||
},
|
||||
|
||||
@@ -248,6 +248,7 @@ export const ko_KR: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: '정의된 열거형 값이 없습니다',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -270,7 +271,7 @@ export const ko_KR: LanguageTranslation = {
|
||||
show_all: '전체 저장',
|
||||
undo: '실행 취소',
|
||||
redo: '다시 실행',
|
||||
reorder_diagram: '다이어그램 재정렬',
|
||||
reorder_diagram: '다이어그램 자동 정렬',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -314,7 +315,7 @@ export const ko_KR: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: '다이어그램 열기',
|
||||
title: '데이터베이스 열기',
|
||||
description: '아래의 목록에서 다이어그램을 선택하세요.',
|
||||
table_columns: {
|
||||
name: '이름',
|
||||
@@ -328,7 +329,7 @@ export const ko_KR: LanguageTranslation = {
|
||||
diagram_actions: {
|
||||
open: '열기',
|
||||
duplicate: '복제',
|
||||
delete: '다이어그램 삭제',
|
||||
delete: '삭제',
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -14,13 +14,13 @@ export const mr: LanguageTranslation = {
|
||||
menu: {
|
||||
actions: {
|
||||
actions: 'क्रिया',
|
||||
new: 'नवीन आरेख',
|
||||
new: 'नवीन...',
|
||||
browse: 'ब्राउज करा...',
|
||||
save: 'जतन करा',
|
||||
import: 'डेटाबेस इम्पोर्ट करा',
|
||||
export_sql: 'SQL एक्स्पोर्ट करा',
|
||||
export_as: 'म्हणून एक्स्पोर्ट करा',
|
||||
delete_diagram: 'आरेख हटवा',
|
||||
delete_diagram: 'हटवा',
|
||||
},
|
||||
edit: {
|
||||
edit: 'संपादन करा',
|
||||
@@ -75,10 +75,10 @@ export const mr: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'आरेख पुनःक्रमित करा',
|
||||
title: 'आरेख स्वयंचलित व्यवस्थित करा',
|
||||
description:
|
||||
'ही क्रिया आरेखातील सर्व टेबल्सची पुनर्रचना करेल. तुम्हाला पुढे जायचे आहे का?',
|
||||
reorder: 'पुनःक्रमित करा',
|
||||
reorder: 'स्वयंचलित व्यवस्थित करा',
|
||||
cancel: 'रद्द करा',
|
||||
},
|
||||
|
||||
@@ -252,6 +252,7 @@ export const mr: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'कोणतीही enum मूल्ये परिभाषित नाहीत',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -274,7 +275,7 @@ export const mr: LanguageTranslation = {
|
||||
show_all: 'सर्व दाखवा',
|
||||
undo: 'पूर्ववत करा',
|
||||
redo: 'पुन्हा करा',
|
||||
reorder_diagram: 'आरेख पुनःक्रमित करा',
|
||||
reorder_diagram: 'आरेख स्वयंचलित व्यवस्थित करा',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -320,7 +321,7 @@ export const mr: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'आरेख उघडा',
|
||||
title: 'डेटाबेस उघडा',
|
||||
description: 'खालील यादीतून उघडण्यासाठी एक आरेख निवडा.',
|
||||
table_columns: {
|
||||
name: 'नाव',
|
||||
@@ -334,7 +335,7 @@ export const mr: LanguageTranslation = {
|
||||
diagram_actions: {
|
||||
open: 'उघडा',
|
||||
duplicate: 'डुप्लिकेट',
|
||||
delete: 'आरेख हटवा',
|
||||
delete: 'हटवा',
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -14,13 +14,13 @@ export const ne: LanguageTranslation = {
|
||||
menu: {
|
||||
actions: {
|
||||
actions: 'कार्यहरू',
|
||||
new: 'नयाँ डायाग्राम',
|
||||
new: 'नयाँ...',
|
||||
browse: 'ब्राउज गर्नुहोस्...',
|
||||
save: 'सुरक्षित गर्नुहोस्',
|
||||
import: 'डाटाबेस आयात गर्नुहोस्',
|
||||
export_sql: 'SQL निर्यात गर्नुहोस्',
|
||||
export_as: 'निर्यात गर्नुहोस्',
|
||||
delete_diagram: 'डायाग्राम हटाउनुहोस्',
|
||||
delete_diagram: 'हटाउनुहोस्',
|
||||
},
|
||||
edit: {
|
||||
edit: 'सम्पादन',
|
||||
@@ -75,10 +75,10 @@ export const ne: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'डायाग्राम पुनः क्रमबद्ध गर्नुहोस्',
|
||||
title: 'डायाग्राम स्वचालित मिलाउनुहोस्',
|
||||
description:
|
||||
'यो कार्य पूर्ववत गर्न सकिँदैन। यो डायाग्राम स्थायी रूपमा हटाउनेछ।',
|
||||
reorder: 'पुनः क्रमबद्ध गर्नुहोस्',
|
||||
reorder: 'स्वचालित मिलाउनुहोस्',
|
||||
cancel: 'रद्द गर्नुहोस्',
|
||||
},
|
||||
|
||||
@@ -249,6 +249,7 @@ export const ne: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'कुनै enum मानहरू परिभाषित छैनन्',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -271,7 +272,7 @@ export const ne: LanguageTranslation = {
|
||||
show_all: 'सबै देखाउनुहोस्',
|
||||
undo: 'पूर्ववत',
|
||||
redo: 'पुनः गर्नुहोस्',
|
||||
reorder_diagram: 'पुनः क्रमबद्ध गर्नुहोस्',
|
||||
reorder_diagram: 'डायाग्राम स्वचालित मिलाउनुहोस्',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -316,7 +317,7 @@ export const ne: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'डायाग्राम खोल्नुहोस्',
|
||||
title: 'डाटाबेस खोल्नुहोस्',
|
||||
description:
|
||||
'तलको सूचीबाट खोल्नका लागि एक डायाग्राम चयन गर्नुहोस्।',
|
||||
table_columns: {
|
||||
@@ -331,7 +332,7 @@ export const ne: LanguageTranslation = {
|
||||
diagram_actions: {
|
||||
open: 'खोल्नुहोस्',
|
||||
duplicate: 'डुप्लिकेट',
|
||||
delete: 'डायग्राम मेटाउनुहोस्',
|
||||
delete: 'मेटाउनुहोस्',
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -14,13 +14,13 @@ export const pt_BR: LanguageTranslation = {
|
||||
menu: {
|
||||
actions: {
|
||||
actions: 'Ações',
|
||||
new: 'Novo Diagrama',
|
||||
new: 'Novo...',
|
||||
browse: 'Navegar...',
|
||||
save: 'Salvar',
|
||||
import: 'Importar Banco de Dados',
|
||||
export_sql: 'Exportar SQL',
|
||||
export_as: 'Exportar como',
|
||||
delete_diagram: 'Excluir Diagrama',
|
||||
delete_diagram: 'Excluir',
|
||||
},
|
||||
edit: {
|
||||
edit: 'Editar',
|
||||
@@ -75,10 +75,10 @@ export const pt_BR: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'Reordenar Diagrama',
|
||||
title: 'Organizar Diagrama Automaticamente',
|
||||
description:
|
||||
'Esta ação reorganizará todas as tabelas no diagrama. Deseja continuar?',
|
||||
reorder: 'Reordenar',
|
||||
reorder: 'Organizar Automaticamente',
|
||||
cancel: 'Cancelar',
|
||||
},
|
||||
|
||||
@@ -249,6 +249,7 @@ export const pt_BR: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'Nenhum valor de enum definido',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -271,7 +272,7 @@ export const pt_BR: LanguageTranslation = {
|
||||
show_all: 'Mostrar Tudo',
|
||||
undo: 'Desfazer',
|
||||
redo: 'Refazer',
|
||||
reorder_diagram: 'Reordenar Diagrama',
|
||||
reorder_diagram: 'Organizar Diagrama Automaticamente',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -316,7 +317,7 @@ export const pt_BR: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'Abrir Diagrama',
|
||||
title: 'Abrir Banco de Dados',
|
||||
description: 'Selecione um diagrama para abrir da lista abaixo.',
|
||||
table_columns: {
|
||||
name: 'Nome',
|
||||
@@ -330,7 +331,7 @@ export const pt_BR: LanguageTranslation = {
|
||||
diagram_actions: {
|
||||
open: 'Abrir',
|
||||
duplicate: 'Duplicar',
|
||||
delete: 'Excluir Diagrama',
|
||||
delete: 'Excluir',
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -14,13 +14,13 @@ export const ru: LanguageTranslation = {
|
||||
menu: {
|
||||
actions: {
|
||||
actions: 'Действия',
|
||||
new: 'Новая диаграмма',
|
||||
new: 'Новая...',
|
||||
browse: 'Обзор...',
|
||||
save: 'Сохранить',
|
||||
import: 'Импортировать базу данных',
|
||||
export_sql: 'Экспорт SQL',
|
||||
export_as: 'Экспортировать как',
|
||||
delete_diagram: 'Удалить диаграмму',
|
||||
delete_diagram: 'Удалить',
|
||||
},
|
||||
edit: {
|
||||
edit: 'Изменение',
|
||||
@@ -73,10 +73,10 @@ export const ru: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'Переупорядочить диаграмму',
|
||||
title: 'Автоматическая расстановка диаграммы',
|
||||
description:
|
||||
'Это действие переставит все таблицы на диаграмме. Хотите продолжить?',
|
||||
reorder: 'Изменить порядок',
|
||||
reorder: 'Автоматическая расстановка',
|
||||
cancel: 'Отменить',
|
||||
},
|
||||
|
||||
@@ -246,6 +246,7 @@ export const ru: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'Значения перечисления не определены',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -268,7 +269,7 @@ export const ru: LanguageTranslation = {
|
||||
show_all: 'Показать все',
|
||||
undo: 'Отменить',
|
||||
redo: 'Вернуть',
|
||||
reorder_diagram: 'Переупорядочить диаграмму',
|
||||
reorder_diagram: 'Автоматическая расстановка диаграммы',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -312,7 +313,7 @@ export const ru: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'Открыть диаграмму',
|
||||
title: 'Открыть базу данных',
|
||||
description:
|
||||
'Выберите диаграмму, которую нужно открыть, из списка ниже.',
|
||||
table_columns: {
|
||||
@@ -327,7 +328,7 @@ export const ru: LanguageTranslation = {
|
||||
diagram_actions: {
|
||||
open: 'Открыть',
|
||||
duplicate: 'Дублировать',
|
||||
delete: 'Удалить диаграмму',
|
||||
delete: 'Удалить',
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -14,13 +14,13 @@ export const te: LanguageTranslation = {
|
||||
menu: {
|
||||
actions: {
|
||||
actions: 'చర్యలు',
|
||||
new: 'కొత్త డైగ్రాం',
|
||||
new: 'కొత్తది...',
|
||||
browse: 'బ్రాఉజ్ చేయండి...',
|
||||
save: 'సేవ్',
|
||||
import: 'డేటాబేస్ను దిగుమతి చేసుకోండి',
|
||||
export_sql: 'SQL ఎగుమతి',
|
||||
export_as: 'వగా ఎగుమతి చేయండి',
|
||||
delete_diagram: 'చిత్రాన్ని తొలగించండి',
|
||||
delete_diagram: 'తొలగించండి',
|
||||
},
|
||||
edit: {
|
||||
edit: 'సవరించు',
|
||||
@@ -75,10 +75,10 @@ export const te: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'చిత్రాన్ని పునఃసరిచేయండి',
|
||||
title: 'చిత్రాన్ని స్వయంచాలకంగా అమర్చండి',
|
||||
description:
|
||||
'ఈ చర్య చిత్రంలోని అన్ని పట్టికలను పునఃస్థాపిస్తుంది. మీరు కొనసాగించాలనుకుంటున్నారా?',
|
||||
reorder: 'పునఃసరిచేయండి',
|
||||
reorder: 'స్వయంచాలకంగా అమర్చండి',
|
||||
cancel: 'రద్దు',
|
||||
},
|
||||
|
||||
@@ -250,6 +250,7 @@ export const te: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'ఏ enum విలువలు నిర్వచించబడలేదు',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -272,7 +273,7 @@ export const te: LanguageTranslation = {
|
||||
show_all: 'అన్ని చూపించు',
|
||||
undo: 'తిరిగి చేయు',
|
||||
redo: 'మరలా చేయు',
|
||||
reorder_diagram: 'చిత్రాన్ని పునఃసరిచేయండి',
|
||||
reorder_diagram: 'చిత్రాన్ని స్వయంచాలకంగా అమర్చండి',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -317,7 +318,7 @@ export const te: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'చిత్రం తెరవండి',
|
||||
title: 'డేటాబేస్ తెరవండి',
|
||||
description: 'కింద ఉన్న జాబితా నుండి చిత్రాన్ని ఎంచుకోండి.',
|
||||
table_columns: {
|
||||
name: 'పేరు',
|
||||
@@ -331,7 +332,7 @@ export const te: LanguageTranslation = {
|
||||
diagram_actions: {
|
||||
open: 'తెరవు',
|
||||
duplicate: 'నకలు',
|
||||
delete: 'డైగ్రామ్ తొలగించు',
|
||||
delete: 'తొలగించు',
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -14,13 +14,13 @@ export const tr: LanguageTranslation = {
|
||||
menu: {
|
||||
actions: {
|
||||
actions: 'Eylemler',
|
||||
new: 'Yeni Diyagram',
|
||||
new: 'Yeni...',
|
||||
browse: 'Gözat...',
|
||||
save: 'Kaydet',
|
||||
import: 'Veritabanı İçe Aktar',
|
||||
export_sql: 'SQL Olarak Dışa Aktar',
|
||||
export_as: 'Olarak Dışa Aktar',
|
||||
delete_diagram: 'Diyagramı Sil',
|
||||
delete_diagram: 'Sil',
|
||||
},
|
||||
edit: {
|
||||
edit: 'Düzenle',
|
||||
@@ -75,10 +75,10 @@ export const tr: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'Diyagramı Yeniden Sırala',
|
||||
title: 'Diyagramı Otomatik Düzenle',
|
||||
description:
|
||||
'Bu işlem tüm tabloları yeniden düzenleyecektir. Devam etmek istiyor musunuz?',
|
||||
reorder: 'Yeniden Sırala',
|
||||
reorder: 'Otomatik Düzenle',
|
||||
cancel: 'İptal',
|
||||
},
|
||||
|
||||
@@ -249,6 +249,7 @@ export const tr: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'Tanımlanmış enum değeri yok',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -270,7 +271,7 @@ export const tr: LanguageTranslation = {
|
||||
show_all: 'Hepsini Gör',
|
||||
undo: 'Geri Al',
|
||||
redo: 'Yinele',
|
||||
reorder_diagram: 'Diyagramı Yeniden Sırala',
|
||||
reorder_diagram: 'Diyagramı Otomatik Düzenle',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -312,7 +313,7 @@ export const tr: LanguageTranslation = {
|
||||
import: 'İçe Aktar',
|
||||
},
|
||||
open_diagram_dialog: {
|
||||
title: 'Diyagramı Aç',
|
||||
title: 'Veritabanı Aç',
|
||||
description: 'Aşağıdaki listeden açmak için bir diyagram seçin.',
|
||||
table_columns: {
|
||||
name: 'Ad',
|
||||
@@ -326,7 +327,7 @@ export const tr: LanguageTranslation = {
|
||||
diagram_actions: {
|
||||
open: 'Aç',
|
||||
duplicate: 'Kopyala',
|
||||
delete: 'Diyagramı Sil',
|
||||
delete: 'Sil',
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -14,13 +14,13 @@ export const uk: LanguageTranslation = {
|
||||
menu: {
|
||||
actions: {
|
||||
actions: 'Дії',
|
||||
new: 'Нова діаграма',
|
||||
new: 'Нова...',
|
||||
browse: 'Огляд...',
|
||||
save: 'Зберегти',
|
||||
import: 'Імпорт бази даних',
|
||||
export_sql: 'Експорт SQL',
|
||||
export_as: 'Експортувати як',
|
||||
delete_diagram: 'Видалити діаграму',
|
||||
delete_diagram: 'Видалити',
|
||||
},
|
||||
edit: {
|
||||
edit: 'Редагувати',
|
||||
@@ -73,10 +73,10 @@ export const uk: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'Перевпорядкувати діаграму',
|
||||
title: 'Автоматичне розміщення діаграми',
|
||||
description:
|
||||
'Ця дія перевпорядкує всі таблиці на діаграмі. Хочете продовжити?',
|
||||
reorder: 'Перевпорядкувати',
|
||||
reorder: 'Автоматичне розміщення',
|
||||
cancel: 'Скасувати',
|
||||
},
|
||||
|
||||
@@ -247,6 +247,7 @@ export const uk: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'Значення переліку не визначені',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -269,7 +270,7 @@ export const uk: LanguageTranslation = {
|
||||
show_all: 'Показати все',
|
||||
undo: 'Скасувати',
|
||||
redo: 'Повторити',
|
||||
reorder_diagram: 'Перевпорядкувати діаграму',
|
||||
reorder_diagram: 'Автоматичне розміщення діаграми',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -313,7 +314,7 @@ export const uk: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'Відкрити діаграму',
|
||||
title: 'Відкрити базу даних',
|
||||
description:
|
||||
'Виберіть діаграму, яку потрібно відкрити, зі списку нижче.',
|
||||
table_columns: {
|
||||
@@ -328,7 +329,7 @@ export const uk: LanguageTranslation = {
|
||||
diagram_actions: {
|
||||
open: 'Відкрити',
|
||||
duplicate: 'Дублювати',
|
||||
delete: 'Видалити діаграму',
|
||||
delete: 'Видалити',
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -14,13 +14,13 @@ export const vi: LanguageTranslation = {
|
||||
menu: {
|
||||
actions: {
|
||||
actions: 'Hành động',
|
||||
new: 'Sơ đồ mới',
|
||||
new: 'Mới...',
|
||||
browse: 'Duyệt...',
|
||||
save: 'Lưu',
|
||||
import: 'Nhập cơ sở dữ liệu',
|
||||
export_sql: 'Xuất SQL',
|
||||
export_as: 'Xuất thành',
|
||||
delete_diagram: 'Xóa sơ đồ',
|
||||
delete_diagram: 'Xóa',
|
||||
},
|
||||
edit: {
|
||||
edit: 'Sửa',
|
||||
@@ -74,10 +74,10 @@ export const vi: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'Sắp xếp lại sơ đồ',
|
||||
title: 'Tự động sắp xếp sơ đồ',
|
||||
description:
|
||||
'Hành động này sẽ sắp xếp lại tất cả các bảng trong sơ đồ. Bạn có muốn tiếp tục không?',
|
||||
reorder: 'Sắp xếp',
|
||||
reorder: 'Tự động sắp xếp',
|
||||
cancel: 'Hủy',
|
||||
},
|
||||
|
||||
@@ -248,6 +248,7 @@ export const vi: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'Không có giá trị enum được định nghĩa',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -270,7 +271,7 @@ export const vi: LanguageTranslation = {
|
||||
show_all: 'Hiển thị tất cả',
|
||||
undo: 'Hoàn tác',
|
||||
redo: 'Làm lại',
|
||||
reorder_diagram: 'Sắp xếp lại sơ đồ',
|
||||
reorder_diagram: 'Tự động sắp xếp sơ đồ',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -314,7 +315,7 @@ export const vi: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'Mở sơ đồ',
|
||||
title: 'Mở cơ sở dữ liệu',
|
||||
description: 'Chọn sơ đồ để mở từ danh sách bên dưới.',
|
||||
table_columns: {
|
||||
name: 'Tên',
|
||||
@@ -328,7 +329,7 @@ export const vi: LanguageTranslation = {
|
||||
diagram_actions: {
|
||||
open: 'Mở',
|
||||
duplicate: 'Nhân bản',
|
||||
delete: 'Xóa sơ đồ',
|
||||
delete: 'Xóa',
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -14,13 +14,13 @@ export const zh_CN: LanguageTranslation = {
|
||||
menu: {
|
||||
actions: {
|
||||
actions: '操作',
|
||||
new: '新建关系图',
|
||||
new: '新建...',
|
||||
browse: '浏览...',
|
||||
save: '保存',
|
||||
import: '导入数据库',
|
||||
export_sql: '导出 SQL 语句',
|
||||
export_as: '导出为',
|
||||
delete_diagram: '删除关系图',
|
||||
delete_diagram: '删除',
|
||||
},
|
||||
edit: {
|
||||
edit: '编辑',
|
||||
@@ -72,9 +72,9 @@ export const zh_CN: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: '重新排列关系图',
|
||||
title: '自动排列关系图',
|
||||
description: '此操作将重新排列关系图中的所有表。是否要继续?',
|
||||
reorder: '重新排列',
|
||||
reorder: '自动排列',
|
||||
cancel: '取消',
|
||||
},
|
||||
|
||||
@@ -245,6 +245,7 @@ export const zh_CN: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: '没有定义枚举值',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -267,7 +268,7 @@ export const zh_CN: LanguageTranslation = {
|
||||
show_all: '展示全部',
|
||||
undo: '撤销',
|
||||
redo: '重做',
|
||||
reorder_diagram: '重新排列关系图',
|
||||
reorder_diagram: '自动排列关系图',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -311,7 +312,7 @@ export const zh_CN: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: '打开关系图',
|
||||
title: '打开数据库',
|
||||
description: '从下面的列表中选择一个图表打开。',
|
||||
table_columns: {
|
||||
name: '名称',
|
||||
@@ -325,7 +326,7 @@ export const zh_CN: LanguageTranslation = {
|
||||
diagram_actions: {
|
||||
open: '打开',
|
||||
duplicate: '复制',
|
||||
delete: '删除图表',
|
||||
delete: '删除',
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -14,13 +14,13 @@ export const zh_TW: LanguageTranslation = {
|
||||
menu: {
|
||||
actions: {
|
||||
actions: '操作',
|
||||
new: '新增圖表',
|
||||
new: '新增...',
|
||||
browse: '瀏覽...',
|
||||
save: '儲存',
|
||||
import: '匯入資料庫',
|
||||
export_sql: '匯出 SQL',
|
||||
export_as: '匯出為特定格式',
|
||||
delete_diagram: '刪除圖表',
|
||||
delete_diagram: '刪除',
|
||||
},
|
||||
edit: {
|
||||
edit: '編輯',
|
||||
@@ -72,9 +72,9 @@ export const zh_TW: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: '重新排列圖表',
|
||||
title: '自動排列圖表',
|
||||
description: '此操作將重新排列圖表中的所有表格。是否繼續?',
|
||||
reorder: '重新排列',
|
||||
reorder: '自動排列',
|
||||
cancel: '取消',
|
||||
},
|
||||
|
||||
@@ -245,6 +245,7 @@ export const zh_TW: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: '沒有定義列舉值',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -267,7 +268,7 @@ export const zh_TW: LanguageTranslation = {
|
||||
show_all: '顯示全部',
|
||||
undo: '復原',
|
||||
redo: '重做',
|
||||
reorder_diagram: '重新排列圖表',
|
||||
reorder_diagram: '自動排列圖表',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -310,7 +311,7 @@ export const zh_TW: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: '開啟圖表',
|
||||
title: '開啟資料庫',
|
||||
description: '請從以下列表中選擇一個圖表。',
|
||||
table_columns: {
|
||||
name: '名稱',
|
||||
@@ -324,7 +325,7 @@ export const zh_TW: LanguageTranslation = {
|
||||
diagram_actions: {
|
||||
open: '開啟',
|
||||
duplicate: '複製',
|
||||
delete: '刪除圖表',
|
||||
delete: '刪除',
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -18,4 +18,7 @@

    .marker-definitions {
    }

    .nodrag {
    }
}
21
src/lib/data/import-metadata/import/custom-types.ts
Normal file
@@ -0,0 +1,21 @@
import type { DBCustomType, DBCustomTypeKind } from '@/lib/domain';
import { schemaNameToDomainSchemaName } from '@/lib/domain';
import type { DBCustomTypeInfo } from '../metadata-types/custom-type-info';
import { generateId } from '@/lib/utils';

export const createCustomTypesFromMetadata = ({
    customTypes,
}: {
    customTypes: DBCustomTypeInfo[];
}): DBCustomType[] => {
    return customTypes.map((customType) => {
        return {
            id: generateId(),
            schema: schemaNameToDomainSchemaName(customType.schema),
            name: customType.type,
            kind: customType.kind as DBCustomTypeKind,
            values: customType.values,
            fields: customType.fields,
        };
    });
};
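As a quick illustration of how this mapper might be driven, the sketch below feeds it one hypothetical enum read from database metadata; the exact field names on DBCustomTypeInfo are inferred from how they are used in this file, so the cast is illustrative rather than authoritative.

import { createCustomTypesFromMetadata } from '@/lib/data/import-metadata/import/custom-types';
import type { DBCustomTypeInfo } from '@/lib/data/import-metadata/metadata-types/custom-type-info';

// Hypothetical metadata row describing a Postgres enum
const customTypes = [
    {
        schema: 'public',
        type: 'order_status',
        kind: 'enum',
        values: ['pending', 'paid', 'shipped'],
    },
] as unknown as DBCustomTypeInfo[];

// Each row becomes a DBCustomType with a fresh id and a normalized schema name
const domainTypes = createCustomTypesFromMetadata({ customTypes });
console.log(domainTypes[0]?.name); // 'order_status'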
351
src/lib/data/import-metadata/import/dependencies.ts
Normal file
@@ -0,0 +1,351 @@
import { generateId } from '@/lib/utils';
import type { AST } from 'node-sql-parser';
import type { DBDependency, DBTable } from '@/lib/domain';
import { DatabaseType, schemaNameToDomainSchemaName } from '@/lib/domain';
import type { ViewInfo } from '../metadata-types/view-info';
import { decodeViewDefinition } from './tables';

const astDatabaseTypes: Record<DatabaseType, string> = {
    [DatabaseType.POSTGRESQL]: 'postgresql',
    [DatabaseType.MYSQL]: 'postgresql',
    [DatabaseType.MARIADB]: 'postgresql',
    [DatabaseType.GENERIC]: 'postgresql',
    [DatabaseType.SQLITE]: 'postgresql',
    [DatabaseType.SQL_SERVER]: 'postgresql',
    [DatabaseType.CLICKHOUSE]: 'postgresql',
    [DatabaseType.COCKROACHDB]: 'postgresql',
    [DatabaseType.ORACLE]: 'postgresql',
};

export const createDependenciesFromMetadata = async ({
    views,
    tables,
    databaseType,
}: {
    views: ViewInfo[];
    tables: DBTable[];
    databaseType: DatabaseType;
}): Promise<DBDependency[]> => {
    if (!views || views.length === 0) {
        return [];
    }

    const { Parser } = await import('node-sql-parser');
    const parser = new Parser();

    const dependencies = views
        .flatMap((view) => {
            const viewSchema = schemaNameToDomainSchemaName(view.schema);
            const viewTable = tables.find(
                (table) =>
                    table.name === view.view_name && viewSchema === table.schema
            );

            if (!viewTable) {
                console.warn(
                    `Source table for view ${view.view_name} not found (schema: ${viewSchema})`
                );
                return []; // Skip this view and proceed to the next
            }

            if (view.view_definition) {
                try {
                    const decodedViewDefinition = decodeViewDefinition(
                        databaseType,
                        view.view_definition
                    );

                    let modifiedViewDefinition = '';
                    if (
                        databaseType === DatabaseType.MYSQL ||
                        databaseType === DatabaseType.MARIADB
                    ) {
                        modifiedViewDefinition = preprocessViewDefinitionMySQL(
                            decodedViewDefinition
                        );
                    } else if (databaseType === DatabaseType.SQL_SERVER) {
                        modifiedViewDefinition =
                            preprocessViewDefinitionSQLServer(
                                decodedViewDefinition
                            );
                    } else {
                        modifiedViewDefinition = preprocessViewDefinition(
                            decodedViewDefinition
                        );
                    }

                    // Parse using the appropriate dialect
                    const ast = parser.astify(modifiedViewDefinition, {
                        database: astDatabaseTypes[databaseType],
                        type: 'select', // Parsing a SELECT statement
                    });

                    let relatedTables = extractTablesFromAST(ast);

                    // Filter out duplicate tables without schema
                    relatedTables = filterDuplicateTables(relatedTables);

                    return relatedTables.map((relTable) => {
                        const relSchema = relTable.schema || view.schema; // Use view's schema if relSchema is undefined
                        const relTableName = relTable.tableName;

                        const table = tables.find(
                            (table) =>
                                table.name === relTableName &&
                                (table.schema || '') === relSchema
                        );

                        if (table) {
                            const dependency: DBDependency = {
                                id: generateId(),
                                schema: view.schema,
                                tableId: table.id, // related table
                                dependentSchema: table.schema,
                                dependentTableId: viewTable.id, // dependent view
                                createdAt: Date.now(),
                            };

                            return dependency;
                        } else {
                            console.warn(
                                `Dependent table ${relSchema}.${relTableName} not found for view ${view.schema}.${view.view_name}`
                            );
                            return null;
                        }
                    });
                } catch (error) {
                    console.error(
                        `Error parsing view ${view.schema}.${view.view_name}:`,
                        error
                    );
                    return [];
                }
            } else {
                console.warn(
                    `View definition missing for ${view.schema}.${view.view_name}`
                );
                return [];
            }
        })
        .filter((dependency) => dependency !== null);

    return dependencies;
};

// Add this new function to filter out duplicate tables
function filterDuplicateTables(
    tables: { schema?: string; tableName: string }[]
): { schema?: string; tableName: string }[] {
    const tableMap = new Map<string, { schema?: string; tableName: string }>();

    for (const table of tables) {
        const key = table.tableName;
        const existingTable = tableMap.get(key);

        if (!existingTable || (table.schema && !existingTable.schema)) {
            tableMap.set(key, table);
        }
    }

    return Array.from(tableMap.values());
}

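// Illustrative note (not part of the original file): given
//   [{ tableName: 'users' }, { schema: 'public', tableName: 'users' }, { schema: 'app', tableName: 'orders' }]
// filterDuplicateTables keeps one entry per table name and prefers the
// schema-qualified variant, so the result would be
//   [{ schema: 'public', tableName: 'users' }, { schema: 'app', tableName: 'orders' }].
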
// Preprocess the view_definition to remove schema from CREATE VIEW
function preprocessViewDefinition(viewDefinition: string): string {
    if (!viewDefinition) {
        return '';
    }

    // Remove leading and trailing whitespace
    viewDefinition = viewDefinition.replace(/\s+/g, ' ').trim();

    // Replace escaped double quotes with regular ones
    viewDefinition = viewDefinition.replace(/\\"/g, '"');

    // Replace 'CREATE MATERIALIZED VIEW' with 'CREATE VIEW'
    viewDefinition = viewDefinition.replace(
        /CREATE\s+MATERIALIZED\s+VIEW/i,
        'CREATE VIEW'
    );

    // Regular expression to match 'CREATE VIEW [schema.]view_name [ (column definitions) ] AS'
    // This regex captures the view name and skips any content between the view name and 'AS'
    const regex =
        /CREATE\s+VIEW\s+(?:(?:`[^`]+`|"[^"]+"|\w+)\.)?(?:`([^`]+)`|"([^"]+)"|(\w+))[\s\S]*?\bAS\b\s+/i;

    const match = viewDefinition.match(regex);
    let modifiedDefinition: string;

    if (match) {
        const viewName = match[1] || match[2] || match[3];
        // Extract the SQL after the 'AS' keyword
        const restOfDefinition = viewDefinition.substring(
            match.index! + match[0].length
        );

        // Replace double-quoted identifiers with unquoted ones
        let modifiedSQL = restOfDefinition.replace(/"(\w+)"/g, '$1');

        // Replace '::' type casts with 'CAST' expressions
        modifiedSQL = modifiedSQL.replace(
            /\(([^()]+)\)::(\w+)/g,
            'CAST($1 AS $2)'
        );

        // Remove ClickHouse-specific syntax that may still be present
        // For example, remove SETTINGS clauses inside the SELECT statement
        modifiedSQL = modifiedSQL.replace(/\bSETTINGS\b[\s\S]*$/i, '');

        modifiedDefinition = `CREATE VIEW ${viewName} AS ${modifiedSQL}`;
    } else {
        console.warn('Could not preprocess view definition:', viewDefinition);
        modifiedDefinition = viewDefinition;
    }

    return modifiedDefinition;
}

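// Illustrative note (not part of the original file): for a Postgres-style
// definition such as
//   CREATE MATERIALIZED VIEW "public"."active_users" AS SELECT "id" FROM "users" WHERE ("deleted_at" IS NULL)
// preprocessViewDefinition would hand the parser roughly
//   CREATE VIEW active_users AS SELECT id FROM users WHERE (deleted_at IS NULL)
// i.e. the schema prefix, the MATERIALIZED keyword, and the quoted identifiers are stripped.
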
// Preprocess the view_definition for SQL Server
|
||||
function preprocessViewDefinitionSQLServer(viewDefinition: string): string {
|
||||
if (!viewDefinition) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Remove BOM if present
|
||||
viewDefinition = viewDefinition.replace(/^\uFEFF/, '');
|
||||
|
||||
// Normalize whitespace
|
||||
viewDefinition = viewDefinition.replace(/\s+/g, ' ').trim();
|
||||
|
||||
// Remove square brackets and replace with double quotes
|
||||
viewDefinition = viewDefinition.replace(/\[([^\]]+)\]/g, '"$1"');
|
||||
|
||||
// Remove database names from fully qualified identifiers
|
||||
viewDefinition = viewDefinition.replace(
|
||||
/"([a-zA-Z0-9_]+)"\."([a-zA-Z0-9_]+)"\."([a-zA-Z0-9_]+)"/g,
|
||||
'"$2"."$3"'
|
||||
);
|
||||
|
||||
// Replace SQL Server functions with PostgreSQL equivalents
|
||||
viewDefinition = viewDefinition.replace(/\bGETDATE\(\)/gi, 'NOW()');
|
||||
viewDefinition = viewDefinition.replace(/\bISNULL\(/gi, 'COALESCE(');
|
||||
|
||||
// Replace 'TOP N' with 'LIMIT N' at the end of the query
|
||||
const topMatch = viewDefinition.match(/SELECT\s+TOP\s+(\d+)/i);
|
||||
if (topMatch) {
|
||||
const topN = topMatch[1];
|
||||
viewDefinition = viewDefinition.replace(
|
||||
/SELECT\s+TOP\s+\d+/i,
|
||||
'SELECT'
|
||||
);
|
||||
viewDefinition = viewDefinition.replace(/;+\s*$/, ''); // Remove semicolons at the end
|
||||
viewDefinition += ` LIMIT ${topN}`;
|
||||
}
|
||||
|
||||
viewDefinition = viewDefinition.replace(/\n/g, ''); // Remove newlines
|
||||
|
||||
// Adjust CREATE VIEW syntax
|
||||
const regex =
|
||||
/CREATE\s+VIEW\s+(?:"?([^".\s]+)"?\.)?"?([^".\s]+)"?\s+AS\s+/i;
|
||||
const match = viewDefinition.match(regex);
|
||||
let modifiedDefinition: string;
|
||||
|
||||
if (match) {
|
||||
const viewName = match[2];
|
||||
const modifiedSQL = viewDefinition.substring(
|
||||
match.index! + match[0].length
|
||||
);
|
||||
|
||||
// Remove semicolons at the end
|
||||
const finalSQL = modifiedSQL.replace(/;+\s*$/, '');
|
||||
|
||||
modifiedDefinition = `CREATE VIEW "${viewName}" AS ${finalSQL}`;
|
||||
} else {
|
||||
console.warn('Could not preprocess view definition:', viewDefinition);
|
||||
modifiedDefinition = viewDefinition;
|
||||
}
|
||||
|
||||
return modifiedDefinition;
|
||||
}
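// Illustrative sketch (not part of the diff): a condensed, inline version of the
// SQL Server preprocessing above on a hypothetical definition. It shows the
// bracket-to-quote, GETDATE() -> NOW(), and TOP N -> LIMIT N rewrites; the real
// function also normalizes whitespace and strips database-name prefixes.
let mssqlView =
    'CREATE VIEW [dbo].[recent_orders] AS SELECT TOP 10 [id], GETDATE() AS seen_at FROM [dbo].[orders];';

mssqlView = mssqlView.replace(/\[([^\]]+)\]/g, '"$1"'); // [name] -> "name"
mssqlView = mssqlView.replace(/\bGETDATE\(\)/gi, 'NOW()');
const sampleTopMatch = mssqlView.match(/SELECT\s+TOP\s+(\d+)/i);
if (sampleTopMatch) {
    mssqlView = mssqlView.replace(/SELECT\s+TOP\s+\d+/i, 'SELECT');
    mssqlView = mssqlView.replace(/;+\s*$/, '') + ` LIMIT ${sampleTopMatch[1]}`;
}
// -> 'CREATE VIEW "dbo"."recent_orders" AS SELECT "id", NOW() AS seen_at FROM "dbo"."orders" LIMIT 10'
console.log(mssqlView);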
|
||||
|
||||
// Preprocess the view_definition for MySQL
function preprocessViewDefinitionMySQL(viewDefinition: string): string {
if (!viewDefinition) {
return '';
}

// Remove any trailing semicolons
viewDefinition = viewDefinition.replace(/;\s*$/, '');

// Remove backticks from identifiers
viewDefinition = viewDefinition.replace(/`/g, '');

// Remove unnecessary parentheses around joins and ON clauses
viewDefinition = removeRedundantParentheses(viewDefinition);

return viewDefinition;
}

function removeRedundantParentheses(sql: string): string {
// Regular expressions to match unnecessary parentheses
const patterns = [
/\(\s*(JOIN\s+[^()]+?)\s*\)/gi,
/\(\s*(ON\s+[^()]+?)\s*\)/gi,
// Additional patterns if necessary
];

let prevSql;
do {
prevSql = sql;
patterns.forEach((pattern) => {
sql = sql.replace(pattern, '$1');
});
} while (sql !== prevSql);

return sql;
}
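// Illustrative sketch (not part of the diff): what removeRedundantParentheses does
// to a dump-style definition where JOIN/ON clauses are wrapped in parentheses.
// The sample string is hypothetical.
const noisyJoin =
    'select o.id from orders o (JOIN customers c) (ON o.customer_id = c.id)';
console.log(removeRedundantParentheses(noisyJoin));
// -> 'select o.id from orders o JOIN customers c ON o.customer_id = c.id'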
|
||||
|
||||
function extractTablesFromAST(
|
||||
ast: AST | AST[]
|
||||
): { schema?: string; tableName: string }[] {
|
||||
const tablesMap = new Map<string, { schema: string; tableName: string }>();
|
||||
const visitedNodes = new Set();
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
function traverse(node: any) {
|
||||
if (!node || visitedNodes.has(node)) return;
|
||||
visitedNodes.add(node);
|
||||
|
||||
if (Array.isArray(node)) {
|
||||
node.forEach(traverse);
|
||||
} else if (typeof node === 'object') {
|
||||
// Check if node represents a table
|
||||
if (
|
||||
Object.hasOwnProperty.call(node, 'table') &&
|
||||
typeof node.table === 'string'
|
||||
) {
|
||||
let schema = node.db || node.schema;
|
||||
const tableName = node.table;
|
||||
if (tableName) {
|
||||
// Assign default schema if undefined
|
||||
schema = schemaNameToDomainSchemaName(schema) || '';
|
||||
const key = `${schema}.${tableName}`;
|
||||
if (!tablesMap.has(key)) {
|
||||
tablesMap.set(key, { schema, tableName });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Recursively traverse all properties
|
||||
for (const key in node) {
|
||||
if (Object.hasOwnProperty.call(node, key)) {
|
||||
traverse(node[key]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
traverse(ast);
|
||||
|
||||
return Array.from(tablesMap.values());
|
||||
}
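// Illustrative sketch (not part of the diff): a hand-written fragment shaped like a
// node-sql-parser AST (hypothetical values) and the tables the traversal above would
// collect from it. The exact schema strings depend on schemaNameToDomainSchemaName.
const sampleAst = {
    type: 'select',
    from: [
        { db: 'sales', table: 'orders', as: 'o' },
        { db: null, table: 'customers', as: 'c', join: 'INNER JOIN' },
    ],
} as unknown as AST;

console.log(extractTablesFromAST(sampleAst));
// -> e.g. [ { schema: 'sales', tableName: 'orders' }, { schema: '', tableName: 'customers' } ]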
|
||||
src/lib/data/import-metadata/import/fields.ts (new file, 64 lines)
@@ -0,0 +1,64 @@
|
||||
import type { DBField } from '@/lib/domain';
|
||||
import type { ColumnInfo } from '../metadata-types/column-info';
|
||||
import type { AggregatedIndexInfo } from '../metadata-types/index-info';
|
||||
import type { PrimaryKeyInfo } from '../metadata-types/primary-key-info';
|
||||
import type { TableInfo } from '../metadata-types/table-info';
|
||||
import { generateId } from '@/lib/utils';
|
||||
|
||||
export const createFieldsFromMetadata = ({
|
||||
tableColumns,
|
||||
tablePrimaryKeys,
|
||||
aggregatedIndexes,
|
||||
}: {
|
||||
tableColumns: ColumnInfo[];
|
||||
tableSchema?: string;
|
||||
tableInfo: TableInfo;
|
||||
tablePrimaryKeys: PrimaryKeyInfo[];
|
||||
aggregatedIndexes: AggregatedIndexInfo[];
|
||||
}) => {
|
||||
const uniqueColumns = tableColumns.reduce((acc, col) => {
|
||||
if (!acc.has(col.name)) {
|
||||
acc.set(col.name, col);
|
||||
}
|
||||
return acc;
|
||||
}, new Map<string, ColumnInfo>());
|
||||
|
||||
const sortedColumns = Array.from(uniqueColumns.values()).sort(
|
||||
(a, b) => a.ordinal_position - b.ordinal_position
|
||||
);
|
||||
|
||||
const tablePrimaryKeysColumns = tablePrimaryKeys.map((pk) =>
|
||||
pk.column.trim()
|
||||
);
|
||||
|
||||
return sortedColumns.map(
|
||||
(col: ColumnInfo): DBField => ({
|
||||
id: generateId(),
|
||||
name: col.name,
|
||||
type: {
|
||||
id: col.type.split(' ').join('_').toLowerCase(),
|
||||
name: col.type.toLowerCase(),
|
||||
},
|
||||
primaryKey: tablePrimaryKeysColumns.includes(col.name),
|
||||
unique: Object.values(aggregatedIndexes).some(
|
||||
(idx) =>
|
||||
idx.unique &&
|
||||
idx.columns.length === 1 &&
|
||||
idx.columns[0].name === col.name
|
||||
),
|
||||
nullable: Boolean(col.nullable),
|
||||
...(col.character_maximum_length &&
|
||||
col.character_maximum_length !== 'null'
|
||||
? { characterMaximumLength: col.character_maximum_length }
|
||||
: {}),
|
||||
...(col.precision?.precision
|
||||
? { precision: col.precision.precision }
|
||||
: {}),
|
||||
...(col.precision?.scale ? { scale: col.precision.scale } : {}),
|
||||
...(col.default ? { default: col.default } : {}),
|
||||
...(col.collation ? { collation: col.collation } : {}),
|
||||
createdAt: Date.now(),
|
||||
comments: col.comment ? col.comment : undefined,
|
||||
})
|
||||
);
|
||||
};
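// Illustrative sketch (not part of the diff): trimmed-down, hypothetical metadata rows
// driving createFieldsFromMetadata. Real ColumnInfo / PrimaryKeyInfo / TableInfo
// objects carry more properties, hence the casts.
const sampleFields = createFieldsFromMetadata({
    tableColumns: [
        { name: 'id', type: 'uuid', ordinal_position: 1, nullable: false },
        {
            name: 'email',
            type: 'character varying',
            ordinal_position: 2,
            nullable: true,
            character_maximum_length: '255',
        },
    ] as unknown as ColumnInfo[],
    tablePrimaryKeys: [{ column: 'id' }] as unknown as PrimaryKeyInfo[],
    aggregatedIndexes: [],
    tableInfo: { schema: 'public', table: 'users' } as unknown as TableInfo,
    tableSchema: 'public',
});
// sampleFields[0].primaryKey === true
// sampleFields[1].type.id === 'character_varying'
// sampleFields[1].characterMaximumLength === '255'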
|
||||
src/lib/data/import-metadata/import/index.ts (new file, 82 lines)
@@ -0,0 +1,82 @@
|
||||
import type { DatabaseEdition, Diagram } from '@/lib/domain';
|
||||
import { adjustTablePositions, DatabaseType } from '@/lib/domain';
|
||||
import { generateDiagramId } from '@/lib/utils';
|
||||
import type { DatabaseMetadata } from '../metadata-types/database-metadata';
|
||||
import { createCustomTypesFromMetadata } from './custom-types';
|
||||
import { createRelationshipsFromMetadata } from './relationships';
|
||||
import { createTablesFromMetadata } from './tables';
|
||||
import { createDependenciesFromMetadata } from './dependencies';
|
||||
|
||||
export const loadFromDatabaseMetadata = async ({
|
||||
databaseType,
|
||||
databaseMetadata,
|
||||
diagramNumber,
|
||||
databaseEdition,
|
||||
}: {
|
||||
databaseType: DatabaseType;
|
||||
databaseMetadata: DatabaseMetadata;
|
||||
diagramNumber?: number;
|
||||
databaseEdition?: DatabaseEdition;
|
||||
}): Promise<Diagram> => {
|
||||
const {
|
||||
fk_info: foreignKeys,
|
||||
views: views,
|
||||
custom_types: customTypes,
|
||||
} = databaseMetadata;
|
||||
|
||||
const tables = createTablesFromMetadata({
|
||||
databaseMetadata,
|
||||
databaseType,
|
||||
});
|
||||
|
||||
const relationships = createRelationshipsFromMetadata({
|
||||
foreignKeys,
|
||||
tables,
|
||||
});
|
||||
|
||||
const dependencies = await createDependenciesFromMetadata({
|
||||
views,
|
||||
tables,
|
||||
databaseType,
|
||||
});
|
||||
|
||||
const dbCustomTypes = customTypes
|
||||
? createCustomTypesFromMetadata({
|
||||
customTypes,
|
||||
})
|
||||
: [];
|
||||
|
||||
const adjustedTables = adjustTablePositions({
|
||||
tables,
|
||||
relationships,
|
||||
mode: 'perSchema',
|
||||
});
|
||||
|
||||
const sortedTables = adjustedTables.sort((a, b) => {
|
||||
if (a.isView === b.isView) {
|
||||
// Both are either tables or views, so sort alphabetically by name
|
||||
return a.name.localeCompare(b.name);
|
||||
}
|
||||
// If one is a view and the other is not, put tables first
|
||||
return a.isView ? 1 : -1;
|
||||
});
|
||||
|
||||
const diagram: Diagram = {
|
||||
id: generateDiagramId(),
|
||||
name: databaseMetadata.database_name
|
||||
? `${databaseMetadata.database_name}-db`
|
||||
: diagramNumber
|
||||
? `Diagram ${diagramNumber}`
|
||||
: 'New Diagram',
|
||||
databaseType: databaseType ?? DatabaseType.GENERIC,
|
||||
databaseEdition,
|
||||
tables: sortedTables,
|
||||
relationships,
|
||||
dependencies,
|
||||
customTypes: dbCustomTypes,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
return diagram;
|
||||
};
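// Illustrative sketch (not part of the diff): how the loader above is typically
// driven. `metadata` is assumed to be a DatabaseMetadata object obtained elsewhere
// (for example, parsed from the output of the metadata collection queries).
const diagram = await loadFromDatabaseMetadata({
    databaseType: DatabaseType.POSTGRESQL,
    databaseMetadata: metadata,
    diagramNumber: 1,
});
console.log(diagram.tables.length, diagram.relationships.length);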
|
||||
src/lib/data/import-metadata/import/indexes.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
import type { DBField, DBIndex, IndexType } from '@/lib/domain';
import type { AggregatedIndexInfo } from '../metadata-types/index-info';
import { generateId } from '@/lib/utils';

export const createIndexesFromMetadata = ({
aggregatedIndexes,
fields,
}: {
aggregatedIndexes: AggregatedIndexInfo[];
fields: DBField[];
}): DBIndex[] =>
aggregatedIndexes.map(
(idx): DBIndex => ({
id: generateId(),
name: idx.name,
unique: Boolean(idx.unique),
fieldIds: idx.columns
.sort((a, b) => a.position - b.position)
.map((c) => fields.find((f) => f.name === c.name)?.id)
.filter((id): id is string => id !== undefined),
createdAt: Date.now(),
type: idx.index_type?.toLowerCase() as IndexType,
})
);
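// Illustrative sketch (not part of the diff): hypothetical inputs showing how index
// columns are resolved to field ids. Columns with no matching field are dropped by
// the final filter.
const sampleDbFields = [{ id: 'f1', name: 'email' }] as unknown as DBField[];

const sampleIndexes = createIndexesFromMetadata({
    aggregatedIndexes: [
        {
            name: 'users_email_key',
            unique: true,
            index_type: 'btree',
            columns: [{ name: 'email', position: 1 }],
        },
    ] as unknown as AggregatedIndexInfo[],
    fields: sampleDbFields,
});
// sampleIndexes[0].fieldIds -> ['f1'], sampleIndexes[0].type -> 'btree'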
|
||||
src/lib/data/import-metadata/import/relationships.ts (new file, 85 lines)
@@ -0,0 +1,85 @@
|
||||
import type {
|
||||
Cardinality,
|
||||
DBField,
|
||||
DBRelationship,
|
||||
DBTable,
|
||||
} from '@/lib/domain';
|
||||
import { schemaNameToDomainSchemaName } from '@/lib/domain';
|
||||
import type { ForeignKeyInfo } from '../metadata-types/foreign-key-info';
|
||||
import { generateId } from '@/lib/utils';
|
||||
|
||||
const determineCardinality = (
|
||||
field: DBField,
|
||||
isTablePKComplex: boolean
|
||||
): Cardinality => {
|
||||
return field.unique || (field.primaryKey && !isTablePKComplex)
|
||||
? 'one'
|
||||
: 'many';
|
||||
};
|
||||
|
||||
export const createRelationshipsFromMetadata = ({
|
||||
foreignKeys,
|
||||
tables,
|
||||
}: {
|
||||
foreignKeys: ForeignKeyInfo[];
|
||||
tables: DBTable[];
|
||||
}): DBRelationship[] => {
|
||||
return foreignKeys
|
||||
.map((fk: ForeignKeyInfo): DBRelationship | null => {
|
||||
const schema = schemaNameToDomainSchemaName(fk.schema);
|
||||
const sourceTable = tables.find(
|
||||
(table) => table.name === fk.table && table.schema === schema
|
||||
);
|
||||
|
||||
const targetSchema = schemaNameToDomainSchemaName(
|
||||
fk.reference_schema
|
||||
);
|
||||
|
||||
const targetTable = tables.find(
|
||||
(table) =>
|
||||
table.name === fk.reference_table &&
|
||||
table.schema === targetSchema
|
||||
);
|
||||
const sourceField = sourceTable?.fields.find(
|
||||
(field) => field.name === fk.column
|
||||
);
|
||||
const targetField = targetTable?.fields.find(
|
||||
(field) => field.name === fk.reference_column
|
||||
);
|
||||
|
||||
const isSourceTablePKComplex =
|
||||
(sourceTable?.fields.filter((field) => field.primaryKey) ?? [])
|
||||
.length > 1;
|
||||
const isTargetTablePKComplex =
|
||||
(targetTable?.fields.filter((field) => field.primaryKey) ?? [])
|
||||
.length > 1;
|
||||
|
||||
if (sourceTable && targetTable && sourceField && targetField) {
|
||||
const sourceCardinality = determineCardinality(
|
||||
sourceField,
|
||||
isSourceTablePKComplex
|
||||
);
|
||||
const targetCardinality = determineCardinality(
|
||||
targetField,
|
||||
isTargetTablePKComplex
|
||||
);
|
||||
|
||||
return {
|
||||
id: generateId(),
|
||||
name: fk.foreign_key_name,
|
||||
sourceSchema: schema,
|
||||
targetSchema: targetSchema,
|
||||
sourceTableId: sourceTable.id,
|
||||
targetTableId: targetTable.id,
|
||||
sourceFieldId: sourceField.id,
|
||||
targetFieldId: targetField.id,
|
||||
sourceCardinality,
|
||||
targetCardinality,
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
})
|
||||
.filter((rel) => rel !== null) as DBRelationship[];
|
||||
};
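// Illustrative sketch (not part of the diff): how the module-private helper above
// classifies the two ends of a foreign key. A unique field, or a field that is the
// table's sole primary key column, becomes the 'one' side; everything else is 'many'.
const soloPkField = { primaryKey: true, unique: false } as unknown as DBField;
const plainField = { primaryKey: false, unique: false } as unknown as DBField;

determineCardinality(soloPkField, false); // 'one'  - single-column primary key
determineCardinality(soloPkField, true); // 'many' - part of a composite primary key
determineCardinality(plainField, false); // 'many'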
|
||||
src/lib/data/import-metadata/import/tables.ts (new file, 228 lines)
@@ -0,0 +1,228 @@
|
||||
import type { DBIndex, DBTable } from '@/lib/domain';
|
||||
import {
|
||||
DatabaseType,
|
||||
generateTableKey,
|
||||
schemaNameToDomainSchemaName,
|
||||
} from '@/lib/domain';
|
||||
import type { DatabaseMetadata } from '../metadata-types/database-metadata';
|
||||
import type { TableInfo } from '../metadata-types/table-info';
|
||||
import { createAggregatedIndexes } from '../metadata-types/index-info';
|
||||
import {
|
||||
decodeBase64ToUtf16LE,
|
||||
decodeBase64ToUtf8,
|
||||
generateId,
|
||||
} from '@/lib/utils';
|
||||
import {
|
||||
defaultTableColor,
|
||||
materializedViewColor,
|
||||
viewColor,
|
||||
} from '@/lib/colors';
|
||||
import { createFieldsFromMetadata } from './fields';
|
||||
import { createIndexesFromMetadata } from './indexes';
|
||||
|
||||
export const decodeViewDefinition = (
|
||||
databaseType: DatabaseType,
|
||||
viewDefinition?: string
|
||||
): string => {
|
||||
if (!viewDefinition) {
|
||||
return '';
|
||||
}
|
||||
|
||||
let decodedViewDefinition: string;
|
||||
if (databaseType === DatabaseType.SQL_SERVER) {
|
||||
decodedViewDefinition = decodeBase64ToUtf16LE(viewDefinition);
|
||||
} else {
|
||||
decodedViewDefinition = decodeBase64ToUtf8(viewDefinition);
|
||||
}
|
||||
|
||||
return decodedViewDefinition;
|
||||
};
|
||||
|
||||
export const createTablesFromMetadata = ({
|
||||
databaseMetadata,
|
||||
databaseType,
|
||||
}: {
|
||||
databaseMetadata: DatabaseMetadata;
|
||||
databaseType: DatabaseType;
|
||||
}): DBTable[] => {
|
||||
const {
|
||||
tables: tableInfos,
|
||||
pk_info: primaryKeys,
|
||||
columns,
|
||||
indexes,
|
||||
views: views,
|
||||
} = databaseMetadata;
|
||||
|
||||
// Pre-compute view names for faster lookup if there are views
|
||||
const viewNamesSet = new Set<string>();
|
||||
const materializedViewNamesSet = new Set<string>();
|
||||
|
||||
if (views && views.length > 0) {
|
||||
views.forEach((view) => {
|
||||
const key = generateTableKey({
|
||||
schemaName: view.schema,
|
||||
tableName: view.view_name,
|
||||
});
|
||||
viewNamesSet.add(key);
|
||||
|
||||
if (
|
||||
view.view_definition &&
|
||||
decodeViewDefinition(databaseType, view.view_definition)
|
||||
.toLowerCase()
|
||||
.includes('materialized')
|
||||
) {
|
||||
materializedViewNamesSet.add(key);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Pre-compute lookup maps for better performance
|
||||
const columnsByTable = new Map<string, (typeof columns)[0][]>();
|
||||
const indexesByTable = new Map<string, (typeof indexes)[0][]>();
|
||||
const primaryKeysByTable = new Map<string, (typeof primaryKeys)[0][]>();
|
||||
|
||||
// Group columns by table
|
||||
columns.forEach((col) => {
|
||||
const key = generateTableKey({
|
||||
schemaName: col.schema,
|
||||
tableName: col.table,
|
||||
});
|
||||
if (!columnsByTable.has(key)) {
|
||||
columnsByTable.set(key, []);
|
||||
}
|
||||
columnsByTable.get(key)!.push(col);
|
||||
});
|
||||
|
||||
// Group indexes by table
|
||||
indexes.forEach((idx) => {
|
||||
const key = generateTableKey({
|
||||
schemaName: idx.schema,
|
||||
tableName: idx.table,
|
||||
});
|
||||
if (!indexesByTable.has(key)) {
|
||||
indexesByTable.set(key, []);
|
||||
}
|
||||
indexesByTable.get(key)!.push(idx);
|
||||
});
|
||||
|
||||
// Group primary keys by table
|
||||
primaryKeys.forEach((pk) => {
|
||||
const key = generateTableKey({
|
||||
schemaName: pk.schema,
|
||||
tableName: pk.table,
|
||||
});
|
||||
if (!primaryKeysByTable.has(key)) {
|
||||
primaryKeysByTable.set(key, []);
|
||||
}
|
||||
primaryKeysByTable.get(key)!.push(pk);
|
||||
});
|
||||
|
||||
const result = tableInfos.map((tableInfo: TableInfo) => {
|
||||
const tableSchema = schemaNameToDomainSchemaName(tableInfo.schema);
|
||||
const tableKey = generateTableKey({
|
||||
schemaName: tableInfo.schema,
|
||||
tableName: tableInfo.table,
|
||||
});
|
||||
|
||||
// Use pre-computed lookups instead of filtering entire arrays
|
||||
const tableIndexes = indexesByTable.get(tableKey) || [];
|
||||
const tablePrimaryKeys = primaryKeysByTable.get(tableKey) || [];
|
||||
const tableColumns = columnsByTable.get(tableKey) || [];
|
||||
|
||||
// Aggregate indexes with multiple columns
|
||||
const aggregatedIndexes = createAggregatedIndexes({
|
||||
tableInfo,
|
||||
tableSchema,
|
||||
tableIndexes,
|
||||
});
|
||||
|
||||
const fields = createFieldsFromMetadata({
|
||||
aggregatedIndexes,
|
||||
tableColumns,
|
||||
tablePrimaryKeys,
|
||||
tableInfo,
|
||||
tableSchema,
|
||||
});
|
||||
|
||||
// Check for composite primary key and find matching index name
|
||||
const primaryKeyFields = fields.filter((f) => f.primaryKey);
|
||||
let pkMatchingIndexName: string | undefined;
|
||||
let pkIndex: DBIndex | undefined;
|
||||
|
||||
if (primaryKeyFields.length >= 1) {
// There is at least one primary key column; look for an index that matches the PK columns exactly
const pkFieldNames = primaryKeyFields.map((f) => f.name).sort();
|
||||
|
||||
// Find an index that matches the primary key columns exactly
|
||||
const matchingIndex = aggregatedIndexes.find((index) => {
|
||||
const indexColumnNames = index.columns
|
||||
.map((c) => c.name)
|
||||
.sort();
|
||||
return (
|
||||
indexColumnNames.length === pkFieldNames.length &&
|
||||
indexColumnNames.every((col, i) => col === pkFieldNames[i])
|
||||
);
|
||||
});
|
||||
|
||||
if (matchingIndex) {
|
||||
pkMatchingIndexName = matchingIndex.name;
|
||||
// Create a special PK index
|
||||
pkIndex = {
|
||||
id: generateId(),
|
||||
name: matchingIndex.name,
|
||||
unique: true,
|
||||
fieldIds: primaryKeyFields.map((f) => f.id),
|
||||
createdAt: Date.now(),
|
||||
isPrimaryKey: true,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Filter out the index that matches the composite PK (to avoid duplication)
|
||||
const filteredAggregatedIndexes = pkMatchingIndexName
|
||||
? aggregatedIndexes.filter(
|
||||
(idx) => idx.name !== pkMatchingIndexName
|
||||
)
|
||||
: aggregatedIndexes;
|
||||
|
||||
const dbIndexes = createIndexesFromMetadata({
|
||||
aggregatedIndexes: filteredAggregatedIndexes,
|
||||
fields,
|
||||
});
|
||||
|
||||
// Add the PK index if it exists
|
||||
if (pkIndex) {
|
||||
dbIndexes.push(pkIndex);
|
||||
}
|
||||
|
||||
// Determine if the current table is a view by checking against pre-computed sets
|
||||
const viewKey = generateTableKey({
|
||||
schemaName: tableSchema,
|
||||
tableName: tableInfo.table,
|
||||
});
|
||||
const isView = viewNamesSet.has(viewKey);
|
||||
const isMaterializedView = materializedViewNamesSet.has(viewKey);
|
||||
|
||||
// Initial random positions; these will be adjusted later
|
||||
return {
|
||||
id: generateId(),
|
||||
name: tableInfo.table,
|
||||
schema: tableSchema,
|
||||
x: Math.random() * 1000, // Placeholder X
|
||||
y: Math.random() * 800, // Placeholder Y
|
||||
fields,
|
||||
indexes: dbIndexes,
|
||||
color: isMaterializedView
|
||||
? materializedViewColor
|
||||
: isView
|
||||
? viewColor
|
||||
: defaultTableColor,
|
||||
isView: isView,
|
||||
isMaterializedView: isMaterializedView,
|
||||
createdAt: Date.now(),
|
||||
comments: tableInfo.comment ? tableInfo.comment : undefined,
|
||||
};
|
||||
});
|
||||
|
||||
return result;
|
||||
};
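// Illustrative sketch (not part of the diff): a minimal, hypothetical DatabaseMetadata
// slice fed to createTablesFromMetadata. Real metadata objects also include fk_info,
// custom_types, database_name, etc., hence the cast; createAggregatedIndexes is
// assumed to return an empty list when there are no raw indexes.
const sampleTables = createTablesFromMetadata({
    databaseType: DatabaseType.POSTGRESQL,
    databaseMetadata: {
        tables: [{ schema: 'public', table: 'users' }],
        columns: [
            {
                schema: 'public',
                table: 'users',
                name: 'id',
                type: 'uuid',
                ordinal_position: 1,
                nullable: false,
            },
        ],
        pk_info: [{ schema: 'public', table: 'users', column: 'id' }],
        indexes: [],
        views: [],
    } as unknown as DatabaseMetadata,
});
// sampleTables[0].name === 'users'
// sampleTables[0].fields[0].primaryKey === true
// sampleTables[0].isView === false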
|
||||
@@ -106,7 +106,7 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
});
|
||||
|
||||
// Should contain composite primary key syntax
|
||||
expect(sql).toContain('PRIMARY KEY (spell_id, component_id)');
|
||||
expect(sql).toContain('PRIMARY KEY ("spell_id", "component_id")');
|
||||
// Should NOT contain individual PRIMARY KEY constraints
|
||||
expect(sql).not.toMatch(/spell_id\s+uuid\s+NOT NULL\s+PRIMARY KEY/);
|
||||
expect(sql).not.toMatch(
|
||||
@@ -192,7 +192,7 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
|
||||
// Should contain composite primary key constraint
|
||||
expect(sql).toContain(
|
||||
'PRIMARY KEY (master_user_id, tenant_id, tenant_user_id)'
|
||||
'PRIMARY KEY ("master_user_id", "tenant_id", "tenant_user_id")'
|
||||
);
|
||||
|
||||
// Should NOT contain the duplicate index for the primary key fields
|
||||
@@ -245,7 +245,7 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
});
|
||||
|
||||
// Should contain inline PRIMARY KEY
|
||||
expect(sql).toMatch(/id\s+uuid\s+NOT NULL\s+PRIMARY KEY/);
|
||||
expect(sql).toMatch(/"id"\s+uuid\s+NOT NULL\s+PRIMARY KEY/);
|
||||
// Should NOT contain separate PRIMARY KEY constraint
|
||||
expect(sql).not.toContain('PRIMARY KEY (id)');
|
||||
});
|
||||
@@ -306,8 +306,8 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
expect(sql).not.toContain('DEFAULT has default');
|
||||
expect(sql).not.toContain('DEFAULT DEFAULT has default');
|
||||
// The fields should still be in the table
|
||||
expect(sql).toContain('is_active boolean');
|
||||
expect(sql).toContain('stock_count integer NOT NULL'); // integer gets simplified to int
|
||||
expect(sql).toContain('"is_active" boolean');
|
||||
expect(sql).toContain('"stock_count" integer NOT NULL'); // integer gets simplified to int
|
||||
});
|
||||
|
||||
it('should handle valid default values correctly', () => {
|
||||
@@ -429,8 +429,8 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
});
|
||||
|
||||
// Should convert NOW to NOW() and ('now') to now()
|
||||
expect(sql).toContain('created_at timestamp DEFAULT NOW');
|
||||
expect(sql).toContain('updated_at timestamp DEFAULT now()');
|
||||
expect(sql).toContain('"created_at" timestamp DEFAULT NOW');
|
||||
expect(sql).toContain('"updated_at" timestamp DEFAULT now()');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -485,9 +485,9 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
});
|
||||
|
||||
// Should handle char with explicit length
|
||||
expect(sql).toContain('element_code char(2)');
|
||||
expect(sql).toContain('"element_code" char(2)');
|
||||
// Should add default length for char without length
|
||||
expect(sql).toContain('status char(1)');
|
||||
expect(sql).toContain('"status" char(1)');
|
||||
});
|
||||
|
||||
it('should not have spaces between char and parentheses', () => {
|
||||
@@ -715,7 +715,7 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
expect(sql).toContain('CREATE TABLE "guild_members"');
|
||||
// Should create foreign key
|
||||
expect(sql).toContain(
|
||||
'ALTER TABLE "guild_members" ADD CONSTRAINT fk_guild_members_guild FOREIGN KEY (guild_id) REFERENCES "guilds" (id);'
|
||||
'ALTER TABLE "guild_members" ADD CONSTRAINT fk_guild_members_guild FOREIGN KEY ("guild_id") REFERENCES "guilds" ("id");'
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -939,9 +939,9 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
});
|
||||
|
||||
// Should include precision and scale
|
||||
expect(sql).toContain('amount numeric(15, 2)');
|
||||
expect(sql).toContain('"amount" numeric(15, 2)');
|
||||
// Should include precision only when scale is not provided
|
||||
expect(sql).toContain('interest_rate numeric(5)');
|
||||
expect(sql).toContain('"interest_rate" numeric(5)');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -424,7 +424,7 @@ describe('Quoted Identifiers - Special Characters Handling', () => {
|
||||
});
|
||||
|
||||
expect(sql).toContain(
|
||||
'ALTER TABLE "user-profiles" ADD CONSTRAINT fk_profiles_accounts FOREIGN KEY (account_id) REFERENCES "user-accounts" (id)'
|
||||
'ALTER TABLE "user-profiles" ADD CONSTRAINT fk_profiles_accounts FOREIGN KEY ("account_id") REFERENCES "user-accounts" ("id")'
|
||||
);
|
||||
});
|
||||
|
||||
@@ -493,7 +493,7 @@ describe('Quoted Identifiers - Special Characters Handling', () => {
|
||||
});
|
||||
|
||||
expect(sql).toContain(
|
||||
'ALTER TABLE "app-data"."user profiles" ADD CONSTRAINT fk_profiles_accounts FOREIGN KEY (account_id) REFERENCES "auth-db"."user accounts" (id)'
|
||||
'ALTER TABLE "app-data"."user profiles" ADD CONSTRAINT fk_profiles_accounts FOREIGN KEY ("account_id") REFERENCES "auth-db"."user accounts" ("id")'
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -52,6 +52,29 @@ const getQuotedTableName = (
|
||||
}
|
||||
};
|
||||
|
||||
const getQuotedFieldName = (
fieldName: string,
isDBMLFlow: boolean = false
): string => {
// Check if a name is already quoted
const isAlreadyQuoted = (name: string) => {
return (
(name.startsWith('"') && name.endsWith('"')) ||
(name.startsWith('`') && name.endsWith('`')) ||
(name.startsWith('[') && name.endsWith(']'))
);
};

if (isAlreadyQuoted(fieldName)) {
return fieldName;
}

// For DBML flow, always quote field names
// Otherwise, only quote if it contains special characters
const needsQuoting = /[^a-zA-Z0-9_]/.test(fieldName) || isDBMLFlow;
return needsQuoting ? `"${fieldName}"` : fieldName;
};
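// Illustrative sketch (not part of the diff): how the helper above decides when to
// add quotes, as used within this module.
getQuotedFieldName('user_id'); // 'user_id'            - plain identifier, SQL flow
getQuotedFieldName('user id'); // '"user id"'          - contains a special character
getQuotedFieldName('"already_quoted"'); // '"already_quoted"' - left untouched
getQuotedFieldName('user_id', true); // '"user_id"'    - DBML flow always quotes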
|
||||
|
||||
export const exportBaseSQL = ({
|
||||
diagram,
|
||||
targetDatabaseType,
|
||||
@@ -270,7 +293,8 @@ export const exportBaseSQL = ({
|
||||
typeName = 'char';
|
||||
}
|
||||
|
||||
sqlScript += ` ${field.name} ${typeName}`;
|
||||
const quotedFieldName = getQuotedFieldName(field.name, isDBMLFlow);
|
||||
sqlScript += ` ${quotedFieldName} ${typeName}`;
|
||||
|
||||
// Add size for character types
|
||||
if (
|
||||
@@ -367,7 +391,9 @@ export const exportBaseSQL = ({
|
||||
hasCompositePrimaryKey ||
|
||||
(primaryKeyFields.length === 1 && pkIndex?.name)
|
||||
) {
|
||||
const pkFieldNames = primaryKeyFields.map((f) => f.name).join(', ');
|
||||
const pkFieldNames = primaryKeyFields
|
||||
.map((f) => getQuotedFieldName(f.name, isDBMLFlow))
|
||||
.join(', ');
|
||||
if (pkIndex?.name) {
|
||||
sqlScript += `\n CONSTRAINT ${pkIndex.name} PRIMARY KEY (${pkFieldNames})`;
|
||||
} else {
|
||||
@@ -388,7 +414,11 @@ export const exportBaseSQL = ({
|
||||
table.fields.forEach((field) => {
|
||||
// Add column comment (only for databases that support COMMENT ON syntax)
|
||||
if (field.comments && supportsCommentOn) {
|
||||
sqlScript += `COMMENT ON COLUMN ${tableName}.${field.name} IS '${escapeSQLComment(field.comments)}';\n`;
|
||||
const quotedFieldName = getQuotedFieldName(
|
||||
field.name,
|
||||
isDBMLFlow
|
||||
);
|
||||
sqlScript += `COMMENT ON COLUMN ${tableName}.${quotedFieldName} IS '${escapeSQLComment(field.comments)}';\n`;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -420,7 +450,7 @@ export const exportBaseSQL = ({
|
||||
}
|
||||
|
||||
const fieldNames = indexFields
|
||||
.map((field) => field.name)
|
||||
.map((field) => getQuotedFieldName(field.name, isDBMLFlow))
|
||||
.join(', ');
|
||||
|
||||
if (fieldNames) {
|
||||
@@ -500,8 +530,16 @@ export const exportBaseSQL = ({
|
||||
|
||||
const fkTableName = getQuotedTableName(fkTable, isDBMLFlow);
|
||||
const refTableName = getQuotedTableName(refTable, isDBMLFlow);
|
||||
const quotedFkFieldName = getQuotedFieldName(
|
||||
fkField.name,
|
||||
isDBMLFlow
|
||||
);
|
||||
const quotedRefFieldName = getQuotedFieldName(
|
||||
refField.name,
|
||||
isDBMLFlow
|
||||
);
|
||||
|
||||
sqlScript += `ALTER TABLE ${fkTableName} ADD CONSTRAINT ${relationship.name} FOREIGN KEY (${fkField.name}) REFERENCES ${refTableName} (${refField.name});\n`;
|
||||
sqlScript += `ALTER TABLE ${fkTableName} ADD CONSTRAINT ${relationship.name} FOREIGN KEY (${quotedFkFieldName}) REFERENCES ${refTableName} (${quotedRefFieldName});\n`;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -86,7 +86,7 @@ export interface SQLBinaryExpr extends SQLASTNode {
|
||||
|
||||
export interface SQLFunctionNode extends SQLASTNode {
|
||||
type: 'function';
|
||||
name: string;
|
||||
name: string | { name: Array<{ value: string }> };
|
||||
args?: {
|
||||
value: SQLASTArg[];
|
||||
};
|
||||
@@ -108,6 +108,31 @@ export interface SQLStringLiteral extends SQLASTNode {
|
||||
value: string;
|
||||
}
|
||||
|
||||
export interface SQLDefaultNode extends SQLASTNode {
|
||||
type: 'default';
|
||||
value: SQLASTNode;
|
||||
}
|
||||
|
||||
export interface SQLCastNode extends SQLASTNode {
|
||||
type: 'cast';
|
||||
expr: SQLASTNode;
|
||||
target: Array<{ dataType: string }>;
|
||||
}
|
||||
|
||||
export interface SQLBooleanNode extends SQLASTNode {
|
||||
type: 'bool';
|
||||
value: boolean;
|
||||
}
|
||||
|
||||
export interface SQLNullNode extends SQLASTNode {
|
||||
type: 'null';
|
||||
}
|
||||
|
||||
export interface SQLNumberNode extends SQLASTNode {
|
||||
type: 'number';
|
||||
value: number;
|
||||
}
|
||||
|
||||
export type SQLASTArg =
|
||||
| SQLColumnRef
|
||||
| SQLStringLiteral
|
||||
@@ -146,6 +171,22 @@ export function buildSQLFromAST(
|
||||
): string {
|
||||
if (!ast) return '';
|
||||
|
||||
// Handle default value wrapper
|
||||
if (ast.type === 'default' && 'value' in ast) {
|
||||
const defaultNode = ast as SQLDefaultNode;
|
||||
return buildSQLFromAST(defaultNode.value, dbType);
|
||||
}
|
||||
|
||||
// Handle PostgreSQL cast expressions (e.g., 'value'::type)
|
||||
if (ast.type === 'cast' && 'expr' in ast && 'target' in ast) {
|
||||
const castNode = ast as SQLCastNode;
|
||||
const expr = buildSQLFromAST(castNode.expr, dbType);
|
||||
if (castNode.target.length > 0 && castNode.target[0].dataType) {
|
||||
return `${expr}::${castNode.target[0].dataType.toLowerCase()}`;
|
||||
}
|
||||
return expr;
|
||||
}
|
||||
|
||||
if (ast.type === 'binary_expr') {
|
||||
const expr = ast as SQLBinaryExpr;
|
||||
const leftSQL = buildSQLFromAST(expr.left, dbType);
|
||||
@@ -155,7 +196,59 @@ export function buildSQLFromAST(
|
||||
|
||||
if (ast.type === 'function') {
|
||||
const func = ast as SQLFunctionNode;
|
||||
let expr = func.name;
|
||||
let funcName = '';
|
||||
|
||||
// Handle nested function name structure
|
||||
if (typeof func.name === 'object' && func.name && 'name' in func.name) {
|
||||
const nameObj = func.name as { name: Array<{ value: string }> };
|
||||
if (nameObj.name.length > 0) {
|
||||
funcName = nameObj.name[0].value || '';
|
||||
}
|
||||
} else if (typeof func.name === 'string') {
|
||||
funcName = func.name;
|
||||
}
|
||||
|
||||
if (!funcName) return '';
|
||||
|
||||
// Normalize PostgreSQL function names to uppercase for consistency
|
||||
if (dbType === DatabaseType.POSTGRESQL) {
|
||||
const pgFunctions = [
|
||||
'now',
|
||||
'current_timestamp',
|
||||
'current_date',
|
||||
'current_time',
|
||||
'gen_random_uuid',
|
||||
'random',
|
||||
'nextval',
|
||||
'currval',
|
||||
];
|
||||
if (pgFunctions.includes(funcName.toLowerCase())) {
|
||||
funcName = funcName.toUpperCase();
|
||||
}
|
||||
}
|
||||
|
||||
// Some PostgreSQL functions don't have parentheses (like CURRENT_TIMESTAMP)
|
||||
if (funcName === 'CURRENT_TIMESTAMP' && !func.args) {
|
||||
return funcName;
|
||||
}
|
||||
|
||||
// Handle SQL Server function defaults that were preprocessed as strings
|
||||
// The preprocessor converts NEWID() to 'newid', GETDATE() to 'getdate', etc.
|
||||
if (dbType === DatabaseType.SQL_SERVER) {
|
||||
const sqlServerFunctions: Record<string, string> = {
|
||||
newid: 'NEWID()',
|
||||
newsequentialid: 'NEWSEQUENTIALID()',
|
||||
getdate: 'GETDATE()',
|
||||
sysdatetime: 'SYSDATETIME()',
|
||||
};
|
||||
|
||||
const lowerFuncName = funcName.toLowerCase();
|
||||
if (sqlServerFunctions[lowerFuncName]) {
|
||||
return sqlServerFunctions[lowerFuncName];
|
||||
}
|
||||
}
|
||||
|
||||
let expr = funcName;
|
||||
if (func.args) {
|
||||
expr +=
|
||||
'(' +
|
||||
@@ -175,12 +268,31 @@ export function buildSQLFromAST(
|
||||
})
|
||||
.join(', ') +
|
||||
')';
|
||||
} else {
|
||||
expr += '()';
|
||||
}
|
||||
return expr;
|
||||
} else if (ast.type === 'column_ref') {
|
||||
return quoteIdentifier((ast as SQLColumnRef).column, dbType);
|
||||
} else if (ast.type === 'expr_list') {
|
||||
return (ast as SQLExprList).value.map((v) => v.value).join(' AND ');
|
||||
} else if (ast.type === 'single_quote_string') {
|
||||
// String literal with single quotes
|
||||
const strNode = ast as SQLStringLiteral;
|
||||
return `'${strNode.value}'`;
|
||||
} else if (ast.type === 'double_quote_string') {
|
||||
// String literal with double quotes
|
||||
const strNode = ast as SQLStringLiteral;
|
||||
return `"${strNode.value}"`;
|
||||
} else if (ast.type === 'bool') {
|
||||
// Boolean value
|
||||
const boolNode = ast as SQLBooleanNode;
|
||||
return boolNode.value ? 'TRUE' : 'FALSE';
|
||||
} else if (ast.type === 'null') {
|
||||
return 'NULL';
|
||||
} else if (ast.type === 'number') {
|
||||
const numNode = ast as SQLNumberNode;
|
||||
return String(numNode.value);
|
||||
} else {
|
||||
const valueNode = ast as { type: string; value: string | number };
|
||||
return typeof valueNode.value === 'string'
|
||||
@@ -779,10 +891,10 @@ export function convertToChartDBDiagram(
|
||||
}
|
||||
|
||||
const sourceField = sourceTable.fields.find(
|
||||
(f) => f.name === rel.sourceColumn
|
||||
(f) => f.name.toLowerCase() === rel.sourceColumn.toLowerCase()
|
||||
);
|
||||
const targetField = targetTable.fields.find(
|
||||
(f) => f.name === rel.targetColumn
|
||||
(f) => f.name.toLowerCase() === rel.targetColumn.toLowerCase()
|
||||
);
|
||||
|
||||
if (!sourceField || !targetField) {
|
||||
|
||||
@@ -0,0 +1,228 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromMySQL } from '../mysql';
|
||||
|
||||
describe('MySQL Default Value Import', () => {
|
||||
describe('String Default Values', () => {
|
||||
it('should parse simple string defaults with single quotes', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE tavern_patrons (
|
||||
patron_id INT NOT NULL,
|
||||
membership_status VARCHAR(50) DEFAULT 'regular',
|
||||
PRIMARY KEY (patron_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromMySQL(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const statusColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'membership_status'
|
||||
);
|
||||
expect(statusColumn?.default).toBe("'regular'");
|
||||
});
|
||||
|
||||
it('should parse string defaults with escaped quotes', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizard_spellbooks (
|
||||
spellbook_id INT NOT NULL,
|
||||
incantation VARCHAR(255) DEFAULT 'Dragon\\'s flame',
|
||||
spell_metadata TEXT DEFAULT '{"type": "fire"}',
|
||||
PRIMARY KEY (spellbook_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromMySQL(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const incantationColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'incantation'
|
||||
);
|
||||
expect(incantationColumn?.default).toBeTruthy();
|
||||
const metadataColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'spell_metadata'
|
||||
);
|
||||
expect(metadataColumn?.default).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Numeric Default Values', () => {
|
||||
it('should parse integer defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE dungeon_levels (
|
||||
level_id INT NOT NULL,
|
||||
monster_count INT DEFAULT 0,
|
||||
max_treasure INT DEFAULT 1000,
|
||||
PRIMARY KEY (level_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromMySQL(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const monsterColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'monster_count'
|
||||
);
|
||||
expect(monsterColumn?.default).toBe('0');
|
||||
const treasureColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'max_treasure'
|
||||
);
|
||||
expect(treasureColumn?.default).toBe('1000');
|
||||
});
|
||||
|
||||
it('should parse decimal defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE merchant_inventory (
|
||||
item_id INT NOT NULL,
|
||||
base_price DECIMAL(10, 2) DEFAULT 99.99,
|
||||
loyalty_discount FLOAT DEFAULT 0.15,
|
||||
PRIMARY KEY (item_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromMySQL(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const priceColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'base_price'
|
||||
);
|
||||
expect(priceColumn?.default).toBe('99.99');
|
||||
const discountColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'loyalty_discount'
|
||||
);
|
||||
expect(discountColumn?.default).toBe('0.15');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Boolean Default Values', () => {
|
||||
it('should parse boolean defaults in MySQL (using TINYINT)', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE character_status (
|
||||
character_id INT NOT NULL,
|
||||
is_alive TINYINT(1) DEFAULT 1,
|
||||
is_cursed TINYINT(1) DEFAULT 0,
|
||||
has_magic BOOLEAN DEFAULT TRUE,
|
||||
PRIMARY KEY (character_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromMySQL(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const aliveColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'is_alive'
|
||||
);
|
||||
expect(aliveColumn?.default).toBe('1');
|
||||
const cursedColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'is_cursed'
|
||||
);
|
||||
expect(cursedColumn?.default).toBe('0');
|
||||
});
|
||||
});
|
||||
|
||||
describe('NULL Default Values', () => {
|
||||
it('should parse NULL defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE companion_animals (
|
||||
companion_id INT NOT NULL,
|
||||
special_trait VARCHAR(255) DEFAULT NULL,
|
||||
PRIMARY KEY (companion_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromMySQL(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const traitColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'special_trait'
|
||||
);
|
||||
expect(traitColumn?.default).toBe('NULL');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Function Default Values', () => {
|
||||
it('should parse function defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE quest_entries (
|
||||
entry_id INT NOT NULL AUTO_INCREMENT,
|
||||
quest_accepted TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
quest_uuid VARCHAR(36) DEFAULT (UUID()),
|
||||
PRIMARY KEY (entry_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromMySQL(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const acceptedColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'quest_accepted'
|
||||
);
|
||||
expect(acceptedColumn?.default).toBe('CURRENT_TIMESTAMP');
|
||||
const updatedColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'last_updated'
|
||||
);
|
||||
expect(updatedColumn?.default).toBe('CURRENT_TIMESTAMP');
|
||||
});
|
||||
});
|
||||
|
||||
describe('AUTO_INCREMENT', () => {
|
||||
it('should handle AUTO_INCREMENT columns correctly', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE hero_registry (
|
||||
hero_id INT NOT NULL AUTO_INCREMENT,
|
||||
hero_name VARCHAR(100),
|
||||
PRIMARY KEY (hero_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromMySQL(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const idColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'hero_id'
|
||||
);
|
||||
expect(idColumn?.increment).toBe(true);
|
||||
// AUTO_INCREMENT columns typically don't have a default value
|
||||
expect(idColumn?.default).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Complex Real-World Example', () => {
|
||||
it('should handle complex table with multiple default types', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE adventurer_profiles (
|
||||
adventurer_id BIGINT NOT NULL AUTO_INCREMENT,
|
||||
character_name VARCHAR(50) NOT NULL,
|
||||
guild_email VARCHAR(255) NOT NULL,
|
||||
rank VARCHAR(20) DEFAULT 'novice',
|
||||
is_guild_verified TINYINT(1) DEFAULT 0,
|
||||
gold_coins INT DEFAULT 100,
|
||||
account_balance DECIMAL(10, 2) DEFAULT 0.00,
|
||||
joined_realm TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
last_quest TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
inventory_data JSON DEFAULT NULL,
|
||||
PRIMARY KEY (adventurer_id),
|
||||
UNIQUE KEY uk_guild_email (guild_email),
|
||||
INDEX idx_rank (rank)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromMySQL(sql);
|
||||
const table = result.tables[0];
|
||||
expect(table).toBeDefined();
|
||||
|
||||
// Check various default values
|
||||
const rankColumn = table.columns.find((c) => c.name === 'rank');
|
||||
expect(rankColumn?.default).toBe("'novice'");
|
||||
|
||||
const verifiedColumn = table.columns.find(
|
||||
(c) => c.name === 'is_guild_verified'
|
||||
);
|
||||
expect(verifiedColumn?.default).toBe('0');
|
||||
|
||||
const goldColumn = table.columns.find(
|
||||
(c) => c.name === 'gold_coins'
|
||||
);
|
||||
expect(goldColumn?.default).toBe('100');
|
||||
|
||||
const balanceColumn = table.columns.find(
|
||||
(c) => c.name === 'account_balance'
|
||||
);
|
||||
expect(balanceColumn?.default).toBe('0.00');
|
||||
|
||||
const joinedColumn = table.columns.find(
|
||||
(c) => c.name === 'joined_realm'
|
||||
);
|
||||
expect(joinedColumn?.default).toBe('CURRENT_TIMESTAMP');
|
||||
|
||||
const inventoryColumn = table.columns.find(
|
||||
(c) => c.name === 'inventory_data'
|
||||
);
|
||||
expect(inventoryColumn?.default).toBe('NULL');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -101,12 +101,28 @@ function extractColumnsFromCreateTable(statement: string): SQLColumn[] {
|
||||
const typeMatch = definition.match(/^([^\s(]+)(?:\(([^)]+)\))?/);
|
||||
const dataType = typeMatch ? typeMatch[1] : '';
|
||||
|
||||
// Extract default value
|
||||
let defaultValue: string | undefined;
|
||||
const defaultMatch = definition.match(
|
||||
/DEFAULT\s+('[^']*'|"[^"]*"|NULL|CURRENT_TIMESTAMP|\S+)/i
|
||||
);
|
||||
if (defaultMatch) {
|
||||
defaultValue = defaultMatch[1];
|
||||
}
|
||||
|
||||
// Check for AUTO_INCREMENT
|
||||
const increment = definition
|
||||
.toUpperCase()
|
||||
.includes('AUTO_INCREMENT');
|
||||
|
||||
columns.push({
|
||||
name: columnName,
|
||||
type: dataType,
|
||||
nullable,
|
||||
primaryKey,
|
||||
unique: definition.toUpperCase().includes('UNIQUE'),
|
||||
default: defaultValue,
|
||||
increment,
|
||||
});
|
||||
}
|
||||
}
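// Illustrative sketch (not part of the diff): what the DEFAULT regex above captures
// (group 1 becomes the column's default value). The sample column definitions are
// hypothetical.
const defaultRegex = /DEFAULT\s+('[^']*'|"[^"]*"|NULL|CURRENT_TIMESTAMP|\S+)/i;

console.log("status VARCHAR(50) DEFAULT 'regular'".match(defaultRegex)?.[1]); // "'regular'"
console.log('count INT DEFAULT 0 NOT NULL'.match(defaultRegex)?.[1]); // '0'
console.log('note TEXT DEFAULT NULL'.match(defaultRegex)?.[1]); // 'NULL'
console.log(
    'created TIMESTAMP DEFAULT CURRENT_TIMESTAMP'.match(defaultRegex)?.[1]
); // 'CURRENT_TIMESTAMP'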
|
||||
@@ -721,7 +737,28 @@ export async function fromMySQL(sqlContent: string): Promise<SQLParserResult> {
|
||||
parseError
|
||||
);
|
||||
|
||||
// Error handling without logging
|
||||
// Try fallback parser when main parser fails
|
||||
const tableMatch = trimmedStmt.match(
|
||||
/CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?`?([^`\s(]+)`?\s*\(/i
|
||||
);
|
||||
if (tableMatch) {
|
||||
const tableName = tableMatch[1];
|
||||
const tableId = generateId();
|
||||
tableMap[tableName] = tableId;
|
||||
|
||||
const extractedColumns =
|
||||
extractColumnsFromCreateTable(trimmedStmt);
|
||||
if (extractedColumns.length > 0) {
|
||||
tables.push({
|
||||
id: tableId,
|
||||
name: tableName,
|
||||
schema: undefined,
|
||||
columns: extractedColumns,
|
||||
indexes: [],
|
||||
order: tables.length,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,215 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('PostgreSQL ALTER TABLE ADD COLUMN Tests', () => {
|
||||
it('should handle ALTER TABLE ADD COLUMN statements', async () => {
|
||||
const sql = `
|
||||
CREATE SCHEMA IF NOT EXISTS "public";
|
||||
|
||||
CREATE TABLE "public"."location" (
|
||||
"id" bigint NOT NULL,
|
||||
CONSTRAINT "pk_table_7_id" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- Add new fields to existing location table
|
||||
ALTER TABLE location ADD COLUMN country_id INT;
|
||||
ALTER TABLE location ADD COLUMN state_id INT;
|
||||
ALTER TABLE location ADD COLUMN location_type_id INT;
|
||||
ALTER TABLE location ADD COLUMN city_id INT;
|
||||
ALTER TABLE location ADD COLUMN street TEXT;
|
||||
ALTER TABLE location ADD COLUMN block TEXT;
|
||||
ALTER TABLE location ADD COLUMN building TEXT;
|
||||
ALTER TABLE location ADD COLUMN floor TEXT;
|
||||
ALTER TABLE location ADD COLUMN apartment TEXT;
|
||||
ALTER TABLE location ADD COLUMN lat INT;
|
||||
ALTER TABLE location ADD COLUMN long INT;
|
||||
ALTER TABLE location ADD COLUMN elevation INT;
|
||||
ALTER TABLE location ADD COLUMN erp_site_id INT;
|
||||
ALTER TABLE location ADD COLUMN is_active TEXT;
|
||||
ALTER TABLE location ADD COLUMN remarks TEXT;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const locationTable = result.tables[0];
|
||||
|
||||
expect(locationTable.name).toBe('location');
|
||||
expect(locationTable.schema).toBe('public');
|
||||
|
||||
// Should have the original id column plus all the added columns
|
||||
expect(locationTable.columns).toHaveLength(16);
|
||||
|
||||
// Check that the id column is present
|
||||
const idColumn = locationTable.columns.find((col) => col.name === 'id');
|
||||
expect(idColumn).toBeDefined();
|
||||
expect(idColumn?.type).toBe('BIGINT');
|
||||
expect(idColumn?.primaryKey).toBe(true);
|
||||
|
||||
// Check some of the added columns
|
||||
const countryIdColumn = locationTable.columns.find(
|
||||
(col) => col.name === 'country_id'
|
||||
);
|
||||
expect(countryIdColumn).toBeDefined();
|
||||
expect(countryIdColumn?.type).toBe('INTEGER');
|
||||
|
||||
const streetColumn = locationTable.columns.find(
|
||||
(col) => col.name === 'street'
|
||||
);
|
||||
expect(streetColumn).toBeDefined();
|
||||
expect(streetColumn?.type).toBe('TEXT');
|
||||
|
||||
const remarksColumn = locationTable.columns.find(
|
||||
(col) => col.name === 'remarks'
|
||||
);
|
||||
expect(remarksColumn).toBeDefined();
|
||||
expect(remarksColumn?.type).toBe('TEXT');
|
||||
});
|
||||
|
||||
it('should handle ALTER TABLE ADD COLUMN with schema qualification', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE public.users (
|
||||
id INTEGER PRIMARY KEY
|
||||
);
|
||||
|
||||
ALTER TABLE public.users ADD COLUMN email VARCHAR(255);
|
||||
ALTER TABLE public.users ADD COLUMN created_at TIMESTAMP;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const usersTable = result.tables[0];
|
||||
|
||||
expect(usersTable.columns).toHaveLength(3);
|
||||
|
||||
const emailColumn = usersTable.columns.find(
|
||||
(col) => col.name === 'email'
|
||||
);
|
||||
expect(emailColumn).toBeDefined();
|
||||
expect(emailColumn?.type).toBe('VARCHAR(255)');
|
||||
|
||||
const createdAtColumn = usersTable.columns.find(
|
||||
(col) => col.name === 'created_at'
|
||||
);
|
||||
expect(createdAtColumn).toBeDefined();
|
||||
expect(createdAtColumn?.type).toBe('TIMESTAMP');
|
||||
});
|
||||
|
||||
it('should handle ALTER TABLE ADD COLUMN with constraints', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE products (
|
||||
id SERIAL PRIMARY KEY
|
||||
);
|
||||
|
||||
ALTER TABLE products ADD COLUMN name VARCHAR(100) NOT NULL;
|
||||
ALTER TABLE products ADD COLUMN sku VARCHAR(50) UNIQUE;
|
||||
ALTER TABLE products ADD COLUMN price DECIMAL(10,2) DEFAULT 0.00;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const productsTable = result.tables[0];
|
||||
|
||||
expect(productsTable.columns).toHaveLength(4);
|
||||
|
||||
const nameColumn = productsTable.columns.find(
|
||||
(col) => col.name === 'name'
|
||||
);
|
||||
expect(nameColumn).toBeDefined();
|
||||
expect(nameColumn?.nullable).toBe(false);
|
||||
|
||||
const skuColumn = productsTable.columns.find(
|
||||
(col) => col.name === 'sku'
|
||||
);
|
||||
expect(skuColumn).toBeDefined();
|
||||
expect(skuColumn?.unique).toBe(true);
|
||||
|
||||
const priceColumn = productsTable.columns.find(
|
||||
(col) => col.name === 'price'
|
||||
);
|
||||
expect(priceColumn).toBeDefined();
|
||||
expect(priceColumn?.default).toBe('0');
|
||||
});
|
||||
|
||||
it('should not add duplicate columns', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE items (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name VARCHAR(100)
|
||||
);
|
||||
|
||||
ALTER TABLE items ADD COLUMN description TEXT;
|
||||
ALTER TABLE items ADD COLUMN name VARCHAR(200); -- Should not be added as duplicate
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const itemsTable = result.tables[0];
|
||||
|
||||
// Should only have 3 columns: id, name (original), and description
|
||||
expect(itemsTable.columns).toHaveLength(3);
|
||||
|
||||
const nameColumns = itemsTable.columns.filter(
|
||||
(col) => col.name === 'name'
|
||||
);
|
||||
expect(nameColumns).toHaveLength(1);
|
||||
expect(nameColumns[0].type).toBe('VARCHAR(100)'); // Should keep original type
|
||||
});
|
||||
|
||||
it('should use default schema when not specified', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE test_table (
|
||||
id INTEGER PRIMARY KEY
|
||||
);
|
||||
|
||||
ALTER TABLE test_table ADD COLUMN value TEXT;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const testTable = result.tables[0];
|
||||
|
||||
expect(testTable.schema).toBe('public');
|
||||
expect(testTable.columns).toHaveLength(2);
|
||||
|
||||
const valueColumn = testTable.columns.find(
|
||||
(col) => col.name === 'value'
|
||||
);
|
||||
expect(valueColumn).toBeDefined();
|
||||
});
|
||||
|
||||
it('should handle quoted identifiers in ALTER TABLE ADD COLUMN', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "my-table" (
|
||||
"id" INTEGER PRIMARY KEY
|
||||
);
|
||||
|
||||
ALTER TABLE "my-table" ADD COLUMN "my-column" VARCHAR(50);
|
||||
ALTER TABLE "my-table" ADD COLUMN "another-column" INTEGER;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const myTable = result.tables[0];
|
||||
|
||||
expect(myTable.name).toBe('my-table');
|
||||
expect(myTable.columns).toHaveLength(3);
|
||||
|
||||
const myColumn = myTable.columns.find(
|
||||
(col) => col.name === 'my-column'
|
||||
);
|
||||
expect(myColumn).toBeDefined();
|
||||
expect(myColumn?.type).toBe('VARCHAR(50)');
|
||||
|
||||
const anotherColumn = myTable.columns.find(
|
||||
(col) => col.name === 'another-column'
|
||||
);
|
||||
expect(anotherColumn).toBeDefined();
|
||||
expect(anotherColumn?.type).toBe('INTEGER');
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,118 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('PostgreSQL ALTER TABLE ALTER COLUMN TYPE', () => {
|
||||
it('should handle ALTER TABLE ALTER COLUMN TYPE statements', async () => {
|
||||
const sql = `
|
||||
CREATE SCHEMA IF NOT EXISTS "public";
|
||||
|
||||
CREATE TABLE "public"."table_12" (
|
||||
"id" SERIAL,
|
||||
"field1" varchar(200),
|
||||
"field2" varchar(200),
|
||||
"field3" varchar(200),
|
||||
PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
ALTER TABLE table_12 ALTER COLUMN field1 TYPE VARCHAR(254);
|
||||
ALTER TABLE table_12 ALTER COLUMN field2 TYPE VARCHAR(254);
|
||||
ALTER TABLE table_12 ALTER COLUMN field3 TYPE VARCHAR(254);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const table = result.tables[0];
|
||||
|
||||
expect(table.name).toBe('table_12');
|
||||
expect(table.columns).toHaveLength(4); // id, field1, field2, field3
|
||||
|
||||
// Check that the columns have the updated type
|
||||
const field1 = table.columns.find((col) => col.name === 'field1');
|
||||
expect(field1).toBeDefined();
|
||||
expect(field1?.type).toBe('VARCHAR(254)'); // Should be updated from 200 to 254
|
||||
|
||||
const field2 = table.columns.find((col) => col.name === 'field2');
|
||||
expect(field2).toBeDefined();
|
||||
expect(field2?.type).toBe('VARCHAR(254)');
|
||||
|
||||
const field3 = table.columns.find((col) => col.name === 'field3');
|
||||
expect(field3).toBeDefined();
|
||||
expect(field3?.type).toBe('VARCHAR(254)');
|
||||
});
|
||||
|
||||
it('should handle various ALTER COLUMN TYPE scenarios', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE test_table (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name VARCHAR(50),
|
||||
age SMALLINT,
|
||||
score NUMERIC(5,2)
|
||||
);
|
||||
|
||||
-- Change varchar length
|
||||
ALTER TABLE test_table ALTER COLUMN name TYPE VARCHAR(100);
|
||||
|
||||
-- Change numeric type
|
||||
ALTER TABLE test_table ALTER COLUMN age TYPE INTEGER;
|
||||
|
||||
-- Change precision
|
||||
ALTER TABLE test_table ALTER COLUMN score TYPE NUMERIC(10,4);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
const table = result.tables[0];
|
||||
|
||||
const nameCol = table.columns.find((col) => col.name === 'name');
|
||||
expect(nameCol?.type).toBe('VARCHAR(100)');
|
||||
|
||||
const ageCol = table.columns.find((col) => col.name === 'age');
|
||||
expect(ageCol?.type).toBe('INTEGER');
|
||||
|
||||
const scoreCol = table.columns.find((col) => col.name === 'score');
|
||||
expect(scoreCol?.type).toBe('NUMERIC(10,4)');
|
||||
});
|
||||
|
||||
it('should handle multiple type changes on the same column', async () => {
|
||||
const sql = `
|
||||
CREATE SCHEMA IF NOT EXISTS "public";
|
||||
|
||||
CREATE TABLE "public"."table_12" (
|
||||
"id" SERIAL,
|
||||
"field1" varchar(200),
|
||||
"field2" varchar(200),
|
||||
"field3" varchar(200),
|
||||
PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
ALTER TABLE table_12 ALTER COLUMN field1 TYPE VARCHAR(254);
|
||||
ALTER TABLE table_12 ALTER COLUMN field2 TYPE VARCHAR(254);
|
||||
ALTER TABLE table_12 ALTER COLUMN field3 TYPE VARCHAR(254);
|
||||
ALTER TABLE table_12 ALTER COLUMN field1 TYPE BIGINT;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const table = result.tables[0];
|
||||
|
||||
expect(table.name).toBe('table_12');
|
||||
expect(table.schema).toBe('public');
|
||||
expect(table.columns).toHaveLength(4);
|
||||
|
||||
// Check that field1 has the final type (BIGINT), not the intermediate VARCHAR(254)
|
||||
const field1 = table.columns.find((col) => col.name === 'field1');
|
||||
expect(field1).toBeDefined();
|
||||
expect(field1?.type).toBe('BIGINT'); // Should be BIGINT, not VARCHAR(254)
|
||||
|
||||
// Check that field2 and field3 still have VARCHAR(254)
|
||||
const field2 = table.columns.find((col) => col.name === 'field2');
|
||||
expect(field2).toBeDefined();
|
||||
expect(field2?.type).toBe('VARCHAR(254)');
|
||||
|
||||
const field3 = table.columns.find((col) => col.name === 'field3');
|
||||
expect(field3).toBeDefined();
|
||||
expect(field3?.type).toBe('VARCHAR(254)');
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,117 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('PostgreSQL ALTER TABLE with Foreign Keys', () => {
|
||||
it('should handle ALTER TABLE ADD COLUMN followed by ALTER TABLE ADD FOREIGN KEY', async () => {
|
||||
const sql = `
|
||||
CREATE SCHEMA IF NOT EXISTS "public";
|
||||
|
||||
CREATE TABLE "public"."location" (
|
||||
"id" bigint NOT NULL,
|
||||
CONSTRAINT "pk_table_7_id" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- Add new fields to existing location table
|
||||
ALTER TABLE location ADD COLUMN country_id INT;
|
||||
ALTER TABLE location ADD COLUMN state_id INT;
|
||||
ALTER TABLE location ADD COLUMN location_type_id INT;
|
||||
ALTER TABLE location ADD COLUMN city_id INT;
|
||||
ALTER TABLE location ADD COLUMN street TEXT;
|
||||
ALTER TABLE location ADD COLUMN block TEXT;
|
||||
ALTER TABLE location ADD COLUMN building TEXT;
|
||||
ALTER TABLE location ADD COLUMN floor TEXT;
|
||||
ALTER TABLE location ADD COLUMN apartment TEXT;
|
||||
ALTER TABLE location ADD COLUMN lat INT;
|
||||
ALTER TABLE location ADD COLUMN long INT;
|
||||
ALTER TABLE location ADD COLUMN elevation INT;
|
||||
ALTER TABLE location ADD COLUMN erp_site_id INT;
|
||||
ALTER TABLE location ADD COLUMN is_active TEXT;
|
||||
ALTER TABLE location ADD COLUMN remarks TEXT;
|
||||
|
||||
-- Create lookup tables
|
||||
CREATE TABLE country (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL,
|
||||
code VARCHAR(3) UNIQUE
|
||||
);
|
||||
|
||||
CREATE TABLE state (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL,
|
||||
country_id INT NOT NULL,
|
||||
FOREIGN KEY (country_id) REFERENCES country(id)
|
||||
);
|
||||
|
||||
CREATE TABLE location_type (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE city (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL,
|
||||
state_id INT NOT NULL,
|
||||
FOREIGN KEY (state_id) REFERENCES state(id)
|
||||
);
|
||||
|
||||
-- Add foreign key constraints from location to lookup tables
|
||||
ALTER TABLE location ADD CONSTRAINT fk_location_country
|
||||
FOREIGN KEY (country_id) REFERENCES country(id);
|
||||
ALTER TABLE location ADD CONSTRAINT fk_location_state
|
||||
FOREIGN KEY (state_id) REFERENCES state(id);
|
||||
ALTER TABLE location ADD CONSTRAINT fk_location_location_type
|
||||
FOREIGN KEY (location_type_id) REFERENCES location_type(id);
|
||||
ALTER TABLE location ADD CONSTRAINT fk_location_city
|
||||
FOREIGN KEY (city_id) REFERENCES city(id);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
const locationTable = result.tables.find((t) => t.name === 'location');
|
||||
|
||||
// Check tables
|
||||
expect(result.tables).toHaveLength(5); // location, country, state, location_type, city
|
||||
|
||||
// Check location table has all columns
|
||||
expect(locationTable).toBeDefined();
|
||||
expect(locationTable?.columns).toHaveLength(16); // id + 15 added columns
|
||||
|
||||
// Check foreign key relationships
|
||||
const locationRelationships = result.relationships.filter(
|
||||
(r) => r.sourceTable === 'location'
|
||||
);
|
||||
|
||||
// Should have 4 FKs from location to lookup tables + 2 from state/city
|
||||
expect(result.relationships.length).toBeGreaterThanOrEqual(6);
|
||||
|
||||
// Check specific foreign keys from location
|
||||
expect(
|
||||
locationRelationships.some(
|
||||
(r) =>
|
||||
r.sourceColumn === 'country_id' &&
|
||||
r.targetTable === 'country'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
locationRelationships.some(
|
||||
(r) =>
|
||||
r.sourceColumn === 'state_id' && r.targetTable === 'state'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
locationRelationships.some(
|
||||
(r) =>
|
||||
r.sourceColumn === 'location_type_id' &&
|
||||
r.targetTable === 'location_type'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
locationRelationships.some(
|
||||
(r) => r.sourceColumn === 'city_id' && r.targetTable === 'city'
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,395 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('PostgreSQL Default Value Import', () => {
|
||||
describe('String Default Values', () => {
|
||||
it('should parse simple string defaults with single quotes', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE heroes (
|
||||
hero_id INTEGER NOT NULL,
|
||||
hero_status CHARACTER VARYING DEFAULT 'questing',
|
||||
PRIMARY KEY (hero_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const statusColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'hero_status'
|
||||
);
|
||||
expect(statusColumn?.default).toBe("'questing'");
|
||||
});
|
||||
|
||||
it('should parse string defaults with special characters that need escaping', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE spell_scrolls (
|
||||
scroll_id INTEGER NOT NULL,
|
||||
incantation CHARACTER VARYING DEFAULT 'Dragon''s breath',
|
||||
rune_inscription TEXT DEFAULT 'Ancient rune
|
||||
Sacred symbol',
|
||||
PRIMARY KEY (scroll_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const incantationColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'incantation'
|
||||
);
|
||||
expect(incantationColumn?.default).toBe("'Dragon''s breath'");
|
||||
});
|
||||
|
||||
it('should parse elvish text default values', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE elven_greetings (
|
||||
greeting_id INTEGER NOT NULL,
|
||||
elvish_welcome CHARACTER VARYING DEFAULT 'Mae govannen',
|
||||
PRIMARY KEY (greeting_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const greetingColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'elvish_welcome'
|
||||
);
|
||||
expect(greetingColumn?.default).toBe("'Mae govannen'");
|
||||
});
|
||||
});
|
||||
|
||||
describe('Numeric Default Values', () => {
|
||||
it('should parse integer defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE dragon_hoards (
|
||||
hoard_id INTEGER NOT NULL,
|
||||
gold_pieces INTEGER DEFAULT 0,
|
||||
max_treasure_value INTEGER DEFAULT 10000,
|
||||
PRIMARY KEY (hoard_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const goldColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'gold_pieces'
|
||||
);
|
||||
expect(goldColumn?.default).toBe('0');
|
||||
const treasureColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'max_treasure_value'
|
||||
);
|
||||
expect(treasureColumn?.default).toBe('10000');
|
||||
});
|
||||
|
||||
it('should parse decimal defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE enchanted_items (
|
||||
item_id INTEGER NOT NULL,
|
||||
market_price DECIMAL(10, 2) DEFAULT 99.99,
|
||||
magic_power_rating NUMERIC DEFAULT 0.85,
|
||||
PRIMARY KEY (item_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const priceColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'market_price'
|
||||
);
|
||||
expect(priceColumn?.default).toBe('99.99');
|
||||
const powerColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'magic_power_rating'
|
||||
);
|
||||
expect(powerColumn?.default).toBe('0.85');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Boolean Default Values', () => {
|
||||
it('should parse boolean defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE magical_artifacts (
|
||||
artifact_id INTEGER NOT NULL,
|
||||
is_cursed BOOLEAN DEFAULT TRUE,
|
||||
is_destroyed BOOLEAN DEFAULT FALSE,
|
||||
is_legendary BOOLEAN DEFAULT '1',
|
||||
is_identified BOOLEAN DEFAULT '0',
|
||||
PRIMARY KEY (artifact_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const cursedColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'is_cursed'
|
||||
);
|
||||
expect(cursedColumn?.default).toBe('TRUE');
|
||||
const destroyedColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'is_destroyed'
|
||||
);
|
||||
expect(destroyedColumn?.default).toBe('FALSE');
|
||||
const legendaryColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'is_legendary'
|
||||
);
|
||||
expect(legendaryColumn?.default).toBe("'1'");
|
||||
const identifiedColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'is_identified'
|
||||
);
|
||||
expect(identifiedColumn?.default).toBe("'0'");
|
||||
});
|
||||
});
|
||||
|
||||
describe('NULL Default Values', () => {
|
||||
it('should parse NULL defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizard_familiars (
|
||||
familiar_id INTEGER NOT NULL,
|
||||
special_ability CHARACTER VARYING DEFAULT NULL,
|
||||
PRIMARY KEY (familiar_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const abilityColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'special_ability'
|
||||
);
|
||||
expect(abilityColumn?.default).toBe('NULL');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Function Default Values', () => {
|
||||
it('should parse function defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE quest_logs (
|
||||
quest_id UUID DEFAULT gen_random_uuid(),
|
||||
quest_started TIMESTAMP DEFAULT NOW(),
|
||||
last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
difficulty_roll INTEGER DEFAULT random()
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const questIdColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'quest_id'
|
||||
);
|
||||
expect(questIdColumn?.default).toBe('GEN_RANDOM_UUID()');
|
||||
const startedColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'quest_started'
|
||||
);
|
||||
expect(startedColumn?.default).toBe('NOW()');
|
||||
const updatedColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'last_updated'
|
||||
);
|
||||
expect(updatedColumn?.default).toBe('CURRENT_TIMESTAMP');
|
||||
const difficultyColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'difficulty_roll'
|
||||
);
|
||||
expect(difficultyColumn?.default).toBe('RANDOM()');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Complex Real-World Example', () => {
|
||||
it('should handle a complex guild management table correctly', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "realm"(
|
||||
"realm_id" integer NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE "guild"(
|
||||
"guild_id" CHARACTER VARYING NOT NULL UNIQUE,
|
||||
PRIMARY KEY ("guild_id")
|
||||
);
|
||||
|
||||
CREATE TABLE "guild_schedule"(
|
||||
"schedule_id" CHARACTER VARYING NOT NULL UNIQUE,
|
||||
PRIMARY KEY ("schedule_id")
|
||||
);
|
||||
|
||||
CREATE TABLE "guild_quests"(
|
||||
"is_active" CHARACTER VARYING NOT NULL DEFAULT 'active',
|
||||
"quest_description" CHARACTER VARYING,
|
||||
"quest_type" CHARACTER VARYING,
|
||||
"quest_status" CHARACTER VARYING DEFAULT 'pending',
|
||||
"quest_id" CHARACTER VARYING NOT NULL UNIQUE,
|
||||
"reward_gold" CHARACTER VARYING,
|
||||
"quest_giver" CHARACTER VARYING,
|
||||
"party_size" CHARACTER VARYING,
|
||||
"difficulty_level" CHARACTER VARYING,
|
||||
"monster_type" CHARACTER VARYING,
|
||||
"dungeon_location" CHARACTER VARYING,
|
||||
"main_guild_ref" CHARACTER VARYING NOT NULL,
|
||||
"schedule_ref" CHARACTER VARYING,
|
||||
"last_attempt" CHARACTER VARYING,
|
||||
"max_attempts" INTEGER,
|
||||
"failed_attempts" INTEGER,
|
||||
"party_members" INTEGER,
|
||||
"loot_distributor" CHARACTER VARYING,
|
||||
"quest_validator" CHARACTER VARYING,
|
||||
"scout_report" CHARACTER VARYING,
|
||||
"completion_xp" INTEGER,
|
||||
"bonus_xp" INTEGER,
|
||||
"map_coordinates" CHARACTER VARYING,
|
||||
"quest_correlation" CHARACTER VARYING,
|
||||
"is_completed" BOOLEAN NOT NULL DEFAULT '0',
|
||||
"reward_items" CHARACTER VARYING,
|
||||
"quest_priority" INTEGER,
|
||||
"started_at" CHARACTER VARYING,
|
||||
"status" CHARACTER VARYING,
|
||||
"completed_at" CHARACTER VARYING,
|
||||
"party_level" INTEGER,
|
||||
"quest_master" CHARACTER VARYING,
|
||||
PRIMARY KEY ("quest_id"),
|
||||
FOREIGN KEY ("main_guild_ref") REFERENCES "guild"("guild_id"),
|
||||
FOREIGN KEY ("schedule_ref") REFERENCES "guild_schedule"("schedule_id")
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
// Find the guild_quests table
|
||||
const questTable = result.tables.find(
|
||||
(t) => t.name === 'guild_quests'
|
||||
);
|
||||
expect(questTable).toBeDefined();
|
||||
|
||||
// Check specific default values
|
||||
const activeColumn = questTable?.columns.find(
|
||||
(c) => c.name === 'is_active'
|
||||
);
|
||||
expect(activeColumn?.default).toBe("'active'");
|
||||
|
||||
const statusColumn = questTable?.columns.find(
|
||||
(c) => c.name === 'quest_status'
|
||||
);
|
||||
expect(statusColumn?.default).toBe("'pending'");
|
||||
|
||||
const completedColumn = questTable?.columns.find(
|
||||
(c) => c.name === 'is_completed'
|
||||
);
|
||||
expect(completedColumn?.default).toBe("'0'");
|
||||
});
|
||||
});
|
||||
|
||||
describe('ALTER TABLE ADD COLUMN with defaults', () => {
|
||||
it('should handle ALTER TABLE ADD COLUMN with default values', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE adventurers (
|
||||
adventurer_id INTEGER NOT NULL,
|
||||
PRIMARY KEY (adventurer_id)
|
||||
);
|
||||
|
||||
ALTER TABLE adventurers ADD COLUMN class_type VARCHAR(50) DEFAULT 'warrior';
|
||||
ALTER TABLE adventurers ADD COLUMN experience_points INTEGER DEFAULT 0;
|
||||
ALTER TABLE adventurers ADD COLUMN is_guild_member BOOLEAN DEFAULT TRUE;
|
||||
ALTER TABLE adventurers ADD COLUMN joined_at TIMESTAMP DEFAULT NOW();
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
const classColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'class_type'
|
||||
);
|
||||
expect(classColumn?.default).toBe("'warrior'");
|
||||
|
||||
const xpColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'experience_points'
|
||||
);
|
||||
expect(xpColumn?.default).toBe('0');
|
||||
|
||||
const guildColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'is_guild_member'
|
||||
);
|
||||
expect(guildColumn?.default).toBe('TRUE');
|
||||
|
||||
const joinedColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'joined_at'
|
||||
);
|
||||
expect(joinedColumn?.default).toBe('NOW()');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge Cases and Special Characters', () => {
|
||||
it('should handle defaults with parentheses in strings', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE spell_formulas (
|
||||
formula_id INTEGER NOT NULL,
|
||||
damage_calculation VARCHAR DEFAULT '(strength + magic) * 2',
|
||||
mana_cost TEXT DEFAULT 'cast(level * 10 - wisdom)',
|
||||
PRIMARY KEY (formula_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const damageColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'damage_calculation'
|
||||
);
|
||||
expect(damageColumn?.default).toBe("'(strength + magic) * 2'");
|
||||
const manaColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'mana_cost'
|
||||
);
|
||||
expect(manaColumn?.default).toBe("'cast(level * 10 - wisdom)'");
|
||||
});
|
||||
|
||||
it('should handle defaults with JSON strings', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE item_enchantments (
|
||||
enchantment_id INTEGER NOT NULL,
|
||||
properties JSON DEFAULT '{"element": "fire"}',
|
||||
modifiers JSONB DEFAULT '[]',
|
||||
PRIMARY KEY (enchantment_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const propertiesColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'properties'
|
||||
);
|
||||
expect(propertiesColumn?.default).toBe(`'{"element": "fire"}'`);
|
||||
const modifiersColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'modifiers'
|
||||
);
|
||||
expect(modifiersColumn?.default).toBe("'[]'");
|
||||
});
|
||||
|
||||
it('should handle casting in defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE ancient_runes (
|
||||
rune_id INTEGER NOT NULL,
|
||||
rune_type VARCHAR DEFAULT 'healing'::text,
|
||||
PRIMARY KEY (rune_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const runeColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'rune_type'
|
||||
);
|
||||
expect(runeColumn?.default).toBe("'healing'::text");
|
||||
});
|
||||
});
|
||||
|
||||
describe('Serial Types', () => {
|
||||
it('should not set default for SERIAL types as they auto-increment', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE monster_spawns (
|
||||
spawn_id SERIAL PRIMARY KEY,
|
||||
minion_id SMALLSERIAL,
|
||||
boss_id BIGSERIAL
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
const spawnColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'spawn_id'
|
||||
);
|
||||
expect(spawnColumn?.default).toBeUndefined();
|
||||
expect(spawnColumn?.increment).toBe(true);
|
||||
|
||||
const minionColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'minion_id'
|
||||
);
|
||||
expect(minionColumn?.default).toBeUndefined();
|
||||
expect(minionColumn?.increment).toBe(true);
|
||||
|
||||
const bossColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'boss_id'
|
||||
);
|
||||
expect(bossColumn?.default).toBeUndefined();
|
||||
expect(bossColumn?.increment).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -91,7 +91,38 @@ export interface AlterTableExprItem {
|
||||
action: string;
|
||||
resource?: string;
|
||||
type?: string;
|
||||
keyword?: string;
|
||||
constraint?: { constraint_type?: string };
|
||||
// Properties for ADD COLUMN
|
||||
column?:
|
||||
| {
|
||||
column?:
|
||||
| {
|
||||
expr?: {
|
||||
value?: string;
|
||||
};
|
||||
}
|
||||
| string;
|
||||
}
|
||||
| string
|
||||
| ColumnReference;
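// Note: for ADD/ALTER COLUMN the parser may emit either a nested
// { column: { expr: { value: 'country_id' } } } object or a plain string here;
// both shapes are unwrapped when the column name is extracted later.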
|
||||
definition?: {
|
||||
dataType?: string;
|
||||
length?: number;
|
||||
precision?: number;
|
||||
scale?: number;
|
||||
suffix?: unknown[];
|
||||
nullable?: { type: string };
|
||||
unique?: string;
|
||||
primary_key?: string;
|
||||
constraint?: string;
|
||||
default_val?: unknown;
|
||||
auto_increment?: string;
|
||||
};
|
||||
nullable?: { type: string; value?: string };
|
||||
unique?: string;
|
||||
default_val?: unknown;
|
||||
// Properties for constraints
|
||||
create_definitions?:
|
||||
| AlterTableConstraintDefinition
|
||||
| {
|
||||
|
||||
@@ -7,6 +7,8 @@ import type {
|
||||
SQLForeignKey,
|
||||
SQLEnumType,
|
||||
} from '../../common';
|
||||
import { buildSQLFromAST } from '../../common';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import type {
|
||||
TableReference,
|
||||
ColumnReference,
|
||||
@@ -347,13 +349,20 @@ function extractColumnsFromSQL(sql: string): SQLColumn[] {
|
||||
|
||||
// Try to extract column definition
|
||||
// Match: column_name TYPE[(params)][array]
|
||||
// Updated regex to handle complex types like GEOGRAPHY(POINT, 4326) and custom types like subscription_status
|
||||
const columnMatch = trimmedLine.match(
|
||||
/^\s*["']?(\w+)["']?\s+([\w_]+(?:\([^)]+\))?(?:\[\])?)/i
|
||||
);
|
||||
// First extract column name and everything after it
|
||||
const columnMatch = trimmedLine.match(/^\s*["']?(\w+)["']?\s+(.+)/i);
|
||||
if (columnMatch) {
|
||||
const columnName = columnMatch[1];
|
||||
let columnType = columnMatch[2];
|
||||
const restOfLine = columnMatch[2];
|
||||
|
||||
// Now extract the type from the rest of the line
|
||||
// Match type which could be multi-word (like CHARACTER VARYING) with optional params
|
||||
const typeMatch = restOfLine.match(
|
||||
/^((?:CHARACTER\s+VARYING|DOUBLE\s+PRECISION|[\w]+)(?:\([^)]+\))?(?:\[\])?)/i
|
||||
);
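// e.g. given "hero_status CHARACTER VARYING(50) DEFAULT 'questing'", restOfLine is
// "CHARACTER VARYING(50) DEFAULT 'questing'" and typeMatch[1] is "CHARACTER VARYING(50)".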
|
||||
|
||||
if (!typeMatch) continue;
|
||||
let columnType = typeMatch[1].trim();
|
||||
|
||||
// Normalize PostGIS types
|
||||
if (columnType.toUpperCase().startsWith('GEOGRAPHY')) {
|
||||
@@ -380,7 +389,65 @@ function extractColumnsFromSQL(sql: string): SQLColumn[] {
|
||||
const isPrimary = trimmedLine.match(/PRIMARY\s+KEY/i) !== null;
|
||||
const isNotNull = trimmedLine.match(/NOT\s+NULL/i) !== null;
|
||||
const isUnique = trimmedLine.match(/\bUNIQUE\b/i) !== null;
|
||||
const hasDefault = trimmedLine.match(/DEFAULT\s+/i) !== null;
|
||||
|
||||
// Extract default value
|
||||
let defaultValue: string | undefined;
|
||||
// Updated regex to handle casting with :: operator
|
||||
const defaultMatch = trimmedLine.match(
|
||||
/DEFAULT\s+((?:'[^']*'|"[^"]*"|\S+)(?:::\w+)?)/i
|
||||
);
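// e.g. "DEFAULT 'healing'::text" captures "'healing'::text", and "DEFAULT 0,"
// captures "0," whose trailing comma is stripped just below.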
|
||||
if (defaultMatch) {
|
||||
let defVal = defaultMatch[1].trim();
|
||||
// Remove trailing comma if present
|
||||
defVal = defVal.replace(/,$/, '').trim();
|
||||
// Handle string literals
|
||||
if (defVal.startsWith("'") && defVal.endsWith("'")) {
|
||||
// Keep the quotes for string literals
|
||||
defaultValue = defVal;
|
||||
} else if (defVal.match(/^\d+(\.\d+)?$/)) {
|
||||
// Numeric value
|
||||
defaultValue = defVal;
|
||||
} else if (
|
||||
defVal.toUpperCase() === 'TRUE' ||
|
||||
defVal.toUpperCase() === 'FALSE'
|
||||
) {
|
||||
// Boolean value
|
||||
defaultValue = defVal.toUpperCase();
|
||||
} else if (defVal.toUpperCase() === 'NULL') {
|
||||
// NULL value
|
||||
defaultValue = 'NULL';
|
||||
} else if (defVal.includes('(') && defVal.includes(')')) {
|
||||
// Function call (like gen_random_uuid())
|
||||
// Normalize PostgreSQL function names to uppercase
|
||||
const funcMatch = defVal.match(/^(\w+)\(/);
|
||||
if (funcMatch) {
|
||||
const funcName = funcMatch[1];
|
||||
const pgFunctions = [
|
||||
'now',
|
||||
'current_timestamp',
|
||||
'current_date',
|
||||
'current_time',
|
||||
'gen_random_uuid',
|
||||
'random',
|
||||
'nextval',
|
||||
'currval',
|
||||
];
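// Only these well-known built-ins get their name uppercased (e.g. now() -> NOW(),
// gen_random_uuid() -> GEN_RANDOM_UUID()); any other function-style default is kept verbatim.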
|
||||
if (pgFunctions.includes(funcName.toLowerCase())) {
|
||||
defaultValue = defVal.replace(
|
||||
funcName,
|
||||
funcName.toUpperCase()
|
||||
);
|
||||
} else {
|
||||
defaultValue = defVal;
|
||||
}
|
||||
} else {
|
||||
defaultValue = defVal;
|
||||
}
|
||||
} else {
|
||||
// Other expressions
|
||||
defaultValue = defVal;
|
||||
}
|
||||
}
|
||||
|
||||
columns.push({
|
||||
name: columnName,
|
||||
@@ -388,7 +455,7 @@ function extractColumnsFromSQL(sql: string): SQLColumn[] {
|
||||
nullable: !isNotNull && !isPrimary,
|
||||
primaryKey: isPrimary,
|
||||
unique: isUnique || isPrimary,
|
||||
default: hasDefault ? 'has default' : undefined,
|
||||
default: defaultValue,
|
||||
increment:
|
||||
isSerialType ||
|
||||
trimmedLine.includes('gen_random_uuid()') ||
|
||||
@@ -996,7 +1063,7 @@ export async function fromPostgres(
|
||||
}
|
||||
}
|
||||
|
||||
// Fourth pass: process ALTER TABLE statements for foreign keys
|
||||
// Fourth pass: process ALTER TABLE statements for foreign keys and ADD COLUMN
|
||||
for (const stmt of statements) {
|
||||
if (stmt.type === 'alter' && stmt.parsed) {
|
||||
const alterTableStmt = stmt.parsed as AlterTableStatement;
|
||||
@@ -1026,13 +1093,440 @@ export async function fromPostgres(
|
||||
);
|
||||
if (!table) continue;
|
||||
|
||||
// Process foreign key constraints in ALTER TABLE
|
||||
// Process ALTER TABLE expressions
|
||||
if (alterTableStmt.expr && Array.isArray(alterTableStmt.expr)) {
|
||||
alterTableStmt.expr.forEach((expr: AlterTableExprItem) => {
|
||||
if (expr.action === 'add' && expr.create_definitions) {
|
||||
// Handle ALTER COLUMN TYPE
|
||||
if (expr.action === 'alter' && expr.resource === 'column') {
|
||||
// Extract column name
|
||||
let columnName: string | undefined;
|
||||
if (
|
||||
typeof expr.column === 'object' &&
|
||||
'column' in expr.column
|
||||
) {
|
||||
const innerColumn = expr.column.column;
|
||||
if (
|
||||
typeof innerColumn === 'object' &&
|
||||
'expr' in innerColumn &&
|
||||
innerColumn.expr?.value
|
||||
) {
|
||||
columnName = innerColumn.expr.value;
|
||||
} else if (typeof innerColumn === 'string') {
|
||||
columnName = innerColumn;
|
||||
}
|
||||
} else if (typeof expr.column === 'string') {
|
||||
columnName = expr.column;
|
||||
}
|
||||
|
||||
// Check if it's a TYPE change
|
||||
if (
|
||||
columnName &&
|
||||
expr.type === 'alter' &&
|
||||
expr.definition?.dataType
|
||||
) {
|
||||
// Find the column in the table and update its type
|
||||
const column = table.columns.find(
|
||||
(col) => (col as SQLColumn).name === columnName
|
||||
);
|
||||
if (column) {
|
||||
const definition = expr.definition;
|
||||
const rawDataType = String(definition.dataType);
|
||||
|
||||
// console.log('ALTER TYPE expr:', JSON.stringify(expr, null, 2));
|
||||
|
||||
// Normalize the type
|
||||
let normalizedType =
|
||||
normalizePostgreSQLType(rawDataType);
|
||||
|
||||
// Handle type parameters
|
||||
if (
|
||||
definition.scale !== undefined &&
|
||||
definition.scale !== null
|
||||
) {
|
||||
// For NUMERIC/DECIMAL with scale, length is actually precision
|
||||
const precision =
|
||||
definition.length ||
|
||||
definition.precision;
|
||||
normalizedType = `${normalizedType}(${precision},${definition.scale})`;
|
||||
} else if (
|
||||
definition.length !== undefined &&
|
||||
definition.length !== null
|
||||
) {
|
||||
normalizedType = `${normalizedType}(${definition.length})`;
|
||||
} else if (definition.precision !== undefined) {
|
||||
normalizedType = `${normalizedType}(${definition.precision})`;
|
||||
} else if (
|
||||
definition.suffix &&
|
||||
Array.isArray(definition.suffix) &&
|
||||
definition.suffix.length > 0
|
||||
) {
|
||||
const params = definition.suffix
|
||||
.map((s: unknown) => {
|
||||
if (
|
||||
typeof s === 'object' &&
|
||||
s !== null &&
|
||||
'value' in s
|
||||
) {
|
||||
return String(s.value);
|
||||
}
|
||||
return String(s);
|
||||
})
|
||||
.join(',');
|
||||
normalizedType = `${normalizedType}(${params})`;
|
||||
}
|
||||
|
||||
// Update the column type
|
||||
(column as SQLColumn).type = normalizedType;
|
||||
|
||||
// Update typeArgs if applicable
|
||||
if (
|
||||
definition.scale !== undefined &&
|
||||
definition.scale !== null
|
||||
) {
|
||||
// For NUMERIC/DECIMAL with scale
|
||||
const precision =
|
||||
definition.length ||
|
||||
definition.precision;
|
||||
(column as SQLColumn).typeArgs = {
|
||||
precision: precision,
|
||||
scale: definition.scale,
|
||||
};
|
||||
} else if (definition.length) {
|
||||
(column as SQLColumn).typeArgs = {
|
||||
length: definition.length,
|
||||
};
|
||||
} else if (definition.precision) {
|
||||
(column as SQLColumn).typeArgs = {
|
||||
precision: definition.precision,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
// Handle ADD COLUMN
|
||||
} else if (
|
||||
expr.action === 'add' &&
|
||||
expr.resource === 'column'
|
||||
) {
|
||||
// Handle ADD COLUMN directly from expr structure
|
||||
// Extract column name from the nested structure
|
||||
let columnName: string | undefined;
|
||||
if (
|
||||
typeof expr.column === 'object' &&
|
||||
'column' in expr.column
|
||||
) {
|
||||
const innerColumn = expr.column.column;
|
||||
if (
|
||||
typeof innerColumn === 'object' &&
|
||||
'expr' in innerColumn &&
|
||||
innerColumn.expr?.value
|
||||
) {
|
||||
columnName = innerColumn.expr.value;
|
||||
} else if (typeof innerColumn === 'string') {
|
||||
columnName = innerColumn;
|
||||
}
|
||||
} else if (typeof expr.column === 'string') {
|
||||
columnName = expr.column;
|
||||
}
|
||||
|
||||
if (columnName && typeof columnName === 'string') {
|
||||
const definition = expr.definition || {};
|
||||
const rawDataType = String(
|
||||
definition?.dataType || 'TEXT'
|
||||
);
|
||||
// console.log('expr:', JSON.stringify(expr, null, 2));
|
||||
|
||||
// Normalize the type
|
||||
let normalizedBaseType =
|
||||
normalizePostgreSQLType(rawDataType);
|
||||
|
||||
// Check if it's a serial type
|
||||
const upperType = rawDataType.toUpperCase();
|
||||
const isSerialType = [
|
||||
'SERIAL',
|
||||
'SERIAL2',
|
||||
'SERIAL4',
|
||||
'SERIAL8',
|
||||
'BIGSERIAL',
|
||||
'SMALLSERIAL',
|
||||
].includes(upperType.split('(')[0]);
|
||||
|
||||
if (isSerialType) {
|
||||
const typeLength = definition?.length as
|
||||
| number
|
||||
| undefined;
|
||||
if (upperType === 'SERIAL') {
|
||||
if (typeLength === 2) {
|
||||
normalizedBaseType = 'SMALLINT';
|
||||
} else if (typeLength === 8) {
|
||||
normalizedBaseType = 'BIGINT';
|
||||
} else {
|
||||
normalizedBaseType = 'INTEGER';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Handle type parameters
|
||||
let finalDataType = normalizedBaseType;
|
||||
const isNormalizedIntegerType =
|
||||
['INTEGER', 'BIGINT', 'SMALLINT'].includes(
|
||||
normalizedBaseType
|
||||
) &&
|
||||
(upperType === 'INT' || upperType === 'SERIAL');
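// Presumably guards against emitting something like INTEGER(4) when the parser
// attaches a display length to a plain INT or SERIAL column.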
|
||||
|
||||
if (!isSerialType && !isNormalizedIntegerType) {
|
||||
const precision = definition?.precision;
|
||||
const scale = definition?.scale;
|
||||
const length = definition?.length;
|
||||
const suffix =
|
||||
(definition?.suffix as unknown[]) || [];
|
||||
|
||||
if (suffix.length > 0) {
|
||||
const params = suffix
|
||||
.map((s: unknown) => {
|
||||
if (
|
||||
typeof s === 'object' &&
|
||||
s !== null &&
|
||||
'value' in s
|
||||
) {
|
||||
return String(
|
||||
(s as { value: unknown })
|
||||
.value
|
||||
);
|
||||
}
|
||||
return String(s);
|
||||
})
|
||||
.join(',');
|
||||
finalDataType = `${normalizedBaseType}(${params})`;
|
||||
} else if (precision !== undefined) {
|
||||
if (scale !== undefined) {
|
||||
finalDataType = `${normalizedBaseType}(${precision},${scale})`;
|
||||
} else {
|
||||
finalDataType = `${normalizedBaseType}(${precision})`;
|
||||
}
|
||||
} else if (
|
||||
length !== undefined &&
|
||||
length !== null
|
||||
) {
|
||||
finalDataType = `${normalizedBaseType}(${length})`;
|
||||
}
|
||||
}
|
||||
|
||||
// Check for nullable constraint
|
||||
let nullable = true;
|
||||
if (isSerialType) {
|
||||
nullable = false;
|
||||
} else if (
|
||||
expr.nullable &&
|
||||
expr.nullable.type === 'not null'
|
||||
) {
|
||||
nullable = false;
|
||||
} else if (
|
||||
definition?.nullable &&
|
||||
definition.nullable.type === 'not null'
|
||||
) {
|
||||
nullable = false;
|
||||
}
|
||||
|
||||
// Check for unique constraint
|
||||
const isUnique =
|
||||
expr.unique === 'unique' ||
|
||||
definition?.unique === 'unique';
|
||||
|
||||
// Check for default value
|
||||
let defaultValue: string | undefined;
|
||||
const defaultVal =
|
||||
expr.default_val || definition?.default_val;
|
||||
if (defaultVal && !isSerialType) {
|
||||
// Create a temporary columnDef to use the getDefaultValueString function
|
||||
const tempColumnDef = {
|
||||
default_val: defaultVal,
|
||||
} as ColumnDefinition;
|
||||
defaultValue =
|
||||
getDefaultValueString(tempColumnDef);
|
||||
}
|
||||
|
||||
// Create the new column object
|
||||
const newColumn: SQLColumn = {
|
||||
name: columnName,
|
||||
type: finalDataType,
|
||||
nullable: nullable,
|
||||
primaryKey:
|
||||
definition?.primary_key === 'primary key' ||
|
||||
definition?.constraint === 'primary key' ||
|
||||
isSerialType,
|
||||
unique: isUnique,
|
||||
default: defaultValue,
|
||||
increment:
|
||||
isSerialType ||
|
||||
definition?.auto_increment ===
|
||||
'auto_increment' ||
|
||||
(stmt.sql
|
||||
.toUpperCase()
|
||||
.includes('GENERATED') &&
|
||||
stmt.sql
|
||||
.toUpperCase()
|
||||
.includes('IDENTITY')),
|
||||
};
|
||||
|
||||
// Add the column to the table if it doesn't already exist
|
||||
const tableColumns = table.columns as SQLColumn[];
|
||||
if (
|
||||
!tableColumns.some(
|
||||
(col) => col.name === columnName
|
||||
)
|
||||
) {
|
||||
tableColumns.push(newColumn);
|
||||
}
|
||||
}
|
||||
} else if (
|
||||
expr.action === 'add' &&
|
||||
expr.create_definitions
|
||||
) {
|
||||
const createDefs = expr.create_definitions;
|
||||
|
||||
if (
|
||||
// Check if it's adding a column (legacy structure)
|
||||
if (createDefs.resource === 'column') {
|
||||
const columnDef =
|
||||
createDefs as unknown as ColumnDefinition;
|
||||
const columnName = extractColumnName(
|
||||
columnDef.column
|
||||
);
|
||||
|
||||
if (columnName) {
|
||||
// Extract the column type and properties
|
||||
const definition =
|
||||
columnDef.definition as Record<
|
||||
string,
|
||||
unknown
|
||||
>;
|
||||
const rawDataType = String(
|
||||
definition?.dataType || 'TEXT'
|
||||
);
|
||||
|
||||
// Normalize the type
|
||||
let normalizedBaseType =
|
||||
normalizePostgreSQLType(rawDataType);
|
||||
|
||||
// Check if it's a serial type
|
||||
const upperType = rawDataType.toUpperCase();
|
||||
const isSerialType = [
|
||||
'SERIAL',
|
||||
'SERIAL2',
|
||||
'SERIAL4',
|
||||
'SERIAL8',
|
||||
'BIGSERIAL',
|
||||
'SMALLSERIAL',
|
||||
].includes(upperType.split('(')[0]);
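// Same serial handling as the direct-expr ADD COLUMN path above.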
|
||||
|
||||
if (isSerialType) {
|
||||
const typeLength = definition?.length as
|
||||
| number
|
||||
| undefined;
|
||||
if (upperType === 'SERIAL') {
|
||||
if (typeLength === 2) {
|
||||
normalizedBaseType = 'SMALLINT';
|
||||
} else if (typeLength === 8) {
|
||||
normalizedBaseType = 'BIGINT';
|
||||
} else {
|
||||
normalizedBaseType = 'INTEGER';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Handle type parameters
|
||||
let finalDataType = normalizedBaseType;
|
||||
const isNormalizedIntegerType =
|
||||
['INTEGER', 'BIGINT', 'SMALLINT'].includes(
|
||||
normalizedBaseType
|
||||
) &&
|
||||
(upperType === 'INT' ||
|
||||
upperType === 'SERIAL');
|
||||
|
||||
if (!isSerialType && !isNormalizedIntegerType) {
|
||||
const precision =
|
||||
columnDef.definition?.precision;
|
||||
const scale = columnDef.definition?.scale;
|
||||
const length = columnDef.definition?.length;
|
||||
const suffix =
|
||||
(definition?.suffix as unknown[]) || [];
|
||||
|
||||
if (suffix.length > 0) {
|
||||
const params = suffix
|
||||
.map((s: unknown) => {
|
||||
if (
|
||||
typeof s === 'object' &&
|
||||
s !== null &&
|
||||
'value' in s
|
||||
) {
|
||||
return String(
|
||||
(
|
||||
s as {
|
||||
value: unknown;
|
||||
}
|
||||
).value
|
||||
);
|
||||
}
|
||||
return String(s);
|
||||
})
|
||||
.join(',');
|
||||
finalDataType = `${normalizedBaseType}(${params})`;
|
||||
} else if (precision !== undefined) {
|
||||
if (scale !== undefined) {
|
||||
finalDataType = `${normalizedBaseType}(${precision},${scale})`;
|
||||
} else {
|
||||
finalDataType = `${normalizedBaseType}(${precision})`;
|
||||
}
|
||||
} else if (
|
||||
length !== undefined &&
|
||||
length !== null
|
||||
) {
|
||||
finalDataType = `${normalizedBaseType}(${length})`;
|
||||
}
|
||||
}
|
||||
|
||||
// Create the new column object
|
||||
const newColumn: SQLColumn = {
|
||||
name: columnName,
|
||||
type: finalDataType,
|
||||
nullable: isSerialType
|
||||
? false
|
||||
: columnDef.nullable?.type !==
|
||||
'not null',
|
||||
primaryKey:
|
||||
columnDef.primary_key ===
|
||||
'primary key' ||
|
||||
columnDef.definition?.constraint ===
|
||||
'primary key' ||
|
||||
isSerialType,
|
||||
unique: columnDef.unique === 'unique',
|
||||
typeArgs: getTypeArgs(columnDef.definition),
|
||||
default: isSerialType
|
||||
? undefined
|
||||
: getDefaultValueString(columnDef),
|
||||
increment:
|
||||
isSerialType ||
|
||||
columnDef.auto_increment ===
|
||||
'auto_increment' ||
|
||||
(stmt.sql
|
||||
.toUpperCase()
|
||||
.includes('GENERATED') &&
|
||||
stmt.sql
|
||||
.toUpperCase()
|
||||
.includes('IDENTITY')),
|
||||
};
|
||||
|
||||
// Add the column to the table if it doesn't already exist
|
||||
const tableColumns2 =
|
||||
table.columns as SQLColumn[];
|
||||
if (
|
||||
!tableColumns2.some(
|
||||
(col) => col.name === columnName
|
||||
)
|
||||
) {
|
||||
tableColumns2.push(newColumn);
|
||||
}
|
||||
}
|
||||
} else if (
|
||||
createDefs.constraint_type === 'FOREIGN KEY' ||
|
||||
createDefs.constraint_type === 'foreign key'
|
||||
) {
|
||||
@@ -1143,6 +1637,171 @@ export async function fromPostgres(
|
||||
}
|
||||
} else if (stmt.type === 'alter' && !stmt.parsed) {
|
||||
// Handle ALTER TABLE statements that failed to parse
|
||||
|
||||
// First try to extract ALTER COLUMN TYPE statements
|
||||
const alterTypeMatch = stmt.sql.match(
|
||||
/ALTER\s+TABLE\s+(?:ONLY\s+)?(?:(?:"([^"]+)"|([^"\s.]+))\.)?(?:"([^"]+)"|([^"\s.(]+))\s+ALTER\s+COLUMN\s+(?:"([^"]+)"|([^"\s]+))\s+TYPE\s+([\w_]+(?:\([^)]*\))?(?:\[\])?)/i
|
||||
);
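// e.g. "ALTER TABLE table_12 ALTER COLUMN field1 TYPE VARCHAR(254)" captures the
// table in group 4, the column in group 6 and the type in group 7; the schema
// groups stay empty and default to 'public' below.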
|
||||
|
||||
if (alterTypeMatch) {
|
||||
const schemaName =
|
||||
alterTypeMatch[1] || alterTypeMatch[2] || 'public';
|
||||
const tableName = alterTypeMatch[3] || alterTypeMatch[4];
|
||||
const columnName = alterTypeMatch[5] || alterTypeMatch[6];
|
||||
let columnType = alterTypeMatch[7];
|
||||
|
||||
const table = findTableWithSchemaSupport(
|
||||
tables,
|
||||
tableName,
|
||||
schemaName
|
||||
);
|
||||
if (table && columnName) {
|
||||
const column = (table.columns as SQLColumn[]).find(
|
||||
(col) => col.name === columnName
|
||||
);
|
||||
if (column) {
|
||||
// Normalize and update the type
|
||||
columnType = normalizePostgreSQLType(columnType);
|
||||
column.type = columnType;
|
||||
|
||||
// Extract and update typeArgs if present
|
||||
const typeMatch = columnType.match(
|
||||
/^(\w+)(?:\(([^)]+)\))?$/
|
||||
);
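// e.g. "VARCHAR(254)" -> { length: 254 }, "NUMERIC(10,2)" -> { precision: 10, scale: 2 }.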
|
||||
if (typeMatch && typeMatch[2]) {
|
||||
const params = typeMatch[2]
|
||||
.split(',')
|
||||
.map((p) => p.trim());
|
||||
if (params.length === 1) {
|
||||
column.typeArgs = {
|
||||
length: parseInt(params[0]),
|
||||
};
|
||||
} else if (params.length === 2) {
|
||||
column.typeArgs = {
|
||||
precision: parseInt(params[0]),
|
||||
scale: parseInt(params[1]),
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Then try to extract ADD COLUMN statements
|
||||
const alterColumnMatch = stmt.sql.match(
|
||||
/ALTER\s+TABLE\s+(?:ONLY\s+)?(?:(?:"([^"]+)"|([^"\s.]+))\.)?(?:"([^"]+)"|([^"\s.(]+))\s+ADD\s+COLUMN\s+(?:"([^"]+)"|([^"\s]+))\s+([\w_]+(?:\([^)]*\))?(?:\[\])?)/i
|
||||
);
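// e.g. "ALTER TABLE location ADD COLUMN country_id INT" captures "location",
// "country_id" and "INT" in groups 4, 6 and 7 respectively.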
|
||||
|
||||
if (alterColumnMatch) {
|
||||
const schemaName =
|
||||
alterColumnMatch[1] || alterColumnMatch[2] || 'public';
|
||||
const tableName = alterColumnMatch[3] || alterColumnMatch[4];
|
||||
const columnName = alterColumnMatch[5] || alterColumnMatch[6];
|
||||
let columnType = alterColumnMatch[7];
|
||||
|
||||
const table = findTableWithSchemaSupport(
|
||||
tables,
|
||||
tableName,
|
||||
schemaName
|
||||
);
|
||||
if (table && columnName) {
|
||||
const tableColumns = table.columns as SQLColumn[];
|
||||
if (!tableColumns.some((col) => col.name === columnName)) {
|
||||
// Normalize the type
|
||||
columnType = normalizePostgreSQLType(columnType);
|
||||
|
||||
// Check for constraints in the statement
|
||||
const columnDefPart = stmt.sql.substring(
|
||||
stmt.sql.indexOf(columnName)
|
||||
);
|
||||
const isPrimary =
|
||||
columnDefPart.match(/PRIMARY\s+KEY/i) !== null;
|
||||
const isNotNull =
|
||||
columnDefPart.match(/NOT\s+NULL/i) !== null;
|
||||
const isUnique =
|
||||
columnDefPart.match(/\bUNIQUE\b/i) !== null;
|
||||
// Extract default value
|
||||
let defaultValue: string | undefined;
|
||||
// Updated regex to handle casting with :: operator
|
||||
const defaultMatch = columnDefPart.match(
|
||||
/DEFAULT\s+((?:'[^']*'|"[^"]*"|\S+)(?:::\w+)?)/i
|
||||
);
|
||||
if (defaultMatch) {
|
||||
let defVal = defaultMatch[1].trim();
|
||||
// Remove trailing comma or semicolon if present
|
||||
defVal = defVal.replace(/[,;]$/, '').trim();
|
||||
// Handle string literals
|
||||
if (
|
||||
defVal.startsWith("'") &&
|
||||
defVal.endsWith("'")
|
||||
) {
|
||||
// Keep the quotes for string literals
|
||||
defaultValue = defVal;
|
||||
} else if (defVal.match(/^\d+(\.\d+)?$/)) {
|
||||
// Numeric value
|
||||
defaultValue = defVal;
|
||||
} else if (
|
||||
defVal.toUpperCase() === 'TRUE' ||
|
||||
defVal.toUpperCase() === 'FALSE'
|
||||
) {
|
||||
// Boolean value
|
||||
defaultValue = defVal.toUpperCase();
|
||||
} else if (defVal.toUpperCase() === 'NULL') {
|
||||
// NULL value
|
||||
defaultValue = 'NULL';
|
||||
} else if (
|
||||
defVal.includes('(') &&
|
||||
defVal.includes(')')
|
||||
) {
|
||||
// Function call
|
||||
// Normalize PostgreSQL function names to uppercase
|
||||
const funcMatch = defVal.match(/^(\w+)\(/);
|
||||
if (funcMatch) {
|
||||
const funcName = funcMatch[1];
|
||||
const pgFunctions = [
|
||||
'now',
|
||||
'current_timestamp',
|
||||
'current_date',
|
||||
'current_time',
|
||||
'gen_random_uuid',
|
||||
'random',
|
||||
'nextval',
|
||||
'currval',
|
||||
];
|
||||
if (
|
||||
pgFunctions.includes(
|
||||
funcName.toLowerCase()
|
||||
)
|
||||
) {
|
||||
defaultValue = defVal.replace(
|
||||
funcName,
|
||||
funcName.toUpperCase()
|
||||
);
|
||||
} else {
|
||||
defaultValue = defVal;
|
||||
}
|
||||
} else {
|
||||
defaultValue = defVal;
|
||||
}
|
||||
} else {
|
||||
// Other expressions
|
||||
defaultValue = defVal;
|
||||
}
|
||||
}
|
||||
|
||||
tableColumns.push({
|
||||
name: columnName,
|
||||
type: columnType,
|
||||
nullable: !isNotNull && !isPrimary,
|
||||
primaryKey: isPrimary,
|
||||
unique: isUnique || isPrimary,
|
||||
default: defaultValue,
|
||||
increment: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Extract foreign keys using regex as fallback
|
||||
// Updated regex to handle quoted identifiers properly
|
||||
const alterFKMatch = stmt.sql.match(
|
||||
@@ -1293,58 +1952,10 @@ export async function fromPostgres(
|
||||
function getDefaultValueString(
|
||||
columnDef: ColumnDefinition
|
||||
): string | undefined {
|
||||
let defVal = columnDef.default_val;
|
||||
|
||||
if (
|
||||
defVal &&
|
||||
typeof defVal === 'object' &&
|
||||
defVal.type === 'default' &&
|
||||
'value' in defVal
|
||||
) {
|
||||
defVal = defVal.value;
|
||||
}
|
||||
const defVal = columnDef.default_val;
|
||||
|
||||
if (defVal === undefined || defVal === null) return undefined;
|
||||
|
||||
let value: string | undefined;
|
||||
|
||||
switch (typeof defVal) {
|
||||
case 'string':
|
||||
value = defVal;
|
||||
break;
|
||||
case 'number':
|
||||
value = String(defVal);
|
||||
break;
|
||||
case 'boolean':
|
||||
value = defVal ? 'TRUE' : 'FALSE';
|
||||
break;
|
||||
case 'object':
|
||||
if ('value' in defVal && typeof defVal.value === 'string') {
|
||||
value = defVal.value;
|
||||
} else if ('raw' in defVal && typeof defVal.raw === 'string') {
|
||||
value = defVal.raw;
|
||||
} else if (defVal.type === 'bool') {
|
||||
value = defVal.value ? 'TRUE' : 'FALSE';
|
||||
} else if (defVal.type === 'function' && defVal.name) {
|
||||
const fnName = defVal.name;
|
||||
if (
|
||||
fnName &&
|
||||
typeof fnName === 'object' &&
|
||||
Array.isArray(fnName.name) &&
|
||||
fnName.name.length > 0 &&
|
||||
fnName.name[0].value
|
||||
) {
|
||||
value = fnName.name[0].value.toUpperCase();
|
||||
} else if (typeof fnName === 'string') {
|
||||
value = fnName.toUpperCase();
|
||||
} else {
|
||||
value = 'UNKNOWN_FUNCTION';
|
||||
}
|
||||
}
|
||||
break;
|
||||
default:
|
||||
value = undefined;
|
||||
}
|
||||
|
||||
return value;
|
||||
// Use buildSQLFromAST to reconstruct the default value
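// Presumably this yields the same textual form the default-value tests above expect,
// e.g. "'pending'" for a string literal and "CURRENT_TIMESTAMP" for a keyword default.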
|
||||
return buildSQLFromAST(defVal, DatabaseType.POSTGRESQL);
|
||||
}
|
||||
|
||||
@@ -0,0 +1,252 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromSQLServer } from '../sqlserver';
|
||||
|
||||
describe('SQL Server Default Value Import', () => {
|
||||
describe('String Default Values', () => {
|
||||
it('should parse simple string defaults with single quotes', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE kingdom_citizens (
|
||||
citizen_id INT NOT NULL,
|
||||
allegiance NVARCHAR(50) DEFAULT 'neutral',
|
||||
PRIMARY KEY (citizen_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromSQLServer(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const allegianceColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'allegiance'
|
||||
);
|
||||
expect(allegianceColumn?.default).toBe("'neutral'");
|
||||
});
|
||||
|
||||
it('should parse string defaults with Unicode prefix', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE ancient_scrolls (
|
||||
scroll_id INT NOT NULL,
|
||||
runic_inscription NVARCHAR(255) DEFAULT N'Ancient wisdom',
|
||||
prophecy NVARCHAR(MAX) DEFAULT N'The chosen one shall rise',
|
||||
PRIMARY KEY (scroll_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromSQLServer(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const runicColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'runic_inscription'
|
||||
);
|
||||
expect(runicColumn?.default).toBe("N'Ancient wisdom'");
|
||||
const prophecyColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'prophecy'
|
||||
);
|
||||
expect(prophecyColumn?.default).toBe(
|
||||
"N'The chosen one shall rise'"
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Numeric Default Values', () => {
|
||||
it('should parse integer defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE castle_treasury (
|
||||
treasury_id INT NOT NULL,
|
||||
gold_count INT DEFAULT 0,
|
||||
max_capacity BIGINT DEFAULT 100000,
|
||||
guard_posts SMALLINT DEFAULT 5,
|
||||
PRIMARY KEY (treasury_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromSQLServer(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const goldColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'gold_count'
|
||||
);
|
||||
expect(goldColumn?.default).toBe('0');
|
||||
const capacityColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'max_capacity'
|
||||
);
|
||||
expect(capacityColumn?.default).toBe('100000');
|
||||
const guardColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'guard_posts'
|
||||
);
|
||||
expect(guardColumn?.default).toBe('5');
|
||||
});
|
||||
|
||||
it('should parse decimal defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE blacksmith_shop (
|
||||
item_id INT NOT NULL,
|
||||
weapon_price DECIMAL(10, 2) DEFAULT 99.99,
|
||||
guild_discount FLOAT DEFAULT 0.15,
|
||||
enchantment_tax NUMERIC(5, 4) DEFAULT 0.0825,
|
||||
PRIMARY KEY (item_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromSQLServer(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const priceColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'weapon_price'
|
||||
);
|
||||
expect(priceColumn?.default).toBe('99.99');
|
||||
const discountColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'guild_discount'
|
||||
);
|
||||
expect(discountColumn?.default).toBe('0.15');
|
||||
const taxColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'enchantment_tax'
|
||||
);
|
||||
expect(taxColumn?.default).toBe('0.0825');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Boolean Default Values', () => {
|
||||
it('should parse BIT defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE magic_barriers (
|
||||
barrier_id INT NOT NULL,
|
||||
is_active BIT DEFAULT 1,
|
||||
is_breached BIT DEFAULT 0,
|
||||
PRIMARY KEY (barrier_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromSQLServer(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const activeColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'is_active'
|
||||
);
|
||||
expect(activeColumn?.default).toBe('1');
|
||||
const breachedColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'is_breached'
|
||||
);
|
||||
expect(breachedColumn?.default).toBe('0');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Date and Time Default Values', () => {
|
||||
it('should parse date/time function defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE battle_logs (
|
||||
battle_id INT NOT NULL,
|
||||
battle_started DATETIME DEFAULT GETDATE(),
|
||||
last_action DATETIME2 DEFAULT SYSDATETIME(),
|
||||
battle_date DATE DEFAULT GETDATE(),
|
||||
PRIMARY KEY (battle_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromSQLServer(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const startedColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'battle_started'
|
||||
);
|
||||
expect(startedColumn?.default).toBe('GETDATE()');
|
||||
const actionColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'last_action'
|
||||
);
|
||||
expect(actionColumn?.default).toBe('SYSDATETIME()');
|
||||
const dateColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'battle_date'
|
||||
);
|
||||
expect(dateColumn?.default).toBe('GETDATE()');
|
||||
});
|
||||
});
|
||||
|
||||
describe('IDENTITY columns', () => {
|
||||
it('should handle IDENTITY columns correctly', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE legendary_weapons (
|
||||
weapon_id INT IDENTITY(1,1) NOT NULL,
|
||||
legacy_id BIGINT IDENTITY(100,10) NOT NULL,
|
||||
weapon_name NVARCHAR(100),
|
||||
PRIMARY KEY (weapon_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromSQLServer(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const weaponColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'weapon_id'
|
||||
);
|
||||
expect(weaponColumn?.increment).toBe(true);
|
||||
const legacyColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'legacy_id'
|
||||
);
|
||||
expect(legacyColumn?.increment).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Complex Real-World Example with Schema', () => {
|
||||
it('should handle complex table with schema and multiple default types', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [dbo].[QuestContracts] (
|
||||
[ContractID] INT IDENTITY(1,1) NOT NULL,
|
||||
[AdventurerID] INT NOT NULL,
|
||||
[QuestDate] DATETIME DEFAULT GETDATE(),
|
||||
[QuestStatus] NVARCHAR(20) DEFAULT N'Available',
|
||||
[RewardAmount] DECIMAL(10, 2) DEFAULT 0.00,
|
||||
[IsCompleted] BIT DEFAULT 0,
|
||||
[CompletedDate] DATETIME NULL,
|
||||
[QuestNotes] NVARCHAR(MAX) DEFAULT NULL,
|
||||
[DifficultyLevel] INT DEFAULT 5,
|
||||
[QuestGuid] UNIQUEIDENTIFIER DEFAULT NEWID(),
|
||||
PRIMARY KEY ([ContractID])
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
const table = result.tables[0];
|
||||
expect(table).toBeDefined();
|
||||
expect(table.schema).toBe('dbo');
|
||||
|
||||
// Check various default values
|
||||
const questDateColumn = table.columns.find(
|
||||
(c) => c.name === 'QuestDate'
|
||||
);
|
||||
expect(questDateColumn?.default).toBe('GETDATE()');
|
||||
|
||||
const statusColumn = table.columns.find(
|
||||
(c) => c.name === 'QuestStatus'
|
||||
);
|
||||
expect(statusColumn?.default).toBe("N'Available'");
|
||||
|
||||
const rewardColumn = table.columns.find(
|
||||
(c) => c.name === 'RewardAmount'
|
||||
);
|
||||
expect(rewardColumn?.default).toBe('0.00');
|
||||
|
||||
const completedColumn = table.columns.find(
|
||||
(c) => c.name === 'IsCompleted'
|
||||
);
|
||||
expect(completedColumn?.default).toBe('0');
|
||||
|
||||
const difficultyColumn = table.columns.find(
|
||||
(c) => c.name === 'DifficultyLevel'
|
||||
);
|
||||
expect(difficultyColumn?.default).toBe('5');
|
||||
|
||||
const guidColumn = table.columns.find(
|
||||
(c) => c.name === 'QuestGuid'
|
||||
);
|
||||
expect(guidColumn?.default).toBe('NEWID()');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Expressions in defaults', () => {
|
||||
it('should handle parentheses in default expressions', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE spell_calculations (
|
||||
calculation_id INT NOT NULL,
|
||||
base_damage INT DEFAULT (10 + 5),
|
||||
total_power DECIMAL(10,2) DEFAULT ((100.0 * 0.15) + 10),
|
||||
PRIMARY KEY (calculation_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromSQLServer(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const damageColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'base_damage'
|
||||
);
|
||||
expect(damageColumn?.default).toBe('(10 + 5)');
|
||||
const powerColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'total_power'
|
||||
);
|
||||
expect(powerColumn?.default).toBe('((100.0 * 0.15) + 10)');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,91 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromSQLServer } from '../sqlserver';
|
||||
|
||||
describe('SQL Server Complex Fantasy Case', () => {
|
||||
it('should parse complex SQL with SpellDefinition and SpellComponent tables', async () => {
|
||||
// Complex SQL with the same structure as the user's case, but fantasy-themed
|
||||
const sql = `CREATE TABLE [DBO].[SpellDefinition](
|
||||
[SPELLID] (VARCHAR)(32),
|
||||
[HASVERBALCOMP] BOOLEAN,
|
||||
[INCANTATION] [VARCHAR](128),
|
||||
[INCANTATIONFIX] BOOLEAN,
|
||||
[ITSCOMPONENTREL] [VARCHAR](32), FOREIGN KEY (itscomponentrel) REFERENCES SpellComponent(SPELLID),
|
||||
[SHOWVISUALS] BOOLEAN, ) ON [PRIMARY]
|
||||
|
||||
CREATE TABLE [DBO].[SpellComponent](
|
||||
[ALIAS] CHAR (50),
|
||||
[SPELLID] (VARCHAR)(32),
|
||||
[ISOPTIONAL] BOOLEAN,
|
||||
[ITSPARENTCOMP] [VARCHAR](32), FOREIGN KEY (itsparentcomp) REFERENCES SpellComponent(SPELLID),
|
||||
[ITSSCHOOLMETA] [VARCHAR](32), FOREIGN KEY (itsschoolmeta) REFERENCES MagicSchool(SCHOOLID),
|
||||
[KEYATTR] CHAR (100), ) ON [PRIMARY]`;
|
||||
|
||||
console.log('Testing complex fantasy SQL...');
|
||||
console.log(
|
||||
'Number of CREATE TABLE statements:',
|
||||
(sql.match(/CREATE\s+TABLE/gi) || []).length
|
||||
);
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
console.log(
|
||||
'Result tables:',
|
||||
result.tables.map((t) => t.name)
|
||||
);
|
||||
console.log('Result relationships:', result.relationships.length);
|
||||
|
||||
// Debug: Show actual relationships
|
||||
if (result.relationships.length === 0) {
|
||||
console.log('WARNING: No relationships found!');
|
||||
} else {
|
||||
console.log('Relationships found:');
|
||||
result.relationships.forEach((r) => {
|
||||
console.log(
|
||||
` ${r.sourceTable}.${r.sourceColumn} -> ${r.targetTable}.${r.targetColumn}`
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
// Should create TWO tables
|
||||
expect(result.tables).toHaveLength(2);
|
||||
|
||||
// Check first table
|
||||
const spellDef = result.tables.find(
|
||||
(t) => t.name === 'SpellDefinition'
|
||||
);
|
||||
expect(spellDef).toBeDefined();
|
||||
expect(spellDef?.schema).toBe('DBO');
|
||||
expect(spellDef?.columns).toHaveLength(6);
|
||||
|
||||
// Check second table
|
||||
const spellComp = result.tables.find(
|
||||
(t) => t.name === 'SpellComponent'
|
||||
);
|
||||
expect(spellComp).toBeDefined();
|
||||
expect(spellComp?.schema).toBe('DBO');
|
||||
expect(spellComp?.columns).toHaveLength(6);
|
||||
|
||||
// Check foreign key relationships (should have at least 2)
|
||||
expect(result.relationships.length).toBeGreaterThanOrEqual(2);
|
||||
|
||||
// Check FK from SpellDefinition to SpellComponent
|
||||
const fkDefToComp = result.relationships.find(
|
||||
(r) =>
|
||||
r.sourceTable === 'SpellDefinition' &&
|
||||
r.targetTable === 'SpellComponent' &&
|
||||
r.sourceColumn === 'itscomponentrel'
|
||||
);
|
||||
expect(fkDefToComp).toBeDefined();
|
||||
expect(fkDefToComp?.targetColumn).toBe('SPELLID');
|
||||
|
||||
// Check self-referential FK in SpellComponent
|
||||
const selfRefFK = result.relationships.find(
|
||||
(r) =>
|
||||
r.sourceTable === 'SpellComponent' &&
|
||||
r.targetTable === 'SpellComponent' &&
|
||||
r.sourceColumn === 'itsparentcomp'
|
||||
);
|
||||
expect(selfRefFK).toBeDefined();
|
||||
expect(selfRefFK?.targetColumn).toBe('SPELLID');
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,102 @@
import { describe, it, expect } from 'vitest';
import { sqlImportToDiagram } from '../../../index';
import { DatabaseType } from '@/lib/domain/database-type';

describe('SQL Server Full Import Flow', () => {
    it('should create relationships when importing through the full flow', async () => {
        const sql = `CREATE TABLE [DBO].[SpellDefinition](
[SPELLID] (VARCHAR)(32),
[HASVERBALCOMP] BOOLEAN,
[INCANTATION] [VARCHAR](128),
[INCANTATIONFIX] BOOLEAN,
[ITSCOMPONENTREL] [VARCHAR](32), FOREIGN KEY (itscomponentrel) REFERENCES SpellComponent(SPELLID),
[SHOWVISUALS] BOOLEAN, ) ON [PRIMARY]

CREATE TABLE [DBO].[SpellComponent](
[ALIAS] CHAR (50),
[SPELLID] (VARCHAR)(32),
[ISOPTIONAL] BOOLEAN,
[ITSPARENTCOMP] [VARCHAR](32), FOREIGN KEY (itsparentcomp) REFERENCES SpellComponent(SPELLID),
[ITSSCHOOLMETA] [VARCHAR](32), FOREIGN KEY (itsschoolmeta) REFERENCES MagicSchool(SCHOOLID),
[KEYATTR] CHAR (100), ) ON [PRIMARY]`;

        // Test the full import flow as the application uses it
        const diagram = await sqlImportToDiagram({
            sqlContent: sql,
            sourceDatabaseType: DatabaseType.SQL_SERVER,
            targetDatabaseType: DatabaseType.SQL_SERVER,
        });

        // Verify tables
        expect(diagram.tables).toHaveLength(2);
        const tableNames = diagram.tables?.map((t) => t.name).sort();
        expect(tableNames).toEqual(['SpellComponent', 'SpellDefinition']);

        // Verify relationships are created in the diagram
        expect(diagram.relationships).toBeDefined();
        expect(diagram.relationships?.length).toBeGreaterThanOrEqual(2);

        // Check specific relationships
        const fk1 = diagram.relationships?.find(
            (r) =>
                r.sourceFieldId &&
                r.targetFieldId && // Must have field IDs
                diagram.tables?.some(
                    (t) =>
                        t.id === r.sourceTableId && t.name === 'SpellDefinition'
                )
        );
        expect(fk1).toBeDefined();

        const fk2 = diagram.relationships?.find(
            (r) =>
                r.sourceFieldId &&
                r.targetFieldId && // Must have field IDs
                diagram.tables?.some(
                    (t) =>
                        t.id === r.sourceTableId &&
                        t.name === 'SpellComponent' &&
                        t.id === r.targetTableId // self-reference
                )
        );
        expect(fk2).toBeDefined();

        console.log(
            'Full flow test - Relationships created:',
            diagram.relationships?.length
        );
        diagram.relationships?.forEach((r) => {
            const sourceTable = diagram.tables?.find(
                (t) => t.id === r.sourceTableId
            );
            const targetTable = diagram.tables?.find(
                (t) => t.id === r.targetTableId
            );
            const sourceField = sourceTable?.fields.find(
                (f) => f.id === r.sourceFieldId
            );
            const targetField = targetTable?.fields.find(
                (f) => f.id === r.targetFieldId
            );
            console.log(
                ` ${sourceTable?.name}.${sourceField?.name} -> ${targetTable?.name}.${targetField?.name}`
            );
        });
    });

    it('should handle case-insensitive field matching', async () => {
        const sql = `CREATE TABLE DragonLair (
[LAIRID] INT PRIMARY KEY,
[parentLairId] INT, FOREIGN KEY (PARENTLAIRID) REFERENCES DragonLair(lairid)
)`;

        const diagram = await sqlImportToDiagram({
            sqlContent: sql,
            sourceDatabaseType: DatabaseType.SQL_SERVER,
            targetDatabaseType: DatabaseType.SQL_SERVER,
        });

        // Should create the self-referential relationship despite case differences
        expect(diagram.relationships?.length).toBe(1);
    });
});
@@ -0,0 +1,132 @@
import { describe, it, expect } from 'vitest';
import { fromSQLServer } from '../sqlserver';

describe('SQL Server Multiple Tables with Foreign Keys', () => {
    it('should parse multiple tables with foreign keys in user format', async () => {
        const sql = `
CREATE TABLE [DBO].[QuestReward](
[BOID] (VARCHAR)(32),
[HASEXTRACOL] BOOLEAN,
[REWARDCODE] [VARCHAR](128),
[REWARDFIX] BOOLEAN,
[ITSQUESTREL] [VARCHAR](32), FOREIGN KEY (itsquestrel) REFERENCES QuestRelation(BOID),
[SHOWDETAILS] BOOLEAN,
) ON [PRIMARY]

CREATE TABLE [DBO].[QuestRelation](
[ALIAS] CHAR (50),
[BOID] (VARCHAR)(32),
[ISOPTIONAL] BOOLEAN,
[ITSPARENTREL] [VARCHAR](32), FOREIGN KEY (itsparentrel) REFERENCES QuestRelation(BOID),
[ITSGUILDMETA] [VARCHAR](32), FOREIGN KEY (itsguildmeta) REFERENCES GuildMeta(BOID),
[KEYATTR] CHAR (100),
) ON [PRIMARY]
`;

        const result = await fromSQLServer(sql);

        // Should create both tables
        expect(result.tables).toHaveLength(2);

        // Check first table
        const questReward = result.tables.find((t) => t.name === 'QuestReward');
        expect(questReward).toBeDefined();
        expect(questReward?.schema).toBe('DBO');
        expect(questReward?.columns).toHaveLength(6);

        // Check second table
        const questRelation = result.tables.find(
            (t) => t.name === 'QuestRelation'
        );
        expect(questRelation).toBeDefined();
        expect(questRelation?.schema).toBe('DBO');
        expect(questRelation?.columns).toHaveLength(6);

        // Check foreign key relationships
        expect(result.relationships).toHaveLength(2); // Should have 2 FKs (one self-referential in QuestRelation, one from QuestReward to QuestRelation)

        // Check FK from QuestReward to QuestRelation
        const fkToRelation = result.relationships.find(
            (r) =>
                r.sourceTable === 'QuestReward' &&
                r.targetTable === 'QuestRelation'
        );
        expect(fkToRelation).toBeDefined();
        expect(fkToRelation?.sourceColumn).toBe('itsquestrel');
        expect(fkToRelation?.targetColumn).toBe('BOID');

        // Check self-referential FK in QuestRelation
        const selfRefFK = result.relationships.find(
            (r) =>
                r.sourceTable === 'QuestRelation' &&
                r.targetTable === 'QuestRelation' &&
                r.sourceColumn === 'itsparentrel'
        );
        expect(selfRefFK).toBeDefined();
        expect(selfRefFK?.targetColumn).toBe('BOID');
    });

    it('should parse multiple tables with circular dependencies', async () => {
        const sql = `
CREATE TABLE [DBO].[Dragon](
[DRAGONID] (VARCHAR)(32),
[NAME] [VARCHAR](100),
[ITSLAIRREL] [VARCHAR](32), FOREIGN KEY (itslairrel) REFERENCES DragonLair(LAIRID),
[POWER] INT,
) ON [PRIMARY]

CREATE TABLE [DBO].[DragonLair](
[LAIRID] (VARCHAR)(32),
[LOCATION] [VARCHAR](200),
[ITSGUARDIAN] [VARCHAR](32), FOREIGN KEY (itsguardian) REFERENCES Dragon(DRAGONID),
[TREASURES] INT,
) ON [PRIMARY]
`;

        const result = await fromSQLServer(sql);

        // Should create both tables despite circular dependency
        expect(result.tables).toHaveLength(2);

        const dragon = result.tables.find((t) => t.name === 'Dragon');
        expect(dragon).toBeDefined();

        const dragonLair = result.tables.find((t) => t.name === 'DragonLair');
        expect(dragonLair).toBeDefined();

        // Check foreign key relationships (may have one or both depending on parser behavior with circular deps)
        expect(result.relationships.length).toBeGreaterThanOrEqual(1);
    });

    it('should handle exact user input format', async () => {
        // Exact copy of the user's input with fantasy theme
        const sql = `CREATE TABLE [DBO].[WizardDef](
[BOID] (VARCHAR)(32),
[HASEXTRACNTCOL] BOOLEAN,
[HISTORYCD] [VARCHAR](128),
[HISTORYCDFIX] BOOLEAN,
[ITSADWIZARDREL] [VARCHAR](32), FOREIGN KEY (itsadwizardrel) REFERENCES WizardRel(BOID),
[SHOWDETAILS] BOOLEAN, ) ON [PRIMARY]

CREATE TABLE [DBO].[WizardRel](
[ALIAS] CHAR (50),
[BOID] (VARCHAR)(32),
[ISOPTIONAL] BOOLEAN,
[ITSARWIZARDREL] [VARCHAR](32), FOREIGN KEY (itsarwizardrel) REFERENCES WizardRel(BOID),
[ITSARMETABO] [VARCHAR](32), FOREIGN KEY (itsarmetabo) REFERENCES MetaBO(BOID),
[KEYATTR] CHAR (100), ) ON [PRIMARY]`;

        const result = await fromSQLServer(sql);

        // This should create TWO tables, not just one
        expect(result.tables).toHaveLength(2);

        const wizardDef = result.tables.find((t) => t.name === 'WizardDef');
        expect(wizardDef).toBeDefined();
        expect(wizardDef?.columns).toHaveLength(6);

        const wizardRel = result.tables.find((t) => t.name === 'WizardRel');
        expect(wizardRel).toBeDefined();
        expect(wizardRel?.columns).toHaveLength(6);
    });
});
@@ -0,0 +1,93 @@
import { describe, it, expect } from 'vitest';
import { fromSQLServer } from '../sqlserver';

describe('SQL Server FK Verification', () => {
    it('should correctly parse FKs from complex fantasy SQL', async () => {
        const sql = `CREATE TABLE [DBO].[SpellDefinition](
[SPELLID] (VARCHAR)(32),
[HASVERBALCOMP] BOOLEAN,
[INCANTATION] [VARCHAR](128),
[INCANTATIONFIX] BOOLEAN,
[ITSCOMPONENTREL] [VARCHAR](32), FOREIGN KEY (itscomponentrel) REFERENCES SpellComponent(SPELLID),
[SHOWVISUALS] BOOLEAN, ) ON [PRIMARY]

CREATE TABLE [DBO].[SpellComponent](
[ALIAS] CHAR (50),
[SPELLID] (VARCHAR)(32),
[ISOPTIONAL] BOOLEAN,
[ITSPARENTCOMP] [VARCHAR](32), FOREIGN KEY (itsparentcomp) REFERENCES SpellComponent(SPELLID),
[ITSSCHOOLMETA] [VARCHAR](32), FOREIGN KEY (itsschoolmeta) REFERENCES MagicSchool(SCHOOLID),
[KEYATTR] CHAR (100), ) ON [PRIMARY]`;

        const result = await fromSQLServer(sql);

        // Verify tables
        expect(result.tables).toHaveLength(2);
        expect(result.tables.map((t) => t.name).sort()).toEqual([
            'SpellComponent',
            'SpellDefinition',
        ]);

        // Verify that FKs were found (even if MagicSchool doesn't exist)
        // The parsing should find 3 FKs initially, but linkRelationships will filter out the one to MagicSchool
        expect(result.relationships.length).toBeGreaterThanOrEqual(2);

        // Verify specific FKs that should exist
        const fk1 = result.relationships.find(
            (r) =>
                r.sourceTable === 'SpellDefinition' &&
                r.sourceColumn.toLowerCase() === 'itscomponentrel' &&
                r.targetTable === 'SpellComponent'
        );
        expect(fk1).toBeDefined();
        expect(fk1?.targetColumn).toBe('SPELLID');
        expect(fk1?.sourceTableId).toBeTruthy();
        expect(fk1?.targetTableId).toBeTruthy();

        const fk2 = result.relationships.find(
            (r) =>
                r.sourceTable === 'SpellComponent' &&
                r.sourceColumn.toLowerCase() === 'itsparentcomp' &&
                r.targetTable === 'SpellComponent'
        );
        expect(fk2).toBeDefined();
        expect(fk2?.targetColumn).toBe('SPELLID');
        expect(fk2?.sourceTableId).toBeTruthy();
        expect(fk2?.targetTableId).toBeTruthy();

        // Log for debugging
        console.log('\n=== FK Verification Results ===');
        console.log(
            'Tables:',
            result.tables.map((t) => `${t.schema}.${t.name}`)
        );
        console.log('Total FKs found:', result.relationships.length);
        result.relationships.forEach((r, i) => {
            console.log(
                `FK ${i + 1}: ${r.sourceTable}.${r.sourceColumn} -> ${r.targetTable}.${r.targetColumn}`
            );
            console.log(` IDs: ${r.sourceTableId} -> ${r.targetTableId}`);
        });
    });

    it('should parse inline FOREIGN KEY syntax correctly', async () => {
        // Simplified test with just one FK to ensure parsing works
        const sql = `CREATE TABLE [DBO].[WizardTower](
[TOWERID] INT,
[MASTERKEY] [VARCHAR](32), FOREIGN KEY (MASTERKEY) REFERENCES ArcaneGuild(GUILDID),
[NAME] VARCHAR(100)
) ON [PRIMARY]

CREATE TABLE [DBO].[ArcaneGuild](
[GUILDID] [VARCHAR](32),
[GUILDNAME] VARCHAR(100)
) ON [PRIMARY]`;

        const result = await fromSQLServer(sql);

        expect(result.tables).toHaveLength(2);
        expect(result.relationships).toHaveLength(1);
        expect(result.relationships[0].sourceColumn).toBe('MASTERKEY');
        expect(result.relationships[0].targetColumn).toBe('GUILDID');
    });
});
@@ -342,6 +342,35 @@ function parseCreateTableManually(

    // Process each part (column or constraint)
    for (const part of parts) {
        // Handle standalone FOREIGN KEY definitions (without CONSTRAINT keyword)
        // Format: FOREIGN KEY (column) REFERENCES Table(column)
        if (part.match(/^\s*FOREIGN\s+KEY/i)) {
            const fkMatch = part.match(
                /FOREIGN\s+KEY\s*\(([^)]+)\)\s+REFERENCES\s+(?:\[?(\w+)\]?\.)??\[?(\w+)\]?\s*\(([^)]+)\)/i
            );
            if (fkMatch) {
                const [
                    ,
                    sourceCol,
                    targetSchema = 'dbo',
                    targetTable,
                    targetCol,
                ] = fkMatch;
                relationships.push({
                    name: `FK_${tableName}_${sourceCol.trim().replace(/\[|\]/g, '')}`,
                    sourceTable: tableName,
                    sourceSchema: schema,
                    sourceColumn: sourceCol.trim().replace(/\[|\]/g, ''),
                    targetTable: targetTable || targetSchema,
                    targetSchema: targetTable ? targetSchema : 'dbo',
                    targetColumn: targetCol.trim().replace(/\[|\]/g, ''),
                    sourceTableId: tableId,
                    targetTableId: '', // Will be filled later
                });
            }
            continue;
        }

        // Handle constraint definitions
        if (part.match(/^\s*CONSTRAINT/i)) {
            // Parse constraints
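A quick way to see what the standalone FOREIGN KEY pattern above captures is to run it on a single clause. A minimal sketch, assuming a hypothetical `part` string shaped like the test fixtures; the regex literal is copied from the hunk:

    // Sketch only: the sample `part` string is made up for illustration.
    const fkPattern =
        /FOREIGN\s+KEY\s*\(([^)]+)\)\s+REFERENCES\s+(?:\[?(\w+)\]?\.)??\[?(\w+)\]?\s*\(([^)]+)\)/i;

    const part = 'FOREIGN KEY (itscomponentrel) REFERENCES SpellComponent(SPELLID)';
    const match = part.match(fkPattern);
    if (match) {
        // With no schema prefix, group 2 stays undefined and group 3 holds the table name.
        const [, sourceCol, targetSchema, targetTable, targetCol] = match;
        console.log({ sourceCol, targetSchema, targetTable, targetCol });
        // -> { sourceCol: 'itscomponentrel', targetSchema: undefined,
        //      targetTable: 'SpellComponent', targetCol: 'SPELLID' }
    }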
@@ -435,6 +464,13 @@ function parseCreateTableManually(
            columnMatch = part.match(/^\s*(\w+)\s+(\w+)\s+([\d,\s]+)\s+(.*)$/i);
        }

        // Handle unusual format: [COLUMN_NAME] (VARCHAR)(32)
        if (!columnMatch) {
            columnMatch = part.match(
                /^\s*\[?(\w+)\]?\s+\((\w+)\)\s*\(([\d,\s]+|max)\)(.*)$/i
            );
        }

        if (columnMatch) {
            const [, colName, baseType, typeArgs, rest] = columnMatch;
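The fallback pattern above targets the "(VARCHAR)(32)" column style seen in the fixtures. A minimal sketch with a hypothetical column string:

    // Sketch only: sample input invented for illustration.
    const unusualColumnPattern = /^\s*\[?(\w+)\]?\s+\((\w+)\)\s*\(([\d,\s]+|max)\)(.*)$/i;
    const match = '[SPELLID] (VARCHAR)(32)'.match(unusualColumnPattern);
    console.log(match?.slice(1)); // ['SPELLID', 'VARCHAR', '32', ''] -> name, base type, size, remainder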
@@ -446,7 +482,37 @@ function parseCreateTableManually(
            const inlineFkMatch = rest.match(
                /FOREIGN\s+KEY\s+REFERENCES\s+(?:\[?(\w+)\]?\.)??\[?(\w+)\]?\s*\(([^)]+)\)/i
            );
            if (inlineFkMatch) {

            // Also check if there's a FOREIGN KEY after a comma with column name
            // Format: , FOREIGN KEY (columnname) REFERENCES Table(column)
            if (!inlineFkMatch && rest.includes('FOREIGN KEY')) {
                const fkWithColumnMatch = rest.match(
                    /,\s*FOREIGN\s+KEY\s*\((\w+)\)\s+REFERENCES\s+(?:\[?(\w+)\]?\.)??\[?(\w+)\]?\s*\(([^)]+)\)/i
                );
                if (fkWithColumnMatch) {
                    const [, srcCol, targetSchema, targetTable, targetCol] =
                        fkWithColumnMatch;
                    // Only process if srcCol matches current colName (case-insensitive)
                    if (srcCol.toLowerCase() === colName.toLowerCase()) {
                        // Create FK relationship
                        relationships.push({
                            name: `FK_${tableName}_${colName}`,
                            sourceTable: tableName,
                            sourceSchema: schema,
                            sourceColumn: colName,
                            targetTable: targetTable || targetSchema,
                            targetSchema: targetTable
                                ? targetSchema || 'dbo'
                                : 'dbo',
                            targetColumn: targetCol
                                .trim()
                                .replace(/\[|\]/g, ''),
                            sourceTableId: tableId,
                            targetTableId: '', // Will be filled later
                        });
                    }
                }
            } else if (inlineFkMatch) {
                const [, targetSchema = 'dbo', targetTable, targetCol] =
                    inlineFkMatch;
                relationships.push({
@@ -536,10 +602,36 @@ export async function fromSQLServer(
    try {
        // First, handle ALTER TABLE statements for foreign keys
        // Split by GO or semicolon for SQL Server
        const statements = sqlContent
        let statements = sqlContent
            .split(/(?:GO\s*$|;\s*$)/im)
            .filter((stmt) => stmt.trim().length > 0);

        // Additional splitting for CREATE TABLE statements that might not be separated by semicolons
        // If we have a statement with multiple CREATE TABLE, split them
        const expandedStatements: string[] = [];
        for (const stmt of statements) {
            // Check if this statement contains multiple CREATE TABLE statements
            if ((stmt.match(/CREATE\s+TABLE/gi) || []).length > 1) {
                // Split by ") ON [PRIMARY]" followed by CREATE TABLE
                const parts = stmt.split(
                    /\)\s*ON\s*\[PRIMARY\]\s*(?=CREATE\s+TABLE)/gi
                );
                for (let i = 0; i < parts.length; i++) {
                    let part = parts[i].trim();
                    // Re-add ") ON [PRIMARY]" to all parts except the last (which should already have it)
                    if (i < parts.length - 1 && part.length > 0) {
                        part += ') ON [PRIMARY]';
                    }
                    if (part.trim().length > 0) {
                        expandedStatements.push(part);
                    }
                }
            } else {
                expandedStatements.push(stmt);
            }
        }
        statements = expandedStatements;

        const alterTableStatements = statements.filter(
            (stmt) =>
                stmt.trim().toUpperCase().includes('ALTER TABLE') &&
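The extra splitting step above relies on a lookahead so that the delimiter is consumed from the first statement but the next CREATE TABLE is left intact. A minimal sketch with a shortened, hypothetical two-table script:

    // Sketch only: the input script is invented; the split pattern is the one from the hunk.
    const sql = `CREATE TABLE [DBO].[A]([ID] INT) ON [PRIMARY]
    CREATE TABLE [DBO].[B]([ID] INT) ON [PRIMARY]`;

    const parts = sql.split(/\)\s*ON\s*\[PRIMARY\]\s*(?=CREATE\s+TABLE)/gi);
    // parts[0] lost its ") ON [PRIMARY]" suffix to the split, so it is re-added,
    // mirroring the loop above; the last part already ends with it.
    const statements = parts.map((p, i) =>
        i < parts.length - 1 && p.trim().length > 0 ? p.trim() + ') ON [PRIMARY]' : p.trim()
    );
    console.log(statements.length); // 2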
7273 src/lib/dbml/dbml-export/__tests__/cases/1.dbml (new file; diff suppressed because it is too large)
73546 src/lib/dbml/dbml-export/__tests__/cases/1.json (new file; diff suppressed because it is too large)
17 src/lib/dbml/dbml-export/__tests__/cases/2.dbml (new file)
@@ -0,0 +1,17 @@
Table "bruit"."100-AAB-CABAS-Mdap" {
  "qgs_fid" int [pk, not null]
  "geom" geometry
  "from" decimal(8,2)
  "to" decimal(8,2)
  "period" nvarchar(500)
  "objectid" float
  "insee" float
  "nom" nvarchar(500)
  "code_posta" int
  "ut" nvarchar(500)
  "territoire" nvarchar(500)
  "surface" float
  "perimetre" float
  "ccodter" float
  "numcom" nvarchar(500)
}
1 src/lib/dbml/dbml-export/__tests__/cases/2.json (new file)
@@ -0,0 +1 @@
{"id":"mqqwkkodtkfm","name":"NTP_CANPT-db","createdAt":"2025-08-27T17:03:48.994Z","updatedAt":"2025-08-27T17:12:54.617Z","databaseType":"sql_server","tables":[{"id":"e4qecug35j4b7q75u1j3sdca5","name":"100-AAB-CABAS-Mdap","schema":"bruit","x":100,"y":100,"fields":[{"id":"04liixxb8yenudc6gqjjbgm1r","name":"qgs_fid","type":{"id":"int","name":"int"},"primaryKey":true,"unique":true,"nullable":false,"createdAt":1739267036546},{"id":"hr29n1e1jgybuac3gcerk7jyi","name":"geom","type":{"id":"geometry","name":"geometry"},"primaryKey":false,"unique":false,"nullable":true,"createdAt":1739267036546},{"id":"jcqh683op52ovfwqwe0w0i2or","name":"from","type":{"id":"decimal","name":"decimal"},"primaryKey":false,"unique":false,"nullable":true,"createdAt":1739267036546,"precision":8,"scale":2},{"id":"xev33ok0oqqom2n1tabpp5eds","name":"to","type":{"id":"decimal","name":"decimal"},"primaryKey":false,"unique":false,"nullable":true,"createdAt":1739267036546,"precision":8,"scale":2},{"id":"pj36qhdpl0vice9tsyiaaef4l","name":"period","type":{"id":"nvarchar","name":"nvarchar"},"primaryKey":false,"unique":false,"nullable":true,"createdAt":1739267036546,"collation":"French_CI_AS"},{"id":"l4ce4a68j9h7l46p8dg5qi09u","name":"objectid","type":{"id":"float","name":"float"},"primaryKey":false,"unique":false,"nullable":true,"createdAt":1739267036546},{"id":"fi4s2aahfjdeelfkgnrk4q5mk","name":"insee","type":{"id":"float","name":"float"},"primaryKey":false,"unique":false,"nullable":true,"createdAt":1739267036546},{"id":"ujsajf0t5xg0td614lpxk32py","name":"nom","type":{"id":"nvarchar","name":"nvarchar"},"primaryKey":false,"unique":false,"nullable":true,"createdAt":1739267036546,"collation":"French_CI_AS"},{"id":"9j0c54ez2t5dgr0ybzd0ksbuz","name":"code_posta","type":{"id":"int","name":"int"},"primaryKey":false,"unique":false,"nullable":true,"createdAt":1739267036546},{"id":"gybxvu42odvvjyfoe9zdn7tul","name":"ut","type":{"id":"nvarchar","name":"nvarchar"},"primaryKey":false,"unique":false,"nullable":true,"createdAt":1739267036546,"collation":"French_CI_AS"},{"id":"qon7xs001v9q8frad6jr9lrho","name":"territoire","type":{"id":"nvarchar","name":"nvarchar"},"primaryKey":false,"unique":false,"nullable":true,"createdAt":1739267036546,"collation":"French_CI_AS"},{"id":"aeqrfvw5dvig7t8zyjfiri707","name":"surface","type":{"id":"float","name":"float"},"primaryKey":false,"unique":false,"nullable":true,"createdAt":1739267036546},{"id":"eqbcy7gfd49a3a6ds6ne6fmzd","name":"perimetre","type":{"id":"float","name":"float"},"primaryKey":false,"unique":false,"nullable":true,"createdAt":1739267036546},{"id":"cbxmodo9l3keqxapqnlfjnqy2","name":"ccodter","type":{"id":"float","name":"float"},"primaryKey":false,"unique":false,"nullable":true,"createdAt":1739267036546},{"id":"c3j131aycof5kgyiypva428l3","name":"numcom","type":{"id":"nvarchar","name":"nvarchar"},"primaryKey":false,"unique":false,"nullable":true,"createdAt":1739267036546,"collation":"French_CI_AS"}],"indexes":[],"color":"#8a61f5","isView":false,"isMaterializedView":false,"createdAt":1739267036546,"diagramId":"mqqwkkodtkfm","expanded":true}],"relationships":[],"dependencies":[],"areas":[],"customTypes":[]}
8 src/lib/dbml/dbml-export/__tests__/cases/3.dbml (new file)
@@ -0,0 +1,8 @@
Table "public"."guy_table" {
  "id" integer [pk, not null]
  "created_at" timestamp [not null]
  "column3" text
  "arrayfield" text[]
  "field_5" "character varying"
  "field_6" "character varying(100)"
}
1 src/lib/dbml/dbml-export/__tests__/cases/3.json (new file)
@@ -0,0 +1 @@
{"id":"mqqwkkod7trl","name":"guy-db","databaseType":"postgresql","createdAt":"2025-09-10T18:45:32.817Z","updatedAt":"2025-09-10T19:15:21.682Z","tables":[{"id":"g2hv9mlo3qbyjnxdc44j1zxl2","name":"guy_table","schema":"public","x":100,"y":300,"fields":[{"id":"qdqgzmtxsi84ujfuktsvjuop8","name":"id","type":{"id":"integer","name":"integer"},"primaryKey":true,"unique":true,"nullable":false,"createdAt":1757529932816},{"id":"wsys99f86679ch6fbjryw0egr","name":"created_at","type":{"id":"timestamp_without_time_zone","name":"timestamp without time zone"},"primaryKey":false,"unique":false,"nullable":false,"createdAt":1757529932816},{"id":"ro39cba7sd290k90qjgzib8pi","name":"column3","type":{"id":"text","name":"text"},"primaryKey":false,"unique":false,"nullable":true,"createdAt":1757529932816},{"id":"6cntbu2orwk7kxlg0rcduqgbo","name":"arrayfield","type":{"id":"array","name":"array"},"primaryKey":false,"unique":false,"nullable":true,"createdAt":1757529932816},{"id":"7cz0ybdoov2m3wbgm9tlzatz0","name":"field_5","type":{"id":"character_varying","name":"character varying"},"unique":false,"nullable":true,"primaryKey":false,"createdAt":1757531685981},{"id":"zzwlyvqzz93oh0vv8f8qob103","name":"field_6","type":{"id":"character_varying","name":"character varying"},"unique":false,"nullable":true,"primaryKey":false,"createdAt":1757531713961,"characterMaximumLength":"100"}],"indexes":[{"id":"r0w71lnbnje2j9cz1t9j64rya","name":"guy_table_pkey","unique":true,"fieldIds":["qdqgzmtxsi84ujfuktsvjuop8"],"createdAt":1757529932816,"isPrimaryKey":true}],"color":"#8eb7ff","isView":false,"isMaterializedView":false,"createdAt":1757529932816,"diagramId":"mqqwkkod7trl"}],"relationships":[],"dependencies":[],"areas":[],"customTypes":[]}
@@ -0,0 +1,67 @@
import { describe, it, expect } from 'vitest';
import { diagramFromJSONInput } from '@/lib/export-import-utils';
import { generateDBMLFromDiagram } from '../dbml-export';
import * as fs from 'fs';
import * as path from 'path';

describe('DBML Export - Diagram Case 1 Tests', () => {
    it('should handle case 1 diagram', { timeout: 30000 }, async () => {
        // Read the JSON file
        const jsonPath = path.join(__dirname, 'cases', '1.json');
        const jsonContent = fs.readFileSync(jsonPath, 'utf-8');

        // Parse the JSON and convert to diagram
        const diagram = diagramFromJSONInput(jsonContent);

        // Generate DBML from the diagram
        const result = generateDBMLFromDiagram(diagram);
        const generatedDBML = result.standardDbml;

        // Read the expected DBML file
        const dbmlPath = path.join(__dirname, 'cases', '1.dbml');
        const expectedDBML = fs.readFileSync(dbmlPath, 'utf-8');

        // Compare the generated DBML with the expected DBML
        expect(generatedDBML).toBe(expectedDBML);
    });

    it('should handle case 2 diagram', { timeout: 30000 }, async () => {
        // Read the JSON file
        const jsonPath = path.join(__dirname, 'cases', '2.json');
        const jsonContent = fs.readFileSync(jsonPath, 'utf-8');

        // Parse the JSON and convert to diagram
        const diagram = diagramFromJSONInput(jsonContent);

        // Generate DBML from the diagram
        const result = generateDBMLFromDiagram(diagram);
        const generatedDBML = result.standardDbml;

        // Read the expected DBML file
        const dbmlPath = path.join(__dirname, 'cases', '2.dbml');
        const expectedDBML = fs.readFileSync(dbmlPath, 'utf-8');

        // Compare the generated DBML with the expected DBML
        expect(generatedDBML).toBe(expectedDBML);
    });

    it('should handle case 3 diagram', { timeout: 30000 }, async () => {
        // Read the JSON file
        const jsonPath = path.join(__dirname, 'cases', '3.json');
        const jsonContent = fs.readFileSync(jsonPath, 'utf-8');

        // Parse the JSON and convert to diagram
        const diagram = diagramFromJSONInput(jsonContent);

        // Generate DBML from the diagram
        const result = generateDBMLFromDiagram(diagram);
        const generatedDBML = result.standardDbml;

        // Read the expected DBML file
        const dbmlPath = path.join(__dirname, 'cases', '3.dbml');
        const expectedDBML = fs.readFileSync(dbmlPath, 'utf-8');

        // Compare the generated DBML with the expected DBML
        expect(generatedDBML).toBe(expectedDBML);
    });
});
@@ -1,5 +1,5 @@
import { importer } from '@dbml/core';
import { exportBaseSQL } from '@/lib/data/export-metadata/export-sql-script';
import { exportBaseSQL } from '@/lib/data/sql-export/export-sql-script';
import type { Diagram } from '@/lib/domain/diagram';
import { DatabaseType } from '@/lib/domain/database-type';
import type { DBTable } from '@/lib/domain/db-table';
@@ -596,6 +596,13 @@ const normalizeCharTypeFormat = (dbml: string): string => {
        .replace(/character \(([0-9]+)\)/g, 'character($1)');
};

// Fix array types that are incorrectly quoted by DBML importer
const fixArrayTypes = (dbml: string): string => {
    // Remove quotes around array types like "text[]" -> text[]
    // Matches patterns like: "fieldname" "type[]" and replaces with "fieldname" type[]
    return dbml.replace(/(\s+"[^"]+"\s+)"([^"\s]+\[\])"/g, '$1$2');
};

// Fix table definitions with incorrect bracket syntax
const fixTableBracketSyntax = (dbml: string): string => {
    // Fix patterns like Table [schema].[table] to Table "schema"."table"
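To see what the new fixArrayTypes pass does, here is a minimal sketch on a one-line DBML fragment; the fragment is hypothetical, the replace pattern is the one from the hunk:

    // Sketch only: strips the quotes the DBML importer adds around array types.
    const fixArrayTypes = (dbml: string): string =>
        dbml.replace(/(\s+"[^"]+"\s+)"([^"\s]+\[\])"/g, '$1$2');

    console.log(fixArrayTypes('  "arrayfield" "text[]"'));
    // -> '  "arrayfield" text[]'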
@@ -985,12 +992,14 @@ export function generateDBMLFromDiagram(diagram: Diagram): DBMLExportResult {
            );
        }

        standard = normalizeCharTypeFormat(
            fixMultilineTableNames(
                fixTableBracketSyntax(
                    importer.import(
                        baseScript,
                        databaseTypeToImportFormat(diagram.databaseType)
        standard = fixArrayTypes(
            normalizeCharTypeFormat(
                fixMultilineTableNames(
                    fixTableBracketSyntax(
                        importer.import(
                            baseScript,
                            databaseTypeToImportFormat(diagram.databaseType)
                        )
                    )
                )
            )
@@ -1007,7 +1016,9 @@ export function generateDBMLFromDiagram(diagram: Diagram): DBMLExportResult {
            standard = enumsDBML + '\n\n' + standard;
        }

        inline = normalizeCharTypeFormat(convertToInlineRefs(standard));
        inline = fixArrayTypes(
            normalizeCharTypeFormat(convertToInlineRefs(standard))
        );

        // Clean up excessive empty lines in both outputs
        standard = standard.replace(/\n\s*\n\s*\n/g, '\n\n');
@@ -1,8 +1,8 @@
import { describe, it, expect } from 'vitest';
import { importDBMLToDiagram } from '../dbml-import';
import { exportPostgreSQL } from '@/lib/data/export-metadata/export-per-type/postgresql';
import { exportMySQL } from '@/lib/data/export-metadata/export-per-type/mysql';
import { exportMSSQL } from '@/lib/data/export-metadata/export-per-type/mssql';
import { exportPostgreSQL } from '@/lib/data/sql-export/export-per-type/postgresql';
import { exportMySQL } from '@/lib/data/sql-export/export-per-type/mysql';
import { exportMSSQL } from '@/lib/data/sql-export/export-per-type/mssql';
import { DatabaseType } from '@/lib/domain/database-type';

describe('Composite Primary Key with Name', () => {

@@ -154,7 +154,7 @@ Note note_1750185617764 {

        // Should not throw
        const parser = new Parser();
        expect(() => parser.parse(sanitized, 'dbml')).not.toThrow();
        expect(() => parser.parse(sanitized, 'dbmlv2')).not.toThrow();
    });
});
@@ -1,3 +1,5 @@
import type { CompilerError } from '@dbml/core/types/parse/error';

export interface DBMLError {
    message: string;
    line: number;
@@ -9,28 +11,12 @@ export function parseDBMLError(error: unknown): DBMLError | null {
        if (typeof error === 'string') {
            const parsed = JSON.parse(error);
            if (parsed.diags?.[0]) {
                const diag = parsed.diags[0];

                return {
                    message: diag.message,
                    line: diag.location.start.line,
                    column: diag.location.start.column,
                };
                const parsedError = parsed as CompilerError;
                return getFirstErrorFromCompileError(parsedError);
            }
        } else if (error && typeof error === 'object' && 'diags' in error) {
            const parsed = error as {
                diags: Array<{
                    message: string;
                    location: { start: { line: number; column: number } };
                }>;
            };
            if (parsed.diags?.[0]) {
                return {
                    message: parsed.diags[0].message,
                    line: parsed.diags[0].location.start.line,
                    column: parsed.diags[0].location.start.column,
                };
            }
            const parsed = error as CompilerError;
            return getFirstErrorFromCompileError(parsed);
        }
    } catch (e) {
        console.error('Error parsing DBML error:', e);
@@ -38,3 +24,25 @@ export function parseDBMLError(error: unknown): DBMLError | null {

    return null;
}

const getFirstErrorFromCompileError = (
    error: CompilerError
): DBMLError | null => {
    const diags = (error.diags ?? []).sort((a, b) => {
        if (a.location.start.line === b.location.start.line) {
            return a.location.start.column - b.location.start.column;
        }
        return a.location.start.line - b.location.start.line;
    });

    if (diags.length > 0) {
        const firstDiag = diags[0];
        return {
            message: firstDiag.message,
            line: firstDiag.location.start.line,
            column: firstDiag.location.start.column,
        };
    }

    return null;
};
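The reworked error handling above reports the diagnostic with the earliest line (then column). A minimal sketch of that ordering, using a hypothetical CompilerError-shaped literal rather than real @dbml/core output:

    // Sketch only: invented diagnostics to show the sort order.
    const fakeError = {
        diags: [
            { message: 'unexpected token', location: { start: { line: 7, column: 3 } } },
            { message: 'missing brace', location: { start: { line: 2, column: 9 } } },
        ],
    };
    const first = [...fakeError.diags].sort((a, b) =>
        a.location.start.line === b.location.start.line
            ? a.location.start.column - b.location.start.column
            : a.location.start.line - b.location.start.line
    )[0];
    console.log(first.message); // 'missing brace' (line 2 comes before line 7)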
@@ -223,7 +223,7 @@ export const importDBMLToDiagram = async (
        };
    }

    const parsedData = parser.parse(sanitizedContent, 'dbml');
    const parsedData = parser.parse(sanitizedContent, 'dbmlv2');

    // Handle case where no schemas are found
    if (!parsedData.schemas || parsedData.schemas.length === 0) {
@@ -1,5 +1,5 @@
import { describe, it, expect } from 'vitest';
import { createTablesFromMetadata } from '../db-table';
import { createTablesFromMetadata } from '@/lib/data/import-metadata/import/tables';
import { DatabaseType } from '../database-type';
import type { DatabaseMetadata } from '@/lib/data/import-metadata/metadata-types/database-metadata';
@@ -1,7 +1,4 @@
import { z } from 'zod';
import type { DBCustomTypeInfo } from '@/lib/data/import-metadata/metadata-types/custom-type-info';
import { generateId } from '../utils';
import { schemaNameToDomainSchemaName } from './db-schema';

export enum DBCustomTypeKind {
    enum = 'enum',
@@ -38,23 +35,6 @@ export const dbCustomTypeSchema: z.ZodType<DBCustomType> = z.object({
    order: z.number().or(z.null()).optional(),
});

export const createCustomTypesFromMetadata = ({
    customTypes,
}: {
    customTypes: DBCustomTypeInfo[];
}): DBCustomType[] => {
    return customTypes.map((customType) => {
        return {
            id: generateId(),
            schema: schemaNameToDomainSchemaName(customType.schema),
            name: customType.type,
            kind: customType.kind as DBCustomTypeKind,
            values: customType.values,
            fields: customType.fields,
        };
    });
};

export const customTypeKindToLabel: Record<DBCustomTypeKind, string> = {
    enum: 'Enum',
    composite: 'Composite',
@@ -1,10 +1,4 @@
|
||||
import { z } from 'zod';
|
||||
import type { ViewInfo } from '../data/import-metadata/metadata-types/view-info';
|
||||
import { DatabaseType } from './database-type';
|
||||
import { schemaNameToDomainSchemaName } from './db-schema';
|
||||
import { decodeViewDefinition, type DBTable } from './db-table';
|
||||
import { generateId } from '@/lib/utils';
|
||||
import type { AST } from 'node-sql-parser';
|
||||
|
||||
export interface DBDependency {
|
||||
id: string;
|
||||
@@ -23,348 +17,3 @@ export const dbDependencySchema: z.ZodType<DBDependency> = z.object({
|
||||
dependentTableId: z.string(),
|
||||
createdAt: z.number(),
|
||||
});
|
||||
|
||||
const astDatabaseTypes: Record<DatabaseType, string> = {
|
||||
[DatabaseType.POSTGRESQL]: 'postgresql',
|
||||
[DatabaseType.MYSQL]: 'postgresql',
|
||||
[DatabaseType.MARIADB]: 'postgresql',
|
||||
[DatabaseType.GENERIC]: 'postgresql',
|
||||
[DatabaseType.SQLITE]: 'postgresql',
|
||||
[DatabaseType.SQL_SERVER]: 'postgresql',
|
||||
[DatabaseType.CLICKHOUSE]: 'postgresql',
|
||||
[DatabaseType.COCKROACHDB]: 'postgresql',
|
||||
[DatabaseType.ORACLE]: 'postgresql',
|
||||
};
|
||||
|
||||
export const createDependenciesFromMetadata = async ({
|
||||
views,
|
||||
tables,
|
||||
databaseType,
|
||||
}: {
|
||||
views: ViewInfo[];
|
||||
tables: DBTable[];
|
||||
databaseType: DatabaseType;
|
||||
}): Promise<DBDependency[]> => {
|
||||
if (!views || views.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const { Parser } = await import('node-sql-parser');
|
||||
const parser = new Parser();
|
||||
|
||||
const dependencies = views
|
||||
.flatMap((view) => {
|
||||
const viewSchema = schemaNameToDomainSchemaName(view.schema);
|
||||
const viewTable = tables.find(
|
||||
(table) =>
|
||||
table.name === view.view_name && viewSchema === table.schema
|
||||
);
|
||||
|
||||
if (!viewTable) {
|
||||
console.warn(
|
||||
`Source table for view ${view.view_name} not found (schema: ${viewSchema})`
|
||||
);
|
||||
return []; // Skip this view and proceed to the next
|
||||
}
|
||||
|
||||
if (view.view_definition) {
|
||||
try {
|
||||
const decodedViewDefinition = decodeViewDefinition(
|
||||
databaseType,
|
||||
view.view_definition
|
||||
);
|
||||
|
||||
let modifiedViewDefinition = '';
|
||||
if (
|
||||
databaseType === DatabaseType.MYSQL ||
|
||||
databaseType === DatabaseType.MARIADB
|
||||
) {
|
||||
modifiedViewDefinition = preprocessViewDefinitionMySQL(
|
||||
decodedViewDefinition
|
||||
);
|
||||
} else if (databaseType === DatabaseType.SQL_SERVER) {
|
||||
modifiedViewDefinition =
|
||||
preprocessViewDefinitionSQLServer(
|
||||
decodedViewDefinition
|
||||
);
|
||||
} else {
|
||||
modifiedViewDefinition = preprocessViewDefinition(
|
||||
decodedViewDefinition
|
||||
);
|
||||
}
|
||||
|
||||
// Parse using the appropriate dialect
|
||||
const ast = parser.astify(modifiedViewDefinition, {
|
||||
database: astDatabaseTypes[databaseType],
|
||||
type: 'select', // Parsing a SELECT statement
|
||||
});
|
||||
|
||||
let relatedTables = extractTablesFromAST(ast);
|
||||
|
||||
// Filter out duplicate tables without schema
|
||||
relatedTables = filterDuplicateTables(relatedTables);
|
||||
|
||||
return relatedTables.map((relTable) => {
|
||||
const relSchema = relTable.schema || view.schema; // Use view's schema if relSchema is undefined
|
||||
const relTableName = relTable.tableName;
|
||||
|
||||
const table = tables.find(
|
||||
(table) =>
|
||||
table.name === relTableName &&
|
||||
(table.schema || '') === relSchema
|
||||
);
|
||||
|
||||
if (table) {
|
||||
const dependency: DBDependency = {
|
||||
id: generateId(),
|
||||
schema: view.schema,
|
||||
tableId: table.id, // related table
|
||||
dependentSchema: table.schema,
|
||||
dependentTableId: viewTable.id, // dependent view
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
|
||||
return dependency;
|
||||
} else {
|
||||
console.warn(
|
||||
`Dependent table ${relSchema}.${relTableName} not found for view ${view.schema}.${view.view_name}`
|
||||
);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
console.error(
|
||||
`Error parsing view ${view.schema}.${view.view_name}:`,
|
||||
error
|
||||
);
|
||||
return [];
|
||||
}
|
||||
} else {
|
||||
console.warn(
|
||||
`View definition missing for ${view.schema}.${view.view_name}`
|
||||
);
|
||||
return [];
|
||||
}
|
||||
})
|
||||
.filter((dependency) => dependency !== null);
|
||||
|
||||
return dependencies;
|
||||
};
|
||||
|
||||
// Add this new function to filter out duplicate tables
|
||||
function filterDuplicateTables(
|
||||
tables: { schema?: string; tableName: string }[]
|
||||
): { schema?: string; tableName: string }[] {
|
||||
const tableMap = new Map<string, { schema?: string; tableName: string }>();
|
||||
|
||||
for (const table of tables) {
|
||||
const key = table.tableName;
|
||||
const existingTable = tableMap.get(key);
|
||||
|
||||
if (!existingTable || (table.schema && !existingTable.schema)) {
|
||||
tableMap.set(key, table);
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(tableMap.values());
|
||||
}
|
||||
|
||||
// Preprocess the view_definition to remove schema from CREATE VIEW
|
||||
function preprocessViewDefinition(viewDefinition: string): string {
|
||||
if (!viewDefinition) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Remove leading and trailing whitespace
|
||||
viewDefinition = viewDefinition.replace(/\s+/g, ' ').trim();
|
||||
|
||||
// Replace escaped double quotes with regular ones
|
||||
viewDefinition = viewDefinition.replace(/\\"/g, '"');
|
||||
|
||||
// Replace 'CREATE MATERIALIZED VIEW' with 'CREATE VIEW'
|
||||
viewDefinition = viewDefinition.replace(
|
||||
/CREATE\s+MATERIALIZED\s+VIEW/i,
|
||||
'CREATE VIEW'
|
||||
);
|
||||
|
||||
// Regular expression to match 'CREATE VIEW [schema.]view_name [ (column definitions) ] AS'
|
||||
// This regex captures the view name and skips any content between the view name and 'AS'
|
||||
const regex =
|
||||
/CREATE\s+VIEW\s+(?:(?:`[^`]+`|"[^"]+"|\w+)\.)?(?:`([^`]+)`|"([^"]+)"|(\w+))[\s\S]*?\bAS\b\s+/i;
|
||||
|
||||
const match = viewDefinition.match(regex);
|
||||
let modifiedDefinition: string;
|
||||
|
||||
if (match) {
|
||||
const viewName = match[1] || match[2] || match[3];
|
||||
// Extract the SQL after the 'AS' keyword
|
||||
const restOfDefinition = viewDefinition.substring(
|
||||
match.index! + match[0].length
|
||||
);
|
||||
|
||||
// Replace double-quoted identifiers with unquoted ones
|
||||
let modifiedSQL = restOfDefinition.replace(/"(\w+)"/g, '$1');
|
||||
|
||||
// Replace '::' type casts with 'CAST' expressions
|
||||
modifiedSQL = modifiedSQL.replace(
|
||||
/\(([^()]+)\)::(\w+)/g,
|
||||
'CAST($1 AS $2)'
|
||||
);
|
||||
|
||||
// Remove ClickHouse-specific syntax that may still be present
|
||||
// For example, remove SETTINGS clauses inside the SELECT statement
|
||||
modifiedSQL = modifiedSQL.replace(/\bSETTINGS\b[\s\S]*$/i, '');
|
||||
|
||||
modifiedDefinition = `CREATE VIEW ${viewName} AS ${modifiedSQL}`;
|
||||
} else {
|
||||
console.warn('Could not preprocess view definition:', viewDefinition);
|
||||
modifiedDefinition = viewDefinition;
|
||||
}
|
||||
|
||||
return modifiedDefinition;
|
||||
}
|
||||
|
||||
// Preprocess the view_definition for SQL Server
|
||||
function preprocessViewDefinitionSQLServer(viewDefinition: string): string {
|
||||
if (!viewDefinition) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Remove BOM if present
|
||||
viewDefinition = viewDefinition.replace(/^\uFEFF/, '');
|
||||
|
||||
// Normalize whitespace
|
||||
viewDefinition = viewDefinition.replace(/\s+/g, ' ').trim();
|
||||
|
||||
// Remove square brackets and replace with double quotes
|
||||
viewDefinition = viewDefinition.replace(/\[([^\]]+)\]/g, '"$1"');
|
||||
|
||||
// Remove database names from fully qualified identifiers
|
||||
viewDefinition = viewDefinition.replace(
|
||||
/"([a-zA-Z0-9_]+)"\."([a-zA-Z0-9_]+)"\."([a-zA-Z0-9_]+)"/g,
|
||||
'"$2"."$3"'
|
||||
);
|
||||
|
||||
// Replace SQL Server functions with PostgreSQL equivalents
|
||||
viewDefinition = viewDefinition.replace(/\bGETDATE\(\)/gi, 'NOW()');
|
||||
viewDefinition = viewDefinition.replace(/\bISNULL\(/gi, 'COALESCE(');
|
||||
|
||||
// Replace 'TOP N' with 'LIMIT N' at the end of the query
|
||||
const topMatch = viewDefinition.match(/SELECT\s+TOP\s+(\d+)/i);
|
||||
if (topMatch) {
|
||||
const topN = topMatch[1];
|
||||
viewDefinition = viewDefinition.replace(
|
||||
/SELECT\s+TOP\s+\d+/i,
|
||||
'SELECT'
|
||||
);
|
||||
viewDefinition = viewDefinition.replace(/;+\s*$/, ''); // Remove semicolons at the end
|
||||
viewDefinition += ` LIMIT ${topN}`;
|
||||
}
|
||||
|
||||
viewDefinition = viewDefinition.replace(/\n/g, ''); // Remove newlines
|
||||
|
||||
// Adjust CREATE VIEW syntax
|
||||
const regex =
|
||||
/CREATE\s+VIEW\s+(?:"?([^".\s]+)"?\.)?"?([^".\s]+)"?\s+AS\s+/i;
|
||||
const match = viewDefinition.match(regex);
|
||||
let modifiedDefinition: string;
|
||||
|
||||
if (match) {
|
||||
const viewName = match[2];
|
||||
const modifiedSQL = viewDefinition.substring(
|
||||
match.index! + match[0].length
|
||||
);
|
||||
|
||||
// Remove semicolons at the end
|
||||
const finalSQL = modifiedSQL.replace(/;+\s*$/, '');
|
||||
|
||||
modifiedDefinition = `CREATE VIEW "${viewName}" AS ${finalSQL}`;
|
||||
} else {
|
||||
console.warn('Could not preprocess view definition:', viewDefinition);
|
||||
modifiedDefinition = viewDefinition;
|
||||
}
|
||||
|
||||
return modifiedDefinition;
|
||||
}
|
||||
|
||||
// Preprocess the view_definition to remove schema from CREATE VIEW
|
||||
function preprocessViewDefinitionMySQL(viewDefinition: string): string {
|
||||
if (!viewDefinition) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Remove any trailing semicolons
|
||||
viewDefinition = viewDefinition.replace(/;\s*$/, '');
|
||||
|
||||
// Remove backticks from identifiers
|
||||
viewDefinition = viewDefinition.replace(/`/g, '');
|
||||
|
||||
// Remove unnecessary parentheses around joins and ON clauses
|
||||
viewDefinition = removeRedundantParentheses(viewDefinition);
|
||||
|
||||
return viewDefinition;
|
||||
}
|
||||
|
||||
function removeRedundantParentheses(sql: string): string {
|
||||
// Regular expressions to match unnecessary parentheses
|
||||
const patterns = [
|
||||
/\(\s*(JOIN\s+[^()]+?)\s*\)/gi,
|
||||
/\(\s*(ON\s+[^()]+?)\s*\)/gi,
|
||||
// Additional patterns if necessary
|
||||
];
|
||||
|
||||
let prevSql;
|
||||
do {
|
||||
prevSql = sql;
|
||||
patterns.forEach((pattern) => {
|
||||
sql = sql.replace(pattern, '$1');
|
||||
});
|
||||
} while (sql !== prevSql);
|
||||
|
||||
return sql;
|
||||
}
|
||||
|
||||
function extractTablesFromAST(
|
||||
ast: AST | AST[]
|
||||
): { schema?: string; tableName: string }[] {
|
||||
const tablesMap = new Map<string, { schema: string; tableName: string }>();
|
||||
const visitedNodes = new Set();
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
function traverse(node: any) {
|
||||
if (!node || visitedNodes.has(node)) return;
|
||||
visitedNodes.add(node);
|
||||
|
||||
if (Array.isArray(node)) {
|
||||
node.forEach(traverse);
|
||||
} else if (typeof node === 'object') {
|
||||
// Check if node represents a table
|
||||
if (
|
||||
Object.hasOwnProperty.call(node, 'table') &&
|
||||
typeof node.table === 'string'
|
||||
) {
|
||||
let schema = node.db || node.schema;
|
||||
const tableName = node.table;
|
||||
if (tableName) {
|
||||
// Assign default schema if undefined
|
||||
schema = schemaNameToDomainSchemaName(schema) || '';
|
||||
const key = `${schema}.${tableName}`;
|
||||
if (!tablesMap.has(key)) {
|
||||
tablesMap.set(key, { schema, tableName });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Recursively traverse all properties
|
||||
for (const key in node) {
|
||||
if (Object.hasOwnProperty.call(node, key)) {
|
||||
traverse(node[key]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
traverse(ast);
|
||||
|
||||
return Array.from(tablesMap.values());
|
||||
}
|
||||
|
||||
@@ -4,11 +4,6 @@ import {
|
||||
findDataTypeDataById,
|
||||
type DataType,
|
||||
} from '../data/data-types/data-types';
|
||||
import type { ColumnInfo } from '../data/import-metadata/metadata-types/column-info';
|
||||
import type { AggregatedIndexInfo } from '../data/import-metadata/metadata-types/index-info';
|
||||
import type { PrimaryKeyInfo } from '../data/import-metadata/metadata-types/primary-key-info';
|
||||
import type { TableInfo } from '../data/import-metadata/metadata-types/table-info';
|
||||
import { generateId } from '../utils';
|
||||
import type { DatabaseType } from './database-type';
|
||||
|
||||
export interface DBField {
|
||||
@@ -45,64 +40,6 @@ export const dbFieldSchema: z.ZodType<DBField> = z.object({
|
||||
comments: z.string().or(z.null()).optional(),
|
||||
});
|
||||
|
||||
export const createFieldsFromMetadata = ({
|
||||
tableColumns,
|
||||
tablePrimaryKeys,
|
||||
aggregatedIndexes,
|
||||
}: {
|
||||
tableColumns: ColumnInfo[];
|
||||
tableSchema?: string;
|
||||
tableInfo: TableInfo;
|
||||
tablePrimaryKeys: PrimaryKeyInfo[];
|
||||
aggregatedIndexes: AggregatedIndexInfo[];
|
||||
}) => {
|
||||
const uniqueColumns = tableColumns.reduce((acc, col) => {
|
||||
if (!acc.has(col.name)) {
|
||||
acc.set(col.name, col);
|
||||
}
|
||||
return acc;
|
||||
}, new Map<string, ColumnInfo>());
|
||||
|
||||
const sortedColumns = Array.from(uniqueColumns.values()).sort(
|
||||
(a, b) => a.ordinal_position - b.ordinal_position
|
||||
);
|
||||
|
||||
const tablePrimaryKeysColumns = tablePrimaryKeys.map((pk) =>
|
||||
pk.column.trim()
|
||||
);
|
||||
|
||||
return sortedColumns.map(
|
||||
(col: ColumnInfo): DBField => ({
|
||||
id: generateId(),
|
||||
name: col.name,
|
||||
type: {
|
||||
id: col.type.split(' ').join('_').toLowerCase(),
|
||||
name: col.type.toLowerCase(),
|
||||
},
|
||||
primaryKey: tablePrimaryKeysColumns.includes(col.name),
|
||||
unique: Object.values(aggregatedIndexes).some(
|
||||
(idx) =>
|
||||
idx.unique &&
|
||||
idx.columns.length === 1 &&
|
||||
idx.columns[0].name === col.name
|
||||
),
|
||||
nullable: Boolean(col.nullable),
|
||||
...(col.character_maximum_length &&
|
||||
col.character_maximum_length !== 'null'
|
||||
? { characterMaximumLength: col.character_maximum_length }
|
||||
: {}),
|
||||
...(col.precision?.precision
|
||||
? { precision: col.precision.precision }
|
||||
: {}),
|
||||
...(col.precision?.scale ? { scale: col.precision.scale } : {}),
|
||||
...(col.default ? { default: col.default } : {}),
|
||||
...(col.collation ? { collation: col.collation } : {}),
|
||||
createdAt: Date.now(),
|
||||
comments: col.comment ? col.comment : undefined,
|
||||
})
|
||||
);
|
||||
};
|
||||
|
||||
export const generateDBFieldSuffix = (
|
||||
field: DBField,
|
||||
{
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
import { z } from 'zod';
|
||||
import type { AggregatedIndexInfo } from '../data/import-metadata/metadata-types/index-info';
|
||||
import { generateId } from '../utils';
|
||||
import type { DBField } from './db-field';
|
||||
import { DatabaseType } from './database-type';
|
||||
import type { DBTable } from './db-table';
|
||||
|
||||
@@ -43,27 +41,6 @@ export const dbIndexSchema: z.ZodType<DBIndex> = z.object({
|
||||
isPrimaryKey: z.boolean().or(z.null()).optional(),
|
||||
});
|
||||
|
||||
export const createIndexesFromMetadata = ({
|
||||
aggregatedIndexes,
|
||||
fields,
|
||||
}: {
|
||||
aggregatedIndexes: AggregatedIndexInfo[];
|
||||
fields: DBField[];
|
||||
}): DBIndex[] =>
|
||||
aggregatedIndexes.map(
|
||||
(idx): DBIndex => ({
|
||||
id: generateId(),
|
||||
name: idx.name,
|
||||
unique: Boolean(idx.unique),
|
||||
fieldIds: idx.columns
|
||||
.sort((a, b) => a.position - b.position)
|
||||
.map((c) => fields.find((f) => f.name === c.name)?.id)
|
||||
.filter((id): id is string => id !== undefined),
|
||||
createdAt: Date.now(),
|
||||
type: idx.index_type?.toLowerCase() as IndexType,
|
||||
})
|
||||
);
|
||||
|
||||
export const databaseIndexTypes: { [key in DatabaseType]?: IndexType[] } = {
|
||||
[DatabaseType.POSTGRESQL]: ['btree', 'hash'],
|
||||
};
|
||||
|
||||
@@ -1,9 +1,4 @@
|
||||
import { z } from 'zod';
|
||||
import type { ForeignKeyInfo } from '../data/import-metadata/metadata-types/foreign-key-info';
|
||||
import type { DBField } from './db-field';
|
||||
import { schemaNameToDomainSchemaName } from './db-schema';
|
||||
import type { DBTable } from './db-table';
|
||||
import { generateId } from '@/lib/utils';
|
||||
|
||||
export interface DBRelationship {
|
||||
id: string;
|
||||
@@ -40,82 +35,6 @@ export type RelationshipType =
|
||||
| 'many_to_many';
|
||||
export type Cardinality = 'one' | 'many';
|
||||
|
||||
const determineCardinality = (
|
||||
field: DBField,
|
||||
isTablePKComplex: boolean
|
||||
): Cardinality => {
|
||||
return field.unique || (field.primaryKey && !isTablePKComplex)
|
||||
? 'one'
|
||||
: 'many';
|
||||
};
|
||||
|
||||
export const createRelationshipsFromMetadata = ({
|
||||
foreignKeys,
|
||||
tables,
|
||||
}: {
|
||||
foreignKeys: ForeignKeyInfo[];
|
||||
tables: DBTable[];
|
||||
}): DBRelationship[] => {
|
||||
return foreignKeys
|
||||
.map((fk: ForeignKeyInfo): DBRelationship | null => {
|
||||
const schema = schemaNameToDomainSchemaName(fk.schema);
|
||||
const sourceTable = tables.find(
|
||||
(table) => table.name === fk.table && table.schema === schema
|
||||
);
|
||||
|
||||
const targetSchema = schemaNameToDomainSchemaName(
|
||||
fk.reference_schema
|
||||
);
|
||||
|
||||
const targetTable = tables.find(
|
||||
(table) =>
|
||||
table.name === fk.reference_table &&
|
||||
table.schema === targetSchema
|
||||
);
|
||||
const sourceField = sourceTable?.fields.find(
|
||||
(field) => field.name === fk.column
|
||||
);
|
||||
const targetField = targetTable?.fields.find(
|
||||
(field) => field.name === fk.reference_column
|
||||
);
|
||||
|
||||
const isSourceTablePKComplex =
|
||||
(sourceTable?.fields.filter((field) => field.primaryKey) ?? [])
|
||||
.length > 1;
|
||||
const isTargetTablePKComplex =
|
||||
(targetTable?.fields.filter((field) => field.primaryKey) ?? [])
|
||||
.length > 1;
|
||||
|
||||
if (sourceTable && targetTable && sourceField && targetField) {
|
||||
const sourceCardinality = determineCardinality(
|
||||
sourceField,
|
||||
isSourceTablePKComplex
|
||||
);
|
||||
const targetCardinality = determineCardinality(
|
||||
targetField,
|
||||
isTargetTablePKComplex
|
||||
);
|
||||
|
||||
return {
|
||||
id: generateId(),
|
||||
name: fk.foreign_key_name,
|
||||
sourceSchema: schema,
|
||||
targetSchema: targetSchema,
|
||||
sourceTableId: sourceTable.id,
|
||||
targetTableId: targetTable.id,
|
||||
sourceFieldId: sourceField.id,
|
||||
targetFieldId: targetField.id,
|
||||
sourceCardinality,
|
||||
targetCardinality,
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
})
|
||||
.filter((rel) => rel !== null) as DBRelationship[];
|
||||
};
|
||||
|
||||
export const determineRelationshipType = ({
|
||||
sourceCardinality,
|
||||
targetCardinality,
|
||||
|
||||
@@ -1,30 +1,8 @@
|
||||
import {
|
||||
createIndexesFromMetadata,
|
||||
dbIndexSchema,
|
||||
type DBIndex,
|
||||
} from './db-index';
|
||||
import {
|
||||
createFieldsFromMetadata,
|
||||
dbFieldSchema,
|
||||
type DBField,
|
||||
} from './db-field';
|
||||
import type { TableInfo } from '../data/import-metadata/metadata-types/table-info';
|
||||
import { createAggregatedIndexes } from '../data/import-metadata/metadata-types/index-info';
|
||||
import {
|
||||
materializedViewColor,
|
||||
viewColor,
|
||||
defaultTableColor,
|
||||
} from '@/lib/colors';
|
||||
import { dbIndexSchema, type DBIndex } from './db-index';
|
||||
import { dbFieldSchema, type DBField } from './db-field';
|
||||
import type { DBRelationship } from './db-relationship';
|
||||
import {
|
||||
decodeBase64ToUtf16LE,
|
||||
decodeBase64ToUtf8,
|
||||
deepCopy,
|
||||
generateId,
|
||||
} from '../utils';
|
||||
import { deepCopy } from '../utils';
|
||||
import { schemaNameToDomainSchemaName } from './db-schema';
|
||||
import { DatabaseType } from './database-type';
|
||||
import type { DatabaseMetadata } from '../data/import-metadata/metadata-types/database-metadata';
|
||||
import { z } from 'zod';
|
||||
import type { Area } from './area';
|
||||
|
||||
@@ -79,213 +57,6 @@ export const generateTableKey = ({
|
||||
tableName: string;
|
||||
}) => `${schemaNameToDomainSchemaName(schemaName) ?? ''}.${tableName}`;
|
||||
|
||||
export const decodeViewDefinition = (
|
||||
databaseType: DatabaseType,
|
||||
viewDefinition?: string
|
||||
): string => {
|
||||
if (!viewDefinition) {
|
||||
return '';
|
||||
}
|
||||
|
||||
let decodedViewDefinition: string;
|
||||
if (databaseType === DatabaseType.SQL_SERVER) {
|
||||
decodedViewDefinition = decodeBase64ToUtf16LE(viewDefinition);
|
||||
} else {
|
||||
decodedViewDefinition = decodeBase64ToUtf8(viewDefinition);
|
||||
}
|
||||
|
||||
return decodedViewDefinition;
|
||||
};
|
||||
|
||||
export const createTablesFromMetadata = ({
|
||||
databaseMetadata,
|
||||
databaseType,
|
||||
}: {
|
||||
databaseMetadata: DatabaseMetadata;
|
||||
databaseType: DatabaseType;
|
||||
}): DBTable[] => {
|
||||
const {
|
||||
tables: tableInfos,
|
||||
pk_info: primaryKeys,
|
||||
columns,
|
||||
indexes,
|
||||
views: views,
|
||||
} = databaseMetadata;
|
||||
|
||||
// Pre-compute view names for faster lookup if there are views
|
||||
const viewNamesSet = new Set<string>();
|
||||
const materializedViewNamesSet = new Set<string>();
|
||||
|
||||
if (views && views.length > 0) {
|
||||
views.forEach((view) => {
|
||||
const key = generateTableKey({
|
||||
schemaName: view.schema,
|
||||
tableName: view.view_name,
|
||||
});
|
||||
viewNamesSet.add(key);
|
||||
|
||||
if (
|
||||
view.view_definition &&
|
||||
                decodeViewDefinition(databaseType, view.view_definition)
                    .toLowerCase()
                    .includes('materialized')
            ) {
                materializedViewNamesSet.add(key);
            }
        });
    }

    // Pre-compute lookup maps for better performance
    const columnsByTable = new Map<string, (typeof columns)[0][]>();
    const indexesByTable = new Map<string, (typeof indexes)[0][]>();
    const primaryKeysByTable = new Map<string, (typeof primaryKeys)[0][]>();

    // Group columns by table
    columns.forEach((col) => {
        const key = generateTableKey({
            schemaName: col.schema,
            tableName: col.table,
        });
        if (!columnsByTable.has(key)) {
            columnsByTable.set(key, []);
        }
        columnsByTable.get(key)!.push(col);
    });

    // Group indexes by table
    indexes.forEach((idx) => {
        const key = generateTableKey({
            schemaName: idx.schema,
            tableName: idx.table,
        });
        if (!indexesByTable.has(key)) {
            indexesByTable.set(key, []);
        }
        indexesByTable.get(key)!.push(idx);
    });

    // Group primary keys by table
    primaryKeys.forEach((pk) => {
        const key = generateTableKey({
            schemaName: pk.schema,
            tableName: pk.table,
        });
        if (!primaryKeysByTable.has(key)) {
            primaryKeysByTable.set(key, []);
        }
        primaryKeysByTable.get(key)!.push(pk);
    });

    const result = tableInfos.map((tableInfo: TableInfo) => {
        const tableSchema = schemaNameToDomainSchemaName(tableInfo.schema);
        const tableKey = generateTableKey({
            schemaName: tableInfo.schema,
            tableName: tableInfo.table,
        });

        // Use pre-computed lookups instead of filtering entire arrays
        const tableIndexes = indexesByTable.get(tableKey) || [];
        const tablePrimaryKeys = primaryKeysByTable.get(tableKey) || [];
        const tableColumns = columnsByTable.get(tableKey) || [];

        // Aggregate indexes with multiple columns
        const aggregatedIndexes = createAggregatedIndexes({
            tableInfo,
            tableSchema,
            tableIndexes,
        });

        const fields = createFieldsFromMetadata({
            aggregatedIndexes,
            tableColumns,
            tablePrimaryKeys,
            tableInfo,
            tableSchema,
        });

        // Check for composite primary key and find matching index name
        const primaryKeyFields = fields.filter((f) => f.primaryKey);
        let pkMatchingIndexName: string | undefined;
        let pkIndex: DBIndex | undefined;

        if (primaryKeyFields.length >= 1) {
            // We have a composite primary key, look for an index that matches all PK columns
            const pkFieldNames = primaryKeyFields.map((f) => f.name).sort();

            // Find an index that matches the primary key columns exactly
            const matchingIndex = aggregatedIndexes.find((index) => {
                const indexColumnNames = index.columns
                    .map((c) => c.name)
                    .sort();
                return (
                    indexColumnNames.length === pkFieldNames.length &&
                    indexColumnNames.every((col, i) => col === pkFieldNames[i])
                );
            });

            if (matchingIndex) {
                pkMatchingIndexName = matchingIndex.name;
                // Create a special PK index
                pkIndex = {
                    id: generateId(),
                    name: matchingIndex.name,
                    unique: true,
                    fieldIds: primaryKeyFields.map((f) => f.id),
                    createdAt: Date.now(),
                    isPrimaryKey: true,
                };
            }
        }

        // Filter out the index that matches the composite PK (to avoid duplication)
        const filteredAggregatedIndexes = pkMatchingIndexName
            ? aggregatedIndexes.filter(
                  (idx) => idx.name !== pkMatchingIndexName
              )
            : aggregatedIndexes;

        const dbIndexes = createIndexesFromMetadata({
            aggregatedIndexes: filteredAggregatedIndexes,
            fields,
        });

        // Add the PK index if it exists
        if (pkIndex) {
            dbIndexes.push(pkIndex);
        }

        // Determine if the current table is a view by checking against pre-computed sets
        const viewKey = generateTableKey({
            schemaName: tableSchema,
            tableName: tableInfo.table,
        });
        const isView = viewNamesSet.has(viewKey);
        const isMaterializedView = materializedViewNamesSet.has(viewKey);

        // Initial random positions; these will be adjusted later
        return {
            id: generateId(),
            name: tableInfo.table,
            schema: tableSchema,
            x: Math.random() * 1000, // Placeholder X
            y: Math.random() * 800, // Placeholder Y
            fields,
            indexes: dbIndexes,
            color: isMaterializedView
                ? materializedViewColor
                : isView
                  ? viewColor
                  : defaultTableColor,
            isView: isView,
            isMaterializedView: isMaterializedView,
            createdAt: Date.now(),
            comments: tableInfo.comment ? tableInfo.comment : undefined,
        };
    });

    return result;
};

export const adjustTablePositions = ({
    relationships: inputRelationships,
    tables: inputTables,

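// --- Editor's note: illustrative sketch, not part of the diff above. ---
// generateTableKey is used throughout the grouping logic above, but its body is
// not shown in this compare view. A minimal implementation consistent with how
// the key is used (one Map entry per schema/table pair) could look like the
// hypothetical helper below; the real function in the repository may differ.
const generateTableKeySketch = ({
    schemaName,
    tableName,
}: {
    schemaName?: string | null;
    tableName: string;
}): string => `${schemaName ?? ''}.${tableName}`;
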
@@ -1,30 +1,15 @@
import { z } from 'zod';
import type { DatabaseMetadata } from '../data/import-metadata/metadata-types/database-metadata';
import { DatabaseEdition } from './database-edition';
import { DatabaseType } from './database-type';
import type { DBDependency } from './db-dependency';
import {
    createDependenciesFromMetadata,
    dbDependencySchema,
} from './db-dependency';
import { dbDependencySchema } from './db-dependency';
import type { DBRelationship } from './db-relationship';
import {
    createRelationshipsFromMetadata,
    dbRelationshipSchema,
} from './db-relationship';
import { dbRelationshipSchema } from './db-relationship';
import type { DBTable } from './db-table';
import {
    adjustTablePositions,
    createTablesFromMetadata,
    dbTableSchema,
} from './db-table';
import { generateDiagramId } from '@/lib/utils';
import { dbTableSchema } from './db-table';
import { areaSchema, type Area } from './area';
import type { DBCustomType } from './db-custom-type';
import {
    dbCustomTypeSchema,
    createCustomTypesFromMetadata,
} from './db-custom-type';
import { dbCustomTypeSchema } from './db-custom-type';

export interface Diagram {
    id: string;
@@ -53,77 +38,3 @@ export const diagramSchema: z.ZodType<Diagram> = z.object({
    createdAt: z.date(),
    updatedAt: z.date(),
});

export const loadFromDatabaseMetadata = async ({
    databaseType,
    databaseMetadata,
    diagramNumber,
    databaseEdition,
}: {
    databaseType: DatabaseType;
    databaseMetadata: DatabaseMetadata;
    diagramNumber?: number;
    databaseEdition?: DatabaseEdition;
}): Promise<Diagram> => {
    const {
        fk_info: foreignKeys,
        views: views,
        custom_types: customTypes,
    } = databaseMetadata;

    const tables = createTablesFromMetadata({
        databaseMetadata,
        databaseType,
    });

    const relationships = createRelationshipsFromMetadata({
        foreignKeys,
        tables,
    });

    const dependencies = await createDependenciesFromMetadata({
        views,
        tables,
        databaseType,
    });

    const dbCustomTypes = customTypes
        ? createCustomTypesFromMetadata({
              customTypes,
          })
        : [];

    const adjustedTables = adjustTablePositions({
        tables,
        relationships,
        mode: 'perSchema',
    });

    const sortedTables = adjustedTables.sort((a, b) => {
        if (a.isView === b.isView) {
            // Both are either tables or views, so sort alphabetically by name
            return a.name.localeCompare(b.name);
        }
        // If one is a view and the other is not, put tables first
        return a.isView ? 1 : -1;
    });

    const diagram: Diagram = {
        id: generateDiagramId(),
        name: databaseMetadata.database_name
            ? `${databaseMetadata.database_name}-db`
            : diagramNumber
              ? `Diagram ${diagramNumber}`
              : 'New Diagram',
        databaseType: databaseType ?? DatabaseType.GENERIC,
        databaseEdition,
        tables: sortedTables,
        relationships,
        dependencies,
        customTypes: dbCustomTypes,
        createdAt: new Date(),
        updatedAt: new Date(),
    };

    return diagram;
};

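// --- Editor's note: illustrative usage sketch, not part of the diff above. ---
// How loadFromDatabaseMetadata, with the signature shown above, would be called.
// `metadata` is assumed to be a DatabaseMetadata object produced by the existing
// import-metadata parsing step; DatabaseType.GENERIC is taken from the code
// above, and the helper name importDiagram is hypothetical.
const importDiagram = async (metadata: DatabaseMetadata): Promise<Diagram> => {
    const diagram = await loadFromDatabaseMetadata({
        databaseType: DatabaseType.GENERIC,
        databaseMetadata: metadata,
        diagramNumber: 1,
    });
    return diagram;
};
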
@@ -57,3 +57,40 @@ export type DiffObject<
    | FieldDiff<TField>['object']
    | IndexDiff<TIndex>['object']
    | RelationshipDiff<TRelationship>['object'];

type ExtractDiffKind<T> = T extends { object: infer O; type: infer Type }
    ? T extends { attribute: infer A }
        ? { object: O; type: Type; attribute: A }
        : { object: O; type: Type }
    : never;

export type DiffKind<
    TTable = DBTable,
    TField = DBField,
    TIndex = DBIndex,
    TRelationship = DBRelationship,
> = ExtractDiffKind<ChartDBDiff<TTable, TField, TIndex, TRelationship>>;

export const isDiffOfKind = <
    TTable = DBTable,
    TField = DBField,
    TIndex = DBIndex,
    TRelationship = DBRelationship,
>(
    diff: ChartDBDiff<TTable, TField, TIndex, TRelationship>,
    kind: DiffKind<TTable, TField, TIndex, TRelationship>
): boolean => {
    if ('attribute' in kind) {
        return (
            diff.object === kind.object &&
            diff.type === kind.type &&
            diff.attribute === kind.attribute
        );
    }

    if ('attribute' in diff) {
        return false;
    }

    return diff.object === kind.object && diff.type === kind.type;
};

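// --- Editor's note: illustrative usage sketch, not part of the diff above. ---
// Narrowing a list of diffs with isDiffOfKind as declared above. Only the types
// shown in this hunk (ChartDBDiff, DiffKind) are used; the helper name
// filterDiffsOfKind is hypothetical.
const filterDiffsOfKind = <TTable, TField, TIndex, TRelationship>(
    diffs: ChartDBDiff<TTable, TField, TIndex, TRelationship>[],
    kind: DiffKind<TTable, TField, TIndex, TRelationship>
) => diffs.filter((diff) => isDiffOfKind(diff, kind));
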
@@ -0,0 +1,54 @@
import {
    ContextMenu,
    ContextMenuContent,
    ContextMenuItem,
    ContextMenuTrigger,
} from '@/components/context-menu/context-menu';
import { useBreakpoint } from '@/hooks/use-breakpoint';
import { useChartDB } from '@/hooks/use-chartdb';
import type { Area } from '@/lib/domain/area';
import { Pencil, Trash2 } from 'lucide-react';
import React, { useCallback } from 'react';

export interface AreaNodeContextMenuProps {
    area: Area;
    onEditName?: () => void;
}

export const AreaNodeContextMenu: React.FC<
    React.PropsWithChildren<AreaNodeContextMenuProps>
> = ({ children, area, onEditName }) => {
    const { removeArea, readonly } = useChartDB();
    const { isMd: isDesktop } = useBreakpoint('md');

    const removeAreaHandler = useCallback(() => {
        removeArea(area.id);
    }, [removeArea, area.id]);

    if (!isDesktop || readonly) {
        return <>{children}</>;
    }
    return (
        <ContextMenu>
            <ContextMenuTrigger>{children}</ContextMenuTrigger>
            <ContextMenuContent>
                {onEditName && (
                    <ContextMenuItem
                        onClick={onEditName}
                        className="flex justify-between gap-3"
                    >
                        <span>Edit Area Name</span>
                        <Pencil className="size-3.5" />
                    </ContextMenuItem>
                )}
                <ContextMenuItem
                    onClick={removeAreaHandler}
                    className="flex justify-between gap-3"
                >
                    <span>Delete Area</span>
                    <Trash2 className="size-3.5 text-red-700" />
                </ContextMenuItem>
            </ContextMenuContent>
        </ContextMenu>
    );
};

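// --- Editor's note: illustrative usage sketch, not part of the diff above. ---
// area-node.tsx (further down in this compare view) wraps the area markup in
// this context menu so right-click exposes rename/delete. A minimal standalone
// usage, with the wrapper component AreaWithMenu being hypothetical:
import type { Area } from '@/lib/domain/area';
import React from 'react';
import { AreaNodeContextMenu } from './area-node-context-menu';

export const AreaWithMenu: React.FC<{ area: Area }> = ({ area }) => (
    <AreaNodeContextMenu area={area} onEditName={() => console.log('edit')}>
        <div>{area.name}</div>
    </AreaNodeContextMenu>
);
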
@@ -1,10 +1,11 @@
|
||||
import React, { useCallback, useState } from 'react';
|
||||
import React, { useCallback, useEffect, useState } from 'react';
|
||||
import type { NodeProps, Node } from '@xyflow/react';
|
||||
import { NodeResizer } from '@xyflow/react';
|
||||
import type { Area } from '@/lib/domain/area';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import { Input } from '@/components/input/input';
|
||||
import { useClickAway, useKeyPressEvent } from 'react-use';
|
||||
import { useEditClickOutside } from '@/hooks/use-click-outside';
|
||||
import { useKeyPressEvent } from 'react-use';
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
@@ -12,9 +13,10 @@ import {
|
||||
} from '@/components/tooltip/tooltip';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { cn } from '@/lib/utils';
|
||||
import { Check, GripVertical } from 'lucide-react';
|
||||
import { Check, GripVertical, Pencil } from 'lucide-react';
|
||||
import { Button } from '@/components/button/button';
|
||||
import { useLayout } from '@/hooks/use-layout';
|
||||
import { AreaNodeContextMenu } from './area-node-context-menu';
|
||||
|
||||
export type AreaNodeType = Node<
|
||||
{
|
||||
@@ -35,12 +37,11 @@ export const AreaNode: React.FC<NodeProps<AreaNodeType>> = React.memo(
|
||||
const focused = !!selected && !dragging;
|
||||
|
||||
const editAreaName = useCallback(() => {
|
||||
if (!editMode) return;
|
||||
if (areaName.trim()) {
|
||||
updateArea(area.id, { name: areaName.trim() });
|
||||
}
|
||||
setEditMode(false);
|
||||
}, [areaName, area.id, updateArea, editMode]);
|
||||
}, [areaName, area.id, updateArea]);
|
||||
|
||||
const abortEdit = useCallback(() => {
|
||||
setEditMode(false);
|
||||
@@ -52,87 +53,119 @@ export const AreaNode: React.FC<NodeProps<AreaNodeType>> = React.memo(
|
||||
openAreaFromSidebar(area.id);
|
||||
}, [selectSidebarSection, openAreaFromSidebar, area.id]);
|
||||
|
||||
useClickAway(inputRef, editAreaName);
|
||||
// Handle click outside to save and exit edit mode
|
||||
useEditClickOutside(inputRef, editMode, editAreaName);
|
||||
useKeyPressEvent('Enter', editAreaName);
|
||||
useKeyPressEvent('Escape', abortEdit);
|
||||
|
||||
const enterEditMode = (e: React.MouseEvent) => {
|
||||
e.stopPropagation();
|
||||
setEditMode(true);
|
||||
};
|
||||
const enterEditMode = useCallback(
|
||||
(e?: React.MouseEvent) => {
|
||||
e?.stopPropagation();
|
||||
setAreaName(area.name);
|
||||
setEditMode(true);
|
||||
},
|
||||
[area.name]
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
if (editMode) {
|
||||
// Small delay to ensure the input is rendered
|
||||
const timeoutId = setTimeout(() => {
|
||||
if (inputRef.current) {
|
||||
inputRef.current.focus();
|
||||
inputRef.current.select();
|
||||
}
|
||||
}, 50);
|
||||
|
||||
return () => clearTimeout(timeoutId);
|
||||
}
|
||||
}, [editMode]);
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
'flex h-full flex-col rounded-md border-2 shadow-sm',
|
||||
selected ? 'border-pink-600' : 'border-transparent'
|
||||
)}
|
||||
style={{
|
||||
backgroundColor: `${area.color}15`,
|
||||
borderColor: selected ? undefined : area.color,
|
||||
}}
|
||||
onClick={(e) => {
|
||||
if (e.detail === 2) {
|
||||
openAreaInEditor();
|
||||
}
|
||||
}}
|
||||
>
|
||||
<NodeResizer
|
||||
isVisible={focused}
|
||||
lineClassName="!border-4 !border-transparent"
|
||||
handleClassName="!h-[10px] !w-[10px] !rounded-full !bg-pink-600"
|
||||
minHeight={100}
|
||||
minWidth={100}
|
||||
/>
|
||||
<div className="group flex h-8 items-center justify-between rounded-t-md px-2">
|
||||
<div className="flex w-full items-center gap-1">
|
||||
<GripVertical className="size-4 shrink-0 text-slate-700 opacity-60 dark:text-slate-300" />
|
||||
<AreaNodeContextMenu area={area} onEditName={enterEditMode}>
|
||||
<div
|
||||
className={cn(
|
||||
'flex h-full flex-col rounded-md border-2 shadow-sm',
|
||||
selected ? 'border-pink-600' : 'border-transparent'
|
||||
)}
|
||||
style={{
|
||||
backgroundColor: `${area.color}15`,
|
||||
borderColor: selected ? undefined : area.color,
|
||||
}}
|
||||
onClick={(e) => {
|
||||
if (e.detail === 2) {
|
||||
openAreaInEditor();
|
||||
}
|
||||
}}
|
||||
>
|
||||
{!readonly ? (
|
||||
<NodeResizer
|
||||
isVisible={focused}
|
||||
lineClassName="!border-4 !border-transparent"
|
||||
handleClassName="!h-[10px] !w-[10px] !rounded-full !bg-pink-600"
|
||||
minHeight={100}
|
||||
minWidth={100}
|
||||
/>
|
||||
) : null}
|
||||
<div className="group flex h-8 items-center justify-between rounded-t-md px-2">
|
||||
<div className="flex w-full items-center gap-1">
|
||||
<GripVertical className="size-4 shrink-0 text-slate-700 opacity-60 dark:text-slate-300" />
|
||||
|
||||
{editMode && !readonly ? (
|
||||
<div className="flex w-full items-center">
|
||||
<Input
|
||||
ref={inputRef}
|
||||
autoFocus
|
||||
type="text"
|
||||
placeholder={area.name}
|
||||
value={areaName}
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
onChange={(e) =>
|
||||
setAreaName(e.target.value)
|
||||
}
|
||||
className="h-6 bg-white/70 focus-visible:ring-0 dark:bg-slate-900/70"
|
||||
/>
|
||||
{editMode && !readonly ? (
|
||||
<div className="flex w-full items-center">
|
||||
<Input
|
||||
ref={inputRef}
|
||||
autoFocus
|
||||
type="text"
|
||||
placeholder={area.name}
|
||||
value={areaName}
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
onChange={(e) =>
|
||||
setAreaName(e.target.value)
|
||||
}
|
||||
className="h-6 bg-white/70 focus-visible:ring-0 dark:bg-slate-900/70"
|
||||
/>
|
||||
<Button
|
||||
variant="ghost"
|
||||
className="ml-1 size-6 p-0 hover:bg-white/20"
|
||||
onClick={editAreaName}
|
||||
>
|
||||
<Check className="size-3.5 text-slate-700 dark:text-slate-300" />
|
||||
</Button>
|
||||
</div>
|
||||
) : !readonly ? (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<div
|
||||
className="text-editable truncate px-1 py-0.5 text-base font-semibold text-slate-700 dark:text-slate-300"
|
||||
onDoubleClick={enterEditMode}
|
||||
>
|
||||
{area.name}
|
||||
</div>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
{t('tool_tips.double_click_to_edit')}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
) : (
|
||||
<div className="truncate px-1 py-0.5 text-base font-semibold text-slate-700 dark:text-slate-300">
|
||||
{area.name}
|
||||
</div>
|
||||
)}
|
||||
{!editMode && !readonly && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
className="ml-1 size-6 p-0 hover:bg-white/20"
|
||||
onClick={editAreaName}
|
||||
className="ml-auto size-5 p-0 opacity-0 transition-opacity hover:bg-white/20 group-hover:opacity-100"
|
||||
onClick={enterEditMode}
|
||||
>
|
||||
<Check className="size-3.5 text-slate-700 dark:text-slate-300" />
|
||||
<Pencil className="size-3 text-slate-700 dark:text-slate-300" />
|
||||
</Button>
|
||||
</div>
|
||||
) : !readonly ? (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<div
|
||||
className="text-editable max-w-[200px] cursor-text truncate px-1 py-0.5 text-base font-semibold text-slate-700 dark:text-slate-300"
|
||||
onDoubleClick={enterEditMode}
|
||||
>
|
||||
{area.name}
|
||||
</div>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
{t('tool_tips.double_click_to_edit')}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
) : (
|
||||
<div className="truncate px-1 py-0.5 text-base font-semibold text-slate-700 dark:text-slate-300">
|
||||
{area.name}
|
||||
</div>
|
||||
)}
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
<div className="flex-1" />
|
||||
</div>
|
||||
<div className="flex-1" />
|
||||
</div>
|
||||
</AreaNodeContextMenu>
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
@@ -28,6 +28,7 @@ import { FilterItemActions } from './filter-item-actions';
|
||||
import { databasesWithSchemas } from '@/lib/domain';
|
||||
import { getOperatingSystem } from '@/lib/utils';
|
||||
import { useLocalConfig } from '@/hooks/use-local-config';
|
||||
import { useDiff } from '@/context/diff-context/use-diff';
|
||||
|
||||
export interface CanvasFilterProps {
|
||||
onClose: () => void;
|
||||
@@ -36,6 +37,7 @@ export interface CanvasFilterProps {
|
||||
export const CanvasFilter: React.FC<CanvasFilterProps> = ({ onClose }) => {
|
||||
const { t } = useTranslation();
|
||||
const { tables, databaseType, areas } = useChartDB();
|
||||
const { checkIfNewTable } = useDiff();
|
||||
const {
|
||||
filter,
|
||||
toggleSchemaFilter,
|
||||
@@ -45,7 +47,7 @@ export const CanvasFilter: React.FC<CanvasFilterProps> = ({ onClose }) => {
|
||||
addTablesToFilter,
|
||||
removeTablesFromFilter,
|
||||
} = useDiagramFilter();
|
||||
const { fitView, setNodes } = useReactFlow();
|
||||
const { setNodes } = useReactFlow();
|
||||
const [searchQuery, setSearchQuery] = useState('');
|
||||
const [expanded, setExpanded] = useState<Record<string, boolean>>({});
|
||||
const [isFilterVisible, setIsFilterVisible] = useState(false);
|
||||
@@ -58,13 +60,14 @@ export const CanvasFilter: React.FC<CanvasFilterProps> = ({ onClose }) => {
|
||||
() =>
|
||||
tables
|
||||
.filter((table) => (showDBViews ? true : !table.isView))
|
||||
.filter((table) => !checkIfNewTable({ tableId: table.id }))
|
||||
.map((table) => ({
|
||||
id: table.id,
|
||||
name: table.name,
|
||||
schema: table.schema,
|
||||
parentAreaId: table.parentAreaId,
|
||||
})),
|
||||
[tables, showDBViews]
|
||||
[tables, showDBViews, checkIfNewTable]
|
||||
);
|
||||
|
||||
const databaseWithSchemas = useMemo(
|
||||
@@ -157,39 +160,53 @@ export const CanvasFilter: React.FC<CanvasFilterProps> = ({ onClose }) => {
|
||||
]
|
||||
);
|
||||
|
||||
const focusOnTable = useCallback(
|
||||
const selectTable = useCallback(
|
||||
(tableId: string) => {
|
||||
// Make sure the table is visible
|
||||
// Make sure the table is visible, selected and trigger animation
|
||||
setNodes((nodes) =>
|
||||
nodes.map((node) =>
|
||||
node.id === tableId
|
||||
? {
|
||||
...node,
|
||||
hidden: false,
|
||||
selected: true,
|
||||
}
|
||||
: {
|
||||
...node,
|
||||
selected: false,
|
||||
}
|
||||
)
|
||||
nodes.map((node) => {
|
||||
if (node.id === tableId) {
|
||||
return {
|
||||
...node,
|
||||
selected: true,
|
||||
data: {
|
||||
...node.data,
|
||||
highlightTable: true,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
...node,
|
||||
selected: false,
|
||||
data: {
|
||||
...node.data,
|
||||
highlightTable: false,
|
||||
},
|
||||
};
|
||||
})
|
||||
);
|
||||
|
||||
// Focus on the table
|
||||
// Remove the highlight flag after animation completes
|
||||
setTimeout(() => {
|
||||
fitView({
|
||||
duration: 500,
|
||||
maxZoom: 1,
|
||||
minZoom: 1,
|
||||
nodes: [
|
||||
{
|
||||
id: tableId,
|
||||
},
|
||||
],
|
||||
});
|
||||
}, 100);
|
||||
setNodes((nodes) =>
|
||||
nodes.map((node) => {
|
||||
if (node.id === tableId) {
|
||||
return {
|
||||
...node,
|
||||
data: {
|
||||
...node.data,
|
||||
highlightTable: false,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return node;
|
||||
})
|
||||
);
|
||||
}, 600);
|
||||
},
|
||||
[fitView, setNodes]
|
||||
[setNodes]
|
||||
);
|
||||
|
||||
// Handle node click
|
||||
@@ -199,13 +216,13 @@ export const CanvasFilter: React.FC<CanvasFilterProps> = ({ onClose }) => {
|
||||
const context = node.context as TableContext;
|
||||
const isTableVisible = context.visible;
|
||||
|
||||
// Only focus if table is visible
|
||||
// Only select if table is visible
|
||||
if (isTableVisible) {
|
||||
focusOnTable(node.id);
|
||||
selectTable(node.id);
|
||||
}
|
||||
}
|
||||
},
|
||||
[focusOnTable]
|
||||
[selectTable]
|
||||
);
|
||||
|
||||
// Animate in on mount and focus search input
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import React from 'react';
|
||||
import { Eye, EyeOff } from 'lucide-react';
|
||||
import { Eye, EyeOff, CircleDotDashed } from 'lucide-react';
|
||||
import { Button } from '@/components/button/button';
|
||||
import type { TreeNode } from '@/components/tree-view/tree';
|
||||
import { schemaNameToSchemaId } from '@/lib/domain/db-schema';
|
||||
import { useFocusOn } from '@/hooks/use-focus-on';
|
||||
import type {
|
||||
AreaContext,
|
||||
NodeContext,
|
||||
@@ -12,6 +13,7 @@ import type {
|
||||
TableContext,
|
||||
} from './types';
|
||||
import type { FilterTableInfo } from '@/lib/domain/diagram-filter/diagram-filter';
|
||||
import { cn } from '@/lib/utils';
|
||||
|
||||
interface FilterItemActionsProps {
|
||||
node: TreeNode<NodeType, NodeContext>;
|
||||
@@ -40,6 +42,7 @@ export const FilterItemActions: React.FC<FilterItemActionsProps> = ({
|
||||
addTablesToFilter,
|
||||
removeTablesFromFilter,
|
||||
}) => {
|
||||
const { focusOnArea, focusOnTable } = useFocusOn();
|
||||
if (node.type === 'schema') {
|
||||
const context = node.context as SchemaContext;
|
||||
const schemaVisible = context.visible;
|
||||
@@ -50,7 +53,7 @@ export const FilterItemActions: React.FC<FilterItemActionsProps> = ({
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="size-7 h-fit p-0"
|
||||
className="h-fit w-6 p-0"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
|
||||
@@ -67,9 +70,9 @@ export const FilterItemActions: React.FC<FilterItemActionsProps> = ({
|
||||
}}
|
||||
>
|
||||
{!schemaVisible ? (
|
||||
<EyeOff className="size-3.5 text-muted-foreground" />
|
||||
<EyeOff className="!size-3.5 text-muted-foreground" />
|
||||
) : (
|
||||
<Eye className="size-3.5" />
|
||||
<Eye className="!size-3.5" />
|
||||
)}
|
||||
</Button>
|
||||
);
|
||||
@@ -81,37 +84,60 @@ export const FilterItemActions: React.FC<FilterItemActionsProps> = ({
|
||||
const isUngrouped = context.isUngrouped;
|
||||
const areaId = context.id;
|
||||
|
||||
const handleZoomToArea = (e: React.MouseEvent) => {
|
||||
e.stopPropagation();
|
||||
if (!isUngrouped) {
|
||||
focusOnArea(areaId);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="size-7 h-fit p-0"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
// Toggle all tables in this area
|
||||
if (areaVisible) {
|
||||
// Hide all tables in this area
|
||||
removeTablesFromFilter({
|
||||
filterCallback: (table) =>
|
||||
(isUngrouped && !table.areaId) ||
|
||||
(!isUngrouped && table.areaId === areaId),
|
||||
});
|
||||
} else {
|
||||
// Show all tables in this area
|
||||
addTablesToFilter({
|
||||
filterCallback: (table) =>
|
||||
(isUngrouped && !table.areaId) ||
|
||||
(!isUngrouped && table.areaId === areaId),
|
||||
});
|
||||
}
|
||||
}}
|
||||
>
|
||||
{!areaVisible ? (
|
||||
<EyeOff className="size-3.5 text-muted-foreground" />
|
||||
) : (
|
||||
<Eye className="size-3.5" />
|
||||
)}
|
||||
</Button>
|
||||
<div className="flex h-full items-center gap-0.5">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className={cn(
|
||||
'flex h-fit w-6 items-center justify-center p-0 opacity-0 transition-opacity group-hover:opacity-100',
|
||||
{
|
||||
'!opacity-0': !areaVisible,
|
||||
}
|
||||
)}
|
||||
onClick={handleZoomToArea}
|
||||
disabled={!areaVisible}
|
||||
>
|
||||
<CircleDotDashed className="!size-3.5" />
|
||||
</Button>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="flex h-fit w-6 items-center justify-center p-0"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
// Toggle all tables in this area
|
||||
if (areaVisible) {
|
||||
// Hide all tables in this area
|
||||
removeTablesFromFilter({
|
||||
filterCallback: (table) =>
|
||||
(isUngrouped && !table.areaId) ||
|
||||
(!isUngrouped && table.areaId === areaId),
|
||||
});
|
||||
} else {
|
||||
// Show all tables in this area
|
||||
addTablesToFilter({
|
||||
filterCallback: (table) =>
|
||||
(isUngrouped && !table.areaId) ||
|
||||
(!isUngrouped && table.areaId === areaId),
|
||||
});
|
||||
}
|
||||
}}
|
||||
>
|
||||
{!areaVisible ? (
|
||||
<EyeOff className="!size-3.5 text-muted-foreground" />
|
||||
) : (
|
||||
<Eye className="!size-3.5" />
|
||||
)}
|
||||
</Button>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -120,22 +146,43 @@ export const FilterItemActions: React.FC<FilterItemActionsProps> = ({
|
||||
const context = node.context as TableContext;
|
||||
const tableVisible = context.visible;
|
||||
|
||||
const handleZoomToTable = (e: React.MouseEvent) => {
|
||||
e.stopPropagation();
|
||||
focusOnTable(tableId);
|
||||
};
|
||||
|
||||
return (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="size-7 h-fit p-0"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
toggleTableFilter(tableId);
|
||||
}}
|
||||
>
|
||||
{!tableVisible ? (
|
||||
<EyeOff className="size-3.5 text-muted-foreground" />
|
||||
) : (
|
||||
<Eye className="size-3.5" />
|
||||
)}
|
||||
</Button>
|
||||
<div className="flex h-full items-center gap-0.5">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className={cn(
|
||||
'flex h-fit w-6 items-center justify-center p-0 opacity-0 transition-opacity group-hover:opacity-100',
|
||||
{
|
||||
'!opacity-0': !tableVisible,
|
||||
}
|
||||
)}
|
||||
onClick={handleZoomToTable}
|
||||
disabled={!tableVisible}
|
||||
>
|
||||
<CircleDotDashed className="!size-3.5" />
|
||||
</Button>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="flex w-6 items-center justify-center p-0"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
toggleTableFilter(tableId);
|
||||
}}
|
||||
>
|
||||
{!tableVisible ? (
|
||||
<EyeOff className="!size-3.5 text-muted-foreground" />
|
||||
) : (
|
||||
<Eye className="!size-3.5" />
|
||||
)}
|
||||
</Button>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -40,13 +40,7 @@ import {
|
||||
} from './table-node/table-node-field';
|
||||
import { Toolbar } from './toolbar/toolbar';
|
||||
import { useToast } from '@/components/toast/use-toast';
|
||||
import {
|
||||
Pencil,
|
||||
LayoutGrid,
|
||||
AlertTriangle,
|
||||
Magnet,
|
||||
Highlighter,
|
||||
} from 'lucide-react';
|
||||
import { Pencil, AlertTriangle, Magnet, Highlighter } from 'lucide-react';
|
||||
import { Button } from '@/components/button/button';
|
||||
import { useLayout } from '@/hooks/use-layout';
|
||||
import { useBreakpoint } from '@/hooks/use-breakpoint';
|
||||
@@ -81,7 +75,6 @@ import {
|
||||
TOP_SOURCE_HANDLE_ID_PREFIX,
|
||||
} from './table-node/table-node-dependency-indicator';
|
||||
import type { DatabaseType } from '@/lib/domain/database-type';
|
||||
import { useAlert } from '@/context/alert-context/alert-context';
|
||||
import { useCanvas } from '@/hooks/use-canvas';
|
||||
import type { AreaNodeType } from './area-node/area-node';
|
||||
import { AreaNode } from './area-node/area-node';
|
||||
@@ -95,6 +88,8 @@ import type { DiagramFilter } from '@/lib/domain/diagram-filter/diagram-filter';
|
||||
import { useDiagramFilter } from '@/context/diagram-filter-context/use-diagram-filter';
|
||||
import { filterTable } from '@/lib/domain/diagram-filter/filter';
|
||||
import { defaultSchemas } from '@/lib/data/default-schemas';
|
||||
import { useDiff } from '@/context/diff-context/use-diff';
|
||||
import { useClickAway } from 'react-use';
|
||||
|
||||
const HIGHLIGHTED_EDGE_Z_INDEX = 1;
|
||||
const DEFAULT_EDGE_Z_INDEX = 0;
|
||||
@@ -124,16 +119,33 @@ const tableToTableNode = (
|
||||
databaseType,
|
||||
filterLoading,
|
||||
showDBViews,
|
||||
forceShow,
|
||||
}: {
|
||||
filter?: DiagramFilter;
|
||||
databaseType: DatabaseType;
|
||||
filterLoading: boolean;
|
||||
showDBViews?: boolean;
|
||||
forceShow?: boolean;
|
||||
}
|
||||
): TableNodeType => {
|
||||
// Always use absolute position for now
|
||||
const position = { x: table.x, y: table.y };
|
||||
|
||||
let hidden = false;
|
||||
|
||||
if (forceShow) {
|
||||
hidden = false;
|
||||
} else {
|
||||
hidden =
|
||||
!filterTable({
|
||||
table: { id: table.id, schema: table.schema },
|
||||
filter,
|
||||
options: { defaultSchema: defaultSchemas[databaseType] },
|
||||
}) ||
|
||||
filterLoading ||
|
||||
(!showDBViews && table.isView);
|
||||
}
|
||||
|
||||
return {
|
||||
id: table.id,
|
||||
type: 'table',
|
||||
@@ -143,14 +155,7 @@ const tableToTableNode = (
|
||||
isOverlapping: false,
|
||||
},
|
||||
width: table.width ?? MIN_TABLE_SIZE,
|
||||
hidden:
|
||||
!filterTable({
|
||||
table: { id: table.id, schema: table.schema },
|
||||
filter,
|
||||
options: { defaultSchema: defaultSchemas[databaseType] },
|
||||
}) ||
|
||||
filterLoading ||
|
||||
(!showDBViews && table.isView),
|
||||
hidden,
|
||||
};
|
||||
};
|
||||
|
||||
@@ -231,19 +236,26 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
const { showSidePanel } = useLayout();
|
||||
const { effectiveTheme } = useTheme();
|
||||
const { scrollAction, showDBViews, showMiniMapOnCanvas } = useLocalConfig();
|
||||
const { showAlert } = useAlert();
|
||||
const { isMd: isDesktop } = useBreakpoint('md');
|
||||
const [highlightOverlappingTables, setHighlightOverlappingTables] =
|
||||
useState(false);
|
||||
const {
|
||||
reorderTables,
|
||||
fitView,
|
||||
setOverlapGraph,
|
||||
overlapGraph,
|
||||
showFilter,
|
||||
setShowFilter,
|
||||
setEditTableModeTable,
|
||||
} = useCanvas();
|
||||
const { filter, loading: filterLoading } = useDiagramFilter();
|
||||
const { checkIfNewTable } = useDiff();
|
||||
|
||||
const shouldForceShowTable = useCallback(
|
||||
(tableId: string) => {
|
||||
return checkIfNewTable({ tableId });
|
||||
},
|
||||
[checkIfNewTable]
|
||||
);
|
||||
|
||||
const [isInitialLoadingNodes, setIsInitialLoadingNodes] = useState(true);
|
||||
|
||||
@@ -254,6 +266,7 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
databaseType,
|
||||
filterLoading,
|
||||
showDBViews,
|
||||
forceShow: shouldForceShowTable(table.id),
|
||||
})
|
||||
)
|
||||
);
|
||||
@@ -273,6 +286,7 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
databaseType,
|
||||
filterLoading,
|
||||
showDBViews,
|
||||
forceShow: shouldForceShowTable(table.id),
|
||||
})
|
||||
);
|
||||
if (equal(initialNodes, nodes)) {
|
||||
@@ -285,6 +299,7 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
databaseType,
|
||||
filterLoading,
|
||||
showDBViews,
|
||||
shouldForceShowTable,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
@@ -445,6 +460,7 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
databaseType,
|
||||
filterLoading,
|
||||
showDBViews,
|
||||
forceShow: shouldForceShowTable(table.id),
|
||||
});
|
||||
|
||||
// Check if table uses the highlighted custom type
|
||||
@@ -495,6 +511,7 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
highlightedCustomType,
|
||||
filterLoading,
|
||||
showDBViews,
|
||||
shouldForceShowTable,
|
||||
]);
|
||||
|
||||
const prevFilter = useRef<DiagramFilter | undefined>(undefined);
|
||||
@@ -1185,16 +1202,6 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
const isLoadingDOM =
|
||||
tables.length > 0 ? !getInternalNode(tables[0].id) : false;
|
||||
|
||||
const showReorderConfirmation = useCallback(() => {
|
||||
showAlert({
|
||||
title: t('reorder_diagram_alert.title'),
|
||||
description: t('reorder_diagram_alert.description'),
|
||||
actionLabel: t('reorder_diagram_alert.reorder'),
|
||||
closeLabel: t('reorder_diagram_alert.cancel'),
|
||||
onAction: reorderTables,
|
||||
});
|
||||
}, [t, showAlert, reorderTables]);
|
||||
|
||||
const hasOverlappingTables = useMemo(
|
||||
() =>
|
||||
Array.from(overlapGraph.graph).some(
|
||||
@@ -1208,6 +1215,13 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
setTimeout(() => setHighlightOverlappingTables(false), 600);
|
||||
}, []);
|
||||
|
||||
const containerRef = useRef<HTMLDivElement>(null);
|
||||
const exitEditTableMode = useCallback(
|
||||
() => setEditTableModeTable(null),
|
||||
[setEditTableModeTable]
|
||||
);
|
||||
useClickAway(containerRef, exitEditTableMode);
|
||||
|
||||
const shiftPressed = useKeyPress('Shift');
|
||||
const operatingSystem = getOperatingSystem();
|
||||
|
||||
@@ -1225,7 +1239,11 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
|
||||
return (
|
||||
<CanvasContextMenu>
|
||||
<div className="relative flex h-full" id="canvas">
|
||||
<div
|
||||
className="relative flex h-full"
|
||||
id="canvas"
|
||||
ref={containerRef}
|
||||
>
|
||||
<ReactFlow
|
||||
onlyRenderVisibleElements
|
||||
colorMode={effectiveTheme}
|
||||
@@ -1250,6 +1268,7 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
panOnScroll={scrollAction === 'pan'}
|
||||
snapToGrid={shiftPressed || snapToGridEnabled}
|
||||
snapGrid={[20, 20]}
|
||||
onPaneClick={exitEditTableMode}
|
||||
>
|
||||
<Controls
|
||||
position="top-left"
|
||||
@@ -1261,24 +1280,6 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
<div className="flex flex-col items-center gap-2 md:flex-row">
|
||||
{!readonly ? (
|
||||
<>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<span>
|
||||
<Button
|
||||
variant="secondary"
|
||||
className="size-8 p-1 shadow-none"
|
||||
onClick={
|
||||
showReorderConfirmation
|
||||
}
|
||||
>
|
||||
<LayoutGrid className="size-4" />
|
||||
</Button>
|
||||
</span>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
{t('toolbar.reorder_diagram')}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<span>
|
||||
|
||||
@@ -0,0 +1,180 @@
|
||||
import React, { useEffect } from 'react';
|
||||
import { KeyRound, Trash2 } from 'lucide-react';
|
||||
import { Input } from '@/components/input/input';
|
||||
import { generateDBFieldSuffix, type DBField } from '@/lib/domain/db-field';
|
||||
import type { DBTable } from '@/lib/domain';
|
||||
import { useUpdateTableField } from '@/hooks/use-update-table-field';
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
TooltipTrigger,
|
||||
} from '@/components/tooltip/tooltip';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { SelectBox } from '@/components/select-box/select-box';
|
||||
import { cn } from '@/lib/utils';
|
||||
import { TableFieldToggle } from './table-field-toggle';
|
||||
|
||||
export interface TableEditModeFieldProps {
|
||||
table: DBTable;
|
||||
field: DBField;
|
||||
focused?: boolean;
|
||||
}
|
||||
|
||||
export const TableEditModeField: React.FC<TableEditModeFieldProps> = React.memo(
|
||||
({ table, field, focused = false }) => {
|
||||
const { t } = useTranslation();
|
||||
const [showHighlight, setShowHighlight] = React.useState(false);
|
||||
|
||||
const {
|
||||
dataFieldOptions,
|
||||
handleDataTypeChange,
|
||||
handlePrimaryKeyToggle,
|
||||
handleNullableToggle,
|
||||
handleNameChange,
|
||||
generateFieldSuffix,
|
||||
fieldName,
|
||||
nullable,
|
||||
primaryKey,
|
||||
removeField,
|
||||
} = useUpdateTableField(table, field);
|
||||
|
||||
const inputRef = React.useRef<HTMLInputElement>(null);
|
||||
|
||||
// Animate the highlight after mount if focused
|
||||
useEffect(() => {
|
||||
if (focused) {
|
||||
const timer = setTimeout(() => {
|
||||
setShowHighlight(true);
|
||||
inputRef.current?.select();
|
||||
|
||||
setTimeout(() => {
|
||||
setShowHighlight(false);
|
||||
}, 2000);
|
||||
}, 200); // Small delay for the animation to be noticeable
|
||||
|
||||
return () => clearTimeout(timer);
|
||||
} else {
|
||||
setShowHighlight(false);
|
||||
}
|
||||
}, [focused]);
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
'flex flex-1 flex-row justify-between gap-2 p-1 transition-colors duration-1000 ease-out',
|
||||
{
|
||||
'bg-sky-100 dark:bg-sky-950': showHighlight,
|
||||
}
|
||||
)}
|
||||
>
|
||||
<div className="flex flex-1 items-center justify-start gap-1 overflow-hidden">
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<span className="min-w-0 flex-1">
|
||||
<Input
|
||||
ref={inputRef}
|
||||
className="h-8 w-full !truncate bg-background focus-visible:ring-0"
|
||||
type="text"
|
||||
placeholder={t(
|
||||
'side_panel.tables_section.table.field_name'
|
||||
)}
|
||||
value={fieldName}
|
||||
onChange={(e) =>
|
||||
handleNameChange(e.target.value)
|
||||
}
|
||||
autoFocus={focused}
|
||||
/>
|
||||
</span>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>{fieldName}</TooltipContent>
|
||||
</Tooltip>
|
||||
<Tooltip>
|
||||
<TooltipTrigger
|
||||
className="flex h-8 min-w-0 flex-1"
|
||||
asChild
|
||||
>
|
||||
<span>
|
||||
<SelectBox
|
||||
className="flex h-8 min-h-8 w-full bg-background"
|
||||
popoverClassName="min-w-[200px]"
|
||||
options={dataFieldOptions}
|
||||
placeholder={t(
|
||||
'side_panel.tables_section.table.field_type'
|
||||
)}
|
||||
value={field.type.id}
|
||||
valueSuffix={generateDBFieldSuffix(field)}
|
||||
optionSuffix={(option) =>
|
||||
generateFieldSuffix(option.value)
|
||||
}
|
||||
onChange={handleDataTypeChange}
|
||||
emptyPlaceholder={t(
|
||||
'side_panel.tables_section.table.no_types_found'
|
||||
)}
|
||||
commandOnClick={(e) => e.stopPropagation()}
|
||||
commandOnMouseDown={(e) =>
|
||||
e.stopPropagation()
|
||||
}
|
||||
/>
|
||||
</span>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
{field.type.name}
|
||||
{field.characterMaximumLength
|
||||
? `(${field.characterMaximumLength})`
|
||||
: ''}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</div>
|
||||
<div className="flex shrink-0 items-center justify-end gap-1">
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<span>
|
||||
<TableFieldToggle
|
||||
pressed={nullable}
|
||||
onPressedChange={handleNullableToggle}
|
||||
>
|
||||
N
|
||||
</TableFieldToggle>
|
||||
</span>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
{t('side_panel.tables_section.table.nullable')}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<span>
|
||||
<TableFieldToggle
|
||||
pressed={primaryKey}
|
||||
onPressedChange={handlePrimaryKeyToggle}
|
||||
>
|
||||
<KeyRound className="h-3.5" />
|
||||
</TableFieldToggle>
|
||||
</span>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
{t('side_panel.tables_section.table.primary_key')}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<span>
|
||||
<TableFieldToggle onPressedChange={removeField}>
|
||||
<Trash2 className="h-3.5 text-red-700" />
|
||||
</TableFieldToggle>
|
||||
</span>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
{t(
|
||||
'side_panel.tables_section.table.field_actions.delete_field'
|
||||
)}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
TableEditModeField.displayName = 'TableEditModeField';
|
||||
@@ -0,0 +1,205 @@
|
||||
import { Input } from '@/components/input/input';
|
||||
import type { DBTable } from '@/lib/domain';
|
||||
import { FileType2, X } from 'lucide-react';
|
||||
import React, { useEffect, useState, useRef, useCallback } from 'react';
|
||||
import { TableEditModeField } from './table-edit-mode-field';
|
||||
import { cn } from '@/lib/utils';
|
||||
import { ScrollArea } from '@/components/scroll-area/scroll-area';
|
||||
import { Button } from '@/components/button/button';
|
||||
import { ColorPicker } from '@/components/color-picker/color-picker';
|
||||
import { Separator } from '@/components/separator/separator';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import { useUpdateTable } from '@/hooks/use-update-table';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useClickOutside } from '@/hooks/use-click-outside';
|
||||
|
||||
export interface TableEditModeProps {
|
||||
table: DBTable;
|
||||
color: string;
|
||||
focusFieldId?: string;
|
||||
onClose: () => void;
|
||||
}
|
||||
|
||||
export const TableEditMode: React.FC<TableEditModeProps> = React.memo(
|
||||
({ table, color, focusFieldId: focusFieldIdProp, onClose }) => {
|
||||
const containerRef = useRef<HTMLDivElement>(null);
|
||||
const scrollAreaRef = useRef<HTMLDivElement>(null);
|
||||
const fieldRefs = useRef<Map<string, HTMLDivElement>>(new Map());
|
||||
const [isVisible, setIsVisible] = useState(false);
|
||||
const { createField, updateTable } = useChartDB();
|
||||
const { t } = useTranslation();
|
||||
const { tableName, handleTableNameChange } = useUpdateTable(table);
|
||||
const [focusFieldId, setFocusFieldId] = useState<string | undefined>(
|
||||
focusFieldIdProp
|
||||
);
|
||||
const inputRef = useRef<HTMLInputElement>(null);
|
||||
|
||||
useEffect(() => {
|
||||
setFocusFieldId(focusFieldIdProp);
|
||||
if (!focusFieldIdProp) {
|
||||
inputRef.current?.select();
|
||||
}
|
||||
}, [focusFieldIdProp]);
|
||||
|
||||
// Callback to store field refs
|
||||
const setFieldRef = useCallback((fieldId: string) => {
|
||||
return (element: HTMLDivElement | null) => {
|
||||
if (element) {
|
||||
fieldRefs.current.set(fieldId, element);
|
||||
} else {
|
||||
fieldRefs.current.delete(fieldId);
|
||||
}
|
||||
};
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
// Trigger animation after mount
|
||||
requestAnimationFrame(() => {
|
||||
setIsVisible(true);
|
||||
});
|
||||
}, []);
|
||||
|
||||
const scrollToFieldId = useCallback((fieldId: string) => {
|
||||
const fieldElement = fieldRefs.current.get(fieldId);
|
||||
if (fieldElement) {
|
||||
fieldElement.scrollIntoView({
|
||||
behavior: 'smooth',
|
||||
block: 'center',
|
||||
});
|
||||
}
|
||||
}, []);
|
||||
|
||||
// Scroll to focused field when component mounts
|
||||
useEffect(() => {
|
||||
if (focusFieldId) {
|
||||
scrollToFieldId(focusFieldId);
|
||||
}
|
||||
}, [focusFieldId, scrollToFieldId]);
|
||||
|
||||
// Handle wheel events: allow zoom to pass through, but handle scroll locally
|
||||
useEffect(() => {
|
||||
const handleWheel = (e: WheelEvent) => {
|
||||
// If Ctrl or Cmd is pressed, it's a zoom gesture - let it pass through to canvas
|
||||
if (e.ctrlKey || e.metaKey) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Otherwise, it's a scroll - stop propagation to prevent canvas panning
|
||||
e.stopPropagation();
|
||||
};
|
||||
|
||||
const scrollArea = scrollAreaRef.current;
|
||||
if (scrollArea) {
|
||||
// Use passive: false to allow preventDefault if needed
|
||||
scrollArea.addEventListener('wheel', handleWheel, {
|
||||
passive: false,
|
||||
});
|
||||
|
||||
return () => {
|
||||
scrollArea.removeEventListener('wheel', handleWheel);
|
||||
};
|
||||
}
|
||||
}, []);
|
||||
|
||||
const handleAddField = useCallback(async () => {
|
||||
const field = await createField(table.id);
|
||||
|
||||
if (field.id) {
|
||||
setFocusFieldId(field.id);
|
||||
}
|
||||
}, [createField, table.id]);
|
||||
|
||||
// Close edit mode when clicking outside
|
||||
useClickOutside(containerRef, onClose, isVisible);
|
||||
|
||||
const handleColorChange = useCallback(
|
||||
(newColor: string) => {
|
||||
updateTable(table.id, { color: newColor });
|
||||
},
|
||||
[updateTable, table.id]
|
||||
);
|
||||
|
||||
return (
|
||||
<div
|
||||
ref={containerRef}
|
||||
className={cn(
|
||||
'flex z-50 border-slate-500 dark:border-slate-700 flex-col border-2 bg-slate-50 dark:bg-slate-950 rounded-lg shadow-lg absolute left-1/2 top-1/2 -translate-x-1/2 -translate-y-1/2 cursor-auto transition-all duration-100 ease-out',
|
||||
{
|
||||
'opacity-100 scale-100': isVisible,
|
||||
'opacity-0 scale-95': !isVisible,
|
||||
}
|
||||
)}
|
||||
style={{
|
||||
minHeight: '300px',
|
||||
minWidth: '350px',
|
||||
height: 'max(calc(100% + 48px), 200px)',
|
||||
width: 'max(calc(100% + 48px), 300px)',
|
||||
}}
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
>
|
||||
<div
|
||||
className="h-2 rounded-t-[6px]"
|
||||
style={{ backgroundColor: color }}
|
||||
></div>
|
||||
<div className="group flex h-9 items-center justify-between gap-2 bg-slate-200 px-2 dark:bg-slate-900">
|
||||
<div className="flex min-w-0 flex-1 items-center gap-2">
|
||||
<ColorPicker
|
||||
color={color}
|
||||
onChange={handleColorChange}
|
||||
disabled={table.isView}
|
||||
popoverOnMouseDown={(e) => e.stopPropagation()}
|
||||
popoverOnClick={(e) => e.stopPropagation()}
|
||||
/>
|
||||
<Input
|
||||
ref={inputRef}
|
||||
className="h-6 flex-1 rounded-sm border-slate-600 bg-background text-sm"
|
||||
placeholder="Table name"
|
||||
value={tableName}
|
||||
onChange={(e) =>
|
||||
handleTableNameChange(e.target.value)
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="size-6 p-0 hover:bg-slate-300 dark:hover:bg-slate-700"
|
||||
onClick={onClose}
|
||||
>
|
||||
<X className="size-4" />
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
<ScrollArea ref={scrollAreaRef} className="nodrag flex-1 p-2">
|
||||
{table.fields.map((field) => (
|
||||
<div key={field.id} ref={setFieldRef(field.id)}>
|
||||
<TableEditModeField
|
||||
table={table}
|
||||
field={field}
|
||||
focused={focusFieldId === field.id}
|
||||
/>
|
||||
</div>
|
||||
))}
|
||||
</ScrollArea>
|
||||
|
||||
<Separator />
|
||||
<div className="flex items-center justify-between p-2">
|
||||
<Button
|
||||
variant="outline"
|
||||
className="h-8 p-2 text-xs"
|
||||
onClick={handleAddField}
|
||||
>
|
||||
<FileType2 className="mr-1 h-4" />
|
||||
{t('side_panel.tables_section.table.add_field')}
|
||||
</Button>
|
||||
<span className="text-xs font-medium text-muted-foreground">
|
||||
{table.fields.length}{' '}
|
||||
{t('side_panel.tables_section.table.fields')}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
TableEditMode.displayName = 'TableEditMode';
|
||||
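// --- Editor's note: illustrative sketch, not part of the diff above. ---
// The '@/hooks/use-click-outside' module referenced by area-node.tsx
// (useEditClickOutside(inputRef, editMode, editAreaName)) and table-edit-mode.tsx
// (useClickOutside(containerRef, onClose, isVisible)) is not shown in this
// compare view. Both call sites pass a ref, an "enabled" flag, and a handler
// (in differing argument orders). A minimal hook following the useClickOutside
// shape could look like this hypothetical sketch; the real hooks may differ.
import { useEffect, type RefObject } from 'react';

export const useClickOutsideSketch = (
    ref: RefObject<HTMLElement | null>,
    handler: () => void,
    enabled: boolean
) => {
    useEffect(() => {
        if (!enabled) return;
        const onPointerDown = (event: MouseEvent) => {
            // Only fire when the click lands outside the referenced element
            if (ref.current && !ref.current.contains(event.target as Node)) {
                handler();
            }
        };
        document.addEventListener('mousedown', onPointerDown);
        return () => document.removeEventListener('mousedown', onPointerDown);
    }, [ref, handler, enabled]);
};
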
Some files were not shown because too many files have changed in this diff.