Mirror of https://github.com/chartdb/chartdb.git (synced 2025-11-03 05:23:26 +00:00)

Compare commits: jf/add_sup ... jf/add_rea (51 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | ba280f0278 |  |
|  | 17abc3598f |  |
|  | aa3ec70f02 |  |
|  | 38fedcec0c |  |
|  | 498655e7b7 |  |
|  | bcd8aa9378 |  |
|  | b15bc945ac |  |
|  | c3c646bf7c |  |
|  | 57b3b8777f |  |
|  | bb033091b1 |  |
|  | c9ac8929c5 |  |
|  | c567c0a5f3 |  |
|  | 2dc1a6fc75 |  |
|  | 98f6edd5c8 |  |
|  | 47a7a73a13 |  |
|  | d71b46e8b5 |  |
|  | e4c4a3b354 |  |
|  | 1b8d51b73c |  |
|  | 93d72a896b |  |
|  | 9991077978 |  |
|  | bc82f9d6a8 |  |
|  | 26dc299cd2 |  |
|  | d6ba4a4074 |  |
|  | d09379e8be |  |
|  | bdc41c0b74 |  |
|  | d3dbf41894 |  |
|  | e6783a89cc |  |
|  | af3638da7a |  |
|  | 8954d893bb |  |
|  | 1a6688e85e |  |
|  | 5e81c1848a |  |
|  | 2bd9ca25b2 |  |
|  | b016a70691 |  |
|  | a0fb1ed08b |  |
|  | ffddcdcc98 |  |
|  | fe9ef275b8 |  |
|  | df89f0b6b9 |  |
|  | 534d2858af |  |
|  | 2a64deebb8 |  |
|  | e5e1d59327 |  |
|  | aa290615ca |  |
|  | ec6e46fe81 |  |
|  | ac128d67de |  |
|  | 07937a2f51 |  |
|  | d8e0bc7db8 |  |
|  | 1ce265781b |  |
|  | 60c5675cbf |  |
|  | 66b086378c |  |
|  | abd2a6ccbe |  |
|  | 459c5f1ce3 |  |
|  | 44be48ff3a |  |
CHANGELOG.md (89 changed lines)

@@ -1,5 +1,94 @@
# Changelog

## [1.16.0](https://github.com/chartdb/chartdb/compare/v1.15.1...v1.16.0) (2025-09-24)


### Features

* add area context menu and UI improvements ([#918](https://github.com/chartdb/chartdb/issues/918)) ([d09379e](https://github.com/chartdb/chartdb/commit/d09379e8be0fa3c83ca77ff62ae815fe4db9869b))
* add quick table mode on canvas ([#915](https://github.com/chartdb/chartdb/issues/915)) ([8954d89](https://github.com/chartdb/chartdb/commit/8954d893bbfee45bb311380115fb14ebbf3a3133))
* add zoom navigation buttons to canvas filter for tables and areas ([#903](https://github.com/chartdb/chartdb/issues/903)) ([a0fb1ed](https://github.com/chartdb/chartdb/commit/a0fb1ed08ba18b66354fa3498d610097a83d4afc))
* **import-db:** add DBML syntax to import database dialog ([#768](https://github.com/chartdb/chartdb/issues/768)) ([af3638d](https://github.com/chartdb/chartdb/commit/af3638da7a9b70f281ceaddbc2f712a713d90cda))


### Bug Fixes

* add areas width and height + table width to diff check ([#931](https://github.com/chartdb/chartdb/issues/931)) ([98f6edd](https://github.com/chartdb/chartdb/commit/98f6edd5c8a8e9130e892b2d841744e0cf63a7bf))
* add diff x,y ([#928](https://github.com/chartdb/chartdb/issues/928)) ([e4c4a3b](https://github.com/chartdb/chartdb/commit/e4c4a3b35484d9ece955a5aec577603dde73d634))
* add support for ALTER TABLE ADD COLUMN in PostgreSQL importer ([#892](https://github.com/chartdb/chartdb/issues/892)) ([ec6e46f](https://github.com/chartdb/chartdb/commit/ec6e46fe81ea1806c179c50a4c5779d8596008aa))
* add tests for diff ([#930](https://github.com/chartdb/chartdb/issues/930)) ([47a7a73](https://github.com/chartdb/chartdb/commit/47a7a73a137b87dfa6e67aff5f939cf64ccf4601))
* dbml edit mode glitch ([#925](https://github.com/chartdb/chartdb/issues/925)) ([93d72a8](https://github.com/chartdb/chartdb/commit/93d72a896bab9aa79d8ea2f876126887e432214c))
* dbml export default time bug ([#922](https://github.com/chartdb/chartdb/issues/922)) ([bc82f9d](https://github.com/chartdb/chartdb/commit/bc82f9d6a8fe4de2f7e0fc465e0a20c5dbf8f41d))
* dbml export renaming fields bug ([#921](https://github.com/chartdb/chartdb/issues/921)) ([26dc299](https://github.com/chartdb/chartdb/commit/26dc299cd28e9890d191c13f84a15ac38ae48b11))
* **dbml:** export array fields without quotes ([#911](https://github.com/chartdb/chartdb/issues/911)) ([5e81c18](https://github.com/chartdb/chartdb/commit/5e81c1848aaa911990e1e881d62525f5254d6d34))
* diff logic ([#927](https://github.com/chartdb/chartdb/issues/927)) ([1b8d51b](https://github.com/chartdb/chartdb/commit/1b8d51b73c4ed4b7c5929adcb17a44927c7defca))
* export dbml issues after upgrade version ([#883](https://github.com/chartdb/chartdb/issues/883)) ([07937a2](https://github.com/chartdb/chartdb/commit/07937a2f51708b1c10b45c2bd1f9a9acf5c3f708))
* export sql + import metadata lib ([#902](https://github.com/chartdb/chartdb/issues/902)) ([ffddcdc](https://github.com/chartdb/chartdb/commit/ffddcdcc987bacb0e0d7e8dea27d08d3a8c5a8c8))
* handle bidirectional relationships in DBML export ([#924](https://github.com/chartdb/chartdb/issues/924)) ([9991077](https://github.com/chartdb/chartdb/commit/99910779789a9c6ef113d06bc3de31e35b9b04d1))
* import dbml set pk field unique ([#920](https://github.com/chartdb/chartdb/issues/920)) ([d6ba4a4](https://github.com/chartdb/chartdb/commit/d6ba4a40749d85d2703f120600df4345dab3c561))
* improve SQL default value parsing for PostgreSQL, MySQL, and SQL Server with proper type handling and casting support ([#900](https://github.com/chartdb/chartdb/issues/900)) ([fe9ef27](https://github.com/chartdb/chartdb/commit/fe9ef275b8619dcfd7e57541a62a6237a16d29a8))
* move area utils ([#932](https://github.com/chartdb/chartdb/issues/932)) ([2dc1a6f](https://github.com/chartdb/chartdb/commit/2dc1a6fc7519e0a455b0e1306601195deb156c96))
* move auto arrange to toolbar ([#904](https://github.com/chartdb/chartdb/issues/904)) ([b016a70](https://github.com/chartdb/chartdb/commit/b016a70691bc22af5720b4de683e8c9353994fcc))
* remove general db creation ([#901](https://github.com/chartdb/chartdb/issues/901)) ([df89f0b](https://github.com/chartdb/chartdb/commit/df89f0b6b9ba3fcc8b05bae4f60c0dc4ad1d2215))
* remove many to many rel option ([#933](https://github.com/chartdb/chartdb/issues/933)) ([c567c0a](https://github.com/chartdb/chartdb/commit/c567c0a5f39157b2c430e92192b6750304d7a834))
* reset increment and default when change field ([#896](https://github.com/chartdb/chartdb/issues/896)) ([e5e1d59](https://github.com/chartdb/chartdb/commit/e5e1d5932762422ea63acfd6cf9fe4f03aa822f7))
* **sql-import:** handle SQL Server DDL with multiple tables, inline foreign keys, and case-insensitive field matching ([#897](https://github.com/chartdb/chartdb/issues/897)) ([2a64dee](https://github.com/chartdb/chartdb/commit/2a64deebb87a11ee3892024c3273d682bb86f7ef))
* **sql-import:** support ALTER TABLE ALTER COLUMN TYPE in PostgreSQL importer ([#895](https://github.com/chartdb/chartdb/issues/895)) ([aa29061](https://github.com/chartdb/chartdb/commit/aa290615caf806d7d0374c848d50b4636fde7e96))
* **sqlite:** improve parser to handle tables without column types and fix column detection ([#914](https://github.com/chartdb/chartdb/issues/914)) ([d3dbf41](https://github.com/chartdb/chartdb/commit/d3dbf41894d74f0ffce9afe3bd810f065aa53017))
* trigger edit table on canvas from context menu ([#919](https://github.com/chartdb/chartdb/issues/919)) ([bdc41c0](https://github.com/chartdb/chartdb/commit/bdc41c0b74d9d9918e7b6cd2152fa07c0c58ce60))
* update deps vulns ([#909](https://github.com/chartdb/chartdb/issues/909)) ([2bd9ca2](https://github.com/chartdb/chartdb/commit/2bd9ca25b2c7b1f053ff4fdc8c5cfc1b0e65901d))
* upgrade dbml lib ([#880](https://github.com/chartdb/chartdb/issues/880)) ([d8e0bc7](https://github.com/chartdb/chartdb/commit/d8e0bc7db8881971ddaea7177bcebee13cc865f6))

## [1.15.1](https://github.com/chartdb/chartdb/compare/v1.15.0...v1.15.1) (2025-08-27)


### Bug Fixes

* add actions menu to diagram list + add duplicate diagram ([#876](https://github.com/chartdb/chartdb/issues/876)) ([abd2a6c](https://github.com/chartdb/chartdb/commit/abd2a6ccbe1aa63db44ec28b3eff525cc5d3f8b0))
* **custom-types:** Make schema optional ([#866](https://github.com/chartdb/chartdb/issues/866)) ([60c5675](https://github.com/chartdb/chartdb/commit/60c5675cbfe205859d2d0c9848d8345a0a854671))
* handle quoted identifiers with special characters in SQL import/export and DBML generation ([#877](https://github.com/chartdb/chartdb/issues/877)) ([66b0863](https://github.com/chartdb/chartdb/commit/66b086378cd63347acab5fc7f13db7db4feaa872))

## [1.15.0](https://github.com/chartdb/chartdb/compare/v1.14.0...v1.15.0) (2025-08-26)


### Features

* add auto increment support for fields with database-specific export ([#851](https://github.com/chartdb/chartdb/issues/851)) ([c77c983](https://github.com/chartdb/chartdb/commit/c77c983989ae38a6b1139dd9015f4f3178d4e103))
* **filter:** filter tables by areas ([#836](https://github.com/chartdb/chartdb/issues/836)) ([e9c5442](https://github.com/chartdb/chartdb/commit/e9c5442d9df2beadad78187da3363bb6406636c4))
* include foreign keys inline in SQLite CREATE TABLE statements ([#833](https://github.com/chartdb/chartdb/issues/833)) ([43fc1d7](https://github.com/chartdb/chartdb/commit/43fc1d7fc26876b22c61405f6c3df89fc66b7992))
* **postgres:** add support hash index types ([#812](https://github.com/chartdb/chartdb/issues/812)) ([0d623a8](https://github.com/chartdb/chartdb/commit/0d623a86b1cb7cbd223e10ad23d09fc0e106c006))
* support create views ([#868](https://github.com/chartdb/chartdb/issues/868)) ([0a5874a](https://github.com/chartdb/chartdb/commit/0a5874a69b6323145430c1fb4e3482ac7da4916c))


### Bug Fixes

* area filter logic ([#861](https://github.com/chartdb/chartdb/issues/861)) ([73daf0d](https://github.com/chartdb/chartdb/commit/73daf0df2142a29c2eeebe60b43198bcca869026))
* **area filter:** fix dragging tables over filtered areas ([#842](https://github.com/chartdb/chartdb/issues/842)) ([19fd94c](https://github.com/chartdb/chartdb/commit/19fd94c6bde3a9ec749cd1ccacbedb6abc96d037))
* **canvas:** delete table + area together bug ([#859](https://github.com/chartdb/chartdb/issues/859)) ([b697e26](https://github.com/chartdb/chartdb/commit/b697e26170da95dcb427ff6907b6f663c98ba59f))
* **cla:** Harden action ([#867](https://github.com/chartdb/chartdb/issues/867)) ([ad8e344](https://github.com/chartdb/chartdb/commit/ad8e34483fdf4226de76c9e7768bc2ba9bf154de))
* DBML export error with multi-line table comments for SQL Server ([#852](https://github.com/chartdb/chartdb/issues/852)) ([0545b41](https://github.com/chartdb/chartdb/commit/0545b411407b2449220d10981a04c3e368a90ca3))
* filter to default schema on load new diagram ([#849](https://github.com/chartdb/chartdb/issues/849)) ([712bdf5](https://github.com/chartdb/chartdb/commit/712bdf5b958919d940c4f2a1c3b7c7e969990f02))
* **filter:** filter toggle issues with no schemas dbs ([#856](https://github.com/chartdb/chartdb/issues/856)) ([d0dee84](https://github.com/chartdb/chartdb/commit/d0dee849702161d979b4f589a7e6579fbaade22d))
* **filters:** refactor diagram filters - remove schema filter ([#832](https://github.com/chartdb/chartdb/issues/832)) ([4f1d329](https://github.com/chartdb/chartdb/commit/4f1d3295c09782ab46d82ce21b662032aa094f22))
* for sqlite import - add more types & include type parameters ([#834](https://github.com/chartdb/chartdb/issues/834)) ([5936500](https://github.com/chartdb/chartdb/commit/5936500ca00a57b3f161616264c26152a13c36d2))
* improve creating view to table dependency ([#874](https://github.com/chartdb/chartdb/issues/874)) ([44be48f](https://github.com/chartdb/chartdb/commit/44be48ff3ad1361279331c17364090b13af471a1))
* initially show filter when filter active ([#853](https://github.com/chartdb/chartdb/issues/853)) ([ab4845c](https://github.com/chartdb/chartdb/commit/ab4845c7728e6e0b2d852f8005921fd90630eef9))
* **menu:** clear file menu ([#843](https://github.com/chartdb/chartdb/issues/843)) ([eaebe34](https://github.com/chartdb/chartdb/commit/eaebe3476824af779214a354b3e991923a22f195))
* merge relationship & dependency sections to ref section ([#870](https://github.com/chartdb/chartdb/issues/870)) ([ec3719e](https://github.com/chartdb/chartdb/commit/ec3719ebce4664b2aa6e3322fb3337e72bc21015))
* move dbml into sections menu ([#862](https://github.com/chartdb/chartdb/issues/862)) ([2531a70](https://github.com/chartdb/chartdb/commit/2531a7023f36ef29e67c0da6bca4fd0346b18a51))
* open filter by default ([#863](https://github.com/chartdb/chartdb/issues/863)) ([7e0fdd1](https://github.com/chartdb/chartdb/commit/7e0fdd1595bffe29e769d29602d04f42edfe417e))
* preserve composite primary key constraint names across import/export workflows ([#869](https://github.com/chartdb/chartdb/issues/869)) ([215d579](https://github.com/chartdb/chartdb/commit/215d57979df2e91fa61988acff590daad2f4e771))
* prevent false change detection in DBML editor by stripping public schema on import ([#858](https://github.com/chartdb/chartdb/issues/858)) ([0aaa451](https://github.com/chartdb/chartdb/commit/0aaa451479911d047e4cc83f063afa68a122ba9b))
* remove unnecessary space ([#845](https://github.com/chartdb/chartdb/issues/845)) ([f1a4298](https://github.com/chartdb/chartdb/commit/f1a429836221aacdda73b91665bf33ffb011164c))
* reorder with areas ([#846](https://github.com/chartdb/chartdb/issues/846)) ([d7c9536](https://github.com/chartdb/chartdb/commit/d7c9536272cf1d42104b7064ea448d128d091a20))
* **select-box:** fix select box issue in dialog ([#840](https://github.com/chartdb/chartdb/issues/840)) ([cb2ba66](https://github.com/chartdb/chartdb/commit/cb2ba66233c8c04e2d963cf2d210499d8512a268))
* set default filter only if has more than 1 schemas ([#855](https://github.com/chartdb/chartdb/issues/855)) ([b4ccfcd](https://github.com/chartdb/chartdb/commit/b4ccfcdcde2f3565b0d3bbc46fa1715feb6cd925))
* show default schema first ([#854](https://github.com/chartdb/chartdb/issues/854)) ([1759b0b](https://github.com/chartdb/chartdb/commit/1759b0b9f271ed25f7c71f26c344e3f1d97bc5fb))
* **sidebar:** add titles to sidebar ([#844](https://github.com/chartdb/chartdb/issues/844)) ([b8f2141](https://github.com/chartdb/chartdb/commit/b8f2141bd2e67272030896fb4009a7925f9f09e4))
* **sql-import:** fix SQL Server foreign key parsing for tables without schema prefix ([#857](https://github.com/chartdb/chartdb/issues/857)) ([04d91c6](https://github.com/chartdb/chartdb/commit/04d91c67b1075e94948f75186878e633df7abbca))
* **table colors:** switch to default table color ([#841](https://github.com/chartdb/chartdb/issues/841)) ([0da3cae](https://github.com/chartdb/chartdb/commit/0da3caeeac37926dd22f38d98423611f39c0412a))
* update filter on adding table ([#838](https://github.com/chartdb/chartdb/issues/838)) ([41ba251](https://github.com/chartdb/chartdb/commit/41ba25137789dda25266178cd7c96ecbb37e62a4))

## [1.14.0](https://github.com/chartdb/chartdb/compare/v1.13.2...v1.14.0) (2025-08-04)

package-lock.json (generated, 281 changed lines)

Dependency version changes recorded in this diff:

| Package | Old | New |
|---|---|---|
| chartdb (lockfile root) | 1.14.0 | 1.16.0 |
| @babel/code-frame | 7.26.2 | 7.27.1 |
| @babel/helper-string-parser | 7.25.9 | 7.27.1 |
| @babel/helper-validator-identifier | 7.25.9 | 7.27.1 |
| @babel/helpers | 7.26.7 | 7.28.4 |
| @babel/parser | 7.26.7 | 7.28.4 |
| @babel/runtime | 7.26.7 | 7.28.4 |
| @babel/template | 7.25.9 | 7.27.2 |
| @babel/types | 7.26.7 | 7.28.4 |
| @dbml/core | 3.9.5 | 3.13.9 |
| @dbml/parse | 3.9.5 | 3.13.9 |
| @eslint-community/eslint-utils | 4.4.1 | 4.9.0 |
| @eslint/config-array | 0.19.2 | 0.21.0 |
| @eslint/core | 0.10.0 | 0.15.2 |
| @eslint/eslintrc | 3.2.0 | 3.3.1 |
| @eslint/js | 9.19.0 | 9.35.0 |
| @eslint/plugin-kit | 0.2.5 | 0.3.5 |
| @humanwhocodes/retry | 0.4.1 | 0.4.3 |
| acorn | 8.14.0 | 8.15.0 |
| brace-expansion | 1.1.11 | 1.1.12 |
| brace-expansion (nested under glob and @typescript-eslint/typescript-estree) | 2.0.1 | 2.0.2 |
| eslint | 9.19.0 | 9.35.0 |
| eslint-scope | 8.2.0 | 8.4.0 |
| eslint-visitor-keys (nested under eslint and espree) | 4.2.0 | 4.2.1 |
| espree | 10.3.0 | 10.4.0 |
| react-router | 7.1.5 | 7.8.2 |
| react-router-dom | 7.1.5 | 7.8.2 |
| vite | 5.4.14 | 5.4.20 |

Other lockfile changes:

* New entries: @eslint/config-helpers 0.3.1, lodash-es 4.17.21.
* Removed entries: @types/cookie 0.6.0, regenerator-runtime 0.14.1, turbo-stream 2.4.0.
* @babel/runtime no longer depends on regenerator-runtime; react-router 7.8.2 drops its turbo-stream dependency; @dbml/parse now depends on lodash-es instead of lodash.
* The root dependency range for @dbml/core moves from ^3.9.5 to ^3.13.9, and nested dependency ranges inside the packages above are updated to match (for example picocolors ^1.0.0 to ^1.1.1 in @babel/code-frame, and the @eslint/* ranges inside eslint, including the new @eslint/config-helpers ^0.3.1).

package.json

@@ -1,7 +1,7 @@
{
"name": "chartdb",
"private": true,
"version": "1.14.0",
"version": "1.16.0",
"type": "module",
"scripts": {
"dev": "vite",
@@ -17,7 +17,7 @@
},
"dependencies": {
"@ai-sdk/openai": "^0.0.51",
"@dbml/core": "^3.9.5",
"@dbml/core": "^3.13.9",
"@dnd-kit/sortable": "^8.0.0",
"@monaco-editor/react": "^4.6.0",
"@radix-ui/react-accordion": "^1.2.0",

@@ -11,18 +11,26 @@ import {
DropdownMenuItem,
DropdownMenuTrigger,
} from '@/components/dropdown-menu/dropdown-menu';
import {
Tooltip,
TooltipContent,
TooltipTrigger,
} from '@/components/tooltip/tooltip';

export interface ButtonAlternative {
label: string;
onClick: () => void;
disabled?: boolean;
icon?: React.ReactNode;
className?: string;
tooltip?: string;
}

export interface ButtonWithAlternativesProps
extends React.ButtonHTMLAttributes<HTMLButtonElement>,
VariantProps<typeof buttonVariants> {
asChild?: boolean;
alternatives: Array<{
label: string;
onClick: () => void;
disabled?: boolean;
icon?: React.ReactNode;
className?: string;
}>;
alternatives: Array<ButtonAlternative>;
dropdownTriggerClassName?: string;
chevronDownIconClassName?: string;
}
@@ -87,19 +95,36 @@ const ButtonWithAlternatives = React.forwardRef<
</button>
</DropdownMenuTrigger>
<DropdownMenuContent align="end">
{alternatives.map((alternative, index) => (
<DropdownMenuItem
key={index}
onClick={alternative.onClick}
disabled={alternative.disabled}
className={cn(alternative.className)}
>
<span className="flex w-full items-center justify-between gap-2">
{alternative.label}
{alternative.icon}
</span>
</DropdownMenuItem>
))}
{alternatives.map((alternative, index) => {
const menuItem = (
<DropdownMenuItem
key={index}
onClick={alternative.onClick}
disabled={alternative.disabled}
className={cn(alternative.className)}
>
<span className="flex w-full items-center justify-between gap-2">
{alternative.label}
{alternative.icon}
</span>
</DropdownMenuItem>
);

if (alternative.tooltip) {
return (
<Tooltip key={index}>
<TooltipTrigger asChild>
{menuItem}
</TooltipTrigger>
<TooltipContent side="left">
{alternative.tooltip}
</TooltipContent>
</Tooltip>
);
}

return menuItem;
})}
</DropdownMenuContent>
</DropdownMenu>
) : null}

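The rewritten dropdown loop above exists to support the new optional `tooltip` field on `ButtonAlternative`. A minimal usage sketch of how a consumer might pass it; the import path, labels, and handlers below are illustrative and not taken from the repository:

```tsx
import { ButtonWithAlternatives } from '@/components/button/button-with-alternatives'; // assumed path

export const ExportButton = () => (
    <ButtonWithAlternatives
        onClick={() => console.log('default export')}
        alternatives={[
            {
                label: 'Export as DBML',
                onClick: () => console.log('export dbml'),
                // New in this change: rendered in a tooltip to the left of the item.
                tooltip: 'Generates DBML for the current diagram',
            },
            {
                label: 'Export as SQL',
                onClick: () => console.log('export sql'),
                disabled: true,
            },
        ]}
    >
        Export
    </ButtonWithAlternatives>
);
```
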
@@ -38,7 +38,7 @@ export interface CodeSnippetProps {
className?: string;
code: string;
codeToCopy?: string;
language?: 'sql' | 'shell';
language?: 'sql' | 'shell' | 'dbml';
loading?: boolean;
autoScroll?: boolean;
isComplete?: boolean;

@@ -9,12 +9,14 @@ export const setupDBMLLanguage = (monaco: Monaco) => {
base: 'vs-dark',
inherit: true,
rules: [
{ token: 'comment', foreground: '6A9955' }, // Comments
{ token: 'keyword', foreground: '569CD6' }, // Table, Ref keywords
{ token: 'string', foreground: 'CE9178' }, // Strings
{ token: 'annotation', foreground: '9CDCFE' }, // [annotations]
{ token: 'delimiter', foreground: 'D4D4D4' }, // Braces {}
{ token: 'operator', foreground: 'D4D4D4' }, // Operators
{ token: 'datatype', foreground: '4EC9B0' }, // Data types
{ token: 'type', foreground: '4EC9B0' }, // Data types
{ token: 'identifier', foreground: '9CDCFE' }, // Field names
],
colors: {},
});
@@ -23,12 +25,14 @@ export const setupDBMLLanguage = (monaco: Monaco) => {
base: 'vs',
inherit: true,
rules: [
{ token: 'comment', foreground: '008000' }, // Comments
{ token: 'keyword', foreground: '0000FF' }, // Table, Ref keywords
{ token: 'string', foreground: 'A31515' }, // Strings
{ token: 'annotation', foreground: '001080' }, // [annotations]
{ token: 'delimiter', foreground: '000000' }, // Braces {}
{ token: 'operator', foreground: '000000' }, // Operators
{ token: 'type', foreground: '267F99' }, // Data types
{ token: 'identifier', foreground: '001080' }, // Field names
],
colors: {},
});
@@ -37,23 +41,59 @@ export const setupDBMLLanguage = (monaco: Monaco) => {
const datatypePattern = dataTypesNames.join('|');

monaco.languages.setMonarchTokensProvider('dbml', {
keywords: ['Table', 'Ref', 'Indexes', 'Note', 'Enum'],
keywords: ['Table', 'Ref', 'Indexes', 'Note', 'Enum', 'enum'],
datatypes: dataTypesNames,
operators: ['>', '<', '-'],

tokenizer: {
root: [
// Comments
[/\/\/.*$/, 'comment'],

// Keywords - case insensitive
[
/\b([Tt][Aa][Bb][Ll][Ee]|[Ee][Nn][Uu][Mm]|[Rr][Ee][Ff]|[Ii][Nn][Dd][Ee][Xx][Ee][Ss]|[Nn][Oo][Tt][Ee])\b/,
'keyword',
],

// Annotations in brackets
[/\[.*?\]/, 'annotation'],

// Strings
[/'''/, 'string', '@tripleQuoteString'],
[/".*?"/, 'string'],
[/'.*?'/, 'string'],
[/"([^"\\]|\\.)*$/, 'string.invalid'], // non-terminated string
[/'([^'\\]|\\.)*$/, 'string.invalid'], // non-terminated string
[/"/, 'string', '@string_double'],
[/'/, 'string', '@string_single'],
[/`.*?`/, 'string'],
[/[{}]/, 'delimiter'],
[/[<>]/, 'operator'],
[new RegExp(`\\b(${datatypePattern})\\b`, 'i'), 'type'], // Added 'i' flag for case-insensitive matching

// Delimiters and operators
[/[{}()]/, 'delimiter'],
[/[<>-]/, 'operator'],
[/:/, 'delimiter'],

// Data types
[new RegExp(`\\b(${datatypePattern})\\b`, 'i'), 'type'],

// Numbers
[/\d+/, 'number'],

// Identifiers
[/[a-zA-Z_]\w*/, 'identifier'],
],

string_double: [
[/[^\\"]+/, 'string'],
[/\\./, 'string.escape'],
[/"/, 'string', '@pop'],
],

string_single: [
[/[^\\']+/, 'string'],
[/\\./, 'string.escape'],
[/'/, 'string', '@pop'],
],

tripleQuoteString: [
[/[^']+/, 'string'],
[/'''/, 'string', '@pop'],

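For context, `setupDBMLLanguage` registers the Monarch tokenizer and themes above on a Monaco instance. A sketch of how such a setup function is typically wired into `@monaco-editor/react` (the `Editor` component and `beforeMount` hook are that library's API; the import path for `setupDBMLLanguage` is assumed):

```tsx
import { Editor, type Monaco } from '@monaco-editor/react';
import { setupDBMLLanguage } from '@/components/code-snippet/languages/dbml-language'; // assumed path

export const DBMLEditor = ({ value }: { value: string }) => (
    <Editor
        height="400px"
        defaultLanguage="dbml"
        value={value}
        // Register the DBML tokenizer and themes before the editor mounts.
        beforeMount={(monaco: Monaco) => setupDBMLLanguage(monaco)}
        options={{ minimap: { enabled: false }, readOnly: true }}
    />
);
```
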
@@ -5,27 +5,45 @@ import {
PopoverTrigger,
} from '@/components/popover/popover';
import { colorOptions } from '@/lib/colors';
import { cn } from '@/lib/utils';

export interface ColorPickerProps {
color: string;
onChange: (color: string) => void;
disabled?: boolean;
popoverOnMouseDown?: (e: React.MouseEvent) => void;
popoverOnClick?: (e: React.MouseEvent) => void;
}

export const ColorPicker = React.forwardRef<
React.ElementRef<typeof PopoverTrigger>,
ColorPickerProps
>(({ color, onChange }, ref) => {
>(({ color, onChange, disabled, popoverOnMouseDown, popoverOnClick }, ref) => {
return (
<Popover>
<PopoverTrigger asChild ref={ref}>
<PopoverTrigger
asChild
ref={ref}
disabled={disabled}
{...(disabled ? { onClick: (e) => e.preventDefault() } : {})}
>
<div
className="h-6 w-8 cursor-pointer rounded-md border-2 border-muted transition-shadow hover:shadow-md"
className={cn(
'h-6 w-8 cursor-pointer rounded-md border-2 border-muted transition-shadow hover:shadow-md',
{
'hover:shadow-none cursor-default': disabled,
}
)}
style={{
backgroundColor: color,
}}
/>
</PopoverTrigger>
<PopoverContent className="w-fit">
<PopoverContent
className="w-fit"
onMouseDown={popoverOnMouseDown}
onClick={popoverOnClick}
>
<div className="grid grid-cols-4 gap-2">
{colorOptions.map((option) => (
<div

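The picker now takes `disabled` plus optional handlers that are forwarded to the popover content, which helps when it is rendered inside another popover or a draggable canvas element. A usage sketch with an assumed import path:

```tsx
import { useState } from 'react';
import { ColorPicker } from '@/components/color-picker/color-picker'; // assumed path

export const TableColorField = ({ readonly }: { readonly: boolean }) => {
    const [color, setColor] = useState('#8eb7ff');

    return (
        <ColorPicker
            color={color}
            onChange={setColor}
            // New: a disabled picker renders the swatch but never opens the popover.
            disabled={readonly}
            // New: keep clicks inside the popover from bubbling to the surrounding canvas.
            popoverOnMouseDown={(e) => e.stopPropagation()}
            popoverOnClick={(e) => e.stopPropagation()}
        />
    );
};
```
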
@@ -27,6 +27,7 @@ export interface SelectBoxOption {
regex?: string;
extractRegex?: RegExp;
group?: string;
icon?: React.ReactNode;
}

export interface SelectBoxProps {
@@ -53,6 +54,11 @@ export interface SelectBoxProps {
open?: boolean;
onOpenChange?: (open: boolean) => void;
popoverClassName?: string;
readonly?: boolean;
footerButtons?: React.ReactNode;
commandOnMouseDown?: (e: React.MouseEvent) => void;
commandOnClick?: (e: React.MouseEvent) => void;
onSearchChange?: (search: string) => void;
}

export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
@@ -78,6 +84,11 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
open,
onOpenChange: setOpen,
popoverClassName,
readonly,
footerButtons,
commandOnMouseDown,
commandOnClick,
onSearchChange,
},
ref
) => {
@@ -152,18 +163,20 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
className={`inline-flex min-w-0 shrink-0 items-center gap-1 rounded-md border py-0.5 pl-2 pr-1 text-xs font-medium text-foreground transition-colors focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 ${oneLine ? 'mx-0.5' : ''}`}
>
<span>{option.label}</span>
<span
onClick={(e) => {
e.preventDefault();
handleSelect(option.value);
}}
className="flex items-center rounded-sm px-px text-muted-foreground/60 hover:bg-accent hover:text-muted-foreground"
>
<Cross2Icon />
</span>
{!readonly ? (
<span
onClick={(e) => {
e.preventDefault();
handleSelect(option.value);
}}
className="flex items-center rounded-sm px-px text-muted-foreground/60 hover:bg-accent hover:text-muted-foreground"
>
<Cross2Icon />
</span>
) : null}
</span>
)),
[options, value, handleSelect, oneLine, keepOrder]
[options, value, handleSelect, oneLine, keepOrder, readonly]
);

const isAllSelected = React.useMemo(
@@ -236,6 +249,8 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
matches?.map((match) => match?.toString())
)
}
onMouseDown={commandOnMouseDown}
onClick={commandOnClick}
>
{multiple && (
<div
@@ -250,6 +265,11 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
</div>
)}
<div className="flex flex-1 items-center truncate">
{option.icon ? (
<span className="mr-2 shrink-0">
{option.icon}
</span>
) : null}
<span>
{isRegexMatch ? searchTerm : option.label}
{!isRegexMatch && optionSuffix
@@ -276,7 +296,15 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
</CommandItem>
);
},
[value, multiple, searchTerm, handleSelect, optionSuffix]
[
value,
multiple,
searchTerm,
handleSelect,
optionSuffix,
commandOnClick,
commandOnMouseDown,
]
);

return (
@@ -284,7 +312,7 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
<PopoverTrigger asChild tabIndex={0} onKeyDown={handleKeyDown}>
<div
className={cn(
`flex min-h-[36px] cursor-pointer items-center justify-between rounded-md border px-3 py-1 data-[state=open]:border-ring ${disabled ? 'bg-muted pointer-events-none' : ''}`,
`flex min-h-[36px] cursor-pointer items-center justify-between rounded-md border px-3 py-1 data-[state=open]:border-ring ${disabled ? 'bg-muted pointer-events-none' : ''} ${readonly ? 'pointer-events-none' : ''}`,
className
)}
>
@@ -354,6 +382,8 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
popoverClassName
)}
align="center"
onMouseDown={(e) => e.stopPropagation()}
onClick={(e) => e.stopPropagation()}
>
<Command
filter={(value, search, keywords) => {
@@ -376,7 +406,10 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
<div className="relative">
<CommandInput
value={searchTerm}
onValueChange={(e) => setSearchTerm(e)}
onValueChange={(e) => {
setSearchTerm(e);
onSearchChange?.(e);
}}
ref={ref}
placeholder={inputPlaceholder ?? 'Search...'}
className="h-9"
@@ -443,6 +476,9 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
</div>
</ScrollArea>
</Command>
{footerButtons ? (
<div className="border-t">{footerButtons}</div>
) : null}
</PopoverContent>
</Popover>
);

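Taken together, the `SelectBox` changes add per-option icons, a `readonly` mode (chips lose their remove button and the trigger ignores pointer events), footer buttons under the option list, a search-change callback, and command-level mouse handlers. A sketch of how the new props might be combined; the import paths and option data are illustrative, and required props not shown in these hunks may differ:

```tsx
import { SelectBox } from '@/components/select-box/select-box'; // assumed path
import { Button } from '@/components/button/button'; // assumed path

export const SchemaSelect = ({ readonly }: { readonly: boolean }) => (
    <SelectBox
        multiple
        // New: chips render without their remove icon and the trigger is inert.
        readonly={readonly}
        options={[
            { value: 'public', label: 'public' },
            { value: 'sales', label: 'sales' },
        ]}
        // New: observe what the user types into the command input.
        onSearchChange={(search) => console.log('searching for', search)}
        // New: arbitrary actions rendered under the option list.
        footerButtons={<Button>Create schema…</Button>}
    />
);
```
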
@@ -14,6 +14,16 @@ export interface CanvasContext {
overlapGraph: Graph<string>;
setShowFilter: React.Dispatch<React.SetStateAction<boolean>>;
showFilter: boolean;
editTableModeTable: {
tableId: string;
fieldId?: string;
} | null;
setEditTableModeTable: React.Dispatch<
React.SetStateAction<{
tableId: string;
fieldId?: string;
} | null>
>;
}

export const canvasContext = createContext<CanvasContext>({
@@ -23,4 +33,6 @@ export const canvasContext = createContext<CanvasContext>({
overlapGraph: createGraph(),
setShowFilter: emptyFn,
showFilter: false,
editTableModeTable: null,
setEditTableModeTable: emptyFn,
});

@@ -33,6 +33,10 @@ export const CanvasProvider = ({ children }: CanvasProviderProps) => {
const { fitView } = useReactFlow();
const [overlapGraph, setOverlapGraph] =
useState<Graph<string>>(createGraph());
const [editTableModeTable, setEditTableModeTable] = useState<{
tableId: string;
fieldId?: string;
} | null>(null);

const [showFilter, setShowFilter] = useState(false);
const diagramIdActiveFilterRef = useRef<string>();
@@ -127,6 +131,8 @@ export const CanvasProvider = ({ children }: CanvasProviderProps) => {
overlapGraph,
setShowFilter,
showFilter,
editTableModeTable,
setEditTableModeTable,
}}
>
{children}

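The new `editTableModeTable` state lets canvas children request inline edit mode for a specific table and, optionally, one of its fields. A consumer sketch that imports the context object exported above (the import path is assumed):

```tsx
import { useContext } from 'react';
import { canvasContext } from '@/context/canvas-context/canvas-context'; // assumed path

export const useOpenTableInEditMode = () => {
    const { setEditTableModeTable } = useContext(canvasContext);

    // Request edit mode for a table, optionally focusing one of its fields.
    return (tableId: string, fieldId?: string) =>
        setEditTableModeTable({ tableId, fieldId });
};
```
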
@@ -41,8 +41,7 @@ export const ChartDBProvider: React.FC<
React.PropsWithChildren<ChartDBProviderProps>
> = ({ children, diagram, readonly: readonlyProp }) => {
const { hasDiff } = useDiff();
const dbStorage = useStorage();
let db = dbStorage;
const storageDB = useStorage();
const events = useEventEmitter<ChartDBEvent>();
const { addUndoAction, resetRedoStack, resetUndoStack } =
useRedoUndoStack();
@@ -75,10 +74,10 @@ export const ChartDBProvider: React.FC<
useState<string>();

const diffCalculatedHandler = useCallback((event: DiffCalculatedEvent) => {
const { tablesAdded, fieldsAdded, relationshipsAdded } = event.data;
const { tablesToAdd, fieldsToAdd, relationshipsToAdd } = event.data;
setTables((tables) =>
[...tables, ...(tablesAdded ?? [])].map((table) => {
const fields = fieldsAdded.get(table.id);
[...tables, ...(tablesToAdd ?? [])].map((table) => {
const fields = fieldsToAdd.get(table.id);
return fields
? { ...table, fields: [...table.fields, ...fields] }
: table;
@@ -86,7 +85,7 @@ export const ChartDBProvider: React.FC<
);
setRelationships((relationships) => [
...relationships,
...(relationshipsAdded ?? []),
...(relationshipsToAdd ?? []),
]);
}, []);

@@ -102,10 +101,6 @@ export const ChartDBProvider: React.FC<
[readonlyProp, hasDiff]
);

if (readonly) {
db = storageInitialValue;
}

const schemas = useMemo(
() =>
databasesWithSchemas.includes(databaseType)
@@ -134,6 +129,11 @@ export const ChartDBProvider: React.FC<
[tables, defaultSchemaName, databaseType]
);

const db = useMemo(
() => (readonly ? storageInitialValue : storageDB),
[storageDB, readonly]
);

const currentDiagram: Diagram = useMemo(
() => ({
id: diagramId,
@@ -350,6 +350,7 @@ export const ChartDBProvider: React.FC<
isView: false,
order: tables.length,
...attributes,
schema: attributes?.schema ?? defaultSchemas[databaseType],
};

table.indexes = getTableIndexesWithPrimaryKey({
@@ -1580,17 +1581,17 @@ export const ChartDBProvider: React.FC<

const updateDiagramData: ChartDBContext['updateDiagramData'] = useCallback(
async (diagram, options) => {
const st = options?.forceUpdateStorage ? dbStorage : db;
const st = options?.forceUpdateStorage ? storageDB : db;
await st.deleteDiagram(diagram.id);
await st.addDiagram({ diagram });
loadDiagramFromData(diagram);
},
[db, dbStorage, loadDiagramFromData]
[db, storageDB, loadDiagramFromData]
);

const loadDiagram: ChartDBContext['loadDiagram'] = useCallback(
async (diagramId: string) => {
const diagram = await db.getDiagram(diagramId, {
const diagram = await storageDB.getDiagram(diagramId, {
includeRelationships: true,
includeTables: true,
includeDependencies: true,
@@ -1604,7 +1605,7 @@ export const ChartDBProvider: React.FC<

return diagram;
},
[db, loadDiagramFromData]
[storageDB, loadDiagramFromData]
);

// Custom type operations

@@ -7,7 +7,6 @@ import type { ExportImageDialogProps } from '@/dialogs/export-image-dialog/expor
|
||||
import type { ExportDiagramDialogProps } from '@/dialogs/export-diagram-dialog/export-diagram-dialog';
|
||||
import type { ImportDiagramDialogProps } from '@/dialogs/import-diagram-dialog/import-diagram-dialog';
|
||||
import type { CreateRelationshipDialogProps } from '@/dialogs/create-relationship-dialog/create-relationship-dialog';
|
||||
import type { ImportDBMLDialogProps } from '@/dialogs/import-dbml-dialog/import-dbml-dialog';
|
||||
import type { OpenDiagramDialogProps } from '@/dialogs/open-diagram-dialog/open-diagram-dialog';
|
||||
import type { CreateDiagramDialogProps } from '@/dialogs/create-diagram-dialog/create-diagram-dialog';
|
||||
|
||||
@@ -67,12 +66,6 @@ export interface DialogContext {
|
||||
params: Omit<ImportDiagramDialogProps, 'dialog'>
|
||||
) => void;
|
||||
closeImportDiagramDialog: () => void;
|
||||
|
||||
// Import DBML dialog
|
||||
openImportDBMLDialog: (
|
||||
params?: Omit<ImportDBMLDialogProps, 'dialog'>
|
||||
) => void;
|
||||
closeImportDBMLDialog: () => void;
|
||||
}
|
||||
|
||||
export const dialogContext = createContext<DialogContext>({
|
||||
@@ -96,6 +89,4 @@ export const dialogContext = createContext<DialogContext>({
|
||||
closeExportDiagramDialog: emptyFn,
|
||||
openImportDiagramDialog: emptyFn,
|
||||
closeImportDiagramDialog: emptyFn,
|
||||
openImportDBMLDialog: emptyFn,
|
||||
closeImportDBMLDialog: emptyFn,
|
||||
});
|
||||
|
||||
@@ -20,8 +20,6 @@ import type { ExportImageDialogProps } from '@/dialogs/export-image-dialog/expor
|
||||
import { ExportImageDialog } from '@/dialogs/export-image-dialog/export-image-dialog';
|
||||
import { ExportDiagramDialog } from '@/dialogs/export-diagram-dialog/export-diagram-dialog';
|
||||
import { ImportDiagramDialog } from '@/dialogs/import-diagram-dialog/import-diagram-dialog';
|
||||
import type { ImportDBMLDialogProps } from '@/dialogs/import-dbml-dialog/import-dbml-dialog';
|
||||
import { ImportDBMLDialog } from '@/dialogs/import-dbml-dialog/import-dbml-dialog';
|
||||
|
||||
export const DialogProvider: React.FC<React.PropsWithChildren> = ({
|
||||
children,
|
||||
@@ -132,11 +130,6 @@ export const DialogProvider: React.FC<React.PropsWithChildren> = ({
|
||||
const [openImportDiagramDialog, setOpenImportDiagramDialog] =
|
||||
useState(false);
|
||||
|
||||
// Import DBML dialog
|
||||
const [openImportDBMLDialog, setOpenImportDBMLDialog] = useState(false);
|
||||
const [importDBMLDialogParams, setImportDBMLDialogParams] =
|
||||
useState<Omit<ImportDBMLDialogProps, 'dialog'>>();
|
||||
|
||||
return (
|
||||
<dialogContext.Provider
|
||||
value={{
|
||||
@@ -165,11 +158,6 @@ export const DialogProvider: React.FC<React.PropsWithChildren> = ({
|
||||
openImportDiagramDialog: () => setOpenImportDiagramDialog(true),
|
||||
closeImportDiagramDialog: () =>
|
||||
setOpenImportDiagramDialog(false),
|
||||
openImportDBMLDialog: (params) => {
|
||||
setImportDBMLDialogParams(params);
|
||||
setOpenImportDBMLDialog(true);
|
||||
},
|
||||
closeImportDBMLDialog: () => setOpenImportDBMLDialog(false),
|
||||
}}
|
||||
>
|
||||
{children}
|
||||
@@ -204,10 +192,6 @@ export const DialogProvider: React.FC<React.PropsWithChildren> = ({
|
||||
/>
|
||||
<ExportDiagramDialog dialog={{ open: openExportDiagramDialog }} />
|
||||
<ImportDiagramDialog dialog={{ open: openImportDiagramDialog }} />
|
||||
<ImportDBMLDialog
|
||||
dialog={{ open: openImportDBMLDialog }}
|
||||
{...importDBMLDialogParams}
|
||||
/>
|
||||
</dialogContext.Provider>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -15,9 +15,9 @@ export type DiffEventBase<T extends DiffEventType, D> = {
|
||||
};
|
||||
|
||||
export type DiffCalculatedData = {
|
||||
tablesAdded: DBTable[];
|
||||
fieldsAdded: Map<string, DBField[]>;
|
||||
relationshipsAdded: DBRelationship[];
|
||||
tablesToAdd: DBTable[];
|
||||
fieldsToAdd: Map<string, DBField[]>;
|
||||
relationshipsToAdd: DBRelationship[];
|
||||
};
|
||||
|
||||
export type DiffCalculatedEvent = DiffEventBase<
|
||||
@@ -44,15 +44,21 @@ export interface DiffContext {
|
||||
options?: {
|
||||
summaryOnly?: boolean;
|
||||
};
|
||||
}) => void;
|
||||
}) => { foundDiff: boolean };
|
||||
resetDiff: () => void;
|
||||
|
||||
// table diff
|
||||
checkIfTableHasChange: ({ tableId }: { tableId: string }) => boolean;
|
||||
checkIfNewTable: ({ tableId }: { tableId: string }) => boolean;
|
||||
checkIfTableRemoved: ({ tableId }: { tableId: string }) => boolean;
|
||||
getTableNewName: ({ tableId }: { tableId: string }) => string | null;
|
||||
getTableNewColor: ({ tableId }: { tableId: string }) => string | null;
|
||||
getTableNewName: ({ tableId }: { tableId: string }) => {
|
||||
old: string;
|
||||
new: string;
|
||||
} | null;
|
||||
getTableNewColor: ({ tableId }: { tableId: string }) => {
|
||||
old: string;
|
||||
new: string;
|
||||
} | null;
|
||||
|
||||
// field diff
|
||||
checkIfFieldHasChange: ({
|
||||
@@ -64,17 +70,41 @@ export interface DiffContext {
|
||||
}) => boolean;
|
||||
checkIfFieldRemoved: ({ fieldId }: { fieldId: string }) => boolean;
|
||||
checkIfNewField: ({ fieldId }: { fieldId: string }) => boolean;
|
||||
getFieldNewName: ({ fieldId }: { fieldId: string }) => string | null;
|
||||
getFieldNewType: ({ fieldId }: { fieldId: string }) => DataType | null;
|
||||
getFieldNewPrimaryKey: ({ fieldId }: { fieldId: string }) => boolean | null;
|
||||
getFieldNewNullable: ({ fieldId }: { fieldId: string }) => boolean | null;
|
||||
getFieldNewName: ({
|
||||
fieldId,
|
||||
}: {
|
||||
fieldId: string;
|
||||
}) => { old: string; new: string } | null;
|
||||
getFieldNewType: ({
|
||||
fieldId,
|
||||
}: {
|
||||
fieldId: string;
|
||||
}) => { old: DataType; new: DataType } | null;
|
||||
getFieldNewPrimaryKey: ({
|
||||
fieldId,
|
||||
}: {
|
||||
fieldId: string;
|
||||
}) => { old: boolean; new: boolean } | null;
|
||||
getFieldNewNullable: ({
|
||||
fieldId,
|
||||
}: {
|
||||
fieldId: string;
|
||||
}) => { old: boolean; new: boolean } | null;
|
||||
getFieldNewCharacterMaximumLength: ({
|
||||
fieldId,
|
||||
}: {
|
||||
fieldId: string;
|
||||
}) => string | null;
|
||||
getFieldNewScale: ({ fieldId }: { fieldId: string }) => number | null;
|
||||
getFieldNewPrecision: ({ fieldId }: { fieldId: string }) => number | null;
|
||||
}) => { old: string; new: string } | null;
|
||||
getFieldNewScale: ({
|
||||
fieldId,
|
||||
}: {
|
||||
fieldId: string;
|
||||
}) => { old: number; new: number } | null;
|
||||
getFieldNewPrecision: ({
|
||||
fieldId,
|
||||
}: {
|
||||
fieldId: string;
|
||||
}) => { old: number; new: number } | null;
|
||||
|
||||
// relationship diff
|
||||
checkIfNewRelationship: ({
|
||||
|
||||
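
Several `DiffContext` getters above now return an `{ old, new }` pair instead of only the new value, so consumers can render both sides of a change. A minimal consumer sketch, assuming the context is exposed via a `useDiff` hook at `@/hooks/use-diff` (the badge component itself is illustrative and not part of this changeset):

```tsx
// Hypothetical consumer of the updated diff getters; only useDiff and
// getFieldNewName come from this changeset, the badge component does not.
import React from 'react';
import { useDiff } from '@/hooks/use-diff'; // assumed hook path

export const FieldRenameBadge: React.FC<{ fieldId: string }> = ({
    fieldId,
}) => {
    const { getFieldNewName } = useDiff();

    // Returns { old, new } when the field was renamed, otherwise null.
    const rename = getFieldNewName({ fieldId });
    if (!rename) return null;

    return (
        <span>
            {rename.old} → {rename.new}
        </span>
    );
};
```
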
@@ -36,7 +36,7 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
|
||||
|
||||
const events = useEventEmitter<DiffEvent>();
|
||||
|
||||
const generateNewFieldsMap = useCallback(
|
||||
const generateFieldsToAddMap = useCallback(
|
||||
({
|
||||
diffMap,
|
||||
newDiagram,
|
||||
@@ -66,7 +66,7 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
|
||||
[]
|
||||
);
|
||||
|
||||
const findNewRelationships = useCallback(
|
||||
const findRelationshipsToAdd = useCallback(
|
||||
({
|
||||
diffMap,
|
||||
newDiagram,
|
||||
@@ -101,7 +101,7 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
|
||||
diffMap: DiffMap;
|
||||
}): DiffCalculatedData => {
|
||||
return {
|
||||
tablesAdded:
|
||||
tablesToAdd:
|
||||
newDiagram?.tables?.filter((table) => {
|
||||
const tableKey = getDiffMapKey({
|
||||
diffObject: 'table',
|
||||
@@ -114,17 +114,17 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
|
||||
);
|
||||
}) ?? [],
|
||||
|
||||
fieldsAdded: generateNewFieldsMap({
|
||||
fieldsToAdd: generateFieldsToAddMap({
|
||||
diffMap: diffMap,
|
||||
newDiagram: newDiagram,
|
||||
}),
|
||||
relationshipsAdded: findNewRelationships({
|
||||
relationshipsToAdd: findRelationshipsToAdd({
|
||||
diffMap: diffMap,
|
||||
newDiagram: newDiagram,
|
||||
}),
|
||||
};
|
||||
},
|
||||
[findNewRelationships, generateNewFieldsMap]
|
||||
[findRelationshipsToAdd, generateFieldsToAddMap]
|
||||
);
|
||||
|
||||
const calculateDiff: DiffContext['calculateDiff'] = useCallback(
|
||||
@@ -149,6 +149,8 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
|
||||
newDiagram: newDiagramArg,
|
||||
}),
|
||||
});
|
||||
|
||||
return { foundDiff: !!newDiffs.size };
|
||||
},
|
||||
[setDiffMap, events, generateDiffCalculatedData]
|
||||
);
|
||||
@@ -165,7 +167,10 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
|
||||
const diff = diffMap.get(tableNameKey);
|
||||
|
||||
if (diff?.type === 'changed') {
|
||||
return diff.newValue as string;
|
||||
return {
|
||||
new: diff.newValue as string,
|
||||
old: diff.oldValue as string,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -186,7 +191,10 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
|
||||
const diff = diffMap.get(tableColorKey);
|
||||
|
||||
if (diff?.type === 'changed') {
|
||||
return diff.newValue as string;
|
||||
return {
|
||||
new: diff.newValue as string,
|
||||
old: diff.oldValue as string,
|
||||
};
|
||||
}
|
||||
}
|
||||
return null;
|
||||
@@ -277,7 +285,10 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
|
||||
const diff = diffMap.get(fieldKey);
|
||||
|
||||
if (diff?.type === 'changed') {
|
||||
return diff.newValue as string;
|
||||
return {
|
||||
old: diff.oldValue as string,
|
||||
new: diff.newValue as string,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -298,7 +309,10 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
|
||||
const diff = diffMap.get(fieldKey);
|
||||
|
||||
if (diff?.type === 'changed') {
|
||||
return diff.newValue as DataType;
|
||||
return {
|
||||
old: diff.oldValue as DataType,
|
||||
new: diff.newValue as DataType,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -321,7 +335,10 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
|
||||
const diff = diffMap.get(fieldKey);
|
||||
|
||||
if (diff?.type === 'changed') {
|
||||
return diff.newValue as boolean;
|
||||
return {
|
||||
old: diff.oldValue as boolean,
|
||||
new: diff.newValue as boolean,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -342,7 +359,10 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
|
||||
const diff = diffMap.get(fieldKey);
|
||||
|
||||
if (diff?.type === 'changed') {
|
||||
return diff.newValue as boolean;
|
||||
return {
|
||||
old: diff.oldValue as boolean,
|
||||
new: diff.newValue as boolean,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -365,7 +385,10 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
|
||||
const diff = diffMap.get(fieldKey);
|
||||
|
||||
if (diff?.type === 'changed') {
|
||||
return diff.newValue as string;
|
||||
return {
|
||||
old: diff.oldValue as string,
|
||||
new: diff.newValue as string,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -386,7 +409,10 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
|
||||
const diff = diffMap.get(fieldKey);
|
||||
|
||||
if (diff?.type === 'changed') {
|
||||
return diff.newValue as number;
|
||||
return {
|
||||
old: diff.oldValue as number,
|
||||
new: diff.newValue as number,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -409,7 +435,10 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
|
||||
const diff = diffMap.get(fieldKey);
|
||||
|
||||
if (diff?.type === 'changed') {
|
||||
return diff.newValue as number;
|
||||
return {
|
||||
old: diff.oldValue as number,
|
||||
new: diff.newValue as number,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -42,6 +42,14 @@ import {
|
||||
type ValidationResult,
|
||||
} from '@/lib/data/sql-import/sql-validator';
|
||||
import { SQLValidationStatus } from './sql-validation-status';
|
||||
import { setupDBMLLanguage } from '@/components/code-snippet/languages/dbml-language';
|
||||
import type { ImportMethod } from '@/lib/import-method/import-method';
|
||||
import { detectImportMethod } from '@/lib/import-method/detect-import-method';
|
||||
import { verifyDBML } from '@/lib/dbml/dbml-import/verify-dbml';
|
||||
import {
|
||||
clearErrorHighlight,
|
||||
highlightErrorLine,
|
||||
} from '@/components/code-snippet/dbml/utils';
|
||||
|
||||
const calculateContentSizeMB = (content: string): number => {
|
||||
return content.length / (1024 * 1024); // Convert to MB
|
||||
@@ -55,49 +63,6 @@ const calculateIsLargeFile = (content: string): boolean => {
|
||||
const errorScriptOutputMessage =
|
||||
'Invalid JSON. Please correct it or contact us at support@chartdb.io for help.';
|
||||
|
||||
// Helper to detect if content is likely SQL DDL or JSON
|
||||
const detectContentType = (content: string): 'query' | 'ddl' | null => {
|
||||
if (!content || content.trim().length === 0) return null;
|
||||
|
||||
// Common SQL DDL keywords
|
||||
const ddlKeywords = [
|
||||
'CREATE TABLE',
|
||||
'ALTER TABLE',
|
||||
'DROP TABLE',
|
||||
'CREATE INDEX',
|
||||
'CREATE VIEW',
|
||||
'CREATE PROCEDURE',
|
||||
'CREATE FUNCTION',
|
||||
'CREATE SCHEMA',
|
||||
'CREATE DATABASE',
|
||||
];
|
||||
|
||||
const upperContent = content.toUpperCase();
|
||||
|
||||
// Check for SQL DDL patterns
|
||||
const hasDDLKeywords = ddlKeywords.some((keyword) =>
|
||||
upperContent.includes(keyword)
|
||||
);
|
||||
if (hasDDLKeywords) return 'ddl';
|
||||
|
||||
// Check if it looks like JSON
|
||||
try {
|
||||
// Just check structure, don't need full parse for detection
|
||||
if (
|
||||
(content.trim().startsWith('{') && content.trim().endsWith('}')) ||
|
||||
(content.trim().startsWith('[') && content.trim().endsWith(']'))
|
||||
) {
|
||||
return 'query';
|
||||
}
|
||||
} catch (error) {
|
||||
// Not valid JSON, might be partial
|
||||
console.error('Error detecting content type:', error);
|
||||
}
|
||||
|
||||
// If we can't confidently detect, return null
|
||||
return null;
|
||||
};
|
||||
|
||||
export interface ImportDatabaseProps {
|
||||
goBack?: () => void;
|
||||
onImport: () => void;
|
||||
@@ -111,8 +76,8 @@ export interface ImportDatabaseProps {
|
||||
>;
|
||||
keepDialogAfterImport?: boolean;
|
||||
title: string;
|
||||
importMethod: 'query' | 'ddl';
|
||||
setImportMethod: (method: 'query' | 'ddl') => void;
|
||||
importMethod: ImportMethod;
|
||||
setImportMethod: (method: ImportMethod) => void;
|
||||
}
|
||||
|
||||
export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
@@ -132,6 +97,7 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
const { effectiveTheme } = useTheme();
|
||||
const [errorMessage, setErrorMessage] = useState('');
|
||||
const editorRef = useRef<editor.IStandaloneCodeEditor | null>(null);
|
||||
const decorationsCollection = useRef<editor.IEditorDecorationsCollection>();
|
||||
const pasteDisposableRef = useRef<IDisposable | null>(null);
|
||||
|
||||
const { t } = useTranslation();
|
||||
@@ -146,15 +112,20 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
const [isAutoFixing, setIsAutoFixing] = useState(false);
|
||||
const [showAutoFixButton, setShowAutoFixButton] = useState(false);
|
||||
|
||||
const clearDecorations = useCallback(() => {
|
||||
clearErrorHighlight(decorationsCollection.current);
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
setScriptResult('');
|
||||
setErrorMessage('');
|
||||
setShowCheckJsonButton(false);
|
||||
}, [importMethod, setScriptResult]);
|
||||
|
||||
// Check if the ddl is valid
|
||||
// Check if the ddl or dbml is valid
|
||||
useEffect(() => {
|
||||
if (importMethod !== 'ddl') {
|
||||
clearDecorations();
|
||||
if (importMethod === 'query') {
|
||||
setSqlValidation(null);
|
||||
setShowAutoFixButton(false);
|
||||
return;
|
||||
@@ -163,9 +134,54 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
if (!scriptResult.trim()) {
|
||||
setSqlValidation(null);
|
||||
setShowAutoFixButton(false);
|
||||
setErrorMessage('');
|
||||
return;
|
||||
}
|
||||
|
||||
if (importMethod === 'dbml') {
|
||||
// Validate DBML by parsing it
|
||||
const validateResponse = verifyDBML(scriptResult);
|
||||
if (!validateResponse.hasError) {
|
||||
setErrorMessage('');
|
||||
setSqlValidation({
|
||||
isValid: true,
|
||||
errors: [],
|
||||
warnings: [],
|
||||
});
|
||||
} else {
|
||||
let errorMsg = 'Invalid DBML syntax';
|
||||
let line: number = 1;
|
||||
|
||||
if (validateResponse.parsedError) {
|
||||
errorMsg = validateResponse.parsedError.message;
|
||||
line = validateResponse.parsedError.line;
|
||||
highlightErrorLine({
|
||||
error: validateResponse.parsedError,
|
||||
model: editorRef.current?.getModel(),
|
||||
editorDecorationsCollection:
|
||||
decorationsCollection.current,
|
||||
});
|
||||
}
|
||||
|
||||
setSqlValidation({
|
||||
isValid: false,
|
||||
errors: [
|
||||
{
|
||||
message: errorMsg,
|
||||
line: line,
|
||||
type: 'syntax' as const,
|
||||
},
|
||||
],
|
||||
warnings: [],
|
||||
});
|
||||
setErrorMessage(errorMsg);
|
||||
}
|
||||
|
||||
setShowAutoFixButton(false);
|
||||
return;
|
||||
}
|
||||
|
||||
// SQL validation
|
||||
// First run our validation based on database type
|
||||
const validation = validateSQL(scriptResult, databaseType);
|
||||
setSqlValidation(validation);
|
||||
@@ -192,7 +208,7 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
setErrorMessage(result.error);
|
||||
}
|
||||
});
|
||||
}, [importMethod, scriptResult, databaseType]);
|
||||
}, [importMethod, scriptResult, databaseType, clearDecorations]);
|
||||
|
||||
// Check if the script result is a valid JSON
|
||||
useEffect(() => {
|
||||
@@ -320,6 +336,8 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
const handleEditorDidMount = useCallback(
|
||||
(editor: editor.IStandaloneCodeEditor) => {
|
||||
editorRef.current = editor;
|
||||
decorationsCollection.current =
|
||||
editor.createDecorationsCollection();
|
||||
|
||||
// Cleanup previous disposable if it exists
|
||||
if (pasteDisposableRef.current) {
|
||||
@@ -338,7 +356,7 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
const isLargeFile = calculateIsLargeFile(content);
|
||||
|
||||
// First, detect content type to determine if we should switch modes
|
||||
const detectedType = detectContentType(content);
|
||||
const detectedType = detectImportMethod(content);
|
||||
if (detectedType && detectedType !== importMethod) {
|
||||
// Switch to the detected mode immediately
|
||||
setImportMethod(detectedType);
|
||||
@@ -352,7 +370,7 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
?.run();
|
||||
}, 100);
|
||||
}
|
||||
// For DDL mode, do NOT format as it can break the SQL
|
||||
// For DDL and DBML modes, do NOT format as it can break the syntax
|
||||
} else {
|
||||
// Content type didn't change, apply formatting based on current mode
|
||||
if (importMethod === 'query' && !isLargeFile) {
|
||||
@@ -363,7 +381,7 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
?.run();
|
||||
}, 100);
|
||||
}
|
||||
// For DDL mode or large files, do NOT format
|
||||
// For DDL and DBML modes or large files, do NOT format
|
||||
}
|
||||
});
|
||||
|
||||
@@ -410,16 +428,25 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
<div className="w-full text-center text-xs text-muted-foreground">
|
||||
{importMethod === 'query'
|
||||
? 'Smart Query Output'
|
||||
: 'SQL Script'}
|
||||
: importMethod === 'dbml'
|
||||
? 'DBML Script'
|
||||
: 'SQL Script'}
|
||||
</div>
|
||||
<div className="flex-1 overflow-hidden">
|
||||
<Suspense fallback={<Spinner />}>
|
||||
<Editor
|
||||
value={scriptResult}
|
||||
onChange={debouncedHandleInputChange}
|
||||
language={importMethod === 'query' ? 'json' : 'sql'}
|
||||
language={
|
||||
importMethod === 'query'
|
||||
? 'json'
|
||||
: importMethod === 'dbml'
|
||||
? 'dbml'
|
||||
: 'sql'
|
||||
}
|
||||
loading={<Spinner />}
|
||||
onMount={handleEditorDidMount}
|
||||
beforeMount={setupDBMLLanguage}
|
||||
theme={
|
||||
effectiveTheme === 'dark'
|
||||
? 'dbml-dark'
|
||||
@@ -430,7 +457,6 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
minimap: { enabled: false },
|
||||
scrollBeyondLastLine: false,
|
||||
automaticLayout: true,
|
||||
glyphMargin: false,
|
||||
lineNumbers: 'on',
|
||||
guides: {
|
||||
indentation: false,
|
||||
@@ -455,7 +481,9 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
</Suspense>
|
||||
</div>
|
||||
|
||||
{errorMessage || (importMethod === 'ddl' && sqlValidation) ? (
|
||||
{errorMessage ||
|
||||
((importMethod === 'ddl' || importMethod === 'dbml') &&
|
||||
sqlValidation) ? (
|
||||
<SQLValidationStatus
|
||||
validation={sqlValidation}
|
||||
errorMessage={errorMessage}
|
||||
|
||||
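
The inline `'query' | 'ddl'` unions and the local `detectContentType` helper above are replaced by an `ImportMethod` type and a `detectImportMethod` function imported from `@/lib/import-method/`. Those modules are not shown in this compare view; the sketch below is inferred from how they are used here (the `'dbml'` member follows from the `importMethod === 'dbml'` checks, and the DBML detection heuristics are an assumption):

```ts
// Sketch of @/lib/import-method/import-method and detect-import-method,
// reconstructed from usage in this diff; not the actual source files.
export type ImportMethod = 'query' | 'ddl' | 'dbml';

export const detectImportMethod = (content: string): ImportMethod | null => {
    const trimmed = content.trim();
    if (!trimmed) return null;

    // DBML: table blocks or explicit refs, e.g. `Table users {` / `Ref: a.b > c.d`
    if (/\bTable\s+\S+\s*\{/.test(trimmed) || /^\s*Ref\s*:/im.test(trimmed)) {
        return 'dbml';
    }

    // SQL DDL keywords, mirroring the removed detectContentType helper
    const ddlKeywords = ['CREATE TABLE', 'ALTER TABLE', 'DROP TABLE', 'CREATE VIEW'];
    const upper = trimmed.toUpperCase();
    if (ddlKeywords.some((keyword) => upper.includes(keyword))) {
        return 'ddl';
    }

    // Smart-query output is JSON
    if (
        (trimmed.startsWith('{') && trimmed.endsWith('}')) ||
        (trimmed.startsWith('[') && trimmed.endsWith(']'))
    ) {
        return 'query';
    }

    return null;
};
```
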
@@ -15,9 +15,11 @@ import {
|
||||
AvatarImage,
|
||||
} from '@/components/avatar/avatar';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Code } from 'lucide-react';
|
||||
import { Code, FileCode } from 'lucide-react';
|
||||
import { SmartQueryInstructions } from './instructions/smart-query-instructions';
|
||||
import { DDLInstructions } from './instructions/ddl-instructions';
|
||||
import { DBMLInstructions } from './instructions/dbml-instructions';
|
||||
import type { ImportMethod } from '@/lib/import-method/import-method';
|
||||
|
||||
const DatabasesWithoutDDLInstructions: DatabaseType[] = [
|
||||
DatabaseType.CLICKHOUSE,
|
||||
@@ -30,8 +32,8 @@ export interface InstructionsSectionProps {
|
||||
setDatabaseEdition: React.Dispatch<
|
||||
React.SetStateAction<DatabaseEdition | undefined>
|
||||
>;
|
||||
importMethod: 'query' | 'ddl';
|
||||
setImportMethod: (method: 'query' | 'ddl') => void;
|
||||
importMethod: ImportMethod;
|
||||
setImportMethod: (method: ImportMethod) => void;
|
||||
showSSMSInfoDialog: boolean;
|
||||
setShowSSMSInfoDialog: (show: boolean) => void;
|
||||
}
|
||||
@@ -125,9 +127,9 @@ export const InstructionsSection: React.FC<InstructionsSectionProps> = ({
|
||||
className="ml-1 flex-wrap justify-start gap-2"
|
||||
value={importMethod}
|
||||
onValueChange={(value) => {
|
||||
let selectedImportMethod: 'query' | 'ddl' = 'query';
|
||||
let selectedImportMethod: ImportMethod = 'query';
|
||||
if (value) {
|
||||
selectedImportMethod = value as 'query' | 'ddl';
|
||||
selectedImportMethod = value as ImportMethod;
|
||||
}
|
||||
|
||||
setImportMethod(selectedImportMethod);
|
||||
@@ -150,10 +152,20 @@ export const InstructionsSection: React.FC<InstructionsSectionProps> = ({
|
||||
className="h-6 gap-1 p-0 px-2 shadow-none data-[state=on]:bg-slate-200 dark:data-[state=on]:bg-slate-700"
|
||||
>
|
||||
<Avatar className="size-4 rounded-none">
|
||||
<Code size={16} />
|
||||
<FileCode size={16} />
|
||||
</Avatar>
|
||||
SQL Script
|
||||
</ToggleGroupItem>
|
||||
<ToggleGroupItem
|
||||
value="dbml"
|
||||
variant="outline"
|
||||
className="h-6 gap-1 p-0 px-2 shadow-none data-[state=on]:bg-slate-200 dark:data-[state=on]:bg-slate-700"
|
||||
>
|
||||
<Avatar className="size-4 rounded-none">
|
||||
<Code size={16} />
|
||||
</Avatar>
|
||||
DBML
|
||||
</ToggleGroupItem>
|
||||
</ToggleGroup>
|
||||
</div>
|
||||
)}
|
||||
@@ -167,11 +179,16 @@ export const InstructionsSection: React.FC<InstructionsSectionProps> = ({
|
||||
showSSMSInfoDialog={showSSMSInfoDialog}
|
||||
setShowSSMSInfoDialog={setShowSSMSInfoDialog}
|
||||
/>
|
||||
) : (
|
||||
) : importMethod === 'ddl' ? (
|
||||
<DDLInstructions
|
||||
databaseType={databaseType}
|
||||
databaseEdition={databaseEdition}
|
||||
/>
|
||||
) : (
|
||||
<DBMLInstructions
|
||||
databaseType={databaseType}
|
||||
databaseEdition={databaseEdition}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -0,0 +1,47 @@
import React from 'react';
import type { DatabaseType } from '@/lib/domain/database-type';
import type { DatabaseEdition } from '@/lib/domain/database-edition';
import { CodeSnippet } from '@/components/code-snippet/code-snippet';
import { setupDBMLLanguage } from '@/components/code-snippet/languages/dbml-language';

export interface DBMLInstructionsProps {
    databaseType: DatabaseType;
    databaseEdition?: DatabaseEdition;
}

export const DBMLInstructions: React.FC<DBMLInstructionsProps> = () => {
    return (
        <>
            <div className="flex flex-col gap-1 text-sm text-primary">
                <div>
                    Paste your DBML (Database Markup Language) schema definition
                    here →
                </div>
            </div>

            <div className="flex h-64 flex-col gap-1 text-sm text-primary">
                <h4 className="text-xs font-medium">Example:</h4>
                <CodeSnippet
                    className="h-full"
                    allowCopy={false}
                    editorProps={{
                        beforeMount: setupDBMLLanguage,
                    }}
                    code={`Table users {
  id int [pk]
  username varchar
  email varchar
}

Table posts {
  id int [pk]
  user_id int [ref: > users.id]
  title varchar
  content text
}`}
                    language={'dbml'}
                />
            </div>
        </>
    );
};
|
||||
@@ -43,8 +43,8 @@ const DDLInstructionsMap: Record<DatabaseType, DDLInstruction[]> = {
|
||||
},
|
||||
{
|
||||
text: 'Execute the following command in your terminal:',
|
||||
code: `sqlite3 <database_file_path>\n.dump > <output_file_path>`,
|
||||
example: `sqlite3 my_db.db\n.dump > schema_export.sql`,
|
||||
code: `sqlite3 <database_file_path>\n".schema" > <output_file_path>`,
|
||||
example: `sqlite3 my_db.db\n".schema" > schema_export.sql`,
|
||||
},
|
||||
{
|
||||
text: 'Open the exported SQL file, copy its contents, and paste them here.',
|
||||
|
||||
@@ -73,7 +73,7 @@ export const SQLValidationStatus: React.FC<SQLValidationStatusProps> = ({
|
||||
|
||||
{hasErrors ? (
|
||||
<div className="rounded-md border border-red-200 bg-red-50 dark:border-red-800 dark:bg-red-950">
|
||||
<ScrollArea className="h-24">
|
||||
<ScrollArea className="h-fit max-h-24">
|
||||
<div className="space-y-3 p-3 pt-2 text-red-700 dark:text-red-300">
|
||||
{validation?.errors
|
||||
.slice(0, 3)
|
||||
@@ -137,7 +137,7 @@ export const SQLValidationStatus: React.FC<SQLValidationStatusProps> = ({
|
||||
|
||||
{hasWarnings && !hasErrors ? (
|
||||
<div className="rounded-md border border-sky-200 bg-sky-50 dark:border-sky-800 dark:bg-sky-950">
|
||||
<ScrollArea className="h-24">
|
||||
<ScrollArea className="h-fit max-h-24">
|
||||
<div className="space-y-3 p-3 pt-2 text-sky-700 dark:text-sky-300">
|
||||
<div className="flex items-start gap-2">
|
||||
<AlertTriangle className="mt-0.5 size-4 shrink-0 text-sky-700 dark:text-sky-300" />
|
||||
|
||||
@@ -3,7 +3,7 @@ import { Dialog, DialogContent } from '@/components/dialog/dialog';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import { useStorage } from '@/hooks/use-storage';
|
||||
import type { Diagram } from '@/lib/domain/diagram';
|
||||
import { loadFromDatabaseMetadata } from '@/lib/domain/diagram';
|
||||
import { loadFromDatabaseMetadata } from '@/lib/data/import-metadata/import';
|
||||
import { useNavigate } from 'react-router-dom';
|
||||
import { useConfig } from '@/hooks/use-config';
|
||||
import type { DatabaseMetadata } from '@/lib/data/import-metadata/metadata-types/database-metadata';
|
||||
@@ -22,6 +22,11 @@ import { sqlImportToDiagram } from '@/lib/data/sql-import';
|
||||
import type { SelectedTable } from '@/lib/data/import-metadata/filter-metadata';
|
||||
import { filterMetadataByTables } from '@/lib/data/import-metadata/filter-metadata';
|
||||
import { MAX_TABLES_WITHOUT_SHOWING_FILTER } from '../common/select-tables/constants';
|
||||
import {
|
||||
defaultDBMLDiagramName,
|
||||
importDBMLToDiagram,
|
||||
} from '@/lib/dbml/dbml-import/dbml-import';
|
||||
import type { ImportMethod } from '@/lib/import-method/import-method';
|
||||
|
||||
export interface CreateDiagramDialogProps extends BaseDialogProps {}
|
||||
|
||||
@@ -30,11 +35,11 @@ export const CreateDiagramDialog: React.FC<CreateDiagramDialogProps> = ({
|
||||
}) => {
|
||||
const { diagramId } = useChartDB();
|
||||
const { t } = useTranslation();
|
||||
const [importMethod, setImportMethod] = useState<'query' | 'ddl'>('query');
|
||||
const [importMethod, setImportMethod] = useState<ImportMethod>('query');
|
||||
const [databaseType, setDatabaseType] = useState<DatabaseType>(
|
||||
DatabaseType.GENERIC
|
||||
);
|
||||
const { closeCreateDiagramDialog, openImportDBMLDialog } = useDialog();
|
||||
const { closeCreateDiagramDialog } = useDialog();
|
||||
const { updateConfig } = useConfig();
|
||||
const [scriptResult, setScriptResult] = useState('');
|
||||
const [databaseEdition, setDatabaseEdition] = useState<
|
||||
@@ -89,6 +94,14 @@ export const CreateDiagramDialog: React.FC<CreateDiagramDialogProps> = ({
|
||||
sourceDatabaseType: databaseType,
|
||||
targetDatabaseType: databaseType,
|
||||
});
|
||||
} else if (importMethod === 'dbml') {
|
||||
diagram = await importDBMLToDiagram(scriptResult, {
|
||||
databaseType,
|
||||
});
|
||||
// Update the diagram name if it's the default
|
||||
if (diagram.name === defaultDBMLDiagramName) {
|
||||
diagram.name = `Diagram ${diagramNumber}`;
|
||||
}
|
||||
} else {
|
||||
let metadata: DatabaseMetadata | undefined = databaseMetadata;
|
||||
|
||||
@@ -152,10 +165,6 @@ export const CreateDiagramDialog: React.FC<CreateDiagramDialogProps> = ({
|
||||
await updateConfig({ config: { defaultDiagramId: diagram.id } });
|
||||
closeCreateDiagramDialog();
|
||||
navigate(`/diagrams/${diagram.id}`);
|
||||
setTimeout(
|
||||
() => openImportDBMLDialog({ withCreateEmptyDiagram: true }),
|
||||
700
|
||||
);
|
||||
}, [
|
||||
databaseType,
|
||||
addDiagram,
|
||||
@@ -164,14 +173,13 @@ export const CreateDiagramDialog: React.FC<CreateDiagramDialogProps> = ({
|
||||
navigate,
|
||||
updateConfig,
|
||||
diagramNumber,
|
||||
openImportDBMLDialog,
|
||||
]);
|
||||
|
||||
const importNewDiagramOrFilterTables = useCallback(async () => {
|
||||
try {
|
||||
setIsParsingMetadata(true);
|
||||
|
||||
if (importMethod === 'ddl') {
|
||||
if (importMethod === 'ddl' || importMethod === 'dbml') {
|
||||
await importNewDiagram();
|
||||
} else {
|
||||
// Parse metadata asynchronously to avoid blocking the UI
|
||||
|
||||
@@ -69,6 +69,7 @@ export const SelectDatabase: React.FC<SelectDatabaseProps> = ({
|
||||
type="button"
|
||||
variant="outline"
|
||||
onClick={createNewDiagram}
|
||||
disabled={databaseType === DatabaseType.GENERIC}
|
||||
>
|
||||
{t('new_diagram_dialog.empty_diagram')}
|
||||
</Button>
|
||||
|
||||
@@ -17,7 +17,7 @@ import { useDialog } from '@/hooks/use-dialog';
|
||||
import {
|
||||
exportBaseSQL,
|
||||
exportSQL,
|
||||
} from '@/lib/data/export-metadata/export-sql-script';
|
||||
} from '@/lib/data/sql-export/export-sql-script';
|
||||
import { databaseTypeToLabelMap } from '@/lib/databases';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import { Annoyed, Sparkles } from 'lucide-react';
|
||||
|
||||
@@ -7,7 +7,7 @@ import type { DatabaseEdition } from '@/lib/domain/database-edition';
|
||||
import type { DatabaseMetadata } from '@/lib/data/import-metadata/metadata-types/database-metadata';
|
||||
import { loadDatabaseMetadata } from '@/lib/data/import-metadata/metadata-types/database-metadata';
|
||||
import type { Diagram } from '@/lib/domain/diagram';
|
||||
import { loadFromDatabaseMetadata } from '@/lib/domain/diagram';
|
||||
import { loadFromDatabaseMetadata } from '@/lib/data/import-metadata/import';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import { useRedoUndoStack } from '@/hooks/use-redo-undo-stack';
|
||||
import { Trans, useTranslation } from 'react-i18next';
|
||||
@@ -15,6 +15,8 @@ import { useReactFlow } from '@xyflow/react';
|
||||
import type { BaseDialogProps } from '../common/base-dialog-props';
|
||||
import { useAlert } from '@/context/alert-context/alert-context';
|
||||
import { sqlImportToDiagram } from '@/lib/data/sql-import';
|
||||
import { importDBMLToDiagram } from '@/lib/dbml/dbml-import/dbml-import';
|
||||
import type { ImportMethod } from '@/lib/import-method/import-method';
|
||||
|
||||
export interface ImportDatabaseDialogProps extends BaseDialogProps {
|
||||
databaseType: DatabaseType;
|
||||
@@ -24,7 +26,7 @@ export const ImportDatabaseDialog: React.FC<ImportDatabaseDialogProps> = ({
|
||||
dialog,
|
||||
databaseType,
|
||||
}) => {
|
||||
const [importMethod, setImportMethod] = useState<'query' | 'ddl'>('query');
|
||||
const [importMethod, setImportMethod] = useState<ImportMethod>('query');
|
||||
const { closeImportDatabaseDialog } = useDialog();
|
||||
const { showAlert } = useAlert();
|
||||
const {
|
||||
@@ -65,6 +67,10 @@ export const ImportDatabaseDialog: React.FC<ImportDatabaseDialogProps> = ({
|
||||
sourceDatabaseType: databaseType,
|
||||
targetDatabaseType: databaseType,
|
||||
});
|
||||
} else if (importMethod === 'dbml') {
|
||||
diagram = await importDBMLToDiagram(scriptResult, {
|
||||
databaseType,
|
||||
});
|
||||
} else {
|
||||
const databaseMetadata: DatabaseMetadata =
|
||||
loadDatabaseMetadata(scriptResult);
|
||||
|
||||
@@ -1,359 +0,0 @@
|
||||
import React, {
|
||||
useCallback,
|
||||
useEffect,
|
||||
useState,
|
||||
Suspense,
|
||||
useRef,
|
||||
} from 'react';
|
||||
import type * as monaco from 'monaco-editor';
|
||||
import { useDialog } from '@/hooks/use-dialog';
|
||||
import {
|
||||
Dialog,
|
||||
DialogClose,
|
||||
DialogContent,
|
||||
DialogDescription,
|
||||
DialogFooter,
|
||||
DialogHeader,
|
||||
DialogInternalContent,
|
||||
DialogTitle,
|
||||
} from '@/components/dialog/dialog';
|
||||
import { Button } from '@/components/button/button';
|
||||
import type { BaseDialogProps } from '../common/base-dialog-props';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Editor } from '@/components/code-snippet/code-snippet';
|
||||
import { useTheme } from '@/hooks/use-theme';
|
||||
import { AlertCircle } from 'lucide-react';
|
||||
import {
|
||||
importDBMLToDiagram,
|
||||
sanitizeDBML,
|
||||
preprocessDBML,
|
||||
} from '@/lib/dbml/dbml-import/dbml-import';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import { Parser } from '@dbml/core';
|
||||
import { useCanvas } from '@/hooks/use-canvas';
|
||||
import { setupDBMLLanguage } from '@/components/code-snippet/languages/dbml-language';
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import { useToast } from '@/components/toast/use-toast';
|
||||
import { Spinner } from '@/components/spinner/spinner';
|
||||
import { debounce } from '@/lib/utils';
|
||||
import { parseDBMLError } from '@/lib/dbml/dbml-import/dbml-import-error';
|
||||
import {
|
||||
clearErrorHighlight,
|
||||
highlightErrorLine,
|
||||
} from '@/components/code-snippet/dbml/utils';
|
||||
|
||||
export interface ImportDBMLDialogProps extends BaseDialogProps {
|
||||
withCreateEmptyDiagram?: boolean;
|
||||
}
|
||||
|
||||
export const ImportDBMLDialog: React.FC<ImportDBMLDialogProps> = ({
|
||||
dialog,
|
||||
withCreateEmptyDiagram,
|
||||
}) => {
|
||||
const { t } = useTranslation();
|
||||
const initialDBML = `// Use DBML to define your database structure
|
||||
// Simple Blog System with Comments Example
|
||||
|
||||
Table users {
|
||||
id integer [primary key]
|
||||
name varchar
|
||||
email varchar
|
||||
}
|
||||
|
||||
Table posts {
|
||||
id integer [primary key]
|
||||
title varchar
|
||||
content text
|
||||
user_id integer
|
||||
created_at timestamp
|
||||
}
|
||||
|
||||
Table comments {
|
||||
id integer [primary key]
|
||||
content text
|
||||
post_id integer
|
||||
user_id integer
|
||||
created_at timestamp
|
||||
}
|
||||
|
||||
// Relationships
|
||||
Ref: posts.user_id > users.id // Each post belongs to one user
|
||||
Ref: comments.post_id > posts.id // Each comment belongs to one post
|
||||
Ref: comments.user_id > users.id // Each comment is written by one user`;
|
||||
|
||||
const [dbmlContent, setDBMLContent] = useState<string>(initialDBML);
|
||||
const { closeImportDBMLDialog } = useDialog();
|
||||
const [errorMessage, setErrorMessage] = useState<string | undefined>();
|
||||
const { effectiveTheme } = useTheme();
|
||||
const { toast } = useToast();
|
||||
const {
|
||||
addTables,
|
||||
addRelationships,
|
||||
tables,
|
||||
relationships,
|
||||
removeTables,
|
||||
removeRelationships,
|
||||
} = useChartDB();
|
||||
const { reorderTables } = useCanvas();
|
||||
const [reorder, setReorder] = useState(false);
|
||||
const editorRef = useRef<monaco.editor.IStandaloneCodeEditor>();
|
||||
const decorationsCollection =
|
||||
useRef<monaco.editor.IEditorDecorationsCollection>();
|
||||
|
||||
const handleEditorDidMount = (
|
||||
editor: monaco.editor.IStandaloneCodeEditor
|
||||
) => {
|
||||
editorRef.current = editor;
|
||||
decorationsCollection.current = editor.createDecorationsCollection();
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (reorder) {
|
||||
reorderTables({
|
||||
updateHistory: false,
|
||||
});
|
||||
setReorder(false);
|
||||
}
|
||||
}, [reorder, reorderTables]);
|
||||
|
||||
const clearDecorations = useCallback(() => {
|
||||
clearErrorHighlight(decorationsCollection.current);
|
||||
}, []);
|
||||
|
||||
const validateDBML = useCallback(
|
||||
async (content: string) => {
|
||||
// Clear previous errors
|
||||
setErrorMessage(undefined);
|
||||
clearDecorations();
|
||||
|
||||
if (!content.trim()) return;
|
||||
|
||||
try {
|
||||
const preprocessedContent = preprocessDBML(content);
|
||||
const sanitizedContent = sanitizeDBML(preprocessedContent);
|
||||
const parser = new Parser();
|
||||
parser.parse(sanitizedContent, 'dbml');
|
||||
} catch (e) {
|
||||
const parsedError = parseDBMLError(e);
|
||||
if (parsedError) {
|
||||
setErrorMessage(
|
||||
t('import_dbml_dialog.error.description') +
|
||||
` (1 error found - in line ${parsedError.line})`
|
||||
);
|
||||
highlightErrorLine({
|
||||
error: parsedError,
|
||||
model: editorRef.current?.getModel(),
|
||||
editorDecorationsCollection:
|
||||
decorationsCollection.current,
|
||||
});
|
||||
} else {
|
||||
setErrorMessage(
|
||||
e instanceof Error ? e.message : JSON.stringify(e)
|
||||
);
|
||||
}
|
||||
}
|
||||
},
|
||||
[clearDecorations, t]
|
||||
);
|
||||
|
||||
const debouncedValidateRef = useRef<((value: string) => void) | null>(null);
|
||||
|
||||
// Set up debounced validation
|
||||
useEffect(() => {
|
||||
debouncedValidateRef.current = debounce((value: string) => {
|
||||
validateDBML(value);
|
||||
}, 500);
|
||||
|
||||
return () => {
|
||||
debouncedValidateRef.current = null;
|
||||
};
|
||||
}, [validateDBML]);
|
||||
|
||||
// Trigger validation when content changes
|
||||
useEffect(() => {
|
||||
if (debouncedValidateRef.current) {
|
||||
debouncedValidateRef.current(dbmlContent);
|
||||
}
|
||||
}, [dbmlContent]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!dialog.open) {
|
||||
setErrorMessage(undefined);
|
||||
clearDecorations();
|
||||
setDBMLContent(initialDBML);
|
||||
}
|
||||
}, [dialog.open, initialDBML, clearDecorations]);
|
||||
|
||||
const handleImport = useCallback(async () => {
|
||||
if (!dbmlContent.trim() || errorMessage) return;
|
||||
|
||||
try {
|
||||
const importedDiagram = await importDBMLToDiagram(dbmlContent);
|
||||
const tableIdsToRemove = tables
|
||||
.filter((table) =>
|
||||
importedDiagram.tables?.some(
|
||||
(t: DBTable) =>
|
||||
t.name === table.name && t.schema === table.schema
|
||||
)
|
||||
)
|
||||
.map((table) => table.id);
|
||||
// Find relationships that need to be removed
|
||||
const relationshipIdsToRemove = relationships
|
||||
.filter((relationship) => {
|
||||
const sourceTable = tables.find(
|
||||
(table: DBTable) =>
|
||||
table.id === relationship.sourceTableId
|
||||
);
|
||||
const targetTable = tables.find(
|
||||
(table: DBTable) =>
|
||||
table.id === relationship.targetTableId
|
||||
);
|
||||
if (!sourceTable || !targetTable) return true;
|
||||
const replacementSourceTable = importedDiagram.tables?.find(
|
||||
(table: DBTable) =>
|
||||
table.name === sourceTable.name &&
|
||||
table.schema === sourceTable.schema
|
||||
);
|
||||
const replacementTargetTable = importedDiagram.tables?.find(
|
||||
(table: DBTable) =>
|
||||
table.name === targetTable.name &&
|
||||
table.schema === targetTable.schema
|
||||
);
|
||||
return replacementSourceTable || replacementTargetTable;
|
||||
})
|
||||
.map((relationship) => relationship.id);
|
||||
|
||||
// Remove existing items
|
||||
await Promise.all([
|
||||
removeTables(tableIdsToRemove, { updateHistory: false }),
|
||||
removeRelationships(relationshipIdsToRemove, {
|
||||
updateHistory: false,
|
||||
}),
|
||||
]);
|
||||
|
||||
// Add new items
|
||||
await Promise.all([
|
||||
addTables(importedDiagram.tables ?? [], {
|
||||
updateHistory: false,
|
||||
}),
|
||||
addRelationships(importedDiagram.relationships ?? [], {
|
||||
updateHistory: false,
|
||||
}),
|
||||
]);
|
||||
setReorder(true);
|
||||
closeImportDBMLDialog();
|
||||
} catch (e) {
|
||||
toast({
|
||||
title: t('import_dbml_dialog.error.title'),
|
||||
variant: 'destructive',
|
||||
description: (
|
||||
<>
|
||||
<div>{t('import_dbml_dialog.error.description')}</div>
|
||||
{e instanceof Error ? e.message : JSON.stringify(e)}
|
||||
</>
|
||||
),
|
||||
});
|
||||
}
|
||||
}, [
|
||||
dbmlContent,
|
||||
closeImportDBMLDialog,
|
||||
tables,
|
||||
relationships,
|
||||
removeTables,
|
||||
removeRelationships,
|
||||
addTables,
|
||||
addRelationships,
|
||||
errorMessage,
|
||||
toast,
|
||||
setReorder,
|
||||
t,
|
||||
]);
|
||||
|
||||
return (
|
||||
<Dialog
|
||||
{...dialog}
|
||||
onOpenChange={(open) => {
|
||||
if (!open) {
|
||||
closeImportDBMLDialog();
|
||||
}
|
||||
}}
|
||||
>
|
||||
<DialogContent
|
||||
className="flex h-[80vh] max-h-screen w-full flex-col md:max-w-[900px]"
|
||||
showClose
|
||||
>
|
||||
<DialogHeader>
|
||||
<DialogTitle>
|
||||
{withCreateEmptyDiagram
|
||||
? t('import_dbml_dialog.example_title')
|
||||
: t('import_dbml_dialog.title')}
|
||||
</DialogTitle>
|
||||
<DialogDescription>
|
||||
{t('import_dbml_dialog.description')}
|
||||
</DialogDescription>
|
||||
</DialogHeader>
|
||||
<DialogInternalContent>
|
||||
<Suspense fallback={<Spinner />}>
|
||||
<Editor
|
||||
value={dbmlContent}
|
||||
onChange={(value) => setDBMLContent(value || '')}
|
||||
language="dbml"
|
||||
onMount={handleEditorDidMount}
|
||||
theme={
|
||||
effectiveTheme === 'dark'
|
||||
? 'dbml-dark'
|
||||
: 'dbml-light'
|
||||
}
|
||||
beforeMount={setupDBMLLanguage}
|
||||
options={{
|
||||
minimap: { enabled: false },
|
||||
scrollBeyondLastLine: false,
|
||||
automaticLayout: true,
|
||||
glyphMargin: true,
|
||||
lineNumbers: 'on',
|
||||
scrollbar: {
|
||||
vertical: 'visible',
|
||||
horizontal: 'visible',
|
||||
},
|
||||
}}
|
||||
className="size-full"
|
||||
/>
|
||||
</Suspense>
|
||||
</DialogInternalContent>
|
||||
<DialogFooter>
|
||||
<div className="flex w-full items-center justify-between">
|
||||
<div className="flex items-center gap-4">
|
||||
<DialogClose asChild>
|
||||
<Button variant="secondary">
|
||||
{withCreateEmptyDiagram
|
||||
? t('import_dbml_dialog.skip_and_empty')
|
||||
: t('import_dbml_dialog.cancel')}
|
||||
</Button>
|
||||
</DialogClose>
|
||||
{errorMessage ? (
|
||||
<div className="flex items-center gap-1">
|
||||
<AlertCircle className="size-4 text-destructive" />
|
||||
|
||||
<span className="text-xs text-destructive">
|
||||
{errorMessage ||
|
||||
t(
|
||||
'import_dbml_dialog.error.description'
|
||||
)}
|
||||
</span>
|
||||
</div>
|
||||
) : null}
|
||||
</div>
|
||||
<Button
|
||||
onClick={handleImport}
|
||||
disabled={!dbmlContent.trim() || !!errorMessage}
|
||||
>
|
||||
{withCreateEmptyDiagram
|
||||
? t('import_dbml_dialog.show_example')
|
||||
: t('import_dbml_dialog.import')}
|
||||
</Button>
|
||||
</div>
|
||||
</DialogFooter>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
);
|
||||
};
|
||||
@@ -0,0 +1,98 @@
import React, { useCallback } from 'react';
import {
    DropdownMenu,
    DropdownMenuContent,
    DropdownMenuItem,
    DropdownMenuSeparator,
    DropdownMenuTrigger,
} from '@/components/dropdown-menu/dropdown-menu';
import { Button } from '@/components/button/button';
import { Ellipsis, Layers2, SquareArrowOutUpRight, Trash2 } from 'lucide-react';
import { useChartDB } from '@/hooks/use-chartdb';
import type { Diagram } from '@/lib/domain';
import { useStorage } from '@/hooks/use-storage';
import { cloneDiagram } from '@/lib/clone';
import { useTranslation } from 'react-i18next';

interface DiagramRowActionsMenuProps {
    diagram: Diagram;
    onOpen: () => void;
    refetch: () => void;
    numberOfDiagrams: number;
}

export const DiagramRowActionsMenu: React.FC<DiagramRowActionsMenuProps> = ({
    diagram,
    onOpen,
    refetch,
    numberOfDiagrams,
}) => {
    const { diagramId } = useChartDB();
    const { deleteDiagram, addDiagram } = useStorage();
    const { t } = useTranslation();

    const onDelete = useCallback(async () => {
        deleteDiagram(diagram.id);
        refetch();

        if (diagram.id === diagramId || numberOfDiagrams <= 1) {
            window.location.href = '/';
        }
    }, [deleteDiagram, diagram.id, diagramId, refetch, numberOfDiagrams]);

    const onDuplicate = useCallback(async () => {
        const duplicatedDiagram = cloneDiagram(diagram);

        const diagramToAdd = duplicatedDiagram.diagram;

        if (!diagramToAdd) {
            return;
        }

        diagramToAdd.name = `${diagram.name} (Copy)`;

        addDiagram({ diagram: diagramToAdd });
        refetch();
    }, [addDiagram, refetch, diagram]);

    return (
        <DropdownMenu>
            <DropdownMenuTrigger asChild>
                <Button
                    variant="ghost"
                    size="icon"
                    className="size-8 p-0"
                    onClick={(e) => e.stopPropagation()}
                >
                    <Ellipsis className="size-4" />
                </Button>
            </DropdownMenuTrigger>
            <DropdownMenuContent align="end">
                <DropdownMenuItem
                    onClick={onOpen}
                    className="flex justify-between gap-4"
                >
                    {t('open_diagram_dialog.diagram_actions.open')}
                    <SquareArrowOutUpRight className="size-3.5" />
                </DropdownMenuItem>

                <DropdownMenuItem
                    onClick={onDuplicate}
                    className="flex justify-between gap-4"
                >
                    {t('open_diagram_dialog.diagram_actions.duplicate')}
                    <Layers2 className="size-3.5" />
                </DropdownMenuItem>

                <DropdownMenuSeparator />
                <DropdownMenuItem
                    onClick={onDelete}
                    className="flex justify-between gap-4 text-red-700"
                >
                    {t('open_diagram_dialog.diagram_actions.delete')}
                    <Trash2 className="size-3.5 text-red-700" />
                </DropdownMenuItem>
            </DropdownMenuContent>
        </DropdownMenu>
    );
};
|
||||
@@ -27,6 +27,7 @@ import { useTranslation } from 'react-i18next';
|
||||
import { useNavigate } from 'react-router-dom';
|
||||
import type { BaseDialogProps } from '../common/base-dialog-props';
|
||||
import { useDebounce } from '@/hooks/use-debounce';
|
||||
import { DiagramRowActionsMenu } from './diagram-row-actions-menu/diagram-row-actions-menu';
|
||||
|
||||
export interface OpenDiagramDialogProps extends BaseDialogProps {
|
||||
canClose?: boolean;
|
||||
@@ -46,21 +47,22 @@ export const OpenDiagramDialog: React.FC<OpenDiagramDialogProps> = ({
|
||||
string | undefined
|
||||
>();
|
||||
|
||||
useEffect(() => {
|
||||
setSelectedDiagramId(undefined);
|
||||
}, [dialog.open]);
|
||||
const fetchDiagrams = useCallback(async () => {
|
||||
const diagrams = await listDiagrams({ includeTables: true });
|
||||
setDiagrams(
|
||||
diagrams.sort(
|
||||
(a, b) => b.updatedAt.getTime() - a.updatedAt.getTime()
|
||||
)
|
||||
);
|
||||
}, [listDiagrams]);
|
||||
|
||||
useEffect(() => {
|
||||
const fetchDiagrams = async () => {
|
||||
const diagrams = await listDiagrams({ includeTables: true });
|
||||
setDiagrams(
|
||||
diagrams.sort(
|
||||
(a, b) => b.updatedAt.getTime() - a.updatedAt.getTime()
|
||||
)
|
||||
);
|
||||
};
|
||||
if (!dialog.open) {
|
||||
return;
|
||||
}
|
||||
setSelectedDiagramId(undefined);
|
||||
fetchDiagrams();
|
||||
}, [listDiagrams, setDiagrams, dialog.open]);
|
||||
}, [dialog.open, fetchDiagrams]);
|
||||
|
||||
const openDiagram = useCallback(
|
||||
(diagramId: string) => {
|
||||
@@ -166,6 +168,7 @@ export const OpenDiagramDialog: React.FC<OpenDiagramDialogProps> = ({
|
||||
'open_diagram_dialog.table_columns.tables_count'
|
||||
)}
|
||||
</TableHead>
|
||||
<TableHead />
|
||||
</TableRow>
|
||||
</TableHeader>
|
||||
<TableBody>
|
||||
@@ -221,6 +224,19 @@ export const OpenDiagramDialog: React.FC<OpenDiagramDialogProps> = ({
|
||||
<TableCell className="text-center">
|
||||
{diagram.tables?.length}
|
||||
</TableCell>
|
||||
<TableCell className="items-center p-0 pr-1 text-right">
|
||||
<DiagramRowActionsMenu
|
||||
diagram={diagram}
|
||||
onOpen={() => {
|
||||
openDiagram(diagram.id);
|
||||
closeOpenDiagramDialog();
|
||||
}}
|
||||
numberOfDiagrams={
|
||||
diagrams.length
|
||||
}
|
||||
refetch={fetchDiagrams}
|
||||
/>
|
||||
</TableCell>
|
||||
</TableRow>
|
||||
))}
|
||||
</TableBody>
|
||||
|
||||
142
src/hooks/use-focus-on.ts
Normal file
@@ -0,0 +1,142 @@
import { useCallback } from 'react';
import { useReactFlow } from '@xyflow/react';
import { useLayout } from '@/hooks/use-layout';
import { useBreakpoint } from '@/hooks/use-breakpoint';

interface FocusOptions {
    select?: boolean;
}

export const useFocusOn = () => {
    const { fitView, setNodes, setEdges } = useReactFlow();
    const { hideSidePanel } = useLayout();
    const { isMd: isDesktop } = useBreakpoint('md');

    const focusOnArea = useCallback(
        (areaId: string, options: FocusOptions = {}) => {
            const { select = true } = options;

            if (select) {
                setNodes((nodes) =>
                    nodes.map((node) =>
                        node.id === areaId
                            ? {
                                  ...node,
                                  selected: true,
                              }
                            : {
                                  ...node,
                                  selected: false,
                              }
                    )
                );
            }

            fitView({
                duration: 500,
                maxZoom: 1,
                minZoom: 1,
                nodes: [
                    {
                        id: areaId,
                    },
                ],
            });

            if (!isDesktop) {
                hideSidePanel();
            }
        },
        [fitView, setNodes, hideSidePanel, isDesktop]
    );

    const focusOnTable = useCallback(
        (tableId: string, options: FocusOptions = {}) => {
            const { select = true } = options;

            if (select) {
                setNodes((nodes) =>
                    nodes.map((node) =>
                        node.id === tableId
                            ? {
                                  ...node,
                                  selected: true,
                              }
                            : {
                                  ...node,
                                  selected: false,
                              }
                    )
                );
            }

            fitView({
                duration: 500,
                maxZoom: 1,
                minZoom: 1,
                nodes: [
                    {
                        id: tableId,
                    },
                ],
            });

            if (!isDesktop) {
                hideSidePanel();
            }
        },
        [fitView, setNodes, hideSidePanel, isDesktop]
    );

    const focusOnRelationship = useCallback(
        (
            relationshipId: string,
            sourceTableId: string,
            targetTableId: string,
            options: FocusOptions = {}
        ) => {
            const { select = true } = options;

            if (select) {
                setEdges((edges) =>
                    edges.map((edge) =>
                        edge.id === relationshipId
                            ? {
                                  ...edge,
                                  selected: true,
                              }
                            : {
                                  ...edge,
                                  selected: false,
                              }
                    )
                );
            }

            fitView({
                duration: 500,
                maxZoom: 1,
                minZoom: 1,
                nodes: [
                    {
                        id: sourceTableId,
                    },
                    {
                        id: targetTableId,
                    },
                ],
            });

            if (!isDesktop) {
                hideSidePanel();
            }
        },
        [fitView, setEdges, hideSidePanel, isDesktop]
    );

    return {
        focusOnArea,
        focusOnTable,
        focusOnRelationship,
    };
};
|
||||
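
The new `useFocusOn` hook centralizes the select-and-zoom behavior used by the canvas filter's navigation buttons. A short usage sketch (the button component below is illustrative and not part of this changeset; it must render inside the React Flow and layout providers the hook relies on):

```tsx
// Hypothetical consumer; only useFocusOn itself comes from this changeset.
import React from 'react';
import { useFocusOn } from '@/hooks/use-focus-on';

export const ZoomToTableButton: React.FC<{ tableId: string }> = ({
    tableId,
}) => {
    const { focusOnTable } = useFocusOn();

    return (
        <button
            type="button"
            // Selects the table node and animates fitView to it; on small
            // screens the hook also hides the side panel.
            onClick={() => focusOnTable(tableId, { select: true })}
        >
            Zoom to table
        </button>
    );
};
```
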
328
src/hooks/use-update-table-field.ts
Normal file
@@ -0,0 +1,328 @@
|
||||
import { useCallback, useMemo, useState, useEffect, useRef } from 'react';
|
||||
import { useChartDB } from './use-chartdb';
|
||||
import { useDebounce } from './use-debounce-v2';
|
||||
import type { DBField, DBTable } from '@/lib/domain';
|
||||
import type {
|
||||
SelectBoxOption,
|
||||
SelectBoxProps,
|
||||
} from '@/components/select-box/select-box';
|
||||
import {
|
||||
dataTypeDataToDataType,
|
||||
sortedDataTypeMap,
|
||||
} from '@/lib/data/data-types/data-types';
|
||||
import { generateDBFieldSuffix } from '@/lib/domain/db-field';
|
||||
import type { DataTypeData } from '@/lib/data/data-types/data-types';
|
||||
|
||||
const generateFieldRegexPatterns = (
|
||||
dataType: DataTypeData
|
||||
): {
|
||||
regex?: string;
|
||||
extractRegex?: RegExp;
|
||||
} => {
|
||||
if (!dataType.fieldAttributes) {
|
||||
return { regex: undefined, extractRegex: undefined };
|
||||
}
|
||||
|
||||
const typeName = dataType.name;
|
||||
const fieldAttributes = dataType.fieldAttributes;
|
||||
|
||||
if (fieldAttributes.hasCharMaxLength) {
|
||||
if (fieldAttributes.hasCharMaxLengthOption) {
|
||||
return {
|
||||
regex: `^${typeName}\\((\\d+|[mM][aA][xX])\\)$`,
|
||||
extractRegex: /\((\d+|max)\)/i,
|
||||
};
|
||||
}
|
||||
return {
|
||||
regex: `^${typeName}\\(\\d+\\)$`,
|
||||
extractRegex: /\((\d+)\)/,
|
||||
};
|
||||
}
|
||||
|
||||
if (fieldAttributes.precision && fieldAttributes.scale) {
|
||||
return {
|
||||
regex: `^${typeName}\\s*\\(\\s*\\d+\\s*(?:,\\s*\\d+\\s*)?\\)$`,
|
||||
extractRegex: new RegExp(
|
||||
`${typeName}\\s*\\(\\s*(\\d+)\\s*(?:,\\s*(\\d+)\\s*)?\\)`
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (fieldAttributes.precision) {
|
||||
return {
|
||||
regex: `^${typeName}\\s*\\(\\s*\\d+\\s*\\)$`,
|
||||
extractRegex: /\((\d+)\)/,
|
||||
};
|
||||
}
|
||||
|
||||
return { regex: undefined, extractRegex: undefined };
|
||||
};
|
||||
|
||||
export const useUpdateTableField = (
    table: DBTable,
    field: DBField,
    customUpdateField?: (attrs: Partial<DBField>) => void
) => {
    const {
        databaseType,
        customTypes,
        updateField: chartDBUpdateField,
        removeField: chartDBRemoveField,
    } = useChartDB();

    // Local state for responsive UI
    const [localFieldName, setLocalFieldName] = useState(field.name);
    const [localNullable, setLocalNullable] = useState(field.nullable);
    const [localPrimaryKey, setLocalPrimaryKey] = useState(field.primaryKey);

    const lastFieldNameRef = useRef<string>(field.name);

    useEffect(() => {
        if (localFieldName === lastFieldNameRef.current) {
            lastFieldNameRef.current = field.name;
            setLocalFieldName(field.name);
        }
    }, [field.name, localFieldName]);

    // Update local state when field properties change externally
    useEffect(() => {
        setLocalNullable(field.nullable);
        setLocalPrimaryKey(field.primaryKey);
    }, [field.nullable, field.primaryKey]);

    // Use custom updateField if provided, otherwise use the chartDB one
    const updateField = useMemo(
        () =>
            customUpdateField
                ? (
                      _tableId: string,
                      _fieldId: string,
                      attrs: Partial<DBField>
                  ) => customUpdateField(attrs)
                : chartDBUpdateField,
        [customUpdateField, chartDBUpdateField]
    );
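    // Note (editor's comment, not in the original file): when customUpdateField is
    // supplied, the table/field ids passed by callers are deliberately ignored —
    // such a caller already knows which field it is editing and only receives the
    // changed attributes, while the default path persists through useChartDB.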
    // Calculate primary key fields for validation
    const primaryKeyFields = useMemo(() => {
        return table.fields.filter((f) => f.primaryKey);
    }, [table.fields]);

    const primaryKeyCount = useMemo(
        () => primaryKeyFields.length,
        [primaryKeyFields.length]
    );

    // Generate data type options for select box
    const dataFieldOptions = useMemo(() => {
        const standardTypes: SelectBoxOption[] = sortedDataTypeMap[
            databaseType
        ].map((type) => {
            const regexPatterns = generateFieldRegexPatterns(type);

            return {
                label: type.name,
                value: type.id,
                regex: regexPatterns.regex,
                extractRegex: regexPatterns.extractRegex,
                group: customTypes?.length ? 'Standard Types' : undefined,
            };
        });

        if (!customTypes?.length) {
            return standardTypes;
        }

        // Add custom types as options
        const customTypeOptions: SelectBoxOption[] = customTypes.map(
            (type) => ({
                label: type.name,
                value: type.name,
                description:
                    type.kind === 'enum' ? `${type.values?.join(' | ')}` : '',
                group: 'Custom Types',
            })
        );

        return [...standardTypes, ...customTypeOptions];
    }, [databaseType, customTypes]);

    // Handle data type change
    const handleDataTypeChange = useCallback<
        NonNullable<SelectBoxProps['onChange']>
    >(
        (value, regexMatches) => {
            const dataType = sortedDataTypeMap[databaseType].find(
                (v) => v.id === value
            ) ?? {
                id: value as string,
                name: value as string,
            };

            let characterMaximumLength: string | undefined = undefined;
            let precision: number | undefined = undefined;
            let scale: number | undefined = undefined;

            if (regexMatches?.length) {
                if (dataType?.fieldAttributes?.hasCharMaxLength) {
                    characterMaximumLength = regexMatches[1]?.toLowerCase();
                } else if (
                    dataType?.fieldAttributes?.precision &&
                    dataType?.fieldAttributes?.scale
                ) {
                    precision = parseInt(regexMatches[1]);
                    scale = regexMatches[2]
                        ? parseInt(regexMatches[2])
                        : undefined;
                } else if (dataType?.fieldAttributes?.precision) {
                    precision = parseInt(regexMatches[1]);
                }
            } else {
                if (
                    dataType?.fieldAttributes?.hasCharMaxLength &&
                    field.characterMaximumLength
                ) {
                    characterMaximumLength = field.characterMaximumLength;
                }

                if (dataType?.fieldAttributes?.precision && field.precision) {
                    precision = field.precision;
                }

                if (dataType?.fieldAttributes?.scale && field.scale) {
                    scale = field.scale;
                }
            }

            updateField(table.id, field.id, {
                characterMaximumLength,
                precision,
                scale,
                increment: undefined,
                default: undefined,
                type: dataTypeDataToDataType(
                    dataType ?? {
                        id: value as string,
                        name: value as string,
                    }
                ),
            });
        },
        [
            updateField,
            databaseType,
            field.characterMaximumLength,
            field.precision,
            field.scale,
            field.id,
            table.id,
        ]
    );
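    // Worked example (editor's sketch, not in the original file): if the user types
    // "varchar(255)" into the type select box, the option's extractRegex yields
    // regexMatches[1] === '255', so the call above sends
    // { characterMaximumLength: '255', precision: undefined, scale: undefined,
    //   increment: undefined, default: undefined, type: <varchar> } — the size is
    // carried over, while increment/default are reset since they may not fit the new type.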
    // Debounced update for field name
    const debouncedNameUpdate = useDebounce(
        useCallback(
            (value: string) => {
                if (value.trim() !== field.name) {
                    updateField(table.id, field.id, { name: value });
                }
            },
            [updateField, table.id, field.id, field.name]
        ),
        300 // 300ms debounce for text input
    );

    // Debounced update for nullable toggle
    const debouncedNullableUpdate = useDebounce(
        useCallback(
            (value: boolean) => {
                updateField(table.id, field.id, { nullable: value });
            },
            [updateField, table.id, field.id]
        ),
        100 // 100ms debounce for toggle
    );

    // Debounced update for primary key toggle
    const debouncedPrimaryKeyUpdate = useDebounce(
        useCallback(
            (value: boolean, primaryKeyCount: number) => {
                if (value) {
                    // When setting as primary key
                    const updates: Partial<DBField> = {
                        primaryKey: true,
                    };
                    // Only auto-set unique if this will be the only primary key
                    if (primaryKeyCount === 0) {
                        updates.unique = true;
                    }
                    updateField(table.id, field.id, updates);
                } else {
                    // When removing primary key
                    updateField(table.id, field.id, {
                        primaryKey: false,
                    });
                }
            },
            [updateField, table.id, field.id]
        ),
        100 // 100ms debounce for toggle
    );
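    // Flow sketch (editor's comment, not in the original file): the handlers below
    // are optimistic — local state flips immediately so the UI stays responsive,
    // and the matching debounced updater persists shortly afterwards (100 ms for
    // toggles, 300 ms for the name input). Making a field the table's first primary
    // key also marks it unique via the updates object above.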
    // Handle primary key toggle with optimistic update
    const handlePrimaryKeyToggle = useCallback(
        (value: boolean) => {
            setLocalPrimaryKey(value);
            debouncedPrimaryKeyUpdate(value, primaryKeyCount);
        },
        [primaryKeyCount, debouncedPrimaryKeyUpdate]
    );

    // Handle nullable toggle with optimistic update
    const handleNullableToggle = useCallback(
        (value: boolean) => {
            setLocalNullable(value);
            debouncedNullableUpdate(value);
        },
        [debouncedNullableUpdate]
    );

    // Handle name change with optimistic update
    const handleNameChange = useCallback(
        (value: string) => {
            setLocalFieldName(value);
            debouncedNameUpdate(value);
        },
        [debouncedNameUpdate]
    );

    // Utility function to generate field suffix for display
    const generateFieldSuffix = useCallback(
        (typeId?: string) => {
            return generateDBFieldSuffix(field, {
                databaseType,
                forceExtended: true,
                typeId,
            });
        },
        [field, databaseType]
    );

    const removeField = useCallback(() => {
        chartDBRemoveField(table.id, field.id);
    }, [chartDBRemoveField, table.id, field.id]);

    return {
        dataFieldOptions,
        handleDataTypeChange,
        handlePrimaryKeyToggle,
        handleNullableToggle,
        handleNameChange,
        generateFieldSuffix,
        primaryKeyCount,
        fieldName: localFieldName,
        nullable: localNullable,
        primaryKey: localPrimaryKey,
        removeField,
    };
};
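A hedged usage sketch (editor's addition, not part of the diff): a field-row component wiring the optimistic values and handlers returned by useUpdateTableField to plain inputs. The component name and markup are hypothetical; the hook API and import path come from the new file above.

import React from 'react';
import { useUpdateTableField } from '@/hooks/use-update-table-field';
import type { DBField, DBTable } from '@/lib/domain';

export const TableFieldRow: React.FC<{ table: DBTable; field: DBField }> = ({
    table,
    field,
}) => {
    const {
        fieldName,
        nullable,
        primaryKey,
        handleNameChange,
        handleNullableToggle,
        handlePrimaryKeyToggle,
        removeField,
    } = useUpdateTableField(table, field);

    return (
        <div>
            {/* the name input renders from local state; persistence is debounced by the hook */}
            <input
                value={fieldName}
                onChange={(e) => handleNameChange(e.target.value)}
            />
            <label>
                nullable
                <input
                    type="checkbox"
                    checked={nullable}
                    onChange={(e) => handleNullableToggle(e.target.checked)}
                />
            </label>
            <label>
                primary key
                <input
                    type="checkbox"
                    checked={primaryKey}
                    onChange={(e) => handlePrimaryKeyToggle(e.target.checked)}
                />
            </label>
            {/* dataFieldOptions + handleDataTypeChange would feed the SelectBox component */}
            <button onClick={removeField}>Remove field</button>
        </div>
    );
};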
src/hooks/use-update-table.ts (new file, 42 lines)
@@ -0,0 +1,42 @@
import { useCallback, useState, useEffect } from 'react';
import { useChartDB } from './use-chartdb';
import { useDebounce } from './use-debounce-v2';
import type { DBTable } from '@/lib/domain';

// Hook for updating table properties with debouncing for performance
export const useUpdateTable = (table: DBTable) => {
    const { updateTable: chartDBUpdateTable } = useChartDB();
    const [localTableName, setLocalTableName] = useState(table.name);

    // Debounced update function
    const debouncedUpdate = useDebounce(
        useCallback(
            (value: string) => {
                if (value.trim() && value.trim() !== table.name) {
                    chartDBUpdateTable(table.id, { name: value.trim() });
                }
            },
            [chartDBUpdateTable, table.id, table.name]
        ),
        1000 // 1000ms debounce
    );

    // Update local state immediately for responsive UI
    const handleTableNameChange = useCallback(
        (value: string) => {
            setLocalTableName(value);
            debouncedUpdate(value);
        },
        [debouncedUpdate]
    );

    // Update local state when table name changes externally
    useEffect(() => {
        setLocalTableName(table.name);
    }, [table.name]);

    return {
        tableName: localTableName,
        handleTableNameChange,
    };
};
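A hedged usage sketch (editor's addition, not part of the diff) for the hook above: the input stays responsive through local state while the actual rename is only applied once typing pauses for the hook's one-second debounce. The component name and markup are hypothetical; the hook API and import path come from the new file above.

import React from 'react';
import { useUpdateTable } from '@/hooks/use-update-table';
import type { DBTable } from '@/lib/domain';

export const TableNameInput: React.FC<{ table: DBTable }> = ({ table }) => {
    const { tableName, handleTableNameChange } = useUpdateTable(table);

    // Each keystroke updates local state immediately; the diagram's table is
    // renamed only after the debounce interval elapses.
    return (
        <input
            value={tableName}
            onChange={(e) => handleTableNameChange(e.target.value)}
        />
    );
};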
@@ -12,15 +12,15 @@ export const ar: LanguageTranslation = {
|
||||
custom_types: 'الأنواع المخصصة',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'قواعد البيانات',
|
||||
new: 'مخطط جديد',
|
||||
actions: {
|
||||
actions: 'الإجراءات',
|
||||
new: 'جديد...',
|
||||
browse: 'تصفح...',
|
||||
save: 'حفظ',
|
||||
import: 'استيراد قاعدة بيانات',
|
||||
export_sql: 'SQL تصدير',
|
||||
export_as: 'تصدير كـ',
|
||||
delete_diagram: 'حذف الرسم البياني',
|
||||
delete_diagram: 'حذف',
|
||||
},
|
||||
edit: {
|
||||
edit: 'تحرير',
|
||||
@@ -74,10 +74,10 @@ export const ar: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'إعادة ترتيب الرسم البياني',
|
||||
title: 'ترتيب تلقائي للرسم البياني',
|
||||
description:
|
||||
'هذا الإجراء سيقوم بإعادة ترتيب الجداول في المخطط بشكل تلقائي. هل تريد المتابعة؟',
|
||||
reorder: 'إعادة ترتيب',
|
||||
reorder: 'ترتيب تلقائي',
|
||||
cancel: 'إلغاء',
|
||||
},
|
||||
|
||||
@@ -248,6 +248,7 @@ export const ar: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'لم يتم تحديد قيم التعداد',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -270,7 +271,7 @@ export const ar: LanguageTranslation = {
|
||||
show_all: 'عرض الكل',
|
||||
undo: 'تراجع',
|
||||
redo: 'إعادة',
|
||||
reorder_diagram: 'إعادة ترتيب الرسم البياني',
|
||||
reorder_diagram: 'ترتيب تلقائي للرسم البياني',
|
||||
highlight_overlapping_tables: 'تمييز الجداول المتداخلة',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -313,7 +314,7 @@ export const ar: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'فتح مخطط',
|
||||
title: 'فتح قاعدة بيانات',
|
||||
description: 'اختر مخططًا لفتحه من القائمة ادناه',
|
||||
table_columns: {
|
||||
name: 'الإسم',
|
||||
@@ -323,6 +324,12 @@ export const ar: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'إلغاء',
|
||||
open: 'فتح',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'فتح',
|
||||
duplicate: 'تكرار',
|
||||
delete: 'حذف',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,15 +12,15 @@ export const bn: LanguageTranslation = {
|
||||
custom_types: 'কাস্টম টাইপ',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'ডাটাবেস',
|
||||
new: 'নতুন ডায়াগ্রাম',
|
||||
actions: {
|
||||
actions: 'কার্য',
|
||||
new: 'নতুন...',
|
||||
browse: 'ব্রাউজ করুন...',
|
||||
save: 'সংরক্ষণ করুন',
|
||||
import: 'ডাটাবেস আমদানি করুন',
|
||||
export_sql: 'SQL রপ্তানি করুন',
|
||||
export_as: 'রূপে রপ্তানি করুন',
|
||||
delete_diagram: 'ডায়াগ্রাম মুছুন',
|
||||
delete_diagram: 'মুছুন',
|
||||
},
|
||||
edit: {
|
||||
edit: 'সম্পাদনা',
|
||||
@@ -75,10 +75,10 @@ export const bn: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'ডায়াগ্রাম পুনর্বিন্যাস করুন',
|
||||
title: 'স্বয়ংক্রিয় ডায়াগ্রাম সাজান',
|
||||
description:
|
||||
'এই কাজটি ডায়াগ্রামের সমস্ত টেবিল পুনর্বিন্যাস করবে। আপনি কি চালিয়ে যেতে চান?',
|
||||
reorder: 'পুনর্বিন্যাস করুন',
|
||||
reorder: 'স্বয়ংক্রিয় সাজান',
|
||||
cancel: 'বাতিল করুন',
|
||||
},
|
||||
|
||||
@@ -249,6 +249,7 @@ export const bn: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'কোন enum মান সংজ্ঞায়িত নেই',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -271,7 +272,7 @@ export const bn: LanguageTranslation = {
|
||||
show_all: 'সব দেখান',
|
||||
undo: 'পূর্বাবস্থায় ফিরুন',
|
||||
redo: 'পুনরায় করুন',
|
||||
reorder_diagram: 'ডায়াগ্রাম পুনর্বিন্যাস করুন',
|
||||
reorder_diagram: 'স্বয়ংক্রিয় ডায়াগ্রাম সাজান',
|
||||
highlight_overlapping_tables: 'ওভারল্যাপিং টেবিল হাইলাইট করুন',
|
||||
|
||||
// TODO: Translate
|
||||
@@ -315,7 +316,7 @@ export const bn: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'চিত্র খুলুন',
|
||||
title: 'ডেটাবেস খুলুন',
|
||||
description: 'নিচের তালিকা থেকে একটি চিত্র নির্বাচন করুন।',
|
||||
table_columns: {
|
||||
name: 'নাম',
|
||||
@@ -325,6 +326,12 @@ export const bn: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'বাতিল করুন',
|
||||
open: 'খুলুন',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'খুলুন',
|
||||
duplicate: 'ডুপ্লিকেট',
|
||||
delete: 'মুছুন',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,15 +12,15 @@ export const de: LanguageTranslation = {
|
||||
custom_types: 'Benutzerdefinierte Typen',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'Datenbanken',
|
||||
new: 'Neues Diagramm',
|
||||
actions: {
|
||||
actions: 'Aktionen',
|
||||
new: 'Neu...',
|
||||
browse: 'Durchsuchen...',
|
||||
save: 'Speichern',
|
||||
import: 'Datenbank importieren',
|
||||
export_sql: 'SQL exportieren',
|
||||
export_as: 'Exportieren als',
|
||||
delete_diagram: 'Diagramm löschen',
|
||||
delete_diagram: 'Löschen',
|
||||
},
|
||||
edit: {
|
||||
edit: 'Bearbeiten',
|
||||
@@ -75,10 +75,10 @@ export const de: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'Diagramm neu anordnen',
|
||||
title: 'Diagramm automatisch anordnen',
|
||||
description:
|
||||
'Diese Aktion wird alle Tabellen im Diagramm neu anordnen. Möchten Sie fortfahren?',
|
||||
reorder: 'Neu anordnen',
|
||||
reorder: 'Automatisch anordnen',
|
||||
cancel: 'Abbrechen',
|
||||
},
|
||||
|
||||
@@ -250,6 +250,7 @@ export const de: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'Keine Enum-Werte definiert',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -272,7 +273,7 @@ export const de: LanguageTranslation = {
|
||||
show_all: 'Alle anzeigen',
|
||||
undo: 'Rückgängig',
|
||||
redo: 'Wiederholen',
|
||||
reorder_diagram: 'Diagramm neu anordnen',
|
||||
reorder_diagram: 'Diagramm automatisch anordnen',
|
||||
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
@@ -318,7 +319,7 @@ export const de: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'Diagramm öffnen',
|
||||
title: 'Datenbank öffnen',
|
||||
description: 'Wählen Sie ein Diagramm aus der Liste unten aus.',
|
||||
table_columns: {
|
||||
name: 'Name',
|
||||
@@ -328,6 +329,12 @@ export const de: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'Abbrechen',
|
||||
open: 'Öffnen',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'Öffnen',
|
||||
duplicate: 'Duplizieren',
|
||||
delete: 'Löschen',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,15 +12,15 @@ export const en = {
custom_types: 'Custom Types',
},
menu: {
databases: {
databases: 'Databases',
new: 'New Diagram',
actions: {
actions: 'Actions',
new: 'New...',
browse: 'Browse...',
save: 'Save',
import: 'Import',
export_sql: 'Export SQL',
export_as: 'Export as',
delete_diagram: 'Delete Diagram',
delete_diagram: 'Delete',
},
edit: {
edit: 'Edit',
@@ -73,10 +73,10 @@ export const en = {
},

reorder_diagram_alert: {
title: 'Reorder Diagram',
title: 'Auto Arrange Diagram',
description:
'This action will rearrange all tables in the diagram. Do you want to continue?',
reorder: 'Reorder',
reorder: 'Auto Arrange',
cancel: 'Cancel',
},

@@ -143,7 +143,6 @@ export const en = {
title: 'Field Attributes',
unique: 'Unique',
auto_increment: 'Auto Increment',
array: 'Declare Array',
character_length: 'Max Length',
precision: 'Precision',
scale: 'Scale',
@@ -243,6 +242,7 @@ export const en = {
enum_values: 'Enum Values',
composite_fields: 'Fields',
no_fields: 'No fields defined',
no_values: 'No enum values defined',
field_name_placeholder: 'Field name',
field_type_placeholder: 'Select type',
add_field: 'Add Field',
@@ -265,7 +265,7 @@ export const en = {
show_all: 'Show All',
undo: 'Undo',
redo: 'Redo',
reorder_diagram: 'Reorder Diagram',
reorder_diagram: 'Auto Arrange Diagram',
highlight_overlapping_tables: 'Highlight Overlapping Tables',
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
custom_type_highlight_tooltip:
@@ -307,7 +307,7 @@ export const en = {
},

open_diagram_dialog: {
title: 'Open Diagram',
title: 'Open Database',
description: 'Select a diagram to open from the list below.',
table_columns: {
name: 'Name',
@@ -317,6 +317,12 @@ export const en = {
},
cancel: 'Cancel',
open: 'Open',

diagram_actions: {
open: 'Open',
duplicate: 'Duplicate',
delete: 'Delete',
},
},

export_sql_dialog: {
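The locale hunks above (and the ones that follow for the other languages) only touch nested string keys such as menu.databases.actions.delete_diagram, the toolbar reorder_diagram label, and the new open_diagram_dialog.diagram_actions block. As a self-contained illustration of how such dotted keys resolve against an object like en, here is a minimal lookup sketch (editor's addition; ChartDB's actual i18n wiring is not shown in this diff, so this resolver is illustrative only).

type LocaleTree = { [key: string]: string | LocaleTree };

const resolveKey = (tree: LocaleTree, dottedKey: string): string | undefined => {
    let node: string | LocaleTree | undefined = tree;
    for (const part of dottedKey.split('.')) {
        // Stop as soon as the path runs off the tree.
        if (typeof node !== 'object' || node === null) {
            return undefined;
        }
        node = node[part];
    }
    return typeof node === 'string' ? node : undefined;
};

// After this change, the shortened menu label resolves as:
//   resolveKey(en, 'menu.databases.actions.delete_diagram') === 'Delete'
// and the new dialog actions live under:
//   resolveKey(en, 'open_diagram_dialog.diagram_actions.duplicate') === 'Duplicate'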
@@ -12,15 +12,15 @@ export const es: LanguageTranslation = {
|
||||
custom_types: 'Tipos Personalizados',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'Bases de Datos',
|
||||
new: 'Nuevo Diagrama',
|
||||
actions: {
|
||||
actions: 'Acciones',
|
||||
new: 'Nuevo...',
|
||||
browse: 'Examinar...',
|
||||
save: 'Guardar',
|
||||
import: 'Importar Base de Datos',
|
||||
export_sql: 'Exportar SQL',
|
||||
export_as: 'Exportar como',
|
||||
delete_diagram: 'Eliminar Diagrama',
|
||||
delete_diagram: 'Eliminar',
|
||||
},
|
||||
edit: {
|
||||
edit: 'Editar',
|
||||
@@ -74,10 +74,10 @@ export const es: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'Reordenar Diagrama',
|
||||
title: 'Organizar Diagrama Automáticamente',
|
||||
description:
|
||||
'Esta acción reorganizará todas las tablas en el diagrama. ¿Deseas continuar?',
|
||||
reorder: 'Reordenar',
|
||||
reorder: 'Organizar Automáticamente',
|
||||
cancel: 'Cancelar',
|
||||
},
|
||||
|
||||
@@ -248,6 +248,7 @@ export const es: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'No hay valores de enum definidos',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -270,7 +271,7 @@ export const es: LanguageTranslation = {
|
||||
show_all: 'Mostrar Todo',
|
||||
undo: 'Deshacer',
|
||||
redo: 'Rehacer',
|
||||
reorder_diagram: 'Reordenar Diagrama',
|
||||
reorder_diagram: 'Organizar Diagrama Automáticamente',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -315,7 +316,7 @@ export const es: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'Abrir Diagrama',
|
||||
title: 'Abrir Base de Datos',
|
||||
description:
|
||||
'Selecciona un diagrama para abrir de la lista a continuación.',
|
||||
table_columns: {
|
||||
@@ -326,6 +327,12 @@ export const es: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'Cancelar',
|
||||
open: 'Abrir',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'Abrir',
|
||||
duplicate: 'Duplicar',
|
||||
delete: 'Eliminar',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,15 +12,15 @@ export const fr: LanguageTranslation = {
|
||||
custom_types: 'Types Personnalisés',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'Bases de Données',
|
||||
new: 'Nouveau Diagramme',
|
||||
actions: {
|
||||
actions: 'Actions',
|
||||
new: 'Nouveau...',
|
||||
browse: 'Parcourir...',
|
||||
save: 'Enregistrer',
|
||||
import: 'Importer Base de Données',
|
||||
export_sql: 'Exporter SQL',
|
||||
export_as: 'Exporter en tant que',
|
||||
delete_diagram: 'Supprimer le Diagramme',
|
||||
delete_diagram: 'Supprimer',
|
||||
},
|
||||
edit: {
|
||||
edit: 'Édition',
|
||||
@@ -73,10 +73,10 @@ export const fr: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'Réorganiser le Diagramme',
|
||||
title: 'Organiser Automatiquement le Diagramme',
|
||||
description:
|
||||
'Cette action réorganisera toutes les tables dans le diagramme. Voulez-vous continuer ?',
|
||||
reorder: 'Réorganiser',
|
||||
reorder: 'Organiser Automatiquement',
|
||||
cancel: 'Annuler',
|
||||
},
|
||||
|
||||
@@ -246,6 +246,7 @@ export const fr: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: "Aucune valeur d'énumération définie",
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -268,7 +269,7 @@ export const fr: LanguageTranslation = {
|
||||
show_all: 'Afficher Tout',
|
||||
undo: 'Annuler',
|
||||
redo: 'Rétablir',
|
||||
reorder_diagram: 'Réorganiser le Diagramme',
|
||||
reorder_diagram: 'Organiser Automatiquement le Diagramme',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -312,7 +313,7 @@ export const fr: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'Ouvrir Diagramme',
|
||||
title: 'Ouvrir Base de Données',
|
||||
description:
|
||||
'Sélectionnez un diagramme à ouvrir dans la liste ci-dessous.',
|
||||
table_columns: {
|
||||
@@ -323,6 +324,12 @@ export const fr: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'Annuler',
|
||||
open: 'Ouvrir',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'Ouvrir',
|
||||
duplicate: 'Dupliquer',
|
||||
delete: 'Supprimer',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,15 +12,15 @@ export const gu: LanguageTranslation = {
|
||||
custom_types: 'કસ્ટમ ટાઇપ',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'ડેટાબેસેસ',
|
||||
new: 'નવું ડાયાગ્રામ',
|
||||
actions: {
|
||||
actions: 'ક્રિયાઓ',
|
||||
new: 'નવું...',
|
||||
browse: 'બ્રાઉજ કરો...',
|
||||
save: 'સાચવો',
|
||||
import: 'ડેટાબેસ આયાત કરો',
|
||||
export_sql: 'SQL નિકાસ કરો',
|
||||
export_as: 'રૂપે નિકાસ કરો',
|
||||
delete_diagram: 'ડાયાગ્રામ કાઢી નાખો',
|
||||
delete_diagram: 'કાઢી નાખો',
|
||||
},
|
||||
edit: {
|
||||
edit: 'ફેરફાર',
|
||||
@@ -75,10 +75,10 @@ export const gu: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'ડાયાગ્રામ ફરી વ્યવસ્થિત કરો',
|
||||
title: 'ડાયાગ્રામ ઑટોમેટિક ગોઠવો',
|
||||
description:
|
||||
'આ ક્રિયા ડાયાગ્રામમાં બધી ટેબલ્સને ફરીથી વ્યવસ્થિત કરશે. શું તમે ચાલુ રાખવા માંગો છો?',
|
||||
reorder: 'ફરી વ્યવસ્થિત કરો',
|
||||
reorder: 'ઑટોમેટિક ગોઠવો',
|
||||
cancel: 'રદ કરો',
|
||||
},
|
||||
|
||||
@@ -250,6 +250,7 @@ export const gu: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'કોઈ enum મૂલ્યો વ્યાખ્યાયિત નથી',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -272,7 +273,7 @@ export const gu: LanguageTranslation = {
|
||||
show_all: 'બધું બતાવો',
|
||||
undo: 'અનડુ',
|
||||
redo: 'રીડુ',
|
||||
reorder_diagram: 'ડાયાગ્રામ ફરીથી વ્યવસ્થિત કરો',
|
||||
reorder_diagram: 'ડાયાગ્રામ ઑટોમેટિક ગોઠવો',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -315,7 +316,7 @@ export const gu: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'ડાયાગ્રામ ખોલો',
|
||||
title: 'ડેટાબેસ ખોલો',
|
||||
description: 'નીચેની યાદીમાંથી એક ડાયાગ્રામ પસંદ કરો.',
|
||||
table_columns: {
|
||||
name: 'નામ',
|
||||
@@ -325,6 +326,12 @@ export const gu: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'રદ કરો',
|
||||
open: 'ખોલો',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'ખોલો',
|
||||
duplicate: 'ડુપ્લિકેટ',
|
||||
delete: 'કાઢી નાખો',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,15 +12,15 @@ export const hi: LanguageTranslation = {
|
||||
custom_types: 'कस्टम टाइप',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'डेटाबेस',
|
||||
new: 'नया आरेख',
|
||||
actions: {
|
||||
actions: 'कार्य',
|
||||
new: 'नया...',
|
||||
browse: 'ब्राउज़ करें...',
|
||||
save: 'सहेजें',
|
||||
import: 'डेटाबेस आयात करें',
|
||||
export_sql: 'SQL निर्यात करें',
|
||||
export_as: 'के रूप में निर्यात करें',
|
||||
delete_diagram: 'आरेख हटाएँ',
|
||||
delete_diagram: 'हटाएँ',
|
||||
},
|
||||
edit: {
|
||||
edit: 'संपादित करें',
|
||||
@@ -74,10 +74,10 @@ export const hi: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'आरेख पुनः व्यवस्थित करें',
|
||||
title: 'आरेख स्वचालित व्यवस्थित करें',
|
||||
description:
|
||||
'यह क्रिया आरेख में सभी तालिकाओं को पुनः व्यवस्थित कर देगी। क्या आप जारी रखना चाहते हैं?',
|
||||
reorder: 'पुनः व्यवस्थित करें',
|
||||
reorder: 'स्वचालित व्यवस्थित करें',
|
||||
cancel: 'रद्द करें',
|
||||
},
|
||||
|
||||
@@ -249,6 +249,7 @@ export const hi: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'कोई enum मान परिभाषित नहीं',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -271,7 +272,7 @@ export const hi: LanguageTranslation = {
|
||||
show_all: 'सभी दिखाएँ',
|
||||
undo: 'पूर्ववत करें',
|
||||
redo: 'पुनः करें',
|
||||
reorder_diagram: 'आरेख पुनः व्यवस्थित करें',
|
||||
reorder_diagram: 'आरेख स्वचालित व्यवस्थित करें',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -317,7 +318,7 @@ export const hi: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'आरेख खोलें',
|
||||
title: 'डेटाबेस खोलें',
|
||||
description: 'नीचे दी गई सूची से एक आरेख चुनें।',
|
||||
table_columns: {
|
||||
name: 'नाम',
|
||||
@@ -327,6 +328,12 @@ export const hi: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'रद्द करें',
|
||||
open: 'खोलें',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'खोलें',
|
||||
duplicate: 'डुप्लिकेट',
|
||||
delete: 'हटाएं',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,15 +12,15 @@ export const hr: LanguageTranslation = {
|
||||
custom_types: 'Prilagođeni Tipovi',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'Baze Podataka',
|
||||
new: 'Novi Dijagram',
|
||||
actions: {
|
||||
actions: 'Akcije',
|
||||
new: 'Novi...',
|
||||
browse: 'Pregledaj...',
|
||||
save: 'Spremi',
|
||||
import: 'Uvezi',
|
||||
export_sql: 'Izvezi SQL',
|
||||
export_as: 'Izvezi kao',
|
||||
delete_diagram: 'Izbriši dijagram',
|
||||
delete_diagram: 'Izbriši',
|
||||
},
|
||||
edit: {
|
||||
edit: 'Uredi',
|
||||
@@ -73,10 +73,10 @@ export const hr: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'Preuredi dijagram',
|
||||
title: 'Automatski preuredi dijagram',
|
||||
description:
|
||||
'Ova radnja će preurediti sve tablice u dijagramu. Želite li nastaviti?',
|
||||
reorder: 'Preuredi',
|
||||
reorder: 'Automatski preuredi',
|
||||
cancel: 'Odustani',
|
||||
},
|
||||
|
||||
@@ -245,6 +245,7 @@ export const hr: LanguageTranslation = {
|
||||
enum_values: 'Enum vrijednosti',
|
||||
composite_fields: 'Polja',
|
||||
no_fields: 'Nema definiranih polja',
|
||||
no_values: 'Nema definiranih enum vrijednosti',
|
||||
field_name_placeholder: 'Naziv polja',
|
||||
field_type_placeholder: 'Odaberi tip',
|
||||
add_field: 'Dodaj polje',
|
||||
@@ -268,7 +269,7 @@ export const hr: LanguageTranslation = {
|
||||
show_all: 'Prikaži sve',
|
||||
undo: 'Poništi',
|
||||
redo: 'Ponovi',
|
||||
reorder_diagram: 'Preuredi dijagram',
|
||||
reorder_diagram: 'Automatski preuredi dijagram',
|
||||
highlight_overlapping_tables: 'Istakni preklapajuće tablice',
|
||||
clear_custom_type_highlight: 'Ukloni isticanje za "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -310,7 +311,7 @@ export const hr: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'Otvori dijagram',
|
||||
title: 'Otvori bazu podataka',
|
||||
description: 'Odaberite dijagram za otvaranje iz popisa ispod.',
|
||||
table_columns: {
|
||||
name: 'Naziv',
|
||||
@@ -320,6 +321,12 @@ export const hr: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'Odustani',
|
||||
open: 'Otvori',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'Otvori',
|
||||
duplicate: 'Dupliciraj',
|
||||
delete: 'Obriši',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,15 +12,15 @@ export const id_ID: LanguageTranslation = {
|
||||
custom_types: 'Tipe Kustom',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'Basis Data',
|
||||
new: 'Diagram Baru',
|
||||
actions: {
|
||||
actions: 'Aksi',
|
||||
new: 'Baru...',
|
||||
browse: 'Jelajahi...',
|
||||
save: 'Simpan',
|
||||
import: 'Impor Database',
|
||||
export_sql: 'Ekspor SQL',
|
||||
export_as: 'Ekspor Sebagai',
|
||||
delete_diagram: 'Hapus Diagram',
|
||||
delete_diagram: 'Hapus',
|
||||
},
|
||||
edit: {
|
||||
edit: 'Ubah',
|
||||
@@ -74,10 +74,10 @@ export const id_ID: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'Atur Ulang Diagram',
|
||||
title: 'Atur Otomatis Diagram',
|
||||
description:
|
||||
'Tindakan ini akan mengatur ulang semua tabel di diagram. Apakah Anda ingin melanjutkan?',
|
||||
reorder: 'Atur Ulang',
|
||||
reorder: 'Atur Otomatis',
|
||||
cancel: 'Batal',
|
||||
},
|
||||
|
||||
@@ -248,6 +248,7 @@ export const id_ID: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'Tidak ada nilai enum yang ditentukan',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -270,7 +271,7 @@ export const id_ID: LanguageTranslation = {
|
||||
show_all: 'Tampilkan Semua',
|
||||
undo: 'Undo',
|
||||
redo: 'Redo',
|
||||
reorder_diagram: 'Atur Ulang Diagram',
|
||||
reorder_diagram: 'Atur Otomatis Diagram',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -314,7 +315,7 @@ export const id_ID: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'Buka Diagram',
|
||||
title: 'Buka Database',
|
||||
description: 'Pilih diagram untuk dibuka dari daftar di bawah.',
|
||||
table_columns: {
|
||||
name: 'Name',
|
||||
@@ -324,6 +325,12 @@ export const id_ID: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'Batal',
|
||||
open: 'Buka',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'Buka',
|
||||
duplicate: 'Duplikat',
|
||||
delete: 'Hapus',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,15 +12,15 @@ export const ja: LanguageTranslation = {
|
||||
custom_types: 'カスタムタイプ',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'データベース',
|
||||
new: '新しいダイアグラム',
|
||||
actions: {
|
||||
actions: 'アクション',
|
||||
new: '新規...',
|
||||
browse: '参照...',
|
||||
save: '保存',
|
||||
import: 'データベースをインポート',
|
||||
export_sql: 'SQLをエクスポート',
|
||||
export_as: '形式を指定してエクスポート',
|
||||
delete_diagram: 'ダイアグラムを削除',
|
||||
delete_diagram: '削除',
|
||||
},
|
||||
edit: {
|
||||
edit: '編集',
|
||||
@@ -76,10 +76,10 @@ export const ja: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'ダイアグラムを並べ替え',
|
||||
title: 'ダイアグラムを自動配置',
|
||||
description:
|
||||
'この操作によりダイアグラム内のすべてのテーブルが再配置されます。続行しますか?',
|
||||
reorder: '並べ替え',
|
||||
reorder: '自動配置',
|
||||
cancel: 'キャンセル',
|
||||
},
|
||||
|
||||
@@ -253,6 +253,7 @@ export const ja: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: '列挙値が定義されていません',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -275,7 +276,7 @@ export const ja: LanguageTranslation = {
|
||||
show_all: 'すべて表示',
|
||||
undo: '元に戻す',
|
||||
redo: 'やり直し',
|
||||
reorder_diagram: 'ダイアグラムを並べ替え',
|
||||
reorder_diagram: 'ダイアグラムを自動配置',
|
||||
// TODO: Translate
|
||||
highlight_overlapping_tables: 'Highlight Overlapping Tables',
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
@@ -319,7 +320,7 @@ export const ja: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'ダイアグラムを開く',
|
||||
title: 'データベースを開く',
|
||||
description: '以下のリストからダイアグラムを選択してください。',
|
||||
table_columns: {
|
||||
name: '名前',
|
||||
@@ -329,6 +330,12 @@ export const ja: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'キャンセル',
|
||||
open: '開く',
|
||||
|
||||
diagram_actions: {
|
||||
open: '開く',
|
||||
duplicate: '複製',
|
||||
delete: '削除',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,15 +12,15 @@ export const ko_KR: LanguageTranslation = {
|
||||
custom_types: '사용자 지정 타입',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: '데이터베이스',
|
||||
new: '새 다이어그램',
|
||||
actions: {
|
||||
actions: '작업',
|
||||
new: '새로 만들기...',
|
||||
browse: '찾아보기...',
|
||||
save: '저장',
|
||||
import: '데이터베이스 가져오기',
|
||||
export_sql: 'SQL로 저장',
|
||||
export_as: '다른 형식으로 저장',
|
||||
delete_diagram: '다이어그램 삭제',
|
||||
delete_diagram: '삭제',
|
||||
},
|
||||
edit: {
|
||||
edit: '편집',
|
||||
@@ -74,10 +74,10 @@ export const ko_KR: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: '다이어그램 재정렬',
|
||||
title: '다이어그램 자동 정렬',
|
||||
description:
|
||||
'이 작업은 모든 다이어그램이 재정렬됩니다. 계속하시겠습니까?',
|
||||
reorder: '재정렬',
|
||||
reorder: '자동 정렬',
|
||||
cancel: '취소',
|
||||
},
|
||||
|
||||
@@ -248,6 +248,7 @@ export const ko_KR: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: '정의된 열거형 값이 없습니다',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -270,7 +271,7 @@ export const ko_KR: LanguageTranslation = {
|
||||
show_all: '전체 저장',
|
||||
undo: '실행 취소',
|
||||
redo: '다시 실행',
|
||||
reorder_diagram: '다이어그램 재정렬',
|
||||
reorder_diagram: '다이어그램 자동 정렬',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -314,7 +315,7 @@ export const ko_KR: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: '다이어그램 열기',
|
||||
title: '데이터베이스 열기',
|
||||
description: '아래의 목록에서 다이어그램을 선택하세요.',
|
||||
table_columns: {
|
||||
name: '이름',
|
||||
@@ -324,6 +325,12 @@ export const ko_KR: LanguageTranslation = {
|
||||
},
|
||||
cancel: '취소',
|
||||
open: '열기',
|
||||
|
||||
diagram_actions: {
|
||||
open: '열기',
|
||||
duplicate: '복제',
|
||||
delete: '삭제',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,15 +12,15 @@ export const mr: LanguageTranslation = {
|
||||
custom_types: 'कस्टम प्रकार',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'डेटाबेस',
|
||||
new: 'नवीन आरेख',
|
||||
actions: {
|
||||
actions: 'क्रिया',
|
||||
new: 'नवीन...',
|
||||
browse: 'ब्राउज करा...',
|
||||
save: 'जतन करा',
|
||||
import: 'डेटाबेस इम्पोर्ट करा',
|
||||
export_sql: 'SQL एक्स्पोर्ट करा',
|
||||
export_as: 'म्हणून एक्स्पोर्ट करा',
|
||||
delete_diagram: 'आरेख हटवा',
|
||||
delete_diagram: 'हटवा',
|
||||
},
|
||||
edit: {
|
||||
edit: 'संपादन करा',
|
||||
@@ -75,10 +75,10 @@ export const mr: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'आरेख पुनःक्रमित करा',
|
||||
title: 'आरेख स्वयंचलित व्यवस्थित करा',
|
||||
description:
|
||||
'ही क्रिया आरेखातील सर्व टेबल्सची पुनर्रचना करेल. तुम्हाला पुढे जायचे आहे का?',
|
||||
reorder: 'पुनःक्रमित करा',
|
||||
reorder: 'स्वयंचलित व्यवस्थित करा',
|
||||
cancel: 'रद्द करा',
|
||||
},
|
||||
|
||||
@@ -252,6 +252,7 @@ export const mr: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'कोणतीही enum मूल्ये परिभाषित नाहीत',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -274,7 +275,7 @@ export const mr: LanguageTranslation = {
|
||||
show_all: 'सर्व दाखवा',
|
||||
undo: 'पूर्ववत करा',
|
||||
redo: 'पुन्हा करा',
|
||||
reorder_diagram: 'आरेख पुनःक्रमित करा',
|
||||
reorder_diagram: 'आरेख स्वयंचलित व्यवस्थित करा',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -320,7 +321,7 @@ export const mr: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'आरेख उघडा',
|
||||
title: 'डेटाबेस उघडा',
|
||||
description: 'खालील यादीतून उघडण्यासाठी एक आरेख निवडा.',
|
||||
table_columns: {
|
||||
name: 'नाव',
|
||||
@@ -330,6 +331,12 @@ export const mr: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'रद्द करा',
|
||||
open: 'उघडा',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'उघडा',
|
||||
duplicate: 'डुप्लिकेट',
|
||||
delete: 'हटवा',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,15 +12,15 @@ export const ne: LanguageTranslation = {
|
||||
custom_types: 'कस्टम प्रकारहरू',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'डाटाबेसहरू',
|
||||
new: 'नयाँ डायाग्राम',
|
||||
actions: {
|
||||
actions: 'कार्यहरू',
|
||||
new: 'नयाँ...',
|
||||
browse: 'ब्राउज गर्नुहोस्...',
|
||||
save: 'सुरक्षित गर्नुहोस्',
|
||||
import: 'डाटाबेस आयात गर्नुहोस्',
|
||||
export_sql: 'SQL निर्यात गर्नुहोस्',
|
||||
export_as: 'निर्यात गर्नुहोस्',
|
||||
delete_diagram: 'डायाग्राम हटाउनुहोस्',
|
||||
delete_diagram: 'हटाउनुहोस्',
|
||||
},
|
||||
edit: {
|
||||
edit: 'सम्पादन',
|
||||
@@ -75,10 +75,10 @@ export const ne: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'डायाग्राम पुनः क्रमबद्ध गर्नुहोस्',
|
||||
title: 'डायाग्राम स्वचालित मिलाउनुहोस्',
|
||||
description:
|
||||
'यो कार्य पूर्ववत गर्न सकिँदैन। यो डायाग्राम स्थायी रूपमा हटाउनेछ।',
|
||||
reorder: 'पुनः क्रमबद्ध गर्नुहोस्',
|
||||
reorder: 'स्वचालित मिलाउनुहोस्',
|
||||
cancel: 'रद्द गर्नुहोस्',
|
||||
},
|
||||
|
||||
@@ -249,6 +249,7 @@ export const ne: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'कुनै enum मानहरू परिभाषित छैनन्',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -271,7 +272,7 @@ export const ne: LanguageTranslation = {
|
||||
show_all: 'सबै देखाउनुहोस्',
|
||||
undo: 'पूर्ववत',
|
||||
redo: 'पुनः गर्नुहोस्',
|
||||
reorder_diagram: 'पुनः क्रमबद्ध गर्नुहोस्',
|
||||
reorder_diagram: 'डायाग्राम स्वचालित मिलाउनुहोस्',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -316,7 +317,7 @@ export const ne: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'डायाग्राम खोल्नुहोस्',
|
||||
title: 'डाटाबेस खोल्नुहोस्',
|
||||
description:
|
||||
'तलको सूचीबाट खोल्नका लागि एक डायाग्राम चयन गर्नुहोस्।',
|
||||
table_columns: {
|
||||
@@ -327,6 +328,12 @@ export const ne: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'रद्द गर्नुहोस्',
|
||||
open: 'खोल्नुहोस्',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'खोल्नुहोस्',
|
||||
duplicate: 'डुप्लिकेट',
|
||||
delete: 'मेटाउनुहोस्',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,15 +12,15 @@ export const pt_BR: LanguageTranslation = {
|
||||
custom_types: 'Tipos Personalizados',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'Bancos de Dados',
|
||||
new: 'Novo Diagrama',
|
||||
actions: {
|
||||
actions: 'Ações',
|
||||
new: 'Novo...',
|
||||
browse: 'Navegar...',
|
||||
save: 'Salvar',
|
||||
import: 'Importar Banco de Dados',
|
||||
export_sql: 'Exportar SQL',
|
||||
export_as: 'Exportar como',
|
||||
delete_diagram: 'Excluir Diagrama',
|
||||
delete_diagram: 'Excluir',
|
||||
},
|
||||
edit: {
|
||||
edit: 'Editar',
|
||||
@@ -75,10 +75,10 @@ export const pt_BR: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'Reordenar Diagrama',
|
||||
title: 'Organizar Diagrama Automaticamente',
|
||||
description:
|
||||
'Esta ação reorganizará todas as tabelas no diagrama. Deseja continuar?',
|
||||
reorder: 'Reordenar',
|
||||
reorder: 'Organizar Automaticamente',
|
||||
cancel: 'Cancelar',
|
||||
},
|
||||
|
||||
@@ -249,6 +249,7 @@ export const pt_BR: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'Nenhum valor de enum definido',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -271,7 +272,7 @@ export const pt_BR: LanguageTranslation = {
|
||||
show_all: 'Mostrar Tudo',
|
||||
undo: 'Desfazer',
|
||||
redo: 'Refazer',
|
||||
reorder_diagram: 'Reordenar Diagrama',
|
||||
reorder_diagram: 'Organizar Diagrama Automaticamente',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -316,7 +317,7 @@ export const pt_BR: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'Abrir Diagrama',
|
||||
title: 'Abrir Banco de Dados',
|
||||
description: 'Selecione um diagrama para abrir da lista abaixo.',
|
||||
table_columns: {
|
||||
name: 'Nome',
|
||||
@@ -326,6 +327,12 @@ export const pt_BR: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'Cancelar',
|
||||
open: 'Abrir',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'Abrir',
|
||||
duplicate: 'Duplicar',
|
||||
delete: 'Excluir',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,15 +12,15 @@ export const ru: LanguageTranslation = {
|
||||
custom_types: 'Пользовательские типы',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'Базы данных',
|
||||
new: 'Новая диаграмма',
|
||||
actions: {
|
||||
actions: 'Действия',
|
||||
new: 'Новая...',
|
||||
browse: 'Обзор...',
|
||||
save: 'Сохранить',
|
||||
import: 'Импортировать базу данных',
|
||||
export_sql: 'Экспорт SQL',
|
||||
export_as: 'Экспортировать как',
|
||||
delete_diagram: 'Удалить диаграмму',
|
||||
delete_diagram: 'Удалить',
|
||||
},
|
||||
edit: {
|
||||
edit: 'Изменение',
|
||||
@@ -73,10 +73,10 @@ export const ru: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'Переупорядочить диаграмму',
|
||||
title: 'Автоматическая расстановка диаграммы',
|
||||
description:
|
||||
'Это действие переставит все таблицы на диаграмме. Хотите продолжить?',
|
||||
reorder: 'Изменить порядок',
|
||||
reorder: 'Автоматическая расстановка',
|
||||
cancel: 'Отменить',
|
||||
},
|
||||
|
||||
@@ -246,6 +246,7 @@ export const ru: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'Значения перечисления не определены',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -268,7 +269,7 @@ export const ru: LanguageTranslation = {
|
||||
show_all: 'Показать все',
|
||||
undo: 'Отменить',
|
||||
redo: 'Вернуть',
|
||||
reorder_diagram: 'Переупорядочить диаграмму',
|
||||
reorder_diagram: 'Автоматическая расстановка диаграммы',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -312,7 +313,7 @@ export const ru: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'Открыть диаграмму',
|
||||
title: 'Открыть базу данных',
|
||||
description:
|
||||
'Выберите диаграмму, которую нужно открыть, из списка ниже.',
|
||||
table_columns: {
|
||||
@@ -323,6 +324,12 @@ export const ru: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'Отмена',
|
||||
open: 'Открыть',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'Открыть',
|
||||
duplicate: 'Дублировать',
|
||||
delete: 'Удалить',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,15 +12,15 @@ export const te: LanguageTranslation = {
|
||||
custom_types: 'కస్టమ్ టైప్స్',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'డేటాబేస్లు',
|
||||
new: 'కొత్త డైగ్రాం',
|
||||
actions: {
|
||||
actions: 'చర్యలు',
|
||||
new: 'కొత్తది...',
|
||||
browse: 'బ్రాఉజ్ చేయండి...',
|
||||
save: 'సేవ్',
|
||||
import: 'డేటాబేస్ను దిగుమతి చేసుకోండి',
|
||||
export_sql: 'SQL ఎగుమతి',
|
||||
export_as: 'వగా ఎగుమతి చేయండి',
|
||||
delete_diagram: 'చిత్రాన్ని తొలగించండి',
|
||||
delete_diagram: 'తొలగించండి',
|
||||
},
|
||||
edit: {
|
||||
edit: 'సవరించు',
|
||||
@@ -75,10 +75,10 @@ export const te: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'చిత్రాన్ని పునఃసరిచేయండి',
|
||||
title: 'చిత్రాన్ని స్వయంచాలకంగా అమర్చండి',
|
||||
description:
|
||||
'ఈ చర్య చిత్రంలోని అన్ని పట్టికలను పునఃస్థాపిస్తుంది. మీరు కొనసాగించాలనుకుంటున్నారా?',
|
||||
reorder: 'పునఃసరిచేయండి',
|
||||
reorder: 'స్వయంచాలకంగా అమర్చండి',
|
||||
cancel: 'రద్దు',
|
||||
},
|
||||
|
||||
@@ -250,6 +250,7 @@ export const te: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'ఏ enum విలువలు నిర్వచించబడలేదు',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -272,7 +273,7 @@ export const te: LanguageTranslation = {
|
||||
show_all: 'అన్ని చూపించు',
|
||||
undo: 'తిరిగి చేయు',
|
||||
redo: 'మరలా చేయు',
|
||||
reorder_diagram: 'చిత్రాన్ని పునఃసరిచేయండి',
|
||||
reorder_diagram: 'చిత్రాన్ని స్వయంచాలకంగా అమర్చండి',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -317,7 +318,7 @@ export const te: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'చిత్రం తెరవండి',
|
||||
title: 'డేటాబేస్ తెరవండి',
|
||||
description: 'కింద ఉన్న జాబితా నుండి చిత్రాన్ని ఎంచుకోండి.',
|
||||
table_columns: {
|
||||
name: 'పేరు',
|
||||
@@ -327,6 +328,12 @@ export const te: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'రద్దు',
|
||||
open: 'తెరవు',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'తెరవు',
|
||||
duplicate: 'నకలు',
|
||||
delete: 'తొలగించు',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,15 +12,15 @@ export const tr: LanguageTranslation = {
|
||||
custom_types: 'Özel Tipler',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'Veritabanları',
|
||||
new: 'Yeni Diyagram',
|
||||
actions: {
|
||||
actions: 'Eylemler',
|
||||
new: 'Yeni...',
|
||||
browse: 'Gözat...',
|
||||
save: 'Kaydet',
|
||||
import: 'Veritabanı İçe Aktar',
|
||||
export_sql: 'SQL Olarak Dışa Aktar',
|
||||
export_as: 'Olarak Dışa Aktar',
|
||||
delete_diagram: 'Diyagramı Sil',
|
||||
delete_diagram: 'Sil',
|
||||
},
|
||||
edit: {
|
||||
edit: 'Düzenle',
|
||||
@@ -75,10 +75,10 @@ export const tr: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'Diyagramı Yeniden Sırala',
|
||||
title: 'Diyagramı Otomatik Düzenle',
|
||||
description:
|
||||
'Bu işlem tüm tabloları yeniden düzenleyecektir. Devam etmek istiyor musunuz?',
|
||||
reorder: 'Yeniden Sırala',
|
||||
reorder: 'Otomatik Düzenle',
|
||||
cancel: 'İptal',
|
||||
},
|
||||
|
||||
@@ -249,6 +249,7 @@ export const tr: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'Tanımlanmış enum değeri yok',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -270,7 +271,7 @@ export const tr: LanguageTranslation = {
|
||||
show_all: 'Hepsini Gör',
|
||||
undo: 'Geri Al',
|
||||
redo: 'Yinele',
|
||||
reorder_diagram: 'Diyagramı Yeniden Sırala',
|
||||
reorder_diagram: 'Diyagramı Otomatik Düzenle',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -312,7 +313,7 @@ export const tr: LanguageTranslation = {
|
||||
import: 'İçe Aktar',
|
||||
},
|
||||
open_diagram_dialog: {
|
||||
title: 'Diyagramı Aç',
|
||||
title: 'Veritabanı Aç',
|
||||
description: 'Aşağıdaki listeden açmak için bir diyagram seçin.',
|
||||
table_columns: {
|
||||
name: 'Ad',
|
||||
@@ -322,6 +323,12 @@ export const tr: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'İptal',
|
||||
open: 'Aç',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'Aç',
|
||||
duplicate: 'Kopyala',
|
||||
delete: 'Sil',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,15 +12,15 @@ export const uk: LanguageTranslation = {
|
||||
custom_types: 'Користувацькі типи',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'Бази даних',
|
||||
new: 'Нова діаграма',
|
||||
actions: {
|
||||
actions: 'Дії',
|
||||
new: 'Нова...',
|
||||
browse: 'Огляд...',
|
||||
save: 'Зберегти',
|
||||
import: 'Імпорт бази даних',
|
||||
export_sql: 'Експорт SQL',
|
||||
export_as: 'Експортувати як',
|
||||
delete_diagram: 'Видалити діаграму',
|
||||
delete_diagram: 'Видалити',
|
||||
},
|
||||
edit: {
|
||||
edit: 'Редагувати',
|
||||
@@ -73,10 +73,10 @@ export const uk: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'Перевпорядкувати діаграму',
|
||||
title: 'Автоматичне розміщення діаграми',
|
||||
description:
|
||||
'Ця дія перевпорядкує всі таблиці на діаграмі. Хочете продовжити?',
|
||||
reorder: 'Перевпорядкувати',
|
||||
reorder: 'Автоматичне розміщення',
|
||||
cancel: 'Скасувати',
|
||||
},
|
||||
|
||||
@@ -247,6 +247,7 @@ export const uk: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'Значення переліку не визначені',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -269,7 +270,7 @@ export const uk: LanguageTranslation = {
|
||||
show_all: 'Показати все',
|
||||
undo: 'Скасувати',
|
||||
redo: 'Повторити',
|
||||
reorder_diagram: 'Перевпорядкувати діаграму',
|
||||
reorder_diagram: 'Автоматичне розміщення діаграми',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -313,7 +314,7 @@ export const uk: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'Відкрити діаграму',
|
||||
title: 'Відкрити базу даних',
|
||||
description:
|
||||
'Виберіть діаграму, яку потрібно відкрити, зі списку нижче.',
|
||||
table_columns: {
|
||||
@@ -324,6 +325,12 @@ export const uk: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'Скасувати',
|
||||
open: 'Відкрити',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'Відкрити',
|
||||
duplicate: 'Дублювати',
|
||||
delete: 'Видалити',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,15 +12,15 @@ export const vi: LanguageTranslation = {
|
||||
custom_types: 'Kiểu tùy chỉnh',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: 'Cơ sở dữ liệu',
|
||||
new: 'Sơ đồ mới',
|
||||
actions: {
|
||||
actions: 'Hành động',
|
||||
new: 'Mới...',
|
||||
browse: 'Duyệt...',
|
||||
save: 'Lưu',
|
||||
import: 'Nhập cơ sở dữ liệu',
|
||||
export_sql: 'Xuất SQL',
|
||||
export_as: 'Xuất thành',
|
||||
delete_diagram: 'Xóa sơ đồ',
|
||||
delete_diagram: 'Xóa',
|
||||
},
|
||||
edit: {
|
||||
edit: 'Sửa',
|
||||
@@ -74,10 +74,10 @@ export const vi: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'Sắp xếp lại sơ đồ',
|
||||
title: 'Tự động sắp xếp sơ đồ',
|
||||
description:
|
||||
'Hành động này sẽ sắp xếp lại tất cả các bảng trong sơ đồ. Bạn có muốn tiếp tục không?',
|
||||
reorder: 'Sắp xếp',
|
||||
reorder: 'Tự động sắp xếp',
|
||||
cancel: 'Hủy',
|
||||
},
|
||||
|
||||
@@ -248,6 +248,7 @@ export const vi: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: 'Không có giá trị enum được định nghĩa',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -270,7 +271,7 @@ export const vi: LanguageTranslation = {
|
||||
show_all: 'Hiển thị tất cả',
|
||||
undo: 'Hoàn tác',
|
||||
redo: 'Làm lại',
|
||||
reorder_diagram: 'Sắp xếp lại sơ đồ',
|
||||
reorder_diagram: 'Tự động sắp xếp sơ đồ',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -314,7 +315,7 @@ export const vi: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'Mở sơ đồ',
|
||||
title: 'Mở cơ sở dữ liệu',
|
||||
description: 'Chọn sơ đồ để mở từ danh sách bên dưới.',
|
||||
table_columns: {
|
||||
name: 'Tên',
|
||||
@@ -324,6 +325,12 @@ export const vi: LanguageTranslation = {
|
||||
},
|
||||
cancel: 'Hủy',
|
||||
open: 'Mở',
|
||||
|
||||
diagram_actions: {
|
||||
open: 'Mở',
|
||||
duplicate: 'Nhân bản',
|
||||
delete: 'Xóa',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,15 +12,15 @@ export const zh_CN: LanguageTranslation = {
|
||||
custom_types: '自定义类型',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: '数据库',
|
||||
new: '新建关系图',
|
||||
actions: {
|
||||
actions: '操作',
|
||||
new: '新建...',
|
||||
browse: '浏览...',
|
||||
save: '保存',
|
||||
import: '导入数据库',
|
||||
export_sql: '导出 SQL 语句',
|
||||
export_as: '导出为',
|
||||
delete_diagram: '删除关系图',
|
||||
delete_diagram: '删除',
|
||||
},
|
||||
edit: {
|
||||
edit: '编辑',
|
||||
@@ -72,9 +72,9 @@ export const zh_CN: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: '重新排列关系图',
|
||||
title: '自动排列关系图',
|
||||
description: '此操作将重新排列关系图中的所有表。是否要继续?',
|
||||
reorder: '重新排列',
|
||||
reorder: '自动排列',
|
||||
cancel: '取消',
|
||||
},
|
||||
|
||||
@@ -245,6 +245,7 @@ export const zh_CN: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: '没有定义枚举值',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -267,7 +268,7 @@ export const zh_CN: LanguageTranslation = {
|
||||
show_all: '展示全部',
|
||||
undo: '撤销',
|
||||
redo: '重做',
|
||||
reorder_diagram: '重新排列关系图',
|
||||
reorder_diagram: '自动排列关系图',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -311,7 +312,7 @@ export const zh_CN: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: '打开关系图',
|
||||
title: '打开数据库',
|
||||
description: '从下面的列表中选择一个图表打开。',
|
||||
table_columns: {
|
||||
name: '名称',
|
||||
@@ -321,6 +322,12 @@ export const zh_CN: LanguageTranslation = {
|
||||
},
|
||||
cancel: '取消',
|
||||
open: '打开',
|
||||
|
||||
diagram_actions: {
|
||||
open: '打开',
|
||||
duplicate: '复制',
|
||||
delete: '删除',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -12,15 +12,15 @@ export const zh_TW: LanguageTranslation = {
|
||||
custom_types: '自定義類型',
|
||||
},
|
||||
menu: {
|
||||
databases: {
|
||||
databases: '資料庫',
|
||||
new: '新增圖表',
|
||||
actions: {
|
||||
actions: '操作',
|
||||
new: '新增...',
|
||||
browse: '瀏覽...',
|
||||
save: '儲存',
|
||||
import: '匯入資料庫',
|
||||
export_sql: '匯出 SQL',
|
||||
export_as: '匯出為特定格式',
|
||||
delete_diagram: '刪除圖表',
|
||||
delete_diagram: '刪除',
|
||||
},
|
||||
edit: {
|
||||
edit: '編輯',
|
||||
@@ -72,9 +72,9 @@ export const zh_TW: LanguageTranslation = {
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: '重新排列圖表',
|
||||
title: '自動排列圖表',
|
||||
description: '此操作將重新排列圖表中的所有表格。是否繼續?',
|
||||
reorder: '重新排列',
|
||||
reorder: '自動排列',
|
||||
cancel: '取消',
|
||||
},
|
||||
|
||||
@@ -245,6 +245,7 @@ export const zh_TW: LanguageTranslation = {
|
||||
enum_values: 'Enum Values',
|
||||
composite_fields: 'Fields',
|
||||
no_fields: 'No fields defined',
|
||||
no_values: '沒有定義列舉值',
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
@@ -267,7 +268,7 @@ export const zh_TW: LanguageTranslation = {
|
||||
show_all: '顯示全部',
|
||||
undo: '復原',
|
||||
redo: '重做',
|
||||
reorder_diagram: '重新排列圖表',
|
||||
reorder_diagram: '自動排列圖表',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
@@ -310,7 +311,7 @@ export const zh_TW: LanguageTranslation = {
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: '開啟圖表',
|
||||
title: '開啟資料庫',
|
||||
description: '請從以下列表中選擇一個圖表。',
|
||||
table_columns: {
|
||||
name: '名稱',
|
||||
@@ -320,6 +321,12 @@ export const zh_TW: LanguageTranslation = {
|
||||
},
|
||||
cancel: '取消',
|
||||
open: '開啟',
|
||||
|
||||
diagram_actions: {
|
||||
open: '開啟',
|
||||
duplicate: '複製',
|
||||
delete: '刪除',
|
||||
},
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
|
||||
@@ -18,4 +18,7 @@
|
||||
|
||||
.marker-definitions {
|
||||
}
|
||||
|
||||
.nodrag {
|
||||
}
|
||||
}
|
||||
|
||||
@@ -165,21 +165,3 @@ export const supportsAutoIncrementDataType = (
|
||||
'decimal',
|
||||
].includes(dataTypeName.toLocaleLowerCase());
|
||||
};
|
||||
|
||||
export const supportsArrayDataType = (dataTypeName: string): boolean => {
|
||||
// Types that do NOT support arrays in PostgreSQL
|
||||
const unsupportedTypes = [
|
||||
'serial',
|
||||
'bigserial',
|
||||
'smallserial',
|
||||
'serial2',
|
||||
'serial4',
|
||||
'serial8',
|
||||
'xml',
|
||||
'money',
|
||||
];
|
||||
|
||||
// Check if the type is in the unsupported list
|
||||
const normalizedType = dataTypeName.toLowerCase();
|
||||
return !unsupportedTypes.includes(normalizedType);
|
||||
};
|
||||
|
||||
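As a quick illustration of the array-support check added above, the helper returns false only for the listed serial/xml/money types and true for everything else after lowercasing (the import path is assumed for this sketch):

```ts
import { supportsArrayDataType } from '@/lib/data/data-types/postgres-data-types'; // path assumed

// Serial, xml, and money types cannot be declared as arrays in PostgreSQL;
// every other type name falls through to "supported".
console.log(supportsArrayDataType('serial')); // false
console.log(supportsArrayDataType('money')); // false
console.log(supportsArrayDataType('text')); // true
console.log(supportsArrayDataType('NUMERIC')); // true – the check lowercases the name first
```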
@@ -12,6 +12,7 @@ export const postgresDataTypes: readonly DataTypeData[] = [
|
||||
{ name: 'text', id: 'text', usageLevel: 1 },
|
||||
{ name: 'boolean', id: 'boolean', usageLevel: 1 },
|
||||
{ name: 'timestamp', id: 'timestamp', usageLevel: 1 },
|
||||
{ name: 'timestamptz', id: 'timestamptz', usageLevel: 1 },
|
||||
{ name: 'date', id: 'date', usageLevel: 1 },
|
||||
|
||||
// Level 2 - Second most common types
|
||||
@@ -42,6 +43,7 @@ export const postgresDataTypes: readonly DataTypeData[] = [
|
||||
id: 'timestamp_with_time_zone',
|
||||
usageLevel: 2,
|
||||
},
|
||||
{ name: 'int', id: 'int', usageLevel: 2 },
|
||||
|
||||
// Less common types
|
||||
{
|
||||
|
||||
src/lib/data/import-metadata/import/custom-types.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
import type { DBCustomType, DBCustomTypeKind } from '@/lib/domain';
|
||||
import { schemaNameToDomainSchemaName } from '@/lib/domain';
|
||||
import type { DBCustomTypeInfo } from '../metadata-types/custom-type-info';
|
||||
import { generateId } from '@/lib/utils';
|
||||
|
||||
export const createCustomTypesFromMetadata = ({
|
||||
customTypes,
|
||||
}: {
|
||||
customTypes: DBCustomTypeInfo[];
|
||||
}): DBCustomType[] => {
|
||||
return customTypes.map((customType) => {
|
||||
return {
|
||||
id: generateId(),
|
||||
schema: schemaNameToDomainSchemaName(customType.schema),
|
||||
name: customType.type,
|
||||
kind: customType.kind as DBCustomTypeKind,
|
||||
values: customType.values,
|
||||
fields: customType.fields,
|
||||
};
|
||||
});
|
||||
};
|
||||
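A minimal usage sketch of the new custom-types mapper; the metadata literal below is abridged to the properties the mapper reads and is invented for illustration only:

```ts
import { createCustomTypesFromMetadata } from '@/lib/data/import-metadata/import/custom-types';

// Hypothetical metadata describing one enum type in the "public" schema.
const customTypes = createCustomTypesFromMetadata({
    customTypes: [
        {
            schema: 'public',
            type: 'mood',
            kind: 'enum',
            values: ['happy', 'sad'],
            fields: [],
        },
    ],
});

// Each entry receives a fresh id and a normalized schema name.
console.log(customTypes[0].name); // 'mood'
```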
src/lib/data/import-metadata/import/dependencies.ts (new file, 351 lines)
@@ -0,0 +1,351 @@
|
||||
import { generateId } from '@/lib/utils';
|
||||
import type { AST } from 'node-sql-parser';
|
||||
import type { DBDependency, DBTable } from '@/lib/domain';
|
||||
import { DatabaseType, schemaNameToDomainSchemaName } from '@/lib/domain';
|
||||
import type { ViewInfo } from '../metadata-types/view-info';
|
||||
import { decodeViewDefinition } from './tables';
|
||||
|
||||
const astDatabaseTypes: Record<DatabaseType, string> = {
|
||||
[DatabaseType.POSTGRESQL]: 'postgresql',
|
||||
[DatabaseType.MYSQL]: 'postgresql',
|
||||
[DatabaseType.MARIADB]: 'postgresql',
|
||||
[DatabaseType.GENERIC]: 'postgresql',
|
||||
[DatabaseType.SQLITE]: 'postgresql',
|
||||
[DatabaseType.SQL_SERVER]: 'postgresql',
|
||||
[DatabaseType.CLICKHOUSE]: 'postgresql',
|
||||
[DatabaseType.COCKROACHDB]: 'postgresql',
|
||||
[DatabaseType.ORACLE]: 'postgresql',
|
||||
};
|
||||
|
||||
export const createDependenciesFromMetadata = async ({
|
||||
views,
|
||||
tables,
|
||||
databaseType,
|
||||
}: {
|
||||
views: ViewInfo[];
|
||||
tables: DBTable[];
|
||||
databaseType: DatabaseType;
|
||||
}): Promise<DBDependency[]> => {
|
||||
if (!views || views.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const { Parser } = await import('node-sql-parser');
|
||||
const parser = new Parser();
|
||||
|
||||
const dependencies = views
|
||||
.flatMap((view) => {
|
||||
const viewSchema = schemaNameToDomainSchemaName(view.schema);
|
||||
const viewTable = tables.find(
|
||||
(table) =>
|
||||
table.name === view.view_name && viewSchema === table.schema
|
||||
);
|
||||
|
||||
if (!viewTable) {
|
||||
console.warn(
|
||||
`Source table for view ${view.view_name} not found (schema: ${viewSchema})`
|
||||
);
|
||||
return []; // Skip this view and proceed to the next
|
||||
}
|
||||
|
||||
if (view.view_definition) {
|
||||
try {
|
||||
const decodedViewDefinition = decodeViewDefinition(
|
||||
databaseType,
|
||||
view.view_definition
|
||||
);
|
||||
|
||||
let modifiedViewDefinition = '';
|
||||
if (
|
||||
databaseType === DatabaseType.MYSQL ||
|
||||
databaseType === DatabaseType.MARIADB
|
||||
) {
|
||||
modifiedViewDefinition = preprocessViewDefinitionMySQL(
|
||||
decodedViewDefinition
|
||||
);
|
||||
} else if (databaseType === DatabaseType.SQL_SERVER) {
|
||||
modifiedViewDefinition =
|
||||
preprocessViewDefinitionSQLServer(
|
||||
decodedViewDefinition
|
||||
);
|
||||
} else {
|
||||
modifiedViewDefinition = preprocessViewDefinition(
|
||||
decodedViewDefinition
|
||||
);
|
||||
}
|
||||
|
||||
// Parse using the appropriate dialect
|
||||
const ast = parser.astify(modifiedViewDefinition, {
|
||||
database: astDatabaseTypes[databaseType],
|
||||
type: 'select', // Parsing a SELECT statement
|
||||
});
|
||||
|
||||
let relatedTables = extractTablesFromAST(ast);
|
||||
|
||||
// Filter out duplicate tables without schema
|
||||
relatedTables = filterDuplicateTables(relatedTables);
|
||||
|
||||
return relatedTables.map((relTable) => {
|
||||
const relSchema = relTable.schema || view.schema; // Use view's schema if relSchema is undefined
|
||||
const relTableName = relTable.tableName;
|
||||
|
||||
const table = tables.find(
|
||||
(table) =>
|
||||
table.name === relTableName &&
|
||||
(table.schema || '') === relSchema
|
||||
);
|
||||
|
||||
if (table) {
|
||||
const dependency: DBDependency = {
|
||||
id: generateId(),
|
||||
schema: view.schema,
|
||||
tableId: table.id, // related table
|
||||
dependentSchema: table.schema,
|
||||
dependentTableId: viewTable.id, // dependent view
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
|
||||
return dependency;
|
||||
} else {
|
||||
console.warn(
|
||||
`Dependent table ${relSchema}.${relTableName} not found for view ${view.schema}.${view.view_name}`
|
||||
);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
console.error(
|
||||
`Error parsing view ${view.schema}.${view.view_name}:`,
|
||||
error
|
||||
);
|
||||
return [];
|
||||
}
|
||||
} else {
|
||||
console.warn(
|
||||
`View definition missing for ${view.schema}.${view.view_name}`
|
||||
);
|
||||
return [];
|
||||
}
|
||||
})
|
||||
.filter((dependency) => dependency !== null);
|
||||
|
||||
return dependencies;
|
||||
};
|
||||
|
||||
// Filter out duplicate table references, preferring entries that carry an explicit schema
|
||||
function filterDuplicateTables(
|
||||
tables: { schema?: string; tableName: string }[]
|
||||
): { schema?: string; tableName: string }[] {
|
||||
const tableMap = new Map<string, { schema?: string; tableName: string }>();
|
||||
|
||||
for (const table of tables) {
|
||||
const key = table.tableName;
|
||||
const existingTable = tableMap.get(key);
|
||||
|
||||
if (!existingTable || (table.schema && !existingTable.schema)) {
|
||||
tableMap.set(key, table);
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(tableMap.values());
|
||||
}
|
||||
|
||||
// Preprocess the view_definition to remove schema from CREATE VIEW
|
||||
function preprocessViewDefinition(viewDefinition: string): string {
|
||||
if (!viewDefinition) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Remove leading and trailing whitespace
|
||||
viewDefinition = viewDefinition.replace(/\s+/g, ' ').trim();
|
||||
|
||||
// Replace escaped double quotes with regular ones
|
||||
viewDefinition = viewDefinition.replace(/\\"/g, '"');
|
||||
|
||||
// Replace 'CREATE MATERIALIZED VIEW' with 'CREATE VIEW'
|
||||
viewDefinition = viewDefinition.replace(
|
||||
/CREATE\s+MATERIALIZED\s+VIEW/i,
|
||||
'CREATE VIEW'
|
||||
);
|
||||
|
||||
// Regular expression to match 'CREATE VIEW [schema.]view_name [ (column definitions) ] AS'
|
||||
// This regex captures the view name and skips any content between the view name and 'AS'
|
||||
const regex =
|
||||
/CREATE\s+VIEW\s+(?:(?:`[^`]+`|"[^"]+"|\w+)\.)?(?:`([^`]+)`|"([^"]+)"|(\w+))[\s\S]*?\bAS\b\s+/i;
|
||||
|
||||
const match = viewDefinition.match(regex);
|
||||
let modifiedDefinition: string;
|
||||
|
||||
if (match) {
|
||||
const viewName = match[1] || match[2] || match[3];
|
||||
// Extract the SQL after the 'AS' keyword
|
||||
const restOfDefinition = viewDefinition.substring(
|
||||
match.index! + match[0].length
|
||||
);
|
||||
|
||||
// Replace double-quoted identifiers with unquoted ones
|
||||
let modifiedSQL = restOfDefinition.replace(/"(\w+)"/g, '$1');
|
||||
|
||||
// Replace '::' type casts with 'CAST' expressions
|
||||
modifiedSQL = modifiedSQL.replace(
|
||||
/\(([^()]+)\)::(\w+)/g,
|
||||
'CAST($1 AS $2)'
|
||||
);
|
||||
|
||||
// Remove ClickHouse-specific syntax that may still be present
|
||||
// For example, remove SETTINGS clauses inside the SELECT statement
|
||||
modifiedSQL = modifiedSQL.replace(/\bSETTINGS\b[\s\S]*$/i, '');
|
||||
|
||||
modifiedDefinition = `CREATE VIEW ${viewName} AS ${modifiedSQL}`;
|
||||
} else {
|
||||
console.warn('Could not preprocess view definition:', viewDefinition);
|
||||
modifiedDefinition = viewDefinition;
|
||||
}
|
||||
|
||||
return modifiedDefinition;
|
||||
}
|
||||
|
||||
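For instance, a PostgreSQL-style definition with a schema-qualified name and a `::` cast would be rewritten roughly as follows. This is a sketch of the intended behavior of the (module-private) helper above, not an exhaustive spec:

```ts
const input =
    'CREATE MATERIALIZED VIEW "public"."order_totals" AS ' +
    'SELECT (price * quantity)::numeric AS total FROM orders SETTINGS allow_experimental = 1';

// preprocessViewDefinition strips the schema, downgrades MATERIALIZED VIEW to VIEW,
// rewrites the ::numeric cast as CAST(... AS numeric) and drops the trailing SETTINGS clause:
// 'CREATE VIEW order_totals AS SELECT CAST(price * quantity AS numeric) AS total FROM orders'
console.log(preprocessViewDefinition(input));
```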
// Preprocess the view_definition for SQL Server
|
||||
function preprocessViewDefinitionSQLServer(viewDefinition: string): string {
|
||||
if (!viewDefinition) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Remove BOM if present
|
||||
viewDefinition = viewDefinition.replace(/^\uFEFF/, '');
|
||||
|
||||
// Normalize whitespace
|
||||
viewDefinition = viewDefinition.replace(/\s+/g, ' ').trim();
|
||||
|
||||
// Remove square brackets and replace with double quotes
|
||||
viewDefinition = viewDefinition.replace(/\[([^\]]+)\]/g, '"$1"');
|
||||
|
||||
// Remove database names from fully qualified identifiers
|
||||
viewDefinition = viewDefinition.replace(
|
||||
/"([a-zA-Z0-9_]+)"\."([a-zA-Z0-9_]+)"\."([a-zA-Z0-9_]+)"/g,
|
||||
'"$2"."$3"'
|
||||
);
|
||||
|
||||
// Replace SQL Server functions with PostgreSQL equivalents
|
||||
viewDefinition = viewDefinition.replace(/\bGETDATE\(\)/gi, 'NOW()');
|
||||
viewDefinition = viewDefinition.replace(/\bISNULL\(/gi, 'COALESCE(');
|
||||
|
||||
// Replace 'TOP N' with 'LIMIT N' at the end of the query
|
||||
const topMatch = viewDefinition.match(/SELECT\s+TOP\s+(\d+)/i);
|
||||
if (topMatch) {
|
||||
const topN = topMatch[1];
|
||||
viewDefinition = viewDefinition.replace(
|
||||
/SELECT\s+TOP\s+\d+/i,
|
||||
'SELECT'
|
||||
);
|
||||
viewDefinition = viewDefinition.replace(/;+\s*$/, ''); // Remove semicolons at the end
|
||||
viewDefinition += ` LIMIT ${topN}`;
|
||||
}
|
||||
|
||||
viewDefinition = viewDefinition.replace(/\n/g, ''); // Remove newlines
|
||||
|
||||
// Adjust CREATE VIEW syntax
|
||||
const regex =
|
||||
/CREATE\s+VIEW\s+(?:"?([^".\s]+)"?\.)?"?([^".\s]+)"?\s+AS\s+/i;
|
||||
const match = viewDefinition.match(regex);
|
||||
let modifiedDefinition: string;
|
||||
|
||||
if (match) {
|
||||
const viewName = match[2];
|
||||
const modifiedSQL = viewDefinition.substring(
|
||||
match.index! + match[0].length
|
||||
);
|
||||
|
||||
// Remove semicolons at the end
|
||||
const finalSQL = modifiedSQL.replace(/;+\s*$/, '');
|
||||
|
||||
modifiedDefinition = `CREATE VIEW "${viewName}" AS ${finalSQL}`;
|
||||
} else {
|
||||
console.warn('Could not preprocess view definition:', viewDefinition);
|
||||
modifiedDefinition = viewDefinition;
|
||||
}
|
||||
|
||||
return modifiedDefinition;
|
||||
}
|
||||
|
||||
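A small before/after sketch of the SQL Server path (illustrative input; the helper is module-private and the exact output whitespace may differ):

```ts
const input =
    'CREATE VIEW [dbo].[recent_orders] AS SELECT TOP 10 * FROM [dbo].[orders] WHERE created_at > GETDATE();';

// Brackets become double quotes, GETDATE() becomes NOW(), the schema prefix is dropped
// from the view name, and TOP 10 is moved to a trailing LIMIT 10:
// 'CREATE VIEW "recent_orders" AS SELECT * FROM "dbo"."orders" WHERE created_at > NOW() LIMIT 10'
console.log(preprocessViewDefinitionSQLServer(input));
```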
// Preprocess the view_definition to remove schema from CREATE VIEW
|
||||
function preprocessViewDefinitionMySQL(viewDefinition: string): string {
|
||||
if (!viewDefinition) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Remove any trailing semicolons
|
||||
viewDefinition = viewDefinition.replace(/;\s*$/, '');
|
||||
|
||||
// Remove backticks from identifiers
|
||||
viewDefinition = viewDefinition.replace(/`/g, '');
|
||||
|
||||
// Remove unnecessary parentheses around joins and ON clauses
|
||||
viewDefinition = removeRedundantParentheses(viewDefinition);
|
||||
|
||||
return viewDefinition;
|
||||
}
|
||||
|
||||
function removeRedundantParentheses(sql: string): string {
|
||||
// Regular expressions to match unnecessary parentheses
|
||||
const patterns = [
|
||||
/\(\s*(JOIN\s+[^()]+?)\s*\)/gi,
|
||||
/\(\s*(ON\s+[^()]+?)\s*\)/gi,
|
||||
// Additional patterns if necessary
|
||||
];
|
||||
|
||||
let prevSql;
|
||||
do {
|
||||
prevSql = sql;
|
||||
patterns.forEach((pattern) => {
|
||||
sql = sql.replace(pattern, '$1');
|
||||
});
|
||||
} while (sql !== prevSql);
|
||||
|
||||
return sql;
|
||||
}
|
||||
|
||||
function extractTablesFromAST(
|
||||
ast: AST | AST[]
|
||||
): { schema?: string; tableName: string }[] {
|
||||
const tablesMap = new Map<string, { schema: string; tableName: string }>();
|
||||
const visitedNodes = new Set();
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
function traverse(node: any) {
|
||||
if (!node || visitedNodes.has(node)) return;
|
||||
visitedNodes.add(node);
|
||||
|
||||
if (Array.isArray(node)) {
|
||||
node.forEach(traverse);
|
||||
} else if (typeof node === 'object') {
|
||||
// Check if node represents a table
|
||||
if (
|
||||
Object.hasOwnProperty.call(node, 'table') &&
|
||||
typeof node.table === 'string'
|
||||
) {
|
||||
let schema = node.db || node.schema;
|
||||
const tableName = node.table;
|
||||
if (tableName) {
|
||||
// Assign default schema if undefined
|
||||
schema = schemaNameToDomainSchemaName(schema) || '';
|
||||
const key = `${schema}.${tableName}`;
|
||||
if (!tablesMap.has(key)) {
|
||||
tablesMap.set(key, { schema, tableName });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Recursively traverse all properties
|
||||
for (const key in node) {
|
||||
if (Object.hasOwnProperty.call(node, key)) {
|
||||
traverse(node[key]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
traverse(ast);
|
||||
|
||||
return Array.from(tablesMap.values());
|
||||
}
|
||||
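Putting the pieces together, dependency extraction boils down to parsing the cleaned-up view definition and walking the AST for table references. A condensed mirror of that flow, using the same node-sql-parser call as the code above:

```ts
import { Parser } from 'node-sql-parser';

// Mirrors the parsing step inside createDependenciesFromMetadata: the preprocessed
// 'CREATE VIEW ... AS SELECT ...' text is parsed with the PostgreSQL grammar and the
// resulting AST is handed to extractTablesFromAST, which collects every node that
// carries a string `table` property.
const parser = new Parser();
const ast = parser.astify(
    'CREATE VIEW order_totals AS SELECT o.id, c.name FROM orders o JOIN customers c ON c.id = o.customer_id',
    { database: 'postgresql', type: 'select' }
);

// extractTablesFromAST(ast) would then yield roughly:
// [{ schema: '', tableName: 'orders' }, { schema: '', tableName: 'customers' }]
```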
src/lib/data/import-metadata/import/fields.ts (new file, 67 lines)
@@ -0,0 +1,67 @@
|
||||
import type { DBField } from '@/lib/domain';
|
||||
import type { ColumnInfo } from '../metadata-types/column-info';
|
||||
import type { AggregatedIndexInfo } from '../metadata-types/index-info';
|
||||
import type { PrimaryKeyInfo } from '../metadata-types/primary-key-info';
|
||||
import type { TableInfo } from '../metadata-types/table-info';
|
||||
import { generateId } from '@/lib/utils';
|
||||
|
||||
export const createFieldsFromMetadata = ({
|
||||
tableColumns,
|
||||
tablePrimaryKeys,
|
||||
aggregatedIndexes,
|
||||
}: {
|
||||
tableColumns: ColumnInfo[];
|
||||
tableSchema?: string;
|
||||
tableInfo: TableInfo;
|
||||
tablePrimaryKeys: PrimaryKeyInfo[];
|
||||
aggregatedIndexes: AggregatedIndexInfo[];
|
||||
}) => {
|
||||
const uniqueColumns = tableColumns.reduce((acc, col) => {
|
||||
if (!acc.has(col.name)) {
|
||||
acc.set(col.name, col);
|
||||
}
|
||||
return acc;
|
||||
}, new Map<string, ColumnInfo>());
|
||||
|
||||
const sortedColumns = Array.from(uniqueColumns.values()).sort(
|
||||
(a, b) => a.ordinal_position - b.ordinal_position
|
||||
);
|
||||
|
||||
const tablePrimaryKeysColumns = tablePrimaryKeys.map((pk) =>
|
||||
pk.column.trim()
|
||||
);
|
||||
|
||||
return sortedColumns.map(
|
||||
(col: ColumnInfo): DBField => ({
|
||||
id: generateId(),
|
||||
name: col.name,
|
||||
type: {
|
||||
id: col.type.split(' ').join('_').toLowerCase(),
|
||||
name: col.type.toLowerCase(),
|
||||
},
|
||||
primaryKey: tablePrimaryKeysColumns.includes(col.name),
|
||||
unique: Object.values(aggregatedIndexes).some(
|
||||
(idx) =>
|
||||
idx.unique &&
|
||||
idx.columns.length === 1 &&
|
||||
idx.columns[0].name === col.name
|
||||
),
|
||||
nullable: Boolean(col.nullable),
|
||||
...(col.character_maximum_length &&
|
||||
col.character_maximum_length !== 'null'
|
||||
? { characterMaximumLength: col.character_maximum_length }
|
||||
: {}),
|
||||
...(col.precision?.precision
|
||||
? { precision: col.precision.precision }
|
||||
: {}),
|
||||
...(col.precision?.scale ? { scale: col.precision.scale } : {}),
|
||||
...(col.default ? { default: col.default } : {}),
|
||||
...(col.collation ? { collation: col.collation } : {}),
|
||||
...(col.is_identity !== undefined
|
||||
? { increment: col.is_identity }
|
||||
: {}),
|
||||
createdAt: Date.now(),
|
||||
comments: col.comment ? col.comment : undefined,
|
||||
})
|
||||
);
|
||||
};
|
||||
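The `unique` flag in the mapper above comes from a small predicate over the aggregated indexes: a field is unique only when some unique index covers exactly that one column. Restated in isolation with abridged types, for illustration only:

```ts
// True when some unique index covers exactly this single column.
type IndexLike = { unique: boolean; columns: { name: string }[] };

const isUniqueColumn = (indexes: IndexLike[], columnName: string): boolean =>
    indexes.some(
        (idx) =>
            idx.unique &&
            idx.columns.length === 1 &&
            idx.columns[0].name === columnName
    );

console.log(isUniqueColumn([{ unique: true, columns: [{ name: 'email' }] }], 'email')); // true
console.log(
    isUniqueColumn(
        [{ unique: true, columns: [{ name: 'tenant_id' }, { name: 'email' }] }],
        'email'
    )
); // false – composite unique index does not mark the field unique
```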
src/lib/data/import-metadata/import/index.ts (new file, 82 lines)
@@ -0,0 +1,82 @@
|
||||
import type { DatabaseEdition, Diagram } from '@/lib/domain';
|
||||
import { adjustTablePositions, DatabaseType } from '@/lib/domain';
|
||||
import { generateDiagramId } from '@/lib/utils';
|
||||
import type { DatabaseMetadata } from '../metadata-types/database-metadata';
|
||||
import { createCustomTypesFromMetadata } from './custom-types';
|
||||
import { createRelationshipsFromMetadata } from './relationships';
|
||||
import { createTablesFromMetadata } from './tables';
|
||||
import { createDependenciesFromMetadata } from './dependencies';
|
||||
|
||||
export const loadFromDatabaseMetadata = async ({
|
||||
databaseType,
|
||||
databaseMetadata,
|
||||
diagramNumber,
|
||||
databaseEdition,
|
||||
}: {
|
||||
databaseType: DatabaseType;
|
||||
databaseMetadata: DatabaseMetadata;
|
||||
diagramNumber?: number;
|
||||
databaseEdition?: DatabaseEdition;
|
||||
}): Promise<Diagram> => {
|
||||
const {
|
||||
fk_info: foreignKeys,
|
||||
views: views,
|
||||
custom_types: customTypes,
|
||||
} = databaseMetadata;
|
||||
|
||||
const tables = createTablesFromMetadata({
|
||||
databaseMetadata,
|
||||
databaseType,
|
||||
});
|
||||
|
||||
const relationships = createRelationshipsFromMetadata({
|
||||
foreignKeys,
|
||||
tables,
|
||||
});
|
||||
|
||||
const dependencies = await createDependenciesFromMetadata({
|
||||
views,
|
||||
tables,
|
||||
databaseType,
|
||||
});
|
||||
|
||||
const dbCustomTypes = customTypes
|
||||
? createCustomTypesFromMetadata({
|
||||
customTypes,
|
||||
})
|
||||
: [];
|
||||
|
||||
const adjustedTables = adjustTablePositions({
|
||||
tables,
|
||||
relationships,
|
||||
mode: 'perSchema',
|
||||
});
|
||||
|
||||
const sortedTables = adjustedTables.sort((a, b) => {
|
||||
if (a.isView === b.isView) {
|
||||
// Both are either tables or views, so sort alphabetically by name
|
||||
return a.name.localeCompare(b.name);
|
||||
}
|
||||
// If one is a view and the other is not, put tables first
|
||||
return a.isView ? 1 : -1;
|
||||
});
|
||||
|
||||
const diagram: Diagram = {
|
||||
id: generateDiagramId(),
|
||||
name: databaseMetadata.database_name
|
||||
? `${databaseMetadata.database_name}-db`
|
||||
: diagramNumber
|
||||
? `Diagram ${diagramNumber}`
|
||||
: 'New Diagram',
|
||||
databaseType: databaseType ?? DatabaseType.GENERIC,
|
||||
databaseEdition,
|
||||
tables: sortedTables,
|
||||
relationships,
|
||||
dependencies,
|
||||
customTypes: dbCustomTypes,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
return diagram;
|
||||
};
|
||||
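A minimal sketch of how the loader might be invoked once database metadata has been fetched; the import paths and the shape of `metadata` are assumptions based on the files in this diff:

```ts
import { DatabaseType } from '@/lib/domain';
import { loadFromDatabaseMetadata } from '@/lib/data/import-metadata/import';
import type { DatabaseMetadata } from '@/lib/data/import-metadata/metadata-types/database-metadata';

// `metadata` is assumed to be a parsed DatabaseMetadata payload from the import query.
declare const metadata: DatabaseMetadata;

const importDiagram = async () => {
    const diagram = await loadFromDatabaseMetadata({
        databaseType: DatabaseType.POSTGRESQL,
        databaseMetadata: metadata,
        diagramNumber: 1,
    });

    console.log(diagram.tables.length, diagram.relationships.length);
};
```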
src/lib/data/import-metadata/import/indexes.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
import type { DBField, DBIndex, IndexType } from '@/lib/domain';
|
||||
import type { AggregatedIndexInfo } from '../metadata-types/index-info';
|
||||
import { generateId } from '@/lib/utils';
|
||||
|
||||
export const createIndexesFromMetadata = ({
|
||||
aggregatedIndexes,
|
||||
fields,
|
||||
}: {
|
||||
aggregatedIndexes: AggregatedIndexInfo[];
|
||||
fields: DBField[];
|
||||
}): DBIndex[] =>
|
||||
aggregatedIndexes.map(
|
||||
(idx): DBIndex => ({
|
||||
id: generateId(),
|
||||
name: idx.name,
|
||||
unique: Boolean(idx.unique),
|
||||
fieldIds: idx.columns
|
||||
.sort((a, b) => a.position - b.position)
|
||||
.map((c) => fields.find((f) => f.name === c.name)?.id)
|
||||
.filter((id): id is string => id !== undefined),
|
||||
createdAt: Date.now(),
|
||||
type: idx.index_type?.toLowerCase() as IndexType,
|
||||
})
|
||||
);
|
||||
src/lib/data/import-metadata/import/relationships.ts (new file, 85 lines)
@@ -0,0 +1,85 @@
|
||||
import type {
|
||||
Cardinality,
|
||||
DBField,
|
||||
DBRelationship,
|
||||
DBTable,
|
||||
} from '@/lib/domain';
|
||||
import { schemaNameToDomainSchemaName } from '@/lib/domain';
|
||||
import type { ForeignKeyInfo } from '../metadata-types/foreign-key-info';
|
||||
import { generateId } from '@/lib/utils';
|
||||
|
||||
const determineCardinality = (
|
||||
field: DBField,
|
||||
isTablePKComplex: boolean
|
||||
): Cardinality => {
|
||||
return field.unique || (field.primaryKey && !isTablePKComplex)
|
||||
? 'one'
|
||||
: 'many';
|
||||
};
|
||||
|
||||
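In words: a foreign-key endpoint is 'one' when the column is unique on its own or is the table's sole primary key column; composite primary keys fall back to 'many'. A sketch using the helper above (it is module-private, so this is illustrative rather than a real external call):

```ts
// Abridged DBField-like inputs; only `unique` and `primaryKey` are read by the helper.
const makeField = (unique: boolean, primaryKey: boolean): DBField =>
    ({ unique, primaryKey }) as unknown as DBField;

determineCardinality(makeField(false, true), false); // 'one'  – sole primary key column
determineCardinality(makeField(false, true), true);  // 'many' – column is part of a composite PK
determineCardinality(makeField(true, false), true);  // 'one'  – a unique column wins regardless
```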
export const createRelationshipsFromMetadata = ({
|
||||
foreignKeys,
|
||||
tables,
|
||||
}: {
|
||||
foreignKeys: ForeignKeyInfo[];
|
||||
tables: DBTable[];
|
||||
}): DBRelationship[] => {
|
||||
return foreignKeys
|
||||
.map((fk: ForeignKeyInfo): DBRelationship | null => {
|
||||
const schema = schemaNameToDomainSchemaName(fk.schema);
|
||||
const sourceTable = tables.find(
|
||||
(table) => table.name === fk.table && table.schema === schema
|
||||
);
|
||||
|
||||
const targetSchema = schemaNameToDomainSchemaName(
|
||||
fk.reference_schema
|
||||
);
|
||||
|
||||
const targetTable = tables.find(
|
||||
(table) =>
|
||||
table.name === fk.reference_table &&
|
||||
table.schema === targetSchema
|
||||
);
|
||||
const sourceField = sourceTable?.fields.find(
|
||||
(field) => field.name === fk.column
|
||||
);
|
||||
const targetField = targetTable?.fields.find(
|
||||
(field) => field.name === fk.reference_column
|
||||
);
|
||||
|
||||
const isSourceTablePKComplex =
|
||||
(sourceTable?.fields.filter((field) => field.primaryKey) ?? [])
|
||||
.length > 1;
|
||||
const isTargetTablePKComplex =
|
||||
(targetTable?.fields.filter((field) => field.primaryKey) ?? [])
|
||||
.length > 1;
|
||||
|
||||
if (sourceTable && targetTable && sourceField && targetField) {
|
||||
const sourceCardinality = determineCardinality(
|
||||
sourceField,
|
||||
isSourceTablePKComplex
|
||||
);
|
||||
const targetCardinality = determineCardinality(
|
||||
targetField,
|
||||
isTargetTablePKComplex
|
||||
);
|
||||
|
||||
return {
|
||||
id: generateId(),
|
||||
name: fk.foreign_key_name,
|
||||
sourceSchema: schema,
|
||||
targetSchema: targetSchema,
|
||||
sourceTableId: sourceTable.id,
|
||||
targetTableId: targetTable.id,
|
||||
sourceFieldId: sourceField.id,
|
||||
targetFieldId: targetField.id,
|
||||
sourceCardinality,
|
||||
targetCardinality,
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
})
|
||||
.filter((rel) => rel !== null) as DBRelationship[];
|
||||
};
|
||||
src/lib/data/import-metadata/import/tables.ts (new file, 228 lines)
@@ -0,0 +1,228 @@
|
||||
import type { DBIndex, DBTable } from '@/lib/domain';
|
||||
import {
|
||||
DatabaseType,
|
||||
generateTableKey,
|
||||
schemaNameToDomainSchemaName,
|
||||
} from '@/lib/domain';
|
||||
import type { DatabaseMetadata } from '../metadata-types/database-metadata';
|
||||
import type { TableInfo } from '../metadata-types/table-info';
|
||||
import { createAggregatedIndexes } from '../metadata-types/index-info';
|
||||
import {
|
||||
decodeBase64ToUtf16LE,
|
||||
decodeBase64ToUtf8,
|
||||
generateId,
|
||||
} from '@/lib/utils';
|
||||
import {
|
||||
defaultTableColor,
|
||||
materializedViewColor,
|
||||
viewColor,
|
||||
} from '@/lib/colors';
|
||||
import { createFieldsFromMetadata } from './fields';
|
||||
import { createIndexesFromMetadata } from './indexes';
|
||||
|
||||
export const decodeViewDefinition = (
|
||||
databaseType: DatabaseType,
|
||||
viewDefinition?: string
|
||||
): string => {
|
||||
if (!viewDefinition) {
|
||||
return '';
|
||||
}
|
||||
|
||||
let decodedViewDefinition: string;
|
||||
if (databaseType === DatabaseType.SQL_SERVER) {
|
||||
decodedViewDefinition = decodeBase64ToUtf16LE(viewDefinition);
|
||||
} else {
|
||||
decodedViewDefinition = decodeBase64ToUtf8(viewDefinition);
|
||||
}
|
||||
|
||||
return decodedViewDefinition;
|
||||
};
|
||||
|
||||
export const createTablesFromMetadata = ({
|
||||
databaseMetadata,
|
||||
databaseType,
|
||||
}: {
|
||||
databaseMetadata: DatabaseMetadata;
|
||||
databaseType: DatabaseType;
|
||||
}): DBTable[] => {
|
||||
const {
|
||||
tables: tableInfos,
|
||||
pk_info: primaryKeys,
|
||||
columns,
|
||||
indexes,
|
||||
views: views,
|
||||
} = databaseMetadata;
|
||||
|
||||
// Pre-compute view names for faster lookup if there are views
|
||||
const viewNamesSet = new Set<string>();
|
||||
const materializedViewNamesSet = new Set<string>();
|
||||
|
||||
if (views && views.length > 0) {
|
||||
views.forEach((view) => {
|
||||
const key = generateTableKey({
|
||||
schemaName: view.schema,
|
||||
tableName: view.view_name,
|
||||
});
|
||||
viewNamesSet.add(key);
|
||||
|
||||
if (
|
||||
view.view_definition &&
|
||||
decodeViewDefinition(databaseType, view.view_definition)
|
||||
.toLowerCase()
|
||||
.includes('materialized')
|
||||
) {
|
||||
materializedViewNamesSet.add(key);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Pre-compute lookup maps for better performance
|
||||
const columnsByTable = new Map<string, (typeof columns)[0][]>();
|
||||
const indexesByTable = new Map<string, (typeof indexes)[0][]>();
|
||||
const primaryKeysByTable = new Map<string, (typeof primaryKeys)[0][]>();
|
||||
|
||||
// Group columns by table
|
||||
columns.forEach((col) => {
|
||||
const key = generateTableKey({
|
||||
schemaName: col.schema,
|
||||
tableName: col.table,
|
||||
});
|
||||
if (!columnsByTable.has(key)) {
|
||||
columnsByTable.set(key, []);
|
||||
}
|
||||
columnsByTable.get(key)!.push(col);
|
||||
});
|
||||
|
||||
// Group indexes by table
|
||||
indexes.forEach((idx) => {
|
||||
const key = generateTableKey({
|
||||
schemaName: idx.schema,
|
||||
tableName: idx.table,
|
||||
});
|
||||
if (!indexesByTable.has(key)) {
|
||||
indexesByTable.set(key, []);
|
||||
}
|
||||
indexesByTable.get(key)!.push(idx);
|
||||
});
|
||||
|
||||
// Group primary keys by table
|
||||
primaryKeys.forEach((pk) => {
|
||||
const key = generateTableKey({
|
||||
schemaName: pk.schema,
|
||||
tableName: pk.table,
|
||||
});
|
||||
if (!primaryKeysByTable.has(key)) {
|
||||
primaryKeysByTable.set(key, []);
|
||||
}
|
||||
primaryKeysByTable.get(key)!.push(pk);
|
||||
});
|
||||
|
||||
const result = tableInfos.map((tableInfo: TableInfo) => {
|
||||
const tableSchema = schemaNameToDomainSchemaName(tableInfo.schema);
|
||||
const tableKey = generateTableKey({
|
||||
schemaName: tableInfo.schema,
|
||||
tableName: tableInfo.table,
|
||||
});
|
||||
|
||||
// Use pre-computed lookups instead of filtering entire arrays
|
||||
const tableIndexes = indexesByTable.get(tableKey) || [];
|
||||
const tablePrimaryKeys = primaryKeysByTable.get(tableKey) || [];
|
||||
const tableColumns = columnsByTable.get(tableKey) || [];
|
||||
|
||||
// Aggregate indexes with multiple columns
|
||||
const aggregatedIndexes = createAggregatedIndexes({
|
||||
tableInfo,
|
||||
tableSchema,
|
||||
tableIndexes,
|
||||
});
|
||||
|
||||
const fields = createFieldsFromMetadata({
|
||||
aggregatedIndexes,
|
||||
tableColumns,
|
||||
tablePrimaryKeys,
|
||||
tableInfo,
|
||||
tableSchema,
|
||||
});
|
||||
|
||||
// Check for composite primary key and find matching index name
|
||||
const primaryKeyFields = fields.filter((f) => f.primaryKey);
|
||||
let pkMatchingIndexName: string | undefined;
|
||||
let pkIndex: DBIndex | undefined;
|
||||
|
||||
if (primaryKeyFields.length >= 1) {
|
||||
// There is at least one primary key column; look for an index that covers exactly the PK columns
|
||||
const pkFieldNames = primaryKeyFields.map((f) => f.name).sort();
|
||||
|
||||
// Find an index that matches the primary key columns exactly
|
||||
const matchingIndex = aggregatedIndexes.find((index) => {
|
||||
const indexColumnNames = index.columns
|
||||
.map((c) => c.name)
|
||||
.sort();
|
||||
return (
|
||||
indexColumnNames.length === pkFieldNames.length &&
|
||||
indexColumnNames.every((col, i) => col === pkFieldNames[i])
|
||||
);
|
||||
});
|
||||
|
||||
if (matchingIndex) {
|
||||
pkMatchingIndexName = matchingIndex.name;
|
||||
// Create a special PK index
|
||||
pkIndex = {
|
||||
id: generateId(),
|
||||
name: matchingIndex.name,
|
||||
unique: true,
|
||||
fieldIds: primaryKeyFields.map((f) => f.id),
|
||||
createdAt: Date.now(),
|
||||
isPrimaryKey: true,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Filter out the index that matches the composite PK (to avoid duplication)
|
||||
const filteredAggregatedIndexes = pkMatchingIndexName
|
||||
? aggregatedIndexes.filter(
|
||||
(idx) => idx.name !== pkMatchingIndexName
|
||||
)
|
||||
: aggregatedIndexes;
|
||||
|
||||
const dbIndexes = createIndexesFromMetadata({
|
||||
aggregatedIndexes: filteredAggregatedIndexes,
|
||||
fields,
|
||||
});
|
||||
|
||||
// Add the PK index if it exists
|
||||
if (pkIndex) {
|
||||
dbIndexes.push(pkIndex);
|
||||
}
|
||||
|
||||
// Determine if the current table is a view by checking against pre-computed sets
|
||||
const viewKey = generateTableKey({
|
||||
schemaName: tableSchema,
|
||||
tableName: tableInfo.table,
|
||||
});
|
||||
const isView = viewNamesSet.has(viewKey);
|
||||
const isMaterializedView = materializedViewNamesSet.has(viewKey);
|
||||
|
||||
// Initial random positions; these will be adjusted later
|
||||
return {
|
||||
id: generateId(),
|
||||
name: tableInfo.table,
|
||||
schema: tableSchema,
|
||||
x: Math.random() * 1000, // Placeholder X
|
||||
y: Math.random() * 800, // Placeholder Y
|
||||
fields,
|
||||
indexes: dbIndexes,
|
||||
color: isMaterializedView
|
||||
? materializedViewColor
|
||||
: isView
|
||||
? viewColor
|
||||
: defaultTableColor,
|
||||
isView: isView,
|
||||
isMaterializedView: isMaterializedView,
|
||||
createdAt: Date.now(),
|
||||
comments: tableInfo.comment ? tableInfo.comment : undefined,
|
||||
};
|
||||
});
|
||||
|
||||
return result;
|
||||
};
|
||||
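The primary-key/index matching above hinges on a simple set comparison: the PK column names and an index's column names are both sorted and must match element for element. Restated in isolation (illustrative helper, not part of the module):

```ts
// True when the index covers exactly the primary key columns, in any order.
const indexMatchesPrimaryKey = (
    pkColumnNames: string[],
    indexColumnNames: string[]
): boolean => {
    const pk = [...pkColumnNames].sort();
    const idx = [...indexColumnNames].sort();
    return pk.length === idx.length && idx.every((col, i) => col === pk[i]);
};

indexMatchesPrimaryKey(['order_id', 'product_id'], ['product_id', 'order_id']); // true
indexMatchesPrimaryKey(['order_id', 'product_id'], ['order_id']); // false – partial cover only
```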
@@ -15,6 +15,7 @@ export interface ColumnInfo {
|
||||
default?: string | null; // Default value for the column, nullable
|
||||
collation?: string | null;
|
||||
comment?: string | null;
|
||||
is_identity?: boolean; // Indicates if the column is auto-increment/identity
|
||||
}
|
||||
|
||||
export const ColumnInfoSchema: z.ZodType<ColumnInfo> = z.object({
|
||||
@@ -35,4 +36,5 @@ export const ColumnInfoSchema: z.ZodType<ColumnInfo> = z.object({
|
||||
default: z.string().nullable().optional(),
|
||||
collation: z.string().nullable().optional(),
|
||||
comment: z.string().nullable().optional(),
|
||||
is_identity: z.boolean().optional(),
|
||||
});
|
||||
|
||||
@@ -127,7 +127,13 @@ cols AS (
|
||||
',"default":"', null,
|
||||
'","collation":"', COALESCE(cols.COLLATION_NAME::TEXT, ''),
|
||||
'","comment":"', COALESCE(replace(replace(dsc.description::TEXT, '"', '\\"'), '\\x', '\\\\x'), ''),
|
||||
'"}')), ',') AS cols_metadata
|
||||
'","is_identity":', CASE
|
||||
WHEN cols.is_identity = 'YES' THEN 'true'
|
||||
WHEN cols.column_default IS NOT NULL AND cols.column_default LIKE 'nextval(%' THEN 'true'
|
||||
WHEN cols.column_default LIKE 'unique_rowid()%' THEN 'true'
|
||||
ELSE 'false'
|
||||
END,
|
||||
'}')), ',') AS cols_metadata
|
||||
FROM information_schema.columns cols
|
||||
LEFT JOIN pg_catalog.pg_class c
|
||||
ON c.relname = cols.table_name
|
||||
|
||||
@@ -69,7 +69,9 @@ SELECT CAST(CONCAT(
|
||||
',"ordinal_position":', cols.ordinal_position,
|
||||
',"nullable":', IF(cols.is_nullable = 'YES', 'true', 'false'),
|
||||
',"default":"', ${withExtras ? withDefault : withoutDefault},
|
||||
'","collation":"', IFNULL(cols.collation_name, ''), '"}')
|
||||
'","collation":"', IFNULL(cols.collation_name, ''),
|
||||
'","is_identity":', IF(cols.extra LIKE '%auto_increment%', 'true', 'false'),
|
||||
'"}')
|
||||
) FROM (
|
||||
SELECT cols.table_schema,
|
||||
cols.table_name,
|
||||
@@ -81,7 +83,8 @@ SELECT CAST(CONCAT(
|
||||
cols.ordinal_position,
|
||||
cols.is_nullable,
|
||||
cols.column_default,
|
||||
cols.collation_name
|
||||
cols.collation_name,
|
||||
cols.extra
|
||||
FROM information_schema.columns cols
|
||||
WHERE cols.table_schema = DATABASE()
|
||||
) AS cols), ''),
|
||||
|
||||
@@ -92,7 +92,9 @@ export const getMySQLQuery = (
|
||||
',"ordinal_position":', cols.ordinal_position,
|
||||
',"nullable":', IF(cols.is_nullable = 'YES', 'true', 'false'),
|
||||
',"default":"', ${withExtras ? withDefault : withoutDefault},
|
||||
'","collation":"', IFNULL(cols.collation_name, ''), '"}'
|
||||
'","collation":"', IFNULL(cols.collation_name, ''),
|
||||
'","is_identity":', IF(cols.extra LIKE '%auto_increment%', 'true', 'false'),
|
||||
'}'
|
||||
)))))
|
||||
), indexes as (
|
||||
(SELECT (@indexes:=NULL),
|
||||
|
||||
@@ -194,7 +194,12 @@ cols AS (
|
||||
',"default":"', ${withExtras ? withDefault : withoutDefault},
|
||||
'","collation":"', COALESCE(cols.COLLATION_NAME, ''),
|
||||
'","comment":"', ${withExtras ? withComments : withoutComments},
|
||||
'"}')), ',') AS cols_metadata
|
||||
'","is_identity":', CASE
|
||||
WHEN cols.is_identity = 'YES' THEN 'true'
|
||||
WHEN cols.column_default IS NOT NULL AND cols.column_default LIKE 'nextval(%' THEN 'true'
|
||||
ELSE 'false'
|
||||
END,
|
||||
'}')), ',') AS cols_metadata
|
||||
FROM information_schema.columns cols
|
||||
LEFT JOIN pg_catalog.pg_class c
|
||||
ON c.relname = cols.table_name
|
||||
|
||||
@@ -119,7 +119,13 @@ WITH fk_info AS (
|
||||
END
|
||||
ELSE null
|
||||
END,
|
||||
'default', ${withExtras ? withDefault : withoutDefault}
|
||||
'default', ${withExtras ? withDefault : withoutDefault},
|
||||
'is_identity',
|
||||
CASE
|
||||
WHEN p.pk = 1 AND LOWER(p.type) LIKE '%int%' THEN json('true')
|
||||
WHEN LOWER((SELECT sql FROM sqlite_master WHERE name = m.name)) LIKE '%' || p.name || '%autoincrement%' THEN json('true')
|
||||
ELSE json('false')
|
||||
END
|
||||
)
|
||||
) AS cols_metadata
|
||||
FROM
|
||||
@@ -292,7 +298,13 @@ WITH fk_info AS (
|
||||
END
|
||||
ELSE null
|
||||
END,
|
||||
'default', ${withExtras ? withDefault : withoutDefault}
|
||||
'default', ${withExtras ? withDefault : withoutDefault},
|
||||
'is_identity',
|
||||
CASE
|
||||
WHEN p.pk = 1 AND LOWER(p.type) LIKE '%int%' THEN json('true')
|
||||
WHEN LOWER((SELECT sql FROM sqlite_master WHERE name = m.name)) LIKE '%' || p.name || '%autoincrement%' THEN json('true')
|
||||
ELSE json('false')
|
||||
END
|
||||
)
|
||||
) AS cols_metadata
|
||||
FROM
|
||||
|
||||
@@ -91,6 +91,11 @@ cols AS (
|
||||
WHEN cols.COLLATION_NAME IS NULL THEN 'null'
|
||||
ELSE '"' + STRING_ESCAPE(cols.COLLATION_NAME, 'json') + '"'
|
||||
END +
|
||||
', "is_identity": ' + CASE
|
||||
WHEN COLUMNPROPERTY(OBJECT_ID(cols.TABLE_SCHEMA + '.' + cols.TABLE_NAME), cols.COLUMN_NAME, 'IsIdentity') = 1
|
||||
THEN 'true'
|
||||
ELSE 'false'
|
||||
END +
|
||||
N'}') COLLATE DATABASE_DEFAULT
|
||||
), N','
|
||||
) +
|
||||
|
||||
@@ -1,20 +1,10 @@
|
||||
import { describe, it, expect, vi } from 'vitest';
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { exportBaseSQL } from '../export-sql-script';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import type { Diagram } from '@/lib/domain/diagram';
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import type { DBField } from '@/lib/domain/db-field';
|
||||
|
||||
// Mock the dbml/core importer
|
||||
vi.mock('@dbml/core', () => ({
|
||||
importer: {
|
||||
import: vi.fn((sql: string) => {
|
||||
// Return a simplified DBML for testing
|
||||
return sql;
|
||||
}),
|
||||
},
|
||||
}));
|
||||
|
||||
describe('DBML Export - SQL Generation Tests', () => {
|
||||
// Helper to generate test IDs and timestamps
|
||||
let idCounter = 0;
|
||||
@@ -116,7 +106,7 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
});
|
||||
|
||||
// Should contain composite primary key syntax
|
||||
expect(sql).toContain('PRIMARY KEY (spell_id, component_id)');
|
||||
expect(sql).toContain('PRIMARY KEY ("spell_id", "component_id")');
|
||||
// Should NOT contain individual PRIMARY KEY constraints
|
||||
expect(sql).not.toMatch(/spell_id\s+uuid\s+NOT NULL\s+PRIMARY KEY/);
|
||||
expect(sql).not.toMatch(
|
||||
@@ -202,7 +192,7 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
|
||||
// Should contain composite primary key constraint
|
||||
expect(sql).toContain(
|
||||
'PRIMARY KEY (master_user_id, tenant_id, tenant_user_id)'
|
||||
'PRIMARY KEY ("master_user_id", "tenant_id", "tenant_user_id")'
|
||||
);
|
||||
|
||||
// Should NOT contain the duplicate index for the primary key fields
|
||||
@@ -255,7 +245,7 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
});
|
||||
|
||||
// Should contain inline PRIMARY KEY
|
||||
expect(sql).toMatch(/id\s+uuid\s+NOT NULL\s+PRIMARY KEY/);
|
||||
expect(sql).toMatch(/"id"\s+uuid\s+NOT NULL\s+PRIMARY KEY/);
|
||||
// Should NOT contain separate PRIMARY KEY constraint
|
||||
expect(sql).not.toContain('PRIMARY KEY (id)');
|
||||
});
|
||||
@@ -316,8 +306,8 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
expect(sql).not.toContain('DEFAULT has default');
|
||||
expect(sql).not.toContain('DEFAULT DEFAULT has default');
|
||||
// The fields should still be in the table
|
||||
expect(sql).toContain('is_active boolean');
|
||||
expect(sql).toContain('stock_count integer NOT NULL'); // integer gets simplified to int
|
||||
expect(sql).toContain('"is_active" boolean');
|
||||
expect(sql).toContain('"stock_count" integer NOT NULL'); // integer gets simplified to int
|
||||
});
|
||||
|
||||
it('should handle valid default values correctly', () => {
|
||||
@@ -439,8 +429,8 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
});
|
||||
|
||||
// Should convert NOW to NOW() and ('now') to now()
|
||||
expect(sql).toContain('created_at timestamp DEFAULT NOW');
|
||||
expect(sql).toContain('updated_at timestamp DEFAULT now()');
|
||||
expect(sql).toContain('"created_at" timestamp DEFAULT NOW');
|
||||
expect(sql).toContain('"updated_at" timestamp DEFAULT now()');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -495,9 +485,9 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
});
|
||||
|
||||
// Should handle char with explicit length
|
||||
expect(sql).toContain('element_code char(2)');
|
||||
expect(sql).toContain('"element_code" char(2)');
|
||||
// Should add default length for char without length
|
||||
expect(sql).toContain('status char(1)');
|
||||
expect(sql).toContain('"status" char(1)');
|
||||
});
|
||||
|
||||
it('should not have spaces between char and parentheses', () => {
|
||||
@@ -606,7 +596,7 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
});
|
||||
|
||||
// Should create a valid table without primary key
|
||||
expect(sql).toContain('CREATE TABLE experiment_logs');
|
||||
expect(sql).toContain('CREATE TABLE "experiment_logs"');
|
||||
expect(sql).not.toContain('PRIMARY KEY');
|
||||
});
|
||||
|
||||
@@ -721,11 +711,11 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
});
|
||||
|
||||
// Should create both tables
|
||||
expect(sql).toContain('CREATE TABLE guilds');
|
||||
expect(sql).toContain('CREATE TABLE guild_members');
|
||||
expect(sql).toContain('CREATE TABLE "guilds"');
|
||||
expect(sql).toContain('CREATE TABLE "guild_members"');
|
||||
// Should create foreign key
|
||||
expect(sql).toContain(
|
||||
'ALTER TABLE guild_members ADD CONSTRAINT fk_guild_members_guild FOREIGN KEY (guild_id) REFERENCES guilds (id)'
|
||||
'ALTER TABLE "guild_members" ADD CONSTRAINT fk_guild_members_guild FOREIGN KEY ("guild_id") REFERENCES "guilds" ("id");'
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -799,12 +789,9 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
isDBMLFlow: true,
|
||||
});
|
||||
|
||||
// Should create schemas
|
||||
expect(sql).toContain('CREATE SCHEMA IF NOT EXISTS transportation');
|
||||
expect(sql).toContain('CREATE SCHEMA IF NOT EXISTS magic');
|
||||
// Should use schema-qualified table names
|
||||
expect(sql).toContain('CREATE TABLE transportation.portals');
|
||||
expect(sql).toContain('CREATE TABLE magic.spells');
|
||||
expect(sql).toContain('CREATE TABLE "transportation"."portals"');
|
||||
expect(sql).toContain('CREATE TABLE "magic"."spells"');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -851,7 +838,7 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
});
|
||||
|
||||
// Should still create table structure
|
||||
expect(sql).toContain('CREATE TABLE empty_table');
|
||||
expect(sql).toContain('CREATE TABLE "empty_table"');
|
||||
expect(sql).toContain('(\n\n)');
|
||||
});
|
||||
|
||||
@@ -952,9 +939,9 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
});
|
||||
|
||||
// Should include precision and scale
|
||||
expect(sql).toContain('amount numeric(15, 2)');
|
||||
expect(sql).toContain('"amount" numeric(15, 2)');
|
||||
// Should include precision only when scale is not provided
|
||||
expect(sql).toContain('interest_rate numeric(5)');
|
||||
expect(sql).toContain('"interest_rate" numeric(5)');
|
||||
});
|
||||
});
|
||||
});
|
||||
File diff suppressed because it is too large.
@@ -286,14 +286,10 @@ export function exportPostgreSQL({
|
||||
}
|
||||
}
|
||||
|
||||
// Handle array types (check if the field has array property or type name ends with '[]')
|
||||
if (field.array || typeName.endsWith('[]')) {
|
||||
if (!typeName.endsWith('[]')) {
|
||||
typeWithSize = typeWithSize + '[]';
|
||||
} else {
|
||||
typeWithSize =
|
||||
typeWithSize.replace('[]', '') + '[]';
|
||||
}
|
||||
// Handle array types (check if the type name ends with '[]')
|
||||
if (typeName.endsWith('[]')) {
|
||||
typeWithSize =
|
||||
typeWithSize.replace('[]', '') + '[]';
|
||||
}
|
||||
|
||||
const notNull = field.nullable ? '' : ' NOT NULL';
|
||||
@@ -5,7 +5,7 @@ import {
|
||||
databaseTypesWithCommentSupport,
|
||||
} from '@/lib/domain/database-type';
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import type { DataType } from '../data-types/data-types';
|
||||
import { dataTypeMap, type DataType } from '../data-types/data-types';
|
||||
import { generateCacheKey, getFromCache, setInCache } from './export-sql-cache';
|
||||
import { exportMSSQL } from './export-per-type/mssql';
|
||||
import { exportPostgreSQL } from './export-per-type/postgresql';
|
||||
@@ -20,6 +20,61 @@ const simplifyDataType = (typeName: string): string => {
|
||||
return typeMap[typeName.toLowerCase()] || typeName;
|
||||
};
|
||||
|
||||
// Helper function to properly quote table/schema names with special characters
|
||||
const getQuotedTableName = (
|
||||
table: DBTable,
|
||||
isDBMLFlow: boolean = false
|
||||
): string => {
|
||||
// Check if a name is already quoted
|
||||
const isAlreadyQuoted = (name: string) => {
|
||||
return (
|
||||
(name.startsWith('"') && name.endsWith('"')) ||
|
||||
(name.startsWith('`') && name.endsWith('`')) ||
|
||||
(name.startsWith('[') && name.endsWith(']'))
|
||||
);
|
||||
};
|
||||
|
||||
// Only add quotes if needed and not already quoted
|
||||
const quoteIfNeeded = (name: string) => {
|
||||
if (isAlreadyQuoted(name)) {
|
||||
return name;
|
||||
}
|
||||
const needsQuoting = /[^a-zA-Z0-9_]/.test(name) || isDBMLFlow;
|
||||
return needsQuoting ? `"${name}"` : name;
|
||||
};
|
||||
|
||||
if (table.schema) {
|
||||
const quotedSchema = quoteIfNeeded(table.schema);
|
||||
const quotedTable = quoteIfNeeded(table.name);
|
||||
return `${quotedSchema}.${quotedTable}`;
|
||||
} else {
|
||||
return quoteIfNeeded(table.name);
|
||||
}
|
||||
};
|
||||
|
||||
const getQuotedFieldName = (
|
||||
fieldName: string,
|
||||
isDBMLFlow: boolean = false
|
||||
): string => {
|
||||
// Check if a name is already quoted
|
||||
const isAlreadyQuoted = (name: string) => {
|
||||
return (
|
||||
(name.startsWith('"') && name.endsWith('"')) ||
|
||||
(name.startsWith('`') && name.endsWith('`')) ||
|
||||
(name.startsWith('[') && name.endsWith(']'))
|
||||
);
|
||||
};
|
||||
|
||||
if (isAlreadyQuoted(fieldName)) {
|
||||
return fieldName;
|
||||
}
|
||||
|
||||
// For DBML flow, always quote field names
|
||||
// Otherwise, only quote if it contains special characters
|
||||
const needsQuoting = /[^a-zA-Z0-9_]/.test(fieldName) || isDBMLFlow;
|
||||
return needsQuoting ? `"${fieldName}"` : fieldName;
|
||||
};
|
||||
|
||||
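The quoting helpers above are deliberately conservative: already-quoted names pass through untouched, plain identifiers stay bare outside the DBML flow, and anything with special characters (or any name at all in the DBML flow) gets double quotes. A few illustrative calls (the helpers are module-private, so this is a sketch rather than a public API):

```ts
getQuotedFieldName('user_id');       // 'user_id'    – plain identifier, no quoting needed
getQuotedFieldName('user id');       // '"user id"'  – a space forces quoting
getQuotedFieldName('"already"');     // '"already"'  – pre-quoted names are left alone
getQuotedFieldName('user_id', true); // '"user_id"'  – DBML flow always quotes
```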
export const exportBaseSQL = ({
|
||||
diagram,
|
||||
targetDatabaseType,
|
||||
@@ -63,18 +118,21 @@ export const exportBaseSQL = ({
|
||||
let sqlScript = '';
|
||||
|
||||
// First create the CREATE SCHEMA statements for all the found schemas based on tables
|
||||
const schemas = new Set<string>();
|
||||
tables.forEach((table) => {
|
||||
if (table.schema) {
|
||||
schemas.add(table.schema);
|
||||
}
|
||||
});
|
||||
// Skip schema creation for DBML flow as DBML doesn't support CREATE SCHEMA syntax
|
||||
if (!isDBMLFlow) {
|
||||
const schemas = new Set<string>();
|
||||
tables.forEach((table) => {
|
||||
if (table.schema) {
|
||||
schemas.add(table.schema);
|
||||
}
|
||||
});
|
||||
|
||||
// Add CREATE SCHEMA statements if any schemas exist
|
||||
schemas.forEach((schema) => {
|
||||
sqlScript += `CREATE SCHEMA IF NOT EXISTS ${schema};\n`;
|
||||
});
|
||||
if (schemas.size > 0) sqlScript += '\n'; // Add newline only if schemas were added
|
||||
// Add CREATE SCHEMA statements if any schemas exist
|
||||
schemas.forEach((schema) => {
|
||||
sqlScript += `CREATE SCHEMA IF NOT EXISTS "${schema}";\n`;
|
||||
});
|
||||
if (schemas.size > 0) sqlScript += '\n'; // Add newline only if schemas were added
|
||||
}
|
||||
|
||||
// Add CREATE TYPE statements for ENUMs and COMPOSITE types from diagram.customTypes
|
||||
if (diagram.customTypes && diagram.customTypes.length > 0) {
|
||||
@@ -166,9 +224,7 @@ export const exportBaseSQL = ({
|
||||
|
||||
// Loop through each non-view table to generate the SQL statements
|
||||
nonViewTables.forEach((table) => {
|
||||
const tableName = table.schema
|
||||
? `${table.schema}.${table.name}`
|
||||
: table.name;
|
||||
const tableName = getQuotedTableName(table, isDBMLFlow);
|
||||
sqlScript += `CREATE TABLE ${tableName} (\n`;
|
||||
|
||||
// Check for composite primary keys
|
||||
@@ -237,7 +293,8 @@ export const exportBaseSQL = ({
|
||||
typeName = 'char';
|
||||
}
|
||||
|
||||
sqlScript += ` ${field.name} ${typeName}`;
|
||||
const quotedFieldName = getQuotedFieldName(field.name, isDBMLFlow);
|
||||
sqlScript += ` ${quotedFieldName} ${typeName}`;
|
||||
|
||||
// Add size for character types
|
||||
if (
|
||||
@@ -257,11 +314,26 @@ export const exportBaseSQL = ({
|
||||
sqlScript += `(1)`;
|
||||
}
|
||||
|
||||
// Add precision and scale for numeric types
|
||||
if (field.precision && field.scale) {
|
||||
sqlScript += `(${field.precision}, ${field.scale})`;
|
||||
} else if (field.precision) {
|
||||
sqlScript += `(${field.precision})`;
|
||||
// Add precision and scale for numeric types only
|
||||
const precisionAndScaleTypes = dataTypeMap[targetDatabaseType]
|
||||
.filter(
|
||||
(t) =>
|
||||
t.fieldAttributes?.precision && t.fieldAttributes?.scale
|
||||
)
|
||||
.map((t) => t.name);
|
||||
|
||||
const isNumericType = precisionAndScaleTypes.some(
|
||||
(t) =>
|
||||
field.type.name.toLowerCase().includes(t) ||
|
||||
typeName.toLowerCase().includes(t)
|
||||
);
|
||||
|
||||
if (isNumericType) {
|
||||
if (field.precision && field.scale) {
|
||||
sqlScript += `(${field.precision}, ${field.scale})`;
|
||||
} else if (field.precision) {
|
||||
sqlScript += `(${field.precision})`;
|
||||
}
|
||||
}
|
||||
|
||||
// Handle NOT NULL constraint
|
||||
@@ -309,6 +381,16 @@ export const exportBaseSQL = ({
|
||||
fieldDefault = `now()`;
|
||||
}
|
||||
|
||||
// Fix CURRENT_DATE() for PostgreSQL in DBML flow - PostgreSQL uses CURRENT_DATE without parentheses
|
||||
if (
|
||||
isDBMLFlow &&
|
||||
targetDatabaseType === DatabaseType.POSTGRESQL
|
||||
) {
|
||||
if (fieldDefault.toUpperCase() === 'CURRENT_DATE()') {
|
||||
fieldDefault = 'CURRENT_DATE';
|
||||
}
|
||||
}
|
||||
|
||||
sqlScript += ` DEFAULT ${fieldDefault}`;
|
||||
}
|
||||
}
|
||||
@@ -334,7 +416,9 @@ export const exportBaseSQL = ({
|
||||
hasCompositePrimaryKey ||
|
||||
(primaryKeyFields.length === 1 && pkIndex?.name)
|
||||
) {
|
||||
const pkFieldNames = primaryKeyFields.map((f) => f.name).join(', ');
|
||||
const pkFieldNames = primaryKeyFields
|
||||
.map((f) => getQuotedFieldName(f.name, isDBMLFlow))
|
||||
.join(', ');
|
||||
if (pkIndex?.name) {
|
||||
sqlScript += `\n CONSTRAINT ${pkIndex.name} PRIMARY KEY (${pkFieldNames})`;
|
||||
} else {
|
||||
@@ -355,7 +439,11 @@ export const exportBaseSQL = ({
|
||||
table.fields.forEach((field) => {
|
||||
// Add column comment (only for databases that support COMMENT ON syntax)
|
||||
if (field.comments && supportsCommentOn) {
|
||||
sqlScript += `COMMENT ON COLUMN ${tableName}.${field.name} IS '${escapeSQLComment(field.comments)}';\n`;
|
||||
const quotedFieldName = getQuotedFieldName(
|
||||
field.name,
|
||||
isDBMLFlow
|
||||
);
|
||||
sqlScript += `COMMENT ON COLUMN ${tableName}.${quotedFieldName} IS '${escapeSQLComment(field.comments)}';\n`;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -387,14 +475,20 @@ export const exportBaseSQL = ({
|
||||
}
|
||||
|
||||
const fieldNames = indexFields
|
||||
.map((field) => field.name)
|
||||
.map((field) => getQuotedFieldName(field.name, isDBMLFlow))
|
||||
.join(', ');
|
||||
|
||||
if (fieldNames) {
|
||||
const indexName =
|
||||
const rawIndexName =
|
||||
table.schema && !isDBMLFlow
|
||||
? `${table.schema}_${index.name}`
|
||||
: index.name;
|
||||
// Quote index name if it contains special characters
|
||||
// For DBML flow, also quote if contains special characters
|
||||
const needsQuoting = /[^a-zA-Z0-9_]/.test(rawIndexName);
|
||||
const indexName = needsQuoting
|
||||
? `"${rawIndexName}"`
|
||||
: rawIndexName;
|
||||
sqlScript += `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName} ON ${tableName} (${fieldNames});\n`;
|
||||
}
|
||||
});
|
||||
@@ -465,13 +559,18 @@ export const exportBaseSQL = ({
|
||||
return;
|
||||
}
|
||||
|
||||
const fkTableName = fkTable.schema
|
||||
? `${fkTable.schema}.${fkTable.name}`
|
||||
: fkTable.name;
|
||||
const refTableName = refTable.schema
|
||||
? `${refTable.schema}.${refTable.name}`
|
||||
: refTable.name;
|
||||
sqlScript += `ALTER TABLE ${fkTableName} ADD CONSTRAINT ${relationship.name} FOREIGN KEY (${fkField.name}) REFERENCES ${refTableName} (${refField.name});\n`;
|
||||
const fkTableName = getQuotedTableName(fkTable, isDBMLFlow);
|
||||
const refTableName = getQuotedTableName(refTable, isDBMLFlow);
|
||||
const quotedFkFieldName = getQuotedFieldName(
|
||||
fkField.name,
|
||||
isDBMLFlow
|
||||
);
|
||||
const quotedRefFieldName = getQuotedFieldName(
|
||||
refField.name,
|
||||
isDBMLFlow
|
||||
);
|
||||
|
||||
sqlScript += `ALTER TABLE ${fkTableName} ADD CONSTRAINT ${relationship.name} FOREIGN KEY (${quotedFkFieldName}) REFERENCES ${refTableName} (${quotedRefFieldName});\n`;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -29,7 +29,6 @@ export interface SQLColumn {
    comment?: string;
    default?: string;
    increment?: boolean;
    array?: boolean;
}

export interface SQLTable {

@@ -87,7 +86,7 @@ export interface SQLBinaryExpr extends SQLASTNode {

export interface SQLFunctionNode extends SQLASTNode {
    type: 'function';
    name: string;
    name: string | { name: Array<{ value: string }> };
    args?: {
        value: SQLASTArg[];
    };

@@ -109,6 +108,31 @@ export interface SQLStringLiteral extends SQLASTNode {
    value: string;
}

export interface SQLDefaultNode extends SQLASTNode {
    type: 'default';
    value: SQLASTNode;
}

export interface SQLCastNode extends SQLASTNode {
    type: 'cast';
    expr: SQLASTNode;
    target: Array<{ dataType: string }>;
}

export interface SQLBooleanNode extends SQLASTNode {
    type: 'bool';
    value: boolean;
}

export interface SQLNullNode extends SQLASTNode {
    type: 'null';
}

export interface SQLNumberNode extends SQLASTNode {
    type: 'number';
    value: number;
}

export type SQLASTArg =
    | SQLColumnRef
    | SQLStringLiteral

@@ -147,6 +171,22 @@ export function buildSQLFromAST(
): string {
    if (!ast) return '';

    // Handle default value wrapper
    if (ast.type === 'default' && 'value' in ast) {
        const defaultNode = ast as SQLDefaultNode;
        return buildSQLFromAST(defaultNode.value, dbType);
    }

    // Handle PostgreSQL cast expressions (e.g., 'value'::type)
    if (ast.type === 'cast' && 'expr' in ast && 'target' in ast) {
        const castNode = ast as SQLCastNode;
        const expr = buildSQLFromAST(castNode.expr, dbType);
        if (castNode.target.length > 0 && castNode.target[0].dataType) {
            return `${expr}::${castNode.target[0].dataType.toLowerCase()}`;
        }
        return expr;
    }

    if (ast.type === 'binary_expr') {
        const expr = ast as SQLBinaryExpr;
        const leftSQL = buildSQLFromAST(expr.left, dbType);
@@ -156,7 +196,59 @@ export function buildSQLFromAST(

    if (ast.type === 'function') {
        const func = ast as SQLFunctionNode;
        let expr = func.name;
        let funcName = '';

        // Handle nested function name structure
        if (typeof func.name === 'object' && func.name && 'name' in func.name) {
            const nameObj = func.name as { name: Array<{ value: string }> };
            if (nameObj.name.length > 0) {
                funcName = nameObj.name[0].value || '';
            }
        } else if (typeof func.name === 'string') {
            funcName = func.name;
        }

        if (!funcName) return '';

        // Normalize PostgreSQL function names to uppercase for consistency
        if (dbType === DatabaseType.POSTGRESQL) {
            const pgFunctions = [
                'now',
                'current_timestamp',
                'current_date',
                'current_time',
                'gen_random_uuid',
                'random',
                'nextval',
                'currval',
            ];
            if (pgFunctions.includes(funcName.toLowerCase())) {
                funcName = funcName.toUpperCase();
            }
        }

        // Some PostgreSQL functions don't have parentheses (like CURRENT_TIMESTAMP)
        if (funcName === 'CURRENT_TIMESTAMP' && !func.args) {
            return funcName;
        }

        // Handle SQL Server function defaults that were preprocessed as strings
        // The preprocessor converts NEWID() to 'newid', GETDATE() to 'getdate', etc.
        if (dbType === DatabaseType.SQL_SERVER) {
            const sqlServerFunctions: Record<string, string> = {
                newid: 'NEWID()',
                newsequentialid: 'NEWSEQUENTIALID()',
                getdate: 'GETDATE()',
                sysdatetime: 'SYSDATETIME()',
            };

            const lowerFuncName = funcName.toLowerCase();
            if (sqlServerFunctions[lowerFuncName]) {
                return sqlServerFunctions[lowerFuncName];
            }
        }

        let expr = funcName;
        if (func.args) {
            expr +=
                '(' +

@@ -176,12 +268,31 @@ export function buildSQLFromAST(
                })
                .join(', ') +
                ')';
        } else {
            expr += '()';
        }
        return expr;
    } else if (ast.type === 'column_ref') {
        return quoteIdentifier((ast as SQLColumnRef).column, dbType);
    } else if (ast.type === 'expr_list') {
        return (ast as SQLExprList).value.map((v) => v.value).join(' AND ');
    } else if (ast.type === 'single_quote_string') {
        // String literal with single quotes
        const strNode = ast as SQLStringLiteral;
        return `'${strNode.value}'`;
    } else if (ast.type === 'double_quote_string') {
        // String literal with double quotes
        const strNode = ast as SQLStringLiteral;
        return `"${strNode.value}"`;
    } else if (ast.type === 'bool') {
        // Boolean value
        const boolNode = ast as SQLBooleanNode;
        return boolNode.value ? 'TRUE' : 'FALSE';
    } else if (ast.type === 'null') {
        return 'NULL';
    } else if (ast.type === 'number') {
        const numNode = ast as SQLNumberNode;
        return String(numNode.value);
    } else {
        const valueNode = ast as { type: string; value: string | number };
        return typeof valueNode.value === 'string'
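For orientation, here is an illustrative (not project-supplied) call into the `buildSQLFromAST` additions above, using node shapes that match the new `SQLDefaultNode`, `SQLCastNode`, and `SQLStringLiteral` interfaces. The relative import path is an assumption based on the importer hunks further down in this diff; `DatabaseType` is imported exactly as shown there.

```ts
import { buildSQLFromAST } from '../../common'; // path assumed from the importer hunks below
import { DatabaseType } from '@/lib/domain/database-type';

type ASTArg = Parameters<typeof buildSQLFromAST>[0];

// A DEFAULT 'healing'::text expression, roughly as node-sql-parser might emit it.
const castDefault = {
    type: 'default',
    value: {
        type: 'cast',
        expr: { type: 'single_quote_string', value: 'healing' },
        target: [{ dataType: 'TEXT' }],
    },
};

// Unwraps the default wrapper, renders the quoted literal, and lower-cases the
// cast target, yielding "'healing'::text" — the form the PostgreSQL
// default-value tests later in this diff expect.
console.log(buildSQLFromAST(castDefault as ASTArg, DatabaseType.POSTGRESQL));
```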
@@ -613,7 +724,6 @@ export function convertToChartDBDiagram(
            default: column.default || '',
            createdAt: Date.now(),
            increment: column.increment,
            array: column.array,
        };

        // Add type arguments if present

@@ -781,10 +891,10 @@ export function convertToChartDBDiagram(
        }

        const sourceField = sourceTable.fields.find(
            (f) => f.name === rel.sourceColumn
            (f) => f.name.toLowerCase() === rel.sourceColumn.toLowerCase()
        );
        const targetField = targetTable.fields.find(
            (f) => f.name === rel.targetColumn
            (f) => f.name.toLowerCase() === rel.targetColumn.toLowerCase()
        );

        if (!sourceField || !targetField) {

@@ -0,0 +1,228 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromMySQL } from '../mysql';
|
||||
|
||||
describe('MySQL Default Value Import', () => {
|
||||
describe('String Default Values', () => {
|
||||
it('should parse simple string defaults with single quotes', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE tavern_patrons (
|
||||
patron_id INT NOT NULL,
|
||||
membership_status VARCHAR(50) DEFAULT 'regular',
|
||||
PRIMARY KEY (patron_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromMySQL(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const statusColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'membership_status'
|
||||
);
|
||||
expect(statusColumn?.default).toBe("'regular'");
|
||||
});
|
||||
|
||||
it('should parse string defaults with escaped quotes', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizard_spellbooks (
|
||||
spellbook_id INT NOT NULL,
|
||||
incantation VARCHAR(255) DEFAULT 'Dragon\\'s flame',
|
||||
spell_metadata TEXT DEFAULT '{"type": "fire"}',
|
||||
PRIMARY KEY (spellbook_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromMySQL(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const incantationColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'incantation'
|
||||
);
|
||||
expect(incantationColumn?.default).toBeTruthy();
|
||||
const metadataColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'spell_metadata'
|
||||
);
|
||||
expect(metadataColumn?.default).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Numeric Default Values', () => {
|
||||
it('should parse integer defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE dungeon_levels (
|
||||
level_id INT NOT NULL,
|
||||
monster_count INT DEFAULT 0,
|
||||
max_treasure INT DEFAULT 1000,
|
||||
PRIMARY KEY (level_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromMySQL(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const monsterColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'monster_count'
|
||||
);
|
||||
expect(monsterColumn?.default).toBe('0');
|
||||
const treasureColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'max_treasure'
|
||||
);
|
||||
expect(treasureColumn?.default).toBe('1000');
|
||||
});
|
||||
|
||||
it('should parse decimal defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE merchant_inventory (
|
||||
item_id INT NOT NULL,
|
||||
base_price DECIMAL(10, 2) DEFAULT 99.99,
|
||||
loyalty_discount FLOAT DEFAULT 0.15,
|
||||
PRIMARY KEY (item_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromMySQL(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const priceColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'base_price'
|
||||
);
|
||||
expect(priceColumn?.default).toBe('99.99');
|
||||
const discountColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'loyalty_discount'
|
||||
);
|
||||
expect(discountColumn?.default).toBe('0.15');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Boolean Default Values', () => {
|
||||
it('should parse boolean defaults in MySQL (using TINYINT)', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE character_status (
|
||||
character_id INT NOT NULL,
|
||||
is_alive TINYINT(1) DEFAULT 1,
|
||||
is_cursed TINYINT(1) DEFAULT 0,
|
||||
has_magic BOOLEAN DEFAULT TRUE,
|
||||
PRIMARY KEY (character_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromMySQL(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const aliveColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'is_alive'
|
||||
);
|
||||
expect(aliveColumn?.default).toBe('1');
|
||||
const cursedColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'is_cursed'
|
||||
);
|
||||
expect(cursedColumn?.default).toBe('0');
|
||||
});
|
||||
});
|
||||
|
||||
describe('NULL Default Values', () => {
|
||||
it('should parse NULL defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE companion_animals (
|
||||
companion_id INT NOT NULL,
|
||||
special_trait VARCHAR(255) DEFAULT NULL,
|
||||
PRIMARY KEY (companion_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromMySQL(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const traitColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'special_trait'
|
||||
);
|
||||
expect(traitColumn?.default).toBe('NULL');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Function Default Values', () => {
|
||||
it('should parse function defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE quest_entries (
|
||||
entry_id INT NOT NULL AUTO_INCREMENT,
|
||||
quest_accepted TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
quest_uuid VARCHAR(36) DEFAULT (UUID()),
|
||||
PRIMARY KEY (entry_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromMySQL(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const acceptedColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'quest_accepted'
|
||||
);
|
||||
expect(acceptedColumn?.default).toBe('CURRENT_TIMESTAMP');
|
||||
const updatedColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'last_updated'
|
||||
);
|
||||
expect(updatedColumn?.default).toBe('CURRENT_TIMESTAMP');
|
||||
});
|
||||
});
|
||||
|
||||
describe('AUTO_INCREMENT', () => {
|
||||
it('should handle AUTO_INCREMENT columns correctly', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE hero_registry (
|
||||
hero_id INT NOT NULL AUTO_INCREMENT,
|
||||
hero_name VARCHAR(100),
|
||||
PRIMARY KEY (hero_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromMySQL(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const idColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'hero_id'
|
||||
);
|
||||
expect(idColumn?.increment).toBe(true);
|
||||
// AUTO_INCREMENT columns typically don't have a default value
|
||||
expect(idColumn?.default).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Complex Real-World Example', () => {
|
||||
it('should handle complex table with multiple default types', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE adventurer_profiles (
|
||||
adventurer_id BIGINT NOT NULL AUTO_INCREMENT,
|
||||
character_name VARCHAR(50) NOT NULL,
|
||||
guild_email VARCHAR(255) NOT NULL,
|
||||
rank VARCHAR(20) DEFAULT 'novice',
|
||||
is_guild_verified TINYINT(1) DEFAULT 0,
|
||||
gold_coins INT DEFAULT 100,
|
||||
account_balance DECIMAL(10, 2) DEFAULT 0.00,
|
||||
joined_realm TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
last_quest TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
inventory_data JSON DEFAULT NULL,
|
||||
PRIMARY KEY (adventurer_id),
|
||||
UNIQUE KEY uk_guild_email (guild_email),
|
||||
INDEX idx_rank (rank)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromMySQL(sql);
|
||||
const table = result.tables[0];
|
||||
expect(table).toBeDefined();
|
||||
|
||||
// Check various default values
|
||||
const rankColumn = table.columns.find((c) => c.name === 'rank');
|
||||
expect(rankColumn?.default).toBe("'novice'");
|
||||
|
||||
const verifiedColumn = table.columns.find(
|
||||
(c) => c.name === 'is_guild_verified'
|
||||
);
|
||||
expect(verifiedColumn?.default).toBe('0');
|
||||
|
||||
const goldColumn = table.columns.find(
|
||||
(c) => c.name === 'gold_coins'
|
||||
);
|
||||
expect(goldColumn?.default).toBe('100');
|
||||
|
||||
const balanceColumn = table.columns.find(
|
||||
(c) => c.name === 'account_balance'
|
||||
);
|
||||
expect(balanceColumn?.default).toBe('0.00');
|
||||
|
||||
const joinedColumn = table.columns.find(
|
||||
(c) => c.name === 'joined_realm'
|
||||
);
|
||||
expect(joinedColumn?.default).toBe('CURRENT_TIMESTAMP');
|
||||
|
||||
const inventoryColumn = table.columns.find(
|
||||
(c) => c.name === 'inventory_data'
|
||||
);
|
||||
expect(inventoryColumn?.default).toBe('NULL');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -101,12 +101,28 @@ function extractColumnsFromCreateTable(statement: string): SQLColumn[] {
                const typeMatch = definition.match(/^([^\s(]+)(?:\(([^)]+)\))?/);
                const dataType = typeMatch ? typeMatch[1] : '';

                // Extract default value
                let defaultValue: string | undefined;
                const defaultMatch = definition.match(
                    /DEFAULT\s+('[^']*'|"[^"]*"|NULL|CURRENT_TIMESTAMP|\S+)/i
                );
                if (defaultMatch) {
                    defaultValue = defaultMatch[1];
                }

                // Check for AUTO_INCREMENT
                const increment = definition
                    .toUpperCase()
                    .includes('AUTO_INCREMENT');

                columns.push({
                    name: columnName,
                    type: dataType,
                    nullable,
                    primaryKey,
                    unique: definition.toUpperCase().includes('UNIQUE'),
                    default: defaultValue,
                    increment,
                });
            }
        }

@@ -721,7 +737,28 @@ export async function fromMySQL(sqlContent: string): Promise<SQLParserResult> {
                    parseError
                );

                // Error handling without logging
                // Try fallback parser when main parser fails
                const tableMatch = trimmedStmt.match(
                    /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?`?([^`\s(]+)`?\s*\(/i
                );
                if (tableMatch) {
                    const tableName = tableMatch[1];
                    const tableId = generateId();
                    tableMap[tableName] = tableId;

                    const extractedColumns =
                        extractColumnsFromCreateTable(trimmedStmt);
                    if (extractedColumns.length > 0) {
                        tables.push({
                            id: tableId,
                            name: tableName,
                            schema: undefined,
                            columns: extractedColumns,
                            indexes: [],
                            order: tables.length,
                        });
                    }
                }
            }
        }
    }
}

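A quick stand-alone illustration (not part of the diff itself) of the DEFAULT-extraction regex used by the fallback column parser above. Quoted defaults keep their quotes, which is why the tests assert `"'regular'"` rather than `"regular"`.

```ts
// Same pattern as in extractColumnsFromCreateTable above.
const defaultPattern = /DEFAULT\s+('[^']*'|"[^"]*"|NULL|CURRENT_TIMESTAMP|\S+)/i;

const samples = [
    "membership_status VARCHAR(50) DEFAULT 'regular'",
    'monster_count INT DEFAULT 0',
    'special_trait VARCHAR(255) DEFAULT NULL',
    'quest_accepted TIMESTAMP DEFAULT CURRENT_TIMESTAMP',
];

for (const definition of samples) {
    console.log(definition.match(defaultPattern)?.[1]);
    // -> 'regular', 0, NULL, CURRENT_TIMESTAMP
}
```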
@@ -0,0 +1,215 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('PostgreSQL ALTER TABLE ADD COLUMN Tests', () => {
|
||||
it('should handle ALTER TABLE ADD COLUMN statements', async () => {
|
||||
const sql = `
|
||||
CREATE SCHEMA IF NOT EXISTS "public";
|
||||
|
||||
CREATE TABLE "public"."location" (
|
||||
"id" bigint NOT NULL,
|
||||
CONSTRAINT "pk_table_7_id" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- Add new fields to existing location table
|
||||
ALTER TABLE location ADD COLUMN country_id INT;
|
||||
ALTER TABLE location ADD COLUMN state_id INT;
|
||||
ALTER TABLE location ADD COLUMN location_type_id INT;
|
||||
ALTER TABLE location ADD COLUMN city_id INT;
|
||||
ALTER TABLE location ADD COLUMN street TEXT;
|
||||
ALTER TABLE location ADD COLUMN block TEXT;
|
||||
ALTER TABLE location ADD COLUMN building TEXT;
|
||||
ALTER TABLE location ADD COLUMN floor TEXT;
|
||||
ALTER TABLE location ADD COLUMN apartment TEXT;
|
||||
ALTER TABLE location ADD COLUMN lat INT;
|
||||
ALTER TABLE location ADD COLUMN long INT;
|
||||
ALTER TABLE location ADD COLUMN elevation INT;
|
||||
ALTER TABLE location ADD COLUMN erp_site_id INT;
|
||||
ALTER TABLE location ADD COLUMN is_active TEXT;
|
||||
ALTER TABLE location ADD COLUMN remarks TEXT;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const locationTable = result.tables[0];
|
||||
|
||||
expect(locationTable.name).toBe('location');
|
||||
expect(locationTable.schema).toBe('public');
|
||||
|
||||
// Should have the original id column plus all the added columns
|
||||
expect(locationTable.columns).toHaveLength(16);
|
||||
|
||||
// Check that the id column is present
|
||||
const idColumn = locationTable.columns.find((col) => col.name === 'id');
|
||||
expect(idColumn).toBeDefined();
|
||||
expect(idColumn?.type).toBe('BIGINT');
|
||||
expect(idColumn?.primaryKey).toBe(true);
|
||||
|
||||
// Check some of the added columns
|
||||
const countryIdColumn = locationTable.columns.find(
|
||||
(col) => col.name === 'country_id'
|
||||
);
|
||||
expect(countryIdColumn).toBeDefined();
|
||||
expect(countryIdColumn?.type).toBe('INTEGER');
|
||||
|
||||
const streetColumn = locationTable.columns.find(
|
||||
(col) => col.name === 'street'
|
||||
);
|
||||
expect(streetColumn).toBeDefined();
|
||||
expect(streetColumn?.type).toBe('TEXT');
|
||||
|
||||
const remarksColumn = locationTable.columns.find(
|
||||
(col) => col.name === 'remarks'
|
||||
);
|
||||
expect(remarksColumn).toBeDefined();
|
||||
expect(remarksColumn?.type).toBe('TEXT');
|
||||
});
|
||||
|
||||
it('should handle ALTER TABLE ADD COLUMN with schema qualification', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE public.users (
|
||||
id INTEGER PRIMARY KEY
|
||||
);
|
||||
|
||||
ALTER TABLE public.users ADD COLUMN email VARCHAR(255);
|
||||
ALTER TABLE public.users ADD COLUMN created_at TIMESTAMP;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const usersTable = result.tables[0];
|
||||
|
||||
expect(usersTable.columns).toHaveLength(3);
|
||||
|
||||
const emailColumn = usersTable.columns.find(
|
||||
(col) => col.name === 'email'
|
||||
);
|
||||
expect(emailColumn).toBeDefined();
|
||||
expect(emailColumn?.type).toBe('VARCHAR(255)');
|
||||
|
||||
const createdAtColumn = usersTable.columns.find(
|
||||
(col) => col.name === 'created_at'
|
||||
);
|
||||
expect(createdAtColumn).toBeDefined();
|
||||
expect(createdAtColumn?.type).toBe('TIMESTAMP');
|
||||
});
|
||||
|
||||
it('should handle ALTER TABLE ADD COLUMN with constraints', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE products (
|
||||
id SERIAL PRIMARY KEY
|
||||
);
|
||||
|
||||
ALTER TABLE products ADD COLUMN name VARCHAR(100) NOT NULL;
|
||||
ALTER TABLE products ADD COLUMN sku VARCHAR(50) UNIQUE;
|
||||
ALTER TABLE products ADD COLUMN price DECIMAL(10,2) DEFAULT 0.00;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const productsTable = result.tables[0];
|
||||
|
||||
expect(productsTable.columns).toHaveLength(4);
|
||||
|
||||
const nameColumn = productsTable.columns.find(
|
||||
(col) => col.name === 'name'
|
||||
);
|
||||
expect(nameColumn).toBeDefined();
|
||||
expect(nameColumn?.nullable).toBe(false);
|
||||
|
||||
const skuColumn = productsTable.columns.find(
|
||||
(col) => col.name === 'sku'
|
||||
);
|
||||
expect(skuColumn).toBeDefined();
|
||||
expect(skuColumn?.unique).toBe(true);
|
||||
|
||||
const priceColumn = productsTable.columns.find(
|
||||
(col) => col.name === 'price'
|
||||
);
|
||||
expect(priceColumn).toBeDefined();
|
||||
expect(priceColumn?.default).toBe('0');
|
||||
});
|
||||
|
||||
it('should not add duplicate columns', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE items (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name VARCHAR(100)
|
||||
);
|
||||
|
||||
ALTER TABLE items ADD COLUMN description TEXT;
|
||||
ALTER TABLE items ADD COLUMN name VARCHAR(200); -- Should not be added as duplicate
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const itemsTable = result.tables[0];
|
||||
|
||||
// Should only have 3 columns: id, name (original), and description
|
||||
expect(itemsTable.columns).toHaveLength(3);
|
||||
|
||||
const nameColumns = itemsTable.columns.filter(
|
||||
(col) => col.name === 'name'
|
||||
);
|
||||
expect(nameColumns).toHaveLength(1);
|
||||
expect(nameColumns[0].type).toBe('VARCHAR(100)'); // Should keep original type
|
||||
});
|
||||
|
||||
it('should use default schema when not specified', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE test_table (
|
||||
id INTEGER PRIMARY KEY
|
||||
);
|
||||
|
||||
ALTER TABLE test_table ADD COLUMN value TEXT;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const testTable = result.tables[0];
|
||||
|
||||
expect(testTable.schema).toBe('public');
|
||||
expect(testTable.columns).toHaveLength(2);
|
||||
|
||||
const valueColumn = testTable.columns.find(
|
||||
(col) => col.name === 'value'
|
||||
);
|
||||
expect(valueColumn).toBeDefined();
|
||||
});
|
||||
|
||||
it('should handle quoted identifiers in ALTER TABLE ADD COLUMN', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "my-table" (
|
||||
"id" INTEGER PRIMARY KEY
|
||||
);
|
||||
|
||||
ALTER TABLE "my-table" ADD COLUMN "my-column" VARCHAR(50);
|
||||
ALTER TABLE "my-table" ADD COLUMN "another-column" INTEGER;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const myTable = result.tables[0];
|
||||
|
||||
expect(myTable.name).toBe('my-table');
|
||||
expect(myTable.columns).toHaveLength(3);
|
||||
|
||||
const myColumn = myTable.columns.find(
|
||||
(col) => col.name === 'my-column'
|
||||
);
|
||||
expect(myColumn).toBeDefined();
|
||||
expect(myColumn?.type).toBe('VARCHAR(50)');
|
||||
|
||||
const anotherColumn = myTable.columns.find(
|
||||
(col) => col.name === 'another-column'
|
||||
);
|
||||
expect(anotherColumn).toBeDefined();
|
||||
expect(anotherColumn?.type).toBe('INTEGER');
|
||||
});
|
||||
});
|
||||
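The duplicate-column case above implies merge logic roughly like the following. This is a hedged sketch of the behavior the tests assert (existing column wins, case-insensitive name match), not the importer's actual code.

```ts
interface ParsedColumn {
    name: string;
    type: string;
}

interface ParsedTable {
    name: string;
    columns: ParsedColumn[];
}

// ALTER TABLE ... ADD COLUMN appends to a previously created table, but a
// column that already exists is left untouched (the original type wins).
function addColumnIfMissing(table: ParsedTable, column: ParsedColumn): void {
    const exists = table.columns.some(
        (c) => c.name.toLowerCase() === column.name.toLowerCase()
    );
    if (!exists) {
        table.columns.push(column);
    }
}

const items: ParsedTable = {
    name: 'items',
    columns: [
        { name: 'id', type: 'INTEGER' },
        { name: 'name', type: 'VARCHAR(100)' },
    ],
};

addColumnIfMissing(items, { name: 'description', type: 'TEXT' });
addColumnIfMissing(items, { name: 'name', type: 'VARCHAR(200)' }); // skipped as duplicate

console.log(items.columns.length); // 3
console.log(items.columns.find((c) => c.name === 'name')?.type); // VARCHAR(100)
```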
@@ -0,0 +1,118 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('PostgreSQL ALTER TABLE ALTER COLUMN TYPE', () => {
|
||||
it('should handle ALTER TABLE ALTER COLUMN TYPE statements', async () => {
|
||||
const sql = `
|
||||
CREATE SCHEMA IF NOT EXISTS "public";
|
||||
|
||||
CREATE TABLE "public"."table_12" (
|
||||
"id" SERIAL,
|
||||
"field1" varchar(200),
|
||||
"field2" varchar(200),
|
||||
"field3" varchar(200),
|
||||
PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
ALTER TABLE table_12 ALTER COLUMN field1 TYPE VARCHAR(254);
|
||||
ALTER TABLE table_12 ALTER COLUMN field2 TYPE VARCHAR(254);
|
||||
ALTER TABLE table_12 ALTER COLUMN field3 TYPE VARCHAR(254);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const table = result.tables[0];
|
||||
|
||||
expect(table.name).toBe('table_12');
|
||||
expect(table.columns).toHaveLength(4); // id, field1, field2, field3
|
||||
|
||||
// Check that the columns have the updated type
|
||||
const field1 = table.columns.find((col) => col.name === 'field1');
|
||||
expect(field1).toBeDefined();
|
||||
expect(field1?.type).toBe('VARCHAR(254)'); // Should be updated from 200 to 254
|
||||
|
||||
const field2 = table.columns.find((col) => col.name === 'field2');
|
||||
expect(field2).toBeDefined();
|
||||
expect(field2?.type).toBe('VARCHAR(254)');
|
||||
|
||||
const field3 = table.columns.find((col) => col.name === 'field3');
|
||||
expect(field3).toBeDefined();
|
||||
expect(field3?.type).toBe('VARCHAR(254)');
|
||||
});
|
||||
|
||||
it('should handle various ALTER COLUMN TYPE scenarios', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE test_table (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name VARCHAR(50),
|
||||
age SMALLINT,
|
||||
score NUMERIC(5,2)
|
||||
);
|
||||
|
||||
-- Change varchar length
|
||||
ALTER TABLE test_table ALTER COLUMN name TYPE VARCHAR(100);
|
||||
|
||||
-- Change numeric type
|
||||
ALTER TABLE test_table ALTER COLUMN age TYPE INTEGER;
|
||||
|
||||
-- Change precision
|
||||
ALTER TABLE test_table ALTER COLUMN score TYPE NUMERIC(10,4);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
const table = result.tables[0];
|
||||
|
||||
const nameCol = table.columns.find((col) => col.name === 'name');
|
||||
expect(nameCol?.type).toBe('VARCHAR(100)');
|
||||
|
||||
const ageCol = table.columns.find((col) => col.name === 'age');
|
||||
expect(ageCol?.type).toBe('INTEGER');
|
||||
|
||||
const scoreCol = table.columns.find((col) => col.name === 'score');
|
||||
expect(scoreCol?.type).toBe('NUMERIC(10,4)');
|
||||
});
|
||||
|
||||
it('should handle multiple type changes on the same column', async () => {
|
||||
const sql = `
|
||||
CREATE SCHEMA IF NOT EXISTS "public";
|
||||
|
||||
CREATE TABLE "public"."table_12" (
|
||||
"id" SERIAL,
|
||||
"field1" varchar(200),
|
||||
"field2" varchar(200),
|
||||
"field3" varchar(200),
|
||||
PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
ALTER TABLE table_12 ALTER COLUMN field1 TYPE VARCHAR(254);
|
||||
ALTER TABLE table_12 ALTER COLUMN field2 TYPE VARCHAR(254);
|
||||
ALTER TABLE table_12 ALTER COLUMN field3 TYPE VARCHAR(254);
|
||||
ALTER TABLE table_12 ALTER COLUMN field1 TYPE BIGINT;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const table = result.tables[0];
|
||||
|
||||
expect(table.name).toBe('table_12');
|
||||
expect(table.schema).toBe('public');
|
||||
expect(table.columns).toHaveLength(4);
|
||||
|
||||
// Check that field1 has the final type (BIGINT), not the intermediate VARCHAR(254)
|
||||
const field1 = table.columns.find((col) => col.name === 'field1');
|
||||
expect(field1).toBeDefined();
|
||||
expect(field1?.type).toBe('BIGINT'); // Should be BIGINT, not VARCHAR(254)
|
||||
|
||||
// Check that field2 and field3 still have VARCHAR(254)
|
||||
const field2 = table.columns.find((col) => col.name === 'field2');
|
||||
expect(field2).toBeDefined();
|
||||
expect(field2?.type).toBe('VARCHAR(254)');
|
||||
|
||||
const field3 = table.columns.find((col) => col.name === 'field3');
|
||||
expect(field3).toBeDefined();
|
||||
expect(field3?.type).toBe('VARCHAR(254)');
|
||||
});
|
||||
});
|
||||
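The multiple-ALTER case above boils down to sequential in-place type updates, so the last statement wins. A minimal self-contained sketch of that idea (assumed behavior, not the parser source):

```ts
type ColumnTypes = Record<string, string>;

// Applying ALTER COLUMN TYPE statements in order means a later statement
// simply overwrites the type set by an earlier one.
function applyTypeChanges(
    initial: ColumnTypes,
    changes: Array<{ column: string; newType: string }>
): ColumnTypes {
    const result = { ...initial };
    for (const { column, newType } of changes) {
        if (column in result) {
            result[column] = newType;
        }
    }
    return result;
}

const finalTypes = applyTypeChanges(
    { field1: 'VARCHAR(200)', field2: 'VARCHAR(200)' },
    [
        { column: 'field1', newType: 'VARCHAR(254)' },
        { column: 'field2', newType: 'VARCHAR(254)' },
        { column: 'field1', newType: 'BIGINT' },
    ]
);

console.log(finalTypes.field1); // BIGINT
console.log(finalTypes.field2); // VARCHAR(254)
```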
@@ -0,0 +1,117 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('PostgreSQL ALTER TABLE with Foreign Keys', () => {
|
||||
it('should handle ALTER TABLE ADD COLUMN followed by ALTER TABLE ADD FOREIGN KEY', async () => {
|
||||
const sql = `
|
||||
CREATE SCHEMA IF NOT EXISTS "public";
|
||||
|
||||
CREATE TABLE "public"."location" (
|
||||
"id" bigint NOT NULL,
|
||||
CONSTRAINT "pk_table_7_id" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- Add new fields to existing location table
|
||||
ALTER TABLE location ADD COLUMN country_id INT;
|
||||
ALTER TABLE location ADD COLUMN state_id INT;
|
||||
ALTER TABLE location ADD COLUMN location_type_id INT;
|
||||
ALTER TABLE location ADD COLUMN city_id INT;
|
||||
ALTER TABLE location ADD COLUMN street TEXT;
|
||||
ALTER TABLE location ADD COLUMN block TEXT;
|
||||
ALTER TABLE location ADD COLUMN building TEXT;
|
||||
ALTER TABLE location ADD COLUMN floor TEXT;
|
||||
ALTER TABLE location ADD COLUMN apartment TEXT;
|
||||
ALTER TABLE location ADD COLUMN lat INT;
|
||||
ALTER TABLE location ADD COLUMN long INT;
|
||||
ALTER TABLE location ADD COLUMN elevation INT;
|
||||
ALTER TABLE location ADD COLUMN erp_site_id INT;
|
||||
ALTER TABLE location ADD COLUMN is_active TEXT;
|
||||
ALTER TABLE location ADD COLUMN remarks TEXT;
|
||||
|
||||
-- Create lookup tables
|
||||
CREATE TABLE country (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL,
|
||||
code VARCHAR(3) UNIQUE
|
||||
);
|
||||
|
||||
CREATE TABLE state (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL,
|
||||
country_id INT NOT NULL,
|
||||
FOREIGN KEY (country_id) REFERENCES country(id)
|
||||
);
|
||||
|
||||
CREATE TABLE location_type (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE city (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL,
|
||||
state_id INT NOT NULL,
|
||||
FOREIGN KEY (state_id) REFERENCES state(id)
|
||||
);
|
||||
|
||||
-- Add foreign key constraints from location to lookup tables
|
||||
ALTER TABLE location ADD CONSTRAINT fk_location_country
|
||||
FOREIGN KEY (country_id) REFERENCES country(id);
|
||||
ALTER TABLE location ADD CONSTRAINT fk_location_state
|
||||
FOREIGN KEY (state_id) REFERENCES state(id);
|
||||
ALTER TABLE location ADD CONSTRAINT fk_location_location_type
|
||||
FOREIGN KEY (location_type_id) REFERENCES location_type(id);
|
||||
ALTER TABLE location ADD CONSTRAINT fk_location_city
|
||||
FOREIGN KEY (city_id) REFERENCES city(id);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
const locationTable = result.tables.find((t) => t.name === 'location');
|
||||
|
||||
// Check tables
|
||||
expect(result.tables).toHaveLength(5); // location, country, state, location_type, city
|
||||
|
||||
// Check location table has all columns
|
||||
expect(locationTable).toBeDefined();
|
||||
expect(locationTable?.columns).toHaveLength(16); // id + 15 added columns
|
||||
|
||||
// Check foreign key relationships
|
||||
const locationRelationships = result.relationships.filter(
|
||||
(r) => r.sourceTable === 'location'
|
||||
);
|
||||
|
||||
// Should have 4 FKs from location to lookup tables + 2 from state/city
|
||||
expect(result.relationships.length).toBeGreaterThanOrEqual(6);
|
||||
|
||||
// Check specific foreign keys from location
|
||||
expect(
|
||||
locationRelationships.some(
|
||||
(r) =>
|
||||
r.sourceColumn === 'country_id' &&
|
||||
r.targetTable === 'country'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
locationRelationships.some(
|
||||
(r) =>
|
||||
r.sourceColumn === 'state_id' && r.targetTable === 'state'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
locationRelationships.some(
|
||||
(r) =>
|
||||
r.sourceColumn === 'location_type_id' &&
|
||||
r.targetTable === 'location_type'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
locationRelationships.some(
|
||||
(r) => r.sourceColumn === 'city_id' && r.targetTable === 'city'
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
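For reference, the `ALTER TABLE ... ADD CONSTRAINT ... FOREIGN KEY` statements exercised above carry every piece a relationship record needs. The extraction below is purely illustrative (the importer works off the parsed AST, not this regex); the field names mirror the assertions in the test.

```ts
const stmt = `ALTER TABLE location ADD CONSTRAINT fk_location_country
    FOREIGN KEY (country_id) REFERENCES country(id);`;

const fkPattern =
    /ALTER\s+TABLE\s+(\S+)\s+ADD\s+CONSTRAINT\s+(\S+)\s+FOREIGN\s+KEY\s*\(([^)]+)\)\s*REFERENCES\s+([^\s(]+)\s*\(([^)]+)\)/i;

const match = stmt.match(fkPattern);
if (match) {
    const [, sourceTable, name, sourceColumn, targetTable, targetColumn] = match;
    console.log({ name, sourceTable, sourceColumn, targetTable, targetColumn });
    // -> { name: 'fk_location_country', sourceTable: 'location',
    //      sourceColumn: 'country_id', targetTable: 'country', targetColumn: 'id' }
}
```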
@@ -0,0 +1,395 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('PostgreSQL Default Value Import', () => {
|
||||
describe('String Default Values', () => {
|
||||
it('should parse simple string defaults with single quotes', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE heroes (
|
||||
hero_id INTEGER NOT NULL,
|
||||
hero_status CHARACTER VARYING DEFAULT 'questing',
|
||||
PRIMARY KEY (hero_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const statusColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'hero_status'
|
||||
);
|
||||
expect(statusColumn?.default).toBe("'questing'");
|
||||
});
|
||||
|
||||
it('should parse string defaults with special characters that need escaping', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE spell_scrolls (
|
||||
scroll_id INTEGER NOT NULL,
|
||||
incantation CHARACTER VARYING DEFAULT 'Dragon''s breath',
|
||||
rune_inscription TEXT DEFAULT 'Ancient rune
|
||||
Sacred symbol',
|
||||
PRIMARY KEY (scroll_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const incantationColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'incantation'
|
||||
);
|
||||
expect(incantationColumn?.default).toBe("'Dragon''s breath'");
|
||||
});
|
||||
|
||||
it('should parse elvish text default values', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE elven_greetings (
|
||||
greeting_id INTEGER NOT NULL,
|
||||
elvish_welcome CHARACTER VARYING DEFAULT 'Mae govannen',
|
||||
PRIMARY KEY (greeting_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const greetingColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'elvish_welcome'
|
||||
);
|
||||
expect(greetingColumn?.default).toBe("'Mae govannen'");
|
||||
});
|
||||
});
|
||||
|
||||
describe('Numeric Default Values', () => {
|
||||
it('should parse integer defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE dragon_hoards (
|
||||
hoard_id INTEGER NOT NULL,
|
||||
gold_pieces INTEGER DEFAULT 0,
|
||||
max_treasure_value INTEGER DEFAULT 10000,
|
||||
PRIMARY KEY (hoard_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const goldColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'gold_pieces'
|
||||
);
|
||||
expect(goldColumn?.default).toBe('0');
|
||||
const treasureColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'max_treasure_value'
|
||||
);
|
||||
expect(treasureColumn?.default).toBe('10000');
|
||||
});
|
||||
|
||||
it('should parse decimal defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE enchanted_items (
|
||||
item_id INTEGER NOT NULL,
|
||||
market_price DECIMAL(10, 2) DEFAULT 99.99,
|
||||
magic_power_rating NUMERIC DEFAULT 0.85,
|
||||
PRIMARY KEY (item_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const priceColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'market_price'
|
||||
);
|
||||
expect(priceColumn?.default).toBe('99.99');
|
||||
const powerColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'magic_power_rating'
|
||||
);
|
||||
expect(powerColumn?.default).toBe('0.85');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Boolean Default Values', () => {
|
||||
it('should parse boolean defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE magical_artifacts (
|
||||
artifact_id INTEGER NOT NULL,
|
||||
is_cursed BOOLEAN DEFAULT TRUE,
|
||||
is_destroyed BOOLEAN DEFAULT FALSE,
|
||||
is_legendary BOOLEAN DEFAULT '1',
|
||||
is_identified BOOLEAN DEFAULT '0',
|
||||
PRIMARY KEY (artifact_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const cursedColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'is_cursed'
|
||||
);
|
||||
expect(cursedColumn?.default).toBe('TRUE');
|
||||
const destroyedColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'is_destroyed'
|
||||
);
|
||||
expect(destroyedColumn?.default).toBe('FALSE');
|
||||
const legendaryColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'is_legendary'
|
||||
);
|
||||
expect(legendaryColumn?.default).toBe("'1'");
|
||||
const identifiedColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'is_identified'
|
||||
);
|
||||
expect(identifiedColumn?.default).toBe("'0'");
|
||||
});
|
||||
});
|
||||
|
||||
describe('NULL Default Values', () => {
|
||||
it('should parse NULL defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizard_familiars (
|
||||
familiar_id INTEGER NOT NULL,
|
||||
special_ability CHARACTER VARYING DEFAULT NULL,
|
||||
PRIMARY KEY (familiar_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const abilityColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'special_ability'
|
||||
);
|
||||
expect(abilityColumn?.default).toBe('NULL');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Function Default Values', () => {
|
||||
it('should parse function defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE quest_logs (
|
||||
quest_id UUID DEFAULT gen_random_uuid(),
|
||||
quest_started TIMESTAMP DEFAULT NOW(),
|
||||
last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
difficulty_roll INTEGER DEFAULT random()
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const questIdColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'quest_id'
|
||||
);
|
||||
expect(questIdColumn?.default).toBe('GEN_RANDOM_UUID()');
|
||||
const startedColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'quest_started'
|
||||
);
|
||||
expect(startedColumn?.default).toBe('NOW()');
|
||||
const updatedColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'last_updated'
|
||||
);
|
||||
expect(updatedColumn?.default).toBe('CURRENT_TIMESTAMP');
|
||||
const difficultyColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'difficulty_roll'
|
||||
);
|
||||
expect(difficultyColumn?.default).toBe('RANDOM()');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Complex Real-World Example', () => {
|
||||
it('should handle a complex guild management table correctly', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "realm"(
|
||||
"realm_id" integer NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE "guild"(
|
||||
"guild_id" CHARACTER VARYING NOT NULL UNIQUE,
|
||||
PRIMARY KEY ("guild_id")
|
||||
);
|
||||
|
||||
CREATE TABLE "guild_schedule"(
|
||||
"schedule_id" CHARACTER VARYING NOT NULL UNIQUE,
|
||||
PRIMARY KEY ("schedule_id")
|
||||
);
|
||||
|
||||
CREATE TABLE "guild_quests"(
|
||||
"is_active" CHARACTER VARYING NOT NULL DEFAULT 'active',
|
||||
"quest_description" CHARACTER VARYING,
|
||||
"quest_type" CHARACTER VARYING,
|
||||
"quest_status" CHARACTER VARYING DEFAULT 'pending',
|
||||
"quest_id" CHARACTER VARYING NOT NULL UNIQUE,
|
||||
"reward_gold" CHARACTER VARYING,
|
||||
"quest_giver" CHARACTER VARYING,
|
||||
"party_size" CHARACTER VARYING,
|
||||
"difficulty_level" CHARACTER VARYING,
|
||||
"monster_type" CHARACTER VARYING,
|
||||
"dungeon_location" CHARACTER VARYING,
|
||||
"main_guild_ref" CHARACTER VARYING NOT NULL,
|
||||
"schedule_ref" CHARACTER VARYING,
|
||||
"last_attempt" CHARACTER VARYING,
|
||||
"max_attempts" INTEGER,
|
||||
"failed_attempts" INTEGER,
|
||||
"party_members" INTEGER,
|
||||
"loot_distributor" CHARACTER VARYING,
|
||||
"quest_validator" CHARACTER VARYING,
|
||||
"scout_report" CHARACTER VARYING,
|
||||
"completion_xp" INTEGER,
|
||||
"bonus_xp" INTEGER,
|
||||
"map_coordinates" CHARACTER VARYING,
|
||||
"quest_correlation" CHARACTER VARYING,
|
||||
"is_completed" BOOLEAN NOT NULL DEFAULT '0',
|
||||
"reward_items" CHARACTER VARYING,
|
||||
"quest_priority" INTEGER,
|
||||
"started_at" CHARACTER VARYING,
|
||||
"status" CHARACTER VARYING,
|
||||
"completed_at" CHARACTER VARYING,
|
||||
"party_level" INTEGER,
|
||||
"quest_master" CHARACTER VARYING,
|
||||
PRIMARY KEY ("quest_id"),
|
||||
FOREIGN KEY ("main_guild_ref") REFERENCES "guild"("guild_id"),
|
||||
FOREIGN KEY ("schedule_ref") REFERENCES "guild_schedule"("schedule_id")
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
// Find the guild_quests table
|
||||
const questTable = result.tables.find(
|
||||
(t) => t.name === 'guild_quests'
|
||||
);
|
||||
expect(questTable).toBeDefined();
|
||||
|
||||
// Check specific default values
|
||||
const activeColumn = questTable?.columns.find(
|
||||
(c) => c.name === 'is_active'
|
||||
);
|
||||
expect(activeColumn?.default).toBe("'active'");
|
||||
|
||||
const statusColumn = questTable?.columns.find(
|
||||
(c) => c.name === 'quest_status'
|
||||
);
|
||||
expect(statusColumn?.default).toBe("'pending'");
|
||||
|
||||
const completedColumn = questTable?.columns.find(
|
||||
(c) => c.name === 'is_completed'
|
||||
);
|
||||
expect(completedColumn?.default).toBe("'0'");
|
||||
});
|
||||
});
|
||||
|
||||
describe('ALTER TABLE ADD COLUMN with defaults', () => {
|
||||
it('should handle ALTER TABLE ADD COLUMN with default values', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE adventurers (
|
||||
adventurer_id INTEGER NOT NULL,
|
||||
PRIMARY KEY (adventurer_id)
|
||||
);
|
||||
|
||||
ALTER TABLE adventurers ADD COLUMN class_type VARCHAR(50) DEFAULT 'warrior';
|
||||
ALTER TABLE adventurers ADD COLUMN experience_points INTEGER DEFAULT 0;
|
||||
ALTER TABLE adventurers ADD COLUMN is_guild_member BOOLEAN DEFAULT TRUE;
|
||||
ALTER TABLE adventurers ADD COLUMN joined_at TIMESTAMP DEFAULT NOW();
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
const classColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'class_type'
|
||||
);
|
||||
expect(classColumn?.default).toBe("'warrior'");
|
||||
|
||||
const xpColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'experience_points'
|
||||
);
|
||||
expect(xpColumn?.default).toBe('0');
|
||||
|
||||
const guildColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'is_guild_member'
|
||||
);
|
||||
expect(guildColumn?.default).toBe('TRUE');
|
||||
|
||||
const joinedColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'joined_at'
|
||||
);
|
||||
expect(joinedColumn?.default).toBe('NOW()');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge Cases and Special Characters', () => {
|
||||
it('should handle defaults with parentheses in strings', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE spell_formulas (
|
||||
formula_id INTEGER NOT NULL,
|
||||
damage_calculation VARCHAR DEFAULT '(strength + magic) * 2',
|
||||
mana_cost TEXT DEFAULT 'cast(level * 10 - wisdom)',
|
||||
PRIMARY KEY (formula_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const damageColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'damage_calculation'
|
||||
);
|
||||
expect(damageColumn?.default).toBe("'(strength + magic) * 2'");
|
||||
const manaColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'mana_cost'
|
||||
);
|
||||
expect(manaColumn?.default).toBe("'cast(level * 10 - wisdom)'");
|
||||
});
|
||||
|
||||
it('should handle defaults with JSON strings', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE item_enchantments (
|
||||
enchantment_id INTEGER NOT NULL,
|
||||
properties JSON DEFAULT '{"element": "fire"}',
|
||||
modifiers JSONB DEFAULT '[]',
|
||||
PRIMARY KEY (enchantment_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const propertiesColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'properties'
|
||||
);
|
||||
expect(propertiesColumn?.default).toBe(`'{"element": "fire"}'`);
|
||||
const modifiersColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'modifiers'
|
||||
);
|
||||
expect(modifiersColumn?.default).toBe("'[]'");
|
||||
});
|
||||
|
||||
it('should handle casting in defaults', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE ancient_runes (
|
||||
rune_id INTEGER NOT NULL,
|
||||
rune_type VARCHAR DEFAULT 'healing'::text,
|
||||
PRIMARY KEY (rune_id)
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const runeColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'rune_type'
|
||||
);
|
||||
expect(runeColumn?.default).toBe("'healing'::text");
|
||||
});
|
||||
});
|
||||
|
||||
describe('Serial Types', () => {
|
||||
it('should not set default for SERIAL types as they auto-increment', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE monster_spawns (
|
||||
spawn_id SERIAL PRIMARY KEY,
|
||||
minion_id SMALLSERIAL,
|
||||
boss_id BIGSERIAL
|
||||
);
|
||||
`;
|
||||
const result = await fromPostgres(sql);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
const spawnColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'spawn_id'
|
||||
);
|
||||
expect(spawnColumn?.default).toBeUndefined();
|
||||
expect(spawnColumn?.increment).toBe(true);
|
||||
|
||||
const minionColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'minion_id'
|
||||
);
|
||||
expect(minionColumn?.default).toBeUndefined();
|
||||
expect(minionColumn?.increment).toBe(true);
|
||||
|
||||
const bossColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'boss_id'
|
||||
);
|
||||
expect(bossColumn?.default).toBeUndefined();
|
||||
expect(bossColumn?.increment).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
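The uppercase function defaults asserted above (`NOW()`, `GEN_RANDOM_UUID()`, `RANDOM()`) come from the PostgreSQL function-name normalization added to `buildSQLFromAST` earlier in this diff. A stripped-down stand-alone version of that normalization, for illustration only (the function list is copied from the hunk; the helper itself is an assumption):

```ts
const PG_FUNCTION_NAMES = new Set([
    'now',
    'current_timestamp',
    'current_date',
    'current_time',
    'gen_random_uuid',
    'random',
    'nextval',
    'currval',
]);

// Uppercases a default like "gen_random_uuid()" when the base name is one of
// the recognized PostgreSQL functions; anything else passes through unchanged.
function normalizePgFunctionDefault(raw: string): string {
    const name = raw.replace(/\(\)$/, '');
    return PG_FUNCTION_NAMES.has(name.toLowerCase()) ? raw.toUpperCase() : raw;
}

console.log(normalizePgFunctionDefault('gen_random_uuid()')); // GEN_RANDOM_UUID()
console.log(normalizePgFunctionDefault('now()')); // NOW()
console.log(normalizePgFunctionDefault('my_custom_fn()')); // my_custom_fn()
```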
@@ -0,0 +1,350 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('PostgreSQL Import - Quoted Identifiers with Special Characters', () => {
|
||||
describe('CREATE TABLE with quoted identifiers', () => {
|
||||
it('should handle tables with quoted schema and table names', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "my-schema"."user-profiles" (
|
||||
id serial PRIMARY KEY,
|
||||
name text NOT NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
const table = result.tables[0];
|
||||
expect(table.schema).toBe('my-schema');
|
||||
expect(table.name).toBe('user-profiles');
|
||||
});
|
||||
|
||||
it('should handle tables with spaces in schema and table names', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "user schema"."profile table" (
|
||||
"user id" integer PRIMARY KEY,
|
||||
"full name" varchar(255)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
const table = result.tables[0];
|
||||
expect(table.schema).toBe('user schema');
|
||||
expect(table.name).toBe('profile table');
|
||||
expect(table.columns).toBeDefined();
|
||||
expect(table.columns.length).toBeGreaterThan(0);
|
||||
// Note: Column names with spaces might be parsed differently
|
||||
});
|
||||
|
||||
it('should handle mixed quoted and unquoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "special-schema".users (
|
||||
id serial PRIMARY KEY
|
||||
);
|
||||
CREATE TABLE public."special-table" (
|
||||
id serial PRIMARY KEY
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(2);
|
||||
|
||||
expect(result.tables[0].schema).toBe('special-schema');
|
||||
expect(result.tables[0].name).toBe('users');
|
||||
expect(result.tables[1].schema).toBe('public');
|
||||
expect(result.tables[1].name).toBe('special-table');
|
||||
});
|
||||
|
||||
it('should handle tables with dots in names', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "schema.with.dots"."table.with.dots" (
|
||||
id serial PRIMARY KEY,
|
||||
data text
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
const table = result.tables[0];
|
||||
expect(table.schema).toBe('schema.with.dots');
|
||||
expect(table.name).toBe('table.with.dots');
|
||||
});
|
||||
});
|
||||
|
||||
describe('FOREIGN KEY with quoted identifiers', () => {
|
||||
it('should handle inline REFERENCES with quoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "auth-schema"."users" (
|
||||
"user-id" serial PRIMARY KEY,
|
||||
email text UNIQUE
|
||||
);
|
||||
|
||||
CREATE TABLE "app-schema"."user-profiles" (
|
||||
id serial PRIMARY KEY,
|
||||
"user-id" integer REFERENCES "auth-schema"."users"("user-id"),
|
||||
bio text
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const relationship = result.relationships[0];
|
||||
expect(relationship.sourceTable).toBe('user-profiles');
|
||||
expect(relationship.targetTable).toBe('users');
|
||||
expect(relationship.sourceColumn).toBe('user-id');
|
||||
expect(relationship.targetColumn).toBe('user-id');
|
||||
});
|
||||
|
||||
it('should handle FOREIGN KEY constraints with quoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "schema one"."table one" (
|
||||
"id field" serial PRIMARY KEY,
|
||||
"data field" text
|
||||
);
|
||||
|
||||
CREATE TABLE "schema two"."table two" (
|
||||
id serial PRIMARY KEY,
|
||||
"ref id" integer,
|
||||
FOREIGN KEY ("ref id") REFERENCES "schema one"."table one"("id field")
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const relationship = result.relationships[0];
|
||||
expect(relationship.sourceTable).toBe('table two');
|
||||
expect(relationship.targetTable).toBe('table one');
|
||||
expect(relationship.sourceColumn).toBe('ref id');
|
||||
expect(relationship.targetColumn).toBe('id field');
|
||||
});
|
||||
|
||||
it('should handle named constraints with quoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "auth"."users" (
|
||||
id serial PRIMARY KEY
|
||||
);
|
||||
|
||||
CREATE TABLE "app"."profiles" (
|
||||
id serial PRIMARY KEY,
|
||||
user_id integer,
|
||||
CONSTRAINT "fk-user-profile" FOREIGN KEY (user_id) REFERENCES "auth"."users"(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const relationship = result.relationships[0];
|
||||
// Note: Constraint names with special characters might be normalized
|
||||
expect(relationship.name).toBeDefined();
|
||||
});
|
||||
|
||||
it('should handle ALTER TABLE ADD CONSTRAINT with quoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "user-schema"."user-accounts" (
|
||||
"account-id" serial PRIMARY KEY,
|
||||
username text
|
||||
);
|
||||
|
||||
CREATE TABLE "order-schema"."user-orders" (
|
||||
"order-id" serial PRIMARY KEY,
|
||||
"account-id" integer
|
||||
);
|
||||
|
||||
ALTER TABLE "order-schema"."user-orders"
|
||||
ADD CONSTRAINT "fk_orders_accounts"
|
||||
FOREIGN KEY ("account-id")
|
||||
REFERENCES "user-schema"."user-accounts"("account-id");
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const relationship = result.relationships[0];
|
||||
expect(relationship.name).toBe('fk_orders_accounts');
|
||||
expect(relationship.sourceTable).toBe('user-orders');
|
||||
expect(relationship.targetTable).toBe('user-accounts');
|
||||
expect(relationship.sourceColumn).toBe('account-id');
|
||||
expect(relationship.targetColumn).toBe('account-id');
|
||||
});
|
||||
|
||||
it('should handle complex mixed quoting scenarios', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE auth.users (
|
||||
id serial PRIMARY KEY
|
||||
);
|
||||
|
||||
CREATE TABLE "app-data"."user_profiles" (
|
||||
profile_id serial PRIMARY KEY,
|
||||
"user-id" integer REFERENCES auth.users(id)
|
||||
);
|
||||
|
||||
CREATE TABLE "app-data".posts (
|
||||
id serial PRIMARY KEY,
|
||||
profile_id integer
|
||||
);
|
||||
|
||||
ALTER TABLE "app-data".posts
|
||||
ADD CONSTRAINT fk_posts_profiles
|
||||
FOREIGN KEY (profile_id)
|
||||
REFERENCES "app-data"."user_profiles"(profile_id);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(3);
|
||||
expect(result.relationships).toHaveLength(2);
|
||||
|
||||
// Verify the relationships were correctly identified
|
||||
const profilesTable = result.tables.find(
|
||||
(t) => t.name === 'user_profiles'
|
||||
);
|
||||
expect(profilesTable?.schema).toBe('app-data');
|
||||
|
||||
const postsTable = result.tables.find((t) => t.name === 'posts');
|
||||
expect(postsTable?.schema).toBe('app-data');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge cases and special scenarios', () => {
|
||||
it('should handle Unicode characters in quoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "схема"."таблица" (
|
||||
"идентификатор" serial PRIMARY KEY,
|
||||
"данные" text
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
const table = result.tables[0];
|
||||
expect(table.schema).toBe('схема');
|
||||
expect(table.name).toBe('таблица');
|
||||
expect(table.columns).toBeDefined();
|
||||
expect(table.columns.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it('should handle parentheses in quoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "schema(prod)"."users(archived)" (
|
||||
id serial PRIMARY KEY,
|
||||
data text
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
const table = result.tables[0];
|
||||
expect(table.schema).toBe('schema(prod)');
|
||||
expect(table.name).toBe('users(archived)');
|
||||
});
|
||||
|
||||
it('should handle forward slashes in quoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "api/v1"."users/profiles" (
|
||||
id serial PRIMARY KEY
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
const table = result.tables[0];
|
||||
expect(table.schema).toBe('api/v1');
|
||||
expect(table.name).toBe('users/profiles');
|
||||
});
|
||||
|
||||
it('should handle IF NOT EXISTS with quoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE IF NOT EXISTS "test-schema"."test-table" (
|
||||
id serial PRIMARY KEY
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
const table = result.tables[0];
|
||||
expect(table.schema).toBe('test-schema');
|
||||
expect(table.name).toBe('test-table');
|
||||
});
|
||||
|
||||
it('should handle ONLY keyword with quoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE ONLY "parent-schema"."parent-table" (
|
||||
id serial PRIMARY KEY
|
||||
);
|
||||
|
||||
ALTER TABLE ONLY "parent-schema"."parent-table"
|
||||
ADD CONSTRAINT "unique-constraint" UNIQUE (id);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
// ONLY keyword might trigger warnings
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
const table = result.tables[0];
|
||||
expect(table.schema).toBe('parent-schema');
|
||||
expect(table.name).toBe('parent-table');
|
||||
});
|
||||
|
||||
it('should handle self-referencing foreign keys with quoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "org-schema"."departments" (
|
||||
"dept-id" serial PRIMARY KEY,
|
||||
"parent-dept-id" integer REFERENCES "org-schema"."departments"("dept-id"),
|
||||
name text
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.warnings || []).toHaveLength(0);
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const relationship = result.relationships[0];
|
||||
expect(relationship.sourceTable).toBe('departments');
|
||||
expect(relationship.targetTable).toBe('departments'); // Self-reference
|
||||
expect(relationship.sourceColumn).toBe('parent-dept-id');
|
||||
expect(relationship.targetColumn).toBe('dept-id');
|
||||
});
|
||||
});
|
||||
});
|
||||
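Several of the cases above (for example `"schema.with.dots"."table.with.dots"`) only work if schema/table splitting respects double quotes rather than splitting on every dot. A small, assumption-level sketch of that kind of quote-aware split; it is not the parser's real code, just the technique the tests imply:

```ts
// Splits a possibly-quoted qualified name into schema and table parts,
// treating anything inside double quotes as a single segment.
function splitQualifiedName(qualified: string): { schema?: string; name: string } {
    const parts: string[] = [];
    const segment = /"([^"]+)"|([^.]+)/g;
    let match: RegExpExecArray | null;
    while ((match = segment.exec(qualified)) !== null) {
        parts.push(match[1] ?? match[2]);
    }
    if (parts.length === 1) {
        return { name: parts[0] };
    }
    return { schema: parts[0], name: parts[parts.length - 1] };
}

console.log(splitQualifiedName('"schema.with.dots"."table.with.dots"'));
// -> { schema: 'schema.with.dots', name: 'table.with.dots' }
console.log(splitQualifiedName('public.users'));
// -> { schema: 'public', name: 'users' }
console.log(splitQualifiedName('"special-schema".users'));
// -> { schema: 'special-schema', name: 'users' }
```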
@@ -1,66 +0,0 @@
import { describe, it } from 'vitest';

describe('node-sql-parser - CREATE TYPE handling', () => {
it('should show exact parser error for CREATE TYPE', async () => {
const { Parser } = await import('node-sql-parser');
const parser = new Parser();
const parserOpts = {
database: 'PostgreSQL',
};

console.log('\n=== Testing CREATE TYPE statement ===');
const createTypeSQL = `CREATE TYPE spell_element AS ENUM ('fire', 'water', 'earth', 'air');`;

try {
parser.astify(createTypeSQL, parserOpts);
console.log('CREATE TYPE parsed successfully');
} catch (error) {
console.log('CREATE TYPE parse error:', (error as Error).message);
}

console.log('\n=== Testing CREATE EXTENSION statement ===');
const createExtensionSQL = `CREATE EXTENSION IF NOT EXISTS "uuid-ossp";`;

try {
parser.astify(createExtensionSQL, parserOpts);
console.log('CREATE EXTENSION parsed successfully');
} catch (error) {
console.log(
'CREATE EXTENSION parse error:',
(error as Error).message
);
}

console.log('\n=== Testing CREATE TABLE with custom type ===');
const createTableWithTypeSQL = `CREATE TABLE wizards (
id UUID PRIMARY KEY,
element spell_element DEFAULT 'fire'
);`;

try {
parser.astify(createTableWithTypeSQL, parserOpts);
console.log('CREATE TABLE with custom type parsed successfully');
} catch (error) {
console.log(
'CREATE TABLE with custom type parse error:',
(error as Error).message
);
}

console.log('\n=== Testing CREATE TABLE with standard types only ===');
const createTableStandardSQL = `CREATE TABLE wizards (
id UUID PRIMARY KEY,
element VARCHAR(20) DEFAULT 'fire'
);`;

try {
parser.astify(createTableStandardSQL, parserOpts);
console.log('CREATE TABLE with standard types parsed successfully');
} catch (error) {
console.log(
'CREATE TABLE with standard types parse error:',
(error as Error).message
);
}
});
});
@@ -91,7 +91,38 @@ export interface AlterTableExprItem {
action: string;
resource?: string;
type?: string;
keyword?: string;
constraint?: { constraint_type?: string };
// Properties for ADD COLUMN
column?:
| {
column?:
| {
expr?: {
value?: string;
};
}
| string;
}
| string
| ColumnReference;
definition?: {
dataType?: string;
length?: number;
precision?: number;
scale?: number;
suffix?: unknown[];
nullable?: { type: string };
unique?: string;
primary_key?: string;
constraint?: string;
default_val?: unknown;
auto_increment?: string;
};
nullable?: { type: string; value?: string };
unique?: string;
default_val?: unknown;
// Properties for constraints
create_definitions?:
| AlterTableConstraintDefinition
| {

@@ -7,6 +7,8 @@ import type {
SQLForeignKey,
SQLEnumType,
} from '../../common';
import { buildSQLFromAST } from '../../common';
import { DatabaseType } from '@/lib/domain/database-type';
import type {
TableReference,
ColumnReference,
@@ -347,13 +349,20 @@ function extractColumnsFromSQL(sql: string): SQLColumn[] {

// Try to extract column definition
// Match: column_name TYPE[(params)][array]
// Updated regex to handle complex types like GEOGRAPHY(POINT, 4326) and custom types like subscription_status
const columnMatch = trimmedLine.match(
/^\s*["']?(\w+)["']?\s+([\w_]+(?:\([^)]+\))?(?:\[\])?)/i
);
// First extract column name and everything after it
const columnMatch = trimmedLine.match(/^\s*["']?(\w+)["']?\s+(.+)/i);
if (columnMatch) {
const columnName = columnMatch[1];
let columnType = columnMatch[2];
const restOfLine = columnMatch[2];

// Now extract the type from the rest of the line
// Match type which could be multi-word (like CHARACTER VARYING) with optional params
const typeMatch = restOfLine.match(
/^((?:CHARACTER\s+VARYING|DOUBLE\s+PRECISION|[\w]+)(?:\([^)]+\))?(?:\[\])?)/i
);

if (!typeMatch) continue;
let columnType = typeMatch[1].trim();

// Normalize PostGIS types
if (columnType.toUpperCase().startsWith('GEOGRAPHY')) {
@@ -373,13 +382,6 @@ function extractColumnsFromSQL(sql: string): SQLColumn[] {
'SMALLSERIAL',
].includes(upperType.split('(')[0]);

// Check if it's an array type
let isArrayType = false;
if (columnType.endsWith('[]')) {
isArrayType = true;
columnType = columnType.slice(0, -2);
}

// Normalize the type
columnType = normalizePostgreSQLType(columnType);

@@ -387,7 +389,65 @@ function extractColumnsFromSQL(sql: string): SQLColumn[] {
const isPrimary = trimmedLine.match(/PRIMARY\s+KEY/i) !== null;
const isNotNull = trimmedLine.match(/NOT\s+NULL/i) !== null;
const isUnique = trimmedLine.match(/\bUNIQUE\b/i) !== null;
const hasDefault = trimmedLine.match(/DEFAULT\s+/i) !== null;

// Extract default value
let defaultValue: string | undefined;
// Updated regex to handle casting with :: operator
const defaultMatch = trimmedLine.match(
/DEFAULT\s+((?:'[^']*'|"[^"]*"|\S+)(?:::\w+)?)/i
);
if (defaultMatch) {
let defVal = defaultMatch[1].trim();
// Remove trailing comma if present
defVal = defVal.replace(/,$/, '').trim();
// Handle string literals
if (defVal.startsWith("'") && defVal.endsWith("'")) {
// Keep the quotes for string literals
defaultValue = defVal;
} else if (defVal.match(/^\d+(\.\d+)?$/)) {
// Numeric value
defaultValue = defVal;
} else if (
defVal.toUpperCase() === 'TRUE' ||
defVal.toUpperCase() === 'FALSE'
) {
// Boolean value
defaultValue = defVal.toUpperCase();
} else if (defVal.toUpperCase() === 'NULL') {
// NULL value
defaultValue = 'NULL';
} else if (defVal.includes('(') && defVal.includes(')')) {
// Function call (like gen_random_uuid())
// Normalize PostgreSQL function names to uppercase
const funcMatch = defVal.match(/^(\w+)\(/);
if (funcMatch) {
const funcName = funcMatch[1];
const pgFunctions = [
'now',
'current_timestamp',
'current_date',
'current_time',
'gen_random_uuid',
'random',
'nextval',
'currval',
];
if (pgFunctions.includes(funcName.toLowerCase())) {
defaultValue = defVal.replace(
funcName,
funcName.toUpperCase()
);
} else {
defaultValue = defVal;
}
} else {
defaultValue = defVal;
}
} else {
// Other expressions
defaultValue = defVal;
}
}

columns.push({
name: columnName,
@@ -395,14 +455,13 @@ function extractColumnsFromSQL(sql: string): SQLColumn[] {
nullable: !isNotNull && !isPrimary,
primaryKey: isPrimary,
unique: isUnique || isPrimary,
default: hasDefault ? 'has default' : undefined,
default: defaultValue,
increment:
isSerialType ||
trimmedLine.includes('gen_random_uuid()') ||
trimmedLine.includes('uuid_generate_v4()') ||
trimmedLine.includes('GENERATED ALWAYS AS IDENTITY') ||
trimmedLine.includes('GENERATED BY DEFAULT AS IDENTITY'),
array: isArrayType,
});
}
}
@@ -498,16 +557,21 @@ function extractForeignKeysFromCreateTable(

const tableBody = tableBodyMatch[1];

// Pattern for inline REFERENCES - more flexible to handle various formats
// Pattern for inline REFERENCES - handles quoted and unquoted identifiers
const inlineRefPattern =
/["']?(\w+)["']?\s+(?:\w+(?:\([^)]*\))?(?:\[[^\]]*\])?(?:\s+\w+)*\s+)?REFERENCES\s+(?:["']?(\w+)["']?\.)?["']?(\w+)["']?\s*\(\s*["']?(\w+)["']?\s*\)/gi;
/(?:"([^"]+)"|([^"\s,()]+))\s+(?:\w+(?:\([^)]*\))?(?:\[[^\]]*\])?(?:\s+\w+)*\s+)?REFERENCES\s+(?:(?:"([^"]+)"|([^"\s.]+))\.)?(?:"([^"]+)"|([^"\s.(]+))\s*\(\s*(?:"([^"]+)"|([^"\s,)]+))\s*\)/gi;

let match;
while ((match = inlineRefPattern.exec(tableBody)) !== null) {
const sourceColumn = match[1];
const targetSchema = match[2] || 'public';
const targetTable = match[3];
const targetColumn = match[4];
// Extract values from appropriate match groups
// Groups: 1=quoted source col, 2=unquoted source col,
// 3=quoted schema, 4=unquoted schema,
// 5=quoted target table, 6=unquoted target table,
// 7=quoted target col, 8=unquoted target col
const sourceColumn = match[1] || match[2];
const targetSchema = match[3] || match[4] || 'public';
const targetTable = match[5] || match[6];
const targetColumn = match[7] || match[8];

const targetTableKey = `${targetSchema}.${targetTable}`;
const targetTableId = tableMap[targetTableKey];
@@ -529,15 +593,16 @@
}
}

// Pattern for FOREIGN KEY constraints
// Pattern for FOREIGN KEY constraints - handles quoted and unquoted identifiers
const fkConstraintPattern =
/FOREIGN\s+KEY\s*\(\s*["']?(\w+)["']?\s*\)\s*REFERENCES\s+(?:["']?(\w+)["']?\.)?["']?(\w+)["']?\s*\(\s*["']?(\w+)["']?\s*\)/gi;
/FOREIGN\s+KEY\s*\(\s*(?:"([^"]+)"|([^"\s,)]+))\s*\)\s*REFERENCES\s+(?:(?:"([^"]+)"|([^"\s.]+))\.)?(?:"([^"]+)"|([^"\s.(]+))\s*\(\s*(?:"([^"]+)"|([^"\s,)]+))\s*\)/gi;

while ((match = fkConstraintPattern.exec(tableBody)) !== null) {
const sourceColumn = match[1];
const targetSchema = match[2] || 'public';
const targetTable = match[3];
const targetColumn = match[4];
// Extract values from appropriate match groups
const sourceColumn = match[1] || match[2];
const targetSchema = match[3] || match[4] || 'public';
const targetTable = match[5] || match[6];
const targetColumn = match[7] || match[8];

const targetTableKey = `${targetSchema}.${targetTable}`;
const targetTableId = tableMap[targetTableKey];
@@ -593,12 +658,16 @@ export async function fromPostgres(
? stmt.sql.substring(createTableIndex)
: stmt.sql;

// Updated regex to properly handle quoted identifiers with special characters
// Matches: schema.table, "schema"."table", "schema".table, schema."table"
const tableMatch = sqlFromCreate.match(
/CREATE\s+TABLE(?:\s+IF\s+NOT\s+EXISTS)?(?:\s+ONLY)?\s+(?:"?([^"\s.]+)"?\.)?["'`]?([^"'`\s.(]+)["'`]?/i
/CREATE\s+TABLE(?:\s+IF\s+NOT\s+EXISTS)?(?:\s+ONLY)?\s+(?:(?:"([^"]+)"|([^"\s.]+))\.)?(?:"([^"]+)"|([^"\s.(]+))/i
);
if (tableMatch) {
const schemaName = tableMatch[1] || 'public';
const tableName = tableMatch[2];
// Extract schema and table names from the appropriate match groups
// Groups: 1=quoted schema, 2=unquoted schema, 3=quoted table, 4=unquoted table
const schemaName = tableMatch[1] || tableMatch[2] || 'public';
const tableName = tableMatch[3] || tableMatch[4];
const tableKey = `${schemaName}.${tableName}`;
tableMap[tableKey] = generateId();
}
@@ -790,16 +859,6 @@ export async function fromPostgres(
normalizePostgreSQLType(rawDataType);
}

// Check if it's an array type
let isArrayType = false;
if (normalizedBaseType.endsWith('[]')) {
isArrayType = true;
normalizedBaseType = normalizedBaseType.slice(
0,
-2
);
}

// Now handle parameters - but skip for integer types that shouldn't have them
let finalDataType = normalizedBaseType;

@@ -892,7 +951,6 @@ export async function fromPostgres(
stmt.sql
.toUpperCase()
.includes('IDENTITY')),
array: isArrayType,
});
}
} else if (def.resource === 'constraint') {
@@ -957,12 +1015,16 @@ export async function fromPostgres(
? stmt.sql.substring(createTableIndex)
: stmt.sql;

// Updated regex to properly handle quoted identifiers with special characters
// Matches: schema.table, "schema"."table", "schema".table, schema."table"
const tableMatch = sqlFromCreate.match(
/CREATE\s+TABLE(?:\s+IF\s+NOT\s+EXISTS)?(?:\s+ONLY)?\s+(?:"?([^"\s.]+)"?\.)?["'`]?([^"'`\s.(]+)["'`]?/i
/CREATE\s+TABLE(?:\s+IF\s+NOT\s+EXISTS)?(?:\s+ONLY)?\s+(?:(?:"([^"]+)"|([^"\s.]+))\.)?(?:"([^"]+)"|([^"\s.(]+))/i
);
if (tableMatch) {
const schemaName = tableMatch[1] || 'public';
const tableName = tableMatch[2];
// Extract schema and table names from the appropriate match groups
// Groups: 1=quoted schema, 2=unquoted schema, 3=quoted table, 4=unquoted table
const schemaName = tableMatch[1] || tableMatch[2] || 'public';
const tableName = tableMatch[3] || tableMatch[4];
const tableKey = `${schemaName}.${tableName}`;
const tableId = tableMap[tableKey];

@@ -1001,7 +1063,7 @@ export async function fromPostgres(
}
}

// Fourth pass: process ALTER TABLE statements for foreign keys
// Fourth pass: process ALTER TABLE statements for foreign keys and ADD COLUMN
for (const stmt of statements) {
if (stmt.type === 'alter' && stmt.parsed) {
const alterTableStmt = stmt.parsed as AlterTableStatement;
@@ -1031,13 +1093,440 @@ export async function fromPostgres(
);
if (!table) continue;

// Process foreign key constraints in ALTER TABLE
// Process ALTER TABLE expressions
if (alterTableStmt.expr && Array.isArray(alterTableStmt.expr)) {
alterTableStmt.expr.forEach((expr: AlterTableExprItem) => {
if (expr.action === 'add' && expr.create_definitions) {
// Handle ALTER COLUMN TYPE
if (expr.action === 'alter' && expr.resource === 'column') {
// Extract column name
let columnName: string | undefined;
if (
typeof expr.column === 'object' &&
'column' in expr.column
) {
const innerColumn = expr.column.column;
if (
typeof innerColumn === 'object' &&
'expr' in innerColumn &&
innerColumn.expr?.value
) {
columnName = innerColumn.expr.value;
} else if (typeof innerColumn === 'string') {
columnName = innerColumn;
}
} else if (typeof expr.column === 'string') {
columnName = expr.column;
}

// Check if it's a TYPE change
if (
columnName &&
expr.type === 'alter' &&
expr.definition?.dataType
) {
// Find the column in the table and update its type
const column = table.columns.find(
(col) => (col as SQLColumn).name === columnName
);
if (column) {
const definition = expr.definition;
const rawDataType = String(definition.dataType);

// console.log('ALTER TYPE expr:', JSON.stringify(expr, null, 2));

// Normalize the type
let normalizedType =
normalizePostgreSQLType(rawDataType);

// Handle type parameters
if (
definition.scale !== undefined &&
definition.scale !== null
) {
// For NUMERIC/DECIMAL with scale, length is actually precision
const precision =
definition.length ||
definition.precision;
normalizedType = `${normalizedType}(${precision},${definition.scale})`;
} else if (
definition.length !== undefined &&
definition.length !== null
) {
normalizedType = `${normalizedType}(${definition.length})`;
} else if (definition.precision !== undefined) {
normalizedType = `${normalizedType}(${definition.precision})`;
} else if (
definition.suffix &&
Array.isArray(definition.suffix) &&
definition.suffix.length > 0
) {
const params = definition.suffix
.map((s: unknown) => {
if (
typeof s === 'object' &&
s !== null &&
'value' in s
) {
return String(s.value);
}
return String(s);
})
.join(',');
normalizedType = `${normalizedType}(${params})`;
}

// Update the column type
(column as SQLColumn).type = normalizedType;

// Update typeArgs if applicable
if (
definition.scale !== undefined &&
definition.scale !== null
) {
// For NUMERIC/DECIMAL with scale
const precision =
definition.length ||
definition.precision;
(column as SQLColumn).typeArgs = {
precision: precision,
scale: definition.scale,
};
} else if (definition.length) {
(column as SQLColumn).typeArgs = {
length: definition.length,
};
} else if (definition.precision) {
(column as SQLColumn).typeArgs = {
precision: definition.precision,
};
}
}
}
// Handle ADD COLUMN
} else if (
expr.action === 'add' &&
expr.resource === 'column'
) {
// Handle ADD COLUMN directly from expr structure
// Extract column name from the nested structure
let columnName: string | undefined;
if (
typeof expr.column === 'object' &&
'column' in expr.column
) {
const innerColumn = expr.column.column;
if (
typeof innerColumn === 'object' &&
'expr' in innerColumn &&
innerColumn.expr?.value
) {
columnName = innerColumn.expr.value;
} else if (typeof innerColumn === 'string') {
columnName = innerColumn;
}
} else if (typeof expr.column === 'string') {
columnName = expr.column;
}

if (columnName && typeof columnName === 'string') {
const definition = expr.definition || {};
const rawDataType = String(
definition?.dataType || 'TEXT'
);
// console.log('expr:', JSON.stringify(expr, null, 2));

// Normalize the type
let normalizedBaseType =
normalizePostgreSQLType(rawDataType);

// Check if it's a serial type
const upperType = rawDataType.toUpperCase();
const isSerialType = [
'SERIAL',
'SERIAL2',
'SERIAL4',
'SERIAL8',
'BIGSERIAL',
'SMALLSERIAL',
].includes(upperType.split('(')[0]);

if (isSerialType) {
const typeLength = definition?.length as
| number
| undefined;
if (upperType === 'SERIAL') {
if (typeLength === 2) {
normalizedBaseType = 'SMALLINT';
} else if (typeLength === 8) {
normalizedBaseType = 'BIGINT';
} else {
normalizedBaseType = 'INTEGER';
}
}
}

// Handle type parameters
let finalDataType = normalizedBaseType;
const isNormalizedIntegerType =
['INTEGER', 'BIGINT', 'SMALLINT'].includes(
normalizedBaseType
) &&
(upperType === 'INT' || upperType === 'SERIAL');

if (!isSerialType && !isNormalizedIntegerType) {
const precision = definition?.precision;
const scale = definition?.scale;
const length = definition?.length;
const suffix =
(definition?.suffix as unknown[]) || [];

if (suffix.length > 0) {
const params = suffix
.map((s: unknown) => {
if (
typeof s === 'object' &&
s !== null &&
'value' in s
) {
return String(
(s as { value: unknown })
.value
);
}
return String(s);
})
.join(',');
finalDataType = `${normalizedBaseType}(${params})`;
} else if (precision !== undefined) {
if (scale !== undefined) {
finalDataType = `${normalizedBaseType}(${precision},${scale})`;
} else {
finalDataType = `${normalizedBaseType}(${precision})`;
}
} else if (
length !== undefined &&
length !== null
) {
finalDataType = `${normalizedBaseType}(${length})`;
}
}

// Check for nullable constraint
let nullable = true;
if (isSerialType) {
nullable = false;
} else if (
expr.nullable &&
expr.nullable.type === 'not null'
) {
nullable = false;
} else if (
definition?.nullable &&
definition.nullable.type === 'not null'
) {
nullable = false;
}

// Check for unique constraint
const isUnique =
expr.unique === 'unique' ||
definition?.unique === 'unique';

// Check for default value
let defaultValue: string | undefined;
const defaultVal =
expr.default_val || definition?.default_val;
if (defaultVal && !isSerialType) {
// Create a temporary columnDef to use the getDefaultValueString function
const tempColumnDef = {
default_val: defaultVal,
} as ColumnDefinition;
defaultValue =
getDefaultValueString(tempColumnDef);
}

// Create the new column object
const newColumn: SQLColumn = {
name: columnName,
type: finalDataType,
nullable: nullable,
primaryKey:
definition?.primary_key === 'primary key' ||
definition?.constraint === 'primary key' ||
isSerialType,
unique: isUnique,
default: defaultValue,
increment:
isSerialType ||
definition?.auto_increment ===
'auto_increment' ||
(stmt.sql
.toUpperCase()
.includes('GENERATED') &&
stmt.sql
.toUpperCase()
.includes('IDENTITY')),
};

// Add the column to the table if it doesn't already exist
const tableColumns = table.columns as SQLColumn[];
if (
!tableColumns.some(
(col) => col.name === columnName
)
) {
tableColumns.push(newColumn);
}
}
} else if (
expr.action === 'add' &&
expr.create_definitions
) {
const createDefs = expr.create_definitions;

if (
// Check if it's adding a column (legacy structure)
if (createDefs.resource === 'column') {
const columnDef =
createDefs as unknown as ColumnDefinition;
const columnName = extractColumnName(
columnDef.column
);

if (columnName) {
// Extract the column type and properties
const definition =
columnDef.definition as Record<
string,
unknown
>;
const rawDataType = String(
definition?.dataType || 'TEXT'
);

// Normalize the type
let normalizedBaseType =
normalizePostgreSQLType(rawDataType);

// Check if it's a serial type
const upperType = rawDataType.toUpperCase();
const isSerialType = [
'SERIAL',
'SERIAL2',
'SERIAL4',
'SERIAL8',
'BIGSERIAL',
'SMALLSERIAL',
].includes(upperType.split('(')[0]);

if (isSerialType) {
const typeLength = definition?.length as
| number
| undefined;
if (upperType === 'SERIAL') {
if (typeLength === 2) {
normalizedBaseType = 'SMALLINT';
} else if (typeLength === 8) {
normalizedBaseType = 'BIGINT';
} else {
normalizedBaseType = 'INTEGER';
}
}
}

// Handle type parameters
let finalDataType = normalizedBaseType;
const isNormalizedIntegerType =
['INTEGER', 'BIGINT', 'SMALLINT'].includes(
normalizedBaseType
) &&
(upperType === 'INT' ||
upperType === 'SERIAL');

if (!isSerialType && !isNormalizedIntegerType) {
const precision =
columnDef.definition?.precision;
const scale = columnDef.definition?.scale;
const length = columnDef.definition?.length;
const suffix =
(definition?.suffix as unknown[]) || [];

if (suffix.length > 0) {
const params = suffix
.map((s: unknown) => {
if (
typeof s === 'object' &&
s !== null &&
'value' in s
) {
return String(
(
s as {
value: unknown;
}
).value
);
}
return String(s);
})
.join(',');
finalDataType = `${normalizedBaseType}(${params})`;
} else if (precision !== undefined) {
if (scale !== undefined) {
finalDataType = `${normalizedBaseType}(${precision},${scale})`;
} else {
finalDataType = `${normalizedBaseType}(${precision})`;
}
} else if (
length !== undefined &&
length !== null
) {
finalDataType = `${normalizedBaseType}(${length})`;
}
}

// Create the new column object
const newColumn: SQLColumn = {
name: columnName,
type: finalDataType,
nullable: isSerialType
? false
: columnDef.nullable?.type !==
'not null',
primaryKey:
columnDef.primary_key ===
'primary key' ||
columnDef.definition?.constraint ===
'primary key' ||
isSerialType,
unique: columnDef.unique === 'unique',
typeArgs: getTypeArgs(columnDef.definition),
default: isSerialType
? undefined
: getDefaultValueString(columnDef),
increment:
isSerialType ||
columnDef.auto_increment ===
'auto_increment' ||
(stmt.sql
.toUpperCase()
.includes('GENERATED') &&
stmt.sql
.toUpperCase()
.includes('IDENTITY')),
};

// Add the column to the table if it doesn't already exist
const tableColumns2 =
table.columns as SQLColumn[];
if (
!tableColumns2.some(
(col) => col.name === columnName
)
) {
tableColumns2.push(newColumn);
}
}
} else if (
createDefs.constraint_type === 'FOREIGN KEY' ||
createDefs.constraint_type === 'foreign key'
) {
@@ -1148,19 +1637,188 @@ export async function fromPostgres(
}
} else if (stmt.type === 'alter' && !stmt.parsed) {
// Handle ALTER TABLE statements that failed to parse

// First try to extract ALTER COLUMN TYPE statements
const alterTypeMatch = stmt.sql.match(
/ALTER\s+TABLE\s+(?:ONLY\s+)?(?:(?:"([^"]+)"|([^"\s.]+))\.)?(?:"([^"]+)"|([^"\s.(]+))\s+ALTER\s+COLUMN\s+(?:"([^"]+)"|([^"\s]+))\s+TYPE\s+([\w_]+(?:\([^)]*\))?(?:\[\])?)/i
);

if (alterTypeMatch) {
const schemaName =
alterTypeMatch[1] || alterTypeMatch[2] || 'public';
const tableName = alterTypeMatch[3] || alterTypeMatch[4];
const columnName = alterTypeMatch[5] || alterTypeMatch[6];
let columnType = alterTypeMatch[7];

const table = findTableWithSchemaSupport(
tables,
tableName,
schemaName
);
if (table && columnName) {
const column = (table.columns as SQLColumn[]).find(
(col) => col.name === columnName
);
if (column) {
// Normalize and update the type
columnType = normalizePostgreSQLType(columnType);
column.type = columnType;

// Extract and update typeArgs if present
const typeMatch = columnType.match(
/^(\w+)(?:\(([^)]+)\))?$/
);
if (typeMatch && typeMatch[2]) {
const params = typeMatch[2]
.split(',')
.map((p) => p.trim());
if (params.length === 1) {
column.typeArgs = {
length: parseInt(params[0]),
};
} else if (params.length === 2) {
column.typeArgs = {
precision: parseInt(params[0]),
scale: parseInt(params[1]),
};
}
}
}
}
}

// Then try to extract ADD COLUMN statements
const alterColumnMatch = stmt.sql.match(
/ALTER\s+TABLE\s+(?:ONLY\s+)?(?:(?:"([^"]+)"|([^"\s.]+))\.)?(?:"([^"]+)"|([^"\s.(]+))\s+ADD\s+COLUMN\s+(?:"([^"]+)"|([^"\s]+))\s+([\w_]+(?:\([^)]*\))?(?:\[\])?)/i
);

if (alterColumnMatch) {
const schemaName =
alterColumnMatch[1] || alterColumnMatch[2] || 'public';
const tableName = alterColumnMatch[3] || alterColumnMatch[4];
const columnName = alterColumnMatch[5] || alterColumnMatch[6];
let columnType = alterColumnMatch[7];

const table = findTableWithSchemaSupport(
tables,
tableName,
schemaName
);
if (table && columnName) {
const tableColumns = table.columns as SQLColumn[];
if (!tableColumns.some((col) => col.name === columnName)) {
// Normalize the type
columnType = normalizePostgreSQLType(columnType);

// Check for constraints in the statement
const columnDefPart = stmt.sql.substring(
stmt.sql.indexOf(columnName)
);
const isPrimary =
columnDefPart.match(/PRIMARY\s+KEY/i) !== null;
const isNotNull =
columnDefPart.match(/NOT\s+NULL/i) !== null;
const isUnique =
columnDefPart.match(/\bUNIQUE\b/i) !== null;
// Extract default value
let defaultValue: string | undefined;
// Updated regex to handle casting with :: operator
const defaultMatch = columnDefPart.match(
/DEFAULT\s+((?:'[^']*'|"[^"]*"|\S+)(?:::\w+)?)/i
);
if (defaultMatch) {
let defVal = defaultMatch[1].trim();
// Remove trailing comma or semicolon if present
defVal = defVal.replace(/[,;]$/, '').trim();
// Handle string literals
if (
defVal.startsWith("'") &&
defVal.endsWith("'")
) {
// Keep the quotes for string literals
defaultValue = defVal;
} else if (defVal.match(/^\d+(\.\d+)?$/)) {
// Numeric value
defaultValue = defVal;
} else if (
defVal.toUpperCase() === 'TRUE' ||
defVal.toUpperCase() === 'FALSE'
) {
// Boolean value
defaultValue = defVal.toUpperCase();
} else if (defVal.toUpperCase() === 'NULL') {
// NULL value
defaultValue = 'NULL';
} else if (
defVal.includes('(') &&
defVal.includes(')')
) {
// Function call
// Normalize PostgreSQL function names to uppercase
const funcMatch = defVal.match(/^(\w+)\(/);
if (funcMatch) {
const funcName = funcMatch[1];
const pgFunctions = [
'now',
'current_timestamp',
'current_date',
'current_time',
'gen_random_uuid',
'random',
'nextval',
'currval',
];
if (
pgFunctions.includes(
funcName.toLowerCase()
)
) {
defaultValue = defVal.replace(
funcName,
funcName.toUpperCase()
);
} else {
defaultValue = defVal;
}
} else {
defaultValue = defVal;
}
} else {
// Other expressions
defaultValue = defVal;
}
}

tableColumns.push({
name: columnName,
type: columnType,
nullable: !isNotNull && !isPrimary,
primaryKey: isPrimary,
unique: isUnique || isPrimary,
default: defaultValue,
increment: false,
});
}
}
}

// Extract foreign keys using regex as fallback
// Updated regex to handle quoted identifiers properly
const alterFKMatch = stmt.sql.match(
/ALTER\s+TABLE\s+(?:ONLY\s+)?(?:"?([^"\s.]+)"?\.)?["']?([^"'\s.(]+)["']?\s+ADD\s+CONSTRAINT\s+["']?([^"'\s]+)["']?\s+FOREIGN\s+KEY\s*\(["']?([^"'\s)]+)["']?\)\s+REFERENCES\s+(?:"?([^"\s.]+)"?\.)?["']?([^"'\s.(]+)["']?\s*\(["']?([^"'\s)]+)["']?\)/i
/ALTER\s+TABLE\s+(?:ONLY\s+)?(?:(?:"([^"]+)"|([^"\s.]+))\.)?(?:"([^"]+)"|([^"\s.(]+))\s+ADD\s+CONSTRAINT\s+(?:"([^"]+)"|([^"\s]+))\s+FOREIGN\s+KEY\s*\((?:"([^"]+)"|([^"\s)]+))\)\s+REFERENCES\s+(?:(?:"([^"]+)"|([^"\s.]+))\.)?(?:"([^"]+)"|([^"\s.(]+))\s*\((?:"([^"]+)"|([^"\s)]+))\)/i
);

if (alterFKMatch) {
const sourceSchema = alterFKMatch[1] || 'public';
const sourceTable = alterFKMatch[2];
const constraintName = alterFKMatch[3];
const sourceColumn = alterFKMatch[4];
const targetSchema = alterFKMatch[5] || 'public';
const targetTable = alterFKMatch[6];
const targetColumn = alterFKMatch[7];
// Extract values from appropriate match groups
const sourceSchema =
alterFKMatch[1] || alterFKMatch[2] || 'public';
const sourceTable = alterFKMatch[3] || alterFKMatch[4];
const constraintName = alterFKMatch[5] || alterFKMatch[6];
const sourceColumn = alterFKMatch[7] || alterFKMatch[8];
const targetSchema =
alterFKMatch[9] || alterFKMatch[10] || 'public';
const targetTable = alterFKMatch[11] || alterFKMatch[12];
const targetColumn = alterFKMatch[13] || alterFKMatch[14];

const sourceTableId = getTableIdWithSchemaSupport(
tableMap,
@@ -1294,58 +1952,10 @@ export async function fromPostgres(
function getDefaultValueString(
columnDef: ColumnDefinition
): string | undefined {
let defVal = columnDef.default_val;

if (
defVal &&
typeof defVal === 'object' &&
defVal.type === 'default' &&
'value' in defVal
) {
defVal = defVal.value;
}
const defVal = columnDef.default_val;

if (defVal === undefined || defVal === null) return undefined;

let value: string | undefined;

switch (typeof defVal) {
case 'string':
value = defVal;
break;
case 'number':
value = String(defVal);
break;
case 'boolean':
value = defVal ? 'TRUE' : 'FALSE';
break;
case 'object':
if ('value' in defVal && typeof defVal.value === 'string') {
value = defVal.value;
} else if ('raw' in defVal && typeof defVal.raw === 'string') {
value = defVal.raw;
} else if (defVal.type === 'bool') {
value = defVal.value ? 'TRUE' : 'FALSE';
} else if (defVal.type === 'function' && defVal.name) {
const fnName = defVal.name;
if (
fnName &&
typeof fnName === 'object' &&
Array.isArray(fnName.name) &&
fnName.name.length > 0 &&
fnName.name[0].value
) {
value = fnName.name[0].value.toUpperCase();
} else if (typeof fnName === 'string') {
value = fnName.toUpperCase();
} else {
value = 'UNKNOWN_FUNCTION';
}
}
break;
default:
value = undefined;
}

return value;
// Use buildSQLFromAST to reconstruct the default value
return buildSQLFromAST(defVal, DatabaseType.POSTGRESQL);
}

@@ -0,0 +1,178 @@
import { describe, it, expect } from 'vitest';
import { fromSQLite } from '../sqlite';

describe('SQLite Import Tests', () => {
it('should parse SQLite script with sqlite_sequence table and all relationships', async () => {
const sql = `
CREATE TABLE users (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT,
age INTEGER
);
CREATE TABLE sqlite_sequence(name,seq);
CREATE TABLE products (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT,
price REAL
);
CREATE TABLE user_products (
id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id INTEGER NOT NULL,
product_id INTEGER NOT NULL,
purchased_at DATETIME DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users(id),
FOREIGN KEY (product_id) REFERENCES products(id)
);
`;

const result = await fromSQLite(sql);

// ============= CHECK TOTAL COUNTS =============
// Should have exactly 4 tables
expect(result.tables).toHaveLength(4);

// Should have exactly 2 foreign key relationships
expect(result.relationships).toHaveLength(2);

// ============= CHECK USERS TABLE =============
const usersTable = result.tables.find((t) => t.name === 'users');
expect(usersTable).toBeDefined();
expect(usersTable?.columns).toHaveLength(3); // id, name, age

// Check each column in users table
expect(usersTable?.columns[0]).toMatchObject({
name: 'id',
type: 'INTEGER',
primaryKey: true,
increment: true,
nullable: false,
});
expect(usersTable?.columns[1]).toMatchObject({
name: 'name',
type: 'TEXT',
primaryKey: false,
nullable: true,
});
expect(usersTable?.columns[2]).toMatchObject({
name: 'age',
type: 'INTEGER',
primaryKey: false,
nullable: true,
});

// ============= CHECK SQLITE_SEQUENCE TABLE =============
const sqliteSequenceTable = result.tables.find(
(t) => t.name === 'sqlite_sequence'
);
expect(sqliteSequenceTable).toBeDefined();
expect(sqliteSequenceTable?.columns).toHaveLength(2); // name, seq

// Check columns in sqlite_sequence table
expect(sqliteSequenceTable?.columns[0]).toMatchObject({
name: 'name',
type: 'TEXT', // Should default to TEXT when no type specified
primaryKey: false,
nullable: true,
});
expect(sqliteSequenceTable?.columns[1]).toMatchObject({
name: 'seq',
type: 'TEXT', // Should default to TEXT when no type specified
primaryKey: false,
nullable: true,
});

// ============= CHECK PRODUCTS TABLE =============
const productsTable = result.tables.find((t) => t.name === 'products');
expect(productsTable).toBeDefined();
expect(productsTable?.columns).toHaveLength(3); // id, name, price

// Check each column in products table
expect(productsTable?.columns[0]).toMatchObject({
name: 'id',
type: 'INTEGER',
primaryKey: true,
increment: true,
nullable: false,
});
expect(productsTable?.columns[1]).toMatchObject({
name: 'name',
type: 'TEXT',
primaryKey: false,
nullable: true,
});
expect(productsTable?.columns[2]).toMatchObject({
name: 'price',
type: 'REAL',
primaryKey: false,
nullable: true,
});

// ============= CHECK USER_PRODUCTS TABLE =============
const userProductsTable = result.tables.find(
(t) => t.name === 'user_products'
);
expect(userProductsTable).toBeDefined();
expect(userProductsTable?.columns).toHaveLength(4); // id, user_id, product_id, purchased_at

// Check each column in user_products table
expect(userProductsTable?.columns[0]).toMatchObject({
name: 'id',
type: 'INTEGER',
primaryKey: true,
increment: true,
nullable: false,
});
expect(userProductsTable?.columns[1]).toMatchObject({
name: 'user_id',
type: 'INTEGER',
primaryKey: false,
nullable: false, // NOT NULL constraint
});
expect(userProductsTable?.columns[2]).toMatchObject({
name: 'product_id',
type: 'INTEGER',
primaryKey: false,
nullable: false, // NOT NULL constraint
});
expect(userProductsTable?.columns[3]).toMatchObject({
name: 'purchased_at',
type: 'TIMESTAMP', // DATETIME should map to TIMESTAMP
primaryKey: false,
nullable: true,
default: 'CURRENT_TIMESTAMP',
});

// ============= CHECK FOREIGN KEY RELATIONSHIPS =============
// FK 1: user_products.user_id -> users.id
const userIdFK = result.relationships.find(
(r) =>
r.sourceTable === 'user_products' &&
r.sourceColumn === 'user_id' &&
r.targetTable === 'users' &&
r.targetColumn === 'id'
);
expect(userIdFK).toBeDefined();
expect(userIdFK).toMatchObject({
sourceTable: 'user_products',
sourceColumn: 'user_id',
targetTable: 'users',
targetColumn: 'id',
});

// FK 2: user_products.product_id -> products.id
const productIdFK = result.relationships.find(
(r) =>
r.sourceTable === 'user_products' &&
r.sourceColumn === 'product_id' &&
r.targetTable === 'products' &&
r.targetColumn === 'id'
);
expect(productIdFK).toBeDefined();
expect(productIdFK).toMatchObject({
sourceTable: 'user_products',
sourceColumn: 'product_id',
targetTable: 'products',
targetColumn: 'id',
});
});
});
@@ -32,11 +32,11 @@ export async function fromSQLite(sqlContent: string): Promise<SQLParserResult> {
const tableMap: Record<string, string> = {}; // Maps table name to its ID

try {
// SPECIAL HANDLING: Direct line-by-line parser for SQLite DDL
// This ensures we preserve the exact data types from the original DDL
// SPECIAL HANDLING: Direct regex-based parser for SQLite DDL
// This ensures we handle all SQLite-specific syntax including tables without types
const directlyParsedTables = parseCreateTableStatements(sqlContent);

// Check if we successfully parsed tables directly
// Always try direct parsing first as it's more reliable for SQLite
if (directlyParsedTables.length > 0) {
// Map the direct parsing results to the expected SQLParserResult format
directlyParsedTables.forEach((table) => {
@@ -56,8 +56,19 @@ export async function fromSQLite(sqlContent: string): Promise<SQLParserResult> {
// Process foreign keys using the regex approach
findForeignKeysUsingRegex(sqlContent, tableMap, relationships);

// Return the result
return { tables, relationships };
// Create placeholder tables for any missing referenced tables
addPlaceholderTablesForFKReferences(
tables,
relationships,
tableMap
);

// Filter out any invalid relationships
const validRelationships = relationships.filter((rel) => {
return isValidForeignKeyRelationship(rel, tables);
});

return { tables, relationships: validRelationships };
}

// Preprocess SQL to handle SQLite quoted identifiers
@@ -130,101 +141,182 @@ function parseCreateTableStatements(sqlContent: string): {
columns: SQLColumn[];
}[] = [];

// Split SQL content into lines
const lines = sqlContent.split('\n');

let currentTable: { name: string; columns: SQLColumn[] } | null = null;
let inCreateTable = false;

// Process each line
for (let i = 0; i < lines.length; i++) {
const line = lines[i].trim();

// Skip empty lines and comments
if (!line || line.startsWith('--')) {
continue;
}

// Check for CREATE TABLE statement
if (line.toUpperCase().startsWith('CREATE TABLE')) {
// Extract table name
const tableNameMatch =
/CREATE\s+TABLE\s+(?:if\s+not\s+exists\s+)?["'`]?(\w+)["'`]?/i.exec(
line
);
if (tableNameMatch && tableNameMatch[1]) {
inCreateTable = true;
currentTable = {
name: tableNameMatch[1],
columns: [],
};
// Remove comments before processing
const cleanedSQL = sqlContent
.split('\n')
.map((line) => {
const commentIndex = line.indexOf('--');
if (commentIndex >= 0) {
return line.substring(0, commentIndex);
}
}
// Check for end of CREATE TABLE statement
else if (inCreateTable && line.includes(');')) {
if (currentTable) {
tables.push(currentTable);
return line;
})
.join('\n');

// Match all CREATE TABLE statements including those without column definitions
const createTableRegex =
/CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?["'`]?(\w+)["'`]?\s*\(([^;]+?)\)\s*;/gis;
let match;

while ((match = createTableRegex.exec(cleanedSQL)) !== null) {
const tableName = match[1];
const tableBody = match[2].trim();

const table: { name: string; columns: SQLColumn[] } = {
name: tableName,
columns: [],
};

// Special case: sqlite_sequence or tables with columns but no types
if (tableName === 'sqlite_sequence' || !tableBody.includes(' ')) {
// Parse simple column list without types (e.g., "name,seq")
const simpleColumns = tableBody.split(',').map((col) => col.trim());
for (const colName of simpleColumns) {
if (
colName &&
!colName.toUpperCase().startsWith('FOREIGN KEY') &&
!colName.toUpperCase().startsWith('PRIMARY KEY') &&
!colName.toUpperCase().startsWith('UNIQUE') &&
!colName.toUpperCase().startsWith('CHECK') &&
!colName.toUpperCase().startsWith('CONSTRAINT')
) {
table.columns.push({
name: colName.replace(/["'`]/g, ''),
type: 'TEXT', // Default to TEXT for untyped columns
nullable: true,
primaryKey: false,
unique: false,
default: '',
increment: false,
});
}
}
inCreateTable = false;
currentTable = null;
}
// Process column definitions inside CREATE TABLE
else if (inCreateTable && currentTable && line.includes('"')) {
// Column line pattern optimized for user's DDL format
const columnPattern = /\s*["'`](\w+)["'`]\s+([A-Za-z0-9_]+)(.+)?/i;
const match = columnPattern.exec(line);
} else {
// Parse normal table with typed columns
// Split by commas not inside parentheses
const columnDefs = [];
let current = '';
let parenDepth = 0;

if (match) {
const columnName = match[1];
const rawType = match[2].toUpperCase();
const restOfLine = match[3] || '';
for (let i = 0; i < tableBody.length; i++) {
const char = tableBody[i];
if (char === '(') parenDepth++;
else if (char === ')') parenDepth--;
else if (char === ',' && parenDepth === 0) {
columnDefs.push(current.trim());
current = '';
continue;
}
current += char;
}
if (current.trim()) {
columnDefs.push(current.trim());
}

// Determine column properties
const isPrimaryKey = restOfLine
.toUpperCase()
.includes('PRIMARY KEY');
const isNotNull = restOfLine.toUpperCase().includes('NOT NULL');
const isUnique = restOfLine.toUpperCase().includes('UNIQUE');
for (const columnDef of columnDefs) {
const line = columnDef.trim();

// Extract default value
let defaultValue = '';
const defaultMatch = /DEFAULT\s+([^,\s)]+)/i.exec(restOfLine);
if (defaultMatch) {
defaultValue = defaultMatch[1];
// Skip constraints
if (
line.toUpperCase().startsWith('FOREIGN KEY') ||
line.toUpperCase().startsWith('PRIMARY KEY') ||
line.toUpperCase().startsWith('UNIQUE') ||
line.toUpperCase().startsWith('CHECK') ||
line.toUpperCase().startsWith('CONSTRAINT')
) {
continue;
}

// Map to appropriate SQLite storage class
let columnType = rawType;
if (rawType === 'INTEGER' || rawType === 'INT') {
columnType = 'INTEGER';
} else if (
['REAL', 'FLOAT', 'DOUBLE', 'NUMERIC', 'DECIMAL'].includes(
rawType
)
) {
columnType = 'REAL';
} else if (rawType === 'BLOB' || rawType === 'BINARY') {
columnType = 'BLOB';
} else if (
['TIMESTAMP', 'DATETIME', 'DATE'].includes(rawType)
) {
columnType = 'TIMESTAMP';
} else {
columnType = 'TEXT';
}
// Parse column: handle both quoted and unquoted identifiers
// Pattern: [quotes]columnName[quotes] dataType [constraints]
const columnPattern = /^["'`]?([\w]+)["'`]?\s+(\w+)(.*)$/i;
const columnMatch = columnPattern.exec(line);

// Add column to the table
currentTable.columns.push({
name: columnName,
type: columnType,
nullable: !isNotNull,
primaryKey: isPrimaryKey,
unique: isUnique || isPrimaryKey,
default: defaultValue,
increment: isPrimaryKey && columnType === 'INTEGER',
});
if (columnMatch) {
const columnName = columnMatch[1];
const rawType = columnMatch[2].toUpperCase();
const restOfLine = columnMatch[3] || '';
const upperRest = restOfLine.toUpperCase();

// Determine column properties
const isPrimaryKey = upperRest.includes('PRIMARY KEY');
const isAutoIncrement = upperRest.includes('AUTOINCREMENT');
const isNotNull =
upperRest.includes('NOT NULL') || isPrimaryKey;
const isUnique =
upperRest.includes('UNIQUE') || isPrimaryKey;

// Extract default value
let defaultValue = '';
const defaultMatch = /DEFAULT\s+([^,)]+)/i.exec(restOfLine);
if (defaultMatch) {
defaultValue = defaultMatch[1].trim();
// Remove quotes if present
if (
(defaultValue.startsWith("'") &&
defaultValue.endsWith("'")) ||
(defaultValue.startsWith('"') &&
defaultValue.endsWith('"'))
) {
defaultValue = defaultValue.slice(1, -1);
}
}

// Map to appropriate SQLite storage class
let columnType = rawType;
if (rawType === 'INTEGER' || rawType === 'INT') {
columnType = 'INTEGER';
} else if (
[
'REAL',
'FLOAT',
'DOUBLE',
'NUMERIC',
'DECIMAL',
].includes(rawType)
) {
columnType = 'REAL';
} else if (rawType === 'BLOB' || rawType === 'BINARY') {
columnType = 'BLOB';
} else if (
['TIMESTAMP', 'DATETIME', 'DATE', 'TIME'].includes(
rawType
)
) {
columnType = 'TIMESTAMP';
} else if (
['TEXT', 'VARCHAR', 'CHAR', 'CLOB', 'STRING'].includes(
rawType
) ||
rawType.startsWith('VARCHAR') ||
rawType.startsWith('CHAR')
) {
columnType = 'TEXT';
} else {
// Default to TEXT for unknown types
columnType = 'TEXT';
}

// Add column to the table
table.columns.push({
name: columnName,
type: columnType,
nullable: !isNotNull,
primaryKey: isPrimaryKey,
unique: isUnique,
default: defaultValue,
increment:
isPrimaryKey &&
isAutoIncrement &&
columnType === 'INTEGER',
});
}
}
}

if (table.columns.length > 0 || tableName === 'sqlite_sequence') {
tables.push(table);
}
}

return tables;

@@ -0,0 +1,252 @@
import { describe, it, expect } from 'vitest';
import { fromSQLServer } from '../sqlserver';

describe('SQL Server Default Value Import', () => {
describe('String Default Values', () => {
it('should parse simple string defaults with single quotes', async () => {
const sql = `
CREATE TABLE kingdom_citizens (
citizen_id INT NOT NULL,
allegiance NVARCHAR(50) DEFAULT 'neutral',
PRIMARY KEY (citizen_id)
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(1);
const allegianceColumn = result.tables[0].columns.find(
(c) => c.name === 'allegiance'
);
expect(allegianceColumn?.default).toBe("'neutral'");
});

it('should parse string defaults with Unicode prefix', async () => {
const sql = `
CREATE TABLE ancient_scrolls (
scroll_id INT NOT NULL,
runic_inscription NVARCHAR(255) DEFAULT N'Ancient wisdom',
prophecy NVARCHAR(MAX) DEFAULT N'The chosen one shall rise',
PRIMARY KEY (scroll_id)
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(1);
const runicColumn = result.tables[0].columns.find(
(c) => c.name === 'runic_inscription'
);
expect(runicColumn?.default).toBe("N'Ancient wisdom'");
const prophecyColumn = result.tables[0].columns.find(
(c) => c.name === 'prophecy'
);
expect(prophecyColumn?.default).toBe(
"N'The chosen one shall rise'"
);
});
});

describe('Numeric Default Values', () => {
it('should parse integer defaults', async () => {
const sql = `
CREATE TABLE castle_treasury (
treasury_id INT NOT NULL,
gold_count INT DEFAULT 0,
max_capacity BIGINT DEFAULT 100000,
guard_posts SMALLINT DEFAULT 5,
PRIMARY KEY (treasury_id)
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(1);
const goldColumn = result.tables[0].columns.find(
(c) => c.name === 'gold_count'
);
expect(goldColumn?.default).toBe('0');
const capacityColumn = result.tables[0].columns.find(
(c) => c.name === 'max_capacity'
);
expect(capacityColumn?.default).toBe('100000');
const guardColumn = result.tables[0].columns.find(
(c) => c.name === 'guard_posts'
);
expect(guardColumn?.default).toBe('5');
});

it('should parse decimal defaults', async () => {
const sql = `
CREATE TABLE blacksmith_shop (
item_id INT NOT NULL,
weapon_price DECIMAL(10, 2) DEFAULT 99.99,
guild_discount FLOAT DEFAULT 0.15,
enchantment_tax NUMERIC(5, 4) DEFAULT 0.0825,
PRIMARY KEY (item_id)
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(1);
const priceColumn = result.tables[0].columns.find(
(c) => c.name === 'weapon_price'
);
expect(priceColumn?.default).toBe('99.99');
const discountColumn = result.tables[0].columns.find(
(c) => c.name === 'guild_discount'
);
expect(discountColumn?.default).toBe('0.15');
const taxColumn = result.tables[0].columns.find(
(c) => c.name === 'enchantment_tax'
);
expect(taxColumn?.default).toBe('0.0825');
});
});

describe('Boolean Default Values', () => {
it('should parse BIT defaults', async () => {
const sql = `
CREATE TABLE magic_barriers (
barrier_id INT NOT NULL,
is_active BIT DEFAULT 1,
is_breached BIT DEFAULT 0,
PRIMARY KEY (barrier_id)
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(1);
const activeColumn = result.tables[0].columns.find(
(c) => c.name === 'is_active'
);
expect(activeColumn?.default).toBe('1');
const breachedColumn = result.tables[0].columns.find(
(c) => c.name === 'is_breached'
);
expect(breachedColumn?.default).toBe('0');
});
});

describe('Date and Time Default Values', () => {
it('should parse date/time function defaults', async () => {
const sql = `
CREATE TABLE battle_logs (
battle_id INT NOT NULL,
battle_started DATETIME DEFAULT GETDATE(),
last_action DATETIME2 DEFAULT SYSDATETIME(),
battle_date DATE DEFAULT GETDATE(),
PRIMARY KEY (battle_id)
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(1);
const startedColumn = result.tables[0].columns.find(
(c) => c.name === 'battle_started'
);
expect(startedColumn?.default).toBe('GETDATE()');
const actionColumn = result.tables[0].columns.find(
(c) => c.name === 'last_action'
);
expect(actionColumn?.default).toBe('SYSDATETIME()');
const dateColumn = result.tables[0].columns.find(
(c) => c.name === 'battle_date'
);
expect(dateColumn?.default).toBe('GETDATE()');
});
});

describe('IDENTITY columns', () => {
it('should handle IDENTITY columns correctly', async () => {
const sql = `
CREATE TABLE legendary_weapons (
weapon_id INT IDENTITY(1,1) NOT NULL,
legacy_id BIGINT IDENTITY(100,10) NOT NULL,
weapon_name NVARCHAR(100),
PRIMARY KEY (weapon_id)
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(1);
const weaponColumn = result.tables[0].columns.find(
(c) => c.name === 'weapon_id'
);
expect(weaponColumn?.increment).toBe(true);
const legacyColumn = result.tables[0].columns.find(
(c) => c.name === 'legacy_id'
);
expect(legacyColumn?.increment).toBe(true);
});
});

describe('Complex Real-World Example with Schema', () => {
it('should handle complex table with schema and multiple default types', async () => {
const sql = `
CREATE TABLE [dbo].[QuestContracts] (
[ContractID] INT IDENTITY(1,1) NOT NULL,
[AdventurerID] INT NOT NULL,
[QuestDate] DATETIME DEFAULT GETDATE(),
[QuestStatus] NVARCHAR(20) DEFAULT N'Available',
[RewardAmount] DECIMAL(10, 2) DEFAULT 0.00,
[IsCompleted] BIT DEFAULT 0,
[CompletedDate] DATETIME NULL,
[QuestNotes] NVARCHAR(MAX) DEFAULT NULL,
[DifficultyLevel] INT DEFAULT 5,
[QuestGuid] UNIQUEIDENTIFIER DEFAULT NEWID(),
PRIMARY KEY ([ContractID])
);
`;

const result = await fromSQLServer(sql);
const table = result.tables[0];
expect(table).toBeDefined();
expect(table.schema).toBe('dbo');

// Check various default values
const questDateColumn = table.columns.find(
(c) => c.name === 'QuestDate'
);
expect(questDateColumn?.default).toBe('GETDATE()');

const statusColumn = table.columns.find(
(c) => c.name === 'QuestStatus'
);
expect(statusColumn?.default).toBe("N'Available'");

const rewardColumn = table.columns.find(
(c) => c.name === 'RewardAmount'
);
expect(rewardColumn?.default).toBe('0.00');

const completedColumn = table.columns.find(
(c) => c.name === 'IsCompleted'
);
expect(completedColumn?.default).toBe('0');

const difficultyColumn = table.columns.find(
(c) => c.name === 'DifficultyLevel'
);
expect(difficultyColumn?.default).toBe('5');

const guidColumn = table.columns.find(
(c) => c.name === 'QuestGuid'
);
expect(guidColumn?.default).toBe('NEWID()');
});
});

describe('Expressions in defaults', () => {
it('should handle parentheses in default expressions', async () => {
const sql = `
CREATE TABLE spell_calculations (
calculation_id INT NOT NULL,
base_damage INT DEFAULT (10 + 5),
total_power DECIMAL(10,2) DEFAULT ((100.0 * 0.15) + 10),
PRIMARY KEY (calculation_id)
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(1);
const damageColumn = result.tables[0].columns.find(
(c) => c.name === 'base_damage'
);
expect(damageColumn?.default).toBe('(10 + 5)');
const powerColumn = result.tables[0].columns.find(
(c) => c.name === 'total_power'
);
expect(powerColumn?.default).toBe('((100.0 * 0.15) + 10)');
});
});
});
@@ -0,0 +1,91 @@
import { describe, it, expect } from 'vitest';
import { fromSQLServer } from '../sqlserver';

describe('SQL Server Complex Fantasy Case', () => {
    it('should parse complex SQL with SpellDefinition and SpellComponent tables', async () => {
        // Complex SQL with same structure as user's case but fantasy-themed
        const sql = `CREATE TABLE [DBO].[SpellDefinition](
            [SPELLID] (VARCHAR)(32),
            [HASVERBALCOMP] BOOLEAN,
            [INCANTATION] [VARCHAR](128),
            [INCANTATIONFIX] BOOLEAN,
            [ITSCOMPONENTREL] [VARCHAR](32), FOREIGN KEY (itscomponentrel) REFERENCES SpellComponent(SPELLID),
            [SHOWVISUALS] BOOLEAN, ) ON [PRIMARY]

            CREATE TABLE [DBO].[SpellComponent](
            [ALIAS] CHAR (50),
            [SPELLID] (VARCHAR)(32),
            [ISOPTIONAL] BOOLEAN,
            [ITSPARENTCOMP] [VARCHAR](32), FOREIGN KEY (itsparentcomp) REFERENCES SpellComponent(SPELLID),
            [ITSSCHOOLMETA] [VARCHAR](32), FOREIGN KEY (itsschoolmeta) REFERENCES MagicSchool(SCHOOLID),
            [KEYATTR] CHAR (100), ) ON [PRIMARY]`;

        console.log('Testing complex fantasy SQL...');
        console.log(
            'Number of CREATE TABLE statements:',
            (sql.match(/CREATE\s+TABLE/gi) || []).length
        );

        const result = await fromSQLServer(sql);

        console.log(
            'Result tables:',
            result.tables.map((t) => t.name)
        );
        console.log('Result relationships:', result.relationships.length);

        // Debug: Show actual relationships
        if (result.relationships.length === 0) {
            console.log('WARNING: No relationships found!');
        } else {
            console.log('Relationships found:');
            result.relationships.forEach((r) => {
                console.log(
                    `  ${r.sourceTable}.${r.sourceColumn} -> ${r.targetTable}.${r.targetColumn}`
                );
            });
        }

        // Should create TWO tables
        expect(result.tables).toHaveLength(2);

        // Check first table
        const spellDef = result.tables.find(
            (t) => t.name === 'SpellDefinition'
        );
        expect(spellDef).toBeDefined();
        expect(spellDef?.schema).toBe('DBO');
        expect(spellDef?.columns).toHaveLength(6);

        // Check second table
        const spellComp = result.tables.find(
            (t) => t.name === 'SpellComponent'
        );
        expect(spellComp).toBeDefined();
        expect(spellComp?.schema).toBe('DBO');
        expect(spellComp?.columns).toHaveLength(6);

        // Check foreign key relationships (should have at least 2)
        expect(result.relationships.length).toBeGreaterThanOrEqual(2);

        // Check FK from SpellDefinition to SpellComponent
        const fkDefToComp = result.relationships.find(
            (r) =>
                r.sourceTable === 'SpellDefinition' &&
                r.targetTable === 'SpellComponent' &&
                r.sourceColumn === 'itscomponentrel'
        );
        expect(fkDefToComp).toBeDefined();
        expect(fkDefToComp?.targetColumn).toBe('SPELLID');

        // Check self-referential FK in SpellComponent
        const selfRefFK = result.relationships.find(
            (r) =>
                r.sourceTable === 'SpellComponent' &&
                r.targetTable === 'SpellComponent' &&
                r.sourceColumn === 'itsparentcomp'
        );
        expect(selfRefFK).toBeDefined();
        expect(selfRefFK?.targetColumn).toBe('SPELLID');
    });
});
@@ -0,0 +1,102 @@
import { describe, it, expect } from 'vitest';
import { sqlImportToDiagram } from '../../../index';
import { DatabaseType } from '@/lib/domain/database-type';

describe('SQL Server Full Import Flow', () => {
    it('should create relationships when importing through the full flow', async () => {
        const sql = `CREATE TABLE [DBO].[SpellDefinition](
            [SPELLID] (VARCHAR)(32),
            [HASVERBALCOMP] BOOLEAN,
            [INCANTATION] [VARCHAR](128),
            [INCANTATIONFIX] BOOLEAN,
            [ITSCOMPONENTREL] [VARCHAR](32), FOREIGN KEY (itscomponentrel) REFERENCES SpellComponent(SPELLID),
            [SHOWVISUALS] BOOLEAN, ) ON [PRIMARY]

            CREATE TABLE [DBO].[SpellComponent](
            [ALIAS] CHAR (50),
            [SPELLID] (VARCHAR)(32),
            [ISOPTIONAL] BOOLEAN,
            [ITSPARENTCOMP] [VARCHAR](32), FOREIGN KEY (itsparentcomp) REFERENCES SpellComponent(SPELLID),
            [ITSSCHOOLMETA] [VARCHAR](32), FOREIGN KEY (itsschoolmeta) REFERENCES MagicSchool(SCHOOLID),
            [KEYATTR] CHAR (100), ) ON [PRIMARY]`;

        // Test the full import flow as the application uses it
        const diagram = await sqlImportToDiagram({
            sqlContent: sql,
            sourceDatabaseType: DatabaseType.SQL_SERVER,
            targetDatabaseType: DatabaseType.SQL_SERVER,
        });

        // Verify tables
        expect(diagram.tables).toHaveLength(2);
        const tableNames = diagram.tables?.map((t) => t.name).sort();
        expect(tableNames).toEqual(['SpellComponent', 'SpellDefinition']);

        // Verify relationships are created in the diagram
        expect(diagram.relationships).toBeDefined();
        expect(diagram.relationships?.length).toBeGreaterThanOrEqual(2);

        // Check specific relationships
        const fk1 = diagram.relationships?.find(
            (r) =>
                r.sourceFieldId &&
                r.targetFieldId && // Must have field IDs
                diagram.tables?.some(
                    (t) =>
                        t.id === r.sourceTableId && t.name === 'SpellDefinition'
                )
        );
        expect(fk1).toBeDefined();

        const fk2 = diagram.relationships?.find(
            (r) =>
                r.sourceFieldId &&
                r.targetFieldId && // Must have field IDs
                diagram.tables?.some(
                    (t) =>
                        t.id === r.sourceTableId &&
                        t.name === 'SpellComponent' &&
                        t.id === r.targetTableId // self-reference
                )
        );
        expect(fk2).toBeDefined();

        console.log(
            'Full flow test - Relationships created:',
            diagram.relationships?.length
        );
        diagram.relationships?.forEach((r) => {
            const sourceTable = diagram.tables?.find(
                (t) => t.id === r.sourceTableId
            );
            const targetTable = diagram.tables?.find(
                (t) => t.id === r.targetTableId
            );
            const sourceField = sourceTable?.fields.find(
                (f) => f.id === r.sourceFieldId
            );
            const targetField = targetTable?.fields.find(
                (f) => f.id === r.targetFieldId
            );
            console.log(
                `  ${sourceTable?.name}.${sourceField?.name} -> ${targetTable?.name}.${targetField?.name}`
            );
        });
    });

    it('should handle case-insensitive field matching', async () => {
        const sql = `CREATE TABLE DragonLair (
            [LAIRID] INT PRIMARY KEY,
            [parentLairId] INT, FOREIGN KEY (PARENTLAIRID) REFERENCES DragonLair(lairid)
        )`;

        const diagram = await sqlImportToDiagram({
            sqlContent: sql,
            sourceDatabaseType: DatabaseType.SQL_SERVER,
            targetDatabaseType: DatabaseType.SQL_SERVER,
        });

        // Should create the self-referential relationship despite case differences
        expect(diagram.relationships?.length).toBe(1);
    });
});
@@ -0,0 +1,132 @@
import { describe, it, expect } from 'vitest';
import { fromSQLServer } from '../sqlserver';

describe('SQL Server Multiple Tables with Foreign Keys', () => {
    it('should parse multiple tables with foreign keys in user format', async () => {
        const sql = `
            CREATE TABLE [DBO].[QuestReward](
                [BOID] (VARCHAR)(32),
                [HASEXTRACOL] BOOLEAN,
                [REWARDCODE] [VARCHAR](128),
                [REWARDFIX] BOOLEAN,
                [ITSQUESTREL] [VARCHAR](32), FOREIGN KEY (itsquestrel) REFERENCES QuestRelation(BOID),
                [SHOWDETAILS] BOOLEAN,
            ) ON [PRIMARY]

            CREATE TABLE [DBO].[QuestRelation](
                [ALIAS] CHAR (50),
                [BOID] (VARCHAR)(32),
                [ISOPTIONAL] BOOLEAN,
                [ITSPARENTREL] [VARCHAR](32), FOREIGN KEY (itsparentrel) REFERENCES QuestRelation(BOID),
                [ITSGUILDMETA] [VARCHAR](32), FOREIGN KEY (itsguildmeta) REFERENCES GuildMeta(BOID),
                [KEYATTR] CHAR (100),
            ) ON [PRIMARY]
        `;

        const result = await fromSQLServer(sql);

        // Should create both tables
        expect(result.tables).toHaveLength(2);

        // Check first table
        const questReward = result.tables.find((t) => t.name === 'QuestReward');
        expect(questReward).toBeDefined();
        expect(questReward?.schema).toBe('DBO');
        expect(questReward?.columns).toHaveLength(6);

        // Check second table
        const questRelation = result.tables.find(
            (t) => t.name === 'QuestRelation'
        );
        expect(questRelation).toBeDefined();
        expect(questRelation?.schema).toBe('DBO');
        expect(questRelation?.columns).toHaveLength(6);

        // Check foreign key relationships
        expect(result.relationships).toHaveLength(2); // Should have 2 FKs (one self-referential in QuestRelation, one from QuestReward to QuestRelation)

        // Check FK from QuestReward to QuestRelation
        const fkToRelation = result.relationships.find(
            (r) =>
                r.sourceTable === 'QuestReward' &&
                r.targetTable === 'QuestRelation'
        );
        expect(fkToRelation).toBeDefined();
        expect(fkToRelation?.sourceColumn).toBe('itsquestrel');
        expect(fkToRelation?.targetColumn).toBe('BOID');

        // Check self-referential FK in QuestRelation
        const selfRefFK = result.relationships.find(
            (r) =>
                r.sourceTable === 'QuestRelation' &&
                r.targetTable === 'QuestRelation' &&
                r.sourceColumn === 'itsparentrel'
        );
        expect(selfRefFK).toBeDefined();
        expect(selfRefFK?.targetColumn).toBe('BOID');
    });

    it('should parse multiple tables with circular dependencies', async () => {
        const sql = `
            CREATE TABLE [DBO].[Dragon](
                [DRAGONID] (VARCHAR)(32),
                [NAME] [VARCHAR](100),
                [ITSLAIRREL] [VARCHAR](32), FOREIGN KEY (itslairrel) REFERENCES DragonLair(LAIRID),
                [POWER] INT,
            ) ON [PRIMARY]

            CREATE TABLE [DBO].[DragonLair](
                [LAIRID] (VARCHAR)(32),
                [LOCATION] [VARCHAR](200),
                [ITSGUARDIAN] [VARCHAR](32), FOREIGN KEY (itsguardian) REFERENCES Dragon(DRAGONID),
                [TREASURES] INT,
            ) ON [PRIMARY]
        `;

        const result = await fromSQLServer(sql);

        // Should create both tables despite circular dependency
        expect(result.tables).toHaveLength(2);

        const dragon = result.tables.find((t) => t.name === 'Dragon');
        expect(dragon).toBeDefined();

        const dragonLair = result.tables.find((t) => t.name === 'DragonLair');
        expect(dragonLair).toBeDefined();

        // Check foreign key relationships (may have one or both depending on parser behavior with circular deps)
        expect(result.relationships.length).toBeGreaterThanOrEqual(1);
    });

    it('should handle exact user input format', async () => {
        // Exact copy of the user's input with fantasy theme
        const sql = `CREATE TABLE [DBO].[WizardDef](
            [BOID] (VARCHAR)(32),
            [HASEXTRACNTCOL] BOOLEAN,
            [HISTORYCD] [VARCHAR](128),
            [HISTORYCDFIX] BOOLEAN,
            [ITSADWIZARDREL] [VARCHAR](32), FOREIGN KEY (itsadwizardrel) REFERENCES WizardRel(BOID),
            [SHOWDETAILS] BOOLEAN, ) ON [PRIMARY]

            CREATE TABLE [DBO].[WizardRel](
            [ALIAS] CHAR (50),
            [BOID] (VARCHAR)(32),
            [ISOPTIONAL] BOOLEAN,
            [ITSARWIZARDREL] [VARCHAR](32), FOREIGN KEY (itsarwizardrel) REFERENCES WizardRel(BOID),
            [ITSARMETABO] [VARCHAR](32), FOREIGN KEY (itsarmetabo) REFERENCES MetaBO(BOID),
            [KEYATTR] CHAR (100), ) ON [PRIMARY]`;

        const result = await fromSQLServer(sql);

        // This should create TWO tables, not just one
        expect(result.tables).toHaveLength(2);

        const wizardDef = result.tables.find((t) => t.name === 'WizardDef');
        expect(wizardDef).toBeDefined();
        expect(wizardDef?.columns).toHaveLength(6);

        const wizardRel = result.tables.find((t) => t.name === 'WizardRel');
        expect(wizardRel).toBeDefined();
        expect(wizardRel?.columns).toHaveLength(6);
    });
});
@@ -0,0 +1,93 @@
import { describe, it, expect } from 'vitest';
import { fromSQLServer } from '../sqlserver';

describe('SQL Server FK Verification', () => {
    it('should correctly parse FKs from complex fantasy SQL', async () => {
        const sql = `CREATE TABLE [DBO].[SpellDefinition](
            [SPELLID] (VARCHAR)(32),
            [HASVERBALCOMP] BOOLEAN,
            [INCANTATION] [VARCHAR](128),
            [INCANTATIONFIX] BOOLEAN,
            [ITSCOMPONENTREL] [VARCHAR](32), FOREIGN KEY (itscomponentrel) REFERENCES SpellComponent(SPELLID),
            [SHOWVISUALS] BOOLEAN, ) ON [PRIMARY]

            CREATE TABLE [DBO].[SpellComponent](
            [ALIAS] CHAR (50),
            [SPELLID] (VARCHAR)(32),
            [ISOPTIONAL] BOOLEAN,
            [ITSPARENTCOMP] [VARCHAR](32), FOREIGN KEY (itsparentcomp) REFERENCES SpellComponent(SPELLID),
            [ITSSCHOOLMETA] [VARCHAR](32), FOREIGN KEY (itsschoolmeta) REFERENCES MagicSchool(SCHOOLID),
            [KEYATTR] CHAR (100), ) ON [PRIMARY]`;

        const result = await fromSQLServer(sql);

        // Verify tables
        expect(result.tables).toHaveLength(2);
        expect(result.tables.map((t) => t.name).sort()).toEqual([
            'SpellComponent',
            'SpellDefinition',
        ]);

        // Verify that FKs were found (even if MagicSchool doesn't exist)
        // The parsing should find 3 FKs initially, but linkRelationships will filter out the one to MagicSchool
        expect(result.relationships.length).toBeGreaterThanOrEqual(2);

        // Verify specific FKs that should exist
        const fk1 = result.relationships.find(
            (r) =>
                r.sourceTable === 'SpellDefinition' &&
                r.sourceColumn.toLowerCase() === 'itscomponentrel' &&
                r.targetTable === 'SpellComponent'
        );
        expect(fk1).toBeDefined();
        expect(fk1?.targetColumn).toBe('SPELLID');
        expect(fk1?.sourceTableId).toBeTruthy();
        expect(fk1?.targetTableId).toBeTruthy();

        const fk2 = result.relationships.find(
            (r) =>
                r.sourceTable === 'SpellComponent' &&
                r.sourceColumn.toLowerCase() === 'itsparentcomp' &&
                r.targetTable === 'SpellComponent'
        );
        expect(fk2).toBeDefined();
        expect(fk2?.targetColumn).toBe('SPELLID');
        expect(fk2?.sourceTableId).toBeTruthy();
        expect(fk2?.targetTableId).toBeTruthy();

        // Log for debugging
        console.log('\n=== FK Verification Results ===');
        console.log(
            'Tables:',
            result.tables.map((t) => `${t.schema}.${t.name}`)
        );
        console.log('Total FKs found:', result.relationships.length);
        result.relationships.forEach((r, i) => {
            console.log(
                `FK ${i + 1}: ${r.sourceTable}.${r.sourceColumn} -> ${r.targetTable}.${r.targetColumn}`
            );
            console.log(`  IDs: ${r.sourceTableId} -> ${r.targetTableId}`);
        });
    });

    it('should parse inline FOREIGN KEY syntax correctly', async () => {
        // Simplified test with just one FK to ensure parsing works
        const sql = `CREATE TABLE [DBO].[WizardTower](
            [TOWERID] INT,
            [MASTERKEY] [VARCHAR](32), FOREIGN KEY (MASTERKEY) REFERENCES ArcaneGuild(GUILDID),
            [NAME] VARCHAR(100)
        ) ON [PRIMARY]

        CREATE TABLE [DBO].[ArcaneGuild](
            [GUILDID] [VARCHAR](32),
            [GUILDNAME] VARCHAR(100)
        ) ON [PRIMARY]`;

        const result = await fromSQLServer(sql);

        expect(result.tables).toHaveLength(2);
        expect(result.relationships).toHaveLength(1);
        expect(result.relationships[0].sourceColumn).toBe('MASTERKEY');
        expect(result.relationships[0].targetColumn).toBe('GUILDID');
    });
});
@@ -342,6 +342,35 @@ function parseCreateTableManually(

    // Process each part (column or constraint)
    for (const part of parts) {
        // Handle standalone FOREIGN KEY definitions (without CONSTRAINT keyword)
        // Format: FOREIGN KEY (column) REFERENCES Table(column)
        if (part.match(/^\s*FOREIGN\s+KEY/i)) {
            const fkMatch = part.match(
                /FOREIGN\s+KEY\s*\(([^)]+)\)\s+REFERENCES\s+(?:\[?(\w+)\]?\.)??\[?(\w+)\]?\s*\(([^)]+)\)/i
            );
            if (fkMatch) {
                const [
                    ,
                    sourceCol,
                    targetSchema = 'dbo',
                    targetTable,
                    targetCol,
                ] = fkMatch;
                relationships.push({
                    name: `FK_${tableName}_${sourceCol.trim().replace(/\[|\]/g, '')}`,
                    sourceTable: tableName,
                    sourceSchema: schema,
                    sourceColumn: sourceCol.trim().replace(/\[|\]/g, ''),
                    targetTable: targetTable || targetSchema,
                    targetSchema: targetTable ? targetSchema : 'dbo',
                    targetColumn: targetCol.trim().replace(/\[|\]/g, ''),
                    sourceTableId: tableId,
                    targetTableId: '', // Will be filled later
                });
            }
            continue;
        }

        // Handle constraint definitions
        if (part.match(/^\s*CONSTRAINT/i)) {
            // Parse constraints
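To make the capture groups of the standalone FOREIGN KEY regex in the hunk above easier to follow, here is a minimal, hypothetical sketch: the sample `part` string is invented for illustration, while the regex is the one used in `parseCreateTableManually`.

```ts
// Illustration only: what the standalone FOREIGN KEY regex captures.
const part =
    'FOREIGN KEY (itscomponentrel) REFERENCES SpellComponent(SPELLID)';
const fkMatch = part.match(
    /FOREIGN\s+KEY\s*\(([^)]+)\)\s+REFERENCES\s+(?:\[?(\w+)\]?\.)??\[?(\w+)\]?\s*\(([^)]+)\)/i
);
// fkMatch?.[1] === 'itscomponentrel' -> source column
// fkMatch?.[2] === undefined         -> optional schema; the destructuring above falls back to 'dbo'
// fkMatch?.[3] === 'SpellComponent'  -> target table
// fkMatch?.[4] === 'SPELLID'         -> target column
```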
@@ -435,6 +464,13 @@
            columnMatch = part.match(/^\s*(\w+)\s+(\w+)\s+([\d,\s]+)\s+(.*)$/i);
        }

        // Handle unusual format: [COLUMN_NAME] (VARCHAR)(32)
        if (!columnMatch) {
            columnMatch = part.match(
                /^\s*\[?(\w+)\]?\s+\((\w+)\)\s*\(([\d,\s]+|max)\)(.*)$/i
            );
        }

        if (columnMatch) {
            const [, colName, baseType, typeArgs, rest] = columnMatch;
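As a quick, hypothetical check of the fallback pattern added in this hunk (the sample `part` value is invented; the regex is the one above), a bracketed column declared in the unusual `(TYPE)(length)` style is captured as follows:

```ts
// Illustration only: matching the unusual "[COLUMN_NAME] (VARCHAR)(32)" style.
const part = '[SPELLID] (VARCHAR)(32)';
const columnMatch = part.match(
    /^\s*\[?(\w+)\]?\s+\((\w+)\)\s*\(([\d,\s]+|max)\)(.*)$/i
);
// columnMatch?.[1] === 'SPELLID'  -> column name
// columnMatch?.[2] === 'VARCHAR'  -> base type
// columnMatch?.[3] === '32'       -> type arguments
// columnMatch?.[4] === ''         -> the rest of the definition (empty here)
```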
@@ -446,7 +482,37 @@
            const inlineFkMatch = rest.match(
                /FOREIGN\s+KEY\s+REFERENCES\s+(?:\[?(\w+)\]?\.)??\[?(\w+)\]?\s*\(([^)]+)\)/i
            );
            if (inlineFkMatch) {

            // Also check if there's a FOREIGN KEY after a comma with column name
            // Format: , FOREIGN KEY (columnname) REFERENCES Table(column)
            if (!inlineFkMatch && rest.includes('FOREIGN KEY')) {
                const fkWithColumnMatch = rest.match(
                    /,\s*FOREIGN\s+KEY\s*\((\w+)\)\s+REFERENCES\s+(?:\[?(\w+)\]?\.)??\[?(\w+)\]?\s*\(([^)]+)\)/i
                );
                if (fkWithColumnMatch) {
                    const [, srcCol, targetSchema, targetTable, targetCol] =
                        fkWithColumnMatch;
                    // Only process if srcCol matches current colName (case-insensitive)
                    if (srcCol.toLowerCase() === colName.toLowerCase()) {
                        // Create FK relationship
                        relationships.push({
                            name: `FK_${tableName}_${colName}`,
                            sourceTable: tableName,
                            sourceSchema: schema,
                            sourceColumn: colName,
                            targetTable: targetTable || targetSchema,
                            targetSchema: targetTable
                                ? targetSchema || 'dbo'
                                : 'dbo',
                            targetColumn: targetCol
                                .trim()
                                .replace(/\[|\]/g, ''),
                            sourceTableId: tableId,
                            targetTableId: '', // Will be filled later
                        });
                    }
                }
            } else if (inlineFkMatch) {
                const [, targetSchema = 'dbo', targetTable, targetCol] =
                    inlineFkMatch;
                relationships.push({
@@ -536,10 +602,36 @@ export async function fromSQLServer(
    try {
        // First, handle ALTER TABLE statements for foreign keys
        // Split by GO or semicolon for SQL Server
        const statements = sqlContent
        let statements = sqlContent
            .split(/(?:GO\s*$|;\s*$)/im)
            .filter((stmt) => stmt.trim().length > 0);

        // Additional splitting for CREATE TABLE statements that might not be separated by semicolons
        // If we have a statement with multiple CREATE TABLE, split them
        const expandedStatements: string[] = [];
        for (const stmt of statements) {
            // Check if this statement contains multiple CREATE TABLE statements
            if ((stmt.match(/CREATE\s+TABLE/gi) || []).length > 1) {
                // Split by ") ON [PRIMARY]" followed by CREATE TABLE
                const parts = stmt.split(
                    /\)\s*ON\s*\[PRIMARY\]\s*(?=CREATE\s+TABLE)/gi
                );
                for (let i = 0; i < parts.length; i++) {
                    let part = parts[i].trim();
                    // Re-add ") ON [PRIMARY]" to all parts except the last (which should already have it)
                    if (i < parts.length - 1 && part.length > 0) {
                        part += ') ON [PRIMARY]';
                    }
                    if (part.trim().length > 0) {
                        expandedStatements.push(part);
                    }
                }
            } else {
                expandedStatements.push(stmt);
            }
        }
        statements = expandedStatements;

        const alterTableStatements = statements.filter(
            (stmt) =>
                stmt.trim().toUpperCase().includes('ALTER TABLE') &&
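To illustrate the extra splitting step in the hunk above, here is a small, hypothetical sketch: the two-table script is invented, while the split regex is the one used in `fromSQLServer`. It shows how one blob holding two `CREATE TABLE` statements joined by `) ON [PRIMARY]` is separated before each piece is parsed on its own.

```ts
// Illustration only: splitting a blob that holds two CREATE TABLE statements.
const blob = `CREATE TABLE [DBO].[Dragon]( [ID] INT ) ON [PRIMARY]
CREATE TABLE [DBO].[DragonLair]( [ID] INT ) ON [PRIMARY]`;
const parts = blob.split(/\)\s*ON\s*\[PRIMARY\]\s*(?=CREATE\s+TABLE)/gi);
// parts[0] === 'CREATE TABLE [DBO].[Dragon]( [ID] INT '
//   -> ") ON [PRIMARY]" is consumed by the split and re-appended by the loop above
// parts[1] === 'CREATE TABLE [DBO].[DragonLair]( [ID] INT ) ON [PRIMARY]'
```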
7273  src/lib/dbml/dbml-export/__tests__/cases/1.dbml  (Normal file; diff suppressed because it is too large)
73546 src/lib/dbml/dbml-export/__tests__/cases/1.json  (Normal file; diff suppressed because it is too large)
Some files were not shown because too many files have changed in this diff