mirror of
https://github.com/chartdb/chartdb.git
synced 2025-11-01 12:33:51 +00:00
Compare commits
42 Commits
jf/fix_fk_
...
v1.14.0
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8dfa7cc62e | ||
|
|
23e93bfd01 | ||
|
|
16f9f4671e | ||
|
|
0c300e5e72 | ||
|
|
b9a1e78b53 | ||
|
|
337f7cdab4 | ||
|
|
1b0390f0b7 | ||
|
|
bc52933b58 | ||
|
|
2fdad2344c | ||
|
|
0c7eaa2df2 | ||
|
|
a5f8e56b3c | ||
|
|
8ffde62c1a | ||
|
|
39247b77a2 | ||
|
|
984b2aeee2 | ||
|
|
eed104be5b | ||
|
|
00bd535b3c | ||
|
|
18e914242f | ||
|
|
e68837a34a | ||
|
|
b30162d98b | ||
|
|
dba372d25a | ||
|
|
2eb48e75d3 | ||
|
|
867903cd5f | ||
|
|
8aeb1df0ad | ||
|
|
6bea827293 | ||
|
|
a119854da7 | ||
|
|
bfbfd7b843 | ||
|
|
0ca7008735 | ||
|
|
4bc71c52ff | ||
|
|
8f27f10dec | ||
|
|
a93ec2cab9 | ||
|
|
386e40a0bf | ||
|
|
bda150d4b6 | ||
|
|
87836e53d1 | ||
|
|
7e0483f1a5 | ||
|
|
309ee9cb0f | ||
|
|
79b885502e | ||
|
|
745bdee86d | ||
|
|
08eb9cc55f | ||
|
|
778f85d492 | ||
|
|
fb92be7d3e | ||
|
|
6df588f40e | ||
|
|
b46ed58dff |
63
CHANGELOG.md
63
CHANGELOG.md
@@ -1,5 +1,68 @@
|
||||
# Changelog
|
||||
|
||||
## [1.14.0](https://github.com/chartdb/chartdb/compare/v1.13.2...v1.14.0) (2025-08-04)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add floating "Show All" button when tables are out of view ([#787](https://github.com/chartdb/chartdb/issues/787)) ([bda150d](https://github.com/chartdb/chartdb/commit/bda150d4b6d6fb90beb423efba69349d21a037a5))
|
||||
* add table selection for large database imports ([#776](https://github.com/chartdb/chartdb/issues/776)) ([0d9f57a](https://github.com/chartdb/chartdb/commit/0d9f57a9c969a67e350d6bf25e07c3a9ef5bba39))
|
||||
* **canvas:** Add filter tables on canvas ([#774](https://github.com/chartdb/chartdb/issues/774)) ([dfbcf05](https://github.com/chartdb/chartdb/commit/dfbcf05b2f595f5b7b77dd61abf77e6e07acaf8f))
|
||||
* **custom-types:** add highlight fields option for custom types ([#726](https://github.com/chartdb/chartdb/issues/726)) ([7e0483f](https://github.com/chartdb/chartdb/commit/7e0483f1a5512a6a737baf61caf7513e043f2e96))
|
||||
* **datatypes:** Add decimal / numeric attribute support + organize field row ([#715](https://github.com/chartdb/chartdb/issues/715)) ([778f85d](https://github.com/chartdb/chartdb/commit/778f85d49214232a39710e47bb5d4ec41b75d427))
|
||||
* **dbml:** Edit Diagram Directly from DBML ([#819](https://github.com/chartdb/chartdb/issues/819)) ([1b0390f](https://github.com/chartdb/chartdb/commit/1b0390f0b7652fe415540b7942cf53ec87143f08))
|
||||
* **default value:** add default value option to table field settings ([#770](https://github.com/chartdb/chartdb/issues/770)) ([c9ea7da](https://github.com/chartdb/chartdb/commit/c9ea7da0923ff991cb936235674d9a52b8186137))
|
||||
* enhance primary key and unique field handling logic ([#817](https://github.com/chartdb/chartdb/issues/817)) ([39247b7](https://github.com/chartdb/chartdb/commit/39247b77a299caa4f29ea434af3028155c6d37ed))
|
||||
* implement area grouping with parent-child relationships ([#762](https://github.com/chartdb/chartdb/issues/762)) ([b35e175](https://github.com/chartdb/chartdb/commit/b35e17526b3c9b918928ae5f3f89711ea7b2529c))
|
||||
* **schema:** support create new schema ([#801](https://github.com/chartdb/chartdb/issues/801)) ([867903c](https://github.com/chartdb/chartdb/commit/867903cd5f24d96ce1fe718dc9b562e2f2b75276))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* add open and create diagram to side menu ([#757](https://github.com/chartdb/chartdb/issues/757)) ([67f5ac3](https://github.com/chartdb/chartdb/commit/67f5ac303ebf5ada97d5c80fb08a2815ca205a91))
|
||||
* add PostgreSQL tests and fix parsing SQL ([#760](https://github.com/chartdb/chartdb/issues/760)) ([5d33740](https://github.com/chartdb/chartdb/commit/5d337409d64d1078b538350016982a98e684c06c))
|
||||
* area resizers size ([#830](https://github.com/chartdb/chartdb/issues/830)) ([23e93bf](https://github.com/chartdb/chartdb/commit/23e93bfd01d741dd3d11aa5c479cef97e1a86fa6))
|
||||
* **area:** redo/undo after dragging an area with tables ([#767](https://github.com/chartdb/chartdb/issues/767)) ([6af94af](https://github.com/chartdb/chartdb/commit/6af94afc56cf8987b8fc9e3f0a9bfa966de35408))
|
||||
* **canvas filter:** improve scroller on canvas filter ([#799](https://github.com/chartdb/chartdb/issues/799)) ([6bea827](https://github.com/chartdb/chartdb/commit/6bea82729362a8c7b73dc089ddd9e52bae176aa2))
|
||||
* **canvas:** fix filter eye button ([#780](https://github.com/chartdb/chartdb/issues/780)) ([b7dbe54](https://github.com/chartdb/chartdb/commit/b7dbe54c83c75cfe3c556f7a162055dcfe2de23d))
|
||||
* clone of custom types ([#804](https://github.com/chartdb/chartdb/issues/804)) ([b30162d](https://github.com/chartdb/chartdb/commit/b30162d98bc659a61aae023cdeaead4ce25c7ae9))
|
||||
* **cockroachdb:** support schema creation for cockroachdb ([#803](https://github.com/chartdb/chartdb/issues/803)) ([dba372d](https://github.com/chartdb/chartdb/commit/dba372d25a8c642baf8600d05aa154882729d446))
|
||||
* **dbml actions:** set dbml tooltips side ([#798](https://github.com/chartdb/chartdb/issues/798)) ([a119854](https://github.com/chartdb/chartdb/commit/a119854da7c935eb595984ea9398e04136ce60c4))
|
||||
* **dbml editor:** move tooltips button to be on the right ([#797](https://github.com/chartdb/chartdb/issues/797)) ([bfbfd7b](https://github.com/chartdb/chartdb/commit/bfbfd7b843f96c894b1966ad95393b866c927466))
|
||||
* **dbml export:** fix handle tables with same name under different schemas ([#807](https://github.com/chartdb/chartdb/issues/807)) ([18e9142](https://github.com/chartdb/chartdb/commit/18e914242faccd6376fe5a7cd5a4478667f065ee))
|
||||
* **dbml export:** handle tables with same name under different schemas ([#806](https://github.com/chartdb/chartdb/issues/806)) ([e68837a](https://github.com/chartdb/chartdb/commit/e68837a34aa635fb6fc02c7f1289495e5c448242))
|
||||
* **dbml field comments:** support export field comments in dbml ([#796](https://github.com/chartdb/chartdb/issues/796)) ([0ca7008](https://github.com/chartdb/chartdb/commit/0ca700873577bbfbf1dd3f8088c258fc89b10c53))
|
||||
* **dbml import:** fix dbml import types + schemas ([#808](https://github.com/chartdb/chartdb/issues/808)) ([00bd535](https://github.com/chartdb/chartdb/commit/00bd535b3c62d26d25a6276d52beb10e26afad76))
|
||||
* **dbml-export:** merge field attributes into single brackets and fix schema syntax ([#790](https://github.com/chartdb/chartdb/issues/790)) ([309ee9c](https://github.com/chartdb/chartdb/commit/309ee9cb0ff1f5a68ed183e3919e1a11a8410909))
|
||||
* **dbml-import:** handle unsupported DBML features and add comprehensive tests ([#766](https://github.com/chartdb/chartdb/issues/766)) ([22d46e1](https://github.com/chartdb/chartdb/commit/22d46e1e90729730cc25dd6961bfe8c3d2ae0c98))
|
||||
* **dbml:** dbml indentation ([#829](https://github.com/chartdb/chartdb/issues/829)) ([16f9f46](https://github.com/chartdb/chartdb/commit/16f9f4671e011eb66ba9594bed47570eda3eed66))
|
||||
* **dbml:** dbml note syntax ([#826](https://github.com/chartdb/chartdb/issues/826)) ([337f7cd](https://github.com/chartdb/chartdb/commit/337f7cdab4759d15cb4d25a8c0e9394e99ba33d4))
|
||||
* **dbml:** fix dbml output format ([#815](https://github.com/chartdb/chartdb/issues/815)) ([eed104b](https://github.com/chartdb/chartdb/commit/eed104be5ba2b7d9940ffac38e7877722ad764fc))
|
||||
* **dbml:** fix schemas with same table names ([#828](https://github.com/chartdb/chartdb/issues/828)) ([0c300e5](https://github.com/chartdb/chartdb/commit/0c300e5e72cc5ff22cac42f8dbaed167061157c6))
|
||||
* **dbml:** import dbml notes (table + fields) ([#827](https://github.com/chartdb/chartdb/issues/827)) ([b9a1e78](https://github.com/chartdb/chartdb/commit/b9a1e78b53c932c0b1a12ee38b62494a5c2f9348))
|
||||
* **dbml:** support multiple relationships on same field in inline DBML ([#822](https://github.com/chartdb/chartdb/issues/822)) ([a5f8e56](https://github.com/chartdb/chartdb/commit/a5f8e56b3ca97b851b6953481644d3a3ff7ce882))
|
||||
* **dbml:** support spaces in names ([#794](https://github.com/chartdb/chartdb/issues/794)) ([8f27f10](https://github.com/chartdb/chartdb/commit/8f27f10dec96af400dc2c12a30b22b3a346803a9))
|
||||
* fix hotkeys on form elements ([#778](https://github.com/chartdb/chartdb/issues/778)) ([43d1dff](https://github.com/chartdb/chartdb/commit/43d1dfff71f2b960358a79b0112b78d11df91fb7))
|
||||
* fix screen freeze after schema select ([#800](https://github.com/chartdb/chartdb/issues/800)) ([8aeb1df](https://github.com/chartdb/chartdb/commit/8aeb1df0ad353c49e91243453f24bfa5921a89ab))
|
||||
* **i18n:** add Croatian (hr) language support ([#802](https://github.com/chartdb/chartdb/issues/802)) ([2eb48e7](https://github.com/chartdb/chartdb/commit/2eb48e75d303d622f51327d22502a6f78e7fb32d))
|
||||
* improve SQL export formatting and add schema-aware FK grouping ([#783](https://github.com/chartdb/chartdb/issues/783)) ([6df588f](https://github.com/chartdb/chartdb/commit/6df588f40e6e7066da6125413b94466429d48767))
|
||||
* lost in canvas button animation ([#793](https://github.com/chartdb/chartdb/issues/793)) ([a93ec2c](https://github.com/chartdb/chartdb/commit/a93ec2cab906d0e4431d8d1668adcf2dbfc3c80f))
|
||||
* **readonly:** fix zoom out on readonly ([#818](https://github.com/chartdb/chartdb/issues/818)) ([8ffde62](https://github.com/chartdb/chartdb/commit/8ffde62c1a00893c4bf6b4dd39068df530375416))
|
||||
* remove error lag after autofix ([#764](https://github.com/chartdb/chartdb/issues/764)) ([bf32c08](https://github.com/chartdb/chartdb/commit/bf32c08d37c02ee6d7946a41633bb97b2271fcb7))
|
||||
* remove unnecessary import ([#791](https://github.com/chartdb/chartdb/issues/791)) ([87836e5](https://github.com/chartdb/chartdb/commit/87836e53d145b825f9c4f80abca72f418df50e6c))
|
||||
* **scroll:** disable scroll x behavior ([#795](https://github.com/chartdb/chartdb/issues/795)) ([4bc71c5](https://github.com/chartdb/chartdb/commit/4bc71c52ff5c462800d8530b72a5aadb7d7f85ed))
|
||||
* set focus on filter search ([#775](https://github.com/chartdb/chartdb/issues/775)) ([9949a46](https://github.com/chartdb/chartdb/commit/9949a46ee3ba7f46a2ea7f2c0d7101cc9336df4f))
|
||||
* solve issue with multiple render of tables ([#823](https://github.com/chartdb/chartdb/issues/823)) ([0c7eaa2](https://github.com/chartdb/chartdb/commit/0c7eaa2df20cfb6994b7e6251c760a2d4581c879))
|
||||
* **sql-export:** escape newlines and quotes in multi-line comments ([#765](https://github.com/chartdb/chartdb/issues/765)) ([f7f9290](https://github.com/chartdb/chartdb/commit/f7f92903def84a94ac0c66f625f96a6681383945))
|
||||
* **sql-server:** improvment for sql-server import via sql script ([#789](https://github.com/chartdb/chartdb/issues/789)) ([79b8855](https://github.com/chartdb/chartdb/commit/79b885502e3385e996a52093a3ccd5f6e469993a))
|
||||
* **table-node:** fix comment icon on field ([#786](https://github.com/chartdb/chartdb/issues/786)) ([745bdee](https://github.com/chartdb/chartdb/commit/745bdee86d07f1e9c3a2d24237c48c25b9a8eeea))
|
||||
* **table-node:** improve field spacing ([#785](https://github.com/chartdb/chartdb/issues/785)) ([08eb9cc](https://github.com/chartdb/chartdb/commit/08eb9cc55f0077f53afea6f9ce720341e1a583c2))
|
||||
* **table-select:** add loading indication for import ([#782](https://github.com/chartdb/chartdb/issues/782)) ([b46ed58](https://github.com/chartdb/chartdb/commit/b46ed58dff1ec74579fb1544dba46b0f77730c52))
|
||||
* **ui:** reduce spacing between primary key icon and short field types ([#816](https://github.com/chartdb/chartdb/issues/816)) ([984b2ae](https://github.com/chartdb/chartdb/commit/984b2aeee22c43cb9bda77df2c22087973079af4))
|
||||
* update MariaDB database import smart query ([#792](https://github.com/chartdb/chartdb/issues/792)) ([386e40a](https://github.com/chartdb/chartdb/commit/386e40a0bf93d9aef1486bb1e729d8f485e675eb))
|
||||
* update multiple schemas toast to require user action ([#771](https://github.com/chartdb/chartdb/issues/771)) ([f56fab9](https://github.com/chartdb/chartdb/commit/f56fab9876fb9fc46c6c708231324a90d8a7851d))
|
||||
* update relationship when table width changes via expand/shrink ([#825](https://github.com/chartdb/chartdb/issues/825)) ([bc52933](https://github.com/chartdb/chartdb/commit/bc52933b58bfe6bc73779d9401128254cbf497d5))
|
||||
|
||||
## [1.13.2](https://github.com/chartdb/chartdb/compare/v1.13.1...v1.13.2) (2025-07-06)
|
||||
|
||||
|
||||
|
||||
22
package-lock.json
generated
22
package-lock.json
generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "chartdb",
|
||||
"version": "1.13.2",
|
||||
"version": "1.14.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "chartdb",
|
||||
"version": "1.13.2",
|
||||
"version": "1.14.0",
|
||||
"dependencies": {
|
||||
"@ai-sdk/openai": "^0.0.51",
|
||||
"@dbml/core": "^3.9.5",
|
||||
@@ -35,7 +35,7 @@
|
||||
"@radix-ui/react-toggle-group": "^1.1.0",
|
||||
"@radix-ui/react-tooltip": "^1.1.8",
|
||||
"@uidotdev/usehooks": "^2.4.1",
|
||||
"@xyflow/react": "^12.3.1",
|
||||
"@xyflow/react": "^12.8.2",
|
||||
"ahooks": "^3.8.1",
|
||||
"ai": "^3.3.14",
|
||||
"class-variance-authority": "^0.7.1",
|
||||
@@ -4603,12 +4603,12 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@xyflow/react": {
|
||||
"version": "12.4.2",
|
||||
"resolved": "https://registry.npmjs.org/@xyflow/react/-/react-12.4.2.tgz",
|
||||
"integrity": "sha512-AFJKVc/fCPtgSOnRst3xdYJwiEcUN9lDY7EO/YiRvFHYCJGgfzg+jpvZjkTOnBLGyrMJre9378pRxAc3fsR06A==",
|
||||
"version": "12.8.2",
|
||||
"resolved": "https://registry.npmjs.org/@xyflow/react/-/react-12.8.2.tgz",
|
||||
"integrity": "sha512-VifLpxOy74ck283NQOtBn1e8igmB7xo7ADDKxyBHkKd8IKpyr16TgaYOhzqVwNMdB4NT+m++zfkic530L+gEXw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@xyflow/system": "0.0.50",
|
||||
"@xyflow/system": "0.0.66",
|
||||
"classcat": "^5.0.3",
|
||||
"zustand": "^4.4.0"
|
||||
},
|
||||
@@ -4618,16 +4618,18 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@xyflow/system": {
|
||||
"version": "0.0.50",
|
||||
"resolved": "https://registry.npmjs.org/@xyflow/system/-/system-0.0.50.tgz",
|
||||
"integrity": "sha512-HVUZd4LlY88XAaldFh2nwVxDOcdIBxGpQ5txzwfJPf+CAjj2BfYug1fHs2p4yS7YO8H6A3EFJQovBE8YuHkAdg==",
|
||||
"version": "0.0.66",
|
||||
"resolved": "https://registry.npmjs.org/@xyflow/system/-/system-0.0.66.tgz",
|
||||
"integrity": "sha512-TTxESDwPsATnuDMUeYYtKe4wt9v8bRO29dgYBhR8HyhSCzipnAdIL/1CDfFd+WqS1srVreo24u6zZeVIDk4r3Q==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/d3-drag": "^3.0.7",
|
||||
"@types/d3-interpolate": "^3.0.4",
|
||||
"@types/d3-selection": "^3.0.10",
|
||||
"@types/d3-transition": "^3.0.8",
|
||||
"@types/d3-zoom": "^3.0.8",
|
||||
"d3-drag": "^3.0.0",
|
||||
"d3-interpolate": "^3.0.1",
|
||||
"d3-selection": "^3.0.0",
|
||||
"d3-zoom": "^3.0.0"
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "chartdb",
|
||||
"private": true,
|
||||
"version": "1.13.2",
|
||||
"version": "1.14.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
@@ -43,7 +43,7 @@
|
||||
"@radix-ui/react-toggle-group": "^1.1.0",
|
||||
"@radix-ui/react-tooltip": "^1.1.8",
|
||||
"@uidotdev/usehooks": "^2.4.1",
|
||||
"@xyflow/react": "^12.3.1",
|
||||
"@xyflow/react": "^12.8.2",
|
||||
"ahooks": "^3.8.1",
|
||||
"ai": "^3.3.14",
|
||||
"class-variance-authority": "^0.7.1",
|
||||
|
||||
@@ -31,6 +31,7 @@ export interface CodeSnippetAction {
|
||||
label: string;
|
||||
icon: LucideIcon;
|
||||
onClick: () => void;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export interface CodeSnippetProps {
|
||||
@@ -43,6 +44,8 @@ export interface CodeSnippetProps {
|
||||
isComplete?: boolean;
|
||||
editorProps?: React.ComponentProps<EditorType>;
|
||||
actions?: CodeSnippetAction[];
|
||||
actionsTooltipSide?: 'top' | 'right' | 'bottom' | 'left';
|
||||
allowCopy?: boolean;
|
||||
}
|
||||
|
||||
export const CodeSnippet: React.FC<CodeSnippetProps> = React.memo(
|
||||
@@ -56,6 +59,8 @@ export const CodeSnippet: React.FC<CodeSnippetProps> = React.memo(
|
||||
isComplete = true,
|
||||
editorProps,
|
||||
actions,
|
||||
actionsTooltipSide,
|
||||
allowCopy = true,
|
||||
}) => {
|
||||
const { t } = useTranslation();
|
||||
const monaco = useMonaco();
|
||||
@@ -129,33 +134,37 @@ export const CodeSnippet: React.FC<CodeSnippetProps> = React.memo(
|
||||
<Suspense fallback={<Spinner />}>
|
||||
{isComplete ? (
|
||||
<div className="absolute right-1 top-1 z-10 flex flex-col gap-1">
|
||||
<Tooltip
|
||||
onOpenChange={setTooltipOpen}
|
||||
open={isCopied || tooltipOpen}
|
||||
>
|
||||
<TooltipTrigger asChild>
|
||||
<span>
|
||||
<Button
|
||||
className="h-fit p-1.5"
|
||||
variant="outline"
|
||||
onClick={copyToClipboard}
|
||||
>
|
||||
{isCopied ? (
|
||||
<CopyCheck size={16} />
|
||||
) : (
|
||||
<Copy size={16} />
|
||||
)}
|
||||
</Button>
|
||||
</span>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
{t(
|
||||
isCopied
|
||||
? 'copied'
|
||||
: 'copy_to_clipboard'
|
||||
)}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
{allowCopy ? (
|
||||
<Tooltip
|
||||
onOpenChange={setTooltipOpen}
|
||||
open={isCopied || tooltipOpen}
|
||||
>
|
||||
<TooltipTrigger asChild>
|
||||
<span>
|
||||
<Button
|
||||
className="h-fit p-1.5"
|
||||
variant="outline"
|
||||
onClick={copyToClipboard}
|
||||
>
|
||||
{isCopied ? (
|
||||
<CopyCheck size={16} />
|
||||
) : (
|
||||
<Copy size={16} />
|
||||
)}
|
||||
</Button>
|
||||
</span>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent
|
||||
side={actionsTooltipSide}
|
||||
>
|
||||
{t(
|
||||
isCopied
|
||||
? 'copied'
|
||||
: 'copy_to_clipboard'
|
||||
)}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
) : null}
|
||||
|
||||
{actions &&
|
||||
actions.length > 0 &&
|
||||
@@ -164,7 +173,10 @@ export const CodeSnippet: React.FC<CodeSnippetProps> = React.memo(
|
||||
<TooltipTrigger asChild>
|
||||
<span>
|
||||
<Button
|
||||
className="h-fit p-1.5"
|
||||
className={cn(
|
||||
'h-fit p-1.5',
|
||||
action.className
|
||||
)}
|
||||
variant="outline"
|
||||
onClick={action.onClick}
|
||||
>
|
||||
@@ -174,7 +186,9 @@ export const CodeSnippet: React.FC<CodeSnippetProps> = React.memo(
|
||||
</Button>
|
||||
</span>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<TooltipContent
|
||||
side={actionsTooltipSide}
|
||||
>
|
||||
{action.label}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
|
||||
51
src/components/code-snippet/dbml/utils.ts
Normal file
51
src/components/code-snippet/dbml/utils.ts
Normal file
@@ -0,0 +1,51 @@
|
||||
import type { DBMLError } from '@/lib/dbml/dbml-import/dbml-import-error';
|
||||
import * as monaco from 'monaco-editor';
|
||||
|
||||
export const highlightErrorLine = ({
|
||||
error,
|
||||
model,
|
||||
editorDecorationsCollection,
|
||||
}: {
|
||||
error: DBMLError;
|
||||
model?: monaco.editor.ITextModel | null;
|
||||
editorDecorationsCollection:
|
||||
| monaco.editor.IEditorDecorationsCollection
|
||||
| undefined;
|
||||
}) => {
|
||||
if (!model) return;
|
||||
if (!editorDecorationsCollection) return;
|
||||
|
||||
const decorations = [
|
||||
{
|
||||
range: new monaco.Range(
|
||||
error.line,
|
||||
1,
|
||||
error.line,
|
||||
model.getLineMaxColumn(error.line)
|
||||
),
|
||||
options: {
|
||||
isWholeLine: true,
|
||||
className: 'dbml-error-line',
|
||||
glyphMarginClassName: 'dbml-error-glyph',
|
||||
hoverMessage: { value: error.message },
|
||||
overviewRuler: {
|
||||
color: '#ff0000',
|
||||
position: monaco.editor.OverviewRulerLane.Right,
|
||||
darkColor: '#ff0000',
|
||||
},
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
editorDecorationsCollection?.set(decorations);
|
||||
};
|
||||
|
||||
export const clearErrorHighlight = (
|
||||
editorDecorationsCollection:
|
||||
| monaco.editor.IEditorDecorationsCollection
|
||||
| undefined
|
||||
) => {
|
||||
if (editorDecorationsCollection) {
|
||||
editorDecorationsCollection.clear();
|
||||
}
|
||||
};
|
||||
@@ -37,18 +37,28 @@ export const setupDBMLLanguage = (monaco: Monaco) => {
|
||||
const datatypePattern = dataTypesNames.join('|');
|
||||
|
||||
monaco.languages.setMonarchTokensProvider('dbml', {
|
||||
keywords: ['Table', 'Ref', 'Indexes'],
|
||||
keywords: ['Table', 'Ref', 'Indexes', 'Note', 'Enum'],
|
||||
datatypes: dataTypesNames,
|
||||
tokenizer: {
|
||||
root: [
|
||||
[/\b(Table|Ref|Indexes)\b/, 'keyword'],
|
||||
[
|
||||
/\b([Tt][Aa][Bb][Ll][Ee]|[Ee][Nn][Uu][Mm]|[Rr][Ee][Ff]|[Ii][Nn][Dd][Ee][Xx][Ee][Ss]|[Nn][Oo][Tt][Ee])\b/,
|
||||
'keyword',
|
||||
],
|
||||
[/\[.*?\]/, 'annotation'],
|
||||
[/'''/, 'string', '@tripleQuoteString'],
|
||||
[/".*?"/, 'string'],
|
||||
[/'.*?'/, 'string'],
|
||||
[/`.*?`/, 'string'],
|
||||
[/[{}]/, 'delimiter'],
|
||||
[/[<>]/, 'operator'],
|
||||
[new RegExp(`\\b(${datatypePattern})\\b`, 'i'), 'type'], // Added 'i' flag for case-insensitive matching
|
||||
],
|
||||
tripleQuoteString: [
|
||||
[/[^']+/, 'string'],
|
||||
[/'''/, 'string', '@pop'],
|
||||
[/'/, 'string'],
|
||||
],
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
@@ -93,6 +93,8 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
|
||||
(isOpen: boolean) => {
|
||||
setOpen?.(isOpen);
|
||||
setIsOpen(isOpen);
|
||||
|
||||
setTimeout(() => (document.body.style.pointerEvents = ''), 500);
|
||||
},
|
||||
[setOpen]
|
||||
);
|
||||
@@ -227,7 +229,7 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
|
||||
onSelect={() =>
|
||||
handleSelect(
|
||||
option.value,
|
||||
matches?.map((match) => match.toString())
|
||||
matches?.map((match) => match?.toString())
|
||||
)
|
||||
}
|
||||
>
|
||||
|
||||
@@ -78,6 +78,9 @@ export interface ChartDBContext {
|
||||
events: EventEmitter<ChartDBEvent>;
|
||||
readonly?: boolean;
|
||||
|
||||
highlightedCustomType?: DBCustomType;
|
||||
highlightCustomTypeId: (id?: string) => void;
|
||||
|
||||
filteredSchemas?: string[];
|
||||
filterSchemas: (schemaIds: string[]) => void;
|
||||
|
||||
@@ -92,6 +95,10 @@ export interface ChartDBContext {
|
||||
updateDiagramUpdatedAt: () => Promise<void>;
|
||||
clearDiagramData: () => Promise<void>;
|
||||
deleteDiagram: () => Promise<void>;
|
||||
updateDiagramData: (
|
||||
diagram: Diagram,
|
||||
options?: { forceUpdateStorage?: boolean }
|
||||
) => Promise<void>;
|
||||
|
||||
// Database type operations
|
||||
updateDatabaseType: (databaseType: DatabaseType) => Promise<void>;
|
||||
@@ -294,6 +301,7 @@ export const chartDBContext = createContext<ChartDBContext>({
|
||||
areas: [],
|
||||
customTypes: [],
|
||||
schemas: [],
|
||||
highlightCustomTypeId: emptyFn,
|
||||
filteredSchemas: [],
|
||||
filterSchemas: emptyFn,
|
||||
currentDiagram: {
|
||||
@@ -313,6 +321,7 @@ export const chartDBContext = createContext<ChartDBContext>({
|
||||
loadDiagramFromData: emptyFn,
|
||||
clearDiagramData: emptyFn,
|
||||
deleteDiagram: emptyFn,
|
||||
updateDiagramData: emptyFn,
|
||||
|
||||
// Database type operations
|
||||
updateDatabaseType: emptyFn,
|
||||
|
||||
@@ -40,7 +40,8 @@ export const ChartDBProvider: React.FC<
|
||||
React.PropsWithChildren<ChartDBProviderProps>
|
||||
> = ({ children, diagram, readonly: readonlyProp }) => {
|
||||
const { hasDiff } = useDiff();
|
||||
let db = useStorage();
|
||||
const dbStorage = useStorage();
|
||||
let db = dbStorage;
|
||||
const events = useEventEmitter<ChartDBEvent>();
|
||||
const { setSchemasFilter, schemasFilter } = useLocalConfig();
|
||||
const { addUndoAction, resetRedoStack, resetUndoStack } =
|
||||
@@ -74,6 +75,9 @@ export const ChartDBProvider: React.FC<
|
||||
const [hiddenTableIds, setHiddenTableIds] = useState<string[]>([]);
|
||||
const { events: diffEvents } = useDiff();
|
||||
|
||||
const [highlightedCustomTypeId, setHighlightedCustomTypeId] =
|
||||
useState<string>();
|
||||
|
||||
const diffCalculatedHandler = useCallback((event: DiffCalculatedEvent) => {
|
||||
const { tablesAdded, fieldsAdded, relationshipsAdded } = event.data;
|
||||
setTables((tables) =>
|
||||
@@ -1531,22 +1535,37 @@ export const ChartDBProvider: React.FC<
|
||||
[db, diagramId, setAreas, getArea, addUndoAction, resetRedoStack]
|
||||
);
|
||||
|
||||
const highlightCustomTypeId = useCallback(
|
||||
(id?: string) => setHighlightedCustomTypeId(id),
|
||||
[setHighlightedCustomTypeId]
|
||||
);
|
||||
|
||||
const highlightedCustomType = useMemo(() => {
|
||||
return highlightedCustomTypeId
|
||||
? customTypes.find((type) => type.id === highlightedCustomTypeId)
|
||||
: undefined;
|
||||
}, [highlightedCustomTypeId, customTypes]);
|
||||
|
||||
const loadDiagramFromData: ChartDBContext['loadDiagramFromData'] =
|
||||
useCallback(
|
||||
async (diagram) => {
|
||||
(diagram) => {
|
||||
setDiagramId(diagram.id);
|
||||
setDiagramName(diagram.name);
|
||||
setDatabaseType(diagram.databaseType);
|
||||
setDatabaseEdition(diagram.databaseEdition);
|
||||
setTables(diagram?.tables ?? []);
|
||||
setRelationships(diagram?.relationships ?? []);
|
||||
setDependencies(diagram?.dependencies ?? []);
|
||||
setAreas(diagram?.areas ?? []);
|
||||
setCustomTypes(diagram?.customTypes ?? []);
|
||||
setTables(diagram.tables ?? []);
|
||||
setRelationships(diagram.relationships ?? []);
|
||||
setDependencies(diagram.dependencies ?? []);
|
||||
setAreas(diagram.areas ?? []);
|
||||
setCustomTypes(diagram.customTypes ?? []);
|
||||
setDiagramCreatedAt(diagram.createdAt);
|
||||
setDiagramUpdatedAt(diagram.updatedAt);
|
||||
setHighlightedCustomTypeId(undefined);
|
||||
|
||||
events.emit({ action: 'load_diagram', data: { diagram } });
|
||||
|
||||
resetRedoStack();
|
||||
resetUndoStack();
|
||||
},
|
||||
[
|
||||
setDiagramId,
|
||||
@@ -1560,10 +1579,23 @@ export const ChartDBProvider: React.FC<
|
||||
setCustomTypes,
|
||||
setDiagramCreatedAt,
|
||||
setDiagramUpdatedAt,
|
||||
setHighlightedCustomTypeId,
|
||||
events,
|
||||
resetRedoStack,
|
||||
resetUndoStack,
|
||||
]
|
||||
);
|
||||
|
||||
const updateDiagramData: ChartDBContext['updateDiagramData'] = useCallback(
|
||||
async (diagram, options) => {
|
||||
const st = options?.forceUpdateStorage ? dbStorage : db;
|
||||
await st.deleteDiagram(diagram.id);
|
||||
await st.addDiagram({ diagram });
|
||||
loadDiagramFromData(diagram);
|
||||
},
|
||||
[db, dbStorage, loadDiagramFromData]
|
||||
);
|
||||
|
||||
const loadDiagram: ChartDBContext['loadDiagram'] = useCallback(
|
||||
async (diagramId: string) => {
|
||||
const diagram = await db.getDiagram(diagramId, {
|
||||
@@ -1766,6 +1798,7 @@ export const ChartDBProvider: React.FC<
|
||||
events,
|
||||
readonly,
|
||||
filterSchemas,
|
||||
updateDiagramData,
|
||||
updateDiagramId,
|
||||
updateDiagramName,
|
||||
loadDiagram,
|
||||
@@ -1825,6 +1858,8 @@ export const ChartDBProvider: React.FC<
|
||||
hiddenTableIds,
|
||||
addHiddenTableId,
|
||||
removeHiddenTableId,
|
||||
highlightCustomTypeId,
|
||||
highlightedCustomType,
|
||||
}}
|
||||
>
|
||||
{children}
|
||||
|
||||
@@ -32,14 +32,20 @@ export interface DiffContext {
|
||||
originalDiagram: Diagram | null;
|
||||
diffMap: DiffMap;
|
||||
hasDiff: boolean;
|
||||
isSummaryOnly: boolean;
|
||||
|
||||
calculateDiff: ({
|
||||
diagram,
|
||||
newDiagram,
|
||||
options,
|
||||
}: {
|
||||
diagram: Diagram;
|
||||
newDiagram: Diagram;
|
||||
options?: {
|
||||
summaryOnly?: boolean;
|
||||
};
|
||||
}) => void;
|
||||
resetDiff: () => void;
|
||||
|
||||
// table diff
|
||||
checkIfTableHasChange: ({ tableId }: { tableId: string }) => boolean;
|
||||
@@ -60,6 +66,15 @@ export interface DiffContext {
|
||||
checkIfNewField: ({ fieldId }: { fieldId: string }) => boolean;
|
||||
getFieldNewName: ({ fieldId }: { fieldId: string }) => string | null;
|
||||
getFieldNewType: ({ fieldId }: { fieldId: string }) => DataType | null;
|
||||
getFieldNewPrimaryKey: ({ fieldId }: { fieldId: string }) => boolean | null;
|
||||
getFieldNewNullable: ({ fieldId }: { fieldId: string }) => boolean | null;
|
||||
getFieldNewCharacterMaximumLength: ({
|
||||
fieldId,
|
||||
}: {
|
||||
fieldId: string;
|
||||
}) => string | null;
|
||||
getFieldNewScale: ({ fieldId }: { fieldId: string }) => number | null;
|
||||
getFieldNewPrecision: ({ fieldId }: { fieldId: string }) => number | null;
|
||||
|
||||
// relationship diff
|
||||
checkIfNewRelationship: ({
|
||||
|
||||
@@ -32,6 +32,7 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
|
||||
const [fieldsChanged, setFieldsChanged] = React.useState<
|
||||
Map<string, boolean>
|
||||
>(new Map<string, boolean>());
|
||||
const [isSummaryOnly, setIsSummaryOnly] = React.useState<boolean>(false);
|
||||
|
||||
const events = useEventEmitter<DiffEvent>();
|
||||
|
||||
@@ -127,7 +128,7 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
|
||||
);
|
||||
|
||||
const calculateDiff: DiffContext['calculateDiff'] = useCallback(
|
||||
({ diagram, newDiagram: newDiagramArg }) => {
|
||||
({ diagram, newDiagram: newDiagramArg, options }) => {
|
||||
const {
|
||||
diffMap: newDiffs,
|
||||
changedTables: newChangedTables,
|
||||
@@ -139,6 +140,7 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
|
||||
setFieldsChanged(newChangedFields);
|
||||
setNewDiagram(newDiagramArg);
|
||||
setOriginalDiagram(diagram);
|
||||
setIsSummaryOnly(options?.summaryOnly ?? false);
|
||||
|
||||
events.emit({
|
||||
action: 'diff_calculated',
|
||||
@@ -305,6 +307,117 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
|
||||
[diffMap]
|
||||
);
|
||||
|
||||
const getFieldNewPrimaryKey = useCallback<
|
||||
DiffContext['getFieldNewPrimaryKey']
|
||||
>(
|
||||
({ fieldId }) => {
|
||||
const fieldKey = getDiffMapKey({
|
||||
diffObject: 'field',
|
||||
objectId: fieldId,
|
||||
attribute: 'primaryKey',
|
||||
});
|
||||
|
||||
if (diffMap.has(fieldKey)) {
|
||||
const diff = diffMap.get(fieldKey);
|
||||
|
||||
if (diff?.type === 'changed') {
|
||||
return diff.newValue as boolean;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
[diffMap]
|
||||
);
|
||||
|
||||
const getFieldNewNullable = useCallback<DiffContext['getFieldNewNullable']>(
|
||||
({ fieldId }) => {
|
||||
const fieldKey = getDiffMapKey({
|
||||
diffObject: 'field',
|
||||
objectId: fieldId,
|
||||
attribute: 'nullable',
|
||||
});
|
||||
|
||||
if (diffMap.has(fieldKey)) {
|
||||
const diff = diffMap.get(fieldKey);
|
||||
|
||||
if (diff?.type === 'changed') {
|
||||
return diff.newValue as boolean;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
[diffMap]
|
||||
);
|
||||
|
||||
const getFieldNewCharacterMaximumLength = useCallback<
|
||||
DiffContext['getFieldNewCharacterMaximumLength']
|
||||
>(
|
||||
({ fieldId }) => {
|
||||
const fieldKey = getDiffMapKey({
|
||||
diffObject: 'field',
|
||||
objectId: fieldId,
|
||||
attribute: 'characterMaximumLength',
|
||||
});
|
||||
|
||||
if (diffMap.has(fieldKey)) {
|
||||
const diff = diffMap.get(fieldKey);
|
||||
|
||||
if (diff?.type === 'changed') {
|
||||
return diff.newValue as string;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
[diffMap]
|
||||
);
|
||||
|
||||
const getFieldNewScale = useCallback<DiffContext['getFieldNewScale']>(
|
||||
({ fieldId }) => {
|
||||
const fieldKey = getDiffMapKey({
|
||||
diffObject: 'field',
|
||||
objectId: fieldId,
|
||||
attribute: 'scale',
|
||||
});
|
||||
|
||||
if (diffMap.has(fieldKey)) {
|
||||
const diff = diffMap.get(fieldKey);
|
||||
|
||||
if (diff?.type === 'changed') {
|
||||
return diff.newValue as number;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
[diffMap]
|
||||
);
|
||||
|
||||
const getFieldNewPrecision = useCallback<
|
||||
DiffContext['getFieldNewPrecision']
|
||||
>(
|
||||
({ fieldId }) => {
|
||||
const fieldKey = getDiffMapKey({
|
||||
diffObject: 'field',
|
||||
objectId: fieldId,
|
||||
attribute: 'precision',
|
||||
});
|
||||
|
||||
if (diffMap.has(fieldKey)) {
|
||||
const diff = diffMap.get(fieldKey);
|
||||
|
||||
if (diff?.type === 'changed') {
|
||||
return diff.newValue as number;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
[diffMap]
|
||||
);
|
||||
|
||||
const checkIfNewRelationship = useCallback<
|
||||
DiffContext['checkIfNewRelationship']
|
||||
>(
|
||||
@@ -339,6 +452,15 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
|
||||
[diffMap]
|
||||
);
|
||||
|
||||
const resetDiff = useCallback<DiffContext['resetDiff']>(() => {
|
||||
setDiffMap(new Map<string, ChartDBDiff>());
|
||||
setTablesChanged(new Map<string, boolean>());
|
||||
setFieldsChanged(new Map<string, boolean>());
|
||||
setNewDiagram(null);
|
||||
setOriginalDiagram(null);
|
||||
setIsSummaryOnly(false);
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<diffContext.Provider
|
||||
value={{
|
||||
@@ -346,8 +468,10 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
|
||||
originalDiagram,
|
||||
diffMap,
|
||||
hasDiff: diffMap.size > 0,
|
||||
isSummaryOnly,
|
||||
|
||||
calculateDiff,
|
||||
resetDiff,
|
||||
|
||||
// table diff
|
||||
getTableNewName,
|
||||
@@ -362,6 +486,11 @@ export const DiffProvider: React.FC<React.PropsWithChildren> = ({
|
||||
checkIfNewField,
|
||||
getFieldNewName,
|
||||
getFieldNewType,
|
||||
getFieldNewPrimaryKey,
|
||||
getFieldNewNullable,
|
||||
getFieldNewCharacterMaximumLength,
|
||||
getFieldNewScale,
|
||||
getFieldNewPrecision,
|
||||
|
||||
// relationship diff
|
||||
checkIfNewRelationship,
|
||||
|
||||
@@ -19,6 +19,9 @@ export interface LocalConfigContext {
|
||||
showCardinality: boolean;
|
||||
setShowCardinality: (showCardinality: boolean) => void;
|
||||
|
||||
showFieldAttributes: boolean;
|
||||
setShowFieldAttributes: (showFieldAttributes: boolean) => void;
|
||||
|
||||
hideMultiSchemaNotification: boolean;
|
||||
setHideMultiSchemaNotification: (
|
||||
hideMultiSchemaNotification: boolean
|
||||
@@ -50,6 +53,9 @@ export const LocalConfigContext = createContext<LocalConfigContext>({
|
||||
showCardinality: true,
|
||||
setShowCardinality: emptyFn,
|
||||
|
||||
showFieldAttributes: true,
|
||||
setShowFieldAttributes: emptyFn,
|
||||
|
||||
hideMultiSchemaNotification: false,
|
||||
setHideMultiSchemaNotification: emptyFn,
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ const themeKey = 'theme';
|
||||
const scrollActionKey = 'scroll_action';
|
||||
const schemasFilterKey = 'schemas_filter';
|
||||
const showCardinalityKey = 'show_cardinality';
|
||||
const showFieldAttributesKey = 'show_field_attributes';
|
||||
const hideMultiSchemaNotificationKey = 'hide_multi_schema_notification';
|
||||
const githubRepoOpenedKey = 'github_repo_opened';
|
||||
const starUsDialogLastOpenKey = 'star_us_dialog_last_open';
|
||||
@@ -34,6 +35,11 @@ export const LocalConfigProvider: React.FC<React.PropsWithChildren> = ({
|
||||
(localStorage.getItem(showCardinalityKey) || 'true') === 'true'
|
||||
);
|
||||
|
||||
const [showFieldAttributes, setShowFieldAttributes] =
|
||||
React.useState<boolean>(
|
||||
(localStorage.getItem(showFieldAttributesKey) || 'true') === 'true'
|
||||
);
|
||||
|
||||
const [hideMultiSchemaNotification, setHideMultiSchemaNotification] =
|
||||
React.useState<boolean>(
|
||||
(localStorage.getItem(hideMultiSchemaNotificationKey) ||
|
||||
@@ -119,6 +125,8 @@ export const LocalConfigProvider: React.FC<React.PropsWithChildren> = ({
|
||||
setSchemasFilter,
|
||||
showCardinality,
|
||||
setShowCardinality,
|
||||
showFieldAttributes,
|
||||
setShowFieldAttributes,
|
||||
hideMultiSchemaNotification,
|
||||
setHideMultiSchemaNotification,
|
||||
setGithubRepoOpened,
|
||||
|
||||
@@ -61,6 +61,7 @@ export const SelectTables: React.FC<SelectTablesProps> = ({
|
||||
const [showTables, setShowTables] = useState(true);
|
||||
const [showViews, setShowViews] = useState(false);
|
||||
const { t } = useTranslation();
|
||||
const [isImporting, setIsImporting] = useState(false);
|
||||
|
||||
// Prepare all tables and views with their metadata
|
||||
const allTables = useMemo(() => {
|
||||
@@ -258,22 +259,37 @@ export const SelectTables: React.FC<SelectTablesProps> = ({
|
||||
setSelectedTables(new Set());
|
||||
}, []);
|
||||
|
||||
const handleConfirm = useCallback(() => {
|
||||
const selectedTableObjects: SelectedTable[] = Array.from(selectedTables)
|
||||
.map((key): SelectedTable | null => {
|
||||
const table = allTables.find((t) => t.key === key);
|
||||
if (!table) return null;
|
||||
const handleConfirm = useCallback(async () => {
|
||||
if (isImporting) {
|
||||
return;
|
||||
}
|
||||
|
||||
return {
|
||||
schema: table.schema,
|
||||
table: table.tableName,
|
||||
type: table.type,
|
||||
} satisfies SelectedTable;
|
||||
})
|
||||
.filter((t): t is SelectedTable => t !== null);
|
||||
setIsImporting(true);
|
||||
|
||||
onImport({ selectedTables: selectedTableObjects, databaseMetadata });
|
||||
}, [selectedTables, allTables, onImport, databaseMetadata]);
|
||||
try {
|
||||
const selectedTableObjects: SelectedTable[] = Array.from(
|
||||
selectedTables
|
||||
)
|
||||
.map((key): SelectedTable | null => {
|
||||
const table = allTables.find((t) => t.key === key);
|
||||
if (!table) return null;
|
||||
|
||||
return {
|
||||
schema: table.schema,
|
||||
table: table.tableName,
|
||||
type: table.type,
|
||||
} satisfies SelectedTable;
|
||||
})
|
||||
.filter((t): t is SelectedTable => t !== null);
|
||||
|
||||
await onImport({
|
||||
selectedTables: selectedTableObjects,
|
||||
databaseMetadata,
|
||||
});
|
||||
} finally {
|
||||
setIsImporting(false);
|
||||
}
|
||||
}, [selectedTables, allTables, onImport, databaseMetadata, isImporting]);
|
||||
|
||||
const { isMd: isDesktop } = useBreakpoint('md');
|
||||
|
||||
@@ -635,27 +651,29 @@ export const SelectTables: React.FC<SelectTablesProps> = ({
|
||||
</div>
|
||||
{isDesktop ? renderPagination() : null}
|
||||
</DialogInternalContent>
|
||||
<DialogFooter
|
||||
// className={cn(
|
||||
// 'gap-2',
|
||||
// isDesktop
|
||||
// ? 'flex items-center justify-between'
|
||||
// : 'flex flex-col'
|
||||
// )}
|
||||
className="flex flex-col-reverse gap-2 sm:flex-row sm:justify-end sm:space-x-2 md:justify-between md:gap-0"
|
||||
>
|
||||
{/* Desktop layout */}
|
||||
|
||||
<Button type="button" variant="secondary" onClick={onBack}>
|
||||
<DialogFooter className="flex flex-col-reverse gap-2 sm:flex-row sm:justify-end sm:space-x-2 md:justify-between md:gap-0">
|
||||
<Button
|
||||
type="button"
|
||||
variant="secondary"
|
||||
onClick={onBack}
|
||||
disabled={isImporting}
|
||||
>
|
||||
{t('new_diagram_dialog.back')}
|
||||
</Button>
|
||||
|
||||
<Button
|
||||
onClick={handleConfirm}
|
||||
disabled={selectedTables.size === 0}
|
||||
disabled={selectedTables.size === 0 || isImporting}
|
||||
className="bg-pink-500 text-white hover:bg-pink-600"
|
||||
>
|
||||
Import {selectedTables.size} Tables
|
||||
{isImporting ? (
|
||||
<>
|
||||
<Spinner className="mr-2 size-4 text-white" />
|
||||
Importing...
|
||||
</>
|
||||
) : (
|
||||
`Import ${selectedTables.size} Tables`
|
||||
)}
|
||||
</Button>
|
||||
|
||||
{!isDesktop ? renderPagination() : null}
|
||||
|
||||
@@ -5,7 +5,7 @@ import React, {
|
||||
Suspense,
|
||||
useRef,
|
||||
} from 'react';
|
||||
import * as monaco from 'monaco-editor';
|
||||
import type * as monaco from 'monaco-editor';
|
||||
import { useDialog } from '@/hooks/use-dialog';
|
||||
import {
|
||||
Dialog,
|
||||
@@ -36,45 +36,11 @@ import type { DBTable } from '@/lib/domain/db-table';
|
||||
import { useToast } from '@/components/toast/use-toast';
|
||||
import { Spinner } from '@/components/spinner/spinner';
|
||||
import { debounce } from '@/lib/utils';
|
||||
|
||||
interface DBMLError {
|
||||
message: string;
|
||||
line: number;
|
||||
column: number;
|
||||
}
|
||||
|
||||
function parseDBMLError(error: unknown): DBMLError | null {
|
||||
try {
|
||||
if (typeof error === 'string') {
|
||||
const parsed = JSON.parse(error);
|
||||
if (parsed.diags?.[0]) {
|
||||
const diag = parsed.diags[0];
|
||||
return {
|
||||
message: diag.message,
|
||||
line: diag.location.start.line,
|
||||
column: diag.location.start.column,
|
||||
};
|
||||
}
|
||||
} else if (error && typeof error === 'object' && 'diags' in error) {
|
||||
const parsed = error as {
|
||||
diags: Array<{
|
||||
message: string;
|
||||
location: { start: { line: number; column: number } };
|
||||
}>;
|
||||
};
|
||||
if (parsed.diags?.[0]) {
|
||||
return {
|
||||
message: parsed.diags[0].message,
|
||||
line: parsed.diags[0].location.start.line,
|
||||
column: parsed.diags[0].location.start.column,
|
||||
};
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
console.error('Error parsing DBML error:', e);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
import { parseDBMLError } from '@/lib/dbml/dbml-import/dbml-import-error';
|
||||
import {
|
||||
clearErrorHighlight,
|
||||
highlightErrorLine,
|
||||
} from '@/components/code-snippet/dbml/utils';
|
||||
|
||||
export interface ImportDBMLDialogProps extends BaseDialogProps {
|
||||
withCreateEmptyDiagram?: boolean;
|
||||
@@ -150,39 +116,8 @@ Ref: comments.user_id > users.id // Each comment is written by one user`;
|
||||
}
|
||||
}, [reorder, reorderTables]);
|
||||
|
||||
const highlightErrorLine = useCallback((error: DBMLError) => {
|
||||
if (!editorRef.current) return;
|
||||
|
||||
const model = editorRef.current.getModel();
|
||||
if (!model) return;
|
||||
|
||||
const decorations = [
|
||||
{
|
||||
range: new monaco.Range(
|
||||
error.line,
|
||||
1,
|
||||
error.line,
|
||||
model.getLineMaxColumn(error.line)
|
||||
),
|
||||
options: {
|
||||
isWholeLine: true,
|
||||
className: 'dbml-error-line',
|
||||
glyphMarginClassName: 'dbml-error-glyph',
|
||||
hoverMessage: { value: error.message },
|
||||
overviewRuler: {
|
||||
color: '#ff0000',
|
||||
position: monaco.editor.OverviewRulerLane.Right,
|
||||
darkColor: '#ff0000',
|
||||
},
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
decorationsCollection.current?.set(decorations);
|
||||
}, []);
|
||||
|
||||
const clearDecorations = useCallback(() => {
|
||||
decorationsCollection.current?.clear();
|
||||
clearErrorHighlight(decorationsCollection.current);
|
||||
}, []);
|
||||
|
||||
const validateDBML = useCallback(
|
||||
@@ -205,7 +140,12 @@ Ref: comments.user_id > users.id // Each comment is written by one user`;
|
||||
t('import_dbml_dialog.error.description') +
|
||||
` (1 error found - in line ${parsedError.line})`
|
||||
);
|
||||
highlightErrorLine(parsedError);
|
||||
highlightErrorLine({
|
||||
error: parsedError,
|
||||
model: editorRef.current?.getModel(),
|
||||
editorDecorationsCollection:
|
||||
decorationsCollection.current,
|
||||
});
|
||||
} else {
|
||||
setErrorMessage(
|
||||
e instanceof Error ? e.message : JSON.stringify(e)
|
||||
@@ -213,7 +153,7 @@ Ref: comments.user_id > users.id // Each comment is written by one user`;
|
||||
}
|
||||
}
|
||||
},
|
||||
[clearDecorations, highlightErrorLine, t]
|
||||
[clearDecorations, t]
|
||||
);
|
||||
|
||||
const debouncedValidateRef = useRef<((value: string) => void) | null>(null);
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import React, { useCallback, useEffect, useMemo } from 'react';
|
||||
import React, { useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import { useDialog } from '@/hooks/use-dialog';
|
||||
import {
|
||||
Dialog,
|
||||
@@ -17,11 +17,23 @@ import type { DBSchema } from '@/lib/domain/db-schema';
|
||||
import { schemaNameToSchemaId } from '@/lib/domain/db-schema';
|
||||
import type { BaseDialogProps } from '../common/base-dialog-props';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Input } from '@/components/input/input';
|
||||
import { Separator } from '@/components/separator/separator';
|
||||
import { Group, SquarePlus } from 'lucide-react';
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
TooltipTrigger,
|
||||
} from '@/components/tooltip/tooltip';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import { defaultSchemas } from '@/lib/data/default-schemas';
|
||||
import { Label } from '@/components/label/label';
|
||||
|
||||
export interface TableSchemaDialogProps extends BaseDialogProps {
|
||||
table?: DBTable;
|
||||
schemas: DBSchema[];
|
||||
onConfirm: ({ schema }: { schema: DBSchema }) => void;
|
||||
allowSchemaCreation?: boolean;
|
||||
}
|
||||
|
||||
export const TableSchemaDialog: React.FC<TableSchemaDialogProps> = ({
|
||||
@@ -29,13 +41,32 @@ export const TableSchemaDialog: React.FC<TableSchemaDialogProps> = ({
|
||||
table,
|
||||
schemas,
|
||||
onConfirm,
|
||||
allowSchemaCreation = false,
|
||||
}) => {
|
||||
const { t } = useTranslation();
|
||||
const [selectedSchemaId, setSelectedSchemaId] = React.useState<string>(
|
||||
const { databaseType, filteredSchemas, filterSchemas } = useChartDB();
|
||||
const [selectedSchemaId, setSelectedSchemaId] = useState<string>(
|
||||
table?.schema
|
||||
? schemaNameToSchemaId(table.schema)
|
||||
: (schemas?.[0]?.id ?? '')
|
||||
);
|
||||
const allowSchemaSelection = useMemo(
|
||||
() => schemas && schemas.length > 0,
|
||||
[schemas]
|
||||
);
|
||||
|
||||
const defaultSchemaName = useMemo(
|
||||
() => defaultSchemas?.[databaseType],
|
||||
[databaseType]
|
||||
);
|
||||
|
||||
const [isCreatingNew, setIsCreatingNew] =
|
||||
useState<boolean>(!allowSchemaSelection);
|
||||
const [newSchemaName, setNewSchemaName] = useState<string>(
|
||||
allowSchemaCreation && !allowSchemaSelection
|
||||
? (defaultSchemaName ?? '')
|
||||
: ''
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
if (!dialog.open) return;
|
||||
@@ -44,15 +75,56 @@ export const TableSchemaDialog: React.FC<TableSchemaDialogProps> = ({
|
||||
? schemaNameToSchemaId(table.schema)
|
||||
: (schemas?.[0]?.id ?? '')
|
||||
);
|
||||
}, [dialog.open, schemas, table?.schema]);
|
||||
setIsCreatingNew(!allowSchemaSelection);
|
||||
setNewSchemaName(
|
||||
allowSchemaCreation && !allowSchemaSelection
|
||||
? (defaultSchemaName ?? '')
|
||||
: ''
|
||||
);
|
||||
}, [
|
||||
defaultSchemaName,
|
||||
dialog.open,
|
||||
schemas,
|
||||
table?.schema,
|
||||
allowSchemaSelection,
|
||||
allowSchemaCreation,
|
||||
]);
|
||||
|
||||
const { closeTableSchemaDialog } = useDialog();
|
||||
|
||||
const handleConfirm = useCallback(() => {
|
||||
const schema = schemas.find((s) => s.id === selectedSchemaId);
|
||||
if (!schema) return;
|
||||
let createdSchemaId: string;
|
||||
if (isCreatingNew && newSchemaName.trim()) {
|
||||
const newSchema: DBSchema = {
|
||||
id: schemaNameToSchemaId(newSchemaName.trim()),
|
||||
name: newSchemaName.trim(),
|
||||
tableCount: 0,
|
||||
};
|
||||
|
||||
onConfirm({ schema });
|
||||
}, [onConfirm, selectedSchemaId, schemas]);
|
||||
createdSchemaId = newSchema.id;
|
||||
|
||||
onConfirm({ schema: newSchema });
|
||||
} else {
|
||||
const schema = schemas.find((s) => s.id === selectedSchemaId);
|
||||
if (!schema) return;
|
||||
|
||||
createdSchemaId = schema.id;
|
||||
onConfirm({ schema });
|
||||
}
|
||||
|
||||
filterSchemas([
|
||||
...(filteredSchemas ?? schemas.map((s) => s.id)),
|
||||
createdSchemaId,
|
||||
]);
|
||||
}, [
|
||||
onConfirm,
|
||||
selectedSchemaId,
|
||||
schemas,
|
||||
isCreatingNew,
|
||||
newSchemaName,
|
||||
filteredSchemas,
|
||||
filterSchemas,
|
||||
]);
|
||||
|
||||
const schemaOptions: SelectBoxOption[] = useMemo(
|
||||
() =>
|
||||
@@ -63,6 +135,25 @@ export const TableSchemaDialog: React.FC<TableSchemaDialogProps> = ({
|
||||
[schemas]
|
||||
);
|
||||
|
||||
const renderSwitchCreateOrSelectButton = useCallback(
|
||||
() => (
|
||||
<Button
|
||||
variant="outline"
|
||||
className="w-full justify-start"
|
||||
onClick={() => setIsCreatingNew(!isCreatingNew)}
|
||||
disabled={!allowSchemaSelection || !allowSchemaCreation}
|
||||
>
|
||||
{!isCreatingNew ? (
|
||||
<SquarePlus className="mr-2 size-4 " />
|
||||
) : (
|
||||
<Group className="mr-2 size-4 " />
|
||||
)}
|
||||
{isCreatingNew ? 'Select existing schema' : 'Create new schema'}
|
||||
</Button>
|
||||
),
|
||||
[isCreatingNew, allowSchemaSelection, allowSchemaCreation]
|
||||
);
|
||||
|
||||
return (
|
||||
<Dialog
|
||||
{...dialog}
|
||||
@@ -70,48 +161,106 @@ export const TableSchemaDialog: React.FC<TableSchemaDialogProps> = ({
|
||||
if (!open) {
|
||||
closeTableSchemaDialog();
|
||||
}
|
||||
|
||||
setTimeout(() => (document.body.style.pointerEvents = ''), 500);
|
||||
}}
|
||||
>
|
||||
<DialogContent className="flex flex-col" showClose>
|
||||
<DialogHeader>
|
||||
<DialogTitle>
|
||||
{table
|
||||
? t('update_table_schema_dialog.title')
|
||||
: t('new_table_schema_dialog.title')}
|
||||
{!allowSchemaSelection && allowSchemaCreation
|
||||
? t('create_table_schema_dialog.title')
|
||||
: table
|
||||
? t('update_table_schema_dialog.title')
|
||||
: t('new_table_schema_dialog.title')}
|
||||
</DialogTitle>
|
||||
<DialogDescription>
|
||||
{table
|
||||
? t('update_table_schema_dialog.description', {
|
||||
tableName: table.name,
|
||||
})
|
||||
: t('new_table_schema_dialog.description')}
|
||||
{!allowSchemaSelection && allowSchemaCreation
|
||||
? t('create_table_schema_dialog.description')
|
||||
: table
|
||||
? t('update_table_schema_dialog.description', {
|
||||
tableName: table.name,
|
||||
})
|
||||
: t('new_table_schema_dialog.description')}
|
||||
</DialogDescription>
|
||||
</DialogHeader>
|
||||
<div className="grid gap-4 py-1">
|
||||
<div className="grid w-full items-center gap-4">
|
||||
<SelectBox
|
||||
options={schemaOptions}
|
||||
multiple={false}
|
||||
value={selectedSchemaId}
|
||||
onChange={(value) =>
|
||||
setSelectedSchemaId(value as string)
|
||||
}
|
||||
/>
|
||||
{!isCreatingNew ? (
|
||||
<SelectBox
|
||||
options={schemaOptions}
|
||||
multiple={false}
|
||||
value={selectedSchemaId}
|
||||
onChange={(value) =>
|
||||
setSelectedSchemaId(value as string)
|
||||
}
|
||||
/>
|
||||
) : (
|
||||
<div className="flex flex-col gap-2">
|
||||
{allowSchemaCreation &&
|
||||
!allowSchemaSelection ? (
|
||||
<Label htmlFor="new-schema-name">
|
||||
Schema Name
|
||||
</Label>
|
||||
) : null}
|
||||
<Input
|
||||
id="new-schema-name"
|
||||
value={newSchemaName}
|
||||
onChange={(e) =>
|
||||
setNewSchemaName(e.target.value)
|
||||
}
|
||||
placeholder={`Enter schema name.${defaultSchemaName ? ` e.g. ${defaultSchemaName}.` : ''}`}
|
||||
autoFocus
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{allowSchemaCreation && allowSchemaSelection ? (
|
||||
<>
|
||||
<div className="relative">
|
||||
<Separator className="my-2" />
|
||||
<span className="absolute left-1/2 top-1/2 -translate-x-1/2 -translate-y-1/2 bg-background px-2 text-xs text-muted-foreground">
|
||||
or
|
||||
</span>
|
||||
</div>
|
||||
{allowSchemaSelection ? (
|
||||
renderSwitchCreateOrSelectButton()
|
||||
) : (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<span>
|
||||
{renderSwitchCreateOrSelectButton()}
|
||||
</span>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>No existing schemas available</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
)}
|
||||
</>
|
||||
) : null}
|
||||
</div>
|
||||
</div>
|
||||
<DialogFooter className="flex gap-1 md:justify-between">
|
||||
<DialogClose asChild>
|
||||
<Button variant="secondary">
|
||||
{table
|
||||
? t('update_table_schema_dialog.cancel')
|
||||
: t('new_table_schema_dialog.cancel')}
|
||||
{isCreatingNew
|
||||
? t('create_table_schema_dialog.cancel')
|
||||
: table
|
||||
? t('update_table_schema_dialog.cancel')
|
||||
: t('new_table_schema_dialog.cancel')}
|
||||
</Button>
|
||||
</DialogClose>
|
||||
<DialogClose asChild>
|
||||
<Button onClick={handleConfirm}>
|
||||
{table
|
||||
? t('update_table_schema_dialog.confirm')
|
||||
: t('new_table_schema_dialog.confirm')}
|
||||
<Button
|
||||
onClick={handleConfirm}
|
||||
disabled={isCreatingNew && !newSchemaName.trim()}
|
||||
>
|
||||
{isCreatingNew
|
||||
? t('create_table_schema_dialog.create')
|
||||
: table
|
||||
? t('update_table_schema_dialog.confirm')
|
||||
: t('new_table_schema_dialog.confirm')}
|
||||
</Button>
|
||||
</DialogClose>
|
||||
</DialogFooter>
|
||||
|
||||
@@ -83,6 +83,7 @@
|
||||
}
|
||||
body {
|
||||
@apply bg-background text-foreground;
|
||||
overscroll-behavior-x: none;
|
||||
}
|
||||
|
||||
.text-editable {
|
||||
@@ -154,3 +155,29 @@
|
||||
background-size: 650%;
|
||||
}
|
||||
}
|
||||
|
||||
/* Edit button emphasis animation */
|
||||
@keyframes dbml_edit-button-emphasis {
|
||||
0% {
|
||||
transform: scale(1);
|
||||
box-shadow: 0 0 0 0 rgba(59, 130, 246, 0.7);
|
||||
background-color: rgba(59, 130, 246, 0);
|
||||
}
|
||||
50% {
|
||||
transform: scale(1.1);
|
||||
box-shadow: 0 0 0 10px rgba(59, 130, 246, 0);
|
||||
background-color: rgba(59, 130, 246, 0.1);
|
||||
}
|
||||
100% {
|
||||
transform: scale(1);
|
||||
box-shadow: 0 0 0 0 rgba(59, 130, 246, 0);
|
||||
background-color: rgba(59, 130, 246, 0);
|
||||
}
|
||||
}
|
||||
|
||||
.dbml-edit-button-emphasis {
|
||||
animation: dbml_edit-button-emphasis 0.6s ease-in-out;
|
||||
animation-iteration-count: 1;
|
||||
position: relative;
|
||||
z-index: 10;
|
||||
}
|
||||
|
||||
@@ -23,23 +23,25 @@ import { bn, bnMetadata } from './locales/bn';
|
||||
import { gu, guMetadata } from './locales/gu';
|
||||
import { vi, viMetadata } from './locales/vi';
|
||||
import { ar, arMetadata } from './locales/ar';
|
||||
import { hr, hrMetadata } from './locales/hr';
|
||||
|
||||
export const languages: LanguageMetadata[] = [
|
||||
enMetadata,
|
||||
esMetadata,
|
||||
frMetadata,
|
||||
deMetadata,
|
||||
esMetadata,
|
||||
ukMetadata,
|
||||
ruMetadata,
|
||||
trMetadata,
|
||||
hrMetadata,
|
||||
pt_BRMetadata,
|
||||
hiMetadata,
|
||||
jaMetadata,
|
||||
ko_KRMetadata,
|
||||
pt_BRMetadata,
|
||||
ukMetadata,
|
||||
ruMetadata,
|
||||
zh_CNMetadata,
|
||||
zh_TWMetadata,
|
||||
neMetadata,
|
||||
mrMetadata,
|
||||
trMetadata,
|
||||
id_IDMetadata,
|
||||
teMetadata,
|
||||
bnMetadata,
|
||||
@@ -70,6 +72,7 @@ const resources = {
|
||||
gu,
|
||||
vi,
|
||||
ar,
|
||||
hr,
|
||||
};
|
||||
|
||||
i18n.use(LanguageDetector)
|
||||
|
||||
@@ -26,6 +26,8 @@ export const ar: LanguageTranslation = {
|
||||
hide_sidebar: 'إخفاء الشريط الجانبي',
|
||||
hide_cardinality: 'إخفاء الكاردينالية',
|
||||
show_cardinality: 'إظهار الكاردينالية',
|
||||
hide_field_attributes: 'إخفاء خصائص الحقل',
|
||||
show_field_attributes: 'إظهار خصائص الحقل',
|
||||
zoom_on_scroll: 'تكبير/تصغير عند التمرير',
|
||||
theme: 'المظهر',
|
||||
show_dependencies: 'إظهار الاعتمادات',
|
||||
@@ -151,6 +153,8 @@ export const ar: LanguageTranslation = {
|
||||
delete_field: 'حذف الحقل',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'الدقة',
|
||||
scale: 'النطاق',
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
},
|
||||
@@ -253,9 +257,12 @@ export const ar: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -273,6 +280,9 @@ export const ar: LanguageTranslation = {
|
||||
highlight_overlapping_tables: 'تمييز الجداول المتداخلة',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
@@ -404,6 +414,13 @@ export const ar: LanguageTranslation = {
|
||||
cancel: 'إلغاء',
|
||||
confirm: 'تغيير',
|
||||
},
|
||||
create_table_schema_dialog: {
|
||||
title: 'إنشاء مخطط جديد',
|
||||
description:
|
||||
'لا توجد مخططات حتى الآن. قم بإنشاء أول مخطط لتنظيم جداولك.',
|
||||
create: 'إنشاء',
|
||||
cancel: 'إلغاء',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: '!ساعدنا على التحسن',
|
||||
|
||||
@@ -26,6 +26,8 @@ export const bn: LanguageTranslation = {
|
||||
hide_sidebar: 'সাইডবার লুকান',
|
||||
hide_cardinality: 'কার্ডিনালিটি লুকান',
|
||||
show_cardinality: 'কার্ডিনালিটি দেখান',
|
||||
hide_field_attributes: 'ফিল্ড অ্যাট্রিবিউট লুকান',
|
||||
show_field_attributes: 'ফিল্ড অ্যাট্রিবিউট দেখান',
|
||||
zoom_on_scroll: 'স্ক্রলে জুম করুন',
|
||||
theme: 'থিম',
|
||||
show_dependencies: 'নির্ভরতাগুলি দেখান',
|
||||
@@ -155,6 +157,8 @@ export const bn: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'নির্ভুলতা',
|
||||
scale: 'স্কেল',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'ইনডেক্স কর্ম',
|
||||
@@ -254,9 +258,12 @@ export const bn: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -272,7 +279,11 @@ export const bn: LanguageTranslation = {
|
||||
redo: 'পুনরায় করুন',
|
||||
reorder_diagram: 'ডায়াগ্রাম পুনর্বিন্যাস করুন',
|
||||
highlight_overlapping_tables: 'ওভারল্যাপিং টেবিল হাইলাইট করুন',
|
||||
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
@@ -405,6 +416,13 @@ export const bn: LanguageTranslation = {
|
||||
cancel: 'বাতিল করুন',
|
||||
confirm: 'পরিবর্তন করুন',
|
||||
},
|
||||
create_table_schema_dialog: {
|
||||
title: 'নতুন স্কিমা তৈরি করুন',
|
||||
description:
|
||||
'এখনও কোনো স্কিমা নেই। আপনার টেবিলগুলি সংগঠিত করতে আপনার প্রথম স্কিমা তৈরি করুন।',
|
||||
create: 'তৈরি করুন',
|
||||
cancel: 'বাতিল করুন',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'আমাদের উন্নত করতে সাহায্য করুন!',
|
||||
|
||||
@@ -26,6 +26,8 @@ export const de: LanguageTranslation = {
|
||||
hide_sidebar: 'Seitenleiste ausblenden',
|
||||
hide_cardinality: 'Kardinalität ausblenden',
|
||||
show_cardinality: 'Kardinalität anzeigen',
|
||||
hide_field_attributes: 'Feldattribute ausblenden',
|
||||
show_field_attributes: 'Feldattribute anzeigen',
|
||||
zoom_on_scroll: 'Zoom beim Scrollen',
|
||||
theme: 'Stil',
|
||||
show_dependencies: 'Abhängigkeiten anzeigen',
|
||||
@@ -156,6 +158,8 @@ export const de: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'Präzision',
|
||||
scale: 'Skalierung',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Indexattribute',
|
||||
@@ -256,9 +260,12 @@ export const de: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -273,6 +280,11 @@ export const de: LanguageTranslation = {
|
||||
undo: 'Rückgängig',
|
||||
redo: 'Wiederholen',
|
||||
reorder_diagram: 'Diagramm neu anordnen',
|
||||
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'Überlappende Tabellen hervorheben',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -408,6 +420,13 @@ export const de: LanguageTranslation = {
|
||||
cancel: 'Abbrechen',
|
||||
confirm: 'Ändern',
|
||||
},
|
||||
create_table_schema_dialog: {
|
||||
title: 'Neues Schema erstellen',
|
||||
description:
|
||||
'Es existieren noch keine Schemas. Erstellen Sie Ihr erstes Schema, um Ihre Tabellen zu organisieren.',
|
||||
create: 'Erstellen',
|
||||
cancel: 'Abbrechen',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'Hilf uns, uns zu verbessern!',
|
||||
|
||||
@@ -26,6 +26,8 @@ export const en = {
|
||||
hide_sidebar: 'Hide Sidebar',
|
||||
hide_cardinality: 'Hide Cardinality',
|
||||
show_cardinality: 'Show Cardinality',
|
||||
hide_field_attributes: 'Hide Field Attributes',
|
||||
show_field_attributes: 'Show Field Attributes',
|
||||
zoom_on_scroll: 'Zoom on Scroll',
|
||||
theme: 'Theme',
|
||||
show_dependencies: 'Show Dependencies',
|
||||
@@ -143,6 +145,8 @@ export const en = {
|
||||
title: 'Field Attributes',
|
||||
unique: 'Unique',
|
||||
character_length: 'Max Length',
|
||||
precision: 'Precision',
|
||||
scale: 'Scale',
|
||||
comments: 'Comments',
|
||||
no_comments: 'No comments',
|
||||
default_value: 'Default Value',
|
||||
@@ -246,8 +250,11 @@ export const en = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
delete_custom_type: 'Delete',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
@@ -264,6 +271,9 @@ export const en = {
|
||||
redo: 'Redo',
|
||||
reorder_diagram: 'Reorder Diagram',
|
||||
highlight_overlapping_tables: 'Highlight Overlapping Tables',
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
@@ -396,6 +406,14 @@ export const en = {
|
||||
confirm: 'Change',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'Create New Schema',
|
||||
description:
|
||||
'No schemas exist yet. Create your first schema to organize your tables.',
|
||||
create: 'Create',
|
||||
cancel: 'Cancel',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'Help us improve!',
|
||||
description:
|
||||
|
||||
@@ -24,6 +24,8 @@ export const es: LanguageTranslation = {
|
||||
view: 'Ver',
|
||||
hide_cardinality: 'Ocultar Cardinalidad',
|
||||
show_cardinality: 'Mostrar Cardinalidad',
|
||||
show_field_attributes: 'Mostrar Atributos de Campo',
|
||||
hide_field_attributes: 'Ocultar Atributos de Campo',
|
||||
show_sidebar: 'Mostrar Barra Lateral',
|
||||
hide_sidebar: 'Ocultar Barra Lateral',
|
||||
zoom_on_scroll: 'Zoom al Desplazarse',
|
||||
@@ -145,6 +147,8 @@ export const es: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'Precisión',
|
||||
scale: 'Escala',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Atributos del Índice',
|
||||
@@ -244,9 +248,12 @@ export const es: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -261,6 +268,10 @@ export const es: LanguageTranslation = {
|
||||
undo: 'Deshacer',
|
||||
redo: 'Rehacer',
|
||||
reorder_diagram: 'Reordenar Diagrama',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'Resaltar tablas superpuestas',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -397,6 +408,13 @@ export const es: LanguageTranslation = {
|
||||
cancel: 'Cancelar',
|
||||
confirm: 'Cambiar',
|
||||
},
|
||||
create_table_schema_dialog: {
|
||||
title: 'Crear Nuevo Esquema',
|
||||
description:
|
||||
'Aún no existen esquemas. Crea tu primer esquema para organizar tus tablas.',
|
||||
create: 'Crear',
|
||||
cancel: 'Cancelar',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: '¡Ayúdanos a mejorar!',
|
||||
|
||||
@@ -26,6 +26,8 @@ export const fr: LanguageTranslation = {
|
||||
hide_sidebar: 'Cacher la Barre Latérale',
|
||||
hide_cardinality: 'Cacher la Cardinalité',
|
||||
show_cardinality: 'Afficher la Cardinalité',
|
||||
hide_field_attributes: 'Masquer les Attributs de Champ',
|
||||
show_field_attributes: 'Afficher les Attributs de Champ',
|
||||
zoom_on_scroll: 'Zoom sur le Défilement',
|
||||
theme: 'Thème',
|
||||
show_dependencies: 'Afficher les Dépendances',
|
||||
@@ -143,6 +145,8 @@ export const fr: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'Précision',
|
||||
scale: 'Échelle',
|
||||
},
|
||||
index_actions: {
|
||||
title: "Attributs de l'Index",
|
||||
@@ -242,9 +246,12 @@ export const fr: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -259,6 +266,10 @@ export const fr: LanguageTranslation = {
|
||||
undo: 'Annuler',
|
||||
redo: 'Rétablir',
|
||||
reorder_diagram: 'Réorganiser le Diagramme',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'Surligner les tables chevauchées',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -377,6 +388,13 @@ export const fr: LanguageTranslation = {
|
||||
cancel: 'Annuler',
|
||||
confirm: 'Modifier',
|
||||
},
|
||||
create_table_schema_dialog: {
|
||||
title: 'Créer un Nouveau Schéma',
|
||||
description:
|
||||
"Aucun schéma n'existe encore. Créez votre premier schéma pour organiser vos tables.",
|
||||
create: 'Créer',
|
||||
cancel: 'Annuler',
|
||||
},
|
||||
|
||||
create_relationship_dialog: {
|
||||
title: 'Créer une Relation',
|
||||
|
||||
@@ -26,6 +26,8 @@ export const gu: LanguageTranslation = {
|
||||
hide_sidebar: 'સાઇડબાર છુપાવો',
|
||||
hide_cardinality: 'કાર્ડિનાલિટી છુપાવો',
|
||||
show_cardinality: 'કાર્ડિનાલિટી બતાવો',
|
||||
hide_field_attributes: 'ફીલ્ડ અટ્રિબ્યુટ્સ છુપાવો',
|
||||
show_field_attributes: 'ફીલ્ડ અટ્રિબ્યુટ્સ બતાવો',
|
||||
zoom_on_scroll: 'સ્ક્રોલ પર ઝૂમ કરો',
|
||||
theme: 'થિમ',
|
||||
show_dependencies: 'નિર્ભરતાઓ બતાવો',
|
||||
@@ -156,6 +158,8 @@ export const gu: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'ચોકસાઈ',
|
||||
scale: 'માપ',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'ઇન્ડેક્સ લક્ષણો',
|
||||
@@ -255,9 +259,12 @@ export const gu: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -272,6 +279,10 @@ export const gu: LanguageTranslation = {
|
||||
undo: 'અનડુ',
|
||||
redo: 'રીડુ',
|
||||
reorder_diagram: 'ડાયાગ્રામ ફરીથી વ્યવસ્થિત કરો',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'ઓવરલેપ કરતો ટેબલ હાઇલાઇટ કરો',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -406,6 +417,14 @@ export const gu: LanguageTranslation = {
|
||||
confirm: 'બદલો',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'નવું સ્કીમા બનાવો',
|
||||
description:
|
||||
'હજી સુધી કોઈ સ્કીમા અસ્તિત્વમાં નથી. તમારા ટેબલ્સ ને વ્યવસ્થિત કરવા માટે તમારું પહેલું સ્કીમા બનાવો.',
|
||||
create: 'બનાવો',
|
||||
cancel: 'રદ કરો',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'અમને સુધારવામાં મદદ કરો!',
|
||||
description:
|
||||
|
||||
@@ -26,6 +26,8 @@ export const hi: LanguageTranslation = {
|
||||
hide_sidebar: 'साइडबार छिपाएँ',
|
||||
hide_cardinality: 'कार्डिनैलिटी छिपाएँ',
|
||||
show_cardinality: 'कार्डिनैलिटी दिखाएँ',
|
||||
hide_field_attributes: 'फ़ील्ड विशेषताएँ छिपाएँ',
|
||||
show_field_attributes: 'फ़ील्ड विशेषताएँ दिखाएँ',
|
||||
zoom_on_scroll: 'स्क्रॉल पर ज़ूम',
|
||||
theme: 'थीम',
|
||||
show_dependencies: 'निर्भरता दिखाएँ',
|
||||
@@ -155,6 +157,8 @@ export const hi: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'Precision',
|
||||
scale: 'Scale',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'सूचकांक विशेषताएँ',
|
||||
@@ -255,9 +259,12 @@ export const hi: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -272,6 +279,10 @@ export const hi: LanguageTranslation = {
|
||||
undo: 'पूर्ववत करें',
|
||||
redo: 'पुनः करें',
|
||||
reorder_diagram: 'आरेख पुनः व्यवस्थित करें',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'ओवरलैपिंग तालिकाओं को हाइलाइट करें',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -409,6 +420,14 @@ export const hi: LanguageTranslation = {
|
||||
confirm: 'बदलें',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'नया स्कीमा बनाएं',
|
||||
description:
|
||||
'अभी तक कोई स्कीमा मौजूद नहीं है। अपनी तालिकाओं को व्यवस्थित करने के लिए अपना पहला स्कीमा बनाएं।',
|
||||
create: 'बनाएं',
|
||||
cancel: 'रद्द करें',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'हमें सुधारने में मदद करें!',
|
||||
description:
|
||||
|
||||
503
src/i18n/locales/hr.ts
Normal file
503
src/i18n/locales/hr.ts
Normal file
@@ -0,0 +1,503 @@
|
||||
import type { LanguageMetadata, LanguageTranslation } from '../types';
|
||||
|
||||
export const hr: LanguageTranslation = {
|
||||
translation: {
|
||||
menu: {
|
||||
file: {
|
||||
file: 'Datoteka',
|
||||
new: 'Nova',
|
||||
open: 'Otvori',
|
||||
save: 'Spremi',
|
||||
import: 'Uvezi',
|
||||
export_sql: 'Izvezi SQL',
|
||||
export_as: 'Izvezi kao',
|
||||
delete_diagram: 'Izbriši dijagram',
|
||||
exit: 'Izađi',
|
||||
},
|
||||
edit: {
|
||||
edit: 'Uredi',
|
||||
undo: 'Poništi',
|
||||
redo: 'Ponovi',
|
||||
clear: 'Očisti',
|
||||
},
|
||||
view: {
|
||||
view: 'Prikaz',
|
||||
show_sidebar: 'Prikaži bočnu traku',
|
||||
hide_sidebar: 'Sakrij bočnu traku',
|
||||
hide_cardinality: 'Sakrij kardinalnost',
|
||||
show_cardinality: 'Prikaži kardinalnost',
|
||||
hide_field_attributes: 'Sakrij atribute polja',
|
||||
show_field_attributes: 'Prikaži atribute polja',
|
||||
zoom_on_scroll: 'Zumiranje pri skrolanju',
|
||||
theme: 'Tema',
|
||||
show_dependencies: 'Prikaži ovisnosti',
|
||||
hide_dependencies: 'Sakrij ovisnosti',
|
||||
show_minimap: 'Prikaži mini kartu',
|
||||
hide_minimap: 'Sakrij mini kartu',
|
||||
},
|
||||
backup: {
|
||||
backup: 'Sigurnosna kopija',
|
||||
export_diagram: 'Izvezi dijagram',
|
||||
restore_diagram: 'Vrati dijagram',
|
||||
},
|
||||
help: {
|
||||
help: 'Pomoć',
|
||||
docs_website: 'Dokumentacija',
|
||||
join_discord: 'Pridružite nam se na Discordu',
|
||||
},
|
||||
},
|
||||
|
||||
delete_diagram_alert: {
|
||||
title: 'Izbriši dijagram',
|
||||
description:
|
||||
'Ova radnja se ne može poništiti. Ovo će trajno izbrisati dijagram.',
|
||||
cancel: 'Odustani',
|
||||
delete: 'Izbriši',
|
||||
},
|
||||
|
||||
clear_diagram_alert: {
|
||||
title: 'Očisti dijagram',
|
||||
description:
|
||||
'Ova radnja se ne može poništiti. Ovo će trajno izbrisati sve podatke u dijagramu.',
|
||||
cancel: 'Odustani',
|
||||
clear: 'Očisti',
|
||||
},
|
||||
|
||||
reorder_diagram_alert: {
|
||||
title: 'Preuredi dijagram',
|
||||
description:
|
||||
'Ova radnja će preurediti sve tablice u dijagramu. Želite li nastaviti?',
|
||||
reorder: 'Preuredi',
|
||||
cancel: 'Odustani',
|
||||
},
|
||||
|
||||
multiple_schemas_alert: {
|
||||
title: 'Više shema',
|
||||
description:
|
||||
'{{schemasCount}} shema u ovom dijagramu. Trenutno prikazano: {{formattedSchemas}}.',
|
||||
show_me: 'Prikaži mi',
|
||||
none: 'nijedna',
|
||||
},
|
||||
|
||||
copy_to_clipboard_toast: {
|
||||
unsupported: {
|
||||
title: 'Kopiranje neuspješno',
|
||||
description: 'Međuspremnik nije podržan.',
|
||||
},
|
||||
failed: {
|
||||
title: 'Kopiranje neuspješno',
|
||||
description: 'Nešto je pošlo po zlu. Molimo pokušajte ponovno.',
|
||||
},
|
||||
},
|
||||
|
||||
theme: {
|
||||
system: 'Sustav',
|
||||
light: 'Svijetla',
|
||||
dark: 'Tamna',
|
||||
},
|
||||
|
||||
zoom: {
|
||||
on: 'Uključeno',
|
||||
off: 'Isključeno',
|
||||
},
|
||||
|
||||
last_saved: 'Zadnje spremljeno',
|
||||
saved: 'Spremljeno',
|
||||
loading_diagram: 'Učitavanje dijagrama...',
|
||||
deselect_all: 'Odznači sve',
|
||||
select_all: 'Označi sve',
|
||||
clear: 'Očisti',
|
||||
show_more: 'Prikaži više',
|
||||
show_less: 'Prikaži manje',
|
||||
copy_to_clipboard: 'Kopiraj u međuspremnik',
|
||||
copied: 'Kopirano!',
|
||||
|
||||
side_panel: {
|
||||
schema: 'Shema:',
|
||||
filter_by_schema: 'Filtriraj po shemi',
|
||||
search_schema: 'Pretraži shemu...',
|
||||
no_schemas_found: 'Nema pronađenih shema.',
|
||||
view_all_options: 'Prikaži sve opcije...',
|
||||
tables_section: {
|
||||
tables: 'Tablice',
|
||||
add_table: 'Dodaj tablicu',
|
||||
filter: 'Filtriraj',
|
||||
collapse: 'Sažmi sve',
|
||||
clear: 'Očisti filter',
|
||||
no_results:
|
||||
'Nema pronađenih tablica koje odgovaraju vašem filteru.',
|
||||
show_list: 'Prikaži popis tablica',
|
||||
show_dbml: 'Prikaži DBML uređivač',
|
||||
|
||||
table: {
|
||||
fields: 'Polja',
|
||||
nullable: 'Može biti null?',
|
||||
primary_key: 'Primarni ključ',
|
||||
indexes: 'Indeksi',
|
||||
comments: 'Komentari',
|
||||
no_comments: 'Nema komentara',
|
||||
add_field: 'Dodaj polje',
|
||||
add_index: 'Dodaj indeks',
|
||||
index_select_fields: 'Odaberi polja',
|
||||
no_types_found: 'Nema pronađenih tipova',
|
||||
field_name: 'Naziv',
|
||||
field_type: 'Tip',
|
||||
field_actions: {
|
||||
title: 'Atributi polja',
|
||||
unique: 'Jedinstven',
|
||||
character_length: 'Maksimalna dužina',
|
||||
precision: 'Preciznost',
|
||||
scale: 'Skala',
|
||||
comments: 'Komentari',
|
||||
no_comments: 'Nema komentara',
|
||||
default_value: 'Zadana vrijednost',
|
||||
no_default: 'Nema zadane vrijednosti',
|
||||
delete_field: 'Izbriši polje',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Atributi indeksa',
|
||||
name: 'Naziv',
|
||||
unique: 'Jedinstven',
|
||||
delete_index: 'Izbriši indeks',
|
||||
},
|
||||
table_actions: {
|
||||
title: 'Radnje nad tablicom',
|
||||
change_schema: 'Promijeni shemu',
|
||||
add_field: 'Dodaj polje',
|
||||
add_index: 'Dodaj indeks',
|
||||
duplicate_table: 'Dupliciraj tablicu',
|
||||
delete_table: 'Izbriši tablicu',
|
||||
},
|
||||
},
|
||||
empty_state: {
|
||||
title: 'Nema tablica',
|
||||
description: 'Stvorite tablicu za početak',
|
||||
},
|
||||
},
|
||||
relationships_section: {
|
||||
relationships: 'Veze',
|
||||
filter: 'Filtriraj',
|
||||
add_relationship: 'Dodaj vezu',
|
||||
collapse: 'Sažmi sve',
|
||||
relationship: {
|
||||
primary: 'Primarna tablica',
|
||||
foreign: 'Referentna tablica',
|
||||
cardinality: 'Kardinalnost',
|
||||
delete_relationship: 'Izbriši',
|
||||
relationship_actions: {
|
||||
title: 'Radnje',
|
||||
delete_relationship: 'Izbriši',
|
||||
},
|
||||
},
|
||||
empty_state: {
|
||||
title: 'Nema veza',
|
||||
description: 'Stvorite vezu za povezivanje tablica',
|
||||
},
|
||||
},
|
||||
dependencies_section: {
|
||||
dependencies: 'Ovisnosti',
|
||||
filter: 'Filtriraj',
|
||||
collapse: 'Sažmi sve',
|
||||
dependency: {
|
||||
table: 'Tablica',
|
||||
dependent_table: 'Ovisni pogled',
|
||||
delete_dependency: 'Izbriši',
|
||||
dependency_actions: {
|
||||
title: 'Radnje',
|
||||
delete_dependency: 'Izbriši',
|
||||
},
|
||||
},
|
||||
empty_state: {
|
||||
title: 'Nema ovisnosti',
|
||||
description: 'Stvorite pogled za početak',
|
||||
},
|
||||
},
|
||||
|
||||
areas_section: {
|
||||
areas: 'Područja',
|
||||
add_area: 'Dodaj područje',
|
||||
filter: 'Filtriraj',
|
||||
clear: 'Očisti filter',
|
||||
no_results:
|
||||
'Nema pronađenih područja koja odgovaraju vašem filteru.',
|
||||
|
||||
area: {
|
||||
area_actions: {
|
||||
title: 'Radnje nad područjem',
|
||||
edit_name: 'Uredi naziv',
|
||||
delete_area: 'Izbriši područje',
|
||||
},
|
||||
},
|
||||
empty_state: {
|
||||
title: 'Nema područja',
|
||||
description: 'Stvorite područje za početak',
|
||||
},
|
||||
},
|
||||
|
||||
custom_types_section: {
|
||||
custom_types: 'Prilagođeni tipovi',
|
||||
filter: 'Filtriraj',
|
||||
clear: 'Očisti filter',
|
||||
no_results:
|
||||
'Nema pronađenih prilagođenih tipova koji odgovaraju vašem filteru.',
|
||||
empty_state: {
|
||||
title: 'Nema prilagođenih tipova',
|
||||
description:
|
||||
'Prilagođeni tipovi će se pojaviti ovdje kada budu dostupni u vašoj bazi podataka',
|
||||
},
|
||||
custom_type: {
|
||||
kind: 'Vrsta',
|
||||
enum_values: 'Enum vrijednosti',
|
||||
composite_fields: 'Polja',
|
||||
no_fields: 'Nema definiranih polja',
|
||||
field_name_placeholder: 'Naziv polja',
|
||||
field_type_placeholder: 'Odaberi tip',
|
||||
add_field: 'Dodaj polje',
|
||||
no_fields_tooltip:
|
||||
'Nema definiranih polja za ovaj prilagođeni tip',
|
||||
custom_type_actions: {
|
||||
title: 'Radnje',
|
||||
highlight_fields: 'Istakni polja',
|
||||
clear_field_highlight: 'Ukloni isticanje',
|
||||
delete_custom_type: 'Izbriši',
|
||||
},
|
||||
delete_custom_type: 'Izbriši tip',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
toolbar: {
|
||||
zoom_in: 'Uvećaj',
|
||||
zoom_out: 'Smanji',
|
||||
save: 'Spremi',
|
||||
show_all: 'Prikaži sve',
|
||||
undo: 'Poništi',
|
||||
redo: 'Ponovi',
|
||||
reorder_diagram: 'Preuredi dijagram',
|
||||
highlight_overlapping_tables: 'Istakni preklapajuće tablice',
|
||||
clear_custom_type_highlight: 'Ukloni isticanje za "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Isticanje "{{typeName}}" - Kliknite za uklanjanje',
|
||||
filter: 'Filtriraj tablice',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
database_selection: {
|
||||
title: 'Koja je vaša baza podataka?',
|
||||
description:
|
||||
'Svaka baza podataka ima svoje jedinstvene značajke i mogućnosti.',
|
||||
check_examples_long: 'Pogledaj primjere',
|
||||
check_examples_short: 'Primjeri',
|
||||
},
|
||||
|
||||
import_database: {
|
||||
title: 'Uvezite svoju bazu podataka',
|
||||
database_edition: 'Verzija baze podataka:',
|
||||
step_1: 'Pokrenite ovu skriptu u svojoj bazi podataka:',
|
||||
step_2: 'Zalijepite rezultat skripte u ovaj dio →',
|
||||
script_results_placeholder: 'Rezultati skripte ovdje...',
|
||||
ssms_instructions: {
|
||||
button_text: 'SSMS upute',
|
||||
title: 'Upute',
|
||||
step_1: 'Idite na Tools > Options > Query Results > SQL Server.',
|
||||
step_2: 'Ako koristite "Results to Grid," promijenite Maximum Characters Retrieved za Non-XML podatke (postavite na 9999999).',
|
||||
},
|
||||
instructions_link: 'Trebate pomoć? Pogledajte kako',
|
||||
check_script_result: 'Provjeri rezultat skripte',
|
||||
},
|
||||
|
||||
cancel: 'Odustani',
|
||||
import_from_file: 'Uvezi iz datoteke',
|
||||
back: 'Natrag',
|
||||
empty_diagram: 'Prazan dijagram',
|
||||
continue: 'Nastavi',
|
||||
import: 'Uvezi',
|
||||
},
|
||||
|
||||
open_diagram_dialog: {
|
||||
title: 'Otvori dijagram',
|
||||
description: 'Odaberite dijagram za otvaranje iz popisa ispod.',
|
||||
table_columns: {
|
||||
name: 'Naziv',
|
||||
created_at: 'Stvoreno',
|
||||
last_modified: 'Zadnje izmijenjeno',
|
||||
tables_count: 'Tablice',
|
||||
},
|
||||
cancel: 'Odustani',
|
||||
open: 'Otvori',
|
||||
},
|
||||
|
||||
export_sql_dialog: {
|
||||
title: 'Izvezi SQL',
|
||||
description:
|
||||
'Izvezite shemu vašeg dijagrama u {{databaseType}} skriptu',
|
||||
close: 'Zatvori',
|
||||
loading: {
|
||||
text: 'AI generira SQL za {{databaseType}}...',
|
||||
description: 'Ovo bi trebalo potrajati do 30 sekundi.',
|
||||
},
|
||||
error: {
|
||||
message:
|
||||
'Greška pri generiranju SQL skripte. Molimo pokušajte ponovno kasnije ili <0>kontaktirajte nas</0>.',
|
||||
description:
|
||||
'Slobodno koristite svoj OPENAI_TOKEN, pogledajte priručnik <0>ovdje</0>.',
|
||||
},
|
||||
},
|
||||
|
||||
create_relationship_dialog: {
|
||||
title: 'Kreiraj vezu',
|
||||
primary_table: 'Primarna tablica',
|
||||
primary_field: 'Primarno polje',
|
||||
referenced_table: 'Referentna tablica',
|
||||
referenced_field: 'Referentno polje',
|
||||
primary_table_placeholder: 'Odaberi tablicu',
|
||||
primary_field_placeholder: 'Odaberi polje',
|
||||
referenced_table_placeholder: 'Odaberi tablicu',
|
||||
referenced_field_placeholder: 'Odaberi polje',
|
||||
no_tables_found: 'Nema pronađenih tablica',
|
||||
no_fields_found: 'Nema pronađenih polja',
|
||||
create: 'Kreiraj',
|
||||
cancel: 'Odustani',
|
||||
},
|
||||
|
||||
import_database_dialog: {
|
||||
title: 'Uvezi u trenutni dijagram',
|
||||
override_alert: {
|
||||
title: 'Uvezi bazu podataka',
|
||||
content: {
|
||||
alert: 'Uvoz ovog dijagrama će utjecati na postojeće tablice i veze.',
|
||||
new_tables:
|
||||
'<bold>{{newTablesNumber}}</bold> novih tablica će biti dodano.',
|
||||
new_relationships:
|
||||
'<bold>{{newRelationshipsNumber}}</bold> novih veza će biti stvoreno.',
|
||||
tables_override:
|
||||
'<bold>{{tablesOverrideNumber}}</bold> tablica će biti prepisano.',
|
||||
proceed: 'Želite li nastaviti?',
|
||||
},
|
||||
import: 'Uvezi',
|
||||
cancel: 'Odustani',
|
||||
},
|
||||
},
|
||||
|
||||
export_image_dialog: {
|
||||
title: 'Izvezi sliku',
|
||||
description: 'Odaberite faktor veličine za izvoz:',
|
||||
scale_1x: '1x Obično',
|
||||
scale_2x: '2x (Preporučeno)',
|
||||
scale_3x: '3x',
|
||||
scale_4x: '4x',
|
||||
cancel: 'Odustani',
|
||||
export: 'Izvezi',
|
||||
advanced_options: 'Napredne opcije',
|
||||
pattern: 'Uključi pozadinski uzorak',
|
||||
pattern_description: 'Dodaj suptilni mrežni uzorak u pozadinu.',
|
||||
transparent: 'Prozirna pozadina',
|
||||
transparent_description: 'Ukloni boju pozadine iz slike.',
|
||||
},
|
||||
|
||||
new_table_schema_dialog: {
|
||||
title: 'Odaberi shemu',
|
||||
description:
|
||||
'Trenutno je prikazano više shema. Odaberite jednu za novu tablicu.',
|
||||
cancel: 'Odustani',
|
||||
confirm: 'Potvrdi',
|
||||
},
|
||||
|
||||
update_table_schema_dialog: {
|
||||
title: 'Promijeni shemu',
|
||||
description: 'Ažuriraj shemu tablice "{{tableName}}"',
|
||||
cancel: 'Odustani',
|
||||
confirm: 'Promijeni',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'Stvori novu shemu',
|
||||
description:
|
||||
'Još ne postoje sheme. Stvorite svoju prvu shemu za organiziranje tablica.',
|
||||
create: 'Stvori',
|
||||
cancel: 'Odustani',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'Pomozite nam da se poboljšamo!',
|
||||
description:
|
||||
'Želite li nam dati zvjezdicu na GitHubu? Samo je jedan klik!',
|
||||
close: 'Ne sada',
|
||||
confirm: 'Naravno!',
|
||||
},
|
||||
export_diagram_dialog: {
|
||||
title: 'Izvezi dijagram',
|
||||
description: 'Odaberite format za izvoz:',
|
||||
format_json: 'JSON',
|
||||
cancel: 'Odustani',
|
||||
export: 'Izvezi',
|
||||
error: {
|
||||
title: 'Greška pri izvozu dijagrama',
|
||||
description:
|
||||
'Nešto je pošlo po zlu. Trebate pomoć? support@chartdb.io',
|
||||
},
|
||||
},
|
||||
|
||||
import_diagram_dialog: {
|
||||
title: 'Uvezi dijagram',
|
||||
description: 'Uvezite dijagram iz JSON datoteke.',
|
||||
cancel: 'Odustani',
|
||||
import: 'Uvezi',
|
||||
error: {
|
||||
title: 'Greška pri uvozu dijagrama',
|
||||
description:
|
||||
'JSON dijagrama je nevažeći. Molimo provjerite JSON i pokušajte ponovno. Trebate pomoć? support@chartdb.io',
|
||||
},
|
||||
},
|
||||
|
||||
import_dbml_dialog: {
|
||||
example_title: 'Uvezi primjer DBML-a',
|
||||
title: 'Uvezi DBML',
|
||||
description: 'Uvezite shemu baze podataka iz DBML formata.',
|
||||
import: 'Uvezi',
|
||||
cancel: 'Odustani',
|
||||
skip_and_empty: 'Preskoči i isprazni',
|
||||
show_example: 'Prikaži primjer',
|
||||
error: {
|
||||
title: 'Greška pri uvozu DBML-a',
|
||||
description:
|
||||
'Neuspješno parsiranje DBML-a. Molimo provjerite sintaksu.',
|
||||
},
|
||||
},
|
||||
relationship_type: {
|
||||
one_to_one: 'Jedan na jedan',
|
||||
one_to_many: 'Jedan na više',
|
||||
many_to_one: 'Više na jedan',
|
||||
many_to_many: 'Više na više',
|
||||
},
|
||||
|
||||
canvas_context_menu: {
|
||||
new_table: 'Nova tablica',
|
||||
new_relationship: 'Nova veza',
|
||||
new_area: 'Novo područje',
|
||||
},
|
||||
|
||||
table_node_context_menu: {
|
||||
edit_table: 'Uredi tablicu',
|
||||
duplicate_table: 'Dupliciraj tablicu',
|
||||
delete_table: 'Izbriši tablicu',
|
||||
add_relationship: 'Dodaj vezu',
|
||||
},
|
||||
|
||||
snap_to_grid_tooltip: 'Priljepljivanje na mrežu (Drži {{key}})',
|
||||
|
||||
tool_tips: {
|
||||
double_click_to_edit: 'Dvostruki klik za uređivanje',
|
||||
},
|
||||
|
||||
language_select: {
|
||||
change_language: 'Jezik',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
export const hrMetadata: LanguageMetadata = {
|
||||
name: 'Croatian',
|
||||
nativeName: 'Hrvatski',
|
||||
code: 'hr',
|
||||
};
|
||||
@@ -26,6 +26,8 @@ export const id_ID: LanguageTranslation = {
|
||||
hide_sidebar: 'Sembunyikan Sidebar',
|
||||
hide_cardinality: 'Sembunyikan Kardinalitas',
|
||||
show_cardinality: 'Tampilkan Kardinalitas',
|
||||
hide_field_attributes: 'Sembunyikan Atribut Kolom',
|
||||
show_field_attributes: 'Tampilkan Atribut Kolom',
|
||||
zoom_on_scroll: 'Perbesar saat Scroll',
|
||||
theme: 'Tema',
|
||||
show_dependencies: 'Tampilkan Dependensi',
|
||||
@@ -154,6 +156,8 @@ export const id_ID: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'Presisi',
|
||||
scale: 'Skala',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Atribut Indeks',
|
||||
@@ -253,9 +257,12 @@ export const id_ID: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -270,6 +277,10 @@ export const id_ID: LanguageTranslation = {
|
||||
undo: 'Undo',
|
||||
redo: 'Redo',
|
||||
reorder_diagram: 'Atur Ulang Diagram',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'Sorot Tabel yang Tumpang Tindih',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -404,6 +415,14 @@ export const id_ID: LanguageTranslation = {
|
||||
confirm: 'Ubah',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'Buat Skema Baru',
|
||||
description:
|
||||
'Belum ada skema yang tersedia. Buat skema pertama Anda untuk mengatur tabel-tabel Anda.',
|
||||
create: 'Buat',
|
||||
cancel: 'Batal',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'Bantu kami meningkatkan!',
|
||||
description:
|
||||
|
||||
@@ -26,6 +26,8 @@ export const ja: LanguageTranslation = {
|
||||
hide_sidebar: 'サイドバーを非表示',
|
||||
hide_cardinality: 'カーディナリティを非表示',
|
||||
show_cardinality: 'カーディナリティを表示',
|
||||
hide_field_attributes: 'フィールド属性を非表示',
|
||||
show_field_attributes: 'フィールド属性を表示',
|
||||
zoom_on_scroll: 'スクロールでズーム',
|
||||
theme: 'テーマ',
|
||||
// TODO: Translate
|
||||
@@ -158,6 +160,8 @@ export const ja: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: '精度',
|
||||
scale: '小数点以下桁数',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'インデックス属性',
|
||||
@@ -259,9 +263,12 @@ export const ja: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -278,7 +285,9 @@ export const ja: LanguageTranslation = {
|
||||
reorder_diagram: 'ダイアグラムを並べ替え',
|
||||
// TODO: Translate
|
||||
highlight_overlapping_tables: 'Highlight Overlapping Tables',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear', // TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
@@ -413,6 +422,14 @@ export const ja: LanguageTranslation = {
|
||||
confirm: '変更',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: '新しいスキーマを作成',
|
||||
description:
|
||||
'スキーマがまだ存在しません。テーブルを整理するために最初のスキーマを作成してください。',
|
||||
create: '作成',
|
||||
cancel: 'キャンセル',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: '改善をサポートしてください!',
|
||||
description:
|
||||
|
||||
@@ -26,6 +26,8 @@ export const ko_KR: LanguageTranslation = {
|
||||
hide_sidebar: '사이드바 숨기기',
|
||||
hide_cardinality: '카디널리티 숨기기',
|
||||
show_cardinality: '카디널리티 보이기',
|
||||
hide_field_attributes: '필드 속성 숨기기',
|
||||
show_field_attributes: '필드 속성 보이기',
|
||||
zoom_on_scroll: '스크롤 시 확대',
|
||||
theme: '테마',
|
||||
show_dependencies: '종속성 보이기',
|
||||
@@ -154,6 +156,8 @@ export const ko_KR: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: '정밀도',
|
||||
scale: '소수점 자릿수',
|
||||
},
|
||||
index_actions: {
|
||||
title: '인덱스 속성',
|
||||
@@ -253,9 +257,12 @@ export const ko_KR: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -270,6 +277,10 @@ export const ko_KR: LanguageTranslation = {
|
||||
undo: '실행 취소',
|
||||
redo: '다시 실행',
|
||||
reorder_diagram: '다이어그램 재정렬',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: '겹치는 테이블 강조 표시',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -404,6 +415,14 @@ export const ko_KR: LanguageTranslation = {
|
||||
confirm: '변경',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: '새 스키마 생성',
|
||||
description:
|
||||
'아직 스키마가 없습니다. 테이블을 정리하기 위해 첫 번째 스키마를 생성하세요.',
|
||||
create: '생성',
|
||||
cancel: '취소',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: '개선할 수 있도록 도와주세요!',
|
||||
description:
|
||||
|
||||
@@ -26,6 +26,8 @@ export const mr: LanguageTranslation = {
|
||||
hide_sidebar: 'साइडबार लपवा',
|
||||
hide_cardinality: 'कार्डिनॅलिटी लपवा',
|
||||
show_cardinality: 'कार्डिनॅलिटी दाखवा',
|
||||
hide_field_attributes: 'फील्ड गुणधर्म लपवा',
|
||||
show_field_attributes: 'फील्ड गुणधर्म दाखवा',
|
||||
zoom_on_scroll: 'स्क्रोलवर झूम करा',
|
||||
theme: 'थीम',
|
||||
show_dependencies: 'डिपेंडेन्सि दाखवा',
|
||||
@@ -157,6 +159,8 @@ export const mr: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'अचूकता',
|
||||
scale: 'प्रमाण',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'इंडेक्स गुणधर्म',
|
||||
@@ -258,9 +262,12 @@ export const mr: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -275,6 +282,10 @@ export const mr: LanguageTranslation = {
|
||||
undo: 'पूर्ववत करा',
|
||||
redo: 'पुन्हा करा',
|
||||
reorder_diagram: 'आरेख पुनःक्रमित करा',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'ओव्हरलॅपिंग टेबल्स हायलाइट करा',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -412,6 +423,14 @@ export const mr: LanguageTranslation = {
|
||||
confirm: 'बदला',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'नवीन स्कीमा तयार करा',
|
||||
description:
|
||||
'अजून कोणतीही स्कीमा अस्तित्वात नाही. आपल्या टेबल्स व्यवस्थित करण्यासाठी आपली पहिली स्कीमा तयार करा.',
|
||||
create: 'तयार करा',
|
||||
cancel: 'रद्द करा',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'आम्हाला सुधारण्यास मदत करा!',
|
||||
description:
|
||||
|
||||
@@ -26,6 +26,8 @@ export const ne: LanguageTranslation = {
|
||||
hide_sidebar: 'साइडबार लुकाउनुहोस्',
|
||||
hide_cardinality: 'कार्डिन्यालिटी लुकाउनुहोस्',
|
||||
show_cardinality: 'कार्डिन्यालिटी देखाउनुहोस्',
|
||||
hide_field_attributes: 'फिल्ड विशेषताहरू लुकाउनुहोस्',
|
||||
show_field_attributes: 'फिल्ड विशेषताहरू देखाउनुहोस्',
|
||||
zoom_on_scroll: 'स्क्रोलमा जुम गर्नुहोस्',
|
||||
theme: 'थिम',
|
||||
show_dependencies: 'डिपेन्डेन्सीहरू देखाउनुहोस्',
|
||||
@@ -155,6 +157,8 @@ export const ne: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'परिशुद्धता',
|
||||
scale: 'स्केल',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'सूचक विशेषताहरू',
|
||||
@@ -255,9 +259,12 @@ export const ne: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -272,6 +279,10 @@ export const ne: LanguageTranslation = {
|
||||
undo: 'पूर्ववत',
|
||||
redo: 'पुनः गर्नुहोस्',
|
||||
reorder_diagram: 'पुनः क्रमबद्ध गर्नुहोस्',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables:
|
||||
'अतिरिक्त तालिकाहरू हाइलाइट गर्नुहोस्',
|
||||
// TODO: Translate
|
||||
@@ -409,6 +420,14 @@ export const ne: LanguageTranslation = {
|
||||
confirm: 'परिवर्तन गर्नुहोस्',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'नयाँ स्कीम सिर्जना गर्नुहोस्',
|
||||
description:
|
||||
'अहिलेसम्म कुनै स्कीम अस्तित्वमा छैन। आफ्ना तालिकाहरू व्यवस्थित गर्न आफ्नो पहिलो स्कीम सिर्जना गर्नुहोस्।',
|
||||
create: 'सिर्जना गर्नुहोस्',
|
||||
cancel: 'रद्द गर्नुहोस्',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'हामीलाई अझ राम्रो हुन मदत गर्नुहोस!',
|
||||
description:
|
||||
|
||||
@@ -26,6 +26,8 @@ export const pt_BR: LanguageTranslation = {
|
||||
hide_sidebar: 'Ocultar Barra Lateral',
|
||||
hide_cardinality: 'Ocultar Cardinalidade',
|
||||
show_cardinality: 'Mostrar Cardinalidade',
|
||||
hide_field_attributes: 'Ocultar Atributos de Campo',
|
||||
show_field_attributes: 'Mostrar Atributos de Campo',
|
||||
zoom_on_scroll: 'Zoom ao Rolar',
|
||||
theme: 'Tema',
|
||||
show_dependencies: 'Mostrar Dependências',
|
||||
@@ -155,6 +157,8 @@ export const pt_BR: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'Precisão',
|
||||
scale: 'Escala',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Atributos do Índice',
|
||||
@@ -254,9 +258,12 @@ export const pt_BR: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -271,6 +278,10 @@ export const pt_BR: LanguageTranslation = {
|
||||
undo: 'Desfazer',
|
||||
redo: 'Refazer',
|
||||
reorder_diagram: 'Reordenar Diagrama',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'Destacar Tabelas Sobrepostas',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -407,6 +418,14 @@ export const pt_BR: LanguageTranslation = {
|
||||
confirm: 'Alterar',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'Criar Novo Esquema',
|
||||
description:
|
||||
'Ainda não existem esquemas. Crie seu primeiro esquema para organizar suas tabelas.',
|
||||
create: 'Criar',
|
||||
cancel: 'Cancelar',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'Ajude-nos a melhorar!',
|
||||
description:
|
||||
|
||||
@@ -26,6 +26,8 @@ export const ru: LanguageTranslation = {
|
||||
hide_sidebar: 'Скрыть боковую панель',
|
||||
hide_cardinality: 'Скрыть виды связи',
|
||||
show_cardinality: 'Показать виды связи',
|
||||
show_field_attributes: 'Показать атрибуты поля',
|
||||
hide_field_attributes: 'Скрыть атрибуты поля',
|
||||
zoom_on_scroll: 'Увеличение при прокрутке',
|
||||
theme: 'Тема',
|
||||
show_dependencies: 'Показать зависимости',
|
||||
@@ -151,6 +153,8 @@ export const ru: LanguageTranslation = {
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
character_length: 'Макс. длина',
|
||||
precision: 'Точность',
|
||||
scale: 'Масштаб',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Атрибуты индекса',
|
||||
@@ -251,9 +255,12 @@ export const ru: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -268,6 +275,10 @@ export const ru: LanguageTranslation = {
|
||||
undo: 'Отменить',
|
||||
redo: 'Вернуть',
|
||||
reorder_diagram: 'Переупорядочить диаграмму',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'Выделение перекрывающихся таблиц',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -404,6 +415,14 @@ export const ru: LanguageTranslation = {
|
||||
confirm: 'Изменить',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'Создать новую схему',
|
||||
description:
|
||||
'Схемы еще не существуют. Создайте вашу первую схему, чтобы организовать таблицы.',
|
||||
create: 'Создать',
|
||||
cancel: 'Отменить',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'Помогите нам стать лучше!',
|
||||
description:
|
||||
|
||||
@@ -26,6 +26,8 @@ export const te: LanguageTranslation = {
|
||||
hide_sidebar: 'సైడ్బార్ దాచండి',
|
||||
hide_cardinality: 'కార్డినాలిటీని దాచండి',
|
||||
show_cardinality: 'కార్డినాలిటీని చూపించండి',
|
||||
show_field_attributes: 'ఫీల్డ్ గుణాలను చూపించు',
|
||||
hide_field_attributes: 'ఫీల్డ్ గుణాలను దాచండి',
|
||||
zoom_on_scroll: 'స్క్రోల్పై జూమ్',
|
||||
theme: 'థీమ్',
|
||||
show_dependencies: 'ఆధారాలు చూపించండి',
|
||||
@@ -155,6 +157,8 @@ export const te: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'సూక్ష్మత',
|
||||
scale: 'స్కేల్',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'ఇండెక్స్ గుణాలు',
|
||||
@@ -255,9 +259,12 @@ export const te: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -272,6 +279,10 @@ export const te: LanguageTranslation = {
|
||||
undo: 'తిరిగి చేయు',
|
||||
redo: 'మరలా చేయు',
|
||||
reorder_diagram: 'చిత్రాన్ని పునఃసరిచేయండి',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'అవకాశించు పట్టికలను హైలైట్ చేయండి',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -408,6 +419,14 @@ export const te: LanguageTranslation = {
|
||||
confirm: 'మార్చు',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'కొత్త స్కీమా సృష్టించండి',
|
||||
description:
|
||||
'ఇంకా ఏ స్కీమాలు లేవు. మీ పట్టికలను వ్యవస్థీకరించడానికి మీ మొదటి స్కీమాను సృష్టించండి.',
|
||||
create: 'సృష్టించు',
|
||||
cancel: 'రద్దు',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'మా సహాయంతో మెరుగుపరచండి!',
|
||||
description:
|
||||
|
||||
@@ -26,6 +26,8 @@ export const tr: LanguageTranslation = {
|
||||
hide_sidebar: 'Kenar Çubuğunu Gizle',
|
||||
hide_cardinality: 'Kardinaliteyi Gizle',
|
||||
show_cardinality: 'Kardinaliteyi Göster',
|
||||
show_field_attributes: 'Alan Özelliklerini Göster',
|
||||
hide_field_attributes: 'Alan Özelliklerini Gizle',
|
||||
zoom_on_scroll: 'Kaydırarak Yakınlaştır',
|
||||
theme: 'Tema',
|
||||
show_dependencies: 'Bağımlılıkları Göster',
|
||||
@@ -154,6 +156,8 @@ export const tr: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'Hassasiyet',
|
||||
scale: 'Ölçek',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'İndeks Özellikleri',
|
||||
@@ -254,9 +258,12 @@ export const tr: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -270,6 +277,10 @@ export const tr: LanguageTranslation = {
|
||||
undo: 'Geri Al',
|
||||
redo: 'Yinele',
|
||||
reorder_diagram: 'Diyagramı Yeniden Sırala',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'Çakışan Tabloları Vurgula',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -397,6 +408,14 @@ export const tr: LanguageTranslation = {
|
||||
cancel: 'İptal',
|
||||
confirm: 'Değiştir',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'Yeni Şema Oluştur',
|
||||
description:
|
||||
'Henüz hiç şema mevcut değil. Tablolarınızı düzenlemek için ilk şemanızı oluşturun.',
|
||||
create: 'Oluştur',
|
||||
cancel: 'İptal',
|
||||
},
|
||||
star_us_dialog: {
|
||||
title: 'Bize yardım et!',
|
||||
description:
|
||||
|
||||
@@ -26,6 +26,8 @@ export const uk: LanguageTranslation = {
|
||||
hide_sidebar: 'Приховати бічну панель',
|
||||
hide_cardinality: 'Приховати потужність',
|
||||
show_cardinality: 'Показати кардинальність',
|
||||
show_field_attributes: 'Показати атрибути полів',
|
||||
hide_field_attributes: 'Приховати атрибути полів',
|
||||
zoom_on_scroll: 'Масштабувати прокручуванням',
|
||||
theme: 'Тема',
|
||||
show_dependencies: 'Показати залежності',
|
||||
@@ -153,6 +155,8 @@ export const uk: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'Точність',
|
||||
scale: 'Масштаб',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Атрибути індексу',
|
||||
@@ -252,9 +256,12 @@ export const uk: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -269,6 +276,10 @@ export const uk: LanguageTranslation = {
|
||||
undo: 'Скасувати',
|
||||
redo: 'Повторити',
|
||||
reorder_diagram: 'Перевпорядкувати діаграму',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'Показати таблиці, що перекриваються',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -405,6 +416,14 @@ export const uk: LanguageTranslation = {
|
||||
confirm: 'Змінити',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'Створити нову схему',
|
||||
description:
|
||||
'Поки що не існує жодної схеми. Створіть свою першу схему, щоб організувати ваші таблиці.',
|
||||
create: 'Створити',
|
||||
cancel: 'Скасувати',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'Допоможіть нам покращитися!',
|
||||
description: 'Поставне на зірку на GitHub? Це лише один клік!',
|
||||
|
||||
@@ -26,6 +26,8 @@ export const vi: LanguageTranslation = {
|
||||
hide_sidebar: 'Ẩn thanh bên',
|
||||
hide_cardinality: 'Ẩn số lượng',
|
||||
show_cardinality: 'Hiển thị số lượng',
|
||||
show_field_attributes: 'Hiển thị thuộc tính trường',
|
||||
hide_field_attributes: 'Ẩn thuộc tính trường',
|
||||
zoom_on_scroll: 'Thu phóng khi cuộn',
|
||||
theme: 'Chủ đề',
|
||||
show_dependencies: 'Hiển thị các phụ thuộc',
|
||||
@@ -154,6 +156,8 @@ export const vi: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: 'Độ chính xác',
|
||||
scale: 'Tỷ lệ',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Thuộc tính chỉ mục',
|
||||
@@ -253,9 +257,12 @@ export const vi: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -270,6 +277,10 @@ export const vi: LanguageTranslation = {
|
||||
undo: 'Hoàn tác',
|
||||
redo: 'Làm lại',
|
||||
reorder_diagram: 'Sắp xếp lại sơ đồ',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: 'Làm nổi bật các bảng chồng chéo',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -404,6 +415,14 @@ export const vi: LanguageTranslation = {
|
||||
confirm: 'Xác nhận',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: 'Tạo lược đồ mới',
|
||||
description:
|
||||
'Chưa có lược đồ nào. Tạo lược đồ đầu tiên của bạn để tổ chức các bảng.',
|
||||
create: 'Tạo',
|
||||
cancel: 'Hủy',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: 'Hãy giúp chúng tôi cải thiện!',
|
||||
description:
|
||||
|
||||
@@ -26,6 +26,8 @@ export const zh_CN: LanguageTranslation = {
|
||||
hide_sidebar: '隐藏侧边栏',
|
||||
hide_cardinality: '隐藏基数',
|
||||
show_cardinality: '展示基数',
|
||||
show_field_attributes: '展示字段属性',
|
||||
hide_field_attributes: '隐藏字段属性',
|
||||
zoom_on_scroll: '滚动缩放',
|
||||
theme: '主题',
|
||||
show_dependencies: '展示依赖',
|
||||
@@ -151,6 +153,8 @@ export const zh_CN: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: '精度',
|
||||
scale: '小数位',
|
||||
},
|
||||
index_actions: {
|
||||
title: '索引属性',
|
||||
@@ -250,9 +254,12 @@ export const zh_CN: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -267,6 +274,10 @@ export const zh_CN: LanguageTranslation = {
|
||||
undo: '撤销',
|
||||
redo: '重做',
|
||||
reorder_diagram: '重新排列关系图',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: '突出显示重叠的表',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -400,6 +411,13 @@ export const zh_CN: LanguageTranslation = {
|
||||
confirm: '更改',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: '创建新模式',
|
||||
description: '尚未存在任何模式。创建您的第一个模式来组织您的表。',
|
||||
create: '创建',
|
||||
cancel: '取消',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: '帮助我们改进!',
|
||||
description: '您想在 GitHub 上为我们加注星标吗?只需点击一下即可!',
|
||||
|
||||
@@ -26,6 +26,8 @@ export const zh_TW: LanguageTranslation = {
|
||||
hide_sidebar: '隱藏側邊欄',
|
||||
hide_cardinality: '隱藏基數',
|
||||
show_cardinality: '顯示基數',
|
||||
hide_field_attributes: '隱藏欄位屬性',
|
||||
show_field_attributes: '顯示欄位屬性',
|
||||
zoom_on_scroll: '滾動縮放',
|
||||
theme: '主題',
|
||||
show_dependencies: '顯示相依性',
|
||||
@@ -151,6 +153,8 @@ export const zh_TW: LanguageTranslation = {
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
precision: '精度',
|
||||
scale: '小數位',
|
||||
},
|
||||
index_actions: {
|
||||
title: '索引屬性',
|
||||
@@ -250,9 +254,12 @@ export const zh_TW: LanguageTranslation = {
|
||||
field_name_placeholder: 'Field name',
|
||||
field_type_placeholder: 'Select type',
|
||||
add_field: 'Add Field',
|
||||
no_fields_tooltip: 'No fields defined for this custom type',
|
||||
custom_type_actions: {
|
||||
title: 'Actions',
|
||||
highlight_fields: 'Highlight Fields',
|
||||
delete_custom_type: 'Delete',
|
||||
clear_field_highlight: 'Clear Highlight',
|
||||
},
|
||||
delete_custom_type: 'Delete Type',
|
||||
},
|
||||
@@ -267,6 +274,10 @@ export const zh_TW: LanguageTranslation = {
|
||||
undo: '復原',
|
||||
redo: '重做',
|
||||
reorder_diagram: '重新排列圖表',
|
||||
// TODO: Translate
|
||||
clear_custom_type_highlight: 'Clear highlight for "{{typeName}}"',
|
||||
custom_type_highlight_tooltip:
|
||||
'Highlighting "{{typeName}}" - Click to clear',
|
||||
highlight_overlapping_tables: '突出顯示重疊表格',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
@@ -399,6 +410,14 @@ export const zh_TW: LanguageTranslation = {
|
||||
confirm: '變更',
|
||||
},
|
||||
|
||||
create_table_schema_dialog: {
|
||||
title: '建立新 Schema',
|
||||
description:
|
||||
'尚未存在任何 Schema。建立您的第一個 Schema 來組織您的表格。',
|
||||
create: '建立',
|
||||
cancel: '取消',
|
||||
},
|
||||
|
||||
star_us_dialog: {
|
||||
title: '協助我們改善!',
|
||||
description: '請在 GitHub 上給我們一顆星,只需點擊一下!',
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import type { DBCustomType } from './domain';
|
||||
import type { Area } from './domain/area';
|
||||
import type { DBDependency } from './domain/db-dependency';
|
||||
import type { DBField } from './domain/db-field';
|
||||
@@ -48,6 +49,10 @@ const generateIdsMapFromDiagram = (
|
||||
idsMap.set(area.id, generateId());
|
||||
});
|
||||
|
||||
diagram.customTypes?.forEach((customType) => {
|
||||
idsMap.set(customType.id, generateId());
|
||||
});
|
||||
|
||||
return idsMap;
|
||||
};
|
||||
|
||||
@@ -213,6 +218,22 @@ export const cloneDiagram = (
|
||||
})
|
||||
.filter((area): area is Area => area !== null) ?? [];
|
||||
|
||||
const customTypes: DBCustomType[] =
|
||||
diagram.customTypes
|
||||
?.map((customType) => {
|
||||
const id = getNewId(customType.id);
|
||||
if (!id) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
...customType,
|
||||
id,
|
||||
} satisfies DBCustomType;
|
||||
})
|
||||
.filter(
|
||||
(customType): customType is DBCustomType => customType !== null
|
||||
) ?? [];
|
||||
|
||||
return {
|
||||
diagram: {
|
||||
...diagram,
|
||||
@@ -221,6 +242,7 @@ export const cloneDiagram = (
|
||||
relationships,
|
||||
tables,
|
||||
areas,
|
||||
customTypes,
|
||||
createdAt: diagram.createdAt
|
||||
? new Date(diagram.createdAt)
|
||||
: new Date(),
|
||||
|
||||
@@ -48,18 +48,30 @@ export const clickhouseDataTypes: readonly DataTypeData[] = [
|
||||
{ name: 'mediumblob', id: 'mediumblob' },
|
||||
{ name: 'tinyblob', id: 'tinyblob' },
|
||||
{ name: 'blob', id: 'blob' },
|
||||
{ name: 'varchar', id: 'varchar', hasCharMaxLength: true },
|
||||
{ name: 'char', id: 'char', hasCharMaxLength: true },
|
||||
{
|
||||
name: 'varchar',
|
||||
id: 'varchar',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{ name: 'char', id: 'char', fieldAttributes: { hasCharMaxLength: true } },
|
||||
{ name: 'char large object', id: 'char_large_object' },
|
||||
{ name: 'char varying', id: 'char_varying', hasCharMaxLength: true },
|
||||
{
|
||||
name: 'char varying',
|
||||
id: 'char_varying',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{ name: 'character large object', id: 'character_large_object' },
|
||||
{
|
||||
name: 'character varying',
|
||||
id: 'character_varying',
|
||||
hasCharMaxLength: true,
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{ name: 'nchar large object', id: 'nchar_large_object' },
|
||||
{ name: 'nchar varying', id: 'nchar_varying', hasCharMaxLength: true },
|
||||
{
|
||||
name: 'nchar varying',
|
||||
id: 'nchar_varying',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{
|
||||
name: 'national character large object',
|
||||
id: 'national_character_large_object',
|
||||
@@ -67,22 +79,34 @@ export const clickhouseDataTypes: readonly DataTypeData[] = [
|
||||
{
|
||||
name: 'national character varying',
|
||||
id: 'national_character_varying',
|
||||
hasCharMaxLength: true,
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{
|
||||
name: 'national char varying',
|
||||
id: 'national_char_varying',
|
||||
hasCharMaxLength: true,
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{
|
||||
name: 'national character',
|
||||
id: 'national_character',
|
||||
hasCharMaxLength: true,
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{
|
||||
name: 'national char',
|
||||
id: 'national_char',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{ name: 'national char', id: 'national_char', hasCharMaxLength: true },
|
||||
{ name: 'binary large object', id: 'binary_large_object' },
|
||||
{ name: 'binary varying', id: 'binary_varying', hasCharMaxLength: true },
|
||||
{ name: 'fixedstring', id: 'fixedstring', hasCharMaxLength: true },
|
||||
{
|
||||
name: 'binary varying',
|
||||
id: 'binary_varying',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{
|
||||
name: 'fixedstring',
|
||||
id: 'fixedstring',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{ name: 'string', id: 'string' },
|
||||
|
||||
// Date Types
|
||||
|
||||
@@ -14,9 +14,23 @@ export interface DataType {
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface DataTypeData extends DataType {
|
||||
export interface FieldAttributeRange {
|
||||
max: number;
|
||||
min: number;
|
||||
default: number;
|
||||
}
|
||||
|
||||
interface FieldAttributes {
|
||||
hasCharMaxLength?: boolean;
|
||||
hasCharMaxLengthOption?: boolean;
|
||||
precision?: FieldAttributeRange;
|
||||
scale?: FieldAttributeRange;
|
||||
maxLength?: number;
|
||||
}
|
||||
|
||||
export interface DataTypeData extends DataType {
|
||||
usageLevel?: 1 | 2; // Level 1 is most common, Level 2 is second most common
|
||||
fieldAttributes?: FieldAttributes;
|
||||
}
|
||||
|
||||
export const dataTypeSchema: z.ZodType<DataType> = z.object({
|
||||
|
||||
@@ -2,7 +2,12 @@ import type { DataTypeData } from './data-types';
|
||||
|
||||
export const genericDataTypes: readonly DataTypeData[] = [
|
||||
// Level 1 - Most commonly used types
|
||||
{ name: 'varchar', id: 'varchar', hasCharMaxLength: true, usageLevel: 1 },
|
||||
{
|
||||
name: 'varchar',
|
||||
id: 'varchar',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
usageLevel: 1,
|
||||
},
|
||||
{ name: 'int', id: 'int', usageLevel: 1 },
|
||||
{ name: 'text', id: 'text', usageLevel: 1 },
|
||||
{ name: 'boolean', id: 'boolean', usageLevel: 1 },
|
||||
@@ -10,23 +15,62 @@ export const genericDataTypes: readonly DataTypeData[] = [
|
||||
{ name: 'timestamp', id: 'timestamp', usageLevel: 1 },
|
||||
|
||||
// Level 2 - Second most common types
|
||||
{ name: 'decimal', id: 'decimal', usageLevel: 2 },
|
||||
{
|
||||
name: 'decimal',
|
||||
id: 'decimal',
|
||||
usageLevel: 2,
|
||||
fieldAttributes: {
|
||||
precision: {
|
||||
max: 999,
|
||||
min: 1,
|
||||
default: 10,
|
||||
},
|
||||
scale: {
|
||||
max: 999,
|
||||
min: 0,
|
||||
default: 2,
|
||||
},
|
||||
},
|
||||
},
|
||||
{ name: 'datetime', id: 'datetime', usageLevel: 2 },
|
||||
{ name: 'json', id: 'json', usageLevel: 2 },
|
||||
{ name: 'uuid', id: 'uuid', usageLevel: 2 },
|
||||
|
||||
// Less common types
|
||||
{ name: 'bigint', id: 'bigint' },
|
||||
{ name: 'binary', id: 'binary', hasCharMaxLength: true },
|
||||
{
|
||||
name: 'binary',
|
||||
id: 'binary',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{ name: 'blob', id: 'blob' },
|
||||
{ name: 'char', id: 'char', hasCharMaxLength: true },
|
||||
{ name: 'char', id: 'char', fieldAttributes: { hasCharMaxLength: true } },
|
||||
{ name: 'double', id: 'double' },
|
||||
{ name: 'enum', id: 'enum' },
|
||||
{ name: 'float', id: 'float' },
|
||||
{ name: 'numeric', id: 'numeric' },
|
||||
{
|
||||
name: 'numeric',
|
||||
id: 'numeric',
|
||||
fieldAttributes: {
|
||||
precision: {
|
||||
max: 999,
|
||||
min: 1,
|
||||
default: 10,
|
||||
},
|
||||
scale: {
|
||||
max: 999,
|
||||
min: 0,
|
||||
default: 2,
|
||||
},
|
||||
},
|
||||
},
|
||||
{ name: 'real', id: 'real' },
|
||||
{ name: 'set', id: 'set' },
|
||||
{ name: 'smallint', id: 'smallint' },
|
||||
{ name: 'time', id: 'time' },
|
||||
{ name: 'varbinary', id: 'varbinary', hasCharMaxLength: true },
|
||||
{
|
||||
name: 'varbinary',
|
||||
id: 'varbinary',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
] as const;
|
||||
|
||||
@@ -4,12 +4,32 @@ export const mariadbDataTypes: readonly DataTypeData[] = [
|
||||
// Level 1 - Most commonly used types
|
||||
{ name: 'int', id: 'int', usageLevel: 1 },
|
||||
{ name: 'bigint', id: 'bigint', usageLevel: 1 },
|
||||
{ name: 'decimal', id: 'decimal', usageLevel: 1 },
|
||||
{
|
||||
name: 'decimal',
|
||||
id: 'decimal',
|
||||
usageLevel: 1,
|
||||
fieldAttributes: {
|
||||
precision: {
|
||||
max: 65,
|
||||
min: 1,
|
||||
default: 10,
|
||||
},
|
||||
scale: {
|
||||
max: 30,
|
||||
min: 0,
|
||||
default: 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
{ name: 'boolean', id: 'boolean', usageLevel: 1 },
|
||||
{ name: 'datetime', id: 'datetime', usageLevel: 1 },
|
||||
{ name: 'date', id: 'date', usageLevel: 1 },
|
||||
{ name: 'timestamp', id: 'timestamp', usageLevel: 1 },
|
||||
{ name: 'varchar', id: 'varchar', hasCharMaxLength: true, usageLevel: 1 },
|
||||
{
|
||||
name: 'varchar',
|
||||
id: 'varchar',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{ name: 'text', id: 'text', usageLevel: 1 },
|
||||
|
||||
// Level 2 - Second most common types
|
||||
@@ -20,16 +40,39 @@ export const mariadbDataTypes: readonly DataTypeData[] = [
|
||||
{ name: 'tinyint', id: 'tinyint' },
|
||||
{ name: 'smallint', id: 'smallint' },
|
||||
{ name: 'mediumint', id: 'mediumint' },
|
||||
{ name: 'numeric', id: 'numeric' },
|
||||
{
|
||||
name: 'numeric',
|
||||
id: 'numeric',
|
||||
fieldAttributes: {
|
||||
precision: {
|
||||
max: 65,
|
||||
min: 1,
|
||||
default: 10,
|
||||
},
|
||||
scale: {
|
||||
max: 30,
|
||||
min: 0,
|
||||
default: 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
{ name: 'float', id: 'float' },
|
||||
{ name: 'double', id: 'double' },
|
||||
{ name: 'bit', id: 'bit' },
|
||||
{ name: 'bool', id: 'bool' },
|
||||
{ name: 'time', id: 'time' },
|
||||
{ name: 'year', id: 'year' },
|
||||
{ name: 'char', id: 'char', hasCharMaxLength: true },
|
||||
{ name: 'binary', id: 'binary', hasCharMaxLength: true },
|
||||
{ name: 'varbinary', id: 'varbinary', hasCharMaxLength: true },
|
||||
{ name: 'char', id: 'char', fieldAttributes: { hasCharMaxLength: true } },
|
||||
{
|
||||
name: 'binary',
|
||||
id: 'binary',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{
|
||||
name: 'varbinary',
|
||||
id: 'varbinary',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{ name: 'tinyblob', id: 'tinyblob' },
|
||||
{ name: 'blob', id: 'blob' },
|
||||
{ name: 'mediumblob', id: 'mediumblob' },
|
||||
|
||||
@@ -3,7 +3,12 @@ import type { DataTypeData } from './data-types';
|
||||
export const mysqlDataTypes: readonly DataTypeData[] = [
|
||||
// Level 1 - Most commonly used types
|
||||
{ name: 'int', id: 'int', usageLevel: 1 },
|
||||
{ name: 'varchar', id: 'varchar', hasCharMaxLength: true, usageLevel: 1 },
|
||||
{
|
||||
name: 'varchar',
|
||||
id: 'varchar',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
usageLevel: 1,
|
||||
},
|
||||
{ name: 'text', id: 'text', usageLevel: 1 },
|
||||
{ name: 'boolean', id: 'boolean', usageLevel: 1 },
|
||||
{ name: 'timestamp', id: 'timestamp', usageLevel: 1 },
|
||||
@@ -11,7 +16,23 @@ export const mysqlDataTypes: readonly DataTypeData[] = [
|
||||
|
||||
// Level 2 - Second most common types
|
||||
{ name: 'bigint', id: 'bigint', usageLevel: 2 },
|
||||
{ name: 'decimal', id: 'decimal', usageLevel: 2 },
|
||||
{
|
||||
name: 'decimal',
|
||||
id: 'decimal',
|
||||
usageLevel: 2,
|
||||
fieldAttributes: {
|
||||
precision: {
|
||||
max: 65,
|
||||
min: 1,
|
||||
default: 10,
|
||||
},
|
||||
scale: {
|
||||
max: 30,
|
||||
min: 0,
|
||||
default: 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
{ name: 'datetime', id: 'datetime', usageLevel: 2 },
|
||||
{ name: 'json', id: 'json', usageLevel: 2 },
|
||||
|
||||
@@ -22,7 +43,7 @@ export const mysqlDataTypes: readonly DataTypeData[] = [
|
||||
{ name: 'float', id: 'float' },
|
||||
{ name: 'double', id: 'double' },
|
||||
{ name: 'bit', id: 'bit' },
|
||||
{ name: 'char', id: 'char', hasCharMaxLength: true },
|
||||
{ name: 'char', id: 'char', fieldAttributes: { hasCharMaxLength: true } },
|
||||
{ name: 'tinytext', id: 'tinytext' },
|
||||
{ name: 'mediumtext', id: 'mediumtext' },
|
||||
{ name: 'longtext', id: 'longtext' },
|
||||
|
||||
@@ -2,15 +2,30 @@ import type { DataTypeData } from './data-types';
|
||||
|
||||
export const oracleDataTypes: readonly DataTypeData[] = [
|
||||
// Character types
|
||||
{ name: 'VARCHAR2', id: 'varchar2', usageLevel: 1, hasCharMaxLength: true },
|
||||
{
|
||||
name: 'VARCHAR2',
|
||||
id: 'varchar2',
|
||||
usageLevel: 1,
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{
|
||||
name: 'NVARCHAR2',
|
||||
id: 'nvarchar2',
|
||||
usageLevel: 1,
|
||||
hasCharMaxLength: true,
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{
|
||||
name: 'CHAR',
|
||||
id: 'char',
|
||||
usageLevel: 2,
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{
|
||||
name: 'NCHAR',
|
||||
id: 'nchar',
|
||||
usageLevel: 2,
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{ name: 'CHAR', id: 'char', usageLevel: 2, hasCharMaxLength: true },
|
||||
{ name: 'NCHAR', id: 'nchar', usageLevel: 2, hasCharMaxLength: true },
|
||||
{ name: 'CLOB', id: 'clob', usageLevel: 2 },
|
||||
{ name: 'NCLOB', id: 'nclob', usageLevel: 2 },
|
||||
|
||||
@@ -49,7 +64,12 @@ export const oracleDataTypes: readonly DataTypeData[] = [
|
||||
{ name: 'BFILE', id: 'bfile', usageLevel: 2 },
|
||||
|
||||
// Other types
|
||||
{ name: 'RAW', id: 'raw', usageLevel: 2, hasCharMaxLength: true },
|
||||
{
|
||||
name: 'RAW',
|
||||
id: 'raw',
|
||||
usageLevel: 2,
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{ name: 'LONG RAW', id: 'long_raw', usageLevel: 2 },
|
||||
{ name: 'ROWID', id: 'rowid', usageLevel: 2 },
|
||||
{ name: 'UROWID', id: 'urowid', usageLevel: 2 },
|
||||
|
||||
@@ -3,7 +3,12 @@ import type { DataTypeData } from './data-types';
|
||||
export const postgresDataTypes: readonly DataTypeData[] = [
|
||||
// Level 1 - Most commonly used types
|
||||
{ name: 'integer', id: 'integer', usageLevel: 1 },
|
||||
{ name: 'varchar', id: 'varchar', hasCharMaxLength: true, usageLevel: 1 },
|
||||
{
|
||||
name: 'varchar',
|
||||
id: 'varchar',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
usageLevel: 1,
|
||||
},
|
||||
{ name: 'text', id: 'text', usageLevel: 1 },
|
||||
{ name: 'boolean', id: 'boolean', usageLevel: 1 },
|
||||
{ name: 'timestamp', id: 'timestamp', usageLevel: 1 },
|
||||
@@ -11,7 +16,23 @@ export const postgresDataTypes: readonly DataTypeData[] = [
|
||||
|
||||
// Level 2 - Second most common types
|
||||
{ name: 'bigint', id: 'bigint', usageLevel: 2 },
|
||||
{ name: 'decimal', id: 'decimal', usageLevel: 2 },
|
||||
{
|
||||
name: 'decimal',
|
||||
id: 'decimal',
|
||||
usageLevel: 2,
|
||||
fieldAttributes: {
|
||||
precision: {
|
||||
max: 131072,
|
||||
min: 0,
|
||||
default: 10,
|
||||
},
|
||||
scale: {
|
||||
max: 16383,
|
||||
min: 0,
|
||||
default: 2,
|
||||
},
|
||||
},
|
||||
},
|
||||
{ name: 'serial', id: 'serial', usageLevel: 2 },
|
||||
{ name: 'json', id: 'json', usageLevel: 2 },
|
||||
{ name: 'jsonb', id: 'jsonb', usageLevel: 2 },
|
||||
@@ -23,18 +44,33 @@ export const postgresDataTypes: readonly DataTypeData[] = [
|
||||
},
|
||||
|
||||
// Less common types
|
||||
{ name: 'numeric', id: 'numeric' },
|
||||
{
|
||||
name: 'numeric',
|
||||
id: 'numeric',
|
||||
fieldAttributes: {
|
||||
precision: {
|
||||
max: 131072,
|
||||
min: 0,
|
||||
default: 10,
|
||||
},
|
||||
scale: {
|
||||
max: 16383,
|
||||
min: 0,
|
||||
default: 2,
|
||||
},
|
||||
},
|
||||
},
|
||||
{ name: 'real', id: 'real' },
|
||||
{ name: 'double precision', id: 'double_precision' },
|
||||
{ name: 'smallserial', id: 'smallserial' },
|
||||
{ name: 'bigserial', id: 'bigserial' },
|
||||
{ name: 'money', id: 'money' },
|
||||
{ name: 'smallint', id: 'smallint' },
|
||||
{ name: 'char', id: 'char', hasCharMaxLength: true },
|
||||
{ name: 'char', id: 'char', fieldAttributes: { hasCharMaxLength: true } },
|
||||
{
|
||||
name: 'character varying',
|
||||
id: 'character_varying',
|
||||
hasCharMaxLength: true,
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{ name: 'time', id: 'time' },
|
||||
{ name: 'timestamp without time zone', id: 'timestamp_without_time_zone' },
|
||||
|
||||
@@ -4,32 +4,93 @@ export const sqlServerDataTypes: readonly DataTypeData[] = [
|
||||
// Level 1 - Most commonly used types
|
||||
{ name: 'int', id: 'int', usageLevel: 1 },
|
||||
{ name: 'bit', id: 'bit', usageLevel: 1 },
|
||||
{ name: 'varchar', id: 'varchar', hasCharMaxLength: true, usageLevel: 1 },
|
||||
{ name: 'nvarchar', id: 'nvarchar', hasCharMaxLength: true, usageLevel: 1 },
|
||||
{
|
||||
name: 'varchar',
|
||||
id: 'varchar',
|
||||
fieldAttributes: {
|
||||
hasCharMaxLength: true,
|
||||
hasCharMaxLengthOption: true,
|
||||
maxLength: 8000,
|
||||
},
|
||||
usageLevel: 1,
|
||||
},
|
||||
{
|
||||
name: 'nvarchar',
|
||||
id: 'nvarchar',
|
||||
fieldAttributes: {
|
||||
hasCharMaxLength: true,
|
||||
hasCharMaxLengthOption: true,
|
||||
maxLength: 4000,
|
||||
},
|
||||
usageLevel: 1,
|
||||
},
|
||||
{ name: 'text', id: 'text', usageLevel: 1 },
|
||||
{ name: 'datetime', id: 'datetime', usageLevel: 1 },
|
||||
{ name: 'date', id: 'date', usageLevel: 1 },
|
||||
|
||||
// Level 2 - Second most common types
|
||||
{ name: 'bigint', id: 'bigint', usageLevel: 2 },
|
||||
{ name: 'decimal', id: 'decimal', usageLevel: 2 },
|
||||
{
|
||||
name: 'decimal',
|
||||
id: 'decimal',
|
||||
usageLevel: 2,
|
||||
fieldAttributes: {
|
||||
precision: {
|
||||
max: 38,
|
||||
min: 1,
|
||||
default: 18,
|
||||
},
|
||||
scale: {
|
||||
max: 38,
|
||||
min: 0,
|
||||
default: 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
{ name: 'datetime2', id: 'datetime2', usageLevel: 2 },
|
||||
{ name: 'uniqueidentifier', id: 'uniqueidentifier', usageLevel: 2 },
|
||||
{ name: 'json', id: 'json', usageLevel: 2 },
|
||||
|
||||
// Less common types
|
||||
{ name: 'numeric', id: 'numeric' },
|
||||
{
|
||||
name: 'numeric',
|
||||
id: 'numeric',
|
||||
fieldAttributes: {
|
||||
precision: {
|
||||
max: 38,
|
||||
min: 1,
|
||||
default: 18,
|
||||
},
|
||||
scale: {
|
||||
max: 38,
|
||||
min: 0,
|
||||
default: 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
{ name: 'smallint', id: 'smallint' },
|
||||
{ name: 'smallmoney', id: 'smallmoney' },
|
||||
{ name: 'tinyint', id: 'tinyint' },
|
||||
{ name: 'money', id: 'money' },
|
||||
{ name: 'float', id: 'float' },
|
||||
{ name: 'real', id: 'real' },
|
||||
{ name: 'char', id: 'char', hasCharMaxLength: true },
|
||||
{ name: 'nchar', id: 'nchar', hasCharMaxLength: true },
|
||||
{ name: 'char', id: 'char', fieldAttributes: { hasCharMaxLength: true } },
|
||||
{ name: 'nchar', id: 'nchar', fieldAttributes: { hasCharMaxLength: true } },
|
||||
{ name: 'ntext', id: 'ntext' },
|
||||
{ name: 'binary', id: 'binary', hasCharMaxLength: true },
|
||||
{ name: 'varbinary', id: 'varbinary', hasCharMaxLength: true },
|
||||
{
|
||||
name: 'binary',
|
||||
id: 'binary',
|
||||
fieldAttributes: { hasCharMaxLength: true },
|
||||
},
|
||||
{
|
||||
name: 'varbinary',
|
||||
id: 'varbinary',
|
||||
fieldAttributes: {
|
||||
hasCharMaxLength: true,
|
||||
hasCharMaxLengthOption: true,
|
||||
maxLength: 8000,
|
||||
},
|
||||
},
|
||||
{ name: 'image', id: 'image' },
|
||||
{ name: 'datetimeoffset', id: 'datetimeoffset' },
|
||||
{ name: 'smalldatetime', id: 'smalldatetime' },
|
||||
|
||||
@@ -10,21 +10,41 @@ export const sqliteDataTypes: readonly DataTypeData[] = [
|
||||
|
||||
// SQLite type aliases and common types
|
||||
{ name: 'int', id: 'int', usageLevel: 1 },
|
||||
{ name: 'varchar', id: 'varchar', hasCharMaxLength: true, usageLevel: 1 },
|
||||
{ name: 'timestamp', id: 'timestamp', usageLevel: 1 },
|
||||
{ name: 'date', id: 'date', usageLevel: 1 },
|
||||
{ name: 'datetime', id: 'datetime', usageLevel: 1 },
|
||||
{ name: 'boolean', id: 'boolean', usageLevel: 1 },
|
||||
{
|
||||
name: 'varchar',
|
||||
id: 'varchar',
|
||||
fieldAttributes: {
|
||||
hasCharMaxLength: true,
|
||||
},
|
||||
usageLevel: 1,
|
||||
},
|
||||
{
|
||||
name: 'timestamp',
|
||||
id: 'timestamp',
|
||||
usageLevel: 1,
|
||||
},
|
||||
|
||||
// Level 2 - Second most common types
|
||||
{ name: 'numeric', id: 'numeric', usageLevel: 2 },
|
||||
{ name: 'decimal', id: 'decimal', usageLevel: 2 },
|
||||
{ name: 'float', id: 'float', usageLevel: 2 },
|
||||
{
|
||||
name: 'decimal',
|
||||
id: 'decimal',
|
||||
usageLevel: 2,
|
||||
},
|
||||
{ name: 'double', id: 'double', usageLevel: 2 },
|
||||
{ name: 'json', id: 'json', usageLevel: 2 },
|
||||
|
||||
// Less common types (all map to SQLite storage classes)
|
||||
{ name: 'char', id: 'char', hasCharMaxLength: true },
|
||||
{
|
||||
name: 'char',
|
||||
id: 'char',
|
||||
fieldAttributes: {
|
||||
hasCharMaxLength: true,
|
||||
},
|
||||
usageLevel: 2,
|
||||
},
|
||||
{ name: 'binary', id: 'binary' },
|
||||
{ name: 'varbinary', id: 'varbinary' },
|
||||
{ name: 'smallint', id: 'smallint' },
|
||||
|
||||
@@ -4,4 +4,5 @@ export const defaultSchemas: { [key in DatabaseType]?: string } = {
|
||||
[DatabaseType.POSTGRESQL]: 'public',
|
||||
[DatabaseType.SQL_SERVER]: 'dbo',
|
||||
[DatabaseType.CLICKHOUSE]: 'default',
|
||||
[DatabaseType.COCKROACHDB]: 'public',
|
||||
};
|
||||
|
||||
@@ -227,7 +227,7 @@ describe('DBML Export - SQL Generation Tests', () => {
|
||||
expect(sql).not.toContain('DEFAULT DEFAULT has default');
|
||||
// The fields should still be in the table
|
||||
expect(sql).toContain('is_active boolean');
|
||||
expect(sql).toContain('stock_count int NOT NULL'); // integer gets simplified to int
|
||||
expect(sql).toContain('stock_count integer NOT NULL'); // integer gets simplified to int
|
||||
});
|
||||
|
||||
it('should handle valid default values correctly', () => {
|
||||
|
||||
@@ -73,7 +73,13 @@ function parseMSSQLDefault(field: DBField): string {
|
||||
return `'${defaultValue}'`;
|
||||
}
|
||||
|
||||
export function exportMSSQL(diagram: Diagram): string {
|
||||
export function exportMSSQL({
|
||||
diagram,
|
||||
onlyRelationships = false,
|
||||
}: {
|
||||
diagram: Diagram;
|
||||
onlyRelationships?: boolean;
|
||||
}): string {
|
||||
if (!diagram.tables || !diagram.relationships) {
|
||||
return '';
|
||||
}
|
||||
@@ -83,166 +89,254 @@ export function exportMSSQL(diagram: Diagram): string {
|
||||
|
||||
// Create CREATE SCHEMA statements for all schemas
|
||||
let sqlScript = '';
|
||||
const schemas = new Set<string>();
|
||||
|
||||
tables.forEach((table) => {
|
||||
if (table.schema) {
|
||||
schemas.add(table.schema);
|
||||
}
|
||||
});
|
||||
if (!onlyRelationships) {
|
||||
const schemas = new Set<string>();
|
||||
|
||||
// Add schema creation statements
|
||||
schemas.forEach((schema) => {
|
||||
sqlScript += `IF NOT EXISTS (SELECT * FROM sys.schemas WHERE name = '${schema}')\nBEGIN\n EXEC('CREATE SCHEMA [${schema}]');\nEND;\n\n`;
|
||||
});
|
||||
|
||||
// Generate table creation SQL
|
||||
sqlScript += tables
|
||||
.map((table: DBTable) => {
|
||||
// Skip views
|
||||
if (table.isView) {
|
||||
return '';
|
||||
tables.forEach((table) => {
|
||||
if (table.schema) {
|
||||
schemas.add(table.schema);
|
||||
}
|
||||
});
|
||||
|
||||
const tableName = table.schema
|
||||
? `[${table.schema}].[${table.name}]`
|
||||
: `[${table.name}]`;
|
||||
// Add schema creation statements
|
||||
schemas.forEach((schema) => {
|
||||
sqlScript += `IF NOT EXISTS (SELECT * FROM sys.schemas WHERE name = '${schema}')\nBEGIN\n EXEC('CREATE SCHEMA [${schema}]');\nEND;\n`;
|
||||
});
|
||||
|
||||
return `${
|
||||
table.comments ? formatMSSQLTableComment(table.comments) : ''
|
||||
}CREATE TABLE ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `[${field.name}]`;
|
||||
const typeName = field.type.name;
|
||||
// Generate table creation SQL
|
||||
sqlScript += tables
|
||||
.map((table: DBTable) => {
|
||||
// Skip views
|
||||
if (table.isView) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Handle SQL Server specific type formatting
|
||||
let typeWithSize = typeName;
|
||||
if (field.characterMaximumLength) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'varchar' ||
|
||||
typeName.toLowerCase() === 'nvarchar' ||
|
||||
typeName.toLowerCase() === 'char' ||
|
||||
typeName.toLowerCase() === 'nchar'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.characterMaximumLength})`;
|
||||
const tableName = table.schema
|
||||
? `[${table.schema}].[${table.name}]`
|
||||
: `[${table.name}]`;
|
||||
|
||||
return `${
|
||||
table.comments
|
||||
? formatMSSQLTableComment(table.comments)
|
||||
: ''
|
||||
}CREATE TABLE ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `[${field.name}]`;
|
||||
const typeName = field.type.name;
|
||||
|
||||
// Handle SQL Server specific type formatting
|
||||
let typeWithSize = typeName;
|
||||
if (field.characterMaximumLength) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'varchar' ||
|
||||
typeName.toLowerCase() === 'nvarchar' ||
|
||||
typeName.toLowerCase() === 'char' ||
|
||||
typeName.toLowerCase() === 'nchar'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.characterMaximumLength})`;
|
||||
}
|
||||
}
|
||||
} else if (field.precision && field.scale) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision}, ${field.scale})`;
|
||||
if (field.precision && field.scale) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision}, ${field.scale})`;
|
||||
}
|
||||
} else if (field.precision) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision})`;
|
||||
}
|
||||
}
|
||||
} else if (field.precision) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision})`;
|
||||
}
|
||||
}
|
||||
|
||||
const notNull = field.nullable ? '' : ' NOT NULL';
|
||||
const notNull = field.nullable ? '' : ' NOT NULL';
|
||||
|
||||
// Check if identity column
|
||||
const identity = field.default
|
||||
?.toLowerCase()
|
||||
.includes('identity')
|
||||
? ' IDENTITY(1,1)'
|
||||
: '';
|
||||
|
||||
const unique =
|
||||
!field.primaryKey && field.unique ? ' UNIQUE' : '';
|
||||
|
||||
// Handle default value using SQL Server specific parser
|
||||
const defaultValue =
|
||||
field.default &&
|
||||
!field.default.toLowerCase().includes('identity')
|
||||
? ` DEFAULT ${parseMSSQLDefault(field)}`
|
||||
// Check if identity column
|
||||
const identity = field.default
|
||||
?.toLowerCase()
|
||||
.includes('identity')
|
||||
? ' IDENTITY(1,1)'
|
||||
: '';
|
||||
|
||||
// Do not add PRIMARY KEY as a column constraint - will add as table constraint
|
||||
return `${exportFieldComment(field.comments ?? '')} ${fieldName} ${typeWithSize}${notNull}${identity}${unique}${defaultValue}`;
|
||||
})
|
||||
.join(',\n')}${
|
||||
table.fields.filter((f) => f.primaryKey).length > 0
|
||||
? `,\n PRIMARY KEY (${table.fields
|
||||
.filter((f) => f.primaryKey)
|
||||
.map((f) => `[${f.name}]`)
|
||||
.join(', ')})`
|
||||
: ''
|
||||
}\n);\n\n${table.indexes
|
||||
.map((index) => {
|
||||
const indexName = table.schema
|
||||
? `[${table.schema}_${index.name}]`
|
||||
: `[${index.name}]`;
|
||||
const indexFields = index.fieldIds
|
||||
.map((fieldId) => {
|
||||
const field = table.fields.find(
|
||||
(f) => f.id === fieldId
|
||||
);
|
||||
return field ? `[${field.name}]` : '';
|
||||
const unique =
|
||||
!field.primaryKey && field.unique ? ' UNIQUE' : '';
|
||||
|
||||
// Handle default value using SQL Server specific parser
|
||||
const defaultValue =
|
||||
field.default &&
|
||||
!field.default.toLowerCase().includes('identity')
|
||||
? ` DEFAULT ${parseMSSQLDefault(field)}`
|
||||
: '';
|
||||
|
||||
// Do not add PRIMARY KEY as a column constraint - will add as table constraint
|
||||
return `${exportFieldComment(field.comments ?? '')} ${fieldName} ${typeWithSize}${notNull}${identity}${unique}${defaultValue}`;
|
||||
})
|
||||
.join(',\n')}${
|
||||
table.fields.filter((f) => f.primaryKey).length > 0
|
||||
? `,\n PRIMARY KEY (${table.fields
|
||||
.filter((f) => f.primaryKey)
|
||||
.map((f) => `[${f.name}]`)
|
||||
.join(', ')})`
|
||||
: ''
|
||||
}\n);\n${(() => {
|
||||
const validIndexes = table.indexes
|
||||
.map((index) => {
|
||||
const indexName = table.schema
|
||||
? `[${table.schema}_${index.name}]`
|
||||
: `[${index.name}]`;
|
||||
const indexFields = index.fieldIds
|
||||
.map((fieldId) => {
|
||||
const field = table.fields.find(
|
||||
(f) => f.id === fieldId
|
||||
);
|
||||
return field ? `[${field.name}]` : '';
|
||||
})
|
||||
.filter(Boolean);
|
||||
|
||||
// SQL Server has a limit of 32 columns in an index
|
||||
if (indexFields.length > 32) {
|
||||
const warningComment = `/* WARNING: This index originally had ${indexFields.length} columns. It has been truncated to 32 columns due to SQL Server's index column limit. */\n`;
|
||||
console.warn(
|
||||
`Warning: Index ${indexName} on table ${tableName} has ${indexFields.length} columns. SQL Server limits indexes to 32 columns. The index will be truncated.`
|
||||
);
|
||||
indexFields.length = 32;
|
||||
return indexFields.length > 0
|
||||
? `${warningComment}CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName}\nON ${tableName} (${indexFields.join(', ')});`
|
||||
: '';
|
||||
}
|
||||
|
||||
return indexFields.length > 0
|
||||
? `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName}\nON ${tableName} (${indexFields.join(', ')});`
|
||||
: '';
|
||||
})
|
||||
.filter(Boolean);
|
||||
|
||||
// SQL Server has a limit of 32 columns in an index
|
||||
if (indexFields.length > 32) {
|
||||
const warningComment = `/* WARNING: This index originally had ${indexFields.length} columns. It has been truncated to 32 columns due to SQL Server's index column limit. */\n`;
|
||||
console.warn(
|
||||
`Warning: Index ${indexName} on table ${tableName} has ${indexFields.length} columns. SQL Server limits indexes to 32 columns. The index will be truncated.`
|
||||
);
|
||||
indexFields.length = 32;
|
||||
return indexFields.length > 0
|
||||
? `${warningComment}CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName}\nON ${tableName} (${indexFields.join(', ')});\n\n`
|
||||
: '';
|
||||
}
|
||||
|
||||
return indexFields.length > 0
|
||||
? `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName}\nON ${tableName} (${indexFields.join(', ')});\n\n`
|
||||
return validIndexes.length > 0
|
||||
? `\n-- Indexes\n${validIndexes.join('\n')}`
|
||||
: '';
|
||||
})
|
||||
.join('')}`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings (views)
|
||||
.join('\n');
|
||||
})()}\n`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings (views)
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
// Generate foreign keys
|
||||
sqlScript += `\n${relationships
|
||||
.map((r: DBRelationship) => {
|
||||
const sourceTable = tables.find((t) => t.id === r.sourceTableId);
|
||||
const targetTable = tables.find((t) => t.id === r.targetTableId);
|
||||
if (relationships.length > 0) {
|
||||
sqlScript += '\n-- Foreign key constraints\n';
|
||||
|
||||
if (
|
||||
!sourceTable ||
|
||||
!targetTable ||
|
||||
sourceTable.isView ||
|
||||
targetTable.isView
|
||||
) {
|
||||
return '';
|
||||
}
|
||||
// Process all relationships and create FK objects with schema info
|
||||
const foreignKeys = relationships
|
||||
.map((r: DBRelationship) => {
|
||||
const sourceTable = tables.find(
|
||||
(t) => t.id === r.sourceTableId
|
||||
);
|
||||
const targetTable = tables.find(
|
||||
(t) => t.id === r.targetTableId
|
||||
);
|
||||
|
||||
const sourceField = sourceTable.fields.find(
|
||||
(f) => f.id === r.sourceFieldId
|
||||
);
|
||||
const targetField = targetTable.fields.find(
|
||||
(f) => f.id === r.targetFieldId
|
||||
);
|
||||
if (
|
||||
!sourceTable ||
|
||||
!targetTable ||
|
||||
sourceTable.isView ||
|
||||
targetTable.isView
|
||||
) {
|
||||
return '';
|
||||
}
|
||||
|
||||
if (!sourceField || !targetField) {
|
||||
return '';
|
||||
}
|
||||
const sourceField = sourceTable.fields.find(
|
||||
(f) => f.id === r.sourceFieldId
|
||||
);
|
||||
const targetField = targetTable.fields.find(
|
||||
(f) => f.id === r.targetFieldId
|
||||
);
|
||||
|
||||
const sourceTableName = sourceTable.schema
|
||||
? `[${sourceTable.schema}].[${sourceTable.name}]`
|
||||
: `[${sourceTable.name}]`;
|
||||
const targetTableName = targetTable.schema
|
||||
? `[${targetTable.schema}].[${targetTable.name}]`
|
||||
: `[${targetTable.name}]`;
|
||||
if (!sourceField || !targetField) {
|
||||
return '';
|
||||
}
|
||||
|
||||
return `ALTER TABLE ${sourceTableName}\nADD CONSTRAINT [${r.name}] FOREIGN KEY([${sourceField.name}]) REFERENCES ${targetTableName}([${targetField.name}]);\n`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings
|
||||
.join('\n')}`;
|
||||
// Determine which table should have the foreign key based on cardinality
|
||||
let fkTable, fkField, refTable, refField;
|
||||
|
||||
if (
|
||||
r.sourceCardinality === 'one' &&
|
||||
r.targetCardinality === 'many'
|
||||
) {
|
||||
// FK goes on target table
|
||||
fkTable = targetTable;
|
||||
fkField = targetField;
|
||||
refTable = sourceTable;
|
||||
refField = sourceField;
|
||||
} else if (
|
||||
r.sourceCardinality === 'many' &&
|
||||
r.targetCardinality === 'one'
|
||||
) {
|
||||
// FK goes on source table
|
||||
fkTable = sourceTable;
|
||||
fkField = sourceField;
|
||||
refTable = targetTable;
|
||||
refField = targetField;
|
||||
} else if (
|
||||
r.sourceCardinality === 'one' &&
|
||||
r.targetCardinality === 'one'
|
||||
) {
|
||||
// For 1:1, FK can go on either side, but typically goes on the table that references the other
|
||||
// We'll keep the current behavior for 1:1
|
||||
fkTable = sourceTable;
|
||||
fkField = sourceField;
|
||||
refTable = targetTable;
|
||||
refField = targetField;
|
||||
} else {
|
||||
// Many-to-many relationships need a junction table, skip for now
|
||||
return '';
|
||||
}
|
||||
|
||||
const fkTableName = fkTable.schema
|
||||
? `[${fkTable.schema}].[${fkTable.name}]`
|
||||
: `[${fkTable.name}]`;
|
||||
const refTableName = refTable.schema
|
||||
? `[${refTable.schema}].[${refTable.name}]`
|
||||
: `[${refTable.name}]`;
|
||||
|
||||
return {
|
||||
schema: fkTable.schema || 'dbo',
|
||||
sql: `ALTER TABLE ${fkTableName} ADD CONSTRAINT [${r.name}] FOREIGN KEY([${fkField.name}]) REFERENCES ${refTableName}([${refField.name}]);`,
|
||||
};
|
||||
})
|
||||
.filter(Boolean); // Remove empty objects
|
||||
|
||||
// Group foreign keys by schema
|
||||
const fksBySchema = foreignKeys.reduce(
|
||||
(acc, fk) => {
|
||||
if (!fk) return acc;
|
||||
const schema = fk.schema;
|
||||
if (!acc[schema]) {
|
||||
acc[schema] = [];
|
||||
}
|
||||
acc[schema].push(fk.sql);
|
||||
return acc;
|
||||
},
|
||||
{} as Record<string, string[]>
|
||||
);
|
||||
|
||||
// Sort schemas and generate SQL with separators
|
||||
const sortedSchemas = Object.keys(fksBySchema).sort();
|
||||
const fkSql = sortedSchemas
|
||||
.map((schema, index) => {
|
||||
const schemaFks = fksBySchema[schema].join('\n');
|
||||
if (index === 0) {
|
||||
return `-- Schema: ${schema}\n${schemaFks}`;
|
||||
} else {
|
||||
return `\n-- Schema: ${schema}\n${schemaFks}`;
|
||||
}
|
||||
})
|
||||
.join('\n');
|
||||
|
||||
sqlScript += fkSql;
|
||||
}
|
||||
|
||||
return sqlScript;
|
||||
}
|
||||
|
||||
@@ -170,7 +170,13 @@ function mapMySQLType(typeName: string): string {
|
||||
return typeName;
|
||||
}
|
||||
|
||||
export function exportMySQL(diagram: Diagram): string {
|
||||
export function exportMySQL({
|
||||
diagram,
|
||||
onlyRelationships = false,
|
||||
}: {
|
||||
diagram: Diagram;
|
||||
onlyRelationships?: boolean;
|
||||
}): string {
|
||||
if (!diagram.tables || !diagram.relationships) {
|
||||
return '';
|
||||
}
|
||||
@@ -179,224 +185,245 @@ export function exportMySQL(diagram: Diagram): string {
|
||||
const relationships = diagram.relationships;
|
||||
|
||||
// Start SQL script
|
||||
let sqlScript = '-- MySQL database export\n\n';
|
||||
let sqlScript = '-- MySQL database export\n';
|
||||
|
||||
// MySQL doesn't really use transactions for DDL statements but we'll add it for consistency
|
||||
sqlScript += 'START TRANSACTION;\n\n';
|
||||
if (!onlyRelationships) {
|
||||
// MySQL doesn't really use transactions for DDL statements but we'll add it for consistency
|
||||
sqlScript += 'START TRANSACTION;\n';
|
||||
|
||||
// Create databases (schemas) if they don't exist
|
||||
const schemas = new Set<string>();
|
||||
tables.forEach((table) => {
|
||||
if (table.schema) {
|
||||
schemas.add(table.schema);
|
||||
}
|
||||
});
|
||||
|
||||
schemas.forEach((schema) => {
|
||||
sqlScript += `CREATE DATABASE IF NOT EXISTS \`${schema}\`;\n`;
|
||||
});
|
||||
|
||||
if (schemas.size > 0) {
|
||||
sqlScript += '\n';
|
||||
}
|
||||
|
||||
// Generate table creation SQL
|
||||
sqlScript += tables
|
||||
.map((table: DBTable) => {
|
||||
// Skip views
|
||||
if (table.isView) {
|
||||
return '';
|
||||
// Create databases (schemas) if they don't exist
|
||||
const schemas = new Set<string>();
|
||||
tables.forEach((table) => {
|
||||
if (table.schema) {
|
||||
schemas.add(table.schema);
|
||||
}
|
||||
});
|
||||
|
||||
// Use schema prefix if available
|
||||
const tableName = table.schema
|
||||
? `\`${table.schema}\`.\`${table.name}\``
|
||||
: `\`${table.name}\``;
|
||||
schemas.forEach((schema) => {
|
||||
sqlScript += `CREATE DATABASE IF NOT EXISTS \`${schema}\`;\n`;
|
||||
});
|
||||
|
||||
// Get primary key fields
|
||||
const primaryKeyFields = table.fields.filter((f) => f.primaryKey);
|
||||
if (schemas.size > 0) {
|
||||
sqlScript += '\n';
|
||||
}
|
||||
|
||||
return `${
|
||||
table.comments ? formatTableComment(table.comments) : ''
|
||||
}CREATE TABLE IF NOT EXISTS ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `\`${field.name}\``;
|
||||
// Generate table creation SQL
|
||||
sqlScript += tables
|
||||
.map((table: DBTable) => {
|
||||
// Skip views
|
||||
if (table.isView) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Handle type name - map to MySQL compatible types
|
||||
const typeName = mapMySQLType(field.type.name);
|
||||
// Use schema prefix if available
|
||||
const tableName = table.schema
|
||||
? `\`${table.schema}\`.\`${table.name}\``
|
||||
: `\`${table.name}\``;
|
||||
|
||||
// Handle MySQL specific type formatting
|
||||
let typeWithSize = typeName;
|
||||
if (field.characterMaximumLength) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'varchar' ||
|
||||
typeName.toLowerCase() === 'char' ||
|
||||
typeName.toLowerCase() === 'varbinary'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.characterMaximumLength})`;
|
||||
// Get primary key fields
|
||||
const primaryKeyFields = table.fields.filter(
|
||||
(f) => f.primaryKey
|
||||
);
|
||||
|
||||
return `${
|
||||
table.comments ? formatTableComment(table.comments) : ''
|
||||
}\nCREATE TABLE IF NOT EXISTS ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `\`${field.name}\``;
|
||||
|
||||
// Handle type name - map to MySQL compatible types
|
||||
const typeName = mapMySQLType(field.type.name);
|
||||
|
||||
// Handle MySQL specific type formatting
|
||||
let typeWithSize = typeName;
|
||||
if (field.characterMaximumLength) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'varchar' ||
|
||||
typeName.toLowerCase() === 'char' ||
|
||||
typeName.toLowerCase() === 'varbinary'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.characterMaximumLength})`;
|
||||
}
|
||||
}
|
||||
} else if (field.precision && field.scale) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision}, ${field.scale})`;
|
||||
if (field.precision && field.scale) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision}, ${field.scale})`;
|
||||
}
|
||||
} else if (field.precision) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision})`;
|
||||
}
|
||||
}
|
||||
} else if (field.precision) {
|
||||
|
||||
// Set a default size for VARCHAR columns if not specified
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
typeName.toLowerCase() === 'varchar' &&
|
||||
!field.characterMaximumLength
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision})`;
|
||||
typeWithSize = `${typeName}(255)`;
|
||||
}
|
||||
}
|
||||
|
||||
// Set a default size for VARCHAR columns if not specified
|
||||
if (
|
||||
typeName.toLowerCase() === 'varchar' &&
|
||||
!field.characterMaximumLength
|
||||
) {
|
||||
typeWithSize = `${typeName}(255)`;
|
||||
}
|
||||
const notNull = field.nullable ? '' : ' NOT NULL';
|
||||
|
||||
const notNull = field.nullable ? '' : ' NOT NULL';
|
||||
|
||||
// Handle auto_increment - MySQL uses AUTO_INCREMENT keyword
|
||||
let autoIncrement = '';
|
||||
if (
|
||||
field.primaryKey &&
|
||||
(field.default?.toLowerCase().includes('identity') ||
|
||||
field.default
|
||||
// Handle auto_increment - MySQL uses AUTO_INCREMENT keyword
|
||||
let autoIncrement = '';
|
||||
if (
|
||||
field.primaryKey &&
|
||||
(field.default
|
||||
?.toLowerCase()
|
||||
.includes('autoincrement') ||
|
||||
field.default?.includes('nextval'))
|
||||
) {
|
||||
autoIncrement = ' AUTO_INCREMENT';
|
||||
}
|
||||
.includes('identity') ||
|
||||
field.default
|
||||
?.toLowerCase()
|
||||
.includes('autoincrement') ||
|
||||
field.default?.includes('nextval'))
|
||||
) {
|
||||
autoIncrement = ' AUTO_INCREMENT';
|
||||
}
|
||||
|
||||
// Only add UNIQUE constraint if the field is not part of the primary key
|
||||
const unique =
|
||||
!field.primaryKey && field.unique ? ' UNIQUE' : '';
|
||||
// Only add UNIQUE constraint if the field is not part of the primary key
|
||||
const unique =
|
||||
!field.primaryKey && field.unique ? ' UNIQUE' : '';
|
||||
|
||||
// Handle default value
|
||||
const defaultValue =
|
||||
field.default &&
|
||||
!field.default.toLowerCase().includes('identity') &&
|
||||
!field.default
|
||||
.toLowerCase()
|
||||
.includes('autoincrement') &&
|
||||
!field.default.includes('nextval')
|
||||
? ` DEFAULT ${parseMySQLDefault(field)}`
|
||||
// Handle default value
|
||||
const defaultValue =
|
||||
field.default &&
|
||||
!field.default.toLowerCase().includes('identity') &&
|
||||
!field.default
|
||||
.toLowerCase()
|
||||
.includes('autoincrement') &&
|
||||
!field.default.includes('nextval')
|
||||
? ` DEFAULT ${parseMySQLDefault(field)}`
|
||||
: '';
|
||||
|
||||
// MySQL supports inline comments
|
||||
const comment = field.comments
|
||||
? ` COMMENT '${escapeSQLComment(field.comments)}'`
|
||||
: '';
|
||||
|
||||
// MySQL supports inline comments
|
||||
const comment = field.comments
|
||||
? ` COMMENT '${escapeSQLComment(field.comments)}'`
|
||||
: '';
|
||||
return `${exportFieldComment(field.comments ?? '')} ${fieldName} ${typeWithSize}${notNull}${autoIncrement}${unique}${defaultValue}${comment}`;
|
||||
})
|
||||
.join(',\n')}${
|
||||
// Add PRIMARY KEY as table constraint
|
||||
primaryKeyFields.length > 0
|
||||
? `,\n PRIMARY KEY (${primaryKeyFields
|
||||
.map((f) => `\`${f.name}\``)
|
||||
.join(', ')})`
|
||||
: ''
|
||||
}\n)${
|
||||
// MySQL supports table comments
|
||||
table.comments
|
||||
? ` COMMENT='${escapeSQLComment(table.comments)}'`
|
||||
: ''
|
||||
};\n${
|
||||
// Add indexes - MySQL creates them separately from the table definition
|
||||
(() => {
|
||||
const validIndexes = table.indexes
|
||||
.map((index) => {
|
||||
// Get the list of fields for this index
|
||||
const indexFields = index.fieldIds
|
||||
.map((fieldId) => {
|
||||
const field = table.fields.find(
|
||||
(f) => f.id === fieldId
|
||||
);
|
||||
return field ? field : null;
|
||||
})
|
||||
.filter(Boolean);
|
||||
|
||||
return `${exportFieldComment(field.comments ?? '')} ${fieldName} ${typeWithSize}${notNull}${autoIncrement}${unique}${defaultValue}${comment}`;
|
||||
})
|
||||
.join(',\n')}${
|
||||
// Add PRIMARY KEY as table constraint
|
||||
primaryKeyFields.length > 0
|
||||
? `,\n PRIMARY KEY (${primaryKeyFields
|
||||
.map((f) => `\`${f.name}\``)
|
||||
.join(', ')})`
|
||||
: ''
|
||||
}\n)${
|
||||
// MySQL supports table comments
|
||||
table.comments
|
||||
? ` COMMENT='${escapeSQLComment(table.comments)}'`
|
||||
: ''
|
||||
};\n\n${
|
||||
// Add indexes - MySQL creates them separately from the table definition
|
||||
table.indexes
|
||||
.map((index) => {
|
||||
// Get the list of fields for this index
|
||||
const indexFields = index.fieldIds
|
||||
.map((fieldId) => {
|
||||
const field = table.fields.find(
|
||||
(f) => f.id === fieldId
|
||||
// Skip if this index exactly matches the primary key fields
|
||||
if (
|
||||
primaryKeyFields.length ===
|
||||
indexFields.length &&
|
||||
primaryKeyFields.every((pk) =>
|
||||
indexFields.some(
|
||||
(field) =>
|
||||
field && field.id === pk.id
|
||||
)
|
||||
)
|
||||
) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Create a unique index name by combining table name, field names, and a unique/non-unique indicator
|
||||
const fieldNamesForIndex = indexFields
|
||||
.map((field) => field?.name || '')
|
||||
.join('_');
|
||||
const uniqueIndicator = index.unique
|
||||
? '_unique'
|
||||
: '';
|
||||
const indexName = `\`idx_${table.name}_${fieldNamesForIndex}${uniqueIndicator}\``;
|
||||
|
||||
// Get the properly quoted field names
|
||||
const indexFieldNames = indexFields
|
||||
.map((field) =>
|
||||
field ? `\`${field.name}\`` : ''
|
||||
)
|
||||
.filter(Boolean);
|
||||
|
||||
// Check for text/blob fields that need special handling
|
||||
const hasTextOrBlob = indexFields.some(
|
||||
(field) => {
|
||||
const typeName =
|
||||
field?.type.name.toLowerCase() ||
|
||||
'';
|
||||
return (
|
||||
typeName === 'text' ||
|
||||
typeName === 'mediumtext' ||
|
||||
typeName === 'longtext' ||
|
||||
typeName === 'blob'
|
||||
);
|
||||
}
|
||||
);
|
||||
return field ? field : null;
|
||||
|
||||
// If there are TEXT/BLOB fields, need to add prefix length
|
||||
const indexFieldsWithPrefix = hasTextOrBlob
|
||||
? indexFieldNames.map((name) => {
|
||||
const field = indexFields.find(
|
||||
(f) => `\`${f?.name}\`` === name
|
||||
);
|
||||
if (!field) return name;
|
||||
|
||||
const typeName =
|
||||
field.type.name.toLowerCase();
|
||||
if (
|
||||
typeName === 'text' ||
|
||||
typeName === 'mediumtext' ||
|
||||
typeName === 'longtext' ||
|
||||
typeName === 'blob'
|
||||
) {
|
||||
// Add a prefix length for TEXT/BLOB fields (required in MySQL)
|
||||
return `${name}(255)`;
|
||||
}
|
||||
return name;
|
||||
})
|
||||
: indexFieldNames;
|
||||
|
||||
return indexFieldNames.length > 0
|
||||
? `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName} ON ${tableName} (${indexFieldsWithPrefix.join(', ')});`
|
||||
: '';
|
||||
})
|
||||
.filter(Boolean);
|
||||
|
||||
// Skip if this index exactly matches the primary key fields
|
||||
if (
|
||||
primaryKeyFields.length === indexFields.length &&
|
||||
primaryKeyFields.every((pk) =>
|
||||
indexFields.some(
|
||||
(field) => field && field.id === pk.id
|
||||
)
|
||||
)
|
||||
) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Create a unique index name by combining table name, field names, and a unique/non-unique indicator
|
||||
const fieldNamesForIndex = indexFields
|
||||
.map((field) => field?.name || '')
|
||||
.join('_');
|
||||
const uniqueIndicator = index.unique ? '_unique' : '';
|
||||
const indexName = `\`idx_${table.name}_${fieldNamesForIndex}${uniqueIndicator}\``;
|
||||
|
||||
// Get the properly quoted field names
|
||||
const indexFieldNames = indexFields
|
||||
.map((field) => (field ? `\`${field.name}\`` : ''))
|
||||
.filter(Boolean);
|
||||
|
||||
// Check for text/blob fields that need special handling
|
||||
const hasTextOrBlob = indexFields.some((field) => {
|
||||
const typeName =
|
||||
field?.type.name.toLowerCase() || '';
|
||||
return (
|
||||
typeName === 'text' ||
|
||||
typeName === 'mediumtext' ||
|
||||
typeName === 'longtext' ||
|
||||
typeName === 'blob'
|
||||
);
|
||||
});
|
||||
|
||||
// If there are TEXT/BLOB fields, need to add prefix length
|
||||
const indexFieldsWithPrefix = hasTextOrBlob
|
||||
? indexFieldNames.map((name) => {
|
||||
const field = indexFields.find(
|
||||
(f) => `\`${f?.name}\`` === name
|
||||
);
|
||||
if (!field) return name;
|
||||
|
||||
const typeName =
|
||||
field.type.name.toLowerCase();
|
||||
if (
|
||||
typeName === 'text' ||
|
||||
typeName === 'mediumtext' ||
|
||||
typeName === 'longtext' ||
|
||||
typeName === 'blob'
|
||||
) {
|
||||
// Add a prefix length for TEXT/BLOB fields (required in MySQL)
|
||||
return `${name}(255)`;
|
||||
}
|
||||
return name;
|
||||
})
|
||||
: indexFieldNames;
|
||||
|
||||
return indexFieldNames.length > 0
|
||||
? `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName}\nON ${tableName} (${indexFieldsWithPrefix.join(', ')});\n`
|
||||
return validIndexes.length > 0
|
||||
? `\n-- Indexes\n${validIndexes.join('\n')}`
|
||||
: '';
|
||||
})
|
||||
.filter(Boolean)
|
||||
.join('\n')
|
||||
}`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings (views)
|
||||
.join('\n');
|
||||
})()
|
||||
}\n`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings (views)
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
// Generate foreign keys
|
||||
if (relationships.length > 0) {
|
||||
sqlScript += '\n-- Foreign key constraints\n\n';
|
||||
sqlScript += '\n-- Foreign key constraints\n';
|
||||
|
||||
sqlScript += relationships
|
||||
const foreignKeys = relationships
|
||||
.map((r: DBRelationship) => {
|
||||
const sourceTable = tables.find(
|
||||
(t) => t.id === r.sourceTableId
|
||||
@@ -425,25 +452,62 @@ export function exportMySQL(diagram: Diagram): string {
|
||||
return '';
|
||||
}
|
||||
|
||||
const sourceTableName = sourceTable.schema
|
||||
? `\`${sourceTable.schema}\`.\`${sourceTable.name}\``
|
||||
: `\`${sourceTable.name}\``;
|
||||
const targetTableName = targetTable.schema
|
||||
? `\`${targetTable.schema}\`.\`${targetTable.name}\``
|
||||
: `\`${targetTable.name}\``;
|
||||
// Determine which table should have the foreign key based on cardinality
|
||||
let fkTable, fkField, refTable, refField;
|
||||
|
||||
if (
|
||||
r.sourceCardinality === 'one' &&
|
||||
r.targetCardinality === 'many'
|
||||
) {
|
||||
// FK goes on target table
|
||||
fkTable = targetTable;
|
||||
fkField = targetField;
|
||||
refTable = sourceTable;
|
||||
refField = sourceField;
|
||||
} else if (
|
||||
r.sourceCardinality === 'many' &&
|
||||
r.targetCardinality === 'one'
|
||||
) {
|
||||
// FK goes on source table
|
||||
fkTable = sourceTable;
|
||||
fkField = sourceField;
|
||||
refTable = targetTable;
|
||||
refField = targetField;
|
||||
} else if (
|
||||
r.sourceCardinality === 'one' &&
|
||||
r.targetCardinality === 'one'
|
||||
) {
|
||||
// For 1:1, FK can go on either side, but typically goes on the table that references the other
|
||||
// We'll keep the current behavior for 1:1
|
||||
fkTable = sourceTable;
|
||||
fkField = sourceField;
|
||||
refTable = targetTable;
|
||||
refField = targetField;
|
||||
} else {
|
||||
// Many-to-many relationships need a junction table, skip for now
|
||||
return '';
|
||||
}
|
||||
|
||||
const fkTableName = fkTable.schema
|
||||
? `\`${fkTable.schema}\`.\`${fkTable.name}\``
|
||||
: `\`${fkTable.name}\``;
|
||||
const refTableName = refTable.schema
|
||||
? `\`${refTable.schema}\`.\`${refTable.name}\``
|
||||
: `\`${refTable.name}\``;
|
||||
|
||||
// Create a descriptive constraint name
|
||||
const constraintName = `\`fk_${sourceTable.name}_${sourceField.name}\``;
|
||||
const constraintName = `\`fk_${fkTable.name}_${fkField.name}\``;
|
||||
|
||||
// MySQL supports ON DELETE and ON UPDATE actions
|
||||
return `ALTER TABLE ${sourceTableName}\nADD CONSTRAINT ${constraintName} FOREIGN KEY(\`${sourceField.name}\`) REFERENCES ${targetTableName}(\`${targetField.name}\`)\nON UPDATE CASCADE ON DELETE RESTRICT;\n`;
|
||||
return `ALTER TABLE ${fkTableName} ADD CONSTRAINT ${constraintName} FOREIGN KEY(\`${fkField.name}\`) REFERENCES ${refTableName}(\`${refField.name}\`);`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings
|
||||
.join('\n');
|
||||
.filter(Boolean); // Remove empty strings
|
||||
|
||||
sqlScript += foreignKeys.join('\n');
|
||||
}
|
||||
|
||||
// Commit transaction
|
||||
sqlScript += '\nCOMMIT;\n';
|
||||
sqlScript += '\n\nCOMMIT;\n';
|
||||
|
||||
return sqlScript;
|
||||
}
|
||||
|
||||
@@ -142,10 +142,16 @@ function exportCustomTypes(customTypes: DBCustomType[]): string {
|
||||
}
|
||||
});
|
||||
|
||||
return typesSql + '\n';
|
||||
return typesSql ? typesSql + '\n' : '';
|
||||
}
|
||||
|
||||
export function exportPostgreSQL(diagram: Diagram): string {
|
||||
export function exportPostgreSQL({
|
||||
diagram,
|
||||
onlyRelationships = false,
|
||||
}: {
|
||||
diagram: Diagram;
|
||||
onlyRelationships?: boolean;
|
||||
}): string {
|
||||
if (!diagram.tables || !diagram.relationships) {
|
||||
return '';
|
||||
}
|
||||
@@ -156,290 +162,391 @@ export function exportPostgreSQL(diagram: Diagram): string {
|
||||
|
||||
// Create CREATE SCHEMA statements for all schemas
|
||||
let sqlScript = '';
|
||||
const schemas = new Set<string>();
|
||||
if (!onlyRelationships) {
|
||||
const schemas = new Set<string>();
|
||||
|
||||
tables.forEach((table) => {
|
||||
if (table.schema) {
|
||||
schemas.add(table.schema);
|
||||
}
|
||||
});
|
||||
|
||||
// Also collect schemas from custom types
|
||||
customTypes.forEach((customType) => {
|
||||
if (customType.schema) {
|
||||
schemas.add(customType.schema);
|
||||
}
|
||||
});
|
||||
|
||||
// Add schema creation statements
|
||||
schemas.forEach((schema) => {
|
||||
sqlScript += `CREATE SCHEMA IF NOT EXISTS "${schema}";\n`;
|
||||
});
|
||||
sqlScript += '\n';
|
||||
|
||||
// Add custom types (enums and composite types)
|
||||
sqlScript += exportCustomTypes(customTypes);
|
||||
|
||||
// Add sequence creation statements
|
||||
const sequences = new Set<string>();
|
||||
|
||||
tables.forEach((table) => {
|
||||
table.fields.forEach((field) => {
|
||||
if (field.default) {
|
||||
// Match nextval('schema.sequence_name') or nextval('sequence_name')
|
||||
const match = field.default.match(
|
||||
/nextval\('([^']+)'(?:::[^)]+)?\)/
|
||||
);
|
||||
if (match) {
|
||||
sequences.add(match[1]);
|
||||
}
|
||||
tables.forEach((table) => {
|
||||
if (table.schema) {
|
||||
schemas.add(table.schema);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
sequences.forEach((sequence) => {
|
||||
sqlScript += `CREATE SEQUENCE IF NOT EXISTS ${sequence};\n`;
|
||||
});
|
||||
sqlScript += '\n';
|
||||
|
||||
// Generate table creation SQL
|
||||
sqlScript += tables
|
||||
.map((table: DBTable) => {
|
||||
// Skip views
|
||||
if (table.isView) {
|
||||
return '';
|
||||
// Also collect schemas from custom types
|
||||
customTypes.forEach((customType) => {
|
||||
if (customType.schema) {
|
||||
schemas.add(customType.schema);
|
||||
}
|
||||
});
|
||||
|
||||
const tableName = table.schema
|
||||
? `"${table.schema}"."${table.name}"`
|
||||
: `"${table.name}"`;
|
||||
// Add schema creation statements
|
||||
schemas.forEach((schema) => {
|
||||
sqlScript += `CREATE SCHEMA IF NOT EXISTS "${schema}";\n`;
|
||||
});
|
||||
if (schemas.size > 0) {
|
||||
sqlScript += '\n';
|
||||
}
|
||||
|
||||
// Get primary key fields
|
||||
const primaryKeyFields = table.fields.filter((f) => f.primaryKey);
|
||||
// Add custom types (enums and composite types)
|
||||
sqlScript += exportCustomTypes(customTypes);
|
||||
|
||||
return `${
|
||||
table.comments ? formatTableComment(table.comments) : ''
|
||||
}CREATE TABLE ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `"${field.name}"`;
|
||||
// Add sequence creation statements
|
||||
const sequences = new Set<string>();
|
||||
|
||||
// Handle type name - map problematic types to PostgreSQL compatible types
|
||||
const typeName = mapPostgresType(
|
||||
field.type.name,
|
||||
field.name
|
||||
tables.forEach((table) => {
|
||||
table.fields.forEach((field) => {
|
||||
if (field.default) {
|
||||
// Match nextval('schema.sequence_name') or nextval('sequence_name')
|
||||
const match = field.default.match(
|
||||
/nextval\('([^']+)'(?:::[^)]+)?\)/
|
||||
);
|
||||
|
||||
// Handle PostgreSQL specific type formatting
|
||||
let typeWithSize = typeName;
|
||||
let serialType = null;
|
||||
|
||||
if (field.increment && !field.nullable) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'integer' ||
|
||||
typeName.toLowerCase() === 'int'
|
||||
) {
|
||||
serialType = 'SERIAL';
|
||||
} else if (typeName.toLowerCase() === 'bigint') {
|
||||
serialType = 'BIGSERIAL';
|
||||
} else if (typeName.toLowerCase() === 'smallint') {
|
||||
serialType = 'SMALLSERIAL';
|
||||
}
|
||||
if (match) {
|
||||
sequences.add(match[1]);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
if (field.characterMaximumLength) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'varchar' ||
|
||||
typeName.toLowerCase() === 'character varying' ||
|
||||
typeName.toLowerCase() === 'char' ||
|
||||
typeName.toLowerCase() === 'character'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.characterMaximumLength})`;
|
||||
sequences.forEach((sequence) => {
|
||||
sqlScript += `CREATE SEQUENCE IF NOT EXISTS ${sequence};\n`;
|
||||
});
|
||||
if (sequences.size > 0) {
|
||||
sqlScript += '\n';
|
||||
}
|
||||
|
||||
// Generate table creation SQL
|
||||
sqlScript += tables
|
||||
.map((table: DBTable) => {
|
||||
// Skip views
|
||||
if (table.isView) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const tableName = table.schema
|
||||
? `"${table.schema}"."${table.name}"`
|
||||
: `"${table.name}"`;
|
||||
|
||||
// Get primary key fields
|
||||
const primaryKeyFields = table.fields.filter(
|
||||
(f) => f.primaryKey
|
||||
);
|
||||
|
||||
return `${
|
||||
table.comments ? formatTableComment(table.comments) : ''
|
||||
}CREATE TABLE ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `"${field.name}"`;
|
||||
|
||||
// Handle type name - map problematic types to PostgreSQL compatible types
|
||||
const typeName = mapPostgresType(
|
||||
field.type.name,
|
||||
field.name
|
||||
);
|
||||
|
||||
// Handle PostgreSQL specific type formatting
|
||||
let typeWithSize = typeName;
|
||||
let serialType = null;
|
||||
|
||||
if (field.increment && !field.nullable) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'integer' ||
|
||||
typeName.toLowerCase() === 'int'
|
||||
) {
|
||||
serialType = 'SERIAL';
|
||||
} else if (typeName.toLowerCase() === 'bigint') {
|
||||
serialType = 'BIGSERIAL';
|
||||
} else if (typeName.toLowerCase() === 'smallint') {
|
||||
serialType = 'SMALLSERIAL';
|
||||
}
|
||||
}
|
||||
} else if (field.precision && field.scale) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision}, ${field.scale})`;
|
||||
|
||||
if (field.characterMaximumLength) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'varchar' ||
|
||||
typeName.toLowerCase() ===
|
||||
'character varying' ||
|
||||
typeName.toLowerCase() === 'char' ||
|
||||
typeName.toLowerCase() === 'character'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.characterMaximumLength})`;
|
||||
}
|
||||
}
|
||||
} else if (field.precision) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision})`;
|
||||
if (field.precision && field.scale) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision}, ${field.scale})`;
|
||||
}
|
||||
} else if (field.precision) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision})`;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Handle array types (check if the type name ends with '[]')
|
||||
if (typeName.endsWith('[]')) {
|
||||
typeWithSize = typeWithSize.replace('[]', '') + '[]';
|
||||
}
|
||||
// Handle array types (check if the type name ends with '[]')
|
||||
if (typeName.endsWith('[]')) {
|
||||
typeWithSize =
|
||||
typeWithSize.replace('[]', '') + '[]';
|
||||
}
|
||||
|
||||
const notNull = field.nullable ? '' : ' NOT NULL';
|
||||
const notNull = field.nullable ? '' : ' NOT NULL';
|
||||
|
||||
// Handle identity generation
|
||||
let identity = '';
|
||||
if (field.default && field.default.includes('nextval')) {
|
||||
// PostgreSQL already handles this with DEFAULT nextval()
|
||||
} else if (
|
||||
field.default &&
|
||||
field.default.toLowerCase().includes('identity')
|
||||
) {
|
||||
identity = ' GENERATED BY DEFAULT AS IDENTITY';
|
||||
}
|
||||
// Handle identity generation
|
||||
let identity = '';
|
||||
if (
|
||||
field.default &&
|
||||
field.default.includes('nextval')
|
||||
) {
|
||||
// PostgreSQL already handles this with DEFAULT nextval()
|
||||
} else if (
|
||||
field.default &&
|
||||
field.default.toLowerCase().includes('identity')
|
||||
) {
|
||||
identity = ' GENERATED BY DEFAULT AS IDENTITY';
|
||||
}
|
||||
|
||||
// Only add UNIQUE constraint if the field is not part of the primary key
|
||||
// This avoids redundant uniqueness constraints
|
||||
const unique =
|
||||
!field.primaryKey && field.unique ? ' UNIQUE' : '';
|
||||
// Only add UNIQUE constraint if the field is not part of the primary key
|
||||
// This avoids redundant uniqueness constraints
|
||||
const unique =
|
||||
!field.primaryKey && field.unique ? ' UNIQUE' : '';
|
||||
|
||||
// Handle default value using PostgreSQL specific parser
|
||||
const defaultValue =
|
||||
field.default &&
|
||||
!field.default.toLowerCase().includes('identity')
|
||||
? ` DEFAULT ${parsePostgresDefault(field)}`
|
||||
: '';
|
||||
// Handle default value using PostgreSQL specific parser
|
||||
const defaultValue =
|
||||
field.default &&
|
||||
!field.default.toLowerCase().includes('identity')
|
||||
? ` DEFAULT ${parsePostgresDefault(field)}`
|
||||
: '';
|
||||
|
||||
// Do not add PRIMARY KEY as a column constraint - will add as table constraint
|
||||
return `${exportFieldComment(field.comments ?? '')} ${fieldName} ${serialType || typeWithSize}${serialType ? '' : notNull}${identity}${unique}${defaultValue}`;
|
||||
})
|
||||
.join(',\n')}${
|
||||
primaryKeyFields.length > 0
|
||||
? `,\n PRIMARY KEY (${primaryKeyFields
|
||||
.map((f) => `"${f.name}"`)
|
||||
.join(', ')})`
|
||||
: ''
|
||||
}\n);\n\n${
|
||||
// Add table comments
|
||||
table.comments
|
||||
? `COMMENT ON TABLE ${tableName} IS '${escapeSQLComment(table.comments)}';\n\n`
|
||||
: ''
|
||||
}${
|
||||
// Add column comments
|
||||
table.fields
|
||||
.filter((f) => f.comments)
|
||||
.map(
|
||||
(f) =>
|
||||
`COMMENT ON COLUMN ${tableName}."${f.name}" IS '${escapeSQLComment(f.comments || '')}';\n`
|
||||
)
|
||||
.join('')
|
||||
}\n${
|
||||
// Add indexes only for non-primary key fields or composite indexes
|
||||
// This avoids duplicate indexes on primary key columns
|
||||
table.indexes
|
||||
.map((index) => {
|
||||
// Get the list of fields for this index
|
||||
const indexFields = index.fieldIds
|
||||
.map((fieldId) => {
|
||||
const field = table.fields.find(
|
||||
(f) => f.id === fieldId
|
||||
// Do not add PRIMARY KEY as a column constraint - will add as table constraint
|
||||
return `${exportFieldComment(field.comments ?? '')} ${fieldName} ${serialType || typeWithSize}${serialType ? '' : notNull}${identity}${unique}${defaultValue}`;
|
||||
})
|
||||
.join(',\n')}${
|
||||
primaryKeyFields.length > 0
|
||||
? `,\n PRIMARY KEY (${primaryKeyFields
|
||||
.map((f) => `"${f.name}"`)
|
||||
.join(', ')})`
|
||||
: ''
|
||||
}\n);${
|
||||
// Add table comments
|
||||
table.comments
|
||||
? `\nCOMMENT ON TABLE ${tableName} IS '${escapeSQLComment(table.comments)}';`
|
||||
: ''
|
||||
}${
|
||||
// Add column comments
|
||||
table.fields
|
||||
.filter((f) => f.comments)
|
||||
.map(
|
||||
(f) =>
|
||||
`\nCOMMENT ON COLUMN ${tableName}."${f.name}" IS '${escapeSQLComment(f.comments || '')}';`
|
||||
)
|
||||
.join('')
|
||||
}${
|
||||
// Add indexes only for non-primary key fields or composite indexes
|
||||
// This avoids duplicate indexes on primary key columns
|
||||
(() => {
|
||||
const validIndexes = table.indexes
|
||||
.map((index) => {
|
||||
// Get the list of fields for this index
|
||||
const indexFields = index.fieldIds
|
||||
.map((fieldId) => {
|
||||
const field = table.fields.find(
|
||||
(f) => f.id === fieldId
|
||||
);
|
||||
return field ? field : null;
|
||||
})
|
||||
.filter(Boolean);
|
||||
|
||||
// Skip if this index exactly matches the primary key fields
|
||||
// This prevents creating redundant indexes
|
||||
if (
|
||||
primaryKeyFields.length ===
|
||||
indexFields.length &&
|
||||
primaryKeyFields.every((pk) =>
|
||||
indexFields.some(
|
||||
(field) =>
|
||||
field && field.id === pk.id
|
||||
)
|
||||
)
|
||||
) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Create unique index name using table name and index name
|
||||
// This ensures index names are unique across the database
|
||||
const safeTableName = table.name.replace(
|
||||
/[^a-zA-Z0-9_]/g,
|
||||
'_'
|
||||
);
|
||||
return field ? field : null;
|
||||
const safeIndexName = index.name.replace(
|
||||
/[^a-zA-Z0-9_]/g,
|
||||
'_'
|
||||
);
|
||||
|
||||
// Limit index name length to avoid PostgreSQL's 63-character identifier limit
|
||||
let combinedName = `${safeTableName}_${safeIndexName}`;
|
||||
if (combinedName.length > 60) {
|
||||
// If too long, use just the index name or a truncated version
|
||||
combinedName =
|
||||
safeIndexName.length > 60
|
||||
? safeIndexName.substring(0, 60)
|
||||
: safeIndexName;
|
||||
}
|
||||
|
||||
const indexName = `"${combinedName}"`;
|
||||
|
||||
// Get the properly quoted field names
|
||||
const indexFieldNames = indexFields
|
||||
.map((field) =>
|
||||
field ? `"${field.name}"` : ''
|
||||
)
|
||||
.filter(Boolean);
|
||||
|
||||
return indexFieldNames.length > 0
|
||||
? `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName} ON ${tableName} (${indexFieldNames.join(', ')});`
|
||||
: '';
|
||||
})
|
||||
.filter(Boolean);
|
||||
|
||||
// Skip if this index exactly matches the primary key fields
|
||||
// This prevents creating redundant indexes
|
||||
if (
|
||||
primaryKeyFields.length === indexFields.length &&
|
||||
primaryKeyFields.every((pk) =>
|
||||
indexFields.some(
|
||||
(field) => field && field.id === pk.id
|
||||
)
|
||||
)
|
||||
) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Create unique index name using table name and index name
|
||||
// This ensures index names are unique across the database
|
||||
const safeTableName = table.name.replace(
|
||||
/[^a-zA-Z0-9_]/g,
|
||||
'_'
|
||||
);
|
||||
const safeIndexName = index.name.replace(
|
||||
/[^a-zA-Z0-9_]/g,
|
||||
'_'
|
||||
);
|
||||
|
||||
// Limit index name length to avoid PostgreSQL's 63-character identifier limit
|
||||
let combinedName = `${safeTableName}_${safeIndexName}`;
|
||||
if (combinedName.length > 60) {
|
||||
// If too long, use just the index name or a truncated version
|
||||
combinedName =
|
||||
safeIndexName.length > 60
|
||||
? safeIndexName.substring(0, 60)
|
||||
: safeIndexName;
|
||||
}
|
||||
|
||||
const indexName = `"${combinedName}"`;
|
||||
|
||||
// Get the properly quoted field names
|
||||
const indexFieldNames = indexFields
|
||||
.map((field) => (field ? `"${field.name}"` : ''))
|
||||
.filter(Boolean);
|
||||
|
||||
return indexFieldNames.length > 0
|
||||
? `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName}\nON ${tableName} (${indexFieldNames.join(', ')});\n\n`
|
||||
return validIndexes.length > 0
|
||||
? `\n-- Indexes\n${validIndexes.join('\n')}`
|
||||
: '';
|
||||
})
|
||||
.filter(Boolean)
|
||||
.join('')
|
||||
}`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings (views)
|
||||
.join('\n');
|
||||
})()
|
||||
}\n`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings (views)
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
// Generate foreign keys
|
||||
sqlScript += `\n${relationships
|
||||
.map((r: DBRelationship) => {
|
||||
const sourceTable = tables.find((t) => t.id === r.sourceTableId);
|
||||
const targetTable = tables.find((t) => t.id === r.targetTableId);
|
||||
if (relationships.length > 0) {
|
||||
sqlScript += '\n-- Foreign key constraints\n';
|
||||
|
||||
if (
|
||||
!sourceTable ||
|
||||
!targetTable ||
|
||||
sourceTable.isView ||
|
||||
targetTable.isView
|
||||
) {
|
||||
return '';
|
||||
}
|
||||
// Process all relationships and create FK objects with schema info
|
||||
const foreignKeys = relationships
|
||||
.map((r: DBRelationship) => {
|
||||
const sourceTable = tables.find(
|
||||
(t) => t.id === r.sourceTableId
|
||||
);
|
||||
const targetTable = tables.find(
|
||||
(t) => t.id === r.targetTableId
|
||||
);
|
||||
|
||||
const sourceField = sourceTable.fields.find(
|
||||
(f) => f.id === r.sourceFieldId
|
||||
);
|
||||
const targetField = targetTable.fields.find(
|
||||
(f) => f.id === r.targetFieldId
|
||||
);
|
||||
if (
|
||||
!sourceTable ||
|
||||
!targetTable ||
|
||||
sourceTable.isView ||
|
||||
targetTable.isView
|
||||
) {
|
||||
return '';
|
||||
}
|
||||
|
||||
if (!sourceField || !targetField) {
|
||||
return '';
|
||||
}
|
||||
const sourceField = sourceTable.fields.find(
|
||||
(f) => f.id === r.sourceFieldId
|
||||
);
|
||||
const targetField = targetTable.fields.find(
|
||||
(f) => f.id === r.targetFieldId
|
||||
);
|
||||
|
||||
const sourceTableName = sourceTable.schema
|
||||
? `"${sourceTable.schema}"."${sourceTable.name}"`
|
||||
: `"${sourceTable.name}"`;
|
||||
const targetTableName = targetTable.schema
|
||||
? `"${targetTable.schema}"."${targetTable.name}"`
|
||||
: `"${targetTable.name}"`;
|
||||
if (!sourceField || !targetField) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Create a unique constraint name by combining table and field names
|
||||
// Ensure it stays within PostgreSQL's 63-character limit for identifiers
|
||||
// and doesn't get truncated in a way that breaks SQL syntax
|
||||
const baseName = `fk_${sourceTable.name}_${sourceField.name}_${targetTable.name}_${targetField.name}`;
|
||||
// Limit to 60 chars (63 minus quotes) to ensure the whole identifier stays within limits
|
||||
const safeConstraintName =
|
||||
baseName.length > 60
|
||||
? baseName.substring(0, 60).replace(/[^a-zA-Z0-9_]/g, '_')
|
||||
: baseName.replace(/[^a-zA-Z0-9_]/g, '_');
|
||||
// Determine which table should have the foreign key based on cardinality
|
||||
let fkTable, fkField, refTable, refField;
|
||||
|
||||
const constraintName = `"${safeConstraintName}"`;
|
||||
if (
|
||||
r.sourceCardinality === 'one' &&
|
||||
r.targetCardinality === 'many'
|
||||
) {
|
||||
// FK goes on target table
|
||||
fkTable = targetTable;
|
||||
fkField = targetField;
|
||||
refTable = sourceTable;
|
||||
refField = sourceField;
|
||||
} else if (
|
||||
r.sourceCardinality === 'many' &&
|
||||
r.targetCardinality === 'one'
|
||||
) {
|
||||
// FK goes on source table
|
||||
fkTable = sourceTable;
|
||||
fkField = sourceField;
|
||||
refTable = targetTable;
|
||||
refField = targetField;
|
||||
} else if (
|
||||
r.sourceCardinality === 'one' &&
|
||||
r.targetCardinality === 'one'
|
||||
) {
|
||||
// For 1:1, FK can go on either side, but typically goes on the table that references the other
|
||||
// We'll keep the current behavior for 1:1
|
||||
fkTable = sourceTable;
|
||||
fkField = sourceField;
|
||||
refTable = targetTable;
|
||||
refField = targetField;
|
||||
} else {
|
||||
// Many-to-many relationships need a junction table, skip for now
|
||||
return '';
|
||||
}
|
||||
|
||||
return `ALTER TABLE ${sourceTableName}\nADD CONSTRAINT ${constraintName} FOREIGN KEY("${sourceField.name}") REFERENCES ${targetTableName}("${targetField.name}");\n`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings
|
||||
.join('\n')}`;
|
||||
const fkTableName = fkTable.schema
|
||||
? `"${fkTable.schema}"."${fkTable.name}"`
|
||||
: `"${fkTable.name}"`;
|
||||
const refTableName = refTable.schema
|
||||
? `"${refTable.schema}"."${refTable.name}"`
|
||||
: `"${refTable.name}"`;
|
||||
|
||||
// Create a unique constraint name by combining table and field names
|
||||
// Ensure it stays within PostgreSQL's 63-character limit for identifiers
|
||||
// and doesn't get truncated in a way that breaks SQL syntax
|
||||
const baseName = `fk_${fkTable.name}_${fkField.name}_${refTable.name}_${refField.name}`;
|
||||
// Limit to 60 chars (63 minus quotes) to ensure the whole identifier stays within limits
|
||||
const safeConstraintName =
|
||||
baseName.length > 60
|
||||
? baseName
|
||||
.substring(0, 60)
|
||||
.replace(/[^a-zA-Z0-9_]/g, '_')
|
||||
: baseName.replace(/[^a-zA-Z0-9_]/g, '_');
|
||||
|
||||
const constraintName = `"${safeConstraintName}"`;
|
||||
|
||||
return {
|
||||
schema: fkTable.schema || 'public',
|
||||
sql: `ALTER TABLE ${fkTableName} ADD CONSTRAINT ${constraintName} FOREIGN KEY("${fkField.name}") REFERENCES ${refTableName}("${refField.name}");`,
|
||||
};
|
||||
})
|
||||
.filter(Boolean); // Remove empty objects
|
||||
|
||||
// Group foreign keys by schema
|
||||
const fksBySchema = foreignKeys.reduce(
|
||||
(acc, fk) => {
|
||||
if (!fk) return acc;
|
||||
const schema = fk.schema;
|
||||
if (!acc[schema]) {
|
||||
acc[schema] = [];
|
||||
}
|
||||
acc[schema].push(fk.sql);
|
||||
return acc;
|
||||
},
|
||||
{} as Record<string, string[]>
|
||||
);
|
||||
|
||||
// Sort schemas and generate SQL with separators
|
||||
const sortedSchemas = Object.keys(fksBySchema).sort();
|
||||
const fkSql = sortedSchemas
|
||||
.map((schema, index) => {
|
||||
const schemaFks = fksBySchema[schema].join('\n');
|
||||
if (index === 0) {
|
||||
return `-- Schema: ${schema}\n${schemaFks}`;
|
||||
} else {
|
||||
return `\n-- Schema: ${schema}\n${schemaFks}`;
|
||||
}
|
||||
})
|
||||
.join('\n');
|
||||
|
||||
sqlScript += fkSql;
|
||||
}
|
||||
|
||||
return sqlScript;
|
||||
}
|
||||
|
||||
@@ -140,7 +140,13 @@ function mapSQLiteType(typeName: string, isPrimaryKey: boolean): string {
|
||||
return typeName;
|
||||
}
|
||||
|
||||
export function exportSQLite(diagram: Diagram): string {
|
||||
export function exportSQLite({
|
||||
diagram,
|
||||
onlyRelationships = false,
|
||||
}: {
|
||||
diagram: Diagram;
|
||||
onlyRelationships?: boolean;
|
||||
}): string {
|
||||
if (!diagram.tables || !diagram.relationships) {
|
||||
return '';
|
||||
}
|
||||
@@ -149,10 +155,10 @@ export function exportSQLite(diagram: Diagram): string {
|
||||
const relationships = diagram.relationships;
|
||||
|
||||
// Start SQL script - SQLite doesn't use schemas, so we skip schema creation
|
||||
let sqlScript = '-- SQLite database export\n\n';
|
||||
let sqlScript = '-- SQLite database export\n';
|
||||
|
||||
// Begin transaction for faster import
|
||||
sqlScript += 'BEGIN TRANSACTION;\n\n';
|
||||
sqlScript += 'BEGIN TRANSACTION;\n';
|
||||
|
||||
// SQLite doesn't have sequences, so we skip sequence creation
|
||||
|
||||
@@ -166,151 +172,167 @@ export function exportSQLite(diagram: Diagram): string {
|
||||
'sqlite_master',
|
||||
];
|
||||
|
||||
// Generate table creation SQL
|
||||
sqlScript += tables
|
||||
.map((table: DBTable) => {
|
||||
// Skip views
|
||||
if (table.isView) {
|
||||
return '';
|
||||
}
|
||||
if (!onlyRelationships) {
|
||||
// Generate table creation SQL
|
||||
sqlScript += tables
|
||||
.map((table: DBTable) => {
|
||||
// Skip views
|
||||
if (table.isView) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Skip SQLite system tables
|
||||
if (sqliteSystemTables.includes(table.name.toLowerCase())) {
|
||||
return `-- Skipping SQLite system table: "${table.name}"\n`;
|
||||
}
|
||||
// Skip SQLite system tables
|
||||
if (sqliteSystemTables.includes(table.name.toLowerCase())) {
|
||||
return `-- Skipping SQLite system table: "${table.name}"\n`;
|
||||
}
|
||||
|
||||
// SQLite doesn't use schema prefixes, so we use just the table name
|
||||
// Include the schema in a comment if it exists
|
||||
const schemaComment = table.schema
|
||||
? `-- Original schema: ${table.schema}\n`
|
||||
: '';
|
||||
const tableName = `"${table.name}"`;
|
||||
// SQLite doesn't use schema prefixes, so we use just the table name
|
||||
// Include the schema in a comment if it exists
|
||||
const schemaComment = table.schema
|
||||
? `-- Original schema: ${table.schema}\n`
|
||||
: '';
|
||||
const tableName = `"${table.name}"`;
|
||||
|
||||
// Get primary key fields
|
||||
const primaryKeyFields = table.fields.filter((f) => f.primaryKey);
|
||||
// Get primary key fields
|
||||
const primaryKeyFields = table.fields.filter(
|
||||
(f) => f.primaryKey
|
||||
);
|
||||
|
||||
// Check if this is a single-column INTEGER PRIMARY KEY (for AUTOINCREMENT)
|
||||
const singleIntegerPrimaryKey =
|
||||
primaryKeyFields.length === 1 &&
|
||||
(primaryKeyFields[0].type.name.toLowerCase() === 'integer' ||
|
||||
primaryKeyFields[0].type.name.toLowerCase() === 'int');
|
||||
// Check if this is a single-column INTEGER PRIMARY KEY (for AUTOINCREMENT)
|
||||
const singleIntegerPrimaryKey =
|
||||
primaryKeyFields.length === 1 &&
|
||||
(primaryKeyFields[0].type.name.toLowerCase() ===
|
||||
'integer' ||
|
||||
primaryKeyFields[0].type.name.toLowerCase() === 'int');
|
||||
|
||||
return `${schemaComment}${
|
||||
table.comments ? formatTableComment(table.comments) : ''
|
||||
}CREATE TABLE IF NOT EXISTS ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `"${field.name}"`;
|
||||
return `${schemaComment}${
|
||||
table.comments ? formatTableComment(table.comments) : ''
|
||||
}CREATE TABLE IF NOT EXISTS ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `"${field.name}"`;
|
||||
|
||||
// Handle type name - map to SQLite compatible types
|
||||
const typeName = mapSQLiteType(
|
||||
field.type.name,
|
||||
field.primaryKey
|
||||
);
|
||||
// Handle type name - map to SQLite compatible types
|
||||
const typeName = mapSQLiteType(
|
||||
field.type.name,
|
||||
field.primaryKey
|
||||
);
|
||||
|
||||
// SQLite ignores length specifiers, so we don't add them
|
||||
// We'll keep this simple without size info
|
||||
const typeWithoutSize = typeName;
|
||||
// SQLite ignores length specifiers, so we don't add them
|
||||
// We'll keep this simple without size info
|
||||
const typeWithoutSize = typeName;
|
||||
|
||||
const notNull = field.nullable ? '' : ' NOT NULL';
|
||||
const notNull = field.nullable ? '' : ' NOT NULL';
|
||||
|
||||
// Handle autoincrement - only works with INTEGER PRIMARY KEY
|
||||
let autoIncrement = '';
|
||||
if (
|
||||
field.primaryKey &&
|
||||
singleIntegerPrimaryKey &&
|
||||
(field.default?.toLowerCase().includes('identity') ||
|
||||
field.default
|
||||
// Handle autoincrement - only works with INTEGER PRIMARY KEY
|
||||
let autoIncrement = '';
|
||||
if (
|
||||
field.primaryKey &&
|
||||
singleIntegerPrimaryKey &&
|
||||
(field.default
|
||||
?.toLowerCase()
|
||||
.includes('autoincrement') ||
|
||||
field.default?.includes('nextval'))
|
||||
) {
|
||||
autoIncrement = ' AUTOINCREMENT';
|
||||
}
|
||||
|
||||
// Only add UNIQUE constraint if the field is not part of the primary key
|
||||
const unique =
|
||||
!field.primaryKey && field.unique ? ' UNIQUE' : '';
|
||||
|
||||
// Handle default value - Special handling for datetime() function
|
||||
let defaultValue = '';
|
||||
if (
|
||||
field.default &&
|
||||
!field.default.toLowerCase().includes('identity') &&
|
||||
!field.default
|
||||
.toLowerCase()
|
||||
.includes('autoincrement') &&
|
||||
!field.default.includes('nextval')
|
||||
) {
|
||||
// Special handling for quoted functions like 'datetime(\'\'now\'\')' - remove extra quotes
|
||||
if (field.default.includes("datetime(''now'')")) {
|
||||
defaultValue = ' DEFAULT CURRENT_TIMESTAMP';
|
||||
} else {
|
||||
defaultValue = ` DEFAULT ${parseSQLiteDefault(field)}`;
|
||||
.includes('identity') ||
|
||||
field.default
|
||||
?.toLowerCase()
|
||||
.includes('autoincrement') ||
|
||||
field.default?.includes('nextval'))
|
||||
) {
|
||||
autoIncrement = ' AUTOINCREMENT';
|
||||
}
|
||||
}
|
||||
|
||||
// Add PRIMARY KEY inline only for single INTEGER primary key
|
||||
const primaryKey =
|
||||
field.primaryKey && singleIntegerPrimaryKey
|
||||
? ' PRIMARY KEY' + autoIncrement
|
||||
: '';
|
||||
// Only add UNIQUE constraint if the field is not part of the primary key
|
||||
const unique =
|
||||
!field.primaryKey && field.unique ? ' UNIQUE' : '';
|
||||
|
||||
return `${exportFieldComment(field.comments ?? '')} ${fieldName} ${typeWithoutSize}${primaryKey}${notNull}${unique}${defaultValue}`;
|
||||
})
|
||||
.join(',\n')}${
|
||||
// Add PRIMARY KEY as table constraint for composite primary keys or non-INTEGER primary keys
|
||||
primaryKeyFields.length > 0 && !singleIntegerPrimaryKey
|
||||
? `,\n PRIMARY KEY (${primaryKeyFields
|
||||
.map((f) => `"${f.name}"`)
|
||||
.join(', ')})`
|
||||
: ''
|
||||
}\n);\n\n${
|
||||
// Add indexes - SQLite doesn't support indexes in CREATE TABLE
|
||||
table.indexes
|
||||
.map((index) => {
|
||||
// Skip indexes that exactly match the primary key
|
||||
const indexFields = index.fieldIds
|
||||
.map((fieldId) => {
|
||||
const field = table.fields.find(
|
||||
(f) => f.id === fieldId
|
||||
);
|
||||
return field ? field : null;
|
||||
// Handle default value - Special handling for datetime() function
|
||||
let defaultValue = '';
|
||||
if (
|
||||
field.default &&
|
||||
!field.default.toLowerCase().includes('identity') &&
|
||||
!field.default
|
||||
.toLowerCase()
|
||||
.includes('autoincrement') &&
|
||||
!field.default.includes('nextval')
|
||||
) {
|
||||
// Special handling for quoted functions like 'datetime(\'\'now\'\')' - remove extra quotes
|
||||
if (field.default.includes("datetime(''now'')")) {
|
||||
defaultValue = ' DEFAULT CURRENT_TIMESTAMP';
|
||||
} else {
|
||||
defaultValue = ` DEFAULT ${parseSQLiteDefault(field)}`;
|
||||
}
|
||||
}
|
||||
|
||||
// Add PRIMARY KEY inline only for single INTEGER primary key
|
||||
const primaryKey =
|
||||
field.primaryKey && singleIntegerPrimaryKey
|
||||
? ' PRIMARY KEY' + autoIncrement
|
||||
: '';
|
||||
|
||||
return `${exportFieldComment(field.comments ?? '')} ${fieldName} ${typeWithoutSize}${primaryKey}${notNull}${unique}${defaultValue}`;
|
||||
})
|
||||
.join(',\n')}${
|
||||
// Add PRIMARY KEY as table constraint for composite primary keys or non-INTEGER primary keys
|
||||
primaryKeyFields.length > 0 && !singleIntegerPrimaryKey
|
||||
? `,\n PRIMARY KEY (${primaryKeyFields
|
||||
.map((f) => `"${f.name}"`)
|
||||
.join(', ')})`
|
||||
: ''
|
||||
}\n);\n${
|
||||
// Add indexes - SQLite doesn't support indexes in CREATE TABLE
|
||||
(() => {
|
||||
const validIndexes = table.indexes
|
||||
.map((index) => {
|
||||
// Skip indexes that exactly match the primary key
|
||||
const indexFields = index.fieldIds
|
||||
.map((fieldId) => {
|
||||
const field = table.fields.find(
|
||||
(f) => f.id === fieldId
|
||||
);
|
||||
return field ? field : null;
|
||||
})
|
||||
.filter(Boolean);
|
||||
|
||||
// Get the properly quoted field names
|
||||
const indexFieldNames = indexFields
|
||||
.map((field) =>
|
||||
field ? `"${field.name}"` : ''
|
||||
)
|
||||
.filter(Boolean);
|
||||
|
||||
// Skip if this index exactly matches the primary key fields
|
||||
if (
|
||||
primaryKeyFields.length ===
|
||||
indexFields.length &&
|
||||
primaryKeyFields.every((pk) =>
|
||||
indexFields.some(
|
||||
(field) =>
|
||||
field && field.id === pk.id
|
||||
)
|
||||
)
|
||||
) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Create safe index name
|
||||
const safeIndexName =
|
||||
`${table.name}_${index.name}`
|
||||
.replace(/[^a-zA-Z0-9_]/g, '_')
|
||||
.substring(0, 60);
|
||||
|
||||
return indexFieldNames.length > 0
|
||||
? `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX IF NOT EXISTS "${safeIndexName}"\nON ${tableName} (${indexFieldNames.join(', ')});`
|
||||
: '';
|
||||
})
|
||||
.filter(Boolean);
|
||||
|
||||
// Get the properly quoted field names
|
||||
const indexFieldNames = indexFields
|
||||
.map((field) => (field ? `"${field.name}"` : ''))
|
||||
.filter(Boolean);
|
||||
|
||||
// Skip if this index exactly matches the primary key fields
|
||||
if (
|
||||
primaryKeyFields.length === indexFields.length &&
|
||||
primaryKeyFields.every((pk) =>
|
||||
indexFields.some(
|
||||
(field) => field && field.id === pk.id
|
||||
)
|
||||
)
|
||||
) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Create safe index name
|
||||
const safeIndexName = `${table.name}_${index.name}`
|
||||
.replace(/[^a-zA-Z0-9_]/g, '_')
|
||||
.substring(0, 60);
|
||||
|
||||
return indexFieldNames.length > 0
|
||||
? `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX IF NOT EXISTS "${safeIndexName}"\nON ${tableName} (${indexFieldNames.join(', ')});\n`
|
||||
return validIndexes.length > 0
|
||||
? `\n-- Indexes\n${validIndexes.join('\n')}`
|
||||
: '';
|
||||
})
|
||||
.filter(Boolean)
|
||||
.join('\n')
|
||||
}`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings (views)
|
||||
.join('\n');
|
||||
|
||||
})()
|
||||
}\n`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings (views)
|
||||
.join('\n');
|
||||
}
|
||||
// Generate table constraints and triggers for foreign keys
|
||||
// SQLite handles foreign keys differently - we'll add them with CREATE TABLE statements
|
||||
// But we'll also provide individual ALTER TABLE statements as comments for reference
|
||||
@@ -319,7 +341,7 @@ export function exportSQLite(diagram: Diagram): string {
|
||||
sqlScript += '\n-- Foreign key constraints\n';
|
||||
sqlScript +=
|
||||
'-- Note: SQLite requires foreign_keys pragma to be enabled:\n';
|
||||
sqlScript += '-- PRAGMA foreign_keys = ON;\n\n';
|
||||
sqlScript += '-- PRAGMA foreign_keys = ON;\n';
|
||||
|
||||
relationships.forEach((r: DBRelationship) => {
|
||||
const sourceTable = tables.find((t) => t.id === r.sourceTableId);
|
||||
@@ -347,8 +369,44 @@ export function exportSQLite(diagram: Diagram): string {
|
||||
return;
|
||||
}
|
||||
|
||||
// Determine which table should have the foreign key based on cardinality
|
||||
let fkTable, fkField, refTable, refField;
|
||||
|
||||
if (
|
||||
r.sourceCardinality === 'one' &&
|
||||
r.targetCardinality === 'many'
|
||||
) {
|
||||
// FK goes on target table
|
||||
fkTable = targetTable;
|
||||
fkField = targetField;
|
||||
refTable = sourceTable;
|
||||
refField = sourceField;
|
||||
} else if (
|
||||
r.sourceCardinality === 'many' &&
|
||||
r.targetCardinality === 'one'
|
||||
) {
|
||||
// FK goes on source table
|
||||
fkTable = sourceTable;
|
||||
fkField = sourceField;
|
||||
refTable = targetTable;
|
||||
refField = targetField;
|
||||
} else if (
|
||||
r.sourceCardinality === 'one' &&
|
||||
r.targetCardinality === 'one'
|
||||
) {
|
||||
// For 1:1, FK can go on either side, but typically goes on the table that references the other
|
||||
// We'll keep the current behavior for 1:1
|
||||
fkTable = sourceTable;
|
||||
fkField = sourceField;
|
||||
refTable = targetTable;
|
||||
refField = targetField;
|
||||
} else {
|
||||
// Many-to-many relationships need a junction table, skip for now
|
||||
return;
|
||||
}
|
||||
|
||||
// Create commented out version of what would be ALTER TABLE statement
|
||||
sqlScript += `-- ALTER TABLE "${sourceTable.name}" ADD CONSTRAINT "fk_${sourceTable.name}_${sourceField.name}" FOREIGN KEY("${sourceField.name}") REFERENCES "${targetTable.name}"("${targetField.name}");\n`;
|
||||
sqlScript += `-- ALTER TABLE "${fkTable.name}" ADD CONSTRAINT "fk_${fkTable.name}_${fkField.name}" FOREIGN KEY("${fkField.name}") REFERENCES "${refTable.name}"("${refField.name}");\n`;
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -11,23 +11,7 @@ import { exportMySQL } from './export-per-type/mysql';
|
||||
|
||||
// Function to simplify verbose data type names
|
||||
const simplifyDataType = (typeName: string): string => {
|
||||
const typeMap: Record<string, string> = {
|
||||
'character varying': 'varchar',
|
||||
'char varying': 'varchar',
|
||||
integer: 'int',
|
||||
int4: 'int',
|
||||
int8: 'bigint',
|
||||
serial4: 'serial',
|
||||
serial8: 'bigserial',
|
||||
float8: 'double precision',
|
||||
float4: 'real',
|
||||
bool: 'boolean',
|
||||
character: 'char',
|
||||
'timestamp without time zone': 'timestamp',
|
||||
'timestamp with time zone': 'timestamptz',
|
||||
'time without time zone': 'time',
|
||||
'time with time zone': 'timetz',
|
||||
};
|
||||
const typeMap: Record<string, string> = {};
|
||||
|
||||
return typeMap[typeName.toLowerCase()] || typeName;
|
||||
};
|
||||
@@ -36,10 +20,12 @@ export const exportBaseSQL = ({
|
||||
diagram,
|
||||
targetDatabaseType,
|
||||
isDBMLFlow = false,
|
||||
onlyRelationships = false,
|
||||
}: {
|
||||
diagram: Diagram;
|
||||
targetDatabaseType: DatabaseType;
|
||||
isDBMLFlow?: boolean;
|
||||
onlyRelationships?: boolean;
|
||||
}): string => {
|
||||
const { tables, relationships } = diagram;
|
||||
|
||||
@@ -50,16 +36,16 @@ export const exportBaseSQL = ({
|
||||
if (!isDBMLFlow && diagram.databaseType === targetDatabaseType) {
|
||||
switch (diagram.databaseType) {
|
||||
case DatabaseType.SQL_SERVER:
|
||||
return exportMSSQL(diagram);
|
||||
return exportMSSQL({ diagram, onlyRelationships });
|
||||
case DatabaseType.POSTGRESQL:
|
||||
return exportPostgreSQL(diagram);
|
||||
return exportPostgreSQL({ diagram, onlyRelationships });
|
||||
case DatabaseType.SQLITE:
|
||||
return exportSQLite(diagram);
|
||||
return exportSQLite({ diagram, onlyRelationships });
|
||||
case DatabaseType.MYSQL:
|
||||
case DatabaseType.MARIADB:
|
||||
return exportMySQL(diagram);
|
||||
return exportMySQL({ diagram, onlyRelationships });
|
||||
default:
|
||||
return exportPostgreSQL(diagram);
|
||||
return exportPostgreSQL({ diagram, onlyRelationships });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -131,7 +117,23 @@ export const exportBaseSQL = ({
|
||||
}
|
||||
}
|
||||
});
|
||||
sqlScript += '\n'; // Add a newline if custom types were processed
|
||||
if (
|
||||
diagram.customTypes.some(
|
||||
(ct) =>
|
||||
(ct.kind === 'enum' &&
|
||||
ct.values &&
|
||||
ct.values.length > 0 &&
|
||||
targetDatabaseType === DatabaseType.POSTGRESQL &&
|
||||
!isDBMLFlow) ||
|
||||
(ct.kind === 'composite' &&
|
||||
ct.fields &&
|
||||
ct.fields.length > 0 &&
|
||||
(targetDatabaseType === DatabaseType.POSTGRESQL ||
|
||||
isDBMLFlow))
|
||||
)
|
||||
) {
|
||||
sqlScript += '\n';
|
||||
}
|
||||
}
|
||||
|
||||
// Add CREATE SEQUENCE statements
|
||||
@@ -154,7 +156,9 @@ export const exportBaseSQL = ({
|
||||
sequences.forEach((sequence) => {
|
||||
sqlScript += `CREATE SEQUENCE IF NOT EXISTS ${sequence};\n`;
|
||||
});
|
||||
sqlScript += '\n';
|
||||
if (sequences.size > 0) {
|
||||
sqlScript += '\n';
|
||||
}
|
||||
|
||||
// Loop through each non-view table to generate the SQL statements
|
||||
nonViewTables.forEach((table) => {
|
||||
@@ -234,7 +238,8 @@ export const exportBaseSQL = ({
|
||||
// Add size for character types
|
||||
if (
|
||||
field.characterMaximumLength &&
|
||||
parseInt(field.characterMaximumLength) > 0
|
||||
parseInt(field.characterMaximumLength) > 0 &&
|
||||
field.type.name.toLowerCase() !== 'decimal'
|
||||
) {
|
||||
sqlScript += `(${field.characterMaximumLength})`;
|
||||
} else if (field.type.name.toLowerCase().includes('varchar')) {
|
||||
@@ -316,7 +321,7 @@ export const exportBaseSQL = ({
|
||||
sqlScript += `\n PRIMARY KEY (${pkFieldNames})`;
|
||||
}
|
||||
|
||||
sqlScript += '\n);\n\n';
|
||||
sqlScript += '\n);\n';
|
||||
|
||||
// Add table comment
|
||||
if (table.comments) {
|
||||
@@ -341,16 +346,19 @@ export const exportBaseSQL = ({
|
||||
.join(', ');
|
||||
|
||||
if (fieldNames) {
|
||||
const indexName = table.schema
|
||||
? `${table.schema}_${index.name}`
|
||||
: index.name;
|
||||
const indexName =
|
||||
table.schema && !isDBMLFlow
|
||||
? `${table.schema}_${index.name}`
|
||||
: index.name;
|
||||
sqlScript += `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName} ON ${tableName} (${fieldNames});\n`;
|
||||
}
|
||||
});
|
||||
|
||||
sqlScript += '\n';
|
||||
});
|
||||
|
||||
if (nonViewTables.length > 0 && (relationships?.length ?? 0) > 0) {
|
||||
sqlScript += '\n';
|
||||
}
|
||||
|
||||
// Handle relationships (foreign keys)
|
||||
relationships?.forEach((relationship) => {
|
||||
const sourceTable = nonViewTables.find(
|
||||
@@ -373,13 +381,52 @@ export const exportBaseSQL = ({
|
||||
sourceTableField &&
|
||||
targetTableField
|
||||
) {
|
||||
const sourceTableName = sourceTable.schema
|
||||
? `${sourceTable.schema}.${sourceTable.name}`
|
||||
: sourceTable.name;
|
||||
const targetTableName = targetTable.schema
|
||||
? `${targetTable.schema}.${targetTable.name}`
|
||||
: targetTable.name;
|
||||
sqlScript += `ALTER TABLE ${sourceTableName} ADD CONSTRAINT ${relationship.name} FOREIGN KEY (${sourceTableField.name}) REFERENCES ${targetTableName} (${targetTableField.name});\n`;
|
||||
// Determine which table should have the foreign key based on cardinality
|
||||
// In a 1:many relationship, the foreign key goes on the "many" side
|
||||
// If source is "one" and target is "many", FK goes on target table
|
||||
// If source is "many" and target is "one", FK goes on source table
|
||||
let fkTable, fkField, refTable, refField;
|
||||
|
||||
if (
|
||||
relationship.sourceCardinality === 'one' &&
|
||||
relationship.targetCardinality === 'many'
|
||||
) {
|
||||
// FK goes on target table
|
||||
fkTable = targetTable;
|
||||
fkField = targetTableField;
|
||||
refTable = sourceTable;
|
||||
refField = sourceTableField;
|
||||
} else if (
|
||||
relationship.sourceCardinality === 'many' &&
|
||||
relationship.targetCardinality === 'one'
|
||||
) {
|
||||
// FK goes on source table
|
||||
fkTable = sourceTable;
|
||||
fkField = sourceTableField;
|
||||
refTable = targetTable;
|
||||
refField = targetTableField;
|
||||
} else if (
|
||||
relationship.sourceCardinality === 'one' &&
|
||||
relationship.targetCardinality === 'one'
|
||||
) {
|
||||
// For 1:1, FK can go on either side, but typically goes on the table that references the other
|
||||
// We'll keep the current behavior for 1:1
|
||||
fkTable = sourceTable;
|
||||
fkField = sourceTableField;
|
||||
refTable = targetTable;
|
||||
refField = targetTableField;
|
||||
} else {
|
||||
// Many-to-many relationships need a junction table, skip for now
|
||||
return;
|
||||
}
|
||||
|
||||
const fkTableName = fkTable.schema
|
||||
? `${fkTable.schema}.${fkTable.name}`
|
||||
: fkTable.name;
|
||||
const refTableName = refTable.schema
|
||||
? `${refTable.schema}.${refTable.name}`
|
||||
: refTable.name;
|
||||
sqlScript += `ALTER TABLE ${fkTableName} ADD CONSTRAINT ${relationship.name} FOREIGN KEY (${fkField.name}) REFERENCES ${refTableName} (${refField.name});\n`;
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
@@ -2,7 +2,8 @@ const withExtras = false;
|
||||
const withDefault = `IFNULL(REPLACE(REPLACE(cols.column_default, '\\\\', ''), '"', 'ֿֿֿ\\"'), '')`;
|
||||
const withoutDefault = `""`;
|
||||
|
||||
export const mariaDBQuery = `SELECT CAST(CONCAT(
|
||||
export const mariaDBQuery = `SET SESSION group_concat_max_len = 10000000;
|
||||
SELECT CAST(CONCAT(
|
||||
'{"fk_info": [',
|
||||
IFNULL((SELECT GROUP_CONCAT(
|
||||
CONCAT('{"schema":"', cast(fk.table_schema as CHAR),
|
||||
|
||||
@@ -18,11 +18,14 @@ export interface SQLColumn {
|
||||
nullable: boolean;
|
||||
primaryKey: boolean;
|
||||
unique: boolean;
|
||||
typeArgs?: {
|
||||
length?: number;
|
||||
precision?: number;
|
||||
scale?: number;
|
||||
};
|
||||
typeArgs?:
|
||||
| {
|
||||
length?: number;
|
||||
precision?: number;
|
||||
scale?: number;
|
||||
}
|
||||
| number[]
|
||||
| string;
|
||||
comment?: string;
|
||||
default?: string;
|
||||
increment?: boolean;
|
||||
@@ -559,6 +562,38 @@ export function convertToChartDBDiagram(
|
||||
id: column.type.toLowerCase(),
|
||||
name: column.type,
|
||||
};
|
||||
}
|
||||
// Handle SQL Server types specifically
|
||||
else if (
|
||||
sourceDatabaseType === DatabaseType.SQL_SERVER &&
|
||||
targetDatabaseType === DatabaseType.SQL_SERVER
|
||||
) {
|
||||
const normalizedType = column.type.toLowerCase();
|
||||
|
||||
// Preserve SQL Server specific types when target is also SQL Server
|
||||
if (
|
||||
normalizedType === 'nvarchar' ||
|
||||
normalizedType === 'nchar' ||
|
||||
normalizedType === 'ntext' ||
|
||||
normalizedType === 'uniqueidentifier' ||
|
||||
normalizedType === 'datetime2' ||
|
||||
normalizedType === 'datetimeoffset' ||
|
||||
normalizedType === 'money' ||
|
||||
normalizedType === 'smallmoney' ||
|
||||
normalizedType === 'bit' ||
|
||||
normalizedType === 'xml' ||
|
||||
normalizedType === 'hierarchyid' ||
|
||||
normalizedType === 'geography' ||
|
||||
normalizedType === 'geometry'
|
||||
) {
|
||||
mappedType = { id: normalizedType, name: normalizedType };
|
||||
} else {
|
||||
// Use the standard mapping for other types
|
||||
mappedType = mapSQLTypeToGenericType(
|
||||
column.type,
|
||||
sourceDatabaseType
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// Use the standard mapping for other types
|
||||
mappedType = mapSQLTypeToGenericType(
|
||||
@@ -581,22 +616,68 @@ export function convertToChartDBDiagram(
|
||||
|
||||
// Add type arguments if present
|
||||
if (column.typeArgs) {
|
||||
// Transfer length for varchar/char types
|
||||
if (
|
||||
column.typeArgs.length !== undefined &&
|
||||
(field.type.id === 'varchar' || field.type.id === 'char')
|
||||
) {
|
||||
field.characterMaximumLength =
|
||||
column.typeArgs.length.toString();
|
||||
// Handle string typeArgs (e.g., 'max' for varchar(max))
|
||||
if (typeof column.typeArgs === 'string') {
|
||||
if (
|
||||
(field.type.id === 'varchar' ||
|
||||
field.type.id === 'nvarchar') &&
|
||||
column.typeArgs === 'max'
|
||||
) {
|
||||
field.characterMaximumLength = 'max';
|
||||
}
|
||||
}
|
||||
|
||||
// Transfer precision/scale for numeric types
|
||||
if (
|
||||
column.typeArgs.precision !== undefined &&
|
||||
(field.type.id === 'numeric' || field.type.id === 'decimal')
|
||||
// Handle array typeArgs (SQL Server format)
|
||||
else if (
|
||||
Array.isArray(column.typeArgs) &&
|
||||
column.typeArgs.length > 0
|
||||
) {
|
||||
field.precision = column.typeArgs.precision;
|
||||
field.scale = column.typeArgs.scale;
|
||||
if (
|
||||
field.type.id === 'varchar' ||
|
||||
field.type.id === 'nvarchar' ||
|
||||
field.type.id === 'char' ||
|
||||
field.type.id === 'nchar'
|
||||
) {
|
||||
field.characterMaximumLength =
|
||||
column.typeArgs[0].toString();
|
||||
} else if (
|
||||
(field.type.id === 'numeric' ||
|
||||
field.type.id === 'decimal') &&
|
||||
column.typeArgs.length >= 2
|
||||
) {
|
||||
field.precision = column.typeArgs[0];
|
||||
field.scale = column.typeArgs[1];
|
||||
}
|
||||
}
|
||||
// Handle object typeArgs (standard format)
|
||||
else if (
|
||||
typeof column.typeArgs === 'object' &&
|
||||
!Array.isArray(column.typeArgs)
|
||||
) {
|
||||
const typeArgsObj = column.typeArgs as {
|
||||
length?: number;
|
||||
precision?: number;
|
||||
scale?: number;
|
||||
};
|
||||
|
||||
// Transfer length for varchar/char types
|
||||
if (
|
||||
typeArgsObj.length !== undefined &&
|
||||
(field.type.id === 'varchar' ||
|
||||
field.type.id === 'char')
|
||||
) {
|
||||
field.characterMaximumLength =
|
||||
typeArgsObj.length.toString();
|
||||
}
|
||||
|
||||
// Transfer precision/scale for numeric types
|
||||
if (
|
||||
typeArgsObj.precision !== undefined &&
|
||||
(field.type.id === 'numeric' ||
|
||||
field.type.id === 'decimal')
|
||||
) {
|
||||
field.precision = typeArgsObj.precision;
|
||||
field.scale = typeArgsObj.scale;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,350 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromSQLServer } from '../sqlserver';
|
||||
|
||||
describe('SQL Server Core Parser Tests', () => {
|
||||
it('should parse basic tables', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (
|
||||
id INT PRIMARY KEY,
|
||||
name NVARCHAR(255) NOT NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('wizards');
|
||||
expect(result.tables[0].columns).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should parse tables with schemas', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [magic].[spells] (
|
||||
id UNIQUEIDENTIFIER PRIMARY KEY DEFAULT NEWID(),
|
||||
name NVARCHAR(100) NOT NULL,
|
||||
level INT NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[wizards] (
|
||||
id INT IDENTITY(1,1) PRIMARY KEY,
|
||||
name NVARCHAR(255) NOT NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.tables.find((t) => t.name === 'spells')).toBeDefined();
|
||||
expect(result.tables.find((t) => t.name === 'spells')?.schema).toBe(
|
||||
'magic'
|
||||
);
|
||||
expect(result.tables.find((t) => t.name === 'wizards')?.schema).toBe(
|
||||
'dbo'
|
||||
);
|
||||
});
|
||||
|
||||
it('should parse foreign key relationships', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE guilds (id INT PRIMARY KEY);
|
||||
CREATE TABLE mages (
|
||||
id INT PRIMARY KEY,
|
||||
guild_id INT FOREIGN KEY REFERENCES guilds(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
expect(result.relationships[0].sourceTable).toBe('mages');
|
||||
expect(result.relationships[0].targetTable).toBe('guilds');
|
||||
expect(result.relationships[0].sourceColumn).toBe('guild_id');
|
||||
expect(result.relationships[0].targetColumn).toBe('id');
|
||||
});
|
||||
|
||||
it('should parse foreign keys with schema references', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [magic].[schools] (
|
||||
id UNIQUEIDENTIFIER PRIMARY KEY DEFAULT NEWID(),
|
||||
name NVARCHAR(100) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE [magic].[towers] (
|
||||
id UNIQUEIDENTIFIER PRIMARY KEY DEFAULT NEWID(),
|
||||
school_id UNIQUEIDENTIFIER NOT NULL,
|
||||
name NVARCHAR(100) NOT NULL,
|
||||
CONSTRAINT FK_towers_schools FOREIGN KEY (school_id) REFERENCES [magic].[schools](id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
expect(result.relationships[0].sourceTable).toBe('towers');
|
||||
expect(result.relationships[0].targetTable).toBe('schools');
|
||||
expect(result.relationships[0].sourceSchema).toBe('magic');
|
||||
expect(result.relationships[0].targetSchema).toBe('magic');
|
||||
});
|
||||
|
||||
it('should handle GO statements and SQL Server specific syntax', async () => {
|
||||
const sql = `
|
||||
USE [MagicalRealm]
|
||||
GO
|
||||
|
||||
SET ANSI_NULLS ON
|
||||
GO
|
||||
|
||||
SET QUOTED_IDENTIFIER ON
|
||||
GO
|
||||
|
||||
CREATE TABLE [dbo].[enchantments] (
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[Name] [nvarchar](max) NOT NULL,
|
||||
[Power] [decimal](18, 2) NOT NULL,
|
||||
[CreatedAt] [datetime2](7) NOT NULL,
|
||||
CONSTRAINT [PK_enchantments] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
GO
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('enchantments');
|
||||
expect(result.tables[0].columns).toHaveLength(4);
|
||||
expect(
|
||||
result.tables[0].columns.find((c) => c.name === 'Power')?.type
|
||||
).toBe('decimal');
|
||||
});
|
||||
|
||||
it('should parse ALTER TABLE ADD CONSTRAINT for foreign keys', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [calibration].[Calibration] (
|
||||
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
|
||||
[Average] [decimal](18, 2) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE [calibration].[CalibrationProcess] (
|
||||
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
|
||||
[CalibrationId] [uniqueidentifier] NOT NULL
|
||||
);
|
||||
|
||||
ALTER TABLE [calibration].[CalibrationProcess]
|
||||
ADD CONSTRAINT [FK_CalibrationProcess_Calibration]
|
||||
FOREIGN KEY ([CalibrationId])
|
||||
REFERENCES [calibration].[Calibration]([Id]);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
expect(result.relationships[0].sourceTable).toBe('CalibrationProcess');
|
||||
expect(result.relationships[0].targetTable).toBe('Calibration');
|
||||
expect(result.relationships[0].name).toBe(
|
||||
'FK_CalibrationProcess_Calibration'
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle multiple schemas from the test file', async () => {
|
||||
const sql = `
|
||||
CREATE SCHEMA [magic]
|
||||
GO
|
||||
CREATE SCHEMA [artifacts]
|
||||
GO
|
||||
|
||||
CREATE TABLE [magic].[wizards] (
|
||||
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
|
||||
[Name] [nvarchar](255) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE [artifacts].[wands] (
|
||||
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
|
||||
[WizardId] [uniqueidentifier] NOT NULL,
|
||||
[WoodType] [nvarchar](50) NOT NULL,
|
||||
CONSTRAINT [FK_wands_wizards] FOREIGN KEY ([WizardId]) REFERENCES [magic].[wizards]([Id])
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.tables.find((t) => t.schema === 'magic')).toBeDefined();
|
||||
expect(
|
||||
result.tables.find((t) => t.schema === 'artifacts')
|
||||
).toBeDefined();
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
expect(result.relationships[0].sourceSchema).toBe('artifacts');
|
||||
expect(result.relationships[0].targetSchema).toBe('magic');
|
||||
});
|
||||
|
||||
it('should handle SQL Server data types correctly', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [magic].[spell_components] (
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[Name] [nvarchar](255) NOT NULL,
|
||||
[Quantity] [int] NOT NULL,
|
||||
[Weight] [decimal](10, 2) NOT NULL,
|
||||
[IsPowerful] [bit] NOT NULL,
|
||||
[DiscoveredAt] [datetime2](7) NOT NULL,
|
||||
[Description] [nvarchar](max) NULL,
|
||||
[RarityLevel] [tinyint] NOT NULL,
|
||||
[MarketValue] [money] NOT NULL,
|
||||
[AlchemicalFormula] [xml] NULL,
|
||||
PRIMARY KEY ([Id])
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const columns = result.tables[0].columns;
|
||||
|
||||
expect(columns.find((c) => c.name === 'Id')?.type).toBe(
|
||||
'uniqueidentifier'
|
||||
);
|
||||
expect(columns.find((c) => c.name === 'Name')?.type).toBe('nvarchar');
|
||||
expect(columns.find((c) => c.name === 'Quantity')?.type).toBe('int');
|
||||
expect(columns.find((c) => c.name === 'Weight')?.type).toBe('decimal');
|
||||
expect(columns.find((c) => c.name === 'IsPowerful')?.type).toBe('bit');
|
||||
expect(columns.find((c) => c.name === 'DiscoveredAt')?.type).toBe(
|
||||
'datetime2'
|
||||
);
|
||||
expect(columns.find((c) => c.name === 'Description')?.type).toBe(
|
||||
'nvarchar'
|
||||
);
|
||||
expect(columns.find((c) => c.name === 'RarityLevel')?.type).toBe(
|
||||
'tinyint'
|
||||
);
|
||||
expect(columns.find((c) => c.name === 'MarketValue')?.type).toBe(
|
||||
'money'
|
||||
);
|
||||
expect(columns.find((c) => c.name === 'AlchemicalFormula')?.type).toBe(
|
||||
'xml'
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle IDENTITY columns', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [dbo].[magical_creatures] (
|
||||
[Id] [int] IDENTITY(1,1) NOT NULL PRIMARY KEY,
|
||||
[Name] [nvarchar](100) NOT NULL,
|
||||
[PowerLevel] [int] NOT NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const idColumn = result.tables[0].columns.find((c) => c.name === 'Id');
|
||||
expect(idColumn?.increment).toBe(true);
|
||||
});
|
||||
|
||||
it('should parse composite primary keys', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [magic].[spell_ingredients] (
|
||||
[SpellId] [uniqueidentifier] NOT NULL,
|
||||
[IngredientId] [uniqueidentifier] NOT NULL,
|
||||
[Quantity] [int] NOT NULL,
|
||||
CONSTRAINT [PK_spell_ingredients] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[SpellId] ASC,
|
||||
[IngredientId] ASC
|
||||
)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const table = result.tables[0];
|
||||
expect(table.columns.filter((c) => c.primaryKey)).toHaveLength(2);
|
||||
expect(
|
||||
table.columns.find((c) => c.name === 'SpellId')?.primaryKey
|
||||
).toBe(true);
|
||||
expect(
|
||||
table.columns.find((c) => c.name === 'IngredientId')?.primaryKey
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle unique constraints', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [dbo].[arcane_libraries] (
|
||||
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
|
||||
[Code] [nvarchar](50) NOT NULL,
|
||||
[Name] [nvarchar](255) NOT NULL,
|
||||
CONSTRAINT [UQ_arcane_libraries_code] UNIQUE ([Code])
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].indexes).toHaveLength(1);
|
||||
expect(result.tables[0].indexes[0].name).toBe(
|
||||
'UQ_arcane_libraries_code'
|
||||
);
|
||||
expect(result.tables[0].indexes[0].unique).toBe(true);
|
||||
expect(result.tables[0].indexes[0].columns).toContain('Code');
|
||||
});
|
||||
|
||||
it('should handle default values', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [dbo].[potion_recipes] (
|
||||
[Id] [uniqueidentifier] NOT NULL DEFAULT NEWID(),
|
||||
[Name] [nvarchar](255) NOT NULL,
|
||||
[IsActive] [bit] NOT NULL DEFAULT 1,
|
||||
[CreatedAt] [datetime2](7) NOT NULL DEFAULT GETDATE(),
|
||||
[Difficulty] [int] NOT NULL DEFAULT 5,
|
||||
PRIMARY KEY ([Id])
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const columns = result.tables[0].columns;
|
||||
|
||||
expect(columns.find((c) => c.name === 'Id')?.default).toBeDefined();
|
||||
expect(columns.find((c) => c.name === 'IsActive')?.default).toBe('1');
|
||||
expect(
|
||||
columns.find((c) => c.name === 'CreatedAt')?.default
|
||||
).toBeDefined();
|
||||
expect(columns.find((c) => c.name === 'Difficulty')?.default).toBe('5');
|
||||
});
|
||||
|
||||
it('should parse indexes created separately', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [dbo].[spell_books] (
|
||||
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
|
||||
[Title] [nvarchar](255) NOT NULL,
|
||||
[Author] [nvarchar](255) NOT NULL,
|
||||
[PublishedYear] [int] NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX [IX_spell_books_author] ON [dbo].[spell_books] ([Author]);
|
||||
CREATE UNIQUE INDEX [UIX_spell_books_title] ON [dbo].[spell_books] ([Title]);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].indexes).toHaveLength(2);
|
||||
|
||||
const authorIndex = result.tables[0].indexes.find(
|
||||
(i) => i.name === 'IX_spell_books_author'
|
||||
);
|
||||
expect(authorIndex?.unique).toBe(false);
|
||||
expect(authorIndex?.columns).toContain('Author');
|
||||
|
||||
const titleIndex = result.tables[0].indexes.find(
|
||||
(i) => i.name === 'UIX_spell_books_title'
|
||||
);
|
||||
expect(titleIndex?.unique).toBe(true);
|
||||
expect(titleIndex?.columns).toContain('Title');
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,478 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromSQLServer } from '../sqlserver';
|
||||
|
||||
describe('SQL Server Real-World Examples', () => {
|
||||
describe('Magical Academy Example', () => {
|
||||
it('should parse the magical academy example with all 16 tables', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [dbo].[schools](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[name] [nvarchar](255) NOT NULL,
|
||||
[created_at] [datetime2](7) NOT NULL DEFAULT GETDATE()
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[towers](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[school_id] [uniqueidentifier] NOT NULL,
|
||||
[name] [nvarchar](255) NOT NULL,
|
||||
CONSTRAINT [FK_towers_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[ranks](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[school_id] [uniqueidentifier] NOT NULL,
|
||||
[name] [nvarchar](255) NOT NULL,
|
||||
CONSTRAINT [FK_ranks_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[spell_permissions](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[spell_type] [nvarchar](255) NOT NULL,
|
||||
[casting_level] [nvarchar](255) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[rank_spell_permissions](
|
||||
[rank_id] [uniqueidentifier] NOT NULL,
|
||||
[spell_permission_id] [uniqueidentifier] NOT NULL,
|
||||
PRIMARY KEY ([rank_id], [spell_permission_id]),
|
||||
CONSTRAINT [FK_rsp_ranks] FOREIGN KEY ([rank_id]) REFERENCES [dbo].[ranks]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_rsp_permissions] FOREIGN KEY ([spell_permission_id]) REFERENCES [dbo].[spell_permissions]([id]) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[grimoire_types](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[school_id] [uniqueidentifier] NOT NULL,
|
||||
[name] [nvarchar](255) NOT NULL,
|
||||
CONSTRAINT [FK_grimoire_types_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[wizards](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[school_id] [uniqueidentifier] NOT NULL,
|
||||
[tower_id] [uniqueidentifier] NOT NULL,
|
||||
[wizard_name] [nvarchar](255) NOT NULL,
|
||||
[email] [nvarchar](255) NOT NULL,
|
||||
CONSTRAINT [FK_wizards_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_wizards_towers] FOREIGN KEY ([tower_id]) REFERENCES [dbo].[towers]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [UQ_wizards_school_name] UNIQUE ([school_id], [wizard_name])
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[wizard_ranks](
|
||||
[wizard_id] [uniqueidentifier] NOT NULL,
|
||||
[rank_id] [uniqueidentifier] NOT NULL,
|
||||
[tower_id] [uniqueidentifier] NOT NULL,
|
||||
[assigned_at] [datetime2](7) NOT NULL DEFAULT GETDATE(),
|
||||
PRIMARY KEY ([wizard_id], [rank_id], [tower_id]),
|
||||
CONSTRAINT [FK_wr_wizards] FOREIGN KEY ([wizard_id]) REFERENCES [dbo].[wizards]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_wr_ranks] FOREIGN KEY ([rank_id]) REFERENCES [dbo].[ranks]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_wr_towers] FOREIGN KEY ([tower_id]) REFERENCES [dbo].[towers]([id]) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[apprentices](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[school_id] [uniqueidentifier] NOT NULL,
|
||||
[tower_id] [uniqueidentifier] NOT NULL,
|
||||
[first_name] [nvarchar](255) NOT NULL,
|
||||
[last_name] [nvarchar](255) NOT NULL,
|
||||
[enrollment_date] [date] NOT NULL,
|
||||
[primary_mentor] [uniqueidentifier] NULL,
|
||||
[sponsoring_wizard] [uniqueidentifier] NULL,
|
||||
CONSTRAINT [FK_apprentices_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_apprentices_towers] FOREIGN KEY ([tower_id]) REFERENCES [dbo].[towers]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_apprentices_mentor] FOREIGN KEY ([primary_mentor]) REFERENCES [dbo].[wizards]([id]),
|
||||
CONSTRAINT [FK_apprentices_sponsor] FOREIGN KEY ([sponsoring_wizard]) REFERENCES [dbo].[wizards]([id])
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[spell_lessons](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[school_id] [uniqueidentifier] NOT NULL,
|
||||
[tower_id] [uniqueidentifier] NOT NULL,
|
||||
[apprentice_id] [uniqueidentifier] NOT NULL,
|
||||
[instructor_id] [uniqueidentifier] NOT NULL,
|
||||
[lesson_date] [datetime2](7) NOT NULL,
|
||||
CONSTRAINT [FK_sl_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_sl_towers] FOREIGN KEY ([tower_id]) REFERENCES [dbo].[towers]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_sl_apprentices] FOREIGN KEY ([apprentice_id]) REFERENCES [dbo].[apprentices]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_sl_instructors] FOREIGN KEY ([instructor_id]) REFERENCES [dbo].[wizards]([id])
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[grimoires](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[school_id] [uniqueidentifier] NOT NULL,
|
||||
[tower_id] [uniqueidentifier] NOT NULL,
|
||||
[apprentice_id] [uniqueidentifier] NOT NULL,
|
||||
[grimoire_type_id] [uniqueidentifier] NOT NULL,
|
||||
[author_wizard_id] [uniqueidentifier] NOT NULL,
|
||||
[content] [nvarchar](max) NOT NULL,
|
||||
CONSTRAINT [FK_g_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_g_towers] FOREIGN KEY ([tower_id]) REFERENCES [dbo].[towers]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_g_apprentices] FOREIGN KEY ([apprentice_id]) REFERENCES [dbo].[apprentices]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_g_types] FOREIGN KEY ([grimoire_type_id]) REFERENCES [dbo].[grimoire_types]([id]),
|
||||
CONSTRAINT [FK_g_authors] FOREIGN KEY ([author_wizard_id]) REFERENCES [dbo].[wizards]([id])
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[tuition_scrolls](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[school_id] [uniqueidentifier] NOT NULL,
|
||||
[tower_id] [uniqueidentifier] NOT NULL,
|
||||
[apprentice_id] [uniqueidentifier] NOT NULL,
|
||||
[total_amount] [decimal](10,2) NOT NULL,
|
||||
[status] [nvarchar](50) NOT NULL,
|
||||
CONSTRAINT [FK_ts_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_ts_towers] FOREIGN KEY ([tower_id]) REFERENCES [dbo].[towers]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_ts_apprentices] FOREIGN KEY ([apprentice_id]) REFERENCES [dbo].[apprentices]([id]) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[tuition_items](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[tuition_scroll_id] [uniqueidentifier] NOT NULL,
|
||||
[description] [nvarchar](max) NOT NULL,
|
||||
[amount] [decimal](10,2) NOT NULL,
|
||||
CONSTRAINT [FK_ti_scrolls] FOREIGN KEY ([tuition_scroll_id]) REFERENCES [dbo].[tuition_scrolls]([id]) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[patron_sponsorships](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[tuition_scroll_id] [uniqueidentifier] NOT NULL,
|
||||
[patron_house] [nvarchar](255) NOT NULL,
|
||||
[sponsorship_code] [nvarchar](50) NOT NULL,
|
||||
[status] [nvarchar](50) NOT NULL,
|
||||
CONSTRAINT [FK_ps_scrolls] FOREIGN KEY ([tuition_scroll_id]) REFERENCES [dbo].[tuition_scrolls]([id]) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[gold_payments](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[tuition_scroll_id] [uniqueidentifier] NOT NULL,
|
||||
[amount] [decimal](10,2) NOT NULL,
|
||||
[payment_date] [datetime2](7) NOT NULL DEFAULT GETDATE(),
|
||||
CONSTRAINT [FK_gp_scrolls] FOREIGN KEY ([tuition_scroll_id]) REFERENCES [dbo].[tuition_scrolls]([id]) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[arcane_logs](
|
||||
[id] [bigint] IDENTITY(1,1) PRIMARY KEY,
|
||||
[school_id] [uniqueidentifier] NULL,
|
||||
[wizard_id] [uniqueidentifier] NULL,
|
||||
[tower_id] [uniqueidentifier] NULL,
|
||||
[table_name] [nvarchar](255) NOT NULL,
|
||||
[operation] [nvarchar](50) NOT NULL,
|
||||
[record_id] [uniqueidentifier] NULL,
|
||||
[changes] [nvarchar](max) NULL,
|
||||
[created_at] [datetime2](7) NOT NULL DEFAULT GETDATE(),
|
||||
CONSTRAINT [FK_al_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE SET NULL,
|
||||
CONSTRAINT [FK_al_wizards] FOREIGN KEY ([wizard_id]) REFERENCES [dbo].[wizards]([id]) ON DELETE SET NULL,
|
||||
CONSTRAINT [FK_al_towers] FOREIGN KEY ([tower_id]) REFERENCES [dbo].[towers]([id]) ON DELETE SET NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
// Should find all 16 tables
|
||||
const expectedTables = [
|
||||
'apprentices',
|
||||
'arcane_logs',
|
||||
'gold_payments',
|
||||
'grimoire_types',
|
||||
'grimoires',
|
||||
'patron_sponsorships',
|
||||
'rank_spell_permissions',
|
||||
'ranks',
|
||||
'schools',
|
||||
'spell_lessons',
|
||||
'spell_permissions',
|
||||
'towers',
|
||||
'tuition_items',
|
||||
'tuition_scrolls',
|
||||
'wizard_ranks',
|
||||
'wizards',
|
||||
];
|
||||
|
||||
expect(result.tables).toHaveLength(16);
|
||||
expect(result.tables.map((t) => t.name).sort()).toEqual(
|
||||
expectedTables
|
||||
);
|
||||
|
||||
// Verify key relationships exist
|
||||
const relationships = result.relationships;
|
||||
|
||||
// Check some critical relationships
|
||||
expect(
|
||||
relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'wizards' &&
|
||||
r.targetTable === 'schools' &&
|
||||
r.sourceColumn === 'school_id'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'wizard_ranks' &&
|
||||
r.targetTable === 'wizards' &&
|
||||
r.sourceColumn === 'wizard_id'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'apprentices' &&
|
||||
r.targetTable === 'wizards' &&
|
||||
r.sourceColumn === 'primary_mentor'
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Enchanted Bazaar Example', () => {
|
||||
it('should parse the enchanted bazaar example with complex features', async () => {
|
||||
const sql = `
|
||||
-- Enchanted Bazaar tables with complex features
|
||||
CREATE TABLE [dbo].[merchants](
|
||||
[id] [int] IDENTITY(1,1) PRIMARY KEY,
|
||||
[name] [nvarchar](255) NOT NULL,
|
||||
[email] [nvarchar](255) NOT NULL,
|
||||
[created_at] [datetime] DEFAULT GETDATE(),
|
||||
CONSTRAINT [UQ_merchants_email] UNIQUE ([email])
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[artifacts](
|
||||
[id] [int] IDENTITY(1,1) PRIMARY KEY,
|
||||
[merchant_id] [int] NOT NULL,
|
||||
[name] [nvarchar](255) NOT NULL,
|
||||
[price] [decimal](10, 2) NOT NULL CHECK ([price] >= 0),
|
||||
[enchantment_charges] [int] DEFAULT 0 CHECK ([enchantment_charges] >= 0),
|
||||
CONSTRAINT [FK_artifacts_merchants] FOREIGN KEY ([merchant_id]) REFERENCES [dbo].[merchants]([id]) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[trades](
|
||||
[id] [int] IDENTITY(1,1) PRIMARY KEY,
|
||||
[created_at] [datetime] DEFAULT GETDATE(),
|
||||
[status] [varchar](50) DEFAULT 'negotiating'
|
||||
);
|
||||
|
||||
CREATE TABLE [dbo].[trade_items](
|
||||
[trade_id] [int] NOT NULL,
|
||||
[artifact_id] [int] NOT NULL,
|
||||
[quantity] [int] NOT NULL CHECK ([quantity] > 0),
|
||||
[agreed_price] [decimal](10, 2) NOT NULL,
|
||||
PRIMARY KEY ([trade_id], [artifact_id]),
|
||||
CONSTRAINT [FK_ti_trades] FOREIGN KEY ([trade_id]) REFERENCES [dbo].[trades]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_ti_artifacts] FOREIGN KEY ([artifact_id]) REFERENCES [dbo].[artifacts]([id])
|
||||
);
|
||||
|
||||
-- Create indexes
|
||||
CREATE INDEX [IX_artifacts_merchant_id] ON [dbo].[artifacts] ([merchant_id]);
|
||||
CREATE INDEX [IX_artifacts_price] ON [dbo].[artifacts] ([price] DESC);
|
||||
CREATE UNIQUE INDEX [UIX_artifacts_name_merchant] ON [dbo].[artifacts] ([name], [merchant_id]);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
// Should parse all tables
|
||||
expect(result.tables.length).toBeGreaterThanOrEqual(4);
|
||||
|
||||
// Check for specific tables
|
||||
const tableNames = result.tables.map((t) => t.name);
|
||||
expect(tableNames).toContain('merchants');
|
||||
expect(tableNames).toContain('artifacts');
|
||||
expect(tableNames).toContain('trades');
|
||||
expect(tableNames).toContain('trade_items');
|
||||
|
||||
// Check relationships
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'artifacts' &&
|
||||
r.targetTable === 'merchants'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'trade_items' &&
|
||||
r.targetTable === 'trades'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
// Check indexes were created
|
||||
const artifactsTable = result.tables.find(
|
||||
(t) => t.name === 'artifacts'
|
||||
);
|
||||
expect(artifactsTable?.indexes.length).toBeGreaterThanOrEqual(2);
|
||||
expect(
|
||||
artifactsTable?.indexes.some(
|
||||
(i) => i.name === 'IX_artifacts_merchant_id'
|
||||
)
|
||||
).toBe(true);
|
||||
expect(
|
||||
artifactsTable?.indexes.some(
|
||||
(i) => i.unique && i.name === 'UIX_artifacts_name_merchant'
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Complex SQL Server Schema Example', () => {
|
||||
it('should parse complex multi-schema database with various SQL Server features', async () => {
|
||||
const sql = `
|
||||
CREATE SCHEMA [magic];
|
||||
GO
|
||||
CREATE SCHEMA [inventory];
|
||||
GO
|
||||
CREATE SCHEMA [academy];
|
||||
GO
|
||||
|
||||
-- Magic schema tables
|
||||
CREATE TABLE [magic].[spell_categories](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWSEQUENTIALID(),
|
||||
[name] [nvarchar](100) NOT NULL,
|
||||
[description] [nvarchar](max) NULL,
|
||||
[is_forbidden] [bit] NOT NULL DEFAULT 0,
|
||||
[created_at] [datetime2](7) NOT NULL DEFAULT SYSDATETIME()
|
||||
);
|
||||
|
||||
CREATE TABLE [magic].[spells](
|
||||
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWSEQUENTIALID(),
|
||||
[category_id] [uniqueidentifier] NOT NULL,
|
||||
[name] [nvarchar](200) NOT NULL,
|
||||
[mana_cost] [smallint] NOT NULL CHECK ([mana_cost] > 0),
|
||||
[damage_output] [decimal](10,2) NULL,
|
||||
[cast_time_ms] [int] NOT NULL DEFAULT 1000,
|
||||
[is_active] [bit] NOT NULL DEFAULT 1,
|
||||
[metadata] [xml] NULL,
|
||||
CONSTRAINT [FK_spells_categories] FOREIGN KEY ([category_id])
|
||||
REFERENCES [magic].[spell_categories]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [UQ_spells_name] UNIQUE ([name])
|
||||
);
|
||||
|
||||
-- Inventory schema tables
|
||||
CREATE TABLE [inventory].[item_types](
|
||||
[id] [int] IDENTITY(1,1) PRIMARY KEY,
|
||||
[type_code] [char](3) NOT NULL UNIQUE,
|
||||
[type_name] [varchar](50) NOT NULL,
|
||||
[max_stack_size] [tinyint] NOT NULL DEFAULT 99
|
||||
);
|
||||
|
||||
CREATE TABLE [inventory].[magical_items](
|
||||
[id] [bigint] IDENTITY(1,1) PRIMARY KEY,
|
||||
[item_type_id] [int] NOT NULL,
|
||||
[item_name] [nvarchar](255) NOT NULL,
|
||||
[rarity] [varchar](20) NOT NULL,
|
||||
[weight_kg] [float] NOT NULL,
|
||||
[base_value] [money] NOT NULL,
|
||||
[enchantment_level] [tinyint] NULL CHECK ([enchantment_level] BETWEEN 0 AND 10),
|
||||
[discovered_date] [date] NULL,
|
||||
[discovered_time] [time](7) NULL,
|
||||
[full_discovered_at] [datetimeoffset](7) NULL,
|
||||
CONSTRAINT [FK_items_types] FOREIGN KEY ([item_type_id])
|
||||
REFERENCES [inventory].[item_types]([id])
|
||||
);
|
||||
|
||||
-- Academy schema tables
|
||||
CREATE TABLE [academy].[courses](
|
||||
[course_id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[course_code] [nvarchar](10) NOT NULL UNIQUE,
|
||||
[course_name] [nvarchar](200) NOT NULL,
|
||||
[credits] [decimal](3,1) NOT NULL,
|
||||
[prerequisite_spell_id] [uniqueidentifier] NULL,
|
||||
CONSTRAINT [FK_courses_spells] FOREIGN KEY ([prerequisite_spell_id])
|
||||
REFERENCES [magic].[spells]([id])
|
||||
);
|
||||
|
||||
CREATE TABLE [academy].[enrollments](
|
||||
[enrollment_id] [bigint] IDENTITY(1,1) PRIMARY KEY,
|
||||
[student_id] [uniqueidentifier] NOT NULL,
|
||||
[course_id] [uniqueidentifier] NOT NULL,
|
||||
[enrollment_date] [datetime2](0) NOT NULL DEFAULT GETDATE(),
|
||||
[grade] [decimal](4,2) NULL CHECK ([grade] >= 0 AND [grade] <= 100),
|
||||
[completion_status] [nvarchar](20) NOT NULL DEFAULT 'enrolled',
|
||||
CONSTRAINT [FK_enrollments_courses] FOREIGN KEY ([course_id])
|
||||
REFERENCES [academy].[courses]([course_id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [UQ_enrollment] UNIQUE ([student_id], [course_id])
|
||||
);
|
||||
|
||||
-- Cross-schema relationships
|
||||
CREATE TABLE [inventory].[spell_reagents](
|
||||
[spell_id] [uniqueidentifier] NOT NULL,
|
||||
[item_id] [bigint] NOT NULL,
|
||||
[quantity_required] [smallint] NOT NULL DEFAULT 1,
|
||||
PRIMARY KEY ([spell_id], [item_id]),
|
||||
CONSTRAINT [FK_reagents_spells] FOREIGN KEY ([spell_id])
|
||||
REFERENCES [magic].[spells]([id]) ON DELETE CASCADE,
|
||||
CONSTRAINT [FK_reagents_items] FOREIGN KEY ([item_id])
|
||||
REFERENCES [inventory].[magical_items]([id]) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
-- Additional indexes
|
||||
CREATE INDEX [IX_spells_category] ON [magic].[spells] ([category_id]);
|
||||
CREATE INDEX [IX_items_type_rarity] ON [inventory].[magical_items] ([item_type_id], [rarity]);
|
||||
CREATE UNIQUE INDEX [UIX_items_name_type] ON [inventory].[magical_items] ([item_name], [item_type_id]);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
// Verify all tables are parsed
|
||||
expect(result.tables).toHaveLength(7);
|
||||
|
||||
// Check schema assignment
|
||||
expect(
|
||||
result.tables.filter((t) => t.schema === 'magic')
|
||||
).toHaveLength(2);
|
||||
expect(
|
||||
result.tables.filter((t) => t.schema === 'inventory')
|
||||
).toHaveLength(3);
|
||||
expect(
|
||||
result.tables.filter((t) => t.schema === 'academy')
|
||||
).toHaveLength(2);
|
||||
|
||||
// Verify cross-schema relationships
|
||||
const crossSchemaRel = result.relationships.find(
|
||||
(r) => r.sourceTable === 'courses' && r.targetTable === 'spells'
|
||||
);
|
||||
expect(crossSchemaRel).toBeDefined();
|
||||
expect(crossSchemaRel?.sourceSchema).toBe('academy');
|
||||
expect(crossSchemaRel?.targetSchema).toBe('magic');
|
||||
|
||||
// Check various SQL Server data types
|
||||
const spellsTable = result.tables.find((t) => t.name === 'spells');
|
||||
expect(
|
||||
spellsTable?.columns.find((c) => c.name === 'mana_cost')?.type
|
||||
).toBe('smallint');
|
||||
expect(
|
||||
spellsTable?.columns.find((c) => c.name === 'metadata')?.type
|
||||
).toBe('xml');
|
||||
|
||||
const itemsTable = result.tables.find(
|
||||
(t) => t.name === 'magical_items'
|
||||
);
|
||||
expect(
|
||||
itemsTable?.columns.find((c) => c.name === 'weight_kg')?.type
|
||||
).toBe('float');
|
||||
expect(
|
||||
itemsTable?.columns.find((c) => c.name === 'base_value')?.type
|
||||
).toBe('money');
|
||||
expect(
|
||||
itemsTable?.columns.find((c) => c.name === 'discovered_date')
|
||||
?.type
|
||||
).toBe('date');
|
||||
expect(
|
||||
itemsTable?.columns.find((c) => c.name === 'discovered_time')
|
||||
?.type
|
||||
).toBe('time');
|
||||
expect(
|
||||
itemsTable?.columns.find((c) => c.name === 'full_discovered_at')
|
||||
?.type
|
||||
).toBe('datetimeoffset');
|
||||
|
||||
// Verify IDENTITY columns
|
||||
const itemTypesTable = result.tables.find(
|
||||
(t) => t.name === 'item_types'
|
||||
);
|
||||
expect(
|
||||
itemTypesTable?.columns.find((c) => c.name === 'id')?.increment
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,675 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromSQLServer } from '../sqlserver';
|
||||
|
||||
describe('SQL Server Fantasy Database Import Tests', () => {
|
||||
it('should parse the magical realm database correctly', async () => {
|
||||
// Fantasy-themed SQL Server database with multiple schemas
|
||||
const sql = `
|
||||
USE [MagicalRealmDB]
|
||||
GO
|
||||
/****** Object: Schema [spellcasting] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
CREATE SCHEMA [spellcasting]
|
||||
GO
|
||||
/****** Object: Schema [enchantments] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
CREATE SCHEMA [enchantments]
|
||||
GO
|
||||
/****** Object: Schema [artifacts] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
CREATE SCHEMA [artifacts]
|
||||
GO
|
||||
/****** Object: Schema [wizards] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
CREATE SCHEMA [wizards]
|
||||
GO
|
||||
|
||||
/****** Object: Table [spellcasting].[Spell] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
SET ANSI_NULLS ON
|
||||
GO
|
||||
SET QUOTED_IDENTIFIER ON
|
||||
GO
|
||||
CREATE TABLE [spellcasting].[Spell](
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[HelpId] [uniqueidentifier] NOT NULL,
|
||||
[RealmId] [uniqueidentifier] NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[CreatedAt] [datetime2](7) NOT NULL,
|
||||
[DeletedAt] [datetime2](7) NULL,
|
||||
[DeletedById] [uniqueidentifier] NULL,
|
||||
[DeletedByFullName] [nvarchar](max) NULL,
|
||||
[DeletedByEmail] [nvarchar](max) NULL,
|
||||
[CreatedById] [uniqueidentifier] NULL,
|
||||
[CreatedByUsername] [nvarchar](max) NOT NULL,
|
||||
[UpdatedBy] [uniqueidentifier] NULL,
|
||||
[UpdatedAt] [datetime2](7) NULL,
|
||||
[PowerLevel] [decimal](18, 2) NOT NULL,
|
||||
[Incantation] [nvarchar](max) NULL,
|
||||
[ParentId] [uniqueidentifier] NULL,
|
||||
[Name] [nvarchar](255) NOT NULL,
|
||||
[Description] [nvarchar](max) NOT NULL,
|
||||
[RunicInscription] [varchar](max) NULL,
|
||||
CONSTRAINT [PK_Spell] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
GO
|
||||
|
||||
/****** Object: Table [spellcasting].[SpellCasting] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
SET ANSI_NULLS ON
|
||||
GO
|
||||
SET QUOTED_IDENTIFIER ON
|
||||
GO
|
||||
CREATE TABLE [spellcasting].[SpellCasting](
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[HelpId] [uniqueidentifier] NOT NULL,
|
||||
[RealmId] [uniqueidentifier] NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[CreatedAt] [datetime2](7) NOT NULL,
|
||||
[DeletedAt] [datetime2](7) NULL,
|
||||
[DeletedById] [uniqueidentifier] NULL,
|
||||
[DeletedByFullName] [nvarchar](max) NULL,
|
||||
[DeletedByEmail] [nvarchar](max) NULL,
|
||||
[CreatedById] [uniqueidentifier] NULL,
|
||||
[CreatedByUsername] [nvarchar](max) NOT NULL,
|
||||
[UpdatedBy] [uniqueidentifier] NULL,
|
||||
[UpdatedAt] [datetime2](7) NULL,
|
||||
[WizardLevel] [int] NOT NULL,
|
||||
[ManaCost] [decimal](18, 2) NOT NULL,
|
||||
[CastingTime] [decimal](18, 2) NULL,
|
||||
[Components] [nvarchar](max) NULL,
|
||||
[CastingNumber] [int] NULL,
|
||||
[SuccessRate] [decimal](18, 2) NULL,
|
||||
[CriticalChance] [decimal](18, 2) NULL,
|
||||
[ExtendedDuration] [decimal](18, 2) NULL,
|
||||
[Status] [int] NULL,
|
||||
[SpellId] [uniqueidentifier] NOT NULL,
|
||||
[CastingNotes] [nvarchar](max) NULL,
|
||||
[ParentId] [uniqueidentifier] NULL,
|
||||
CONSTRAINT [PK_SpellCasting] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
GO
|
||||
|
||||
/****** Object: Table [enchantments].[MagicalItem] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
SET ANSI_NULLS ON
|
||||
GO
|
||||
SET QUOTED_IDENTIFIER ON
|
||||
GO
|
||||
CREATE TABLE [enchantments].[MagicalItem](
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[HelpId] [uniqueidentifier] NOT NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[CreatedAt] [datetime2](7) NOT NULL,
|
||||
[CreatedById] [uniqueidentifier] NULL,
|
||||
[CreatedByUsername] [nvarchar](max) NOT NULL,
|
||||
[WandId] [uniqueidentifier] NOT NULL,
|
||||
[EnchanterId] [uniqueidentifier] NOT NULL,
|
||||
[OrderNumber] [nvarchar](max) NOT NULL,
|
||||
[EnchantmentDate] [datetime2](7) NOT NULL,
|
||||
[IsCertified] [bit] NOT NULL,
|
||||
[CertificationCode] [nvarchar](max) NOT NULL,
|
||||
[DeletedAt] [datetime2](7) NULL,
|
||||
[DeletedById] [uniqueidentifier] NULL,
|
||||
[DeletedByFullName] [nvarchar](max) NULL,
|
||||
[DeletedByEmail] [nvarchar](max) NULL,
|
||||
[ParentId] [uniqueidentifier] NULL,
|
||||
[ReasonForAction] [nvarchar](max) NULL,
|
||||
[EnchantmentLevel] [int] NOT NULL,
|
||||
CONSTRAINT [PK_MagicalItem] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
GO
|
||||
|
||||
/****** Object: Table [enchantments].[EnchantmentFormula] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
SET ANSI_NULLS ON
|
||||
GO
|
||||
SET QUOTED_IDENTIFIER ON
|
||||
GO
|
||||
CREATE TABLE [enchantments].[EnchantmentFormula](
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[RealmId] [uniqueidentifier] NOT NULL,
|
||||
[ParentId] [uniqueidentifier] NULL,
|
||||
[DeletedAt] [datetime2](7) NULL,
|
||||
[DeletedById] [uniqueidentifier] NULL,
|
||||
[DeletedByFullName] [nvarchar](max) NULL,
|
||||
[DeletedByEmail] [nvarchar](max) NULL,
|
||||
[ReasonForAction] [nvarchar](max) NULL,
|
||||
[HelpId] [uniqueidentifier] NOT NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[CreatedAt] [datetime2](7) NOT NULL,
|
||||
[CreatedById] [uniqueidentifier] NULL,
|
||||
[CreatedByUsername] [nvarchar](max) NOT NULL,
|
||||
[FormulaTypeId] [int] NOT NULL,
|
||||
[Definition] [nvarchar](max) NOT NULL,
|
||||
[Name] [nvarchar](max) NOT NULL,
|
||||
[HasMultipleApplications] [bit] NOT NULL,
|
||||
[StepNumber] [int] NOT NULL,
|
||||
[Identifier] [int] NOT NULL,
|
||||
CONSTRAINT [PK_EnchantmentFormula] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
GO
|
||||
|
||||
/****** Object: Table [wizards].[Wizard] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
SET ANSI_NULLS ON
|
||||
GO
|
||||
SET QUOTED_IDENTIFIER ON
|
||||
GO
|
||||
CREATE TABLE [wizards].[Wizard](
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[HelpId] [uniqueidentifier] NOT NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[CreatedAt] [datetime2](7) NOT NULL,
|
||||
[CreatedById] [uniqueidentifier] NULL,
|
||||
[DeletedAt] [datetime2](7) NULL,
|
||||
[DeletedByEmail] [nvarchar](max) NULL,
|
||||
[DeletedByFullName] [nvarchar](max) NULL,
|
||||
[DeletedById] [uniqueidentifier] NULL,
|
||||
[ParentId] [uniqueidentifier] NULL,
|
||||
[CreatedByUsername] [nvarchar](max) NOT NULL,
|
||||
[Name] [nvarchar](255) NOT NULL,
|
||||
[Title] [nvarchar](255) NULL,
|
||||
[Biography] [nvarchar](max) NULL,
|
||||
[SpecialtySchool] [nvarchar](100) NULL,
|
||||
[PowerLevel] [int] NOT NULL,
|
||||
[JoinedGuildDate] [datetime2](7) NOT NULL,
|
||||
[IsActive] [bit] NOT NULL,
|
||||
[MagicalSignature] [nvarchar](max) NOT NULL,
|
||||
[TowerId] [uniqueidentifier] NOT NULL,
|
||||
[MentorId] [uniqueidentifier] NULL,
|
||||
[SpellbookNotes] [varchar](max) NULL,
|
||||
CONSTRAINT [PK_Wizard] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY],
|
||||
CONSTRAINT [AK_Wizard_HelpId] UNIQUE NONCLUSTERED
|
||||
(
|
||||
[HelpId] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
GO
|
||||
|
||||
/****** Object: Table [wizards].[WizardSpellbook] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
SET ANSI_NULLS ON
|
||||
GO
|
||||
SET QUOTED_IDENTIFIER ON
|
||||
GO
|
||||
CREATE TABLE [wizards].[WizardSpellbook](
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[HelpId] [uniqueidentifier] NOT NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[CreatedAt] [datetime2](7) NOT NULL,
|
||||
[CreatedById] [uniqueidentifier] NULL,
|
||||
[CreatedByUsername] [nvarchar](max) NOT NULL,
|
||||
[DeletedAt] [datetime2](7) NULL,
|
||||
[DeletedByEmail] [nvarchar](max) NULL,
|
||||
[DeletedByFullName] [nvarchar](max) NULL,
|
||||
[DeletedById] [uniqueidentifier] NULL,
|
||||
[SuccessRate] [decimal](18, 2) NOT NULL,
|
||||
[ManaCostReduction] [decimal](18, 2) NOT NULL,
|
||||
[CriticalBonus] [decimal](18, 2) NOT NULL,
|
||||
[PageNumber] [int] NOT NULL,
|
||||
[WizardId] [uniqueidentifier] NOT NULL,
|
||||
[TowerId] [uniqueidentifier] NOT NULL,
|
||||
[ParentId] [uniqueidentifier] NULL,
|
||||
[ReasonForAction] [nvarchar](max) NULL,
|
||||
[SpellId] [uniqueidentifier] NOT NULL,
|
||||
[EnchanterId] [uniqueidentifier] NOT NULL,
|
||||
[OrderNumber] [nvarchar](max) NOT NULL,
|
||||
[LearnedDate] [datetime2](7) NOT NULL,
|
||||
[IsMastered] [bit] NOT NULL,
|
||||
[MasteryCertificate] [nvarchar](max) NOT NULL,
|
||||
CONSTRAINT [PK_WizardSpellbook] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
GO
|
||||
|
||||
/****** Object: Table [artifacts].[MagicSchool] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
SET ANSI_NULLS ON
|
||||
GO
|
||||
SET QUOTED_IDENTIFIER ON
|
||||
GO
|
||||
CREATE TABLE [artifacts].[MagicSchool](
|
||||
[Id] [int] IDENTITY(1,1) NOT NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[DeletedAt] [datetime2](7) NULL,
|
||||
[Value] [nvarchar](max) NOT NULL,
|
||||
CONSTRAINT [PK_MagicSchool] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
GO
|
||||
|
||||
/****** Object: Table [artifacts].[ArtifactType] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
SET ANSI_NULLS ON
|
||||
GO
|
||||
SET QUOTED_IDENTIFIER ON
|
||||
GO
|
||||
CREATE TABLE [artifacts].[ArtifactType](
|
||||
[Id] [int] IDENTITY(1,1) NOT NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[DeletedAt] [datetime2](7) NULL,
|
||||
[Name] [nvarchar](max) NOT NULL,
|
||||
[Key] [nvarchar](max) NOT NULL,
|
||||
[ItemCategoryId] [int] NOT NULL,
|
||||
CONSTRAINT [PK_ArtifactType] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
GO
|
||||
|
||||
/****** Object: Table [artifacts].[AncientRelic] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
SET ANSI_NULLS ON
|
||||
GO
|
||||
SET QUOTED_IDENTIFIER ON
|
||||
GO
|
||||
CREATE TABLE [artifacts].[AncientRelic](
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[HelpId] [uniqueidentifier] NOT NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[CreatedAt] [datetime2](7) NOT NULL,
|
||||
[CreatedById] [uniqueidentifier] NULL,
|
||||
[CreatedByUsername] [nvarchar](max) NOT NULL,
|
||||
[DiscoveryDate] [datetime2](7) NULL,
|
||||
[VaultId] [uniqueidentifier] NULL,
|
||||
[AppraiserId] [uniqueidentifier] NULL,
|
||||
[NumberOfRunes] [int] NULL,
|
||||
[MagicalAura] [decimal](18, 2) NULL,
|
||||
[AuraReadingDeviceId] [uniqueidentifier] NULL,
|
||||
[PowerOutput] [decimal](18, 2) NULL,
|
||||
[PowerGaugeTypeId] [int] NULL,
|
||||
[AgeInCenturies] [decimal](18, 2) NULL,
|
||||
[CarbonDatingDeviceId] [uniqueidentifier] NULL,
|
||||
[HistoricalEra] [nvarchar](max) NULL,
|
||||
[EraVerificationMethod] [int] NULL,
|
||||
[Curse] [nvarchar](max) NULL,
|
||||
[CurseDetectorId] [uniqueidentifier] NULL,
|
||||
[CurseStrength] [decimal](18, 2) NULL,
|
||||
[ProtectionLevel] [int] NULL,
|
||||
[MagicalResonance] [decimal](18, 2) NULL,
|
||||
[ResonanceWithAdjustment] [decimal](18, 2) NULL,
|
||||
[AuthenticityVerified] [bit] NOT NULL,
|
||||
[VerificationWizardId] [uniqueidentifier] NULL,
|
||||
[RestorationNeeded] [bit] NOT NULL,
|
||||
[RestorationCost] [decimal](18, 2) NULL,
|
||||
[EstimatedValue] [decimal](18, 2) NULL,
|
||||
[MarketDemand] [decimal](18, 2) NULL,
|
||||
[ArtifactCatalogId] [uniqueidentifier] NULL,
|
||||
[OriginRealm] [nvarchar](max) NULL,
|
||||
[CreatorWizard] [nvarchar](max) NULL,
|
||||
[LegendaryStatus] [bit] NOT NULL,
|
||||
[ParentId] [uniqueidentifier] NULL,
|
||||
[IsSealed] [bit] NOT NULL,
|
||||
CONSTRAINT [PK_AncientRelic] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY],
|
||||
CONSTRAINT [AK_AncientRelic_HelpId] UNIQUE NONCLUSTERED
|
||||
(
|
||||
[HelpId] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
GO
|
||||
|
||||
/****** Object: Table [artifacts].[RelicPowerMeasurements] Script Date: 25.7.2025. 9:42:07 ******/
|
||||
SET ANSI_NULLS ON
|
||||
GO
|
||||
SET QUOTED_IDENTIFIER ON
|
||||
GO
|
||||
CREATE TABLE [artifacts].[RelicPowerMeasurements](
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[HelpId] [uniqueidentifier] NOT NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[CreatedAt] [datetime2](7) NOT NULL,
|
||||
[CreatedById] [uniqueidentifier] NULL,
|
||||
[ParentId] [uniqueidentifier] NULL,
|
||||
[CreatedByUsername] [nvarchar](max) NOT NULL,
|
||||
[MagicalEnergyMeasured] [decimal](31, 15) NOT NULL,
|
||||
[AuraIntensityMeasured] [decimal](31, 15) NOT NULL,
|
||||
[ResonanceFrequencyMeasured] [decimal](31, 15) NOT NULL,
|
||||
[DimensionalFluxMeasured] [decimal](31, 15) NOT NULL,
|
||||
[MagicalEnergyCorrection] [decimal](31, 15) NULL,
|
||||
[AuraIntensityCorrection] [decimal](31, 15) NULL,
|
||||
[ResonanceFrequencyCorrection] [decimal](31, 15) NULL,
|
||||
[DimensionalFluxCorrection] [decimal](31, 15) NULL,
|
||||
[MagicalEnergyCalculated] [decimal](31, 15) NULL,
|
||||
[AuraIntensityCalculated] [decimal](31, 15) NULL,
|
||||
[ResonanceFrequencyCalculated] [decimal](31, 15) NULL,
|
||||
[DimensionalFluxCalculated] [decimal](31, 15) NULL,
|
||||
[MagicalEnergyUncertainty] [decimal](31, 15) NULL,
|
||||
[AuraIntensityUncertainty] [decimal](31, 15) NULL,
|
||||
[ResonanceFrequencyUncertainty] [decimal](31, 15) NULL,
|
||||
[DimensionalFluxUncertainty] [decimal](31, 15) NULL,
|
||||
[MagicalEnergyDrift] [decimal](31, 15) NULL,
|
||||
[AuraIntensityDrift] [decimal](31, 15) NULL,
|
||||
[ResonanceFrequencyDrift] [decimal](31, 15) NULL,
|
||||
[DimensionalFluxDrift] [decimal](31, 15) NULL,
|
||||
[AncientRelicId] [uniqueidentifier] NULL,
|
||||
CONSTRAINT [PK_RelicPowerMeasurements] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
GO
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
// Get unique schemas from parsed tables
|
||||
const foundSchemas = [
|
||||
...new Set(result.tables.map((t) => t.schema || 'dbo')),
|
||||
];
|
||||
|
||||
// Verify we found tables in multiple schemas
|
||||
expect(foundSchemas.length).toBeGreaterThan(1);
|
||||
expect(foundSchemas).toContain('spellcasting');
|
||||
expect(foundSchemas).toContain('enchantments');
|
||||
expect(foundSchemas).toContain('wizards');
|
||||
expect(foundSchemas).toContain('artifacts');
|
||||
|
||||
// Check for some specific tables we know should exist
|
||||
expect(
|
||||
result.tables.some(
|
||||
(t) => t.name === 'Spell' && t.schema === 'spellcasting'
|
||||
)
|
||||
).toBe(true);
|
||||
expect(
|
||||
result.tables.some(
|
||||
(t) => t.name === 'SpellCasting' && t.schema === 'spellcasting'
|
||||
)
|
||||
).toBe(true);
|
||||
expect(
|
||||
result.tables.some(
|
||||
(t) => t.name === 'Wizard' && t.schema === 'wizards'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
// Check data types are handled correctly
|
||||
const spellTable = result.tables.find(
|
||||
(t) => t.name === 'Spell' && t.schema === 'spellcasting'
|
||||
);
|
||||
expect(spellTable).toBeDefined();
|
||||
|
||||
if (spellTable) {
|
||||
expect(spellTable.columns.find((c) => c.name === 'Id')?.type).toBe(
|
||||
'uniqueidentifier'
|
||||
);
|
||||
expect(
|
||||
spellTable.columns.find((c) => c.name === 'PowerLevel')?.type
|
||||
).toBe('decimal');
|
||||
expect(
|
||||
spellTable.columns.find((c) => c.name === 'IsDeleted')?.type
|
||||
).toBe('bit');
|
||||
expect(
|
||||
spellTable.columns.find((c) => c.name === 'CreatedAt')?.type
|
||||
).toBe('datetime2');
|
||||
|
||||
// Check nvarchar(max) fields
|
||||
const incantationField = spellTable.columns.find(
|
||||
(c) => c.name === 'Incantation'
|
||||
);
|
||||
expect(incantationField?.type).toBe('nvarchar');
|
||||
expect(incantationField?.typeArgs).toBe('max');
|
||||
|
||||
// Check varchar(max) fields
|
||||
const runicField = spellTable.columns.find(
|
||||
(c) => c.name === 'RunicInscription'
|
||||
);
|
||||
expect(runicField?.type).toBe('varchar');
|
||||
expect(runicField?.typeArgs).toBe('max');
|
||||
}
|
||||
|
||||
// Check IDENTITY columns
|
||||
const magicSchoolTable = result.tables.find(
|
||||
(t) => t.name === 'MagicSchool' && t.schema === 'artifacts'
|
||||
);
|
||||
expect(magicSchoolTable).toBeDefined();
|
||||
if (magicSchoolTable) {
|
||||
const idColumn = magicSchoolTable.columns.find(
|
||||
(c) => c.name === 'Id'
|
||||
);
|
||||
expect(idColumn?.increment).toBe(true);
|
||||
expect(idColumn?.type).toBe('int');
|
||||
}
|
||||
|
||||
// Check unique constraints converted to indexes
|
||||
const wizardTable = result.tables.find(
|
||||
(t) => t.name === 'Wizard' && t.schema === 'wizards'
|
||||
);
|
||||
expect(wizardTable).toBeDefined();
|
||||
if (wizardTable) {
|
||||
expect(wizardTable.indexes).toHaveLength(1);
|
||||
expect(wizardTable.indexes[0].unique).toBe(true);
|
||||
expect(wizardTable.indexes[0].columns).toContain('HelpId');
|
||||
expect(wizardTable.indexes[0].name).toBe('AK_Wizard_HelpId');
|
||||
}
|
||||
});
|
||||
|
||||
it('should handle ALTER TABLE ADD CONSTRAINT statements for magical artifacts', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [artifacts].[MagicalArtifact] (
|
||||
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
|
||||
[Name] [nvarchar](255) NOT NULL,
|
||||
[PowerLevel] [int] NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE [enchantments].[ArtifactEnchantment] (
|
||||
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
|
||||
[PrimaryArtifactId] [uniqueidentifier] NOT NULL,
|
||||
[SecondaryArtifactId] [uniqueidentifier] NOT NULL,
|
||||
[EnchantmentStrength] [decimal](18, 2) NOT NULL
|
||||
);
|
||||
|
||||
ALTER TABLE [enchantments].[ArtifactEnchantment]
|
||||
ADD CONSTRAINT [FK_ArtifactEnchantment_Primary]
|
||||
FOREIGN KEY ([PrimaryArtifactId])
|
||||
REFERENCES [artifacts].[MagicalArtifact]([Id]);
|
||||
|
||||
ALTER TABLE [enchantments].[ArtifactEnchantment]
|
||||
ADD CONSTRAINT [FK_ArtifactEnchantment_Secondary]
|
||||
FOREIGN KEY ([SecondaryArtifactId])
|
||||
REFERENCES [artifacts].[MagicalArtifact]([Id]);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(2);
|
||||
|
||||
// Check both foreign keys were parsed
|
||||
const primaryRel = result.relationships.find(
|
||||
(r) =>
|
||||
r.sourceColumn === 'PrimaryArtifactId' &&
|
||||
r.name === 'FK_ArtifactEnchantment_Primary'
|
||||
);
|
||||
expect(primaryRel).toBeDefined();
|
||||
expect(primaryRel?.sourceTable).toBe('ArtifactEnchantment');
|
||||
expect(primaryRel?.targetTable).toBe('MagicalArtifact');
|
||||
|
||||
const secondaryRel = result.relationships.find(
|
||||
(r) =>
|
||||
r.sourceColumn === 'SecondaryArtifactId' &&
|
||||
r.name === 'FK_ArtifactEnchantment_Secondary'
|
||||
);
|
||||
expect(secondaryRel).toBeDefined();
|
||||
expect(secondaryRel?.sourceTable).toBe('ArtifactEnchantment');
|
||||
expect(secondaryRel?.targetTable).toBe('MagicalArtifact');
|
||||
});
|
||||
|
||||
it('should handle tables with many columns including nvarchar(max)', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [wizards].[SpellResearchEnvironment](
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[HelpId] [uniqueidentifier] NOT NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[CreatedAt] [datetime2](7) NOT NULL,
|
||||
[CreatedById] [uniqueidentifier] NULL,
|
||||
[CreatedByUsername] [nvarchar](max) NOT NULL,
|
||||
[ResearchDate] [datetime2](7) NULL,
|
||||
[LaboratoryId] [uniqueidentifier] NULL,
|
||||
[EvaluationCriteriaId] [uniqueidentifier] NULL,
|
||||
[NumberOfExperiments] [int] NULL,
|
||||
[ManaLevelStart] [decimal](18, 2) NULL,
|
||||
[ManaGaugeId] [uniqueidentifier] NULL,
|
||||
[ManaLevelEnd] [decimal](18, 2) NULL,
|
||||
[ManaGaugeTypeId] [int] NULL,
|
||||
[AetherDensityStart] [decimal](18, 2) NULL,
|
||||
[AetherGaugeId] [uniqueidentifier] NULL,
|
||||
[AetherDensityEnd] [decimal](18, 2) NULL,
|
||||
[AetherGaugeTypeId] [int] NULL,
|
||||
[MagicalFieldStart] [decimal](18, 2) NULL,
|
||||
[MagicalFieldGaugeId] [uniqueidentifier] NULL,
|
||||
[MagicalFieldEnd] [decimal](18, 2) NULL,
|
||||
[MagicalFieldGaugeTypeId] [int] NULL,
|
||||
[MagicalFieldWithCorrection] [decimal](18, 2) NULL,
|
||||
[AetherDensityWithCorrection] [decimal](18, 2) NULL,
|
||||
[ElementalBalanceStart] [decimal](18, 2) NULL,
|
||||
[ElementalBalanceGaugeId] [uniqueidentifier] NULL,
|
||||
[ElementalBalanceEnd] [decimal](18, 2) NULL,
|
||||
[ElementalBalanceGaugeTypeId] [int] NULL,
|
||||
[ManaLevelWithCorrection] [decimal](18, 2) NULL,
|
||||
[ElementalBalanceWithCorrection] [decimal](18, 2) NULL,
|
||||
[SpellResearchId] [uniqueidentifier] NULL,
|
||||
[AetherDensityValue] [decimal](18, 2) NULL,
|
||||
[MagicalFieldValue] [decimal](18, 2) NULL,
|
||||
[ManaLevelValue] [decimal](18, 2) NULL,
|
||||
[ElementalBalanceValue] [decimal](18, 2) NULL,
|
||||
[ParentId] [uniqueidentifier] NULL,
|
||||
[IsLocked] [bit] NOT NULL,
|
||||
CONSTRAINT [PK_SpellResearchEnvironment] PRIMARY KEY CLUSTERED ([Id] ASC),
|
||||
CONSTRAINT [AK_SpellResearchEnvironment_HelpId] UNIQUE NONCLUSTERED ([HelpId] ASC)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const table = result.tables[0];
|
||||
|
||||
// Should have all columns
|
||||
expect(table.columns.length).toBeGreaterThan(30);
|
||||
|
||||
// Check nvarchar(max) handling
|
||||
expect(
|
||||
table.columns.find((c) => c.name === 'CreatedByUsername')?.type
|
||||
).toBe('nvarchar');
|
||||
|
||||
// Check decimal precision handling
|
||||
const decimalColumn = table.columns.find(
|
||||
(c) => c.name === 'ManaLevelStart'
|
||||
);
|
||||
expect(decimalColumn?.type).toBe('decimal');
|
||||
expect(decimalColumn?.typeArgs).toEqual([18, 2]);
|
||||
|
||||
// Check unique constraint was converted to index
|
||||
expect(table.indexes).toHaveLength(1);
|
||||
expect(table.indexes[0].name).toBe(
|
||||
'AK_SpellResearchEnvironment_HelpId'
|
||||
);
|
||||
expect(table.indexes[0].unique).toBe(true);
|
||||
expect(table.indexes[0].columns).toContain('HelpId');
|
||||
});
|
||||
|
||||
it('should handle complex decimal types like decimal(31, 15)', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [artifacts].[RelicPowerCalculatedValues](
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[MagicalEnergyMeasured] [decimal](31, 15) NOT NULL,
|
||||
[AuraIntensityMeasured] [decimal](31, 15) NOT NULL,
|
||||
[ResonanceFrequencyMeasured] [decimal](31, 15) NOT NULL,
|
||||
[DimensionalFluxMeasured] [decimal](31, 15) NOT NULL,
|
||||
[MagicalEnergyCorrection] [decimal](31, 15) NULL,
|
||||
[AuraIntensityCorrection] [decimal](31, 15) NULL,
|
||||
[ResonanceFrequencyCorrection] [decimal](31, 15) NULL,
|
||||
[DimensionalFluxCorrection] [decimal](31, 15) NULL,
|
||||
CONSTRAINT [PK_RelicPowerCalculatedValues] PRIMARY KEY CLUSTERED ([Id] ASC)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const table = result.tables[0];
|
||||
|
||||
// Check high precision decimal handling
|
||||
const magicalEnergyColumn = table.columns.find(
|
||||
(c) => c.name === 'MagicalEnergyMeasured'
|
||||
);
|
||||
expect(magicalEnergyColumn?.type).toBe('decimal');
|
||||
expect(magicalEnergyColumn?.typeArgs).toEqual([31, 15]);
|
||||
});
|
||||
|
||||
it('should handle IDENTITY columns in artifact lookup tables', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [artifacts].[SpellComponent](
|
||||
[Id] [int] IDENTITY(1,1) NOT NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[DeletedAt] [datetime2](7) NULL,
|
||||
[ComponentName] [nvarchar](max) NOT NULL,
|
||||
CONSTRAINT [PK_SpellComponent] PRIMARY KEY CLUSTERED ([Id] ASC)
|
||||
);
|
||||
|
||||
CREATE TABLE [artifacts].[RuneType](
|
||||
[Id] [int] IDENTITY(1,1) NOT NULL,
|
||||
[IsDeleted] [bit] NOT NULL,
|
||||
[DeletedAt] [datetime2](7) NULL,
|
||||
[Name] [nvarchar](max) NOT NULL,
|
||||
[Symbol] [nvarchar](max) NOT NULL,
|
||||
[MagicSchoolId] [int] NOT NULL,
|
||||
CONSTRAINT [PK_RuneType] PRIMARY KEY CLUSTERED ([Id] ASC)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
|
||||
// Both tables should have IDENTITY columns
|
||||
result.tables.forEach((table) => {
|
||||
const idColumn = table.columns.find((c) => c.name === 'Id');
|
||||
expect(idColumn?.increment).toBe(true);
|
||||
expect(idColumn?.type).toBe('int');
|
||||
});
|
||||
});
|
||||
|
||||
it('should parse all table constraints with complex WITH options', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [dbo].[MagicalRegistry](
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[RegistrationCode] [nvarchar](50) NOT NULL,
|
||||
[PowerLevel] [int] NOT NULL,
|
||||
CONSTRAINT [PK_MagicalRegistry] PRIMARY KEY CLUSTERED
|
||||
(
|
||||
[Id] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY],
|
||||
CONSTRAINT [UQ_MagicalRegistry_Code] UNIQUE NONCLUSTERED
|
||||
(
|
||||
[RegistrationCode] ASC
|
||||
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const table = result.tables[0];
|
||||
|
||||
// Primary key should be set
|
||||
expect(table.columns.find((c) => c.name === 'Id')?.primaryKey).toBe(
|
||||
true
|
||||
);
|
||||
|
||||
// Unique constraint should be converted to index
|
||||
expect(table.indexes).toHaveLength(1);
|
||||
expect(table.indexes[0].unique).toBe(true);
|
||||
expect(table.indexes[0].columns).toContain('RegistrationCode');
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,253 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromSQLServer } from '../sqlserver';
|
||||
|
||||
describe('SQL Server Foreign Key Relationship Tests', () => {
|
||||
it('should properly link foreign key relationships with correct table IDs', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [magic].[schools] (
|
||||
[id] [uniqueidentifier] PRIMARY KEY,
|
||||
[name] [nvarchar](100) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE [magic].[wizards] (
|
||||
[id] [uniqueidentifier] PRIMARY KEY,
|
||||
[school_id] [uniqueidentifier] NOT NULL,
|
||||
[name] [nvarchar](100) NOT NULL
|
||||
);
|
||||
|
||||
ALTER TABLE [magic].[wizards] WITH CHECK ADD CONSTRAINT [FK_wizards_schools]
|
||||
FOREIGN KEY ([school_id]) REFERENCES [magic].[schools]([id]);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
// Check tables are parsed
|
||||
expect(result.tables).toHaveLength(2);
|
||||
const schoolsTable = result.tables.find((t) => t.name === 'schools');
|
||||
const wizardsTable = result.tables.find((t) => t.name === 'wizards');
|
||||
expect(schoolsTable).toBeDefined();
|
||||
expect(wizardsTable).toBeDefined();
|
||||
|
||||
// Check relationship is parsed
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
const rel = result.relationships[0];
|
||||
|
||||
// Verify the relationship has proper table IDs
|
||||
expect(rel.sourceTableId).toBe(wizardsTable!.id);
|
||||
expect(rel.targetTableId).toBe(schoolsTable!.id);
|
||||
|
||||
// Verify other relationship properties
|
||||
expect(rel.sourceTable).toBe('wizards');
|
||||
expect(rel.targetTable).toBe('schools');
|
||||
expect(rel.sourceColumn).toBe('school_id');
|
||||
expect(rel.targetColumn).toBe('id');
|
||||
expect(rel.sourceSchema).toBe('magic');
|
||||
expect(rel.targetSchema).toBe('magic');
|
||||
});
|
||||
|
||||
it('should handle cross-schema foreign key relationships', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [users].[accounts] (
|
||||
[id] [int] PRIMARY KEY,
|
||||
[username] [nvarchar](50) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE [orders].[purchases] (
|
||||
[id] [int] PRIMARY KEY,
|
||||
[account_id] [int] NOT NULL
|
||||
);
|
||||
|
||||
ALTER TABLE [orders].[purchases] ADD CONSTRAINT [FK_purchases_accounts]
|
||||
FOREIGN KEY ([account_id]) REFERENCES [users].[accounts]([id]);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const rel = result.relationships[0];
|
||||
const accountsTable = result.tables.find(
|
||||
(t) => t.name === 'accounts' && t.schema === 'users'
|
||||
);
|
||||
const purchasesTable = result.tables.find(
|
||||
(t) => t.name === 'purchases' && t.schema === 'orders'
|
||||
);
|
||||
|
||||
// Verify cross-schema relationship IDs are properly linked
|
||||
expect(rel.sourceTableId).toBe(purchasesTable!.id);
|
||||
expect(rel.targetTableId).toBe(accountsTable!.id);
|
||||
});
|
||||
|
||||
it('should parse complex foreign keys from magical realm database with proper table IDs', async () => {
|
||||
// Fantasy-themed SQL with multiple schemas and relationships
|
||||
const sql = `
|
||||
-- Spell casting schema
|
||||
CREATE SCHEMA [spellcasting];
|
||||
GO
|
||||
|
||||
-- Create spell table
|
||||
CREATE TABLE [spellcasting].[Spell] (
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[Name] [nvarchar](255) NOT NULL,
|
||||
[School] [nvarchar](100) NOT NULL,
|
||||
[Level] [int] NOT NULL,
|
||||
[Description] [nvarchar](max) NOT NULL,
|
||||
CONSTRAINT [PK_Spell] PRIMARY KEY CLUSTERED ([Id] ASC)
|
||||
);
|
||||
GO
|
||||
|
||||
-- Create spell casting process table
|
||||
CREATE TABLE [spellcasting].[SpellCastingProcess] (
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[SpellId] [uniqueidentifier] NOT NULL,
|
||||
[WizardId] [uniqueidentifier] NOT NULL,
|
||||
[CastingDate] [datetime2](7) NOT NULL,
|
||||
[SuccessRate] [decimal](18, 2) NOT NULL,
|
||||
[ManaCost] [int] NOT NULL,
|
||||
[Notes] [nvarchar](max) NULL,
|
||||
CONSTRAINT [PK_SpellCastingProcess] PRIMARY KEY CLUSTERED ([Id] ASC)
|
||||
);
|
||||
GO
|
||||
|
||||
-- Wizards schema
|
||||
CREATE SCHEMA [wizards];
|
||||
GO
|
||||
|
||||
-- Create wizard table
|
||||
CREATE TABLE [wizards].[Wizard] (
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[Name] [nvarchar](255) NOT NULL,
|
||||
[Title] [nvarchar](100) NULL,
|
||||
[Level] [int] NOT NULL,
|
||||
[Specialization] [nvarchar](100) NULL,
|
||||
CONSTRAINT [PK_Wizard] PRIMARY KEY CLUSTERED ([Id] ASC)
|
||||
);
|
||||
GO
|
||||
|
||||
-- Create wizard apprentice table
|
||||
CREATE TABLE [wizards].[Apprentice] (
|
||||
[Id] [uniqueidentifier] NOT NULL,
|
||||
[WizardId] [uniqueidentifier] NOT NULL,
|
||||
[MentorId] [uniqueidentifier] NOT NULL,
|
||||
[StartDate] [datetime2](7) NOT NULL,
|
||||
[EndDate] [datetime2](7) NULL,
|
||||
CONSTRAINT [PK_Apprentice] PRIMARY KEY CLUSTERED ([Id] ASC)
|
||||
);
|
||||
GO
|
||||
|
||||
-- Add foreign key constraints
|
||||
ALTER TABLE [spellcasting].[SpellCastingProcess]
|
||||
ADD CONSTRAINT [FK_SpellCastingProcess_Spell]
|
||||
FOREIGN KEY ([SpellId])
|
||||
REFERENCES [spellcasting].[Spell]([Id]);
|
||||
GO
|
||||
|
||||
ALTER TABLE [spellcasting].[SpellCastingProcess]
|
||||
ADD CONSTRAINT [FK_SpellCastingProcess_Wizard]
|
||||
FOREIGN KEY ([WizardId])
|
||||
REFERENCES [wizards].[Wizard]([Id]);
|
||||
GO
|
||||
|
||||
ALTER TABLE [wizards].[Apprentice]
|
||||
ADD CONSTRAINT [FK_Apprentice_Wizard]
|
||||
FOREIGN KEY ([WizardId])
|
||||
REFERENCES [wizards].[Wizard]([Id]);
|
||||
GO
|
||||
|
||||
ALTER TABLE [wizards].[Apprentice]
|
||||
ADD CONSTRAINT [FK_Apprentice_Mentor]
|
||||
FOREIGN KEY ([MentorId])
|
||||
REFERENCES [wizards].[Wizard]([Id]);
|
||||
GO
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
// Debug output
|
||||
console.log('Total tables:', result.tables.length);
|
||||
console.log('Total relationships:', result.relationships.length);
|
||||
|
||||
// Check if we have the expected number of tables and relationships
|
||||
expect(result.tables).toHaveLength(4);
|
||||
expect(result.relationships).toHaveLength(4);
|
||||
|
||||
// Check a specific relationship we know should exist
|
||||
const spellCastingRel = result.relationships.find(
|
||||
(r) =>
|
||||
r.sourceTable === 'SpellCastingProcess' &&
|
||||
r.targetTable === 'Spell' &&
|
||||
r.sourceColumn === 'SpellId'
|
||||
);
|
||||
|
||||
expect(spellCastingRel).toBeDefined();
|
||||
|
||||
if (spellCastingRel) {
|
||||
// Find the corresponding tables
|
||||
const spellTable = result.tables.find(
|
||||
(t) => t.name === 'Spell' && t.schema === 'spellcasting'
|
||||
);
|
||||
const spellCastingProcessTable = result.tables.find(
|
||||
(t) =>
|
||||
t.name === 'SpellCastingProcess' &&
|
||||
t.schema === 'spellcasting'
|
||||
);
|
||||
|
||||
console.log('SpellCastingProcess relationship:', {
|
||||
sourceTableId: spellCastingRel.sourceTableId,
|
||||
targetTableId: spellCastingRel.targetTableId,
|
||||
spellCastingProcessTableId: spellCastingProcessTable?.id,
|
||||
spellTableId: spellTable?.id,
|
||||
isSourceIdValid:
|
||||
spellCastingRel.sourceTableId ===
|
||||
spellCastingProcessTable?.id,
|
||||
isTargetIdValid:
|
||||
spellCastingRel.targetTableId === spellTable?.id,
|
||||
});
|
||||
|
||||
// Verify the IDs are properly linked
|
||||
expect(spellCastingRel.sourceTableId).toBeTruthy();
|
||||
expect(spellCastingRel.targetTableId).toBeTruthy();
|
||||
expect(spellCastingRel.sourceTableId).toBe(
|
||||
spellCastingProcessTable!.id
|
||||
);
|
||||
expect(spellCastingRel.targetTableId).toBe(spellTable!.id);
|
||||
}
|
||||
|
||||
// Check the apprentice self-referencing relationships
|
||||
const apprenticeWizardRel = result.relationships.find(
|
||||
(r) =>
|
||||
r.sourceTable === 'Apprentice' &&
|
||||
r.targetTable === 'Wizard' &&
|
||||
r.sourceColumn === 'WizardId'
|
||||
);
|
||||
|
||||
const apprenticeMentorRel = result.relationships.find(
|
||||
(r) =>
|
||||
r.sourceTable === 'Apprentice' &&
|
||||
r.targetTable === 'Wizard' &&
|
||||
r.sourceColumn === 'MentorId'
|
||||
);
|
||||
|
||||
expect(apprenticeWizardRel).toBeDefined();
|
||||
expect(apprenticeMentorRel).toBeDefined();
|
||||
|
||||
// Check that all relationships have valid table IDs
|
||||
const relationshipsWithMissingIds = result.relationships.filter(
|
||||
(r) =>
|
||||
!r.sourceTableId ||
|
||||
!r.targetTableId ||
|
||||
r.sourceTableId === '' ||
|
||||
r.targetTableId === ''
|
||||
);
|
||||
|
||||
if (relationshipsWithMissingIds.length > 0) {
|
||||
console.log(
|
||||
'Relationships with missing IDs:',
|
||||
relationshipsWithMissingIds.slice(0, 5)
|
||||
);
|
||||
}
|
||||
|
||||
expect(relationshipsWithMissingIds).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,198 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromSQLServer } from '../sqlserver';
|
||||
import { convertToChartDBDiagram } from '../../../common';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
|
||||
describe('SQL Server varchar(max) and nvarchar(max) preservation', () => {
|
||||
it('should preserve varchar(max) and nvarchar(max) in column definitions', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [dbo].[magical_texts] (
|
||||
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
|
||||
[Title] [nvarchar](255) NOT NULL,
|
||||
[Description] [nvarchar](max) NULL,
|
||||
[Content] [varchar](max) NOT NULL,
|
||||
[ShortNote] [varchar](100) NULL,
|
||||
[Metadata] [nvarchar](4000) NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const table = result.tables[0];
|
||||
expect(table.columns).toHaveLength(6);
|
||||
|
||||
// Check that max is preserved in typeArgs
|
||||
const descriptionCol = table.columns.find(
|
||||
(c) => c.name === 'Description'
|
||||
);
|
||||
expect(descriptionCol).toBeDefined();
|
||||
expect(descriptionCol?.type).toBe('nvarchar');
|
||||
expect(descriptionCol?.typeArgs).toBe('max');
|
||||
|
||||
const contentCol = table.columns.find((c) => c.name === 'Content');
|
||||
expect(contentCol).toBeDefined();
|
||||
expect(contentCol?.type).toBe('varchar');
|
||||
expect(contentCol?.typeArgs).toBe('max');
|
||||
|
||||
// Check that numeric lengths are preserved as arrays
|
||||
const titleCol = table.columns.find((c) => c.name === 'Title');
|
||||
expect(titleCol).toBeDefined();
|
||||
expect(titleCol?.type).toBe('nvarchar');
|
||||
expect(titleCol?.typeArgs).toEqual([255]);
|
||||
|
||||
const shortNoteCol = table.columns.find((c) => c.name === 'ShortNote');
|
||||
expect(shortNoteCol).toBeDefined();
|
||||
expect(shortNoteCol?.type).toBe('varchar');
|
||||
expect(shortNoteCol?.typeArgs).toEqual([100]);
|
||||
});
|
||||
|
||||
it('should convert varchar(max) to characterMaximumLength field in diagram', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [dbo].[spell_scrolls] (
|
||||
[Id] [int] IDENTITY(1,1) PRIMARY KEY,
|
||||
[SpellName] [nvarchar](50) NOT NULL,
|
||||
[Incantation] [nvarchar](max) NOT NULL,
|
||||
[Instructions] [varchar](max) NULL,
|
||||
[PowerLevel] [decimal](10, 2) NOT NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
const diagram = convertToChartDBDiagram(
|
||||
result,
|
||||
DatabaseType.SQL_SERVER,
|
||||
DatabaseType.SQL_SERVER
|
||||
);
|
||||
|
||||
expect(diagram.tables).toBeDefined();
|
||||
expect(diagram.tables).toHaveLength(1);
|
||||
const table = diagram.tables![0];
|
||||
|
||||
// Check that 'max' is preserved in characterMaximumLength
|
||||
const incantationField = table.fields.find(
|
||||
(f) => f.name === 'Incantation'
|
||||
);
|
||||
expect(incantationField).toBeDefined();
|
||||
expect(incantationField?.characterMaximumLength).toBe('max');
|
||||
|
||||
const instructionsField = table.fields.find(
|
||||
(f) => f.name === 'Instructions'
|
||||
);
|
||||
expect(instructionsField).toBeDefined();
|
||||
expect(instructionsField?.characterMaximumLength).toBe('max');
|
||||
|
||||
// Check that numeric lengths are preserved
|
||||
const spellNameField = table.fields.find((f) => f.name === 'SpellName');
|
||||
expect(spellNameField).toBeDefined();
|
||||
expect(spellNameField?.characterMaximumLength).toBe('50');
|
||||
|
||||
// Check decimal precision/scale
|
||||
const powerLevelField = table.fields.find(
|
||||
(f) => f.name === 'PowerLevel'
|
||||
);
|
||||
expect(powerLevelField).toBeDefined();
|
||||
expect(powerLevelField?.precision).toBe(10);
|
||||
expect(powerLevelField?.scale).toBe(2);
|
||||
});
|
||||
|
||||
it('should handle mixed varchar types with schema and relationships', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [content].[authors] (
|
||||
[Id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[Name] [nvarchar](100) NOT NULL,
|
||||
[Bio] [nvarchar](max) NULL
|
||||
);
|
||||
|
||||
CREATE TABLE [content].[books] (
|
||||
[Id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
|
||||
[AuthorId] [uniqueidentifier] NOT NULL,
|
||||
[Title] [nvarchar](500) NOT NULL,
|
||||
[Summary] [nvarchar](max) NULL,
|
||||
[FullText] [varchar](max) NOT NULL,
|
||||
[ISBN] [varchar](13) NULL,
|
||||
CONSTRAINT [FK_books_authors] FOREIGN KEY ([AuthorId]) REFERENCES [content].[authors]([Id])
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
|
||||
// Check authors table
|
||||
const authorsTable = result.tables.find((t) => t.name === 'authors');
|
||||
expect(authorsTable).toBeDefined();
|
||||
|
||||
const bioCol = authorsTable?.columns.find((c) => c.name === 'Bio');
|
||||
expect(bioCol?.typeArgs).toBe('max');
|
||||
|
||||
// Check books table
|
||||
const booksTable = result.tables.find((t) => t.name === 'books');
|
||||
expect(booksTable).toBeDefined();
|
||||
|
||||
const summaryCol = booksTable?.columns.find(
|
||||
(c) => c.name === 'Summary'
|
||||
);
|
||||
expect(summaryCol?.typeArgs).toBe('max');
|
||||
|
||||
const fullTextCol = booksTable?.columns.find(
|
||||
(c) => c.name === 'FullText'
|
||||
);
|
||||
expect(fullTextCol?.typeArgs).toBe('max');
|
||||
|
||||
const isbnCol = booksTable?.columns.find((c) => c.name === 'ISBN');
|
||||
expect(isbnCol?.typeArgs).toEqual([13]);
|
||||
|
||||
// Verify relationship is preserved
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
expect(result.relationships[0].sourceTable).toBe('books');
|
||||
expect(result.relationships[0].targetTable).toBe('authors');
|
||||
});
|
||||
|
||||
it('should handle complex table with various SQL Server features including varchar(max)', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE [reporting].[wizard_performance](\
|
||||
[Id] [bigint] IDENTITY(1,1) NOT NULL,
|
||||
[WizardId] [uniqueidentifier] NOT NULL,
|
||||
[EvaluationDate] [datetime2](7) NOT NULL,
|
||||
[PerformanceScore] [decimal](5, 2) NOT NULL,
|
||||
[Comments] [nvarchar](max) NULL,
|
||||
[DetailedReport] [varchar](max) NULL,
|
||||
[Signature] [varbinary](max) NULL,
|
||||
[ReviewerNotes] [text] NULL,
|
||||
[IsActive] [bit] NOT NULL DEFAULT 1,
|
||||
CONSTRAINT [PK_wizard_performance] PRIMARY KEY CLUSTERED ([Id] ASC)
|
||||
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY];
|
||||
`;
|
||||
|
||||
const result = await fromSQLServer(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const table = result.tables[0];
|
||||
|
||||
// Check varchar(max) columns
|
||||
const commentsCol = table.columns.find((c) => c.name === 'Comments');
|
||||
expect(commentsCol?.type).toBe('nvarchar');
|
||||
expect(commentsCol?.typeArgs).toBe('max');
|
||||
|
||||
const reportCol = table.columns.find(
|
||||
(c) => c.name === 'DetailedReport'
|
||||
);
|
||||
expect(reportCol?.type).toBe('varchar');
|
||||
expect(reportCol?.typeArgs).toBe('max');
|
||||
|
||||
// Note: varbinary(max) should also be preserved but might need special handling
|
||||
const signatureCol = table.columns.find((c) => c.name === 'Signature');
|
||||
expect(signatureCol?.type).toBe('varbinary');
|
||||
// varbinary(max) handling might differ
|
||||
|
||||
// Check other column types
|
||||
const scoreCol = table.columns.find(
|
||||
(c) => c.name === 'PerformanceScore'
|
||||
);
|
||||
expect(scoreCol?.typeArgs).toEqual([5, 2]);
|
||||
|
||||
const idCol = table.columns.find((c) => c.name === 'Id');
|
||||
expect(idCol?.increment).toBe(true);
|
||||
});
|
||||
});
|
||||
@@ -7,111 +7,126 @@ import type {
|
||||
SQLForeignKey,
|
||||
SQLASTNode,
|
||||
} from '../../common';
|
||||
import { buildSQLFromAST } from '../../common';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import type {
|
||||
TableReference,
|
||||
ColumnReference,
|
||||
ColumnDefinition,
|
||||
ConstraintDefinition,
|
||||
CreateTableStatement,
|
||||
CreateIndexStatement,
|
||||
AlterTableStatement,
|
||||
} from './sqlserver-common';
|
||||
import {
|
||||
parserOpts,
|
||||
extractColumnName,
|
||||
getTypeArgs,
|
||||
findTableWithSchemaSupport,
|
||||
} from './sqlserver-common';
|
||||
|
||||
/**
|
||||
* Helper function to safely build SQL from AST nodes, handling null/undefined/invalid cases
|
||||
*/
|
||||
function safelyBuildSQLFromAST(ast: unknown): string | undefined {
|
||||
if (!ast) return undefined;
|
||||
|
||||
// Make sure it's a valid AST node with a 'type' property
|
||||
if (typeof ast === 'object' && ast !== null && 'type' in ast) {
|
||||
return buildSQLFromAST(ast as SQLASTNode, DatabaseType.SQL_SERVER);
|
||||
}
|
||||
|
||||
// Return string representation for non-AST objects
|
||||
if (ast !== null && (typeof ast === 'string' || typeof ast === 'number')) {
|
||||
return String(ast);
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Preprocess SQL Server script to remove or modify parts that the parser can't handle
|
||||
*/
|
||||
function preprocessSQLServerScript(sqlContent: string): string {
|
||||
// 1. Remove IF NOT EXISTS ... BEGIN ... END blocks (typically used for schema creation)
|
||||
// 1. Remove USE statements
|
||||
sqlContent = sqlContent.replace(/USE\s+\[[^\]]+\]\s*;?/gi, '');
|
||||
|
||||
// 2. Remove SET statements
|
||||
sqlContent = sqlContent.replace(/SET\s+\w+\s+\w+\s*;?/gi, '');
|
||||
|
||||
// 3. Remove GO statements (batch separators)
|
||||
sqlContent = sqlContent.replace(/\bGO\b/gi, ';');
|
||||
|
||||
// 4. Remove CREATE SCHEMA statements
|
||||
sqlContent = sqlContent.replace(/CREATE\s+SCHEMA\s+\[[^\]]+\]\s*;?/gi, '');
|
||||
|
||||
// 5. Remove IF NOT EXISTS ... BEGIN ... END blocks
|
||||
sqlContent = sqlContent.replace(
|
||||
/IF\s+NOT\s+EXISTS\s*\([^)]+\)\s*BEGIN\s+[^;]+;\s*END;?/gi,
|
||||
''
|
||||
);
|
||||
|
||||
// 2. Remove any GO statements (batch separators)
|
||||
sqlContent = sqlContent.replace(/\bGO\b/gi, ';');
|
||||
|
||||
// 3. Remove any EXEC statements
|
||||
// 6. Remove any EXEC statements
|
||||
sqlContent = sqlContent.replace(/EXEC\s*\([^)]+\)\s*;?/gi, '');
|
||||
sqlContent = sqlContent.replace(/EXEC\s+[^;]+;/gi, '');
|
||||
|
||||
// 4. Replace any remaining procedural code blocks that might cause issues
|
||||
// 7. Replace any remaining procedural code blocks
|
||||
sqlContent = sqlContent.replace(
|
||||
/BEGIN\s+TRANSACTION|COMMIT\s+TRANSACTION|ROLLBACK\s+TRANSACTION/gi,
|
||||
'-- $&'
|
||||
);
|
||||
|
||||
// 5. Special handling for CREATE TABLE with reserved keywords as column names
|
||||
// Find CREATE TABLE statements
|
||||
const createTablePattern =
|
||||
/CREATE\s+TABLE\s+\[?([^\]]*)\]?\.?\[?([^\]]*)\]?\s*\(([^;]*)\)/gi;
|
||||
// 8. Remove square brackets (SQL Server specific)
|
||||
sqlContent = sqlContent.replace(/\[/g, '');
|
||||
sqlContent = sqlContent.replace(/\]/g, '');
|
||||
|
||||
// 9. Remove ON PRIMARY and TEXTIMAGE_ON PRIMARY clauses
|
||||
sqlContent = sqlContent.replace(
|
||||
createTablePattern,
|
||||
(_, schema, tableName, columnDefs) => {
|
||||
// Process column definitions to rename problematic columns
|
||||
let processedColumnDefs = columnDefs;
|
||||
|
||||
// Replace any column named "column" with "column_name"
|
||||
processedColumnDefs = processedColumnDefs.replace(
|
||||
/\[column\]/gi,
|
||||
'[column_name]'
|
||||
);
|
||||
|
||||
// Replace any column named "int" with "int_col"
|
||||
processedColumnDefs = processedColumnDefs.replace(
|
||||
/\[int\]/gi,
|
||||
'[int_col]'
|
||||
);
|
||||
|
||||
// Replace any column named "time" with "time_col"
|
||||
processedColumnDefs = processedColumnDefs.replace(
|
||||
/\[time\]/gi,
|
||||
'[time_col]'
|
||||
);
|
||||
|
||||
// Replace any column named "order" with "order_column"
|
||||
processedColumnDefs = processedColumnDefs.replace(
|
||||
/\[order\]/gi,
|
||||
'[order_column]'
|
||||
);
|
||||
|
||||
// Rebuild the CREATE TABLE statement
|
||||
return `CREATE TABLE [${schema || 'dbo'}].[${tableName}] (${processedColumnDefs})`;
|
||||
}
|
||||
/ON\s+PRIMARY(\s+TEXTIMAGE_ON\s+PRIMARY)?/gi,
|
||||
''
|
||||
);
|
||||
|
||||
// 6. Handle default value expressions with functions - replace with simpler defaults
|
||||
sqlContent = sqlContent.replace(/DEFAULT\s+'\([^)]+\)'/gi, "DEFAULT '0'");
|
||||
sqlContent = sqlContent.replace(/DEFAULT\s+\([^)]+\)/gi, 'DEFAULT 0');
|
||||
// 10. Remove WITH options from constraints
|
||||
sqlContent = sqlContent.replace(/WITH\s*\([^)]+\)/gi, '');
|
||||
|
||||
// 7. Split into individual statements to handle them separately
|
||||
// 11. Handle default value expressions with functions
|
||||
sqlContent = sqlContent.replace(/DEFAULT\s+NEWID\(\)/gi, "DEFAULT 'newid'");
|
||||
sqlContent = sqlContent.replace(
|
||||
/DEFAULT\s+NEWSEQUENTIALID\(\)/gi,
|
||||
"DEFAULT 'newsequentialid'"
|
||||
);
|
||||
sqlContent = sqlContent.replace(
|
||||
/DEFAULT\s+GETDATE\(\)/gi,
|
||||
"DEFAULT 'getdate'"
|
||||
);
|
||||
sqlContent = sqlContent.replace(
|
||||
/DEFAULT\s+SYSDATETIME\(\)/gi,
|
||||
"DEFAULT 'sysdatetime'"
|
||||
);
|
||||
// Don't replace numeric defaults or simple values
|
||||
sqlContent = sqlContent.replace(/DEFAULT\s+'\([^)]+\)'/gi, "DEFAULT '0'");
|
||||
// Only replace function calls in DEFAULT, not numeric literals
|
||||
sqlContent = sqlContent.replace(
|
||||
/DEFAULT\s+(\w+)\s*\([^)]*\)/gi,
|
||||
"DEFAULT '0'"
|
||||
);
|
||||
|
||||
// 12. Replace SQL Server specific data types with standard types
|
||||
// Note: We preserve varchar(max) and nvarchar(max) for accurate export
|
||||
sqlContent = sqlContent.replace(/\buniqueid\b/gi, 'uniqueidentifier'); // Fix common typo
|
||||
sqlContent = sqlContent.replace(
|
||||
/\bdatetime2\s*\(\s*\d+\s*\)/gi,
|
||||
'datetime2'
|
||||
);
|
||||
sqlContent = sqlContent.replace(/\btime\s*\(\s*\d+\s*\)/gi, 'time');
|
||||
sqlContent = sqlContent.replace(
|
||||
/\bdatetimeoffset\s*\(\s*\d+\s*\)/gi,
|
||||
'datetimeoffset'
|
||||
);
|
||||
|
||||
// 13. Handle IDENTITY columns - convert to a simpler format
|
||||
sqlContent = sqlContent.replace(
|
||||
/IDENTITY\s*\(\s*\d+\s*,\s*\d+\s*\)/gi,
|
||||
'AUTO_INCREMENT'
|
||||
);
|
||||
sqlContent = sqlContent.replace(/IDENTITY/gi, 'AUTO_INCREMENT');
|
||||
|
||||
// 14. Replace CHECK constraints with comments (parser doesn't handle well)
|
||||
sqlContent = sqlContent.replace(
|
||||
/CHECK\s*\([^)]+\)/gi,
|
||||
'/* CHECK CONSTRAINT */'
|
||||
);
|
||||
|
||||
// 15. Handle FOREIGN KEY constraints within CREATE TABLE
|
||||
// Convert inline foreign key syntax to be more parser-friendly
|
||||
sqlContent = sqlContent.replace(
|
||||
/(\w+)\s+(\w+(?:\s*\(\s*\d+(?:\s*,\s*\d+)?\s*\))?)\s+(?:NOT\s+NULL\s+)?FOREIGN\s+KEY\s+REFERENCES\s+(\w+)\.?(\w+)\s*\((\w+)\)/gi,
|
||||
'$1 $2 /* FK TO $3.$4($5) */'
|
||||
);
|
||||
|
||||
// Handle standalone FOREIGN KEY constraints
|
||||
sqlContent = sqlContent.replace(
|
||||
/CONSTRAINT\s+(\w+)\s+FOREIGN\s+KEY\s*\((\w+)\)\s+REFERENCES\s+(\w+)\.?(\w+)?\s*\((\w+)\)(?:\s+ON\s+DELETE\s+(\w+))?(?:\s+ON\s+UPDATE\s+(\w+))?/gi,
|
||||
'/* CONSTRAINT $1 FK($2) REF $3.$4($5) */'
|
||||
);
|
||||
|
||||
// 16. Split into individual statements to handle them separately
|
||||
const statements = sqlContent
|
||||
.split(';')
|
||||
.filter((stmt) => stmt.trim().length > 0);
|
||||
@@ -120,30 +135,27 @@ function preprocessSQLServerScript(sqlContent: string): string {
|
||||
const filteredStatements = statements.filter((stmt) => {
|
||||
const trimmedStmt = stmt.trim().toUpperCase();
|
||||
return (
|
||||
trimmedStmt.startsWith('CREATE TABLE') ||
|
||||
trimmedStmt.startsWith('CREATE UNIQUE INDEX') ||
|
||||
trimmedStmt.startsWith('CREATE INDEX') ||
|
||||
trimmedStmt.startsWith('ALTER TABLE')
|
||||
trimmedStmt.includes('CREATE TABLE') ||
|
||||
trimmedStmt.includes('CREATE UNIQUE INDEX') ||
|
||||
trimmedStmt.includes('CREATE INDEX') ||
|
||||
trimmedStmt.includes('ALTER TABLE')
|
||||
);
|
||||
});
|
||||
|
||||
return filteredStatements.join(';') + ';';
|
||||
return filteredStatements.join(';\n') + ';';
|
||||
}
|
||||
|
||||
/**
|
||||
* Manual parsing of ALTER TABLE ADD CONSTRAINT statements
|
||||
* This is a fallback for when the node-sql-parser fails to properly parse the constraints
|
||||
*/
|
||||
function parseAlterTableAddConstraint(statements: string[]): {
|
||||
fkData: SQLForeignKey[];
|
||||
tableMap: Record<string, string>;
|
||||
} {
|
||||
function parseAlterTableAddConstraint(statements: string[]): SQLForeignKey[] {
|
||||
const fkData: SQLForeignKey[] = [];
|
||||
const tableMap: Record<string, string> = {};
|
||||
|
||||
// Regular expressions to extract information from ALTER TABLE statements
|
||||
// Handle multi-line ALTER TABLE statements
|
||||
const alterTableRegex =
|
||||
/ALTER\s+TABLE\s+\[?([^\]]*)\]?\.?\[?([^\]]*)\]?\s+ADD\s+CONSTRAINT\s+\[?([^\]]*)\]?\s+FOREIGN\s+KEY\s*\(\[?([^\]]*)\]?\)\s+REFERENCES\s+\[?([^\]]*)\]?\.?\[?([^\]]*)\]?\s*\(\[?([^\]]*)\]?\)/i;
|
||||
/ALTER\s+TABLE\s+\[?([^\]]*)\]?\.?\[?([^\]]*)\]?\s+(?:WITH\s+CHECK\s+)?ADD\s+CONSTRAINT\s+\[?([^\]]*)\]?\s+FOREIGN\s+KEY\s*\(\[?([^\]]*)\]?\)\s*REFERENCES\s+\[?([^\]]*)\]?\.?\[?([^\]]*)\]?\s*\(\[?([^\]]*)\]?\)/is;
|
||||
|
||||
for (const stmt of statements) {
|
||||
const match = stmt.match(alterTableRegex);
|
||||
@@ -159,18 +171,6 @@ function parseAlterTableAddConstraint(statements: string[]): {
|
||||
targetColumn,
|
||||
] = match;
|
||||
|
||||
// Generate IDs for tables if they don't already exist
|
||||
const sourceTableKey = `${sourceSchema}.${sourceTable}`;
|
||||
const targetTableKey = `${targetSchema}.${targetTable}`;
|
||||
|
||||
if (!tableMap[sourceTableKey]) {
|
||||
tableMap[sourceTableKey] = generateId();
|
||||
}
|
||||
|
||||
if (!tableMap[targetTableKey]) {
|
||||
tableMap[targetTableKey] = generateId();
|
||||
}
|
||||
|
||||
fkData.push({
|
||||
name: constraintName,
|
||||
sourceTable: sourceTable,
|
||||
@@ -179,13 +179,13 @@ function parseAlterTableAddConstraint(statements: string[]): {
|
||||
targetTable: targetTable,
|
||||
targetSchema: targetSchema,
|
||||
targetColumn: targetColumn,
|
||||
sourceTableId: tableMap[sourceTableKey],
|
||||
targetTableId: tableMap[targetTableKey],
|
||||
sourceTableId: '', // Will be filled by linkRelationships
|
||||
targetTableId: '', // Will be filled by linkRelationships
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return { fkData, tableMap };
|
||||
return fkData;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -267,6 +267,239 @@ function normalizeSQLServerDataType(dataType: string): string {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Manual parsing of CREATE TABLE statements when node-sql-parser fails
|
||||
*/
|
||||
function parseCreateTableManually(
|
||||
statement: string,
|
||||
tables: SQLTable[],
|
||||
tableMap: Record<string, string>,
|
||||
relationships: SQLForeignKey[]
|
||||
): void {
|
||||
// Extract table name and schema (handling square brackets)
|
||||
const tableMatch = statement.match(
|
||||
/CREATE\s+TABLE\s+(?:\[?(\w+)\]?\.)??\[?(\w+)\]?\s*\(/i
|
||||
);
|
||||
if (!tableMatch) return;
|
||||
|
||||
const [, schema = 'dbo', tableName] = tableMatch;
|
||||
|
||||
// Generate table ID
|
||||
const tableId = generateId();
|
||||
const tableKey = `${schema}.${tableName}`;
|
||||
tableMap[tableKey] = tableId;
|
||||
|
||||
// Extract column definitions
|
||||
const columns: SQLColumn[] = [];
|
||||
const indexes: SQLIndex[] = [];
|
||||
|
||||
// Find the content between the parentheses
|
||||
const tableContentMatch = statement.match(
|
||||
/CREATE\s+TABLE\s+[^(]+\(([\s\S]*)\)\s*(?:ON\s+|$)/i
|
||||
);
|
||||
if (!tableContentMatch) return;
|
||||
|
||||
const tableContent = tableContentMatch[1];
|
||||
|
||||
// Split table content by commas but not within parentheses
|
||||
const parts = [];
|
||||
let current = '';
|
||||
let parenDepth = 0;
|
||||
|
||||
for (let i = 0; i < tableContent.length; i++) {
|
||||
const char = tableContent[i];
|
||||
if (char === '(') parenDepth++;
|
||||
else if (char === ')') parenDepth--;
|
||||
else if (char === ',' && parenDepth === 0) {
|
||||
parts.push(current.trim());
|
||||
current = '';
|
||||
continue;
|
||||
}
|
||||
current += char;
|
||||
}
|
||||
if (current.trim()) parts.push(current.trim());
|
||||
|
||||
// Process each part (column or constraint)
|
||||
for (const part of parts) {
|
||||
// Handle constraint definitions
|
||||
if (part.match(/^\s*CONSTRAINT/i)) {
|
||||
// Parse constraints
|
||||
const constraintMatch = part.match(
|
||||
/CONSTRAINT\s+\[?(\w+)\]?\s+(PRIMARY\s+KEY|UNIQUE|FOREIGN\s+KEY)/i
|
||||
);
|
||||
if (constraintMatch) {
|
||||
const [, constraintName, constraintType] = constraintMatch;
|
||||
|
||||
if (constraintType.match(/PRIMARY\s+KEY/i)) {
|
||||
// Extract columns from PRIMARY KEY constraint - handle multi-line format
|
||||
const pkColumnsMatch = part.match(
|
||||
/PRIMARY\s+KEY(?:\s+CLUSTERED)?\s*\(([\s\S]+?)\)/i
|
||||
);
|
||||
if (pkColumnsMatch) {
|
||||
const pkColumns = pkColumnsMatch[1]
|
||||
.split(',')
|
||||
.map((c) =>
|
||||
c
|
||||
.trim()
|
||||
.replace(/\[|\]|\s+(ASC|DESC)/gi, '')
|
||||
.trim()
|
||||
);
|
||||
pkColumns.forEach((col) => {
|
||||
const column = columns.find((c) => c.name === col);
|
||||
if (column) column.primaryKey = true;
|
||||
});
|
||||
}
|
||||
} else if (constraintType === 'UNIQUE') {
|
||||
// Extract columns from UNIQUE constraint
|
||||
const uniqueColumnsMatch = part.match(
|
||||
/UNIQUE(?:\s+NONCLUSTERED)?\s*\(([\s\S]+?)\)/i
|
||||
);
|
||||
if (uniqueColumnsMatch) {
|
||||
const uniqueColumns = uniqueColumnsMatch[1]
|
||||
.split(',')
|
||||
.map((c) =>
|
||||
c
|
||||
.trim()
|
||||
.replace(/\[|\]|\s+(ASC|DESC)/gi, '')
|
||||
.trim()
|
||||
);
|
||||
indexes.push({
|
||||
name: constraintName,
|
||||
columns: uniqueColumns,
|
||||
unique: true,
|
||||
});
|
||||
}
|
||||
} else if (constraintType.match(/FOREIGN\s+KEY/i)) {
|
||||
// Parse foreign key constraint
|
||||
const fkMatch = part.match(
|
||||
/FOREIGN\s+KEY\s*\(([^)]+)\)\s+REFERENCES\s+(?:\[?(\w+)\]?\.)??\[?(\w+)\]?\s*\(([^)]+)\)/i
|
||||
);
|
||||
if (fkMatch) {
|
||||
const [
|
||||
,
|
||||
sourceCol,
|
||||
targetSchema = 'dbo',
|
||||
targetTable,
|
||||
targetCol,
|
||||
] = fkMatch;
|
||||
relationships.push({
|
||||
name: constraintName,
|
||||
sourceTable: tableName,
|
||||
sourceSchema: schema,
|
||||
sourceColumn: sourceCol
|
||||
.trim()
|
||||
.replace(/\[|\]/g, ''),
|
||||
targetTable: targetTable,
|
||||
targetSchema: targetSchema,
|
||||
targetColumn: targetCol
|
||||
.trim()
|
||||
.replace(/\[|\]/g, ''),
|
||||
sourceTableId: tableId,
|
||||
targetTableId: '', // Will be filled later
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// Parse column definition - handle both numeric args and 'max'
|
||||
// Handle brackets around column names and types
|
||||
let columnMatch = part.match(
|
||||
/^\s*\[?(\w+)\]?\s+\[?(\w+)\]?(?:\s*\(\s*([\d,\s]+|max)\s*\))?(.*)$/i
|
||||
);
|
||||
|
||||
// If no match, try pattern for preprocessed types without parentheses
|
||||
if (!columnMatch) {
|
||||
columnMatch = part.match(/^\s*(\w+)\s+(\w+)\s+([\d,\s]+)\s+(.*)$/i);
|
||||
}
|
||||
|
||||
if (columnMatch) {
|
||||
const [, colName, baseType, typeArgs, rest] = columnMatch;
|
||||
|
||||
if (
|
||||
colName &&
|
||||
!colName.match(/^(PRIMARY|FOREIGN|UNIQUE|CHECK)$/i)
|
||||
) {
|
||||
// Check for inline foreign key
|
||||
const inlineFkMatch = rest.match(
|
||||
/FOREIGN\s+KEY\s+REFERENCES\s+(?:\[?(\w+)\]?\.)??\[?(\w+)\]?\s*\(([^)]+)\)/i
|
||||
);
|
||||
if (inlineFkMatch) {
|
||||
const [, targetSchema = 'dbo', targetTable, targetCol] =
|
||||
inlineFkMatch;
|
||||
relationships.push({
|
||||
name: `FK_${tableName}_${colName}`,
|
||||
sourceTable: tableName,
|
||||
sourceSchema: schema,
|
||||
sourceColumn: colName,
|
||||
targetTable: targetTable,
|
||||
targetSchema: targetSchema,
|
||||
targetColumn: targetCol.trim().replace(/\[|\]/g, ''),
|
||||
sourceTableId: tableId,
|
||||
targetTableId: '', // Will be filled later
|
||||
});
|
||||
}
|
||||
|
||||
const isPrimaryKey = !!rest.match(/PRIMARY\s+KEY/i);
|
||||
const isNotNull = !!rest.match(/NOT\s+NULL/i);
|
||||
const isIdentity = !!rest.match(
|
||||
/IDENTITY(?:\s*\(\s*\d+\s*,\s*\d+\s*\))?/i
|
||||
);
|
||||
const isUnique = !!rest.match(/UNIQUE/i);
|
||||
const defaultMatch = rest.match(/DEFAULT\s+([^,]+)/i);
|
||||
|
||||
// Parse type arguments
|
||||
let parsedTypeArgs: number[] | string | undefined;
|
||||
if (typeArgs) {
|
||||
if (typeArgs.toLowerCase() === 'max') {
|
||||
// Preserve 'max' keyword for varchar/nvarchar types
|
||||
parsedTypeArgs = 'max';
|
||||
} else {
|
||||
// Parse numeric args
|
||||
parsedTypeArgs = typeArgs
|
||||
.split(',')
|
||||
.map((arg) => parseInt(arg.trim()));
|
||||
}
|
||||
}
|
||||
|
||||
const column: SQLColumn = {
|
||||
name: colName,
|
||||
type: normalizeSQLServerDataType(baseType.trim()),
|
||||
nullable: !isNotNull && !isPrimaryKey,
|
||||
primaryKey: isPrimaryKey,
|
||||
unique: isUnique,
|
||||
increment: isIdentity,
|
||||
default: defaultMatch ? defaultMatch[1].trim() : undefined,
|
||||
};
|
||||
|
||||
// Add type arguments if present
|
||||
if (parsedTypeArgs) {
|
||||
if (typeof parsedTypeArgs === 'string') {
|
||||
// For 'max' keyword
|
||||
column.typeArgs = parsedTypeArgs;
|
||||
} else if (parsedTypeArgs.length > 0) {
|
||||
// For numeric arguments
|
||||
column.typeArgs = parsedTypeArgs;
|
||||
}
|
||||
}
|
||||
|
||||
columns.push(column);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add the table
|
||||
tables.push({
|
||||
id: tableId,
|
||||
name: tableName,
|
||||
schema: schema,
|
||||
columns,
|
||||
indexes,
|
||||
order: tables.length,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse SQL Server DDL scripts and extract database structure
|
||||
* @param sqlContent SQL Server DDL content as string
|
||||
@@ -280,84 +513,131 @@ export async function fromSQLServer(
|
||||
const tableMap: Record<string, string> = {}; // Maps table name to its ID
|
||||
|
||||
try {
|
||||
// Preprocess the SQL content to handle T-SQL specific syntax
|
||||
const preprocessedSQL = preprocessSQLServerScript(sqlContent);
|
||||
|
||||
// First, handle ALTER TABLE statements for foreign keys
|
||||
// Split by GO or semicolon for SQL Server
|
||||
const statements = sqlContent
|
||||
.split(';')
|
||||
.split(/(?:GO\s*$|;\s*$)/im)
|
||||
.filter((stmt) => stmt.trim().length > 0);
|
||||
|
||||
const alterTableStatements = statements.filter(
|
||||
(stmt) =>
|
||||
stmt.trim().toUpperCase().startsWith('ALTER TABLE') &&
|
||||
stmt.trim().toUpperCase().includes('ALTER TABLE') &&
|
||||
stmt.includes('FOREIGN KEY')
|
||||
);
|
||||
|
||||
if (alterTableStatements.length > 0) {
|
||||
const { fkData, tableMap: fkTableMap } =
|
||||
parseAlterTableAddConstraint(alterTableStatements);
|
||||
|
||||
// Store table IDs from alter statements
|
||||
Object.assign(tableMap, fkTableMap);
|
||||
|
||||
const fkData = parseAlterTableAddConstraint(alterTableStatements);
|
||||
// Store foreign key relationships for later processing
|
||||
relationships.push(...fkData);
|
||||
}
|
||||
|
||||
const { Parser } = await import('node-sql-parser');
|
||||
const parser = new Parser();
|
||||
let ast;
|
||||
try {
|
||||
ast = parser.astify(preprocessedSQL, parserOpts);
|
||||
} catch {
|
||||
// Fallback: Try to parse each statement individually
|
||||
const statements = preprocessedSQL
|
||||
.split(';')
|
||||
.filter((stmt) => stmt.trim().length > 0);
|
||||
ast = [];
|
||||
// Parse CREATE TABLE statements manually first
|
||||
const createTableStatements = statements.filter((stmt) =>
|
||||
stmt.trim().toUpperCase().includes('CREATE TABLE')
|
||||
);
|
||||
|
||||
for (const stmt of statements) {
|
||||
try {
|
||||
const stmtAst = parser.astify(stmt + ';', parserOpts);
|
||||
if (Array.isArray(stmtAst)) {
|
||||
ast.push(...stmtAst);
|
||||
} else if (stmtAst) {
|
||||
ast.push(stmtAst);
|
||||
for (const stmt of createTableStatements) {
|
||||
parseCreateTableManually(stmt, tables, tableMap, relationships);
|
||||
}
|
||||
|
||||
// Preprocess the SQL content for node-sql-parser
|
||||
const preprocessedSQL = preprocessSQLServerScript(sqlContent);
|
||||
|
||||
// Try to use node-sql-parser for additional parsing
|
||||
try {
|
||||
const { Parser } = await import('node-sql-parser');
|
||||
const parser = new Parser();
|
||||
let ast;
|
||||
try {
|
||||
ast = parser.astify(preprocessedSQL, parserOpts);
|
||||
} catch {
|
||||
// Fallback: Try to parse each statement individually
|
||||
const statements = preprocessedSQL
|
||||
.split(';')
|
||||
.filter((stmt) => stmt.trim().length > 0);
|
||||
ast = [];
|
||||
|
||||
for (const stmt of statements) {
|
||||
try {
|
||||
const stmtAst = parser.astify(stmt + ';', parserOpts);
|
||||
if (Array.isArray(stmtAst)) {
|
||||
ast.push(...stmtAst);
|
||||
} else if (stmtAst) {
|
||||
ast.push(stmtAst);
|
||||
}
|
||||
} catch {
|
||||
// Skip statements that can't be parsed
|
||||
}
|
||||
} catch {
|
||||
// Skip statements that can't be parsed
|
||||
}
|
||||
}
|
||||
|
||||
if (Array.isArray(ast) && ast.length > 0) {
|
||||
// Process each statement
|
||||
(ast as unknown as SQLASTNode[]).forEach((stmt) => {
|
||||
// Process CREATE INDEX statements
|
||||
if (stmt.type === 'create' && stmt.keyword === 'index') {
|
||||
processCreateIndex(
|
||||
stmt as CreateIndexStatement,
|
||||
tables
|
||||
);
|
||||
}
|
||||
// Process ALTER TABLE statements for non-FK constraints
|
||||
else if (
|
||||
stmt.type === 'alter' &&
|
||||
stmt.keyword === 'table'
|
||||
) {
|
||||
processAlterTable(
|
||||
stmt as AlterTableStatement,
|
||||
tables,
|
||||
relationships
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
} catch (parserError) {
|
||||
// If parser fails completely, continue with manual parsing results
|
||||
console.warn(
|
||||
'node-sql-parser failed, using manual parsing only:',
|
||||
parserError
|
||||
);
|
||||
}
|
||||
|
||||
// Parse CREATE INDEX statements manually
|
||||
const createIndexStatements = statements.filter(
|
||||
(stmt) =>
|
||||
stmt.trim().toUpperCase().includes('CREATE') &&
|
||||
stmt.trim().toUpperCase().includes('INDEX')
|
||||
);
|
||||
|
||||
for (const stmt of createIndexStatements) {
|
||||
const indexMatch = stmt.match(
|
||||
/CREATE\s+(UNIQUE\s+)?INDEX\s+\[?(\w+)\]?\s+ON\s+(?:\[?(\w+)\]?\.)??\[?(\w+)\]?\s*\(([^)]+)\)/i
|
||||
);
|
||||
if (indexMatch) {
|
||||
const [
|
||||
,
|
||||
unique,
|
||||
indexName,
|
||||
schema = 'dbo',
|
||||
tableName,
|
||||
columnsStr,
|
||||
] = indexMatch;
|
||||
const table = tables.find(
|
||||
(t) => t.name === tableName && t.schema === schema
|
||||
);
|
||||
if (table) {
|
||||
const columns = columnsStr
|
||||
.split(',')
|
||||
.map((c) => c.trim().replace(/\[|\]/g, ''));
|
||||
table.indexes.push({
|
||||
name: indexName,
|
||||
columns,
|
||||
unique: !!unique,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!Array.isArray(ast) || ast.length === 0) {
|
||||
throw new Error('Failed to parse SQL DDL - Empty or invalid AST');
|
||||
}
|
||||
|
||||
// Process each statement
|
||||
(ast as unknown as SQLASTNode[]).forEach((stmt) => {
|
||||
// Process CREATE TABLE statements
|
||||
if (stmt.type === 'create' && stmt.keyword === 'table') {
|
||||
processCreateTable(
|
||||
stmt as CreateTableStatement,
|
||||
tables,
|
||||
tableMap,
|
||||
relationships
|
||||
);
|
||||
}
|
||||
// Process CREATE INDEX statements
|
||||
else if (stmt.type === 'create' && stmt.keyword === 'index') {
|
||||
processCreateIndex(stmt as CreateIndexStatement, tables);
|
||||
}
|
||||
// Process ALTER TABLE statements
|
||||
else if (stmt.type === 'alter' && stmt.keyword === 'table') {
|
||||
processAlterTable(
|
||||
stmt as AlterTableStatement,
|
||||
tables,
|
||||
relationships
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
// Link relationships to ensure all targetTableId and sourceTableId fields are filled
|
||||
const validRelationships = linkRelationships(
|
||||
tables,
|
||||
@@ -379,233 +659,6 @@ export async function fromSQLServer(
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Process a CREATE TABLE statement
|
||||
*/
|
||||
function processCreateTable(
|
||||
stmt: CreateTableStatement,
|
||||
tables: SQLTable[],
|
||||
tableMap: Record<string, string>,
|
||||
relationships: SQLForeignKey[]
|
||||
): void {
|
||||
let tableName = '';
|
||||
let schemaName = '';
|
||||
|
||||
// Extract table name and schema
|
||||
if (stmt.table && typeof stmt.table === 'object') {
|
||||
// Handle array of tables if needed
|
||||
if (Array.isArray(stmt.table) && stmt.table.length > 0) {
|
||||
const tableObj = stmt.table[0];
|
||||
tableName = tableObj.table || '';
|
||||
// SQL Server uses 'schema' or 'db' field
|
||||
schemaName = tableObj.schema || tableObj.db || '';
|
||||
} else {
|
||||
// Direct object reference
|
||||
const tableObj = stmt.table as TableReference;
|
||||
tableName = tableObj.table || '';
|
||||
schemaName = tableObj.schema || tableObj.db || '';
|
||||
}
|
||||
}
|
||||
|
||||
if (!tableName) {
|
||||
return;
|
||||
}
|
||||
|
||||
// If no schema specified, use default 'dbo' schema for SQL Server
|
||||
if (!schemaName) {
|
||||
schemaName = 'dbo';
|
||||
}
|
||||
|
||||
// Generate a unique ID for the table
|
||||
const tableId = generateId();
|
||||
const tableKey = `${schemaName ? schemaName + '.' : ''}${tableName}`;
|
||||
tableMap[tableKey] = tableId;
|
||||
|
||||
// Process table columns
|
||||
const columns: SQLColumn[] = [];
|
||||
const indexes: SQLIndex[] = [];
|
||||
|
||||
if (stmt.create_definitions && Array.isArray(stmt.create_definitions)) {
|
||||
stmt.create_definitions.forEach(
|
||||
(def: ColumnDefinition | ConstraintDefinition) => {
|
||||
if (def.resource === 'column') {
|
||||
// Process column definition
|
||||
const columnDef = def as ColumnDefinition;
|
||||
const columnName = extractColumnName(columnDef.column);
|
||||
const rawDataType = columnDef.definition?.dataType || '';
|
||||
const normalizedDataType =
|
||||
normalizeSQLServerDataType(rawDataType);
|
||||
|
||||
if (columnName) {
|
||||
// Check for SQL Server specific column properties
|
||||
const isPrimaryKey =
|
||||
columnDef.primary_key === 'primary key';
|
||||
|
||||
// For SQL Server, check for IDENTITY property in suffixes
|
||||
const hasIdentity = columnDef.definition?.suffix?.some(
|
||||
(suffix) =>
|
||||
suffix.toLowerCase().includes('identity')
|
||||
);
|
||||
|
||||
columns.push({
|
||||
name: columnName,
|
||||
type: normalizedDataType,
|
||||
nullable: columnDef.nullable?.type !== 'not null',
|
||||
primaryKey: isPrimaryKey,
|
||||
unique: columnDef.unique === 'unique',
|
||||
typeArgs: getTypeArgs(columnDef.definition),
|
||||
default: columnDef.default_val
|
||||
? safelyBuildSQLFromAST(columnDef.default_val)
|
||||
: undefined,
|
||||
increment: hasIdentity,
|
||||
});
|
||||
}
|
||||
} else if (def.resource === 'constraint') {
|
||||
// Handle constraint definitions
|
||||
const constraintDef = def as ConstraintDefinition;
|
||||
|
||||
// Handle PRIMARY KEY constraints
|
||||
if (constraintDef.constraint_type === 'primary key') {
|
||||
if (Array.isArray(constraintDef.definition)) {
|
||||
// Extract column names from primary key constraint
|
||||
for (const colDef of constraintDef.definition) {
|
||||
if (
|
||||
colDef &&
|
||||
typeof colDef === 'object' &&
|
||||
'type' in colDef &&
|
||||
colDef.type === 'column_ref' &&
|
||||
'column' in colDef
|
||||
) {
|
||||
const pkColumnName = extractColumnName(
|
||||
colDef as ColumnReference
|
||||
);
|
||||
// Find and mark the column as primary key
|
||||
const column = columns.find(
|
||||
(col) => col.name === pkColumnName
|
||||
);
|
||||
if (column) {
|
||||
column.primaryKey = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// Handle UNIQUE constraints
|
||||
else if (constraintDef.constraint_type === 'unique') {
|
||||
if (Array.isArray(constraintDef.definition)) {
|
||||
const uniqueColumns: string[] = [];
|
||||
// Extract column names from unique constraint
|
||||
for (const colDef of constraintDef.definition) {
|
||||
if (
|
||||
colDef &&
|
||||
typeof colDef === 'object' &&
|
||||
'type' in colDef &&
|
||||
colDef.type === 'column_ref' &&
|
||||
'column' in colDef
|
||||
) {
|
||||
const uniqueColumnName = extractColumnName(
|
||||
colDef as ColumnReference
|
||||
);
|
||||
uniqueColumns.push(uniqueColumnName);
|
||||
}
|
||||
}
|
||||
|
||||
// Add as an index
|
||||
if (uniqueColumns.length > 0) {
|
||||
indexes.push({
|
||||
name:
|
||||
constraintDef.constraint ||
|
||||
`unique_${tableName}_${uniqueColumns.join('_')}`,
|
||||
columns: uniqueColumns,
|
||||
unique: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
// Handle FOREIGN KEY constraints
|
||||
else if (
|
||||
constraintDef.constraint_type === 'foreign key' &&
|
||||
constraintDef.reference
|
||||
) {
|
||||
const reference = constraintDef.reference;
|
||||
if (
|
||||
reference &&
|
||||
reference.table &&
|
||||
reference.columns &&
|
||||
reference.columns.length > 0
|
||||
) {
|
||||
// Extract target table info
|
||||
const targetTable =
|
||||
reference.table as TableReference;
|
||||
const targetTableName = targetTable.table;
|
||||
const targetSchemaName =
|
||||
targetTable.schema || targetTable.db || 'dbo';
|
||||
|
||||
// Extract source column
|
||||
let sourceColumnName = '';
|
||||
if (
|
||||
Array.isArray(constraintDef.definition) &&
|
||||
constraintDef.definition.length > 0
|
||||
) {
|
||||
const sourceColDef =
|
||||
constraintDef.definition[0];
|
||||
if (
|
||||
sourceColDef &&
|
||||
typeof sourceColDef === 'object' &&
|
||||
'type' in sourceColDef &&
|
||||
sourceColDef.type === 'column_ref'
|
||||
) {
|
||||
sourceColumnName = extractColumnName(
|
||||
sourceColDef as ColumnReference
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Extract target column
|
||||
const targetColumnName = extractColumnName(
|
||||
reference.columns[0]
|
||||
);
|
||||
|
||||
if (
|
||||
sourceColumnName &&
|
||||
targetTableName &&
|
||||
targetColumnName
|
||||
) {
|
||||
// Create a foreign key relationship
|
||||
relationships.push({
|
||||
name:
|
||||
constraintDef.constraint ||
|
||||
`fk_${tableName}_${sourceColumnName}`,
|
||||
sourceTable: tableName,
|
||||
sourceSchema: schemaName,
|
||||
sourceColumn: sourceColumnName,
|
||||
targetTable: targetTableName,
|
||||
targetSchema: targetSchemaName,
|
||||
targetColumn: targetColumnName,
|
||||
sourceTableId: tableId,
|
||||
targetTableId: '', // Will be filled later
|
||||
updateAction: reference.on_update,
|
||||
deleteAction: reference.on_delete,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// Create the table object
|
||||
tables.push({
|
||||
id: tableId,
|
||||
name: tableName,
|
||||
schema: schemaName,
|
||||
columns,
|
||||
indexes,
|
||||
order: tables.length,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Process a CREATE INDEX statement
|
||||
*/
|
||||
|
||||
1251
src/lib/dbml/apply-dbml/__tests__/apply-dbml.test.ts
Normal file
1251
src/lib/dbml/apply-dbml/__tests__/apply-dbml.test.ts
Normal file
File diff suppressed because it is too large
Load Diff
624
src/lib/dbml/apply-dbml/apply-dbml.ts
Normal file
624
src/lib/dbml/apply-dbml/apply-dbml.ts
Normal file
@@ -0,0 +1,624 @@
|
||||
import { defaultSchemas } from '@/lib/data/default-schemas';
|
||||
import type { Area } from '../../domain/area';
|
||||
import {
|
||||
DBCustomTypeKind,
|
||||
type DBCustomType,
|
||||
} from '../../domain/db-custom-type';
|
||||
import type { DBDependency } from '../../domain/db-dependency';
|
||||
import type { DBField } from '../../domain/db-field';
|
||||
import type { DBIndex } from '../../domain/db-index';
|
||||
import type { DBRelationship } from '../../domain/db-relationship';
|
||||
import type { DBTable } from '../../domain/db-table';
|
||||
import type { Diagram } from '../../domain/diagram';
|
||||
|
||||
// Maps a source-diagram object id (table or field) to the data needed to
// rebuild its name-based lookup key (schema + name) and, for tables, the
// display color carried over from the source diagram.
type SourceIdToDataMap = Record<
    string,
    { schema?: string | null; name: string; color?: string }
>;

// Target-diagram id -> source-diagram id mappings collected while matching
// tables and fields between the two diagrams by schema-qualified name.
type IdMappings = {
    tables: Record<string, string>;
    fields: Record<string, string>;
};
|
||||
|
||||
// Key generation functions remain the same for consistency
|
||||
const createObjectKey = ({
|
||||
type,
|
||||
schema,
|
||||
otherSchema,
|
||||
parentName,
|
||||
otherParentName,
|
||||
name,
|
||||
otherName,
|
||||
}: {
|
||||
type:
|
||||
| 'table'
|
||||
| 'field'
|
||||
| 'index'
|
||||
| 'relationship'
|
||||
| 'customType'
|
||||
| 'dependency'
|
||||
| 'area';
|
||||
schema?: string | null;
|
||||
otherSchema?: string | null;
|
||||
parentName?: string | null;
|
||||
otherParentName?: string | null;
|
||||
name: string;
|
||||
otherName?: string | null;
|
||||
}) =>
|
||||
`${type}-${schema ? `${schema}.` : ''}${otherSchema ? `${otherSchema}.` : ''}${parentName ? `${parentName}.` : ''}${otherParentName ? `${otherParentName}.` : ''}${name}${otherName ? `.${otherName}` : ''}`;
|
||||
|
||||
const createObjectKeyFromTable = (table: DBTable) =>
|
||||
createObjectKey({
|
||||
type: 'table',
|
||||
schema: table.schema,
|
||||
name: table.name,
|
||||
});
|
||||
|
||||
const createObjectKeyFromField = (table: DBTable, field: DBField) =>
|
||||
createObjectKey({
|
||||
type: 'field',
|
||||
schema: table.schema,
|
||||
parentName: table.name,
|
||||
name: field.name,
|
||||
});
|
||||
|
||||
const createObjectKeyFromIndex = (table: DBTable, index: DBIndex) =>
|
||||
createObjectKey({
|
||||
type: 'index',
|
||||
schema: table.schema,
|
||||
parentName: table.name,
|
||||
name: index.name,
|
||||
});
|
||||
|
||||
const createObjectKeyFromRelationship = (
|
||||
relationship: DBRelationship,
|
||||
sourceIdToNameMap: SourceIdToDataMap
|
||||
) => {
|
||||
const sourceTable = sourceIdToNameMap[relationship.sourceTableId];
|
||||
const targetTable = sourceIdToNameMap[relationship.targetTableId];
|
||||
const sourceField = sourceIdToNameMap[relationship.sourceFieldId];
|
||||
const targetField = sourceIdToNameMap[relationship.targetFieldId];
|
||||
|
||||
if (!sourceTable || !targetTable || !sourceField || !targetField) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return createObjectKey({
|
||||
type: 'relationship',
|
||||
schema: sourceTable.schema,
|
||||
otherSchema: targetTable.schema,
|
||||
parentName: sourceTable.name,
|
||||
otherParentName: targetTable.name,
|
||||
name: sourceField.name,
|
||||
otherName: targetField.name,
|
||||
});
|
||||
};
|
||||
|
||||
const createObjectKeyFromCustomType = (customType: DBCustomType) =>
|
||||
createObjectKey({
|
||||
type: 'customType',
|
||||
schema: customType.schema,
|
||||
name: customType.name,
|
||||
});
|
||||
|
||||
const createObjectKeyFromDependency = (
|
||||
dependency: DBDependency,
|
||||
sourceIdToNameMap: SourceIdToDataMap
|
||||
) => {
|
||||
const dependentTable = sourceIdToNameMap[dependency.dependentTableId];
|
||||
const table = sourceIdToNameMap[dependency.tableId];
|
||||
|
||||
if (!dependentTable || !table) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return createObjectKey({
|
||||
type: 'dependency',
|
||||
schema: dependentTable.schema,
|
||||
otherSchema: table.schema,
|
||||
name: dependentTable.name,
|
||||
otherName: table.name,
|
||||
});
|
||||
};
|
||||
|
||||
const createObjectKeyFromArea = (area: Area) =>
|
||||
createObjectKey({
|
||||
type: 'area',
|
||||
name: area.name,
|
||||
});
|
||||
|
||||
// Helper function to build source mappings
|
||||
const buildSourceMappings = (sourceDiagram: Diagram) => {
|
||||
const objectKeysToIdsMap: Record<string, string> = {};
|
||||
const sourceIdToDataMap: SourceIdToDataMap = {};
|
||||
|
||||
// Map tables and their fields/indexes
|
||||
sourceDiagram.tables?.forEach((table) => {
|
||||
const tableKey = createObjectKeyFromTable(table);
|
||||
objectKeysToIdsMap[tableKey] = table.id;
|
||||
sourceIdToDataMap[table.id] = {
|
||||
schema: table.schema,
|
||||
name: table.name,
|
||||
color: table.color,
|
||||
};
|
||||
|
||||
table.fields?.forEach((field) => {
|
||||
const fieldKey = createObjectKeyFromField(table, field);
|
||||
objectKeysToIdsMap[fieldKey] = field.id;
|
||||
sourceIdToDataMap[field.id] = {
|
||||
schema: table.schema,
|
||||
name: field.name,
|
||||
};
|
||||
});
|
||||
|
||||
table.indexes?.forEach((index) => {
|
||||
const indexKey = createObjectKeyFromIndex(table, index);
|
||||
objectKeysToIdsMap[indexKey] = index.id;
|
||||
});
|
||||
});
|
||||
|
||||
// Map relationships
|
||||
sourceDiagram.relationships?.forEach((relationship) => {
|
||||
const key = createObjectKeyFromRelationship(
|
||||
relationship,
|
||||
sourceIdToDataMap
|
||||
);
|
||||
if (key) {
|
||||
objectKeysToIdsMap[key] = relationship.id;
|
||||
}
|
||||
});
|
||||
|
||||
// Map custom types
|
||||
sourceDiagram.customTypes?.forEach((customType) => {
|
||||
const key = createObjectKeyFromCustomType(customType);
|
||||
objectKeysToIdsMap[key] = customType.id;
|
||||
});
|
||||
|
||||
// Map dependencies
|
||||
sourceDiagram.dependencies?.forEach((dependency) => {
|
||||
const key = createObjectKeyFromDependency(
|
||||
dependency,
|
||||
sourceIdToDataMap
|
||||
);
|
||||
if (key) {
|
||||
objectKeysToIdsMap[key] = dependency.id;
|
||||
}
|
||||
});
|
||||
|
||||
// Map areas
|
||||
sourceDiagram.areas?.forEach((area) => {
|
||||
const key = createObjectKeyFromArea(area);
|
||||
objectKeysToIdsMap[key] = area.id;
|
||||
});
|
||||
|
||||
return { objectKeysToIdsMap, sourceIdToDataMap };
|
||||
};
|
||||
|
||||
/**
 * Matches target-diagram tables to source-diagram tables by schema-qualified
 * name and carries the source ids onto the matched tables, fields and
 * indexes, so object identity survives a DBML round-trip.
 *
 * Matching falls back through the database's default schema in both
 * directions (target has no schema / target schema equals the default).
 * Unmatched target tables are kept as-is (they are new).
 *
 * Returns the merged table list plus the collected target-id -> source-id
 * mappings for tables and fields.
 */
const updateTables = ({
    targetTables,
    sourceTables,
    defaultDatabaseSchema,
}: {
    targetTables: DBTable[] | undefined;
    sourceTables: DBTable[] | undefined;
    objectKeysToIdsMap: Record<string, string>;
    sourceIdToDataMap: SourceIdToDataMap;
    defaultDatabaseSchema?: string;
}): { tables: DBTable[]; idMappings: IdMappings } => {
    // No target tables: everything was removed in the DBML edit.
    if (!targetTables)
        return { tables: [], idMappings: { tables: {}, fields: {} } };
    // No source tables: every target table is new; nothing to remap.
    if (!sourceTables)
        return { tables: targetTables, idMappings: { tables: {}, fields: {} } };

    const idMappings: IdMappings = { tables: {}, fields: {} };

    // Create a map of source tables by schema + name
    const sourceTablesByKey = new Map<string, DBTable>();
    sourceTables.forEach((table) => {
        const key = createObjectKeyFromTable(table);
        sourceTablesByKey.set(key, table);
    });

    const updatedTables = targetTables.map((targetTable) => {
        // Try to find matching source table by schema + name
        const targetKey = createObjectKeyFromTable(targetTable);
        let sourceTable = sourceTablesByKey.get(targetKey);

        if (!sourceTable && defaultDatabaseSchema) {
            if (!targetTable.schema) {
                // If target table has no schema, try matching with default schema
                const defaultKey = createObjectKeyFromTable({
                    ...targetTable,
                    schema: defaultDatabaseSchema,
                });
                sourceTable = sourceTablesByKey.get(defaultKey);
            } else if (targetTable.schema === defaultDatabaseSchema) {
                // If target table's schema matches default, try matching without schema
                const noSchemaKey = createObjectKeyFromTable({
                    ...targetTable,
                    schema: undefined,
                });
                sourceTable = sourceTablesByKey.get(noSchemaKey);
            }
        }

        if (!sourceTable) {
            // No matching source table found - keep target as-is
            return targetTable;
        }

        const sourceId = sourceTable.id;
        idMappings.tables[targetTable.id] = sourceId;

        // Update fields by matching on name within the table
        const sourceFieldsByName = new Map<string, DBField>();
        sourceTable.fields?.forEach((field) => {
            sourceFieldsByName.set(field.name, field);
        });

        const updatedFields = targetTable.fields?.map((targetField) => {
            const sourceField = sourceFieldsByName.get(targetField.name);
            if (sourceField) {
                idMappings.fields[targetField.id] = sourceField.id;

                // Use source field properties when there's a match
                return {
                    ...targetField,
                    id: sourceField.id,
                    createdAt: sourceField.createdAt,
                };
            }
            // For new fields not in source, keep target field as-is
            return targetField;
        });

        // Update indexes by matching on name within the table
        const sourceIndexesByName = new Map<string, DBIndex>();
        sourceTable.indexes?.forEach((index) => {
            sourceIndexesByName.set(index.name, index);
        });

        const updatedIndexes = targetTable.indexes?.map((targetIndex) => {
            const sourceIndex = sourceIndexesByName.get(targetIndex.name);
            if (sourceIndex) {
                return {
                    ...targetIndex,
                    id: sourceIndex.id,
                    createdAt: sourceIndex.createdAt,
                };
            }
            return targetIndex;
        });

        // Build the result table, preserving source structure
        // (source-only properties like position/color win; target supplies
        // the merged fields/indexes and its comments).
        const resultTable: DBTable = {
            ...sourceTable,
            fields: updatedFields,
            indexes: updatedIndexes,
            comments: targetTable.comments,
        };

        // Update nullable, unique, primaryKey from target fields
        // (second pass: DBML-edited column attributes override the source's).
        if (targetTable.fields) {
            resultTable.fields = resultTable.fields?.map((field) => {
                const targetField = targetTable.fields?.find(
                    (f) => f.name === field.name
                );
                if (targetField) {
                    return {
                        ...field,
                        nullable: targetField.nullable,
                        unique: targetField.unique,
                        primaryKey: targetField.primaryKey,
                        type: targetField.type,
                    };
                }
                return field;
            });
        }

        return resultTable;
    });

    return { tables: updatedTables, idMappings };
};
|
||||
|
||||
// Functional helper to update custom types
|
||||
const updateCustomTypes = (
|
||||
customTypes: DBCustomType[] | undefined,
|
||||
objectKeysToIdsMap: Record<string, string>
|
||||
): DBCustomType[] => {
|
||||
if (!customTypes) return [];
|
||||
|
||||
return customTypes.map((customType) => {
|
||||
const key = createObjectKeyFromCustomType(customType);
|
||||
const sourceId = objectKeysToIdsMap[key];
|
||||
|
||||
if (sourceId) {
|
||||
return { ...customType, id: sourceId };
|
||||
}
|
||||
return customType;
|
||||
});
|
||||
};
|
||||
|
||||
// Functional helper to update relationships
|
||||
const updateRelationships = (
|
||||
targetRelationships: DBRelationship[] | undefined,
|
||||
sourceRelationships: DBRelationship[] | undefined,
|
||||
idMappings: IdMappings
|
||||
): DBRelationship[] => {
|
||||
// If target has no relationships, return empty array (relationships were removed)
|
||||
if (!targetRelationships || targetRelationships.length === 0) return [];
|
||||
|
||||
// If source has no relationships, we need to add the target relationships with updated IDs
|
||||
if (!sourceRelationships || sourceRelationships.length === 0) {
|
||||
return targetRelationships.map((targetRel) => {
|
||||
// Find the source IDs by reversing the mapping lookup
|
||||
let sourceTableId = targetRel.sourceTableId;
|
||||
let targetTableId = targetRel.targetTableId;
|
||||
let sourceFieldId = targetRel.sourceFieldId;
|
||||
let targetFieldId = targetRel.targetFieldId;
|
||||
|
||||
// Find source table/field IDs from the mappings
|
||||
for (const [targetId, srcId] of Object.entries(idMappings.tables)) {
|
||||
if (targetId === targetRel.sourceTableId) {
|
||||
sourceTableId = srcId;
|
||||
}
|
||||
if (targetId === targetRel.targetTableId) {
|
||||
targetTableId = srcId;
|
||||
}
|
||||
}
|
||||
|
||||
for (const [targetId, srcId] of Object.entries(idMappings.fields)) {
|
||||
if (targetId === targetRel.sourceFieldId) {
|
||||
sourceFieldId = srcId;
|
||||
}
|
||||
if (targetId === targetRel.targetFieldId) {
|
||||
targetFieldId = srcId;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
...targetRel,
|
||||
sourceTableId,
|
||||
targetTableId,
|
||||
sourceFieldId,
|
||||
targetFieldId,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
// Map source relationships that have matches in target
|
||||
const resultRelationships: DBRelationship[] = [];
|
||||
const matchedTargetRelIds = new Set<string>();
|
||||
|
||||
sourceRelationships.forEach((sourceRel) => {
|
||||
// Find matching target relationship by checking if the target has a relationship
|
||||
// between the same tables and fields (using the ID mappings)
|
||||
const targetRel = targetRelationships.find((tgtRel) => {
|
||||
const mappedSourceTableId = idMappings.tables[tgtRel.sourceTableId];
|
||||
const mappedTargetTableId = idMappings.tables[tgtRel.targetTableId];
|
||||
const mappedSourceFieldId = idMappings.fields[tgtRel.sourceFieldId];
|
||||
const mappedTargetFieldId = idMappings.fields[tgtRel.targetFieldId];
|
||||
|
||||
// Check both directions since relationships can be defined in either direction
|
||||
const directMatch =
|
||||
sourceRel.sourceTableId === mappedSourceTableId &&
|
||||
sourceRel.targetTableId === mappedTargetTableId &&
|
||||
sourceRel.sourceFieldId === mappedSourceFieldId &&
|
||||
sourceRel.targetFieldId === mappedTargetFieldId;
|
||||
|
||||
const reverseMatch =
|
||||
sourceRel.sourceTableId === mappedTargetTableId &&
|
||||
sourceRel.targetTableId === mappedSourceTableId &&
|
||||
sourceRel.sourceFieldId === mappedTargetFieldId &&
|
||||
sourceRel.targetFieldId === mappedSourceFieldId;
|
||||
|
||||
return directMatch || reverseMatch;
|
||||
});
|
||||
|
||||
if (targetRel) {
|
||||
matchedTargetRelIds.add(targetRel.id);
|
||||
// Preserve source relationship but update cardinalities from target
|
||||
const result: DBRelationship = {
|
||||
...sourceRel,
|
||||
sourceCardinality: targetRel.sourceCardinality,
|
||||
targetCardinality: targetRel.targetCardinality,
|
||||
};
|
||||
|
||||
// Only include schema fields if they exist in the source relationship
|
||||
if (!sourceRel.sourceSchema) {
|
||||
delete result.sourceSchema;
|
||||
}
|
||||
if (!sourceRel.targetSchema) {
|
||||
delete result.targetSchema;
|
||||
}
|
||||
|
||||
resultRelationships.push(result);
|
||||
}
|
||||
});
|
||||
|
||||
// Add any target relationships that weren't matched (new relationships)
|
||||
targetRelationships.forEach((targetRel) => {
|
||||
if (!matchedTargetRelIds.has(targetRel.id)) {
|
||||
// Find the source IDs by reversing the mapping lookup
|
||||
let sourceTableId = targetRel.sourceTableId;
|
||||
let targetTableId = targetRel.targetTableId;
|
||||
let sourceFieldId = targetRel.sourceFieldId;
|
||||
let targetFieldId = targetRel.targetFieldId;
|
||||
|
||||
// Find source table/field IDs from the mappings
|
||||
for (const [targetId, srcId] of Object.entries(idMappings.tables)) {
|
||||
if (targetId === targetRel.sourceTableId) {
|
||||
sourceTableId = srcId;
|
||||
}
|
||||
if (targetId === targetRel.targetTableId) {
|
||||
targetTableId = srcId;
|
||||
}
|
||||
}
|
||||
|
||||
for (const [targetId, srcId] of Object.entries(idMappings.fields)) {
|
||||
if (targetId === targetRel.sourceFieldId) {
|
||||
sourceFieldId = srcId;
|
||||
}
|
||||
if (targetId === targetRel.targetFieldId) {
|
||||
targetFieldId = srcId;
|
||||
}
|
||||
}
|
||||
|
||||
resultRelationships.push({
|
||||
...targetRel,
|
||||
sourceTableId,
|
||||
targetTableId,
|
||||
sourceFieldId,
|
||||
targetFieldId,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
return resultRelationships;
|
||||
};
|
||||
|
||||
// Functional helper to update dependencies
|
||||
const updateDependencies = (
|
||||
targetDependencies: DBDependency[] | undefined,
|
||||
sourceDependencies: DBDependency[] | undefined,
|
||||
idMappings: IdMappings
|
||||
): DBDependency[] => {
|
||||
if (!targetDependencies) return [];
|
||||
if (!sourceDependencies) return targetDependencies;
|
||||
|
||||
return targetDependencies.map((targetDep) => {
|
||||
// Find matching source dependency
|
||||
const sourceDep = sourceDependencies.find((srcDep) => {
|
||||
const srcTableId = idMappings.tables[targetDep.tableId];
|
||||
const srcDependentTableId =
|
||||
idMappings.tables[targetDep.dependentTableId];
|
||||
|
||||
return (
|
||||
srcDep.tableId === srcTableId &&
|
||||
srcDep.dependentTableId === srcDependentTableId
|
||||
);
|
||||
});
|
||||
|
||||
if (sourceDep) {
|
||||
return {
|
||||
...targetDep,
|
||||
id: sourceDep.id,
|
||||
tableId:
|
||||
idMappings.tables[targetDep.tableId] || targetDep.tableId,
|
||||
dependentTableId:
|
||||
idMappings.tables[targetDep.dependentTableId] ||
|
||||
targetDep.dependentTableId,
|
||||
};
|
||||
}
|
||||
|
||||
// If no match found, just update the table references
|
||||
return {
|
||||
...targetDep,
|
||||
tableId: idMappings.tables[targetDep.tableId] || targetDep.tableId,
|
||||
dependentTableId:
|
||||
idMappings.tables[targetDep.dependentTableId] ||
|
||||
targetDep.dependentTableId,
|
||||
};
|
||||
});
|
||||
};
|
||||
|
||||
// Functional helper to update index field references
|
||||
const updateIndexFieldReferences = (
|
||||
tables: DBTable[] | undefined,
|
||||
idMappings: IdMappings
|
||||
): DBTable[] => {
|
||||
if (!tables) return [];
|
||||
|
||||
return tables.map((table) => ({
|
||||
...table,
|
||||
indexes: table.indexes?.map((index) => ({
|
||||
...index,
|
||||
fieldIds: index.fieldIds.map(
|
||||
(fieldId) => idMappings.fields[fieldId] || fieldId
|
||||
),
|
||||
})),
|
||||
}));
|
||||
};
|
||||
|
||||
export const applyDBMLChanges = ({
|
||||
sourceDiagram,
|
||||
targetDiagram,
|
||||
}: {
|
||||
sourceDiagram: Diagram;
|
||||
targetDiagram: Diagram;
|
||||
}): Diagram => {
|
||||
// Step 1: Build mappings from source diagram
|
||||
const { objectKeysToIdsMap, sourceIdToDataMap } =
|
||||
buildSourceMappings(sourceDiagram);
|
||||
|
||||
// Step 2: Update tables and collect ID mappings
|
||||
const { tables: updatedTables, idMappings } = updateTables({
|
||||
targetTables: targetDiagram.tables,
|
||||
sourceTables: sourceDiagram.tables,
|
||||
objectKeysToIdsMap,
|
||||
sourceIdToDataMap,
|
||||
defaultDatabaseSchema: defaultSchemas[sourceDiagram.databaseType],
|
||||
});
|
||||
|
||||
// Step 3: Update all other entities functionally
|
||||
const newCustomTypes = updateCustomTypes(
|
||||
targetDiagram.customTypes,
|
||||
objectKeysToIdsMap
|
||||
);
|
||||
|
||||
const updatedCustomTypes = [
|
||||
...(sourceDiagram.customTypes?.filter(
|
||||
(ct) => ct.kind === DBCustomTypeKind.composite
|
||||
) ?? []),
|
||||
...newCustomTypes,
|
||||
];
|
||||
|
||||
const updatedRelationships = updateRelationships(
|
||||
targetDiagram.relationships,
|
||||
sourceDiagram.relationships,
|
||||
idMappings
|
||||
);
|
||||
|
||||
const updatedDependencies = updateDependencies(
|
||||
targetDiagram.dependencies,
|
||||
sourceDiagram.dependencies,
|
||||
idMappings
|
||||
);
|
||||
|
||||
// Step 4: Update index field references
|
||||
const finalTables = updateIndexFieldReferences(updatedTables, idMappings);
|
||||
|
||||
// Sort relationships to match source order
|
||||
const sortedRelationships = [...updatedRelationships].sort((a, b) => {
|
||||
// Find source relationships to get their order
|
||||
const sourceRelA = sourceDiagram.relationships?.find(
|
||||
(r) => r.id === a.id
|
||||
);
|
||||
const sourceRelB = sourceDiagram.relationships?.find(
|
||||
(r) => r.id === b.id
|
||||
);
|
||||
|
||||
if (!sourceRelA || !sourceRelB) return 0;
|
||||
|
||||
const indexA = sourceDiagram.relationships?.indexOf(sourceRelA) ?? 0;
|
||||
const indexB = sourceDiagram.relationships?.indexOf(sourceRelB) ?? 0;
|
||||
|
||||
return indexA - indexB;
|
||||
});
|
||||
|
||||
// Return a new diagram object with tables sorted by order
|
||||
const result: Diagram = {
|
||||
...sourceDiagram,
|
||||
tables: finalTables.sort((a, b) => (a.order ?? 0) - (b.order ?? 0)),
|
||||
areas: targetDiagram.areas,
|
||||
relationships: sortedRelationships,
|
||||
dependencies: updatedDependencies,
|
||||
customTypes: updatedCustomTypes,
|
||||
};
|
||||
|
||||
return result;
|
||||
};
|
||||
1418
src/lib/dbml/dbml-export/__tests__/dbml-export-issue-fix.test.ts
Normal file
1418
src/lib/dbml/dbml-export/__tests__/dbml-export-issue-fix.test.ts
Normal file
File diff suppressed because it is too large
Load Diff
@@ -210,14 +210,17 @@ export const sanitizeSQLforDBML = (sql: string): string => {
|
||||
|
||||
// Post-process DBML to convert separate Ref statements to inline refs
|
||||
const convertToInlineRefs = (dbml: string): string => {
|
||||
// Extract all Ref statements - Corrected pattern
|
||||
// Extract all Ref statements - Updated pattern to handle schema.table.field format
|
||||
// Matches both "table"."field" and "schema"."table"."field" formats
|
||||
const refPattern =
|
||||
/Ref\s+"([^"]+)"\s*:\s*"([^"]+)"\."([^"]+)"\s*([<>*])\s*"([^"]+)"\."([^"]+)"/g;
|
||||
/Ref\s+"([^"]+)"\s*:\s*(?:"([^"]+)"\.)?"([^"]+)"\."([^"]+)"\s*([<>*])\s*(?:"([^"]+)"\.)?"([^"]+)"\."([^"]+)"/g;
|
||||
const refs: Array<{
|
||||
refName: string;
|
||||
sourceSchema?: string;
|
||||
sourceTable: string;
|
||||
sourceField: string;
|
||||
direction: string;
|
||||
targetSchema?: string;
|
||||
targetTable: string;
|
||||
targetField: string;
|
||||
}> = [];
|
||||
@@ -226,28 +229,86 @@ const convertToInlineRefs = (dbml: string): string => {
|
||||
while ((match = refPattern.exec(dbml)) !== null) {
|
||||
refs.push({
|
||||
refName: match[1], // Reference name
|
||||
sourceTable: match[2], // Source table
|
||||
sourceField: match[3], // Source field
|
||||
direction: match[4], // Direction (<, >)
|
||||
targetTable: match[5], // Target table
|
||||
targetField: match[6], // Target field
|
||||
sourceSchema: match[2] || undefined, // Source schema (optional)
|
||||
sourceTable: match[3], // Source table
|
||||
sourceField: match[4], // Source field
|
||||
direction: match[5], // Direction (<, >)
|
||||
targetSchema: match[6] || undefined, // Target schema (optional)
|
||||
targetTable: match[7], // Target table
|
||||
targetField: match[8], // Target field
|
||||
});
|
||||
}
|
||||
|
||||
// Extract all table definitions - Corrected pattern and handling
|
||||
// Extract all table definitions - Support both quoted and bracketed table names
|
||||
const tables: {
|
||||
[key: string]: { start: number; end: number; content: string };
|
||||
} = {};
|
||||
const tablePattern = /Table\s+"([^"]+)"\s*{([^}]*)}/g; // Simpler pattern, assuming content doesn't have {}
|
||||
|
||||
let tableMatch;
|
||||
while ((tableMatch = tablePattern.exec(dbml)) !== null) {
|
||||
const tableName = tableMatch[1];
|
||||
tables[tableName] = {
|
||||
start: tableMatch.index,
|
||||
end: tableMatch.index + tableMatch[0].length,
|
||||
content: tableMatch[2],
|
||||
[key: string]: {
|
||||
start: number;
|
||||
end: number;
|
||||
content: string;
|
||||
fullMatch: string;
|
||||
};
|
||||
} = {};
|
||||
|
||||
// Use a more sophisticated approach to handle nested braces
|
||||
let currentPos = 0;
|
||||
while (currentPos < dbml.length) {
|
||||
// Find the next table definition
|
||||
const tableStartPattern =
|
||||
/Table\s+(?:"([^"]+)"(?:\."([^"]+)")?|(\[?[^\s[]+\]?\.\[?[^\s\]]+\]?)|(\[?[^\s[{]+\]?))\s*{/g;
|
||||
tableStartPattern.lastIndex = currentPos;
|
||||
const tableStartMatch = tableStartPattern.exec(dbml);
|
||||
|
||||
if (!tableStartMatch) break;
|
||||
|
||||
// Extract table name
|
||||
let tableName;
|
||||
if (tableStartMatch[1] && tableStartMatch[2]) {
|
||||
tableName = `${tableStartMatch[1]}.${tableStartMatch[2]}`;
|
||||
} else if (tableStartMatch[1]) {
|
||||
tableName = tableStartMatch[1];
|
||||
} else {
|
||||
tableName = tableStartMatch[3] || tableStartMatch[4];
|
||||
}
|
||||
|
||||
// Clean up any bracket syntax from table names
|
||||
const cleanTableName = tableName.replace(/\[([^\]]+)\]/g, '$1');
|
||||
|
||||
// Find the matching closing brace by counting nested braces
|
||||
const openBracePos =
|
||||
tableStartMatch.index + tableStartMatch[0].length - 1;
|
||||
let braceCount = 1;
|
||||
const contentStart = openBracePos + 1;
|
||||
let contentEnd = contentStart;
|
||||
|
||||
for (let i = contentStart; i < dbml.length && braceCount > 0; i++) {
|
||||
if (dbml[i] === '{') braceCount++;
|
||||
else if (dbml[i] === '}') {
|
||||
braceCount--;
|
||||
if (braceCount === 0) {
|
||||
contentEnd = i;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (braceCount === 0) {
|
||||
const content = dbml.substring(contentStart, contentEnd);
|
||||
const fullMatch = dbml.substring(
|
||||
tableStartMatch.index,
|
||||
contentEnd + 1
|
||||
);
|
||||
|
||||
tables[cleanTableName] = {
|
||||
start: tableStartMatch.index,
|
||||
end: contentEnd + 1,
|
||||
content: content,
|
||||
fullMatch: fullMatch,
|
||||
};
|
||||
|
||||
currentPos = contentEnd + 1;
|
||||
} else {
|
||||
// Malformed DBML, skip this table
|
||||
currentPos = tableStartMatch.index + tableStartMatch[0].length;
|
||||
}
|
||||
}
|
||||
|
||||
if (refs.length === 0 || Object.keys(tables).length === 0) {
|
||||
@@ -257,50 +318,102 @@ const convertToInlineRefs = (dbml: string): string => {
|
||||
// Create a map for faster table lookup
|
||||
const tableMap = new Map(Object.entries(tables));
|
||||
|
||||
// 1. Add inline refs to table contents
|
||||
// 1. First, collect all refs per field
|
||||
const fieldRefs = new Map<
|
||||
string,
|
||||
{ table: string; refs: string[]; relatedTables: string[] }
|
||||
>();
|
||||
|
||||
refs.forEach((ref) => {
|
||||
let targetTableName, fieldNameToModify, inlineRefSyntax;
|
||||
let targetTableName, fieldNameToModify, inlineRefSyntax, relatedTable;
|
||||
|
||||
if (ref.direction === '<') {
|
||||
targetTableName = ref.targetTable;
|
||||
targetTableName = ref.targetSchema
|
||||
? `${ref.targetSchema}.${ref.targetTable}`
|
||||
: ref.targetTable;
|
||||
fieldNameToModify = ref.targetField;
|
||||
inlineRefSyntax = `[ref: < "${ref.sourceTable}"."${ref.sourceField}"]`;
|
||||
const sourceRef = ref.sourceSchema
|
||||
? `"${ref.sourceSchema}"."${ref.sourceTable}"."${ref.sourceField}"`
|
||||
: `"${ref.sourceTable}"."${ref.sourceField}"`;
|
||||
inlineRefSyntax = `ref: < ${sourceRef}`;
|
||||
relatedTable = ref.sourceTable;
|
||||
} else {
|
||||
targetTableName = ref.sourceTable;
|
||||
targetTableName = ref.sourceSchema
|
||||
? `${ref.sourceSchema}.${ref.sourceTable}`
|
||||
: ref.sourceTable;
|
||||
fieldNameToModify = ref.sourceField;
|
||||
inlineRefSyntax = `[ref: > "${ref.targetTable}"."${ref.targetField}"]`;
|
||||
const targetRef = ref.targetSchema
|
||||
? `"${ref.targetSchema}"."${ref.targetTable}"."${ref.targetField}"`
|
||||
: `"${ref.targetTable}"."${ref.targetField}"`;
|
||||
inlineRefSyntax = `ref: > ${targetRef}`;
|
||||
relatedTable = ref.targetTable;
|
||||
}
|
||||
|
||||
const tableData = tableMap.get(targetTableName);
|
||||
const fieldKey = `${targetTableName}.${fieldNameToModify}`;
|
||||
const existing = fieldRefs.get(fieldKey) || {
|
||||
table: targetTableName,
|
||||
refs: [],
|
||||
relatedTables: [],
|
||||
};
|
||||
existing.refs.push(inlineRefSyntax);
|
||||
existing.relatedTables.push(relatedTable);
|
||||
fieldRefs.set(fieldKey, existing);
|
||||
});
|
||||
|
||||
// 2. Apply all refs to fields
|
||||
fieldRefs.forEach((fieldData, fieldKey) => {
|
||||
// fieldKey might be "schema.table.field" or just "table.field"
|
||||
const lastDotIndex = fieldKey.lastIndexOf('.');
|
||||
const tableName = fieldKey.substring(0, lastDotIndex);
|
||||
const fieldName = fieldKey.substring(lastDotIndex + 1);
|
||||
const tableData = tableMap.get(tableName);
|
||||
|
||||
if (tableData) {
|
||||
// Updated pattern to capture field definition and all existing attributes in brackets
|
||||
const fieldPattern = new RegExp(
|
||||
`("(${fieldNameToModify})"[^\n]*?)([ \t]*[[].*?[]])?([ \t]*//.*)?$`,
|
||||
'm'
|
||||
`^([ \t]*"${fieldName}"[^\\n]*?)(?:\\s*(\\[[^\\]]*\\]))*\\s*(//.*)?$`,
|
||||
'gm'
|
||||
);
|
||||
let newContent = tableData.content;
|
||||
|
||||
newContent = newContent.replace(
|
||||
fieldPattern,
|
||||
(
|
||||
lineMatch,
|
||||
fieldPart,
|
||||
_fieldName,
|
||||
existingAttributes,
|
||||
commentPart
|
||||
) => {
|
||||
// Avoid adding duplicate refs
|
||||
if (lineMatch.includes('[ref:')) {
|
||||
return lineMatch;
|
||||
(lineMatch, fieldPart, existingBrackets, commentPart) => {
|
||||
// Collect all attributes from existing brackets
|
||||
const allAttributes: string[] = [];
|
||||
if (existingBrackets) {
|
||||
// Extract all bracket contents
|
||||
const bracketPattern = /\[([^\]]*)\]/g;
|
||||
let bracketMatch;
|
||||
while (
|
||||
(bracketMatch = bracketPattern.exec(lineMatch)) !==
|
||||
null
|
||||
) {
|
||||
const content = bracketMatch[1].trim();
|
||||
if (content) {
|
||||
allAttributes.push(content);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return `${fieldPart.trim()} ${inlineRefSyntax}${existingAttributes || ''}${commentPart || ''}`;
|
||||
// Add all refs for this field
|
||||
allAttributes.push(...fieldData.refs);
|
||||
|
||||
// Combine all attributes into a single bracket
|
||||
const combinedAttributes = allAttributes.join(', ');
|
||||
|
||||
// Preserve original spacing from fieldPart
|
||||
const leadingSpaces = fieldPart.match(/^(\s*)/)?.[1] || '';
|
||||
const fieldDefWithoutSpaces = fieldPart.trim();
|
||||
|
||||
return `${leadingSpaces}${fieldDefWithoutSpaces} [${combinedAttributes}]${commentPart || ''}`;
|
||||
}
|
||||
);
|
||||
|
||||
// Update the table content if modified
|
||||
if (newContent !== tableData.content) {
|
||||
tableData.content = newContent;
|
||||
tableMap.set(targetTableName, tableData);
|
||||
tableMap.set(tableName, tableData);
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -312,9 +425,53 @@ const convertToInlineRefs = (dbml: string): string => {
|
||||
([, a], [, b]) => a.start - b.start
|
||||
);
|
||||
|
||||
for (const [tableName, tableData] of sortedTables) {
|
||||
for (const [, tableData] of sortedTables) {
|
||||
reconstructedDbml += dbml.substring(lastIndex, tableData.start);
|
||||
reconstructedDbml += `Table "${tableName}" {${tableData.content}}`;
|
||||
// Preserve the original table definition format but with updated content
|
||||
const originalTableDef = tableData.fullMatch;
|
||||
let formattedContent = tableData.content;
|
||||
|
||||
// Clean up content formatting:
|
||||
// 1. Split into lines to handle each line individually
|
||||
const lines = formattedContent.split('\n');
|
||||
|
||||
// 2. Process lines to ensure proper formatting
|
||||
const processedLines = [];
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
const trimmedLine = line.trimEnd();
|
||||
|
||||
// Skip empty lines at the end if followed by a closing brace
|
||||
if (trimmedLine === '' && i === lines.length - 1) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Skip empty lines before a closing brace
|
||||
if (
|
||||
trimmedLine === '' &&
|
||||
i < lines.length - 1 &&
|
||||
lines[i + 1].trim().startsWith('}')
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
|
||||
processedLines.push(line);
|
||||
}
|
||||
|
||||
formattedContent = processedLines.join('\n');
|
||||
|
||||
// Ensure content ends with a newline before the table's closing brace
|
||||
if (!formattedContent.endsWith('\n')) {
|
||||
formattedContent = formattedContent + '\n';
|
||||
}
|
||||
|
||||
// Since we properly extracted content with nested braces, we need to rebuild the table definition
|
||||
const tableHeader = originalTableDef.substring(
|
||||
0,
|
||||
originalTableDef.indexOf('{') + 1
|
||||
);
|
||||
const updatedTableDef = `${tableHeader}${formattedContent}}`;
|
||||
reconstructedDbml += updatedTableDef;
|
||||
lastIndex = tableData.end;
|
||||
}
|
||||
reconstructedDbml += dbml.substring(lastIndex);
|
||||
@@ -325,7 +482,13 @@ const convertToInlineRefs = (dbml: string): string => {
|
||||
.filter((line) => !line.trim().startsWith('Ref '));
|
||||
const finalDbml = finalLines.join('\n').trim();
|
||||
|
||||
return finalDbml;
|
||||
// Clean up excessive empty lines - replace multiple consecutive empty lines with just one
|
||||
// But ensure there's at least one blank line between tables
|
||||
const cleanedDbml = finalDbml
|
||||
.replace(/\n\s*\n\s*\n/g, '\n\n')
|
||||
.replace(/}\n(?=Table)/g, '}\n\n');
|
||||
|
||||
return cleanedDbml;
|
||||
};
|
||||
|
||||
// Function to check for SQL keywords (add more if needed)
|
||||
@@ -410,6 +573,125 @@ const normalizeCharTypeFormat = (dbml: string): string => {
|
||||
.replace(/character \(([0-9]+)\)/g, 'character($1)');
|
||||
};
|
||||
|
||||
// Fix table definitions with incorrect bracket syntax
|
||||
const fixTableBracketSyntax = (dbml: string): string => {
|
||||
// Fix patterns like Table [schema].[table] to Table "schema"."table"
|
||||
return dbml.replace(
|
||||
/Table\s+\[([^\]]+)\]\.\[([^\]]+)\]/g,
|
||||
'Table "$1"."$2"'
|
||||
);
|
||||
};
|
||||
|
||||
// Restore schema information that may have been stripped by the DBML importer
|
||||
const restoreTableSchemas = (dbml: string, tables: DBTable[]): string => {
|
||||
if (!tables || tables.length === 0) return dbml;
|
||||
|
||||
// Group tables by name to handle duplicates
|
||||
const tablesByName = new Map<
|
||||
string,
|
||||
Array<{ table: DBTable; index: number }>
|
||||
>();
|
||||
tables.forEach((table, index) => {
|
||||
const existing = tablesByName.get(table.name) || [];
|
||||
existing.push({ table, index });
|
||||
tablesByName.set(table.name, existing);
|
||||
});
|
||||
|
||||
let result = dbml;
|
||||
|
||||
// Process each group of tables with the same name
|
||||
tablesByName.forEach((tablesGroup, tableName) => {
|
||||
if (tablesGroup.length === 1) {
|
||||
// Single table with this name - simple case
|
||||
const table = tablesGroup[0].table;
|
||||
if (table.schema) {
|
||||
// Match table definition without schema (e.g., Table "users" {)
|
||||
const tablePattern = new RegExp(
|
||||
`Table\\s+"${table.name.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}"\\s*{`,
|
||||
'g'
|
||||
);
|
||||
const schemaTableName = `Table "${table.schema}"."${table.name}" {`;
|
||||
result = result.replace(tablePattern, schemaTableName);
|
||||
|
||||
// Update references in Ref statements
|
||||
const escapedTableName = table.name.replace(
|
||||
/[.*+?^${}()|[\]\\]/g,
|
||||
'\\$&'
|
||||
);
|
||||
|
||||
// Pattern 1: In Ref definitions - :"tablename"."field"
|
||||
const refDefPattern = new RegExp(
|
||||
`(Ref\\s+"[^"]+")\\s*:\\s*"${escapedTableName}"\\."([^"]+)"`,
|
||||
'g'
|
||||
);
|
||||
result = result.replace(
|
||||
refDefPattern,
|
||||
`$1:"${table.schema}"."${table.name}"."$2"`
|
||||
);
|
||||
|
||||
// Pattern 2: In Ref targets - [<>] "tablename"."field"
|
||||
const refTargetPattern = new RegExp(
|
||||
`([<>])\\s*"${escapedTableName}"\\."([^"]+)"`,
|
||||
'g'
|
||||
);
|
||||
result = result.replace(
|
||||
refTargetPattern,
|
||||
`$1 "${table.schema}"."${table.name}"."$2"`
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// Multiple tables with the same name - need to be more careful
|
||||
// Find all table definitions for this name
|
||||
const escapedTableName = tableName.replace(
|
||||
/[.*+?^${}()|[\]\\]/g,
|
||||
'\\$&'
|
||||
);
|
||||
|
||||
// Get tables that need schema restoration (those without schema in DBML)
|
||||
const tablesNeedingSchema = tablesGroup.filter(({ table }) => {
|
||||
// Check if this table's schema is already in the DBML
|
||||
const schemaPattern = new RegExp(
|
||||
`Table\\s+"${table.schema}"\\.\\s*"${escapedTableName}"\\s*{`,
|
||||
'g'
|
||||
);
|
||||
return !result.match(schemaPattern);
|
||||
});
|
||||
|
||||
// Then handle tables without schema in DBML
|
||||
const noSchemaTablePattern = new RegExp(
|
||||
`Table\\s+"${escapedTableName}"\\s*{`,
|
||||
'g'
|
||||
);
|
||||
|
||||
let noSchemaMatchIndex = 0;
|
||||
result = result.replace(noSchemaTablePattern, (match) => {
|
||||
// We need to match based on the order in the DBML output
|
||||
// For PostgreSQL DBML, the @dbml/core sorts tables by:
|
||||
// 1. Tables with schemas (alphabetically)
|
||||
// 2. Tables without schemas
|
||||
// Since both our tables have schemas, they should appear in order
|
||||
|
||||
// Only process tables that need schema restoration
|
||||
if (noSchemaMatchIndex >= tablesNeedingSchema.length) {
|
||||
return match;
|
||||
}
|
||||
|
||||
const correspondingTable =
|
||||
tablesNeedingSchema[noSchemaMatchIndex];
|
||||
noSchemaMatchIndex++;
|
||||
|
||||
if (correspondingTable && correspondingTable.table.schema) {
|
||||
return `Table "${correspondingTable.table.schema}"."${tableName}" {`;
|
||||
}
|
||||
// If the table doesn't have a schema, keep it as is
|
||||
return match;
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
export interface DBMLExportResult {
|
||||
standardDbml: string;
|
||||
inlineDbml: string;
|
||||
@@ -429,13 +711,18 @@ export function generateDBMLFromDiagram(diagram: Diagram): DBMLExportResult {
|
||||
};
|
||||
}) ?? [];
|
||||
|
||||
// Remove duplicate tables (keep first occurrence by table name)
|
||||
const seenTableNames = new Set<string>();
|
||||
// Remove duplicate tables (consider both schema and table name)
|
||||
const seenTableIdentifiers = new Set<string>();
|
||||
const uniqueTables = sanitizedTables.filter((table) => {
|
||||
if (seenTableNames.has(table.name)) {
|
||||
// Create a unique identifier combining schema and table name
|
||||
const tableIdentifier = table.schema
|
||||
? `${table.schema}.${table.name}`
|
||||
: table.name;
|
||||
|
||||
if (seenTableIdentifiers.has(tableIdentifier)) {
|
||||
return false; // Skip duplicate
|
||||
}
|
||||
seenTableNames.add(table.name);
|
||||
seenTableIdentifiers.add(tableIdentifier);
|
||||
return true; // Keep unique table
|
||||
});
|
||||
|
||||
@@ -483,43 +770,58 @@ export function generateDBMLFromDiagram(diagram: Diagram): DBMLExportResult {
|
||||
|
||||
const processTable = (table: DBTable) => {
|
||||
const originalName = table.name;
|
||||
let safeTableName = originalName.replace(/[^\w]/g, '_');
|
||||
let safeTableName = originalName;
|
||||
|
||||
// If name contains spaces or special characters, wrap in quotes
|
||||
if (/[^\w]/.test(originalName)) {
|
||||
safeTableName = `"${originalName.replace(/"/g, '\\"')}"`;
|
||||
}
|
||||
|
||||
// Rename table if SQL keyword (PostgreSQL only)
|
||||
if (shouldRenameKeywords && isSQLKeyword(safeTableName)) {
|
||||
const newName = `${safeTableName}_table`;
|
||||
if (shouldRenameKeywords && isSQLKeyword(originalName)) {
|
||||
const newName = `${originalName}_table`;
|
||||
sqlRenamedTables.set(newName, originalName);
|
||||
safeTableName = newName;
|
||||
safeTableName = /[^\w]/.test(newName)
|
||||
? `"${newName.replace(/"/g, '\\"')}"`
|
||||
: newName;
|
||||
}
|
||||
|
||||
const fieldNameCounts = new Map<string, number>();
|
||||
const processedFields = table.fields.map((field) => {
|
||||
const originalSafeName = field.name.replace(/[^\w]/g, '_');
|
||||
let finalSafeName = originalSafeName;
|
||||
let finalSafeName = field.name;
|
||||
|
||||
// If field name contains spaces or special characters, wrap in quotes
|
||||
if (/[^\w]/.test(field.name)) {
|
||||
finalSafeName = `"${field.name.replace(/"/g, '\\"')}"`;
|
||||
}
|
||||
|
||||
// Handle duplicate field names
|
||||
const count = fieldNameCounts.get(originalSafeName) || 0;
|
||||
const count = fieldNameCounts.get(field.name) || 0;
|
||||
if (count > 0) {
|
||||
finalSafeName = `${originalSafeName}_${count + 1}`;
|
||||
const newName = `${field.name}_${count + 1}`;
|
||||
finalSafeName = /[^\w]/.test(newName)
|
||||
? `"${newName.replace(/"/g, '\\"')}"`
|
||||
: newName;
|
||||
}
|
||||
fieldNameCounts.set(originalSafeName, count + 1);
|
||||
fieldNameCounts.set(field.name, count + 1);
|
||||
|
||||
// Create sanitized field
|
||||
const sanitizedField: DBField = {
|
||||
...field,
|
||||
name: finalSafeName,
|
||||
};
|
||||
delete sanitizedField.comments;
|
||||
|
||||
// Rename field if SQL keyword (PostgreSQL only)
|
||||
if (shouldRenameKeywords && isSQLKeyword(finalSafeName)) {
|
||||
const newFieldName = `${finalSafeName}_field`;
|
||||
if (shouldRenameKeywords && isSQLKeyword(field.name)) {
|
||||
const newFieldName = `${field.name}_field`;
|
||||
fieldRenames.push({
|
||||
table: safeTableName,
|
||||
originalName: finalSafeName,
|
||||
originalName: field.name,
|
||||
newName: newFieldName,
|
||||
});
|
||||
sanitizedField.name = newFieldName;
|
||||
sanitizedField.name = /[^\w]/.test(newFieldName)
|
||||
? `"${newFieldName.replace(/"/g, '\\"')}"`
|
||||
: newFieldName;
|
||||
}
|
||||
|
||||
return sanitizedField;
|
||||
@@ -532,7 +834,9 @@ export function generateDBMLFromDiagram(diagram: Diagram): DBMLExportResult {
|
||||
indexes: (table.indexes || []).map((index) => ({
|
||||
...index,
|
||||
name: index.name
|
||||
? index.name.replace(/[^\w]/g, '_')
|
||||
? /[^\w]/.test(index.name)
|
||||
? `"${index.name.replace(/"/g, '\\"')}"`
|
||||
: index.name
|
||||
: `idx_${Math.random().toString(36).substring(2, 8)}`,
|
||||
})),
|
||||
};
|
||||
@@ -542,10 +846,15 @@ export function generateDBMLFromDiagram(diagram: Diagram): DBMLExportResult {
|
||||
...cleanDiagram,
|
||||
tables: cleanDiagram.tables?.map(processTable) ?? [],
|
||||
relationships:
|
||||
cleanDiagram.relationships?.map((rel, index) => ({
|
||||
...rel,
|
||||
name: `fk_${index}_${rel.name ? rel.name.replace(/[^\w]/g, '_') : Math.random().toString(36).substring(2, 8)}`,
|
||||
})) ?? [],
|
||||
cleanDiagram.relationships?.map((rel, index) => {
|
||||
const safeName = rel.name
|
||||
? rel.name.replace(/[^\w]/g, '_')
|
||||
: Math.random().toString(36).substring(2, 8);
|
||||
return {
|
||||
...rel,
|
||||
name: `fk_${index}_${safeName}`,
|
||||
};
|
||||
}) ?? [],
|
||||
} as Diagram);
|
||||
|
||||
let standard = '';
|
||||
@@ -577,16 +886,35 @@ export function generateDBMLFromDiagram(diagram: Diagram): DBMLExportResult {
|
||||
}
|
||||
|
||||
standard = normalizeCharTypeFormat(
|
||||
importer.import(
|
||||
baseScript,
|
||||
databaseTypeToImportFormat(diagram.databaseType)
|
||||
fixTableBracketSyntax(
|
||||
importer.import(
|
||||
baseScript,
|
||||
databaseTypeToImportFormat(diagram.databaseType)
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
// Restore schema information that may have been stripped by DBML importer
|
||||
standard = restoreTableSchemas(standard, uniqueTables);
|
||||
|
||||
// Prepend Enum DBML to the standard output
|
||||
standard = enumsDBML + '\n' + standard;
|
||||
if (enumsDBML) {
|
||||
standard = enumsDBML + '\n\n' + standard;
|
||||
}
|
||||
|
||||
inline = normalizeCharTypeFormat(convertToInlineRefs(standard));
|
||||
|
||||
// Clean up excessive empty lines in both outputs
|
||||
standard = standard.replace(/\n\s*\n\s*\n/g, '\n\n');
|
||||
inline = inline.replace(/\n\s*\n\s*\n/g, '\n\n');
|
||||
|
||||
// Ensure proper formatting with newline at end
|
||||
if (!standard.endsWith('\n')) {
|
||||
standard += '\n';
|
||||
}
|
||||
if (!inline.endsWith('\n')) {
|
||||
inline += '\n';
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
console.error(
|
||||
'Error during DBML generation process:',
|
||||
@@ -602,11 +930,11 @@ export function generateDBMLFromDiagram(diagram: Diagram): DBMLExportResult {
|
||||
|
||||
// If an error occurred, still prepend enums if they exist, or they'll be lost.
|
||||
// The error message will then follow.
|
||||
if (standard.startsWith('// Error generating DBML:')) {
|
||||
standard = enumsDBML + standard;
|
||||
if (standard.startsWith('// Error generating DBML:') && enumsDBML) {
|
||||
standard = enumsDBML + '\n\n' + standard;
|
||||
}
|
||||
if (inline.startsWith('// Error generating DBML:')) {
|
||||
inline = enumsDBML + inline;
|
||||
if (inline.startsWith('// Error generating DBML:') && enumsDBML) {
|
||||
inline = enumsDBML + '\n\n' + inline;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { importDBMLToDiagram } from '../dbml-import';
|
||||
import { DBCustomTypeKind } from '@/lib/domain/db-custom-type';
|
||||
|
||||
describe('DBML Import - Fantasy Examples', () => {
|
||||
describe('Magical Academy System', () => {
|
||||
@@ -613,6 +614,228 @@ Note quest_system_note {
|
||||
});
|
||||
});
|
||||
|
||||
describe('Enum Support', () => {
|
||||
it('should import enums as customTypes', async () => {
|
||||
const dbmlWithEnums = `
|
||||
// Test DBML with various enum definitions
|
||||
enum job_status {
|
||||
created [note: 'Waiting to be processed']
|
||||
running
|
||||
done
|
||||
failure
|
||||
}
|
||||
|
||||
// Enum with schema
|
||||
enum hr.employee_type {
|
||||
full_time
|
||||
part_time
|
||||
contractor
|
||||
intern
|
||||
}
|
||||
|
||||
// Enum with special characters and spaces
|
||||
enum grade {
|
||||
"A+"
|
||||
"A"
|
||||
"A-"
|
||||
"Not Yet Set"
|
||||
}
|
||||
|
||||
Table employees {
|
||||
id integer [pk]
|
||||
name varchar(200) [not null]
|
||||
status job_status
|
||||
type hr.employee_type
|
||||
performance_grade grade
|
||||
created_at timestamp [default: 'now()']
|
||||
}
|
||||
|
||||
Table projects {
|
||||
id integer [pk]
|
||||
name varchar(300) [not null]
|
||||
status job_status [not null]
|
||||
priority enum // inline enum without values - will be converted to varchar
|
||||
}`;
|
||||
|
||||
const diagram = await importDBMLToDiagram(dbmlWithEnums);
|
||||
|
||||
// Verify customTypes are created for enums
|
||||
expect(diagram.customTypes).toBeDefined();
|
||||
expect(diagram.customTypes).toHaveLength(3); // job_status, hr.employee_type, grade
|
||||
|
||||
// Check job_status enum
|
||||
const jobStatusEnum = diagram.customTypes?.find(
|
||||
(ct) => ct.name === 'job_status' && !ct.schema
|
||||
);
|
||||
expect(jobStatusEnum).toBeDefined();
|
||||
expect(jobStatusEnum?.kind).toBe(DBCustomTypeKind.enum);
|
||||
expect(jobStatusEnum?.values).toEqual([
|
||||
'created',
|
||||
'running',
|
||||
'done',
|
||||
'failure',
|
||||
]);
|
||||
|
||||
// Check hr.employee_type enum with schema
|
||||
const employeeTypeEnum = diagram.customTypes?.find(
|
||||
(ct) => ct.name === 'employee_type' && ct.schema === 'hr'
|
||||
);
|
||||
expect(employeeTypeEnum).toBeDefined();
|
||||
expect(employeeTypeEnum?.kind).toBe(DBCustomTypeKind.enum);
|
||||
expect(employeeTypeEnum?.values).toEqual([
|
||||
'full_time',
|
||||
'part_time',
|
||||
'contractor',
|
||||
'intern',
|
||||
]);
|
||||
|
||||
// Check grade enum with quoted values
|
||||
const gradeEnum = diagram.customTypes?.find(
|
||||
(ct) => ct.name === 'grade' && !ct.schema
|
||||
);
|
||||
expect(gradeEnum).toBeDefined();
|
||||
expect(gradeEnum?.kind).toBe(DBCustomTypeKind.enum);
|
||||
expect(gradeEnum?.values).toEqual(['A+', 'A', 'A-', 'Not Yet Set']);
|
||||
|
||||
// Verify tables are created
|
||||
expect(diagram.tables).toHaveLength(2);
|
||||
|
||||
// Check that enum fields in tables reference the custom types
|
||||
const employeesTable = diagram.tables?.find(
|
||||
(t) => t.name === 'employees'
|
||||
);
|
||||
const statusField = employeesTable?.fields.find(
|
||||
(f) => f.name === 'status'
|
||||
);
|
||||
const typeField = employeesTable?.fields.find(
|
||||
(f) => f.name === 'type'
|
||||
);
|
||||
const gradeField = employeesTable?.fields.find(
|
||||
(f) => f.name === 'performance_grade'
|
||||
);
|
||||
|
||||
// Verify fields have correct types
|
||||
expect(statusField?.type.id).toBe('job_status');
|
||||
expect(typeField?.type.id).toBe('employee_type');
|
||||
expect(gradeField?.type.id).toBe('grade');
|
||||
|
||||
// Check inline enum was converted to varchar
|
||||
const projectsTable = diagram.tables?.find(
|
||||
(t) => t.name === 'projects'
|
||||
);
|
||||
const priorityField = projectsTable?.fields.find(
|
||||
(f) => f.name === 'priority'
|
||||
);
|
||||
expect(priorityField?.type.id).toBe('varchar');
|
||||
});
|
||||
|
||||
it('should handle enum values with notes', async () => {
|
||||
const dbmlWithEnumNotes = `
|
||||
enum order_status {
|
||||
pending [note: 'Order has been placed but not confirmed']
|
||||
confirmed [note: 'Payment received and order confirmed']
|
||||
shipped [note: 'Order has been dispatched']
|
||||
delivered [note: 'Order delivered to customer']
|
||||
cancelled [note: 'Order cancelled by customer or system']
|
||||
}
|
||||
|
||||
Table orders {
|
||||
id integer [pk]
|
||||
status order_status [not null]
|
||||
}`;
|
||||
|
||||
const diagram = await importDBMLToDiagram(dbmlWithEnumNotes);
|
||||
|
||||
// Verify enum is created
|
||||
expect(diagram.customTypes).toHaveLength(1);
|
||||
|
||||
const orderStatusEnum = diagram.customTypes?.[0];
|
||||
expect(orderStatusEnum?.name).toBe('order_status');
|
||||
expect(orderStatusEnum?.kind).toBe(DBCustomTypeKind.enum);
|
||||
expect(orderStatusEnum?.values).toEqual([
|
||||
'pending',
|
||||
'confirmed',
|
||||
'shipped',
|
||||
'delivered',
|
||||
'cancelled',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should handle multiple schemas with same enum names', async () => {
|
||||
const dbmlWithSameEnumNames = `
|
||||
// Public schema status enum
|
||||
enum status {
|
||||
active
|
||||
inactive
|
||||
deleted
|
||||
}
|
||||
|
||||
// Admin schema status enum with different values
|
||||
enum admin.status {
|
||||
pending_approval
|
||||
approved
|
||||
rejected
|
||||
suspended
|
||||
}
|
||||
|
||||
Table public.users {
|
||||
id integer [pk]
|
||||
status status
|
||||
}
|
||||
|
||||
Table admin.users {
|
||||
id integer [pk]
|
||||
status admin.status
|
||||
}`;
|
||||
|
||||
const diagram = await importDBMLToDiagram(dbmlWithSameEnumNames);
|
||||
|
||||
// Verify both enums are created
|
||||
expect(diagram.customTypes).toHaveLength(2);
|
||||
|
||||
// Check public.status enum
|
||||
const publicStatusEnum = diagram.customTypes?.find(
|
||||
(ct) => ct.name === 'status' && !ct.schema
|
||||
);
|
||||
expect(publicStatusEnum).toBeDefined();
|
||||
expect(publicStatusEnum?.values).toEqual([
|
||||
'active',
|
||||
'inactive',
|
||||
'deleted',
|
||||
]);
|
||||
|
||||
// Check admin.status enum
|
||||
const adminStatusEnum = diagram.customTypes?.find(
|
||||
(ct) => ct.name === 'status' && ct.schema === 'admin'
|
||||
);
|
||||
expect(adminStatusEnum).toBeDefined();
|
||||
expect(adminStatusEnum?.values).toEqual([
|
||||
'pending_approval',
|
||||
'approved',
|
||||
'rejected',
|
||||
'suspended',
|
||||
]);
|
||||
|
||||
// Verify fields reference correct enums
|
||||
const publicUsersTable = diagram.tables?.find(
|
||||
(t) => t.name === 'users' && t.schema === 'public'
|
||||
);
|
||||
const adminUsersTable = diagram.tables?.find(
|
||||
(t) => t.name === 'users' && t.schema === 'admin'
|
||||
);
|
||||
|
||||
const publicStatusField = publicUsersTable?.fields.find(
|
||||
(f) => f.name === 'status'
|
||||
);
|
||||
const adminStatusField = adminUsersTable?.fields.find(
|
||||
(f) => f.name === 'status'
|
||||
);
|
||||
|
||||
expect(publicStatusField?.type.id).toBe('status');
|
||||
expect(adminStatusField?.type.id).toBe('status');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge Cases and Special Features', () => {
|
||||
it('should handle tables with all DBML features', async () => {
|
||||
const edgeCaseDBML = `
|
||||
@@ -695,13 +918,34 @@ Note dragon_note {
|
||||
// Check that table header color was removed
|
||||
expect(hoardsTable).toBeDefined();
|
||||
|
||||
// Verify all indexes are imported correctly
|
||||
expect(hoardsTable?.indexes).toHaveLength(3); // Should have 3 indexes as defined in DBML
|
||||
|
||||
// Verify named indexes
|
||||
const uniqueDragonIndex = hoardsTable?.indexes.find(
|
||||
(idx) => idx.name === 'idx_unique_dragon'
|
||||
);
|
||||
expect(uniqueDragonIndex).toBeDefined();
|
||||
expect(uniqueDragonIndex?.name).toBe('idx_unique_dragon'); // Verify exact name from DBML
|
||||
expect(uniqueDragonIndex?.unique).toBe(true);
|
||||
expect(uniqueDragonIndex?.fieldIds).toHaveLength(1);
|
||||
|
||||
const hoardValueIndex = hoardsTable?.indexes.find(
|
||||
(idx) => idx.name === 'idx_hoard_value'
|
||||
);
|
||||
expect(hoardValueIndex).toBeDefined();
|
||||
expect(hoardValueIndex?.name).toBe('idx_hoard_value'); // Verify exact name from DBML
|
||||
expect(hoardValueIndex?.unique).toBe(false);
|
||||
expect(hoardValueIndex?.fieldIds).toHaveLength(1);
|
||||
|
||||
const dragonActiveIndex = hoardsTable?.indexes.find(
|
||||
(idx) => idx.name === 'idx_dragon_active'
|
||||
);
|
||||
expect(dragonActiveIndex).toBeDefined();
|
||||
expect(dragonActiveIndex?.name).toBe('idx_dragon_active'); // Verify exact name from DBML
|
||||
expect(dragonActiveIndex?.unique).toBe(false);
|
||||
expect(dragonActiveIndex?.fieldIds).toHaveLength(2);
|
||||
|
||||
// Check relationship
|
||||
expect(diagram.relationships).toHaveLength(1);
|
||||
const relationship = diagram.relationships?.[0];
|
||||
@@ -741,5 +985,306 @@ Table empty_table {
|
||||
expect(diagram.tables?.[0]?.fields).toHaveLength(1);
|
||||
expect(diagram.tables?.[0]?.name).toBe('empty_table');
|
||||
});
|
||||
|
||||
it('should import tables with same name but different schemas', async () => {
|
||||
const dbml = `
|
||||
Table "aa"."users" {
|
||||
id integer [primary key]
|
||||
}
|
||||
|
||||
Table "bb"."users" {
|
||||
id integer [primary key]
|
||||
}`;
|
||||
const diagram = await importDBMLToDiagram(dbml);
|
||||
|
||||
expect(diagram.tables).toHaveLength(2);
|
||||
|
||||
const aaUsersTable = diagram.tables?.find(
|
||||
(t) => t.name === 'users' && t.schema === 'aa'
|
||||
);
|
||||
const bbUsersTable = diagram.tables?.find(
|
||||
(t) => t.name === 'users' && t.schema === 'bb'
|
||||
);
|
||||
|
||||
expect(aaUsersTable).toBeDefined();
|
||||
expect(bbUsersTable).toBeDefined();
|
||||
|
||||
expect(aaUsersTable?.schema).toBe('aa');
|
||||
expect(bbUsersTable?.schema).toBe('bb');
|
||||
|
||||
expect(aaUsersTable?.fields).toHaveLength(1);
|
||||
expect(bbUsersTable?.fields).toHaveLength(1);
|
||||
|
||||
expect(aaUsersTable?.fields[0].name).toBe('id');
|
||||
expect(aaUsersTable?.fields[0].type.id).toBe('integer');
|
||||
expect(aaUsersTable?.fields[0].primaryKey).toBe(true);
|
||||
|
||||
expect(bbUsersTable?.fields[0].name).toBe('id');
|
||||
expect(bbUsersTable?.fields[0].type.id).toBe('integer');
|
||||
expect(bbUsersTable?.fields[0].primaryKey).toBe(true);
|
||||
});
|
||||
|
||||
it('should import complex multi-schema DBML with inline refs and various indexes', async () => {
|
||||
// This test validates:
|
||||
// - 3 tables across different schemas (public, public_2, public_3)
|
||||
// - Table-level notes (Note: 'my comment' on users table)
|
||||
// - 3 indexes:
|
||||
// * Composite unique index: (content, user_id) on posts table
|
||||
// * Single non-unique index: created_at on posts table
|
||||
// * Single unique index: id on comments table
|
||||
// - 3 inline foreign key relationships:
|
||||
// * posts.user_id -> users.id
|
||||
// * comments.post_id -> posts.id
|
||||
// * comments.user_id -> users.id
|
||||
// - Quoted identifiers for all table and field names
|
||||
|
||||
const dbml = `
|
||||
Table "public"."users" {
|
||||
"id" varchar(500) [pk]
|
||||
"name" varchar(500)
|
||||
"email" varchar(500)
|
||||
Note: 'my comment'
|
||||
}
|
||||
|
||||
Table "public_2"."posts" {
|
||||
"id" varchar(500) [pk]
|
||||
"title" varchar(500)
|
||||
"content" text
|
||||
"user_id" varchar(500) [ref: < "public"."users"."id"]
|
||||
"created_at" timestamp
|
||||
|
||||
Indexes {
|
||||
(content, user_id) [unique, name: "public_2_content_user_id_idx"]
|
||||
created_at [name: "public_2_index_2"]
|
||||
}
|
||||
}
|
||||
|
||||
Table "public_3"."comments" {
|
||||
"id" varchar(500) [pk]
|
||||
"content" text
|
||||
"post_id" varchar(500) [ref: < "public_2"."posts"."id"]
|
||||
"user_id" varchar(500) [ref: < "public"."users"."id"]
|
||||
"created_at" timestamp
|
||||
|
||||
Indexes {
|
||||
id [unique, name: "public_3_index_1"]
|
||||
}
|
||||
}`;
|
||||
const diagram = await importDBMLToDiagram(dbml);
|
||||
|
||||
// Verify tables
|
||||
expect(diagram.tables).toHaveLength(3);
|
||||
|
||||
const usersTable = diagram.tables?.find(
|
||||
(t) => t.name === 'users' && t.schema === 'public'
|
||||
);
|
||||
const postsTable = diagram.tables?.find(
|
||||
(t) => t.name === 'posts' && t.schema === 'public_2'
|
||||
);
|
||||
const commentsTable = diagram.tables?.find(
|
||||
(t) => t.name === 'comments' && t.schema === 'public_3'
|
||||
);
|
||||
|
||||
expect(usersTable).toBeDefined();
|
||||
expect(postsTable).toBeDefined();
|
||||
expect(commentsTable).toBeDefined();
|
||||
|
||||
// Check users table
|
||||
expect(usersTable?.fields).toHaveLength(3);
|
||||
expect(
|
||||
usersTable?.fields.find((f) => f.name === 'id')?.primaryKey
|
||||
).toBe(true);
|
||||
expect(
|
||||
usersTable?.fields.find((f) => f.name === 'id')?.type.id
|
||||
).toBe('varchar');
|
||||
expect(
|
||||
usersTable?.fields.find((f) => f.name === 'name')?.type.id
|
||||
).toBe('varchar');
|
||||
expect(
|
||||
usersTable?.fields.find((f) => f.name === 'email')?.type.id
|
||||
).toBe('varchar');
|
||||
|
||||
// Check if table note is preserved
|
||||
expect(usersTable?.comments).toBe('my comment');
|
||||
|
||||
// Check posts table
|
||||
expect(postsTable?.fields).toHaveLength(5);
|
||||
expect(
|
||||
postsTable?.fields.find((f) => f.name === 'content')?.type.id
|
||||
).toBe('text');
|
||||
expect(
|
||||
postsTable?.fields.find((f) => f.name === 'created_at')?.type.id
|
||||
).toBe('timestamp');
|
||||
|
||||
// Check posts indexes thoroughly
|
||||
expect(postsTable?.indexes).toHaveLength(2);
|
||||
|
||||
// Index 1: Composite unique index on (content, user_id)
|
||||
const compositeIndex = postsTable?.indexes.find(
|
||||
(idx) => idx.name === 'public_2_content_user_id_idx'
|
||||
);
|
||||
expect(compositeIndex).toBeDefined();
|
||||
expect(compositeIndex?.name).toBe('public_2_content_user_id_idx'); // Verify exact name from DBML
|
||||
expect(compositeIndex?.unique).toBe(true);
|
||||
expect(compositeIndex?.fieldIds).toHaveLength(2);
|
||||
// Verify it includes the correct fields
|
||||
const contentFieldId = postsTable?.fields.find(
|
||||
(f) => f.name === 'content'
|
||||
)?.id;
|
||||
const userIdFieldId = postsTable?.fields.find(
|
||||
(f) => f.name === 'user_id'
|
||||
)?.id;
|
||||
expect(compositeIndex?.fieldIds).toContain(contentFieldId);
|
||||
expect(compositeIndex?.fieldIds).toContain(userIdFieldId);
|
||||
|
||||
// Index 2: Non-unique index on created_at
|
||||
const singleIndex = postsTable?.indexes.find(
|
||||
(idx) => idx.name === 'public_2_index_2'
|
||||
);
|
||||
expect(singleIndex).toBeDefined();
|
||||
expect(singleIndex?.name).toBe('public_2_index_2'); // Verify exact name from DBML
|
||||
expect(singleIndex?.unique).toBe(false);
|
||||
expect(singleIndex?.fieldIds).toHaveLength(1);
|
||||
const createdAtFieldId = postsTable?.fields.find(
|
||||
(f) => f.name === 'created_at'
|
||||
)?.id;
|
||||
expect(singleIndex?.fieldIds[0]).toBe(createdAtFieldId);
|
||||
|
||||
// Check comments table
|
||||
expect(commentsTable?.fields).toHaveLength(5);
|
||||
expect(commentsTable?.indexes).toHaveLength(1);
|
||||
|
||||
// Index: Unique index on id
|
||||
const idIndex = commentsTable?.indexes.find(
|
||||
(idx) => idx.name === 'public_3_index_1'
|
||||
);
|
||||
expect(idIndex).toBeDefined();
|
||||
expect(idIndex?.name).toBe('public_3_index_1'); // Verify exact name from DBML
|
||||
expect(idIndex?.unique).toBe(true);
|
||||
expect(idIndex?.fieldIds).toHaveLength(1);
|
||||
const idFieldId = commentsTable?.fields.find(
|
||||
(f) => f.name === 'id'
|
||||
)?.id;
|
||||
expect(idIndex?.fieldIds[0]).toBe(idFieldId);
|
||||
|
||||
// Verify relationships (inline refs should create relationships)
|
||||
// From DBML:
|
||||
// 1. posts.user_id -> users.id
|
||||
// 2. comments.post_id -> posts.id
|
||||
// 3. comments.user_id -> users.id
|
||||
expect(diagram.relationships).toHaveLength(3);
|
||||
|
||||
// Find relationships - check the actual field references
|
||||
const findRelationshipByFields = (
|
||||
sourceTableId: string,
|
||||
sourceFieldName: string,
|
||||
targetTableId: string,
|
||||
targetFieldName: string
|
||||
) => {
|
||||
const sourceField = diagram.tables
|
||||
?.find((t) => t.id === sourceTableId)
|
||||
?.fields.find((f) => f.name === sourceFieldName);
|
||||
const targetField = diagram.tables
|
||||
?.find((t) => t.id === targetTableId)
|
||||
?.fields.find((f) => f.name === targetFieldName);
|
||||
|
||||
return diagram.relationships?.find(
|
||||
(r) =>
|
||||
(r.sourceFieldId === sourceField?.id &&
|
||||
r.targetFieldId === targetField?.id) ||
|
||||
(r.sourceFieldId === targetField?.id &&
|
||||
r.targetFieldId === sourceField?.id)
|
||||
);
|
||||
};
|
||||
|
||||
// Relationship 1: posts.user_id -> users.id
|
||||
const postsUsersRel = findRelationshipByFields(
|
||||
postsTable!.id,
|
||||
'user_id',
|
||||
usersTable!.id,
|
||||
'id'
|
||||
);
|
||||
expect(postsUsersRel).toBeDefined();
|
||||
expect(postsUsersRel?.sourceSchema).toBeDefined();
|
||||
expect(postsUsersRel?.targetSchema).toBeDefined();
|
||||
|
||||
// Relationship 2: comments.post_id -> posts.id
|
||||
const commentsPostsRel = findRelationshipByFields(
|
||||
commentsTable!.id,
|
||||
'post_id',
|
||||
postsTable!.id,
|
||||
'id'
|
||||
);
|
||||
expect(commentsPostsRel).toBeDefined();
|
||||
|
||||
// Relationship 3: comments.user_id -> users.id
|
||||
const commentsUsersRel = findRelationshipByFields(
|
||||
commentsTable!.id,
|
||||
'user_id',
|
||||
usersTable!.id,
|
||||
'id'
|
||||
);
|
||||
expect(commentsUsersRel).toBeDefined();
|
||||
|
||||
// Verify all relationships have the expected cardinality
|
||||
// In DBML, inline refs create relationships where the referenced table (with PK)
|
||||
// is the "one" side and the referencing table (with FK) is the "many" side
|
||||
const allOneToMany = diagram.relationships?.every(
|
||||
(r) =>
|
||||
r.sourceCardinality === 'one' &&
|
||||
r.targetCardinality === 'many'
|
||||
);
|
||||
expect(allOneToMany).toBe(true);
|
||||
|
||||
// Verify schemas are preserved in relationships
|
||||
const relationshipsHaveSchemas = diagram.relationships?.every(
|
||||
(r) =>
|
||||
r.sourceSchema !== undefined && r.targetSchema !== undefined
|
||||
);
|
||||
expect(relationshipsHaveSchemas).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Notes Support', () => {
|
||||
it('should import table with note', async () => {
|
||||
const dbmlWithTableNote = `
|
||||
Table products {
|
||||
id integer [pk]
|
||||
name varchar(100)
|
||||
Note: 'This table stores product information'
|
||||
}`;
|
||||
|
||||
const diagram = await importDBMLToDiagram(dbmlWithTableNote);
|
||||
|
||||
expect(diagram.tables).toHaveLength(1);
|
||||
const productsTable = diagram.tables?.[0];
|
||||
expect(productsTable?.name).toBe('products');
|
||||
expect(productsTable?.comments).toBe(
|
||||
'This table stores product information'
|
||||
);
|
||||
});
|
||||
|
||||
it('should import field with note', async () => {
|
||||
const dbmlWithFieldNote = `
|
||||
Table orders {
|
||||
id integer [pk]
|
||||
total numeric(10,2) [note: 'Order total including tax']
|
||||
}`;
|
||||
|
||||
const diagram = await importDBMLToDiagram(dbmlWithFieldNote);
|
||||
|
||||
expect(diagram.tables).toHaveLength(1);
|
||||
const ordersTable = diagram.tables?.[0];
|
||||
expect(ordersTable?.fields).toHaveLength(2);
|
||||
|
||||
const totalField = ordersTable?.fields.find(
|
||||
(f) => f.name === 'total'
|
||||
);
|
||||
|
||||
// Field notes should be imported
|
||||
expect(totalField).toBeDefined();
|
||||
expect(totalField?.name).toBe('total');
|
||||
expect(totalField?.comments).toBe('Order total including tax');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
40
src/lib/dbml/dbml-import/dbml-import-error.ts
Normal file
40
src/lib/dbml/dbml-import/dbml-import-error.ts
Normal file
@@ -0,0 +1,40 @@
|
||||
export interface DBMLError {
|
||||
message: string;
|
||||
line: number;
|
||||
column: number;
|
||||
}
|
||||
|
||||
export function parseDBMLError(error: unknown): DBMLError | null {
|
||||
try {
|
||||
if (typeof error === 'string') {
|
||||
const parsed = JSON.parse(error);
|
||||
if (parsed.diags?.[0]) {
|
||||
const diag = parsed.diags[0];
|
||||
|
||||
return {
|
||||
message: diag.message,
|
||||
line: diag.location.start.line,
|
||||
column: diag.location.start.column,
|
||||
};
|
||||
}
|
||||
} else if (error && typeof error === 'object' && 'diags' in error) {
|
||||
const parsed = error as {
|
||||
diags: Array<{
|
||||
message: string;
|
||||
location: { start: { line: number; column: number } };
|
||||
}>;
|
||||
};
|
||||
if (parsed.diags?.[0]) {
|
||||
return {
|
||||
message: parsed.diags[0].message,
|
||||
line: parsed.diags[0].location.start.line,
|
||||
column: parsed.diags[0].location.start.column,
|
||||
};
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
console.error('Error parsing DBML error:', e);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
@@ -4,10 +4,16 @@ import { generateDiagramId, generateId } from '@/lib/utils';
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import type { Cardinality, DBRelationship } from '@/lib/domain/db-relationship';
|
||||
import type { DBField } from '@/lib/domain/db-field';
|
||||
import type { DataType } from '@/lib/data/data-types/data-types';
|
||||
import { genericDataTypes } from '@/lib/data/data-types/generic-data-types';
|
||||
import type { DataTypeData } from '@/lib/data/data-types/data-types';
|
||||
import { findDataTypeDataById } from '@/lib/data/data-types/data-types';
|
||||
import { randomColor } from '@/lib/colors';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import type Field from '@dbml/core/types/model_structure/field';
|
||||
import type { DBIndex } from '@/lib/domain';
|
||||
import {
|
||||
DBCustomTypeKind,
|
||||
type DBCustomType,
|
||||
} from '@/lib/domain/db-custom-type';
|
||||
|
||||
// Preprocess DBML to handle unsupported features
|
||||
export const preprocessDBML = (content: string): string => {
|
||||
@@ -19,8 +25,8 @@ export const preprocessDBML = (content: string): string => {
|
||||
// Remove Note blocks
|
||||
processed = processed.replace(/Note\s+\w+\s*\{[^}]*\}/gs, '');
|
||||
|
||||
// Remove enum definitions (blocks)
|
||||
processed = processed.replace(/enum\s+\w+\s*\{[^}]*\}/gs, '');
|
||||
// Don't remove enum definitions - we'll parse them
|
||||
// processed = processed.replace(/enum\s+\w+\s*\{[^}]*\}/gs, '');
|
||||
|
||||
// Handle array types by converting them to text
|
||||
processed = processed.replace(/(\w+)\[\]/g, 'text');
|
||||
@@ -32,8 +38,9 @@ export const preprocessDBML = (content: string): string => {
|
||||
);
|
||||
|
||||
// Handle Table headers with color attributes
|
||||
// This regex handles both simple table names and schema.table patterns with quotes
|
||||
processed = processed.replace(
|
||||
/Table\s+(\w+)\s*\[[^\]]*\]\s*\{/g,
|
||||
/Table\s+((?:"[^"]+"\."[^"]+")|(?:\w+))\s*\[[^\]]*\]\s*\{/g,
|
||||
'Table $1 {'
|
||||
);
|
||||
|
||||
@@ -76,6 +83,10 @@ interface DBMLField {
|
||||
pk?: boolean;
|
||||
not_null?: boolean;
|
||||
increment?: boolean;
|
||||
characterMaximumLength?: string | null;
|
||||
precision?: number | null;
|
||||
scale?: number | null;
|
||||
note?: string | { value: string } | null;
|
||||
}
|
||||
|
||||
interface DBMLIndexColumn {
|
||||
@@ -86,7 +97,7 @@ interface DBMLIndexColumn {
|
||||
}
|
||||
|
||||
interface DBMLIndex {
|
||||
columns: string | (string | DBMLIndexColumn)[];
|
||||
columns: (string | DBMLIndexColumn)[];
|
||||
unique?: boolean;
|
||||
name?: string;
|
||||
}
|
||||
@@ -96,6 +107,7 @@ interface DBMLTable {
|
||||
schema?: string | { name: string };
|
||||
fields: DBMLField[];
|
||||
indexes?: DBMLIndex[];
|
||||
note?: string | { value: string } | null;
|
||||
}
|
||||
|
||||
interface DBMLEndpoint {
|
||||
@@ -108,32 +120,51 @@ interface DBMLRef {
|
||||
endpoints: [DBMLEndpoint, DBMLEndpoint];
|
||||
}
|
||||
|
||||
const mapDBMLTypeToGenericType = (dbmlType: string): DataType => {
|
||||
interface DBMLEnum {
|
||||
name: string;
|
||||
schema?: string | { name: string };
|
||||
values: Array<{ name: string; note?: string }>;
|
||||
note?: string | { value: string } | null;
|
||||
}
|
||||
|
||||
const mapDBMLTypeToDataType = (
|
||||
dbmlType: string,
|
||||
options?: { databaseType?: DatabaseType; enums?: DBMLEnum[] }
|
||||
): DataTypeData => {
|
||||
const normalizedType = dbmlType.toLowerCase().replace(/\(.*\)/, '');
|
||||
const matchedType = genericDataTypes.find((t) => t.id === normalizedType);
|
||||
if (matchedType) return matchedType;
|
||||
const typeMap: Record<string, string> = {
|
||||
int: 'integer',
|
||||
varchar: 'varchar',
|
||||
bool: 'boolean',
|
||||
number: 'numeric',
|
||||
string: 'varchar',
|
||||
text: 'text',
|
||||
timestamp: 'timestamp',
|
||||
datetime: 'timestamp',
|
||||
float: 'float',
|
||||
double: 'double',
|
||||
decimal: 'decimal',
|
||||
bigint: 'bigint',
|
||||
smallint: 'smallint',
|
||||
char: 'char',
|
||||
};
|
||||
const mappedType = typeMap[normalizedType];
|
||||
if (mappedType) {
|
||||
const foundType = genericDataTypes.find((t) => t.id === mappedType);
|
||||
if (foundType) return foundType;
|
||||
|
||||
// Check if it's an enum type
|
||||
if (options?.enums) {
|
||||
const enumDef = options.enums.find((e) => {
|
||||
// Check both with and without schema prefix
|
||||
const enumName = e.name.toLowerCase();
|
||||
const enumFullName = e.schema
|
||||
? `${e.schema}.${enumName}`
|
||||
: enumName;
|
||||
return (
|
||||
normalizedType === enumName || normalizedType === enumFullName
|
||||
);
|
||||
});
|
||||
|
||||
if (enumDef) {
|
||||
// Return enum as custom type reference
|
||||
return {
|
||||
id: enumDef.name,
|
||||
name: enumDef.name,
|
||||
} satisfies DataTypeData;
|
||||
}
|
||||
}
|
||||
return genericDataTypes.find((t) => t.id === 'varchar')!;
|
||||
|
||||
const matchedType = findDataTypeDataById(
|
||||
normalizedType,
|
||||
options?.databaseType
|
||||
);
|
||||
if (matchedType) return matchedType;
|
||||
|
||||
return {
|
||||
id: normalizedType.split(' ').join('_').toLowerCase(),
|
||||
name: normalizedType,
|
||||
} satisfies DataTypeData;
|
||||
};
|
||||
|
||||
const determineCardinality = (
|
||||
@@ -154,7 +185,10 @@ const determineCardinality = (
|
||||
};
|
||||
|
||||
export const importDBMLToDiagram = async (
|
||||
dbmlContent: string
|
||||
dbmlContent: string,
|
||||
options?: {
|
||||
databaseType?: DatabaseType;
|
||||
}
|
||||
): Promise<Diagram> => {
|
||||
try {
|
||||
// Handle empty content
|
||||
@@ -162,7 +196,7 @@ export const importDBMLToDiagram = async (
|
||||
return {
|
||||
id: generateDiagramId(),
|
||||
name: 'DBML Import',
|
||||
databaseType: DatabaseType.GENERIC,
|
||||
databaseType: options?.databaseType ?? DatabaseType.GENERIC,
|
||||
tables: [],
|
||||
relationships: [],
|
||||
createdAt: new Date(),
|
||||
@@ -180,7 +214,7 @@ export const importDBMLToDiagram = async (
|
||||
return {
|
||||
id: generateDiagramId(),
|
||||
name: 'DBML Import',
|
||||
databaseType: DatabaseType.GENERIC,
|
||||
databaseType: options?.databaseType ?? DatabaseType.GENERIC,
|
||||
tables: [],
|
||||
relationships: [],
|
||||
createdAt: new Date(),
|
||||
@@ -189,14 +223,13 @@ export const importDBMLToDiagram = async (
|
||||
}
|
||||
|
||||
const parsedData = parser.parse(sanitizedContent, 'dbml');
|
||||
const dbmlData = parsedData.schemas[0];
|
||||
|
||||
// Handle case where no schema is found
|
||||
if (!dbmlData || !dbmlData.tables) {
|
||||
// Handle case where no schemas are found
|
||||
if (!parsedData.schemas || parsedData.schemas.length === 0) {
|
||||
return {
|
||||
id: generateDiagramId(),
|
||||
name: 'DBML Import',
|
||||
databaseType: DatabaseType.GENERIC,
|
||||
databaseType: options?.databaseType ?? DatabaseType.GENERIC,
|
||||
tables: [],
|
||||
relationships: [],
|
||||
createdAt: new Date(),
|
||||
@@ -204,71 +237,186 @@ export const importDBMLToDiagram = async (
|
||||
};
|
||||
}
|
||||
|
||||
// Extract only the necessary data from the parsed DBML
|
||||
const extractedData = {
|
||||
tables: (dbmlData.tables as unknown as DBMLTable[]).map(
|
||||
(table) => ({
|
||||
name: table.name,
|
||||
schema: table.schema,
|
||||
fields: table.fields.map((field: DBMLField) => ({
|
||||
name: field.name,
|
||||
type: field.type,
|
||||
unique: field.unique,
|
||||
pk: field.pk,
|
||||
not_null: field.not_null,
|
||||
increment: field.increment,
|
||||
})),
|
||||
indexes:
|
||||
table.indexes?.map((dbmlIndex) => {
|
||||
let indexColumns: string[];
|
||||
// Process all schemas, not just the first one
|
||||
const allTables: DBMLTable[] = [];
|
||||
const allRefs: DBMLRef[] = [];
|
||||
const allEnums: DBMLEnum[] = [];
|
||||
|
||||
// Handle composite index case "(col1, col2)"
|
||||
if (typeof dbmlIndex.columns === 'string') {
|
||||
if (dbmlIndex.columns.includes('(')) {
|
||||
// Composite index
|
||||
const columnsStr =
|
||||
dbmlIndex.columns.replace(/[()]/g, '');
|
||||
indexColumns = columnsStr
|
||||
.split(',')
|
||||
.map((c) => c.trim());
|
||||
} else {
|
||||
// Single column
|
||||
indexColumns = [dbmlIndex.columns.trim()];
|
||||
}
|
||||
} else {
|
||||
// Handle array of columns
|
||||
indexColumns = Array.isArray(dbmlIndex.columns)
|
||||
? dbmlIndex.columns.map((col) =>
|
||||
typeof col === 'object' &&
|
||||
'value' in col
|
||||
? (col.value as string).trim()
|
||||
: (col as string).trim()
|
||||
)
|
||||
: [String(dbmlIndex.columns).trim()];
|
||||
}
|
||||
const getFieldExtraAttributes = (
|
||||
field: Field,
|
||||
enums: DBMLEnum[]
|
||||
): Partial<DBMLField> => {
|
||||
if (!field.type || !field.type.args) {
|
||||
return {};
|
||||
}
|
||||
|
||||
// Generate a consistent index name
|
||||
const indexName =
|
||||
dbmlIndex.name ||
|
||||
`idx_${table.name}_${indexColumns.join('_')}`;
|
||||
const args = field.type.args.split(',') as string[];
|
||||
|
||||
const dataType = mapDBMLTypeToDataType(field.type.type_name, {
|
||||
...options,
|
||||
enums,
|
||||
});
|
||||
|
||||
if (dataType.fieldAttributes?.hasCharMaxLength) {
|
||||
const charMaxLength = args?.[0];
|
||||
return {
|
||||
characterMaximumLength: charMaxLength,
|
||||
};
|
||||
} else if (
|
||||
dataType.fieldAttributes?.precision &&
|
||||
dataType.fieldAttributes?.scale
|
||||
) {
|
||||
const precisionNum = args?.[0] ? parseInt(args[0]) : undefined;
|
||||
const scaleNum = args?.[1] ? parseInt(args[1]) : undefined;
|
||||
|
||||
const precision = precisionNum
|
||||
? isNaN(precisionNum)
|
||||
? undefined
|
||||
: precisionNum
|
||||
: undefined;
|
||||
|
||||
const scale = scaleNum
|
||||
? isNaN(scaleNum)
|
||||
? undefined
|
||||
: scaleNum
|
||||
: undefined;
|
||||
|
||||
return {
|
||||
precision,
|
||||
scale,
|
||||
};
|
||||
}
|
||||
|
||||
return {};
|
||||
};
|
||||
|
||||
parsedData.schemas.forEach((schema) => {
|
||||
if (schema.tables) {
|
||||
schema.tables.forEach((table) => {
|
||||
// For tables with explicit schema, use the schema name
|
||||
// For tables without explicit schema, use empty string
|
||||
const schemaName =
|
||||
typeof table.schema === 'string'
|
||||
? table.schema
|
||||
: table.schema?.name || '';
|
||||
|
||||
allTables.push({
|
||||
name: table.name,
|
||||
schema: schemaName,
|
||||
note: table.note,
|
||||
fields: table.fields.map((field): DBMLField => {
|
||||
return {
|
||||
columns: indexColumns,
|
||||
unique: dbmlIndex.unique || false,
|
||||
name: indexName,
|
||||
};
|
||||
}) || [],
|
||||
})
|
||||
),
|
||||
refs: (dbmlData.refs as unknown as DBMLRef[]).map((ref) => ({
|
||||
endpoints: (ref.endpoints as [DBMLEndpoint, DBMLEndpoint]).map(
|
||||
(endpoint) => ({
|
||||
tableName: endpoint.tableName,
|
||||
fieldNames: endpoint.fieldNames,
|
||||
relation: endpoint.relation,
|
||||
})
|
||||
),
|
||||
})),
|
||||
name: field.name,
|
||||
type: field.type,
|
||||
unique: field.unique,
|
||||
pk: field.pk,
|
||||
not_null: field.not_null,
|
||||
increment: field.increment,
|
||||
note: field.note,
|
||||
...getFieldExtraAttributes(field, allEnums),
|
||||
} satisfies DBMLField;
|
||||
}),
|
||||
indexes:
|
||||
table.indexes?.map((dbmlIndex) => {
|
||||
let indexColumns: string[];
|
||||
|
||||
// Handle both string and array formats
|
||||
if (typeof dbmlIndex.columns === 'string') {
|
||||
// Handle composite index case "(col1, col2)"
|
||||
// @ts-expect-error "columns" can be a string in some DBML versions
|
||||
if (dbmlIndex.columns.includes('(')) {
|
||||
const columnsStr: string =
|
||||
// @ts-expect-error "columns" can be a string in some DBML versions
|
||||
dbmlIndex.columns.replace(
|
||||
/[()]/g,
|
||||
''
|
||||
);
|
||||
indexColumns = columnsStr
|
||||
.split(',')
|
||||
.map((c) => c.trim());
|
||||
} else {
|
||||
// Single column as string
|
||||
|
||||
indexColumns = [
|
||||
// @ts-expect-error "columns" can be a string in some DBML versions
|
||||
dbmlIndex.columns.trim(),
|
||||
];
|
||||
}
|
||||
} else {
|
||||
// Handle array of columns
|
||||
indexColumns = dbmlIndex.columns.map(
|
||||
(col) => {
|
||||
if (typeof col === 'string') {
|
||||
// @ts-expect-error "columns" can be a string in some DBML versions
|
||||
return col.trim();
|
||||
} else if (
|
||||
typeof col === 'object' &&
|
||||
'value' in col
|
||||
) {
|
||||
return col.value.trim();
|
||||
} else {
|
||||
return String(col).trim();
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// Generate a consistent index name
|
||||
const indexName =
|
||||
dbmlIndex.name ||
|
||||
`idx_${table.name}_${indexColumns.join('_')}`;
|
||||
|
||||
return {
|
||||
columns: indexColumns,
|
||||
unique: dbmlIndex.unique || false,
|
||||
name: indexName,
|
||||
};
|
||||
}) || [],
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
if (schema.refs) {
|
||||
schema.refs.forEach((ref) => {
|
||||
// Convert the ref to ensure it has exactly two endpoints
|
||||
if (ref.endpoints && ref.endpoints.length >= 2) {
|
||||
allRefs.push({
|
||||
endpoints: [ref.endpoints[0], ref.endpoints[1]] as [
|
||||
DBMLEndpoint,
|
||||
DBMLEndpoint,
|
||||
],
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (schema.enums) {
|
||||
schema.enums.forEach((enumDef) => {
|
||||
// Get schema name from enum or use schema's name
|
||||
const enumSchema =
|
||||
typeof enumDef.schema === 'string'
|
||||
? enumDef.schema
|
||||
: enumDef.schema?.name || schema.name;
|
||||
|
||||
allEnums.push({
|
||||
name: enumDef.name,
|
||||
schema: enumSchema === 'public' ? '' : enumSchema,
|
||||
values: enumDef.values || [],
|
||||
note: enumDef.note,
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Extract only the necessary data from the parsed DBML
|
||||
const extractedData: {
|
||||
tables: DBMLTable[];
|
||||
refs: DBMLRef[];
|
||||
enums: DBMLEnum[];
|
||||
} = {
|
||||
tables: allTables,
|
||||
refs: allRefs,
|
||||
enums: allEnums,
|
||||
};
|
||||
|
||||
// Convert DBML tables to ChartDB table objects
|
||||
@@ -278,18 +426,40 @@ export const importDBMLToDiagram = async (
|
||||
const tableSpacing = 300;
|
||||
|
||||
// Create fields first so we have their IDs
|
||||
const fields = table.fields.map((field) => ({
|
||||
id: generateId(),
|
||||
name: field.name.replace(/['"]/g, ''),
|
||||
type: mapDBMLTypeToGenericType(field.type.type_name),
|
||||
nullable: !field.not_null,
|
||||
primaryKey: field.pk || false,
|
||||
unique: field.unique || false,
|
||||
createdAt: Date.now(),
|
||||
}));
|
||||
const fields: DBField[] = table.fields.map((field) => {
|
||||
// Extract field note/comment
|
||||
let fieldComment: string | undefined;
|
||||
if (field.note) {
|
||||
if (typeof field.note === 'string') {
|
||||
fieldComment = field.note;
|
||||
} else if (
|
||||
typeof field.note === 'object' &&
|
||||
'value' in field.note
|
||||
) {
|
||||
fieldComment = field.note.value;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
id: generateId(),
|
||||
name: field.name.replace(/['"]/g, ''),
|
||||
type: mapDBMLTypeToDataType(field.type.type_name, {
|
||||
...options,
|
||||
enums: extractedData.enums,
|
||||
}),
|
||||
nullable: !field.not_null,
|
||||
primaryKey: field.pk || false,
|
||||
unique: field.unique || false,
|
||||
createdAt: Date.now(),
|
||||
characterMaximumLength: field.characterMaximumLength,
|
||||
precision: field.precision,
|
||||
scale: field.scale,
|
||||
...(fieldComment ? { comments: fieldComment } : {}),
|
||||
};
|
||||
});
|
||||
|
||||
// Convert DBML indexes to ChartDB indexes
|
||||
const indexes =
|
||||
const indexes: DBIndex[] =
|
||||
table.indexes?.map((dbmlIndex) => {
|
||||
const fieldIds = dbmlIndex.columns.map((columnName) => {
|
||||
const field = fields.find((f) => f.name === columnName);
|
||||
@@ -305,13 +475,26 @@ export const importDBMLToDiagram = async (
|
||||
id: generateId(),
|
||||
name:
|
||||
dbmlIndex.name ||
|
||||
`idx_${table.name}_${dbmlIndex.columns.join('_')}`,
|
||||
`idx_${table.name}_${(dbmlIndex.columns as string[]).join('_')}`,
|
||||
fieldIds,
|
||||
unique: dbmlIndex.unique || false,
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
}) || [];
|
||||
|
||||
// Extract table note/comment
|
||||
let tableComment: string | undefined;
|
||||
if (table.note) {
|
||||
if (typeof table.note === 'string') {
|
||||
tableComment = table.note;
|
||||
} else if (
|
||||
typeof table.note === 'object' &&
|
||||
'value' in table.note
|
||||
) {
|
||||
tableComment = table.note.value;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
id: generateId(),
|
||||
name: table.name.replace(/['"]/g, ''),
|
||||
@@ -327,7 +510,8 @@ export const importDBMLToDiagram = async (
|
||||
color: randomColor(),
|
||||
isView: false,
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
comments: tableComment,
|
||||
} as DBTable;
|
||||
});
|
||||
|
||||
// Create relationships using the refs
|
||||
@@ -381,12 +565,43 @@ export const importDBMLToDiagram = async (
|
||||
}
|
||||
);
|
||||
|
||||
// Convert DBML enums to custom types
|
||||
const customTypes: DBCustomType[] = extractedData.enums.map(
|
||||
(enumDef) => {
|
||||
// Extract values from enum
|
||||
const values = enumDef.values
|
||||
.map((v) => {
|
||||
// Handle both string values and objects with name property
|
||||
if (typeof v === 'string') {
|
||||
return v;
|
||||
} else if (v && typeof v === 'object' && 'name' in v) {
|
||||
return v.name.replace(/["']/g, ''); // Remove quotes from values
|
||||
}
|
||||
return '';
|
||||
})
|
||||
.filter((v) => v !== '');
|
||||
|
||||
return {
|
||||
id: generateId(),
|
||||
schema:
|
||||
typeof enumDef.schema === 'string'
|
||||
? enumDef.schema
|
||||
: undefined,
|
||||
name: enumDef.name,
|
||||
kind: DBCustomTypeKind.enum,
|
||||
values,
|
||||
order: 0,
|
||||
} satisfies DBCustomType;
|
||||
}
|
||||
);
|
||||
|
||||
return {
|
||||
id: generateDiagramId(),
|
||||
name: 'DBML Import',
|
||||
databaseType: DatabaseType.GENERIC,
|
||||
databaseType: options?.databaseType ?? DatabaseType.GENERIC,
|
||||
tables,
|
||||
relationships,
|
||||
customTypes,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
@@ -1,10 +1,15 @@
|
||||
import { z } from 'zod';
|
||||
import { dataTypeSchema, type DataType } from '../data/data-types/data-types';
|
||||
import {
|
||||
dataTypeSchema,
|
||||
findDataTypeDataById,
|
||||
type DataType,
|
||||
} from '../data/data-types/data-types';
|
||||
import type { ColumnInfo } from '../data/import-metadata/metadata-types/column-info';
|
||||
import type { AggregatedIndexInfo } from '../data/import-metadata/metadata-types/index-info';
|
||||
import type { PrimaryKeyInfo } from '../data/import-metadata/metadata-types/primary-key-info';
|
||||
import type { TableInfo } from '../data/import-metadata/metadata-types/table-info';
|
||||
import { generateId } from '../utils';
|
||||
import type { DatabaseType } from './database-type';
|
||||
|
||||
export interface DBField {
|
||||
id: string;
|
||||
@@ -97,3 +102,80 @@ export const createFieldsFromMetadata = ({
|
||||
})
|
||||
);
|
||||
};
|
||||
|
||||
export const generateDBFieldSuffix = (
|
||||
field: DBField,
|
||||
{
|
||||
databaseType,
|
||||
forceExtended = false,
|
||||
typeId,
|
||||
}: {
|
||||
databaseType?: DatabaseType;
|
||||
forceExtended?: boolean;
|
||||
typeId?: string;
|
||||
} = {}
|
||||
): string => {
|
||||
if (databaseType && forceExtended && typeId) {
|
||||
return generateExtendedSuffix(field, databaseType, typeId);
|
||||
}
|
||||
|
||||
return generateStandardSuffix(field);
|
||||
};
|
||||
|
||||
const generateExtendedSuffix = (
|
||||
field: DBField,
|
||||
databaseType: DatabaseType,
|
||||
typeId: string
|
||||
): string => {
|
||||
const type = findDataTypeDataById(typeId, databaseType);
|
||||
|
||||
if (!type?.fieldAttributes) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const { fieldAttributes } = type;
|
||||
|
||||
// Character maximum length types (e.g., VARCHAR)
|
||||
if (fieldAttributes.hasCharMaxLength) {
|
||||
const maxLength = field.characterMaximumLength ?? 'n';
|
||||
return `(${maxLength})`;
|
||||
}
|
||||
|
||||
// Precision and scale types (e.g., DECIMAL)
|
||||
if (fieldAttributes.precision && fieldAttributes.scale) {
|
||||
return formatPrecisionAndScale(field.precision, field.scale, '(p, s)');
|
||||
}
|
||||
|
||||
// Precision only types (e.g., FLOAT)
|
||||
if (fieldAttributes.precision) {
|
||||
const precision = field.precision ?? 'p';
|
||||
return `(${precision})`;
|
||||
}
|
||||
|
||||
return '';
|
||||
};
|
||||
|
||||
const generateStandardSuffix = (field: DBField): string => {
|
||||
// Character maximum length
|
||||
if (field.characterMaximumLength) {
|
||||
return `(${field.characterMaximumLength})`;
|
||||
}
|
||||
|
||||
return formatPrecisionAndScale(field.precision, field.scale, '');
|
||||
};
|
||||
|
||||
const formatPrecisionAndScale = (
|
||||
precision: number | null | undefined,
|
||||
scale: number | null | undefined,
|
||||
fallback: string
|
||||
): string => {
|
||||
if (precision && scale) {
|
||||
return `(${precision}, ${scale})`;
|
||||
}
|
||||
|
||||
if (precision) {
|
||||
return `(${precision})`;
|
||||
}
|
||||
|
||||
return fallback;
|
||||
};
|
||||
|
||||
@@ -108,7 +108,7 @@ export const loadFromDatabaseMetadata = async ({
|
||||
return a.isView ? 1 : -1;
|
||||
});
|
||||
|
||||
const diagram = {
|
||||
const diagram: Diagram = {
|
||||
id: generateDiagramId(),
|
||||
name: databaseMetadata.database_name
|
||||
? `${databaseMetadata.database_name}-db`
|
||||
|
||||
@@ -329,6 +329,27 @@ function compareFieldProperties({
|
||||
changedAttributes.push('comments');
|
||||
}
|
||||
|
||||
if (
|
||||
(newField.characterMaximumLength || oldField.characterMaximumLength) &&
|
||||
oldField.characterMaximumLength !== newField.characterMaximumLength
|
||||
) {
|
||||
changedAttributes.push('characterMaximumLength');
|
||||
}
|
||||
|
||||
if (
|
||||
(newField.scale || oldField.scale) &&
|
||||
oldField.scale !== newField.scale
|
||||
) {
|
||||
changedAttributes.push('scale');
|
||||
}
|
||||
|
||||
if (
|
||||
(newField.precision || oldField.precision) &&
|
||||
oldField.precision !== newField.precision
|
||||
) {
|
||||
changedAttributes.push('precision');
|
||||
}
|
||||
|
||||
if (changedAttributes.length > 0) {
|
||||
for (const attribute of changedAttributes) {
|
||||
diffMap.set(
|
||||
|
||||
@@ -12,7 +12,10 @@ export type FieldDiffAttribute =
|
||||
| 'primaryKey'
|
||||
| 'unique'
|
||||
| 'nullable'
|
||||
| 'comments';
|
||||
| 'comments'
|
||||
| 'characterMaximumLength'
|
||||
| 'precision'
|
||||
| 'scale';
|
||||
|
||||
export const fieldDiffAttributeSchema: z.ZodType<FieldDiffAttribute> = z.union([
|
||||
z.literal('name'),
|
||||
@@ -61,8 +64,8 @@ export interface FieldDiffChanged {
|
||||
fieldId: string;
|
||||
tableId: string;
|
||||
attribute: FieldDiffAttribute;
|
||||
oldValue: string | boolean | DataType;
|
||||
newValue: string | boolean | DataType;
|
||||
oldValue: string | boolean | DataType | number;
|
||||
newValue: string | boolean | DataType | number;
|
||||
}
|
||||
|
||||
export const fieldDiffChangedSchema: z.ZodType<FieldDiffChanged> = z.object({
|
||||
|
||||
@@ -80,7 +80,7 @@ export const AreaNode: React.FC<NodeProps<AreaNodeType>> = React.memo(
|
||||
<NodeResizer
|
||||
isVisible={focused}
|
||||
lineClassName="!border-4 !border-transparent"
|
||||
handleClassName="!h-[18px] !w-[18px] !rounded-full !bg-pink-600"
|
||||
handleClassName="!h-[10px] !w-[10px] !rounded-full !bg-pink-600"
|
||||
minHeight={100}
|
||||
minWidth={100}
|
||||
/>
|
||||
|
||||
@@ -82,13 +82,15 @@ export const CanvasContextMenu: React.FC<React.PropsWithChildren> = ({
|
||||
openCreateRelationshipDialog();
|
||||
}, [openCreateRelationshipDialog]);
|
||||
|
||||
if (!isDesktop || readonly) {
|
||||
if (!isDesktop) {
|
||||
return <>{children}</>;
|
||||
}
|
||||
|
||||
return (
|
||||
<ContextMenu>
|
||||
<ContextMenuTrigger>{children}</ContextMenuTrigger>
|
||||
<ContextMenuTrigger disabled={readonly}>
|
||||
{children}
|
||||
</ContextMenuTrigger>
|
||||
<ContextMenuContent>
|
||||
<ContextMenuItem
|
||||
onClick={createTableHandler}
|
||||
|
||||
@@ -16,6 +16,7 @@ import { defaultSchemas } from '@/lib/data/default-schemas';
|
||||
import { useReactFlow } from '@xyflow/react';
|
||||
import { TreeView } from '@/components/tree-view/tree-view';
|
||||
import type { TreeNode } from '@/components/tree-view/tree';
|
||||
import { ScrollArea } from '@/components/scroll-area/scroll-area';
|
||||
|
||||
export interface CanvasFilterProps {
|
||||
onClose: () => void;
|
||||
@@ -405,7 +406,7 @@ export const CanvasFilter: React.FC<CanvasFilterProps> = ({ onClose }) => {
|
||||
</div>
|
||||
|
||||
{/* Table Tree */}
|
||||
<div className="flex-1 overflow-y-auto rounded-b-lg">
|
||||
<ScrollArea className="flex-1 rounded-b-lg" type="auto">
|
||||
<TreeView
|
||||
data={filteredTreeData}
|
||||
onNodeClick={handleNodeClick}
|
||||
@@ -416,7 +417,7 @@ export const CanvasFilter: React.FC<CanvasFilterProps> = ({ onClose }) => {
|
||||
setExpanded={setExpanded}
|
||||
className="py-2"
|
||||
/>
|
||||
</div>
|
||||
</ScrollArea>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -40,7 +40,13 @@ import {
|
||||
} from './table-node/table-node-field';
|
||||
import { Toolbar } from './toolbar/toolbar';
|
||||
import { useToast } from '@/components/toast/use-toast';
|
||||
import { Pencil, LayoutGrid, AlertTriangle, Magnet } from 'lucide-react';
|
||||
import {
|
||||
Pencil,
|
||||
LayoutGrid,
|
||||
AlertTriangle,
|
||||
Magnet,
|
||||
Highlighter,
|
||||
} from 'lucide-react';
|
||||
import { Button } from '@/components/button/button';
|
||||
import { useLayout } from '@/hooks/use-layout';
|
||||
import { useBreakpoint } from '@/hooks/use-breakpoint';
|
||||
@@ -86,6 +92,8 @@ import type { Area } from '@/lib/domain/area';
|
||||
import { updateTablesParentAreas, getTablesInArea } from './area-utils';
|
||||
import { CanvasFilter } from './canvas-filter/canvas-filter';
|
||||
import { useHotkeys } from 'react-hotkeys-hook';
|
||||
import { ShowAllButton } from './show-all-button';
|
||||
import { useIsLostInCanvas } from './hooks/use-is-lost-in-canvas';
|
||||
|
||||
const HIGHLIGHTED_EDGE_Z_INDEX = 1;
|
||||
const DEFAULT_EDGE_Z_INDEX = 0;
|
||||
@@ -158,6 +166,7 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
>([]);
|
||||
const { toast } = useToast();
|
||||
const { t } = useTranslation();
|
||||
const { isLostInCanvas } = useIsLostInCanvas();
|
||||
const {
|
||||
tables,
|
||||
areas,
|
||||
@@ -175,6 +184,8 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
readonly,
|
||||
removeArea,
|
||||
updateArea,
|
||||
highlightedCustomType,
|
||||
highlightCustomTypeId,
|
||||
hiddenTableIds,
|
||||
} = useChartDB();
|
||||
const { showSidePanel } = useLayout();
|
||||
@@ -385,12 +396,22 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
hiddenTableIds,
|
||||
});
|
||||
|
||||
// Check if table uses the highlighted custom type
|
||||
let hasHighlightedCustomType = false;
|
||||
if (highlightedCustomType) {
|
||||
hasHighlightedCustomType = table.fields.some(
|
||||
(field) =>
|
||||
field.type.name === highlightedCustomType.name
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
...node,
|
||||
data: {
|
||||
...node.data,
|
||||
isOverlapping,
|
||||
highlightOverlappingTables,
|
||||
hasHighlightedCustomType,
|
||||
},
|
||||
};
|
||||
}),
|
||||
@@ -413,6 +434,7 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
overlapGraph.lastUpdated,
|
||||
overlapGraph.graph,
|
||||
highlightOverlappingTables,
|
||||
highlightedCustomType,
|
||||
]);
|
||||
|
||||
const prevFilteredSchemas = useRef<string[] | undefined>(undefined);
|
||||
@@ -453,6 +475,7 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
const oldTable = tables[index];
|
||||
if (
|
||||
oldTable &&
|
||||
(!!newTable.parentAreaId || !!oldTable.parentAreaId) &&
|
||||
newTable.parentAreaId !== oldTable.parentAreaId
|
||||
) {
|
||||
needsUpdate.push({
|
||||
@@ -990,6 +1013,21 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
overlapGraph
|
||||
);
|
||||
setOverlapGraph(newOverlappingGraph);
|
||||
|
||||
setTimeout(() => {
|
||||
setNodes((prevNodes) =>
|
||||
prevNodes.map((n) => {
|
||||
if (n.id === event.data.id) {
|
||||
return {
|
||||
...n,
|
||||
measured,
|
||||
};
|
||||
}
|
||||
|
||||
return n;
|
||||
})
|
||||
);
|
||||
}, 0);
|
||||
} else if (
|
||||
event.action === 'add_field' ||
|
||||
event.action === 'remove_field'
|
||||
@@ -1029,7 +1067,14 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
setOverlapGraph(overlappingTablesInDiagram);
|
||||
}
|
||||
},
|
||||
[overlapGraph, setOverlapGraph, getNode, nodes, filteredSchemas]
|
||||
[
|
||||
overlapGraph,
|
||||
setOverlapGraph,
|
||||
getNode,
|
||||
nodes,
|
||||
filteredSchemas,
|
||||
setNodes,
|
||||
]
|
||||
);
|
||||
|
||||
events.useSubscription(eventConsumer);
|
||||
@@ -1162,6 +1207,34 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
})}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
{highlightedCustomType ? (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<span>
|
||||
<Button
|
||||
variant="secondary"
|
||||
className="size-8 border border-yellow-400 bg-yellow-200 p-1 shadow-none hover:bg-yellow-300 dark:border-yellow-700 dark:bg-yellow-800 dark:hover:bg-yellow-700"
|
||||
onClick={() =>
|
||||
highlightCustomTypeId(
|
||||
undefined
|
||||
)
|
||||
}
|
||||
>
|
||||
<Highlighter className="size-4" />
|
||||
</Button>
|
||||
</span>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
{t(
|
||||
'toolbar.custom_type_highlight_tooltip',
|
||||
{
|
||||
typeName:
|
||||
highlightedCustomType.name,
|
||||
}
|
||||
)}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
) : null}
|
||||
</>
|
||||
) : null}
|
||||
|
||||
@@ -1228,6 +1301,25 @@ export const Canvas: React.FC<CanvasProps> = ({ initialTables }) => {
|
||||
</Button>
|
||||
</Controls>
|
||||
) : null}
|
||||
{isLostInCanvas ? (
|
||||
<Controls
|
||||
position={
|
||||
isDesktop ? 'bottom-center' : 'top-center'
|
||||
}
|
||||
orientation="horizontal"
|
||||
showZoom={false}
|
||||
showFitView={false}
|
||||
showInteractive={false}
|
||||
className="!shadow-none"
|
||||
style={{
|
||||
[isDesktop ? 'bottom' : 'top']: isDesktop
|
||||
? '70px'
|
||||
: '70px',
|
||||
}}
|
||||
>
|
||||
<ShowAllButton />
|
||||
</Controls>
|
||||
) : null}
|
||||
<Controls
|
||||
position={isDesktop ? 'bottom-center' : 'top-center'}
|
||||
orientation="horizontal"
|
||||
|
||||
65
src/pages/editor-page/canvas/show-all-button.tsx
Normal file
65
src/pages/editor-page/canvas/show-all-button.tsx
Normal file
@@ -0,0 +1,65 @@
|
||||
import React, { useCallback, useEffect, useState } from 'react';
|
||||
import { Button } from '@/components/button/button';
|
||||
import { Info } from 'lucide-react';
|
||||
import { cn } from '@/lib/utils';
|
||||
import { useCanvas } from '@/hooks/use-canvas';
|
||||
|
||||
export interface ShowAllButtonProps {}
|
||||
|
||||
export const ShowAllButton: React.FC<ShowAllButtonProps> = () => {
|
||||
const { fitView } = useCanvas();
|
||||
const [visible, setVisible] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
const timer = setTimeout(() => {
|
||||
setVisible(true);
|
||||
}, 300);
|
||||
|
||||
return () => clearTimeout(timer);
|
||||
}, []);
|
||||
|
||||
const showAll = useCallback(() => {
|
||||
fitView({
|
||||
duration: 500,
|
||||
padding: 0.1,
|
||||
maxZoom: 0.8,
|
||||
});
|
||||
}, [fitView]);
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
'transition-all duration-300 ease-in-out',
|
||||
visible
|
||||
? 'translate-y-0 opacity-100'
|
||||
: 'pointer-events-none translate-y-4 opacity-0'
|
||||
)}
|
||||
>
|
||||
<div className="sm:hidden">
|
||||
<Button
|
||||
onClick={showAll}
|
||||
size="sm"
|
||||
className="h-fit rounded-lg bg-slate-900 px-4 py-1.5 text-xs text-white shadow-lg hover:bg-slate-800 dark:bg-slate-700 dark:hover:bg-slate-600"
|
||||
>
|
||||
Show All
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
<div className="hidden items-center gap-2 rounded-lg bg-slate-900 px-3 py-2 shadow-lg sm:flex">
|
||||
<div className="flex size-6 items-center justify-center rounded-full bg-pink-600">
|
||||
<Info className="size-4 text-white" />
|
||||
</div>
|
||||
<span className="text-sm text-white">
|
||||
Your content is out of view
|
||||
</span>
|
||||
<Button
|
||||
onClick={showAll}
|
||||
size="sm"
|
||||
className="ml-2 h-fit rounded-lg bg-slate-700 px-4 py-1.5 text-xs text-white hover:bg-slate-600 dark:hover:bg-slate-800"
|
||||
>
|
||||
Show All
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -21,7 +21,7 @@ import {
|
||||
SquarePlus,
|
||||
Trash2,
|
||||
} from 'lucide-react';
|
||||
import type { DBField } from '@/lib/domain/db-field';
|
||||
import { generateDBFieldSuffix, type DBField } from '@/lib/domain/db-field';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import { cn } from '@/lib/utils';
|
||||
import {
|
||||
@@ -32,6 +32,7 @@ import {
|
||||
import { useClickAway, useKeyPressEvent } from 'react-use';
|
||||
import { Input } from '@/components/input/input';
|
||||
import { useDiff } from '@/context/diff-context/use-diff';
|
||||
import { useLocalConfig } from '@/hooks/use-local-config';
|
||||
|
||||
export const LEFT_HANDLE_ID_PREFIX = 'left_rel_';
|
||||
export const RIGHT_HANDLE_ID_PREFIX = 'right_rel_';
|
||||
@@ -59,6 +60,10 @@ const arePropsEqual = (
|
||||
prevProps.field.unique === nextProps.field.unique &&
|
||||
prevProps.field.type.id === nextProps.field.type.id &&
|
||||
prevProps.field.type.name === nextProps.field.type.name &&
|
||||
prevProps.field.characterMaximumLength ===
|
||||
nextProps.field.characterMaximumLength &&
|
||||
prevProps.field.precision === nextProps.field.precision &&
|
||||
prevProps.field.scale === nextProps.field.scale &&
|
||||
prevProps.focused === nextProps.focused &&
|
||||
prevProps.highlighted === nextProps.highlighted &&
|
||||
prevProps.visible === nextProps.visible &&
|
||||
@@ -69,8 +74,13 @@ const arePropsEqual = (
|
||||
|
||||
export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
({ field, focused, tableNodeId, highlighted, visible, isConnectable }) => {
|
||||
const { removeField, relationships, readonly, updateField } =
|
||||
useChartDB();
|
||||
const {
|
||||
removeField,
|
||||
relationships,
|
||||
readonly,
|
||||
updateField,
|
||||
highlightedCustomType,
|
||||
} = useChartDB();
|
||||
const [editMode, setEditMode] = useState(false);
|
||||
const [fieldName, setFieldName] = useState(field.name);
|
||||
const inputRef = React.useRef<HTMLInputElement>(null);
|
||||
@@ -142,7 +152,13 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
checkIfNewField,
|
||||
getFieldNewName,
|
||||
getFieldNewType,
|
||||
getFieldNewNullable,
|
||||
getFieldNewPrimaryKey,
|
||||
getFieldNewCharacterMaximumLength,
|
||||
getFieldNewPrecision,
|
||||
getFieldNewScale,
|
||||
checkIfFieldHasChange,
|
||||
isSummaryOnly,
|
||||
} = useDiff();
|
||||
|
||||
const [diffState, setDiffState] = useState<{
|
||||
@@ -150,12 +166,22 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
isDiffNewField: boolean;
|
||||
fieldDiffChangedName: string | null;
|
||||
fieldDiffChangedType: DBField['type'] | null;
|
||||
fieldDiffChangedNullable: boolean | null;
|
||||
fieldDiffChangedCharacterMaximumLength: string | null;
|
||||
fieldDiffChangedScale: number | null;
|
||||
fieldDiffChangedPrecision: number | null;
|
||||
fieldDiffChangedPrimaryKey: boolean | null;
|
||||
isDiffFieldChanged: boolean;
|
||||
}>({
|
||||
isDiffFieldRemoved: false,
|
||||
isDiffNewField: false,
|
||||
fieldDiffChangedName: null,
|
||||
fieldDiffChangedType: null,
|
||||
fieldDiffChangedNullable: null,
|
||||
fieldDiffChangedCharacterMaximumLength: null,
|
||||
fieldDiffChangedScale: null,
|
||||
fieldDiffChangedPrecision: null,
|
||||
fieldDiffChangedPrimaryKey: null,
|
||||
isDiffFieldChanged: false,
|
||||
});
|
||||
|
||||
@@ -173,6 +199,22 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
fieldDiffChangedType: getFieldNewType({
|
||||
fieldId: field.id,
|
||||
}),
|
||||
fieldDiffChangedNullable: getFieldNewNullable({
|
||||
fieldId: field.id,
|
||||
}),
|
||||
fieldDiffChangedPrimaryKey: getFieldNewPrimaryKey({
|
||||
fieldId: field.id,
|
||||
}),
|
||||
fieldDiffChangedCharacterMaximumLength:
|
||||
getFieldNewCharacterMaximumLength({
|
||||
fieldId: field.id,
|
||||
}),
|
||||
fieldDiffChangedScale: getFieldNewScale({
|
||||
fieldId: field.id,
|
||||
}),
|
||||
fieldDiffChangedPrecision: getFieldNewPrecision({
|
||||
fieldId: field.id,
|
||||
}),
|
||||
isDiffFieldChanged: checkIfFieldHasChange({
|
||||
fieldId: field.id,
|
||||
tableId: tableNodeId,
|
||||
@@ -185,7 +227,12 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
checkIfNewField,
|
||||
getFieldNewName,
|
||||
getFieldNewType,
|
||||
getFieldNewPrimaryKey,
|
||||
getFieldNewNullable,
|
||||
checkIfFieldHasChange,
|
||||
getFieldNewCharacterMaximumLength,
|
||||
getFieldNewPrecision,
|
||||
getFieldNewScale,
|
||||
field.id,
|
||||
tableNodeId,
|
||||
]);
|
||||
@@ -196,6 +243,11 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
fieldDiffChangedName,
|
||||
fieldDiffChangedType,
|
||||
isDiffFieldChanged,
|
||||
fieldDiffChangedNullable,
|
||||
fieldDiffChangedPrimaryKey,
|
||||
fieldDiffChangedCharacterMaximumLength,
|
||||
fieldDiffChangedScale,
|
||||
fieldDiffChangedPrecision,
|
||||
} = diffState;
|
||||
|
||||
const enterEditMode = useCallback((e: React.MouseEvent) => {
|
||||
@@ -203,17 +255,27 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
setEditMode(true);
|
||||
}, []);
|
||||
|
||||
const isCustomTypeHighlighted = useMemo(() => {
|
||||
if (!highlightedCustomType) return false;
|
||||
return field.type.name === highlightedCustomType.name;
|
||||
}, [highlightedCustomType, field.type.name]);
|
||||
const { showFieldAttributes } = useLocalConfig();
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
'group relative flex h-8 items-center justify-between gap-1 border-t px-3 text-sm last:rounded-b-[6px] hover:bg-slate-100 dark:hover:bg-slate-800',
|
||||
'transition-all duration-200 ease-in-out',
|
||||
{
|
||||
'bg-pink-100 dark:bg-pink-900': highlighted,
|
||||
'bg-pink-100 dark:bg-pink-900':
|
||||
highlighted && !isCustomTypeHighlighted,
|
||||
'bg-yellow-100 dark:bg-yellow-900':
|
||||
isCustomTypeHighlighted,
|
||||
'max-h-8 opacity-100': visible,
|
||||
'z-0 max-h-0 overflow-hidden opacity-0': !visible,
|
||||
'bg-sky-200 dark:bg-sky-800 hover:bg-sky-100 dark:hover:bg-sky-900 border-sky-300 dark:border-sky-700':
|
||||
isDiffFieldChanged &&
|
||||
!isSummaryOnly &&
|
||||
!isDiffFieldRemoved &&
|
||||
!isDiffNewField,
|
||||
'bg-red-200 dark:bg-red-800 hover:bg-red-100 dark:hover:bg-red-900 border-red-300 dark:border-red-700':
|
||||
@@ -267,7 +329,7 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
)}
|
||||
<div
|
||||
className={cn(
|
||||
'flex items-center gap-1 truncate text-left',
|
||||
'flex items-center gap-1 min-w-0 flex-1 text-left',
|
||||
{
|
||||
'font-semibold': field.primaryKey || field.unique,
|
||||
'w-full': editMode,
|
||||
@@ -278,7 +340,7 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
<SquareMinus className="size-3.5 text-red-800 dark:text-red-200" />
|
||||
) : isDiffNewField ? (
|
||||
<SquarePlus className="size-3.5 text-green-800 dark:text-green-200" />
|
||||
) : isDiffFieldChanged ? (
|
||||
) : isDiffFieldChanged && !isSummaryOnly ? (
|
||||
<SquareDot className="size-3.5 shrink-0 text-sky-800 dark:text-sky-200" />
|
||||
) : null}
|
||||
{editMode && !readonly ? (
|
||||
@@ -303,20 +365,15 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
</Button>
|
||||
</>
|
||||
) : (
|
||||
// <span
|
||||
// className="truncate"
|
||||
// onClick={readonly ? undefined : enterEditMode}
|
||||
// >
|
||||
// {field.name}
|
||||
// </span>
|
||||
<span
|
||||
className={cn('truncate', {
|
||||
className={cn('truncate min-w-0', {
|
||||
'text-red-800 font-normal dark:text-red-200':
|
||||
isDiffFieldRemoved,
|
||||
'text-green-800 font-normal dark:text-green-200':
|
||||
isDiffNewField,
|
||||
'text-sky-800 font-normal dark:text-sky-200':
|
||||
isDiffFieldChanged &&
|
||||
!isSummaryOnly &&
|
||||
!isDiffFieldRemoved &&
|
||||
!isDiffNewField,
|
||||
})}
|
||||
@@ -333,7 +390,6 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
)}
|
||||
</span>
|
||||
)}
|
||||
{/* <span className="truncate">{field.name}</span> */}
|
||||
{field.comments && !editMode ? (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
@@ -346,8 +402,10 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
) : null}
|
||||
</div>
|
||||
{editMode ? null : (
|
||||
<div className="flex max-w-[35%] justify-end gap-1.5 truncate hover:shrink-0">
|
||||
{field.primaryKey ? (
|
||||
<div className="ml-2 flex shrink-0 items-center justify-end gap-1.5">
|
||||
{(field.primaryKey &&
|
||||
fieldDiffChangedPrimaryKey === null) ||
|
||||
fieldDiffChangedPrimaryKey ? (
|
||||
<div
|
||||
className={cn(
|
||||
'text-muted-foreground',
|
||||
@@ -359,6 +417,7 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
? 'text-green-800 dark:text-green-200'
|
||||
: '',
|
||||
isDiffFieldChanged &&
|
||||
!isSummaryOnly &&
|
||||
!isDiffFieldRemoved &&
|
||||
!isDiffNewField
|
||||
? 'text-sky-800 dark:text-sky-200'
|
||||
@@ -371,7 +430,8 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
|
||||
<div
|
||||
className={cn(
|
||||
'content-center truncate text-right text-xs text-muted-foreground',
|
||||
'content-center text-right text-xs text-muted-foreground overflow-hidden max-w-[8rem]',
|
||||
field.primaryKey ? 'min-w-0' : 'min-w-[3rem]',
|
||||
!readonly ? 'group-hover:hidden' : '',
|
||||
isDiffFieldRemoved
|
||||
? 'text-red-800 dark:text-red-200'
|
||||
@@ -381,22 +441,56 @@ export const TableNodeField: React.FC<TableNodeFieldProps> = React.memo(
|
||||
: '',
|
||||
isDiffFieldChanged &&
|
||||
!isDiffFieldRemoved &&
|
||||
!isSummaryOnly &&
|
||||
!isDiffNewField
|
||||
? 'text-sky-800 dark:text-sky-200'
|
||||
: ''
|
||||
)}
|
||||
>
|
||||
{fieldDiffChangedType ? (
|
||||
<>
|
||||
<span className="line-through">
|
||||
{field.type.name.split(' ')[0]}
|
||||
</span>{' '}
|
||||
{fieldDiffChangedType.name.split(' ')[0]}
|
||||
</>
|
||||
) : (
|
||||
field.type.name.split(' ')[0]
|
||||
)}
|
||||
{field.nullable ? '?' : ''}
|
||||
<span className="block truncate">
|
||||
{fieldDiffChangedType ? (
|
||||
<>
|
||||
<span className="line-through">
|
||||
{field.type.name.split(' ')[0]}
|
||||
</span>{' '}
|
||||
{
|
||||
fieldDiffChangedType.name.split(
|
||||
' '
|
||||
)[0]
|
||||
}
|
||||
</>
|
||||
) : (
|
||||
`${field.type.name.split(' ')[0]}${
|
||||
showFieldAttributes
|
||||
? generateDBFieldSuffix({
|
||||
...field,
|
||||
...{
|
||||
precision:
|
||||
fieldDiffChangedPrecision ??
|
||||
field.precision,
|
||||
scale:
|
||||
fieldDiffChangedScale ??
|
||||
field.scale,
|
||||
characterMaximumLength:
|
||||
fieldDiffChangedCharacterMaximumLength ??
|
||||
field.characterMaximumLength,
|
||||
},
|
||||
})
|
||||
: ''
|
||||
}`
|
||||
)}
|
||||
{fieldDiffChangedNullable !== null ? (
|
||||
fieldDiffChangedNullable ? (
|
||||
<span className="font-semibold">?</span>
|
||||
) : (
|
||||
<span className="line-through">?</span>
|
||||
)
|
||||
) : field.nullable ? (
|
||||
'?'
|
||||
) : (
|
||||
''
|
||||
)}
|
||||
</span>
|
||||
</div>
|
||||
{readonly ? null : (
|
||||
<div className="hidden flex-row group-hover:flex">
|
||||
|
||||
@@ -53,6 +53,7 @@ export type TableNodeType = Node<
|
||||
table: DBTable;
|
||||
isOverlapping: boolean;
|
||||
highlightOverlappingTables?: boolean;
|
||||
hasHighlightedCustomType?: boolean;
|
||||
},
|
||||
'table'
|
||||
>;
|
||||
@@ -62,7 +63,12 @@ export const TableNode: React.FC<NodeProps<TableNodeType>> = React.memo(
|
||||
selected,
|
||||
dragging,
|
||||
id,
|
||||
data: { table, isOverlapping, highlightOverlappingTables },
|
||||
data: {
|
||||
table,
|
||||
isOverlapping,
|
||||
highlightOverlappingTables,
|
||||
hasHighlightedCustomType,
|
||||
},
|
||||
}) => {
|
||||
const { updateTable, relationships, readonly } = useChartDB();
|
||||
const edges = useStore((store) => store.edges) as EdgeType[];
|
||||
@@ -80,6 +86,7 @@ export const TableNode: React.FC<NodeProps<TableNodeType>> = React.memo(
|
||||
checkIfTableHasChange,
|
||||
checkIfNewTable,
|
||||
checkIfTableRemoved,
|
||||
isSummaryOnly,
|
||||
} = useDiff();
|
||||
|
||||
const fields = useMemo(() => table.fields, [table.fields]);
|
||||
@@ -303,7 +310,13 @@ export const TableNode: React.FC<NodeProps<TableNodeType>> = React.memo(
|
||||
highlightOverlappingTables && isOverlapping
|
||||
? 'animate-scale-2'
|
||||
: '',
|
||||
isDiffTableChanged && !isDiffNewTable && !isDiffTableRemoved
|
||||
hasHighlightedCustomType
|
||||
? 'ring-2 ring-offset-slate-50 dark:ring-offset-slate-900 ring-yellow-500 ring-offset-2 animate-scale'
|
||||
: '',
|
||||
isDiffTableChanged &&
|
||||
!isSummaryOnly &&
|
||||
!isDiffNewTable &&
|
||||
!isDiffTableRemoved
|
||||
? 'outline outline-[3px] outline-sky-500 dark:outline-sky-900 outline-offset-[5px]'
|
||||
: '',
|
||||
isDiffNewTable
|
||||
@@ -317,6 +330,8 @@ export const TableNode: React.FC<NodeProps<TableNodeType>> = React.memo(
|
||||
selected,
|
||||
isOverlapping,
|
||||
highlightOverlappingTables,
|
||||
hasHighlightedCustomType,
|
||||
isSummaryOnly,
|
||||
isDiffTableChanged,
|
||||
isDiffNewTable,
|
||||
isDiffTableRemoved,
|
||||
@@ -353,7 +368,7 @@ export const TableNode: React.FC<NodeProps<TableNodeType>> = React.memo(
|
||||
? 'new'
|
||||
: isDiffTableRemoved
|
||||
? 'removed'
|
||||
: isDiffTableChanged
|
||||
: isDiffTableChanged && !isSummaryOnly
|
||||
? 'changed'
|
||||
: 'none'
|
||||
}
|
||||
@@ -386,7 +401,7 @@ export const TableNode: React.FC<NodeProps<TableNodeType>> = React.memo(
|
||||
Table Removed
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
) : isDiffTableChanged ? (
|
||||
) : isDiffTableChanged && !isSummaryOnly ? (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<SquareDot
|
||||
@@ -422,7 +437,7 @@ export const TableNode: React.FC<NodeProps<TableNodeType>> = React.memo(
|
||||
<Label className="flex h-5 flex-col justify-center truncate rounded-sm bg-red-200 px-2 py-0.5 text-sm font-normal text-red-900 dark:bg-red-800 dark:text-red-200">
|
||||
{table.name}
|
||||
</Label>
|
||||
) : isDiffTableChanged ? (
|
||||
) : isDiffTableChanged && !isSummaryOnly ? (
|
||||
<Label className="flex h-5 flex-col justify-center truncate rounded-sm bg-sky-200 px-2 py-0.5 text-sm font-normal text-sky-900 dark:bg-sky-800 dark:text-sky-200">
|
||||
{table.name}
|
||||
</Label>
|
||||
|
||||
@@ -14,7 +14,6 @@ import { useTranslation } from 'react-i18next';
|
||||
import { Button } from '@/components/button/button';
|
||||
import { keyboardShortcutsForOS } from '@/context/keyboard-shortcuts-context/keyboard-shortcuts';
|
||||
import { KeyboardShortcutAction } from '@/context/keyboard-shortcuts-context/keyboard-shortcuts';
|
||||
import { useIsLostInCanvas } from '../hooks/use-is-lost-in-canvas';
|
||||
import { useCanvas } from '@/hooks/use-canvas';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import { cn } from '@/lib/utils';
|
||||
@@ -30,7 +29,6 @@ export const Toolbar: React.FC<ToolbarProps> = () => {
|
||||
const { redo, undo, hasRedo, hasUndo } = useHistory();
|
||||
const { getZoom, zoomIn, zoomOut, fitView } = useReactFlow();
|
||||
const [zoom, setZoom] = useState<string>(convertToPercentage(getZoom()));
|
||||
const { isLostInCanvas } = useIsLostInCanvas();
|
||||
const { setShowFilter } = useCanvas();
|
||||
const { hiddenTableIds } = useChartDB();
|
||||
|
||||
@@ -106,14 +104,7 @@ export const Toolbar: React.FC<ToolbarProps> = () => {
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<span>
|
||||
<ToolbarButton
|
||||
onClick={showAll}
|
||||
className={
|
||||
isLostInCanvas
|
||||
? 'bg-pink-500 text-white hover:bg-pink-600 hover:text-white'
|
||||
: ''
|
||||
}
|
||||
>
|
||||
<ToolbarButton onClick={showAll}>
|
||||
<Scan />
|
||||
</ToolbarButton>
|
||||
</span>
|
||||
|
||||
@@ -16,11 +16,17 @@ import {
|
||||
customTypeKindToLabel,
|
||||
DBCustomTypeKind,
|
||||
} from '@/lib/domain/db-custom-type';
|
||||
import { Trash2, Braces } from 'lucide-react';
|
||||
import React, { useCallback } from 'react';
|
||||
import { Trash2, Braces, Highlighter } from 'lucide-react';
|
||||
import React, { useCallback, useMemo } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { CustomTypeEnumValues } from './enum-values/enum-values';
|
||||
import { CustomTypeCompositeFields } from './composite-fields/composite-fields';
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
TooltipTrigger,
|
||||
} from '@/components/tooltip/tooltip';
|
||||
import { checkIfCustomTypeUsed } from '../utils';
|
||||
|
||||
export interface CustomTypeListItemContentProps {
|
||||
customType: DBCustomType;
|
||||
@@ -29,7 +35,13 @@ export interface CustomTypeListItemContentProps {
|
||||
export const CustomTypeListItemContent: React.FC<
|
||||
CustomTypeListItemContentProps
|
||||
> = ({ customType }) => {
|
||||
const { removeCustomType, updateCustomType } = useChartDB();
|
||||
const {
|
||||
removeCustomType,
|
||||
updateCustomType,
|
||||
highlightedCustomType,
|
||||
highlightCustomTypeId,
|
||||
tables,
|
||||
} = useChartDB();
|
||||
const { t } = useTranslation();
|
||||
|
||||
const deleteCustomTypeHandler = useCallback(() => {
|
||||
@@ -92,6 +104,43 @@ export const CustomTypeListItemContent: React.FC<
|
||||
[customType.id, updateCustomType]
|
||||
);
|
||||
|
||||
const toggleHighlightCustomType = useCallback(() => {
|
||||
if (highlightedCustomType?.id === customType.id) {
|
||||
highlightCustomTypeId(undefined);
|
||||
} else {
|
||||
highlightCustomTypeId(customType.id);
|
||||
}
|
||||
}, [customType.id, highlightCustomTypeId, highlightedCustomType?.id]);
|
||||
|
||||
const canHighlight = useMemo(
|
||||
() => checkIfCustomTypeUsed({ customType, tables }),
|
||||
[customType, tables]
|
||||
);
|
||||
|
||||
const isHighlighted = useMemo(
|
||||
() => highlightedCustomType?.id === customType.id,
|
||||
[highlightedCustomType, customType.id]
|
||||
);
|
||||
|
||||
const renderHighlightButton = useCallback(
|
||||
() => (
|
||||
<Button
|
||||
variant="ghost"
|
||||
disabled={!canHighlight}
|
||||
className="flex h-8 w-full items-center justify-center p-2 text-xs"
|
||||
onClick={toggleHighlightCustomType}
|
||||
>
|
||||
<Highlighter className="mr-1 size-3.5" />
|
||||
{t(
|
||||
isHighlighted
|
||||
? 'side_panel.custom_types_section.custom_type.custom_type_actions.clear_field_highlight'
|
||||
: 'side_panel.custom_types_section.custom_type.custom_type_actions.highlight_fields'
|
||||
)}
|
||||
</Button>
|
||||
),
|
||||
[isHighlighted, canHighlight, toggleHighlightCustomType, t]
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="my-1 flex flex-col rounded-b-md px-1">
|
||||
<div className="flex flex-col gap-6">
|
||||
@@ -148,10 +197,24 @@ export const CustomTypeListItemContent: React.FC<
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
<div className="flex flex-1 items-center justify-center pt-2">
|
||||
<div className="flex flex-col items-center justify-center pt-2">
|
||||
{!canHighlight ? (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<span>{renderHighlightButton()}</span>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
{t(
|
||||
'side_panel.custom_types_section.custom_type.no_fields_tooltip'
|
||||
)}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
) : (
|
||||
renderHighlightButton()
|
||||
)}
|
||||
<Button
|
||||
variant="ghost"
|
||||
className="h-8 p-2 text-xs"
|
||||
className="flex h-8 w-full items-center justify-center p-2 text-xs"
|
||||
onClick={deleteCustomTypeHandler}
|
||||
>
|
||||
<Trash2 className="mr-1 size-3.5 text-red-700" />
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import React, { useCallback } from 'react';
|
||||
import React, { useCallback, useMemo } from 'react';
|
||||
import {
|
||||
GripVertical,
|
||||
Pencil,
|
||||
EllipsisVertical,
|
||||
Trash2,
|
||||
Check,
|
||||
Highlighter,
|
||||
} from 'lucide-react';
|
||||
import { ListItemHeaderButton } from '@/pages/editor-page/side-panel/list-item-header-button/list-item-header-button';
|
||||
import { Input } from '@/components/input/input';
|
||||
@@ -32,6 +33,7 @@ import {
|
||||
type DBCustomType,
|
||||
} from '@/lib/domain/db-custom-type';
|
||||
import { Badge } from '@/components/badge/badge';
|
||||
import { checkIfCustomTypeUsed } from '../utils';
|
||||
|
||||
export interface CustomTypeListItemHeaderProps {
|
||||
customType: DBCustomType;
|
||||
@@ -40,8 +42,15 @@ export interface CustomTypeListItemHeaderProps {
|
||||
export const CustomTypeListItemHeader: React.FC<
|
||||
CustomTypeListItemHeaderProps
|
||||
> = ({ customType }) => {
|
||||
const { updateCustomType, removeCustomType, schemas, filteredSchemas } =
|
||||
useChartDB();
|
||||
const {
|
||||
updateCustomType,
|
||||
removeCustomType,
|
||||
schemas,
|
||||
filteredSchemas,
|
||||
highlightedCustomType,
|
||||
highlightCustomTypeId,
|
||||
tables,
|
||||
} = useChartDB();
|
||||
const { t } = useTranslation();
|
||||
const [editMode, setEditMode] = React.useState(false);
|
||||
const [customTypeName, setCustomTypeName] = React.useState(customType.name);
|
||||
@@ -71,12 +80,40 @@ export const CustomTypeListItemHeader: React.FC<
|
||||
setEditMode(true);
|
||||
};
|
||||
|
||||
const deleteCustomTypeHandler = useCallback(() => {
|
||||
removeCustomType(customType.id);
|
||||
}, [customType.id, removeCustomType]);
|
||||
const deleteCustomTypeHandler = useCallback(
|
||||
(e: React.MouseEvent<HTMLDivElement, MouseEvent>) => {
|
||||
e.stopPropagation();
|
||||
|
||||
const renderDropDownMenu = useCallback(
|
||||
() => (
|
||||
removeCustomType(customType.id);
|
||||
},
|
||||
[customType.id, removeCustomType]
|
||||
);
|
||||
|
||||
const isHighlighted = useMemo(
|
||||
() => highlightedCustomType?.id === customType.id,
|
||||
[highlightedCustomType, customType.id]
|
||||
);
|
||||
|
||||
const toggleHighlightCustomType = useCallback(
|
||||
(e: React.MouseEvent<HTMLDivElement, MouseEvent>) => {
|
||||
e.stopPropagation();
|
||||
|
||||
if (isHighlighted) {
|
||||
highlightCustomTypeId(undefined);
|
||||
} else {
|
||||
highlightCustomTypeId(customType.id);
|
||||
}
|
||||
},
|
||||
[customType.id, highlightCustomTypeId, isHighlighted]
|
||||
);
|
||||
|
||||
const canHighlight = useMemo(
|
||||
() => checkIfCustomTypeUsed({ customType, tables }),
|
||||
[customType, tables]
|
||||
);
|
||||
|
||||
const renderDropDownMenu = useCallback(() => {
|
||||
return (
|
||||
<DropdownMenu>
|
||||
<DropdownMenuTrigger>
|
||||
<ListItemHeaderButton>
|
||||
@@ -91,6 +128,18 @@ export const CustomTypeListItemHeader: React.FC<
|
||||
</DropdownMenuLabel>
|
||||
<DropdownMenuSeparator />
|
||||
<DropdownMenuGroup>
|
||||
<DropdownMenuItem
|
||||
onClick={toggleHighlightCustomType}
|
||||
disabled={!canHighlight}
|
||||
className="flex justify-between"
|
||||
>
|
||||
{t(
|
||||
isHighlighted
|
||||
? 'side_panel.custom_types_section.custom_type.custom_type_actions.clear_field_highlight'
|
||||
: 'side_panel.custom_types_section.custom_type.custom_type_actions.highlight_fields'
|
||||
)}
|
||||
<Highlighter className="size-3.5" />
|
||||
</DropdownMenuItem>
|
||||
<DropdownMenuItem
|
||||
onClick={deleteCustomTypeHandler}
|
||||
className="flex justify-between !text-red-700"
|
||||
@@ -103,9 +152,14 @@ export const CustomTypeListItemHeader: React.FC<
|
||||
</DropdownMenuGroup>
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
),
|
||||
[deleteCustomTypeHandler, t]
|
||||
);
|
||||
);
|
||||
}, [
|
||||
deleteCustomTypeHandler,
|
||||
t,
|
||||
toggleHighlightCustomType,
|
||||
canHighlight,
|
||||
isHighlighted,
|
||||
]);
|
||||
|
||||
let schemaToDisplay;
|
||||
|
||||
|
||||
@@ -0,0 +1,20 @@
|
||||
import type { DBCustomType, DBTable } from '@/lib/domain';
|
||||
|
||||
export const checkIfCustomTypeUsed = ({
|
||||
customType,
|
||||
tables,
|
||||
}: {
|
||||
customType: DBCustomType;
|
||||
tables: DBTable[];
|
||||
}): boolean => {
|
||||
const typeNameToFind = customType.name;
|
||||
|
||||
for (const table of tables) {
|
||||
for (const field of table.fields) {
|
||||
if (field.type.name === typeNameToFind) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
@@ -1,4 +1,10 @@
|
||||
import React, { useMemo, useState, useEffect } from 'react';
|
||||
import React, {
|
||||
useMemo,
|
||||
useState,
|
||||
useEffect,
|
||||
useCallback,
|
||||
useRef,
|
||||
} from 'react';
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import { useTheme } from '@/hooks/use-theme';
|
||||
@@ -7,8 +13,28 @@ import type { EffectiveTheme } from '@/context/theme-context/theme-context';
|
||||
import type { Diagram } from '@/lib/domain/diagram';
|
||||
import { useToast } from '@/components/toast/use-toast';
|
||||
import { setupDBMLLanguage } from '@/components/code-snippet/languages/dbml-language';
|
||||
import { ArrowLeftRight } from 'lucide-react';
|
||||
import {
|
||||
AlertCircle,
|
||||
ArrowLeftRight,
|
||||
Check,
|
||||
Pencil,
|
||||
PencilOff,
|
||||
Undo2,
|
||||
X,
|
||||
} from 'lucide-react';
|
||||
import { generateDBMLFromDiagram } from '@/lib/dbml/dbml-export/dbml-export';
|
||||
import { useDiff } from '@/context/diff-context/use-diff';
|
||||
import { importDBMLToDiagram } from '@/lib/dbml/dbml-import/dbml-import';
|
||||
import { applyDBMLChanges } from '@/lib/dbml/apply-dbml/apply-dbml';
|
||||
import { useDebounce } from '@/hooks/use-debounce';
|
||||
import { parseDBMLError } from '@/lib/dbml/dbml-import/dbml-import-error';
|
||||
import {
|
||||
clearErrorHighlight,
|
||||
highlightErrorLine,
|
||||
} from '@/components/code-snippet/dbml/utils';
|
||||
import type * as monaco from 'monaco-editor';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useFullScreenLoader } from '@/hooks/use-full-screen-spinner';
|
||||
|
||||
export interface TableDBMLProps {
|
||||
filteredTables: DBTable[];
|
||||
@@ -18,20 +44,87 @@ const getEditorTheme = (theme: EffectiveTheme) => {
|
||||
return theme === 'dark' ? 'dbml-dark' : 'dbml-light';
|
||||
};
|
||||
|
||||
export const TableDBML: React.FC<TableDBMLProps> = ({ filteredTables }) => {
|
||||
const { currentDiagram } = useChartDB();
|
||||
export const TableDBML: React.FC<TableDBMLProps> = () => {
|
||||
const { currentDiagram, updateDiagramData, databaseType } = useChartDB();
|
||||
const { effectiveTheme } = useTheme();
|
||||
const { toast } = useToast();
|
||||
const [dbmlFormat, setDbmlFormat] = useState<'inline' | 'standard'>(
|
||||
'inline'
|
||||
);
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
const [standardDbml, setStandardDbml] = useState('');
|
||||
const [inlineDbml, setInlineDbml] = useState('');
|
||||
const isMountedRef = useRef(true);
|
||||
const [isEditButtonEmphasized, setIsEditButtonEmphasized] = useState(false);
|
||||
|
||||
// --- Effect for handling empty field name warnings ---
|
||||
const editorRef = useRef<monaco.editor.IStandaloneCodeEditor>();
|
||||
const decorationsCollection =
|
||||
useRef<monaco.editor.IEditorDecorationsCollection>();
|
||||
|
||||
const handleEditorDidMount = useCallback(
|
||||
(editor: monaco.editor.IStandaloneCodeEditor) => {
|
||||
editorRef.current = editor;
|
||||
decorationsCollection.current =
|
||||
editor.createDecorationsCollection();
|
||||
|
||||
if (readOnlyDisposableRef.current) {
|
||||
readOnlyDisposableRef.current.dispose();
|
||||
}
|
||||
|
||||
const readOnlyDisposable = editor.onDidAttemptReadOnlyEdit(() => {
|
||||
if (emphasisTimeoutRef.current) {
|
||||
clearTimeout(emphasisTimeoutRef.current);
|
||||
}
|
||||
|
||||
setIsEditButtonEmphasized(false);
|
||||
|
||||
requestAnimationFrame(() => {
|
||||
setIsEditButtonEmphasized(true);
|
||||
|
||||
emphasisTimeoutRef.current = setTimeout(() => {
|
||||
setIsEditButtonEmphasized(false);
|
||||
}, 600);
|
||||
});
|
||||
});
|
||||
|
||||
readOnlyDisposableRef.current = readOnlyDisposable;
|
||||
},
|
||||
[]
|
||||
);
|
||||
|
||||
// Determine which DBML string to display
|
||||
const dbmlToDisplay = useMemo(
|
||||
() => (dbmlFormat === 'inline' ? inlineDbml : standardDbml),
|
||||
[dbmlFormat, inlineDbml, standardDbml]
|
||||
);
|
||||
|
||||
// Toggle function
|
||||
const toggleFormat = useCallback(() => {
|
||||
setDbmlFormat((prev) => (prev === 'inline' ? 'standard' : 'inline'));
|
||||
}, []);
|
||||
|
||||
const [isEditMode, setIsEditMode] = useState(false);
|
||||
const [editedDbml, setEditedDbml] = useState<string>('');
|
||||
const lastDBMLChange = useRef(editedDbml);
|
||||
const { calculateDiff, originalDiagram, resetDiff, hasDiff, newDiagram } =
|
||||
useDiff();
|
||||
const { loadDiagramFromData } = useChartDB();
|
||||
const [errorMessage, setErrorMessage] = useState<string>();
|
||||
const [warningMessage, setWarningMessage] = useState<string>();
|
||||
const { t } = useTranslation();
|
||||
const { hideLoader, showLoader } = useFullScreenLoader();
|
||||
const emphasisTimeoutRef = useRef<NodeJS.Timeout>();
|
||||
const readOnlyDisposableRef = useRef<monaco.IDisposable>();
|
||||
|
||||
// --- Check for empty field name warnings only on mount ---
|
||||
useEffect(() => {
|
||||
// Only check when not in edit mode
|
||||
if (isEditMode) return;
|
||||
|
||||
let foundInvalidFields = false;
|
||||
const invalidTableNames = new Set<string>();
|
||||
|
||||
filteredTables.forEach((table) => {
|
||||
currentDiagram.tables?.forEach((table) => {
|
||||
table.fields.forEach((field) => {
|
||||
if (field.name === '') {
|
||||
foundInvalidFields = true;
|
||||
@@ -42,70 +135,307 @@ export const TableDBML: React.FC<TableDBMLProps> = ({ filteredTables }) => {
|
||||
|
||||
if (foundInvalidFields) {
|
||||
const tableNamesString = Array.from(invalidTableNames).join(', ');
|
||||
toast({
|
||||
title: 'Warning',
|
||||
description: `Some fields had empty names in tables: [${tableNamesString}] and were excluded from the DBML export.`,
|
||||
variant: 'default',
|
||||
});
|
||||
setWarningMessage(
|
||||
`Some fields had empty names in tables: [${tableNamesString}] and were excluded from the DBML export.`
|
||||
);
|
||||
}
|
||||
}, [filteredTables, toast]); // Depend on filteredTables and toast
|
||||
}, [currentDiagram.tables, t, isEditMode]);
|
||||
|
||||
// Generate both standard and inline DBML formats
|
||||
const { standardDbml, inlineDbml } = useMemo(() => {
|
||||
// Create a filtered diagram with only the selected tables
|
||||
const filteredDiagram: Diagram = {
|
||||
...currentDiagram,
|
||||
tables: filteredTables,
|
||||
};
|
||||
|
||||
const result = generateDBMLFromDiagram(filteredDiagram);
|
||||
|
||||
// Handle errors
|
||||
if (result.error) {
|
||||
toast({
|
||||
title: 'DBML Export Error',
|
||||
description: `Could not generate DBML: ${result.error.substring(0, 100)}${result.error.length > 100 ? '...' : ''}`,
|
||||
variant: 'destructive',
|
||||
});
|
||||
useEffect(() => {
|
||||
if (isEditMode) {
|
||||
setIsLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
return {
|
||||
standardDbml: result.standardDbml,
|
||||
inlineDbml: result.inlineDbml,
|
||||
setErrorMessage(undefined);
|
||||
clearErrorHighlight(decorationsCollection.current);
|
||||
|
||||
const generateDBML = async () => {
|
||||
setIsLoading(true);
|
||||
|
||||
const result = generateDBMLFromDiagram(currentDiagram);
|
||||
|
||||
// Handle errors
|
||||
if (result.error) {
|
||||
toast({
|
||||
title: 'DBML Export Error',
|
||||
description: `Could not generate DBML: ${result.error.substring(0, 100)}${result.error.length > 100 ? '...' : ''}`,
|
||||
variant: 'destructive',
|
||||
});
|
||||
}
|
||||
|
||||
setStandardDbml(result.standardDbml);
|
||||
setInlineDbml(result.inlineDbml);
|
||||
setIsLoading(false);
|
||||
};
|
||||
}, [currentDiagram, filteredTables, toast]);
|
||||
|
||||
// Determine which DBML string to display
|
||||
const dbmlToDisplay = dbmlFormat === 'inline' ? inlineDbml : standardDbml;
|
||||
setTimeout(() => generateDBML(), 0);
|
||||
}, [currentDiagram, toast, isEditMode]);
|
||||
|
||||
// Toggle function
|
||||
const toggleFormat = () => {
|
||||
setDbmlFormat((prev) => (prev === 'inline' ? 'standard' : 'inline'));
|
||||
};
|
||||
// Update editedDbml when dbmlToDisplay changes
|
||||
useEffect(() => {
|
||||
if (!isLoading && dbmlToDisplay && !isEditMode) {
|
||||
setEditedDbml(dbmlToDisplay);
|
||||
lastDBMLChange.current = dbmlToDisplay;
|
||||
}
|
||||
}, [dbmlToDisplay, isLoading, isEditMode]);
|
||||
|
||||
// Create the showDiff function
|
||||
const showDiff = useCallback(
|
||||
async (dbmlContent: string) => {
|
||||
clearErrorHighlight(decorationsCollection.current);
|
||||
setErrorMessage(undefined);
|
||||
try {
|
||||
const diagramFromDBML: Diagram = await importDBMLToDiagram(
|
||||
dbmlContent,
|
||||
{ databaseType }
|
||||
);
|
||||
|
||||
const sourceDiagram: Diagram =
|
||||
originalDiagram ?? currentDiagram;
|
||||
|
||||
const targetDiagram: Diagram = {
|
||||
...sourceDiagram,
|
||||
tables: diagramFromDBML.tables,
|
||||
relationships: diagramFromDBML.relationships,
|
||||
customTypes: diagramFromDBML.customTypes,
|
||||
};
|
||||
|
||||
const newDiagram = applyDBMLChanges({
|
||||
sourceDiagram,
|
||||
targetDiagram,
|
||||
});
|
||||
|
||||
if (originalDiagram) {
|
||||
resetDiff();
|
||||
loadDiagramFromData(originalDiagram);
|
||||
}
|
||||
|
||||
calculateDiff({
|
||||
diagram: sourceDiagram,
|
||||
newDiagram,
|
||||
options: { summaryOnly: true },
|
||||
});
|
||||
} catch (error) {
|
||||
const dbmlError = parseDBMLError(error);
|
||||
|
||||
if (dbmlError) {
|
||||
highlightErrorLine({
|
||||
error: dbmlError,
|
||||
model: editorRef.current?.getModel(),
|
||||
editorDecorationsCollection:
|
||||
decorationsCollection.current,
|
||||
});
|
||||
|
||||
setErrorMessage(
|
||||
t('import_dbml_dialog.error.description') +
|
||||
` (1 error found - in line ${dbmlError.line})`
|
||||
);
|
||||
}
|
||||
}
|
||||
},
|
||||
[
|
||||
t,
|
||||
originalDiagram,
|
||||
currentDiagram,
|
||||
resetDiff,
|
||||
loadDiagramFromData,
|
||||
calculateDiff,
|
||||
databaseType,
|
||||
]
|
||||
);
|
||||
|
||||
const debouncedShowDiff = useDebounce(showDiff, 1000);
|
||||
|
||||
useEffect(() => {
|
||||
if (!isEditMode || !editedDbml) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Only calculate diff if the DBML has changed
|
||||
if (editedDbml === lastDBMLChange.current) {
|
||||
return;
|
||||
}
|
||||
|
||||
lastDBMLChange.current = editedDbml;
|
||||
|
||||
debouncedShowDiff(editedDbml);
|
||||
}, [editedDbml, isEditMode, debouncedShowDiff]);
|
||||
|
||||
const acceptChanges = useCallback(async () => {
|
||||
if (!editedDbml) return;
|
||||
if (!newDiagram) return;
|
||||
|
||||
showLoader();
|
||||
|
||||
await updateDiagramData(newDiagram, { forceUpdateStorage: true });
|
||||
|
||||
resetDiff();
|
||||
setEditedDbml(editedDbml);
|
||||
setIsEditMode(false);
|
||||
lastDBMLChange.current = editedDbml;
|
||||
hideLoader();
|
||||
}, [
|
||||
editedDbml,
|
||||
updateDiagramData,
|
||||
newDiagram,
|
||||
resetDiff,
|
||||
showLoader,
|
||||
hideLoader,
|
||||
]);
|
||||
|
||||
const undoChanges = useCallback(() => {
|
||||
if (!editedDbml) return;
|
||||
if (!originalDiagram) return;
|
||||
|
||||
loadDiagramFromData(originalDiagram);
|
||||
setIsEditMode(false);
|
||||
resetDiff();
|
||||
setEditedDbml(dbmlToDisplay);
|
||||
lastDBMLChange.current = dbmlToDisplay;
|
||||
}, [
|
||||
editedDbml,
|
||||
loadDiagramFromData,
|
||||
originalDiagram,
|
||||
resetDiff,
|
||||
dbmlToDisplay,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
isMountedRef.current = true;
|
||||
|
||||
return () => {
|
||||
isMountedRef.current = false;
|
||||
|
||||
if (emphasisTimeoutRef.current) {
|
||||
clearTimeout(emphasisTimeoutRef.current);
|
||||
}
|
||||
|
||||
if (readOnlyDisposableRef.current) {
|
||||
readOnlyDisposableRef.current.dispose();
|
||||
readOnlyDisposableRef.current = undefined;
|
||||
}
|
||||
};
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
const currentUndoChanges = undoChanges;
|
||||
|
||||
return () => {
|
||||
setTimeout(() => {
|
||||
if (!isMountedRef.current) {
|
||||
currentUndoChanges();
|
||||
}
|
||||
}, 0);
|
||||
};
|
||||
}, [undoChanges]);
|
||||
|
||||
return (
|
||||
<CodeSnippet
|
||||
code={dbmlToDisplay}
|
||||
className="my-0.5"
|
||||
actions={[
|
||||
{
|
||||
label: `Show ${dbmlFormat === 'inline' ? 'Standard' : 'Inline'} Refs`,
|
||||
icon: ArrowLeftRight,
|
||||
onClick: toggleFormat,
|
||||
},
|
||||
]}
|
||||
editorProps={{
|
||||
height: '100%',
|
||||
defaultLanguage: 'dbml',
|
||||
beforeMount: setupDBMLLanguage,
|
||||
loading: false,
|
||||
theme: getEditorTheme(effectiveTheme),
|
||||
options: {
|
||||
wordWrap: 'off',
|
||||
mouseWheelZoom: false,
|
||||
domReadOnly: true,
|
||||
},
|
||||
}}
|
||||
/>
|
||||
<>
|
||||
<CodeSnippet
|
||||
code={editedDbml}
|
||||
loading={isLoading}
|
||||
actionsTooltipSide="right"
|
||||
className="my-0.5"
|
||||
allowCopy={!isEditMode}
|
||||
actions={
|
||||
isEditMode && hasDiff
|
||||
? [
|
||||
{
|
||||
label: 'Accept Changes',
|
||||
icon: Check,
|
||||
onClick: acceptChanges,
|
||||
className:
|
||||
'h-7 items-center gap-1.5 rounded-md border border-green-200 bg-green-50 px-2.5 py-1.5 text-xs font-medium text-green-600 shadow-sm hover:bg-green-100 dark:border-green-800 dark:bg-green-800 dark:text-green-200 dark:hover:bg-green-700',
|
||||
},
|
||||
{
|
||||
label: 'Undo Changes',
|
||||
icon: Undo2,
|
||||
onClick: undoChanges,
|
||||
className:
|
||||
'h-7 items-center gap-1.5 rounded-md border border-red-200 bg-red-50 px-2.5 py-1.5 text-xs font-medium text-red-600 shadow-sm hover:bg-red-100 dark:border-red-800 dark:bg-red-800 dark:text-red-200 dark:hover:bg-red-700',
|
||||
},
|
||||
]
|
||||
: isEditMode && !hasDiff
|
||||
? [
|
||||
{
|
||||
label: 'View',
|
||||
icon: PencilOff,
|
||||
onClick: () =>
|
||||
setIsEditMode((prev) => !prev),
|
||||
},
|
||||
]
|
||||
: [
|
||||
{
|
||||
label: `Show ${dbmlFormat === 'inline' ? 'Standard' : 'Inline'} Refs`,
|
||||
icon: ArrowLeftRight,
|
||||
onClick: toggleFormat,
|
||||
},
|
||||
{
|
||||
label: 'Edit',
|
||||
icon: Pencil,
|
||||
onClick: () =>
|
||||
setIsEditMode((prev) => !prev),
|
||||
className: isEditButtonEmphasized
|
||||
? 'dbml-edit-button-emphasis'
|
||||
: undefined,
|
||||
},
|
||||
]
|
||||
}
|
||||
editorProps={{
|
||||
height: '100%',
|
||||
defaultLanguage: 'dbml',
|
||||
beforeMount: setupDBMLLanguage,
|
||||
theme: getEditorTheme(effectiveTheme),
|
||||
onMount: handleEditorDidMount,
|
||||
options: {
|
||||
wordWrap: 'off',
|
||||
mouseWheelZoom: false,
|
||||
readOnly: !isEditMode,
|
||||
},
|
||||
onChange: (value) => {
|
||||
setEditedDbml(value ?? '');
|
||||
},
|
||||
}}
|
||||
/>
|
||||
{warningMessage ? (
|
||||
<div className="my-2 rounded-md border border-blue-200 bg-blue-50 p-3 dark:border-blue-900/50 dark:bg-blue-950/20">
|
||||
<div className="flex items-start gap-2">
|
||||
<AlertCircle className="mt-0.5 size-4 shrink-0 text-blue-600 dark:text-blue-400" />
|
||||
<div className="flex-1">
|
||||
<p className="text-sm font-medium text-blue-800 dark:text-blue-200">
|
||||
Warning
|
||||
</p>
|
||||
<p className="mt-0.5 text-xs text-blue-700 dark:text-blue-300">
|
||||
{warningMessage}
|
||||
</p>
|
||||
</div>
|
||||
<button
|
||||
onClick={() => setWarningMessage(undefined)}
|
||||
className="rounded p-0.5 text-blue-600 hover:bg-blue-100 dark:text-blue-400 dark:hover:bg-blue-900/50"
|
||||
aria-label="Close warning"
|
||||
>
|
||||
<X className="size-3.5" />
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
) : null}
|
||||
{errorMessage ? (
|
||||
<div className="my-2 rounded-md border border-orange-200 bg-orange-50 p-3 dark:border-orange-900/50 dark:bg-orange-950/20">
|
||||
<div className="flex gap-2">
|
||||
<AlertCircle className="mt-0.5 size-4 shrink-0 text-orange-600 dark:text-orange-400" />
|
||||
<div className="flex-1">
|
||||
<p className="text-sm font-medium text-orange-800 dark:text-orange-200">
|
||||
Syntax Error
|
||||
</p>
|
||||
<p className="mt-0.5 text-xs text-orange-700 dark:text-orange-300">
|
||||
{errorMessage ||
|
||||
t('import_dbml_dialog.error.description')}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
) : null}
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import React, { useEffect, useRef, useCallback } from 'react';
|
||||
import React, { useEffect, useRef, useCallback, useMemo } from 'react';
|
||||
import { Ellipsis, Trash2 } from 'lucide-react';
|
||||
import { Input } from '@/components/input/input';
|
||||
import { Button } from '@/components/button/button';
|
||||
import { Separator } from '@/components/separator/separator';
|
||||
import type { DBField } from '@/lib/domain/db-field';
|
||||
import type { FieldAttributeRange } from '@/lib/data/data-types/data-types';
|
||||
import { findDataTypeDataById } from '@/lib/data/data-types/data-types';
|
||||
import {
|
||||
Popover,
|
||||
@@ -16,15 +17,28 @@ import { useTranslation } from 'react-i18next';
|
||||
import { Textarea } from '@/components/textarea/textarea';
|
||||
import { useDebounce } from '@/hooks/use-debounce';
|
||||
import equal from 'fast-deep-equal';
|
||||
import type { DatabaseType, DBTable } from '@/lib/domain';
|
||||
|
||||
import {
|
||||
Select,
|
||||
SelectContent,
|
||||
SelectItem,
|
||||
SelectTrigger,
|
||||
SelectValue,
|
||||
} from '@/components/select/select';
|
||||
|
||||
export interface TableFieldPopoverProps {
|
||||
field: DBField;
|
||||
table: DBTable;
|
||||
databaseType: DatabaseType;
|
||||
updateField: (attrs: Partial<DBField>) => void;
|
||||
removeField: () => void;
|
||||
}
|
||||
|
||||
export const TableFieldPopover: React.FC<TableFieldPopoverProps> = ({
|
||||
field,
|
||||
table,
|
||||
databaseType,
|
||||
updateField,
|
||||
removeField,
|
||||
}) => {
|
||||
@@ -32,6 +46,19 @@ export const TableFieldPopover: React.FC<TableFieldPopoverProps> = ({
|
||||
const [localField, setLocalField] = React.useState<DBField>(field);
|
||||
const [isOpen, setIsOpen] = React.useState(false);
|
||||
|
||||
// Check if this field is the only primary key in the table
|
||||
const isOnlyPrimaryKey = React.useMemo(() => {
|
||||
if (!field.primaryKey) return false;
|
||||
|
||||
// Early exit if we find another primary key
|
||||
for (const f of table.fields) {
|
||||
if (f.id !== field.id && f.primaryKey) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}, [table.fields, field.primaryKey, field.id]);
|
||||
|
||||
useEffect(() => {
|
||||
setLocalField(field);
|
||||
}, [field]);
|
||||
@@ -52,6 +79,8 @@ export const TableFieldPopover: React.FC<TableFieldPopoverProps> = ({
|
||||
debouncedUpdateField({
|
||||
comments: localField.comments,
|
||||
characterMaximumLength: localField.characterMaximumLength,
|
||||
precision: localField.precision,
|
||||
scale: localField.scale,
|
||||
unique: localField.unique,
|
||||
default: localField.default,
|
||||
});
|
||||
@@ -59,6 +88,11 @@ export const TableFieldPopover: React.FC<TableFieldPopoverProps> = ({
|
||||
prevFieldRef.current = localField;
|
||||
}, [localField, debouncedUpdateField, isOpen]);
|
||||
|
||||
const dataFieldType = useMemo(
|
||||
() => findDataTypeDataById(field.type.id, databaseType),
|
||||
[field.type.id, databaseType]
|
||||
);
|
||||
|
||||
return (
|
||||
<Popover
|
||||
open={isOpen}
|
||||
@@ -94,7 +128,7 @@ export const TableFieldPopover: React.FC<TableFieldPopoverProps> = ({
|
||||
</Label>
|
||||
<Checkbox
|
||||
checked={localField.unique}
|
||||
disabled={field.primaryKey}
|
||||
disabled={isOnlyPrimaryKey}
|
||||
onCheckedChange={(value) =>
|
||||
setLocalField((current) => ({
|
||||
...current,
|
||||
@@ -123,8 +157,7 @@ export const TableFieldPopover: React.FC<TableFieldPopoverProps> = ({
|
||||
className="w-full rounded-md bg-muted text-sm"
|
||||
/>
|
||||
</div>
|
||||
{findDataTypeDataById(field.type.id)
|
||||
?.hasCharMaxLength ? (
|
||||
{dataFieldType?.fieldAttributes?.hasCharMaxLength ? (
|
||||
<div className="flex flex-col gap-2">
|
||||
<Label
|
||||
htmlFor="width"
|
||||
@@ -134,20 +167,209 @@ export const TableFieldPopover: React.FC<TableFieldPopoverProps> = ({
|
||||
'side_panel.tables_section.table.field_actions.character_length'
|
||||
)}
|
||||
</Label>
|
||||
<Input
|
||||
value={
|
||||
localField.characterMaximumLength ?? ''
|
||||
}
|
||||
type="number"
|
||||
onChange={(e) =>
|
||||
setLocalField((current) => ({
|
||||
...current,
|
||||
characterMaximumLength:
|
||||
e.target.value,
|
||||
}))
|
||||
}
|
||||
className="w-full rounded-md bg-muted text-sm"
|
||||
/>
|
||||
{dataFieldType?.fieldAttributes
|
||||
?.hasCharMaxLengthOption ? (
|
||||
<div className="flex gap-2">
|
||||
<Select
|
||||
value={
|
||||
localField.characterMaximumLength ===
|
||||
'max'
|
||||
? 'max'
|
||||
: localField.characterMaximumLength
|
||||
? 'custom'
|
||||
: 'none'
|
||||
}
|
||||
onValueChange={(value) => {
|
||||
if (value === 'max') {
|
||||
setLocalField(
|
||||
(current) => ({
|
||||
...current,
|
||||
characterMaximumLength:
|
||||
'max',
|
||||
})
|
||||
);
|
||||
} else if (value === 'custom') {
|
||||
setLocalField(
|
||||
(current) => ({
|
||||
...current,
|
||||
characterMaximumLength:
|
||||
'255',
|
||||
})
|
||||
);
|
||||
} else {
|
||||
setLocalField(
|
||||
(current) => ({
|
||||
...current,
|
||||
characterMaximumLength:
|
||||
null,
|
||||
})
|
||||
);
|
||||
}
|
||||
}}
|
||||
>
|
||||
<SelectTrigger className="w-full bg-muted">
|
||||
<SelectValue placeholder="Select length" />
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
<SelectItem value="none">
|
||||
No length
|
||||
</SelectItem>
|
||||
<SelectItem value="max">
|
||||
MAX
|
||||
</SelectItem>
|
||||
<SelectItem value="custom">
|
||||
Custom
|
||||
</SelectItem>
|
||||
</SelectContent>
|
||||
</Select>
|
||||
{localField.characterMaximumLength &&
|
||||
localField.characterMaximumLength !==
|
||||
'max' ? (
|
||||
<Input
|
||||
value={
|
||||
localField.characterMaximumLength
|
||||
}
|
||||
type="number"
|
||||
min="1"
|
||||
max={
|
||||
dataFieldType
|
||||
?.fieldAttributes
|
||||
?.maxLength || undefined
|
||||
}
|
||||
onChange={(e) =>
|
||||
setLocalField(
|
||||
(current) => ({
|
||||
...current,
|
||||
characterMaximumLength:
|
||||
e.target.value,
|
||||
})
|
||||
)
|
||||
}
|
||||
className="w-24 rounded-md bg-muted text-sm"
|
||||
/>
|
||||
) : null}
|
||||
</div>
|
||||
) : (
|
||||
<Input
|
||||
value={
|
||||
localField.characterMaximumLength ??
|
||||
''
|
||||
}
|
||||
type="number"
|
||||
onChange={(e) =>
|
||||
setLocalField((current) => ({
|
||||
...current,
|
||||
characterMaximumLength:
|
||||
e.target.value,
|
||||
}))
|
||||
}
|
||||
className="w-full rounded-md bg-muted text-sm"
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
) : null}
|
||||
{dataFieldType?.fieldAttributes?.precision ||
|
||||
dataFieldType?.fieldAttributes?.scale ? (
|
||||
<div className="flex gap-2">
|
||||
<div className="flex flex-1 flex-col gap-2">
|
||||
<Label
|
||||
htmlFor="width"
|
||||
className="text-subtitle"
|
||||
>
|
||||
{t(
|
||||
'side_panel.tables_section.table.field_actions.precision'
|
||||
)}
|
||||
</Label>
|
||||
<Input
|
||||
value={localField.precision ?? ''}
|
||||
type="number"
|
||||
max={
|
||||
dataFieldType?.fieldAttributes
|
||||
?.precision
|
||||
? (
|
||||
dataFieldType
|
||||
?.fieldAttributes
|
||||
?.precision as FieldAttributeRange
|
||||
).max
|
||||
: undefined
|
||||
}
|
||||
min={
|
||||
dataFieldType?.fieldAttributes
|
||||
?.precision
|
||||
? (
|
||||
dataFieldType
|
||||
?.fieldAttributes
|
||||
?.precision as FieldAttributeRange
|
||||
).min
|
||||
: undefined
|
||||
}
|
||||
placeholder={
|
||||
dataFieldType?.fieldAttributes
|
||||
?.precision
|
||||
? `${(dataFieldType?.fieldAttributes?.precision as FieldAttributeRange).default}`
|
||||
: 'Optional'
|
||||
}
|
||||
onChange={(e) =>
|
||||
setLocalField((current) => ({
|
||||
...current,
|
||||
precision: e.target.value
|
||||
? parseInt(e.target.value)
|
||||
: undefined,
|
||||
}))
|
||||
}
|
||||
className="w-full rounded-md bg-muted text-sm"
|
||||
/>
|
||||
</div>
|
||||
<div className="flex flex-1 flex-col gap-2">
|
||||
<Label
|
||||
htmlFor="width"
|
||||
className="text-subtitle"
|
||||
>
|
||||
{t(
|
||||
'side_panel.tables_section.table.field_actions.scale'
|
||||
)}
|
||||
</Label>
|
||||
<Input
|
||||
value={localField.scale ?? ''}
|
||||
max={
|
||||
dataFieldType?.fieldAttributes
|
||||
?.scale
|
||||
? (
|
||||
dataFieldType
|
||||
?.fieldAttributes
|
||||
?.scale as FieldAttributeRange
|
||||
).max
|
||||
: undefined
|
||||
}
|
||||
min={
|
||||
dataFieldType?.fieldAttributes
|
||||
?.scale
|
||||
? (
|
||||
findDataTypeDataById(
|
||||
field.type.id
|
||||
)?.fieldAttributes
|
||||
?.scale as FieldAttributeRange
|
||||
).min
|
||||
: undefined
|
||||
}
|
||||
placeholder={
|
||||
dataFieldType?.fieldAttributes
|
||||
?.scale
|
||||
? `${(dataFieldType?.fieldAttributes?.scale as FieldAttributeRange).default}`
|
||||
: 'Optional'
|
||||
}
|
||||
type="number"
|
||||
onChange={(e) =>
|
||||
setLocalField((current) => ({
|
||||
...current,
|
||||
scale: e.target.value
|
||||
? parseInt(e.target.value)
|
||||
: undefined,
|
||||
}))
|
||||
}
|
||||
className="w-full rounded-md bg-muted text-sm"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
) : null}
|
||||
<div className="flex flex-col gap-2">
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import React, { useCallback, useMemo } from 'react';
|
||||
import { GripVertical, KeyRound } from 'lucide-react';
|
||||
import { Input } from '@/components/input/input';
|
||||
import type { DBField } from '@/lib/domain/db-field';
|
||||
import { generateDBFieldSuffix, type DBField } from '@/lib/domain/db-field';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import type { DataTypeData } from '@/lib/data/data-types/data-types';
|
||||
import {
|
||||
dataTypeDataToDataType,
|
||||
sortedDataTypeMap,
|
||||
@@ -22,14 +23,62 @@ import type {
|
||||
} from '@/components/select-box/select-box';
|
||||
import { SelectBox } from '@/components/select-box/select-box';
|
||||
import { TableFieldPopover } from './table-field-modal/table-field-modal';
|
||||
import type { DBTable } from '@/lib/domain';
|
||||
|
||||
export interface TableFieldProps {
|
||||
table: DBTable;
|
||||
field: DBField;
|
||||
updateField: (attrs: Partial<DBField>) => void;
|
||||
removeField: () => void;
|
||||
}
|
||||
|
||||
const generateFieldRegexPatterns = (
|
||||
dataType: DataTypeData
|
||||
): {
|
||||
regex?: string;
|
||||
extractRegex?: RegExp;
|
||||
} => {
|
||||
if (!dataType.fieldAttributes) {
|
||||
return { regex: undefined, extractRegex: undefined };
|
||||
}
|
||||
|
||||
const typeName = dataType.name;
|
||||
const fieldAttributes = dataType.fieldAttributes;
|
||||
|
||||
if (fieldAttributes.hasCharMaxLength) {
|
||||
if (fieldAttributes.hasCharMaxLengthOption) {
|
||||
return {
|
||||
regex: `^${typeName}\\((\\d+|[mM][aA][xX])\\)$`,
|
||||
extractRegex: /\((\d+|max)\)/i,
|
||||
};
|
||||
}
|
||||
return {
|
||||
regex: `^${typeName}\\(\\d+\\)$`,
|
||||
extractRegex: /\((\d+)\)/,
|
||||
};
|
||||
}
|
||||
|
||||
if (fieldAttributes.precision && fieldAttributes.scale) {
|
||||
return {
|
||||
regex: `^${typeName}\\s*\\(\\s*\\d+\\s*(?:,\\s*\\d+\\s*)?\\)$`,
|
||||
extractRegex: new RegExp(
|
||||
`${typeName}\\s*\\(\\s*(\\d+)\\s*(?:,\\s*(\\d+)\\s*)?\\)`
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (fieldAttributes.precision) {
|
||||
return {
|
||||
regex: `^${typeName}\\s*\\(\\s*\\d+\\s*\\)$`,
|
||||
extractRegex: /\((\d+)\)/,
|
||||
};
|
||||
}
|
||||
|
||||
return { regex: undefined, extractRegex: undefined };
|
||||
};
|
||||
|
||||
export const TableField: React.FC<TableFieldProps> = ({
|
||||
table,
|
||||
field,
|
||||
updateField,
|
||||
removeField,
|
||||
@@ -37,21 +86,30 @@ export const TableField: React.FC<TableFieldProps> = ({
|
||||
const { databaseType, customTypes } = useChartDB();
|
||||
const { t } = useTranslation();
|
||||
|
||||
// Only calculate primary key fields, not just count
|
||||
const primaryKeyFields = useMemo(() => {
|
||||
return table.fields.filter((f) => f.primaryKey);
|
||||
}, [table.fields]);
|
||||
|
||||
const primaryKeyCount = primaryKeyFields.length;
|
||||
|
||||
const { attributes, listeners, setNodeRef, transform, transition } =
|
||||
useSortable({ id: field.id });
|
||||
|
||||
const dataFieldOptions = useMemo(() => {
|
||||
const standardTypes: SelectBoxOption[] = sortedDataTypeMap[
|
||||
databaseType
|
||||
].map((type) => ({
|
||||
label: type.name,
|
||||
value: type.id,
|
||||
regex: type.hasCharMaxLength
|
||||
? `^${type.name}\\(\\d+\\)$`
|
||||
: undefined,
|
||||
extractRegex: type.hasCharMaxLength ? /\((\d+)\)/ : undefined,
|
||||
group: customTypes?.length ? 'Standard Types' : undefined,
|
||||
}));
|
||||
].map((type) => {
|
||||
const regexPatterns = generateFieldRegexPatterns(type);
|
||||
|
||||
return {
|
||||
label: type.name,
|
||||
value: type.id,
|
||||
regex: regexPatterns.regex,
|
||||
extractRegex: regexPatterns.extractRegex,
|
||||
group: customTypes?.length ? 'Standard Types' : undefined,
|
||||
};
|
||||
});
|
||||
|
||||
if (!customTypes?.length) {
|
||||
return standardTypes;
|
||||
@@ -83,18 +141,44 @@ export const TableField: React.FC<TableFieldProps> = ({
|
||||
};
|
||||
|
||||
let characterMaximumLength: string | undefined = undefined;
|
||||
let precision: number | undefined = undefined;
|
||||
let scale: number | undefined = undefined;
|
||||
|
||||
if (regexMatches?.length && dataType?.hasCharMaxLength) {
|
||||
characterMaximumLength = regexMatches[1];
|
||||
} else if (
|
||||
field.characterMaximumLength &&
|
||||
dataType?.hasCharMaxLength
|
||||
) {
|
||||
characterMaximumLength = field.characterMaximumLength;
|
||||
if (regexMatches?.length) {
|
||||
if (dataType?.fieldAttributes?.hasCharMaxLength) {
|
||||
characterMaximumLength = regexMatches[1]?.toLowerCase();
|
||||
} else if (
|
||||
dataType?.fieldAttributes?.precision &&
|
||||
dataType?.fieldAttributes?.scale
|
||||
) {
|
||||
precision = parseInt(regexMatches[1]);
|
||||
scale = regexMatches[2]
|
||||
? parseInt(regexMatches[2])
|
||||
: undefined;
|
||||
} else if (dataType?.fieldAttributes?.precision) {
|
||||
precision = parseInt(regexMatches[1]);
|
||||
}
|
||||
} else {
|
||||
if (
|
||||
dataType?.fieldAttributes?.hasCharMaxLength &&
|
||||
field.characterMaximumLength
|
||||
) {
|
||||
characterMaximumLength = field.characterMaximumLength;
|
||||
}
|
||||
|
||||
if (dataType?.fieldAttributes?.precision && field.precision) {
|
||||
precision = field.precision;
|
||||
}
|
||||
|
||||
if (dataType?.fieldAttributes?.scale && field.scale) {
|
||||
scale = field.scale;
|
||||
}
|
||||
}
|
||||
|
||||
updateField({
|
||||
characterMaximumLength,
|
||||
precision,
|
||||
scale,
|
||||
type: dataTypeDataToDataType(
|
||||
dataType ?? {
|
||||
id: value as string,
|
||||
@@ -103,7 +187,13 @@ export const TableField: React.FC<TableFieldProps> = ({
|
||||
),
|
||||
});
|
||||
},
|
||||
[updateField, databaseType, field.characterMaximumLength]
|
||||
[
|
||||
updateField,
|
||||
databaseType,
|
||||
field.characterMaximumLength,
|
||||
field.precision,
|
||||
field.scale,
|
||||
]
|
||||
);
|
||||
|
||||
const style = {
|
||||
@@ -111,14 +201,50 @@ export const TableField: React.FC<TableFieldProps> = ({
|
||||
transition,
|
||||
};
|
||||
|
||||
const handlePrimaryKeyToggle = useCallback(
|
||||
(value: boolean) => {
|
||||
if (value) {
|
||||
// When setting as primary key
|
||||
const updates: Partial<DBField> = {
|
||||
primaryKey: true,
|
||||
};
|
||||
// Only auto-set unique if this will be the only primary key
|
||||
if (primaryKeyCount === 0) {
|
||||
updates.unique = true;
|
||||
}
|
||||
updateField(updates);
|
||||
} else {
|
||||
// When removing primary key
|
||||
updateField({
|
||||
primaryKey: false,
|
||||
});
|
||||
}
|
||||
},
|
||||
[primaryKeyCount, updateField]
|
||||
);
|
||||
|
||||
const handleNullableToggle = useCallback(
|
||||
(value: boolean) => {
|
||||
updateField({ nullable: value });
|
||||
},
|
||||
[updateField]
|
||||
);
|
||||
|
||||
const handleNameChange = useCallback(
|
||||
(e: React.ChangeEvent<HTMLInputElement>) => {
|
||||
updateField({ name: e.target.value });
|
||||
},
|
||||
[updateField]
|
||||
);
|
||||
|
||||
return (
|
||||
<div
|
||||
className="flex flex-1 touch-none flex-row justify-between p-1"
|
||||
className="flex flex-1 touch-none flex-row justify-between gap-2 p-1"
|
||||
ref={setNodeRef}
|
||||
style={style}
|
||||
{...attributes}
|
||||
>
|
||||
<div className="flex w-8/12 items-center justify-start gap-1 overflow-hidden">
|
||||
<div className="flex flex-1 items-center justify-start gap-1 overflow-hidden">
|
||||
<div
|
||||
className="flex w-4 shrink-0 cursor-move items-center justify-center"
|
||||
{...listeners}
|
||||
@@ -127,7 +253,7 @@ export const TableField: React.FC<TableFieldProps> = ({
|
||||
</div>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<span className="w-5/12">
|
||||
<span className="min-w-0 flex-1">
|
||||
<Input
|
||||
className="h-8 w-full !truncate focus-visible:ring-0"
|
||||
type="text"
|
||||
@@ -135,18 +261,14 @@ export const TableField: React.FC<TableFieldProps> = ({
|
||||
'side_panel.tables_section.table.field_name'
|
||||
)}
|
||||
value={field.name}
|
||||
onChange={(e) =>
|
||||
updateField({
|
||||
name: e.target.value,
|
||||
})
|
||||
}
|
||||
onChange={handleNameChange}
|
||||
/>
|
||||
</span>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>{field.name}</TooltipContent>
|
||||
</Tooltip>
|
||||
<Tooltip>
|
||||
<TooltipTrigger className="flex h-8 !w-5/12" asChild>
|
||||
<TooltipTrigger className="flex h-8 min-w-0 flex-1" asChild>
|
||||
<span>
|
||||
<SelectBox
|
||||
className="flex h-8 min-h-8 w-full"
|
||||
@@ -156,26 +278,14 @@ export const TableField: React.FC<TableFieldProps> = ({
|
||||
'side_panel.tables_section.table.field_type'
|
||||
)}
|
||||
value={field.type.id}
|
||||
valueSuffix={
|
||||
field.characterMaximumLength
|
||||
? `(${field.characterMaximumLength})`
|
||||
: ''
|
||||
valueSuffix={generateDBFieldSuffix(field)}
|
||||
optionSuffix={(option) =>
|
||||
generateDBFieldSuffix(field, {
|
||||
databaseType,
|
||||
forceExtended: true,
|
||||
typeId: option.value,
|
||||
})
|
||||
}
|
||||
optionSuffix={(option) => {
|
||||
const type = sortedDataTypeMap[
|
||||
databaseType
|
||||
].find((v) => v.id === option.value);
|
||||
|
||||
if (!type) {
|
||||
return '';
|
||||
}
|
||||
|
||||
if (type.hasCharMaxLength) {
|
||||
return `(${!field.characterMaximumLength ? 'n' : field.characterMaximumLength})`;
|
||||
}
|
||||
|
||||
return '';
|
||||
}}
|
||||
onChange={onChangeDataType}
|
||||
emptyPlaceholder={t(
|
||||
'side_panel.tables_section.table.no_types_found'
|
||||
@@ -191,17 +301,13 @@ export const TableField: React.FC<TableFieldProps> = ({
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</div>
|
||||
<div className="flex w-4/12 justify-end gap-1 overflow-hidden">
|
||||
<div className="flex shrink-0 items-center justify-end gap-1">
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<span>
|
||||
<TableFieldToggle
|
||||
pressed={field.nullable}
|
||||
onPressedChange={(value) =>
|
||||
updateField({
|
||||
nullable: value,
|
||||
})
|
||||
}
|
||||
onPressedChange={handleNullableToggle}
|
||||
>
|
||||
N
|
||||
</TableFieldToggle>
|
||||
@@ -216,12 +322,7 @@ export const TableField: React.FC<TableFieldProps> = ({
|
||||
<span>
|
||||
<TableFieldToggle
|
||||
pressed={field.primaryKey}
|
||||
onPressedChange={(value) =>
|
||||
updateField({
|
||||
unique: value,
|
||||
primaryKey: value,
|
||||
})
|
||||
}
|
||||
onPressedChange={handlePrimaryKeyToggle}
|
||||
>
|
||||
<KeyRound className="h-3.5" />
|
||||
</TableFieldToggle>
|
||||
@@ -233,8 +334,10 @@ export const TableField: React.FC<TableFieldProps> = ({
|
||||
</Tooltip>
|
||||
<TableFieldPopover
|
||||
field={field}
|
||||
table={table}
|
||||
updateField={updateField}
|
||||
removeField={removeField}
|
||||
databaseType={databaseType}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -56,6 +56,32 @@ export const TableListItemContent: React.FC<TableListItemContentProps> = ({
|
||||
>(['fields']);
|
||||
const sensors = useSensors(useSensor(PointerSensor));
|
||||
|
||||
// Create a memoized version of the field updater that handles primary key logic
|
||||
const handleFieldUpdate = useCallback(
|
||||
(fieldId: string, attrs: Partial<DBField>) => {
|
||||
updateField(table.id, fieldId, attrs);
|
||||
|
||||
// Handle the case when removing a primary key and only one remains
|
||||
if (attrs.primaryKey === false) {
|
||||
const remainingPrimaryKeys = table.fields.filter(
|
||||
(f) => f.id !== fieldId && f.primaryKey
|
||||
);
|
||||
if (remainingPrimaryKeys.length === 1) {
|
||||
// Set the remaining primary key field as unique
|
||||
updateField(
|
||||
table.id,
|
||||
remainingPrimaryKeys[0].id,
|
||||
{
|
||||
unique: true,
|
||||
},
|
||||
{ updateHistory: false }
|
||||
);
|
||||
}
|
||||
}
|
||||
},
|
||||
[table.id, table.fields, updateField]
|
||||
);
|
||||
|
||||
const handleDragEnd = (event: DragEndEvent) => {
|
||||
const { active, over } = event;
|
||||
|
||||
@@ -147,14 +173,9 @@ export const TableListItemContent: React.FC<TableListItemContentProps> = ({
|
||||
<TableField
|
||||
key={field.id}
|
||||
field={field}
|
||||
updateField={(
|
||||
attrs: Partial<DBField>
|
||||
) =>
|
||||
updateField(
|
||||
table.id,
|
||||
field.id,
|
||||
attrs
|
||||
)
|
||||
table={table}
|
||||
updateField={(attrs) =>
|
||||
handleFieldUpdate(field.id, attrs)
|
||||
}
|
||||
removeField={() =>
|
||||
removeField(table.id, field.id)
|
||||
|
||||
@@ -38,6 +38,7 @@ import {
|
||||
} from '@/components/tooltip/tooltip';
|
||||
import { cloneTable } from '@/lib/clone';
|
||||
import type { DBSchema } from '@/lib/domain';
|
||||
import { defaultSchemas } from '@/lib/data/default-schemas';
|
||||
|
||||
export interface TableListItemHeaderProps {
|
||||
table: DBTable;
|
||||
@@ -48,12 +49,14 @@ export const TableListItemHeader: React.FC<TableListItemHeaderProps> = ({
|
||||
}) => {
|
||||
const {
|
||||
updateTable,
|
||||
updateTablesState,
|
||||
removeTable,
|
||||
createIndex,
|
||||
createField,
|
||||
createTable,
|
||||
schemas,
|
||||
filteredSchemas,
|
||||
databaseType,
|
||||
} = useChartDB();
|
||||
const { openTableSchemaDialog } = useDialog();
|
||||
const { t } = useTranslation();
|
||||
@@ -128,9 +131,15 @@ export const TableListItemHeader: React.FC<TableListItemHeaderProps> = ({
|
||||
|
||||
const updateTableSchema = useCallback(
|
||||
({ schema }: { schema: DBSchema }) => {
|
||||
updateTable(table.id, { schema: schema.name });
|
||||
updateTablesState((currentTables) =>
|
||||
currentTables.map((t) =>
|
||||
t.id === table.id || !t.schema
|
||||
? { ...t, schema: schema.name }
|
||||
: t
|
||||
)
|
||||
);
|
||||
},
|
||||
[table.id, updateTable]
|
||||
[table.id, updateTablesState]
|
||||
);
|
||||
|
||||
const changeSchema = useCallback(() => {
|
||||
@@ -138,6 +147,7 @@ export const TableListItemHeader: React.FC<TableListItemHeaderProps> = ({
|
||||
table,
|
||||
schemas,
|
||||
onConfirm: updateTableSchema,
|
||||
allowSchemaCreation: true,
|
||||
});
|
||||
}, [openTableSchemaDialog, table, schemas, updateTableSchema]);
|
||||
|
||||
@@ -170,7 +180,7 @@ export const TableListItemHeader: React.FC<TableListItemHeaderProps> = ({
|
||||
)}
|
||||
</DropdownMenuLabel>
|
||||
<DropdownMenuSeparator />
|
||||
{schemas.length > 0 ? (
|
||||
{schemas.length > 0 || defaultSchemas?.[databaseType] ? (
|
||||
<>
|
||||
<DropdownMenuGroup>
|
||||
<DropdownMenuItem
|
||||
@@ -251,6 +261,7 @@ export const TableListItemHeader: React.FC<TableListItemHeaderProps> = ({
|
||||
t,
|
||||
changeSchema,
|
||||
schemas.length,
|
||||
databaseType,
|
||||
]
|
||||
);
|
||||
|
||||
|
||||
@@ -57,6 +57,8 @@ export const Menu: React.FC<MenuProps> = () => {
|
||||
setScrollAction,
|
||||
setShowCardinality,
|
||||
showCardinality,
|
||||
setShowFieldAttributes,
|
||||
showFieldAttributes,
|
||||
setShowDependenciesOnCanvas,
|
||||
showDependenciesOnCanvas,
|
||||
setShowMiniMapOnCanvas,
|
||||
@@ -137,6 +139,10 @@ export const Menu: React.FC<MenuProps> = () => {
|
||||
setShowCardinality(!showCardinality);
|
||||
}, [showCardinality, setShowCardinality]);
|
||||
|
||||
const showOrHideFieldAttributes = useCallback(() => {
|
||||
setShowFieldAttributes(!showFieldAttributes);
|
||||
}, [showFieldAttributes, setShowFieldAttributes]);
|
||||
|
||||
const showOrHideDependencies = useCallback(() => {
|
||||
setShowDependenciesOnCanvas(!showDependenciesOnCanvas);
|
||||
}, [showDependenciesOnCanvas, setShowDependenciesOnCanvas]);
|
||||
@@ -424,6 +430,11 @@ export const Menu: React.FC<MenuProps> = () => {
|
||||
? t('menu.view.hide_cardinality')
|
||||
: t('menu.view.show_cardinality')}
|
||||
</MenubarItem>
|
||||
<MenubarItem onClick={showOrHideFieldAttributes}>
|
||||
{showFieldAttributes
|
||||
? t('menu.view.hide_field_attributes')
|
||||
: t('menu.view.show_field_attributes')}
|
||||
</MenubarItem>
|
||||
{databaseType !== DatabaseType.CLICKHOUSE &&
|
||||
dependencies &&
|
||||
dependencies.length > 0 ? (
|
||||
|
||||
Reference in New Issue
Block a user