Mirror of https://github.com/chartdb/chartdb.git (synced 2025-11-03 21:43:23 +00:00)
Compare commits
34 Commits

8102f19f79, 840a00ebcd, 181f96d250, ce2389f135, f15dc77f33, caa81c24a6, e3cb62788c, fc46cbb893, d94a71e9e1, cf81253535, 25c4b42538, f7a6e0cb5e, 85275e5dd6, 4e5b467ce5, 874aa5ab75, 0940d72d5d, 0d1739d70f, 60fe0843ac, 794f226209, 2fbf3476b8, 897ac60a82, 18f228ca1d, 14de30b7aa, 3faa39e787, 63b5ba0bb9, 44eac7daff, 502472b083, 52d2ea596c, bd67ccfbcf, 62beb68fa1, 09b1275475, 5dd7fe75d1, 2939320a15, a643852837
.github/workflows/publish.yaml (vendored, 11 lines changed)
@@ -32,7 +32,7 @@ jobs:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Install dependencies
        run: npm ci

@@ -42,6 +42,12 @@ jobs:
      - name: Build project
        run: npm run build

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v4
@@ -50,10 +56,11 @@ jobs:
          tags: |
            type=semver,pattern={{version}}

      - name: Build and push Docker image
      - name: Build and push multi-arch Docker image
        uses: docker/build-push-action@v6
        with:
          context: .
          push: true
          platforms: linux/amd64,linux/arm64
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
CHANGELOG.md (53 lines changed)
@@ -1,5 +1,58 @@
# Changelog

## [1.11.0](https://github.com/chartdb/chartdb/compare/v1.10.0...v1.11.0) (2025-04-17)


### Features

* add sidebar footer help buttons ([#650](https://github.com/chartdb/chartdb/issues/650)) ([fc46cbb](https://github.com/chartdb/chartdb/commit/fc46cbb8933761c7bac3604664f7de812f6f5b6b))
* **import-sql:** import postgresql via SQL (DDL script) ([#639](https://github.com/chartdb/chartdb/issues/639)) ([f7a6e0c](https://github.com/chartdb/chartdb/commit/f7a6e0cb5e4921dd9540739f9da269858e7ca7be))


### Bug Fixes

* **import:** display query result formatted ([#644](https://github.com/chartdb/chartdb/issues/644)) ([caa81c2](https://github.com/chartdb/chartdb/commit/caa81c24a6535bc87129c38622aac5a62a6d479d))
* **import:** strict parse of database metadata ([#635](https://github.com/chartdb/chartdb/issues/635)) ([0940d72](https://github.com/chartdb/chartdb/commit/0940d72d5d3726650213257639f24ba47e729854))
* **mobile:** fix create diagram modal on mobile ([#646](https://github.com/chartdb/chartdb/issues/646)) ([25c4b42](https://github.com/chartdb/chartdb/commit/25c4b4253849575d7a781ed197281e2a35e7184a))
* **mysql-ddl:** update the script to import - for create fks ([#642](https://github.com/chartdb/chartdb/issues/642)) ([cf81253](https://github.com/chartdb/chartdb/commit/cf81253535ca5a3b8a65add78287c1bdb283a1c7))
* **performance:** Import deps dynamically ([#652](https://github.com/chartdb/chartdb/issues/652)) ([e3cb627](https://github.com/chartdb/chartdb/commit/e3cb62788c13f149e35e1a5020191bd43d14b52f))
* remove unused links from help menu ([#623](https://github.com/chartdb/chartdb/issues/623)) ([85275e5](https://github.com/chartdb/chartdb/commit/85275e5dd6e7845f06f682eeceda7932fc87e875))
* **sidebar:** turn sidebar to responsive for mobile ([#658](https://github.com/chartdb/chartdb/issues/658)) ([ce2389f](https://github.com/chartdb/chartdb/commit/ce2389f135d399d82c9848335d31174bac8a3791))

## [1.10.0](https://github.com/chartdb/chartdb/compare/v1.9.0...v1.10.0) (2025-03-25)


### Features

* **cloudflare-d1:** add support to cloudflare-d1 + wrangler cli ([#632](https://github.com/chartdb/chartdb/issues/632)) ([794f226](https://github.com/chartdb/chartdb/commit/794f2262092fbe36e27e92220221ed98cb51ae37))


### Bug Fixes

* **dbml-editor:** dealing with dbml editor for non-generic db-type ([#624](https://github.com/chartdb/chartdb/issues/624)) ([14de30b](https://github.com/chartdb/chartdb/commit/14de30b7aaa0ccaca8372f0213b692266d53f0de))
* **export-sql:** move from AI sql-export for MySQL&MariaDB to deterministic script ([#628](https://github.com/chartdb/chartdb/issues/628)) ([2fbf347](https://github.com/chartdb/chartdb/commit/2fbf3476b87f1177af17de8242a74d195dae5f35))
* **export-sql:** move from AI sql-export for postgres to deterministic script ([#626](https://github.com/chartdb/chartdb/issues/626)) ([18f228c](https://github.com/chartdb/chartdb/commit/18f228ca1d5a6c6056cb7c3bfc24d04ec470edf1))
* **export-sql:** move from AI sql-export for sqlite to deterministic script ([#627](https://github.com/chartdb/chartdb/issues/627)) ([897ac60](https://github.com/chartdb/chartdb/commit/897ac60a829a00e9453d670cceeb2282e9e93f1c))
* **sidebar:** add sidebar for diagram objects ([#618](https://github.com/chartdb/chartdb/issues/618)) ([63b5ba0](https://github.com/chartdb/chartdb/commit/63b5ba0bb9934c4e5c5d0d1b6f995afbbd3acf36))
* **sidebar:** opens sidepanel in case its closed and click on sidebar ([#620](https://github.com/chartdb/chartdb/issues/620)) ([3faa39e](https://github.com/chartdb/chartdb/commit/3faa39e7875d836dfe526d94a10f8aed070ac1c1))

## [1.9.0](https://github.com/chartdb/chartdb/compare/v1.8.1...v1.9.0) (2025-03-13)


### Features

* **canvas:** highlight the Show-All button when No-Tables are visible in the canvas ([#612](https://github.com/chartdb/chartdb/issues/612)) ([62beb68](https://github.com/chartdb/chartdb/commit/62beb68fa1ec22ccd4fe5e59a8ceb9d3e8f6d374))
* **chart max length:** add support for edit char max length ([#613](https://github.com/chartdb/chartdb/issues/613)) ([09b1275](https://github.com/chartdb/chartdb/commit/09b12754757b9625ca287d91a92cf0d83c9e2b89))
* **chart max length:** enable edit length from data type select box ([#616](https://github.com/chartdb/chartdb/issues/616)) ([bd67ccf](https://github.com/chartdb/chartdb/commit/bd67ccfbcf66b919453ca6c0bfd71e16772b3d8e))


### Bug Fixes

* **cardinality:** set true as default ([#583](https://github.com/chartdb/chartdb/issues/583)) ([2939320](https://github.com/chartdb/chartdb/commit/2939320a15a9ccd9eccfe46c26e04ca1edca2420))
* **performance:** Optimize performance of field comments editing ([#610](https://github.com/chartdb/chartdb/issues/610)) ([5dd7fe7](https://github.com/chartdb/chartdb/commit/5dd7fe75d1b0378ba406c75183c5e2356730c3b4))
* remove Buckle dialog ([#617](https://github.com/chartdb/chartdb/issues/617)) ([502472b](https://github.com/chartdb/chartdb/commit/502472b08342be425e66e2b6c94e5fe37ba14aa9))
* **shorcuts:** add shortcut to toggle the theme ([#602](https://github.com/chartdb/chartdb/issues/602)) ([a643852](https://github.com/chartdb/chartdb/commit/a6438528375ab54d3ec7d80ac6b6ddd65ea8cf1e))

## [1.8.1](https://github.com/chartdb/chartdb/compare/v1.8.0...v1.8.1) (2025-03-02)
README.md (40 lines changed)
@@ -30,8 +30,8 @@
  <a href="https://discord.gg/QeFwyWSKwC">
    <img src="https://img.shields.io/discord/1277047413705670678?color=5865F2&label=Discord&logo=discord&logoColor=white" alt="Discord community channel" />
  </a>
  <a href="https://x.com/chartdb_io">
    <img src="https://img.shields.io/twitter/follow/ChartDB?style=social"/>
  <a href="https://x.com/intent/follow?screen_name=jonathanfishner">
    <img src="https://img.shields.io/twitter/follow/jonathanfishner?style=social"/>
  </a>

</h4>
@@ -49,13 +49,13 @@ Instantly visualize your database schema with a single **"Smart Query."** Custom

**What it does**:

- **Instant Schema Import**
  Run a single query to instantly retrieve your database schema as JSON. This makes it incredibly fast to visualize your database schema, whether for documentation, team discussions, or simply understanding your data better.
- **Instant Schema Import**
  Run a single query to instantly retrieve your database schema as JSON. This makes it incredibly fast to visualize your database schema, whether for documentation, team discussions, or simply understanding your data better.

- **AI-Powered Export for Easy Migration**
  Our AI-driven export feature allows you to generate the DDL script in the dialect of your choice. Whether you’re migrating from MySQL to PostgreSQL or from SQLite to MariaDB, ChartDB simplifies the process by providing the necessary scripts tailored to your target database.
- **Interactive Editing**
  Fine-tune your database schema using our intuitive editor. Easily make adjustments or annotations to better visualize complex structures.
- **AI-Powered Export for Easy Migration**
  Our AI-driven export feature allows you to generate the DDL script in the dialect of your choice. Whether you're migrating from MySQL to PostgreSQL or from SQLite to MariaDB, ChartDB simplifies the process by providing the necessary scripts tailored to your target database.
- **Interactive Editing**
  Fine-tune your database schema using our intuitive editor. Easily make adjustments or annotations to better visualize complex structures.

### Status

@@ -63,13 +63,13 @@ ChartDB is currently in Public Beta. Star and watch this repository to get notif

### Supported Databases

- ✅ PostgreSQL (<img src="./src/assets/postgresql_logo_2.png" width="15"/> + <img src="./src/assets/supabase.png" alt="Supabase" width="15"/> + <img src="./src/assets/timescale.png" alt="Timescale" width="15"/> )
- ✅ MySQL
- ✅ SQL Server
- ✅ MariaDB
- ✅ SQLite
- ✅ CockroachDB
- ✅ ClickHouse
- ✅ PostgreSQL (<img src="./src/assets/postgresql_logo_2.png" width="15"/> + <img src="./src/assets/supabase.png" alt="Supabase" width="15"/> + <img src="./src/assets/timescale.png" alt="Timescale" width="15"/> )
- ✅ MySQL
- ✅ SQL Server
- ✅ MariaDB
- ✅ SQLite (<img src="./src/assets/sqlite_logo_2.png" width="15"/> + <img src="./src/assets/cloudflare_d1.png" alt="Cloudflare D1" width="15"/> Cloudflare D1)
- ✅ CockroachDB
- ✅ ClickHouse

## Getting Started

@@ -91,17 +91,19 @@ npm run build

Or like this if you want to have AI capabilities:

```
```bash
npm install
VITE_OPENAI_API_KEY=<YOUR_OPEN_AI_KEY> npm run build
```

### Run the Docker Container

```bash
docker run -e OPENAI_API_KEY=<YOUR_OPEN_AI_KEY> -p 8080:80 ghcr.io/chartdb/chartdb:latest
```

#### Build and Run locally

```bash
docker build -t chartdb .
docker run -e OPENAI_API_KEY=<YOUR_OPEN_AI_KEY> -p 8080:80 chartdb
@@ -145,9 +147,9 @@ VITE_LLM_MODEL_NAME=Qwen/Qwen2.5-32B-Instruct-AWQ

## 💚 Community & Support

- [Discord](https://discord.gg/QeFwyWSKwC) (For live discussion with the community and the ChartDB team)
- [GitHub Issues](https://github.com/chartdb/chartdb/issues) (For any bugs and errors you encounter using ChartDB)
- [Twitter](https://x.com/chartdb_io) (Get news fast)
- [Discord](https://discord.gg/QeFwyWSKwC) (For live discussion with the community and the ChartDB team)
- [GitHub Issues](https://github.com/chartdb/chartdb/issues) (For any bugs and errors you encounter using ChartDB)
- [Twitter](https://x.com/intent/follow?screen_name=jonathanfishner) (Get news fast)

## Contributing
package-lock.json (generated, 482 lines changed)
@@ -1,12 +1,12 @@
{
  "name": "chartdb",
  "version": "1.8.1",
  "version": "1.11.0",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "name": "chartdb",
      "version": "1.8.1",
      "version": "1.11.0",
      "dependencies": {
        "@ai-sdk/openai": "^0.0.51",
        "@dbml/core": "^3.9.5",
@@ -18,7 +18,7 @@
        "@radix-ui/react-checkbox": "^1.1.1",
        "@radix-ui/react-collapsible": "^1.1.0",
        "@radix-ui/react-context-menu": "^2.2.1",
        "@radix-ui/react-dialog": "^1.1.1",
        "@radix-ui/react-dialog": "^1.1.6",
        "@radix-ui/react-dropdown-menu": "^2.1.1",
        "@radix-ui/react-hover-card": "^1.1.1",
        "@radix-ui/react-icons": "^1.3.0",
@@ -27,18 +27,18 @@
        "@radix-ui/react-popover": "^1.1.1",
        "@radix-ui/react-scroll-area": "1.2.0",
        "@radix-ui/react-select": "^2.1.1",
        "@radix-ui/react-separator": "^1.1.0",
        "@radix-ui/react-slot": "^1.1.1",
        "@radix-ui/react-separator": "^1.1.2",
        "@radix-ui/react-slot": "^1.1.2",
        "@radix-ui/react-tabs": "^1.1.0",
        "@radix-ui/react-toast": "^1.2.1",
        "@radix-ui/react-toggle": "^1.1.0",
        "@radix-ui/react-toggle-group": "^1.1.0",
        "@radix-ui/react-tooltip": "^1.1.2",
        "@radix-ui/react-tooltip": "^1.1.8",
        "@uidotdev/usehooks": "^2.4.1",
        "@xyflow/react": "^12.3.1",
        "ahooks": "^3.8.1",
        "ai": "^3.3.14",
        "class-variance-authority": "^0.7.0",
        "class-variance-authority": "^0.7.1",
        "clsx": "^2.1.1",
        "cmdk": "^1.0.0",
        "dexie": "^4.0.8",
The remaining hunks of this generated lockfile (@@ -1834, @@ -1969, @@ -2028, @@ -2295, @@ -2364, @@ -2467, @@ -2658, @@ -2682, @@ -2818) adjust the resolved entries under node_modules to match: @radix-ui/react-dialog moves from 1.1.5 to 1.1.6, @radix-ui/react-separator from 1.1.1 to 1.1.2, @radix-ui/react-slot from 1.1.1 to 1.1.2, and @radix-ui/react-tooltip from 1.1.7 to 1.1.8, each pulling newer pinned helpers (react-dismissable-layer 1.1.5, react-focus-scope 1.1.2, react-portal 1.1.4, react-popper 1.2.2, react-primitive 2.0.2, react-arrow 1.1.2, react-visually-hidden 1.1.2, react-remove-scroll ^2.6.3), while packages that still depend on the older primitives (react-alert-dialog, react-collection, react-menu, react-popover, react-primitive, react-select) gain nested @radix-ui/react-slot 1.1.1 and @radix-ui/react-dialog 1.1.5 entries.
package.json (12 lines changed)
@@ -1,7 +1,7 @@
{
    "name": "chartdb",
    "private": true,
    "version": "1.8.1",
    "version": "1.11.0",
    "type": "module",
    "scripts": {
        "dev": "vite",
@@ -22,7 +22,7 @@
        "@radix-ui/react-checkbox": "^1.1.1",
        "@radix-ui/react-collapsible": "^1.1.0",
        "@radix-ui/react-context-menu": "^2.2.1",
        "@radix-ui/react-dialog": "^1.1.1",
        "@radix-ui/react-dialog": "^1.1.6",
        "@radix-ui/react-dropdown-menu": "^2.1.1",
        "@radix-ui/react-hover-card": "^1.1.1",
        "@radix-ui/react-icons": "^1.3.0",
@@ -31,18 +31,18 @@
        "@radix-ui/react-popover": "^1.1.1",
        "@radix-ui/react-scroll-area": "1.2.0",
        "@radix-ui/react-select": "^2.1.1",
        "@radix-ui/react-separator": "^1.1.0",
        "@radix-ui/react-slot": "^1.1.1",
        "@radix-ui/react-separator": "^1.1.2",
        "@radix-ui/react-slot": "^1.1.2",
        "@radix-ui/react-tabs": "^1.1.0",
        "@radix-ui/react-toast": "^1.2.1",
        "@radix-ui/react-toggle": "^1.1.0",
        "@radix-ui/react-toggle-group": "^1.1.0",
        "@radix-ui/react-tooltip": "^1.1.2",
        "@radix-ui/react-tooltip": "^1.1.8",
        "@uidotdev/usehooks": "^2.4.1",
        "@xyflow/react": "^12.3.1",
        "ahooks": "^3.8.1",
        "ai": "^3.3.14",
        "class-variance-authority": "^0.7.0",
        "class-variance-authority": "^0.7.1",
        "clsx": "^2.1.1",
        "cmdk": "^1.0.0",
        "dexie": "^4.0.8",
src/assets/cloudflare_d1.png (new binary file, 937 B; not shown)
@@ -1,2 +1,3 @@
import './config.ts';
export { Editor } from '@monaco-editor/react';
export { DiffEditor } from '@monaco-editor/react';

@@ -18,11 +18,18 @@ export const Editor = lazy(() =>
    }))
);

export const DiffEditor = lazy(() =>
    import('./code-editor').then((module) => ({
        default: module.DiffEditor,
    }))
);

type EditorType = typeof Editor;

export interface CodeSnippetProps {
    className?: string;
    code: string;
    codeToCopy?: string;
    language?: 'sql' | 'shell';
    loading?: boolean;
    autoScroll?: boolean;
@@ -34,6 +41,7 @@ export const CodeSnippet: React.FC<CodeSnippetProps> = React.memo(
    ({
        className,
        code,
        codeToCopy,
        loading,
        language = 'sql',
        autoScroll = false,
@@ -85,7 +93,7 @@ export const CodeSnippet: React.FC<CodeSnippetProps> = React.memo(
            }

            try {
                await navigator.clipboard.writeText(code);
                await navigator.clipboard.writeText(codeToCopy ?? code);
                setIsCopied(true);
            } catch {
                setIsCopied(false);
@@ -97,7 +105,7 @@ export const CodeSnippet: React.FC<CodeSnippetProps> = React.memo(
                ),
            });
        }
    }, [code, t, toast]);
    }, [code, codeToCopy, t, toast]);

    return (
        <div
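The `codeToCopy` addition above lets the copy button write something other than the rendered snippet to the clipboard, with the memo dependency list updated to match. A minimal usage sketch; the import path and the wrapping component are assumptions for illustration, not part of this change:

```tsx
import * as React from 'react';
// Import path assumed for illustration.
import { CodeSnippet } from '@/components/code-snippet/code-snippet';

// Hypothetical wrapper: shows a redacted command while the copy button
// yields the full template via the new `codeToCopy` prop.
export const RunContainerSnippet: React.FC = () => (
    <CodeSnippet
        language="shell"
        code="docker run -e OPENAI_API_KEY=*** -p 8080:80 ghcr.io/chartdb/chartdb:latest"
        codeToCopy="docker run -e OPENAI_API_KEY=<YOUR_OPEN_AI_KEY> -p 8080:80 ghcr.io/chartdb/chartdb:latest"
    />
);
```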
@@ -24,12 +24,19 @@ export interface SelectBoxOption {
    value: string;
    label: string;
    description?: string;
    regex?: string;
    extractRegex?: RegExp;
}

export interface SelectBoxProps {
    options: SelectBoxOption[];
    value?: string[] | string;
    onChange?: (values: string[] | string) => void;
    valueSuffix?: string;
    optionSuffix?: (option: SelectBoxOption) => string;
    onChange?: (
        values: string[] | string,
        regexMatches?: string[] | string
    ) => void;
    placeholder?: string;
    inputPlaceholder?: string;
    emptyPlaceholder?: string;
@@ -55,10 +62,12 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
            className,
            options,
            value,
            valueSuffix,
            onChange,
            multiple,
            oneLine,
            selectAll,
            optionSuffix,
            deselectAll,
            clearText,
            showClear,
@@ -86,7 +95,7 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
        );

        const handleSelect = React.useCallback(
            (selectedValue: string) => {
            (selectedValue: string, regexMatches?: string[]) => {
                if (multiple) {
                    const newValue =
                        value?.includes(selectedValue) && Array.isArray(value)
@@ -94,7 +103,7 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
                            : [...(value ?? []), selectedValue];
                    onChange?.(newValue);
                } else {
                    onChange?.(selectedValue);
                    onChange?.(selectedValue, regexMatches);
                    setIsOpen(false);
                }
            },
@@ -199,6 +208,7 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
                                (opt) => opt.value === value
                            )?.label
                        }
                        {valueSuffix ? valueSuffix : ''}
                    </div>
                )
            ) : (
@@ -239,11 +249,22 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
                align="center"
            >
                <Command
                    filter={(value, search) =>
                        value.toLowerCase().includes(search.toLowerCase())
                    filter={(value, search, keywords) => {
                        if (
                            keywords?.length &&
                            keywords.some((keyword) =>
                                new RegExp(keyword).test(search)
                            )
                        ) {
                            return 1;
                        }

                        return value
                            .toLowerCase()
                            .includes(search.toLowerCase())
                            ? 1
                            : 0
                    }
                            : 0;
                    }}
                >
                    <div className="relative">
                        <CommandInput
@@ -302,14 +323,36 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
                            const isSelected =
                                Array.isArray(value) &&
                                value.includes(option.value);

                            const isRegexMatch =
                                option.regex &&
                                new RegExp(option.regex)?.test(
                                    searchTerm
                                );

                            const matches = option.extractRegex
                                ? searchTerm.match(
                                      option.extractRegex
                                  )
                                : undefined;

                            return (
                                <CommandItem
                                    className="flex items-center"
                                    key={option.value}
                                    keywords={
                                        option.regex
                                            ? [option.regex]
                                            : undefined
                                    }
                                    // value={option.value}
                                    onSelect={() =>
                                        handleSelect(
                                            option.value
                                            option.value,
                                            matches?.map(
                                                (match) =>
                                                    match.toString()
                                            )
                                        )
                                    }
                                >
@@ -327,7 +370,15 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
                                    )}
                                    <div className="flex items-center truncate">
                                        <span>
                                            {option.label}
                                            {isRegexMatch
                                                ? searchTerm
                                                : option.label}
                                            {!isRegexMatch &&
                                            optionSuffix
                                                ? optionSuffix(
                                                      option
                                                  )
                                                : ''}
                                        </span>
                                        {option.description && (
                                            <span className="ml-1 text-xs text-muted-foreground">
@@ -337,19 +388,20 @@ export const SelectBox = React.forwardRef<HTMLInputElement, SelectBoxProps>(
                                            </span>
                                        )}
                                    </div>
                                    {!multiple &&
                                    {((!multiple &&
                                        option.value ===
                                            value && (
                                            <CheckIcon
                                                className={cn(
                                                    'ml-auto',
                                                    option.value ===
                                                        value
                                                        ? 'opacity-100'
                                                        : 'opacity-0'
                                                )}
                                            />
                                        )}
                                        value) ||
                                        isRegexMatch) && (
                                        <CheckIcon
                                            className={cn(
                                                'ml-auto',
                                                option.value ===
                                                    value
                                                    ? 'opacity-100'
                                                    : 'opacity-0'
                                            )}
                                        />
                                    )}
                                </CommandItem>
                            );
                        })}
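The new `regex` and `extractRegex` fields on SelectBoxOption let an option match free-typed input: the pattern is fed to the Command filter as a keyword, and any captured groups are handed back through `onChange` as `regexMatches`. A small sketch of how such an option might be declared; the option values and import path are assumptions, not taken from the diff:

```tsx
// Hypothetical option list: the last entry uses the new `regex` /
// `extractRegex` fields so that typing e.g. "varchar(255)" matches it and the
// captured length is passed back as the second argument of onChange.
import type { SelectBoxOption } from '@/components/select-box/select-box';

const dataTypeOptions: SelectBoxOption[] = [
    { value: 'integer', label: 'integer' },
    { value: 'text', label: 'text' },
    {
        value: 'varchar',
        label: 'varchar',
        regex: '^varchar\\(\\d+\\)$',
        extractRegex: /\((\d+)\)/,
    },
];
```

An option declared this way surfaces when the search text matches the pattern, and the extracted groups arrive alongside the selected value in the widened `onChange` callback.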
src/components/sheet/sheet.tsx (new file, 135 lines)
@@ -0,0 +1,135 @@
import * as React from 'react';
import * as SheetPrimitive from '@radix-ui/react-dialog';
import { cva, type VariantProps } from 'class-variance-authority';
import { cn } from '@/lib/utils';
import { Cross2Icon } from '@radix-ui/react-icons';

const Sheet = SheetPrimitive.Root;

const SheetTrigger = SheetPrimitive.Trigger;

const SheetClose = SheetPrimitive.Close;

const SheetPortal = SheetPrimitive.Portal;

const SheetOverlay = React.forwardRef<
    React.ElementRef<typeof SheetPrimitive.Overlay>,
    React.ComponentPropsWithoutRef<typeof SheetPrimitive.Overlay>
>(({ className, ...props }, ref) => (
    <SheetPrimitive.Overlay
        className={cn(
            'fixed inset-0 z-50 bg-black/80 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0',
            className
        )}
        {...props}
        ref={ref}
    />
));
SheetOverlay.displayName = SheetPrimitive.Overlay.displayName;

const sheetVariants = cva(
    'fixed z-50 gap-4 bg-background p-6 shadow-lg transition ease-in-out data-[state=closed]:duration-300 data-[state=open]:duration-500 data-[state=open]:animate-in data-[state=closed]:animate-out',
    {
        variants: {
            side: {
                top: 'inset-x-0 top-0 border-b data-[state=closed]:slide-out-to-top data-[state=open]:slide-in-from-top',
                bottom: 'inset-x-0 bottom-0 border-t data-[state=closed]:slide-out-to-bottom data-[state=open]:slide-in-from-bottom',
                left: 'inset-y-0 left-0 h-full w-3/4 border-r data-[state=closed]:slide-out-to-left data-[state=open]:slide-in-from-left sm:max-w-sm',
                right: 'inset-y-0 right-0 h-full w-3/4 border-l data-[state=closed]:slide-out-to-right data-[state=open]:slide-in-from-right sm:max-w-sm',
            },
        },
        defaultVariants: {
            side: 'right',
        },
    }
);

interface SheetContentProps
    extends React.ComponentPropsWithoutRef<typeof SheetPrimitive.Content>,
        VariantProps<typeof sheetVariants> {}

const SheetContent = React.forwardRef<
    React.ElementRef<typeof SheetPrimitive.Content>,
    SheetContentProps
>(({ side = 'right', className, children, ...props }, ref) => (
    <SheetPortal>
        <SheetOverlay />
        <SheetPrimitive.Content
            ref={ref}
            className={cn(sheetVariants({ side }), className)}
            {...props}
        >
            <SheetPrimitive.Close className="absolute right-4 top-4 rounded-sm opacity-70 ring-offset-background transition-opacity hover:opacity-100 focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 disabled:pointer-events-none data-[state=open]:bg-secondary">
                <Cross2Icon className="size-4" />
                <span className="sr-only">Close</span>
            </SheetPrimitive.Close>
            {children}
        </SheetPrimitive.Content>
    </SheetPortal>
));
SheetContent.displayName = SheetPrimitive.Content.displayName;

const SheetHeader = ({
    className,
    ...props
}: React.HTMLAttributes<HTMLDivElement>) => (
    <div
        className={cn(
            'flex flex-col space-y-2 text-center sm:text-left',
            className
        )}
        {...props}
    />
);
SheetHeader.displayName = 'SheetHeader';

const SheetFooter = ({
    className,
    ...props
}: React.HTMLAttributes<HTMLDivElement>) => (
    <div
        className={cn(
            'flex flex-col-reverse sm:flex-row sm:justify-end sm:space-x-2',
            className
        )}
        {...props}
    />
);
SheetFooter.displayName = 'SheetFooter';

const SheetTitle = React.forwardRef<
    React.ElementRef<typeof SheetPrimitive.Title>,
    React.ComponentPropsWithoutRef<typeof SheetPrimitive.Title>
>(({ className, ...props }, ref) => (
    <SheetPrimitive.Title
        ref={ref}
        className={cn('text-lg font-semibold text-foreground', className)}
        {...props}
    />
));
SheetTitle.displayName = SheetPrimitive.Title.displayName;

const SheetDescription = React.forwardRef<
    React.ElementRef<typeof SheetPrimitive.Description>,
    React.ComponentPropsWithoutRef<typeof SheetPrimitive.Description>
>(({ className, ...props }, ref) => (
    <SheetPrimitive.Description
        ref={ref}
        className={cn('text-sm text-muted-foreground', className)}
        {...props}
    />
));
SheetDescription.displayName = SheetPrimitive.Description.displayName;

export {
    Sheet,
    SheetPortal,
    SheetOverlay,
    SheetTrigger,
    SheetClose,
    SheetContent,
    SheetHeader,
    SheetFooter,
    SheetTitle,
    SheetDescription,
};
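sheet.tsx wraps the Radix dialog primitives into a slide-in panel; the mobile branch of the sidebar below renders inside it. A minimal usage sketch using only components exported above; the panel name and content are hypothetical:

```tsx
import * as React from 'react';
import {
    Sheet,
    SheetContent,
    SheetDescription,
    SheetHeader,
    SheetTitle,
    SheetTrigger,
} from '@/components/sheet/sheet';

// Hypothetical panel: a trigger button that opens a right-hand sheet.
export const FilterPanel: React.FC = () => (
    <Sheet>
        <SheetTrigger>Open filters</SheetTrigger>
        <SheetContent side="right">
            <SheetHeader>
                <SheetTitle>Filters</SheetTitle>
                <SheetDescription>Choose which tables to show.</SheetDescription>
            </SheetHeader>
        </SheetContent>
    </Sheet>
);
```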
src/components/sidebar/sidebar.tsx (new file, 790 lines)
@@ -0,0 +1,790 @@
import * as React from 'react';
import { Slot } from '@radix-ui/react-slot';
import type { VariantProps } from 'class-variance-authority';
import { cva } from 'class-variance-authority';
import { useIsMobile } from '@/hooks/use-mobile';
import { cn } from '@/lib/utils';
import { Button } from '@/components/button/button';
import { Input } from '@/components/input/input';
import { Separator } from '@/components/separator/separator';
import {
    Sheet,
    SheetContent,
    SheetDescription,
    SheetHeader,
    SheetTitle,
} from '@/components/sheet/sheet';
import { Skeleton } from '@/components/skeleton/skeleton';
import {
    Tooltip,
    TooltipContent,
    TooltipProvider,
    TooltipTrigger,
} from '@/components/tooltip/tooltip';
import { ViewVerticalIcon } from '@radix-ui/react-icons';
import { useSidebar } from './use-sidebar';

const SIDEBAR_COOKIE_NAME = 'sidebar_state';
const SIDEBAR_COOKIE_MAX_AGE = 60 * 60 * 24 * 7;
const SIDEBAR_WIDTH = '16rem';
const SIDEBAR_WIDTH_MOBILE = '18rem';
const SIDEBAR_WIDTH_ICON = '3rem';
const SIDEBAR_KEYBOARD_SHORTCUT = 'b';

type SidebarContext = {
    state: 'expanded' | 'collapsed';
    open: boolean;
    setOpen: (open: boolean) => void;
    openMobile: boolean;
    setOpenMobile: (open: boolean) => void;
    isMobile: boolean;
    toggleSidebar: () => void;
};

const SidebarContext = React.createContext<SidebarContext | null>(null);

const SidebarProvider = React.forwardRef<
    HTMLDivElement,
    React.ComponentProps<'div'> & {
        defaultOpen?: boolean;
        open?: boolean;
        onOpenChange?: (open: boolean) => void;
    }
>(
    (
        {
            defaultOpen = true,
            open: openProp,
            onOpenChange: setOpenProp,
            className,
            style,
            children,
            ...props
        },
        ref
    ) => {
        const isMobile = useIsMobile();
        const [openMobile, setOpenMobile] = React.useState(false);

        // This is the internal state of the sidebar.
        // We use openProp and setOpenProp for control from outside the component.
        const [_open, _setOpen] = React.useState(defaultOpen);
        const open = openProp ?? _open;
        const setOpen = React.useCallback(
            (value: boolean | ((value: boolean) => boolean)) => {
                const openState =
                    typeof value === 'function' ? value(open) : value;
                if (setOpenProp) {
                    setOpenProp(openState);
                } else {
                    _setOpen(openState);
                }

                // This sets the cookie to keep the sidebar state.
                document.cookie = `${SIDEBAR_COOKIE_NAME}=${openState}; path=/; max-age=${SIDEBAR_COOKIE_MAX_AGE}`;
            },
            [setOpenProp, open]
        );

        // Helper to toggle the sidebar.
        const toggleSidebar = React.useCallback(() => {
            return isMobile
                ? setOpenMobile((open) => !open)
                : setOpen((open) => !open);
        }, [isMobile, setOpen, setOpenMobile]);

        // Adds a keyboard shortcut to toggle the sidebar.
        React.useEffect(() => {
            const handleKeyDown = (event: KeyboardEvent) => {
                if (
                    event.key === SIDEBAR_KEYBOARD_SHORTCUT &&
                    (event.metaKey || event.ctrlKey)
                ) {
                    event.preventDefault();
                    toggleSidebar();
                }
            };

            window.addEventListener('keydown', handleKeyDown);
            return () => window.removeEventListener('keydown', handleKeyDown);
        }, [toggleSidebar]);

        // We add a state so that we can do data-state="expanded" or "collapsed".
        // This makes it easier to style the sidebar with Tailwind classes.
        const state = open ? 'expanded' : 'collapsed';

        const contextValue = React.useMemo<SidebarContext>(
            () => ({
                state,
                open,
                setOpen,
                isMobile,
                openMobile,
                setOpenMobile,
                toggleSidebar,
            }),
            [
                state,
                open,
                setOpen,
                isMobile,
                openMobile,
                setOpenMobile,
                toggleSidebar,
            ]
        );

        return (
            <SidebarContext.Provider value={contextValue}>
                <TooltipProvider delayDuration={0}>
                    <div
                        style={
                            {
                                '--sidebar-width': SIDEBAR_WIDTH,
                                '--sidebar-width-icon': SIDEBAR_WIDTH_ICON,
                                ...style,
                            } as React.CSSProperties
                        }
                        className={cn(
                            'group/sidebar-wrapper flex min-h-svh w-full has-[[data-variant=inset]]:bg-sidebar',
                            className
                        )}
                        ref={ref}
                        {...props}
                    >
                        {children}
                    </div>
                </TooltipProvider>
            </SidebarContext.Provider>
        );
    }
);
SidebarProvider.displayName = 'SidebarProvider';

const Sidebar = React.forwardRef<
    HTMLDivElement,
    React.ComponentProps<'div'> & {
        side?: 'left' | 'right';
        variant?: 'sidebar' | 'floating' | 'inset';
        collapsible?: 'offcanvas' | 'icon' | 'none';
    }
>(
    (
        {
            side = 'left',
            variant = 'sidebar',
            collapsible = 'offcanvas',
            className,
            children,
            ...props
        },
        ref
    ) => {
        const { isMobile, state, openMobile, setOpenMobile } = useSidebar();

        if (collapsible === 'none') {
            return (
                <div
                    className={cn(
                        'flex h-full w-[--sidebar-width] flex-col bg-sidebar text-sidebar-foreground',
                        className
                    )}
                    ref={ref}
                    {...props}
                >
                    {children}
                </div>
            );
        }

        if (isMobile) {
            return (
                <Sheet
                    open={openMobile}
                    onOpenChange={setOpenMobile}
                    {...props}
                >
                    <SheetContent
                        data-sidebar="sidebar"
                        data-mobile="true"
                        className="w-[--sidebar-width] bg-sidebar p-0 text-sidebar-foreground [&>button]:hidden"
                        style={
                            {
                                '--sidebar-width': SIDEBAR_WIDTH_MOBILE,
                            } as React.CSSProperties
                        }
                        side={side}
                    >
                        <SheetHeader className="sr-only">
                            <SheetTitle>Sidebar</SheetTitle>
                            <SheetDescription>
                                Displays the mobile sidebar.
                            </SheetDescription>
                        </SheetHeader>
                        <div className="flex size-full flex-col">
                            {children}
                        </div>
                    </SheetContent>
                </Sheet>
            );
        }

        return (
            <div
                ref={ref}
                className="group peer hidden text-sidebar-foreground md:block"
                data-state={state}
                data-collapsible={state === 'collapsed' ? collapsible : ''}
                data-variant={variant}
                data-side={side}
            >
                {/* This is what handles the sidebar gap on desktop */}
                <div
                    className={cn(
                        'relative w-[--sidebar-width] bg-transparent transition-[width] duration-200 ease-linear',
                        'group-data-[collapsible=offcanvas]:w-0',
                        'group-data-[side=right]:rotate-180',
                        variant === 'floating' || variant === 'inset'
                            ? 'group-data-[collapsible=icon]:w-[calc(var(--sidebar-width-icon)_+_theme(spacing.4))]'
                            : 'group-data-[collapsible=icon]:w-[--sidebar-width-icon]'
                    )}
                />
                <div
                    className={cn(
                        'fixed inset-y-0 z-10 hidden h-svh w-[--sidebar-width] transition-[left,right,width] duration-200 ease-linear md:flex',
                        side === 'left'
                            ? 'left-0 group-data-[collapsible=offcanvas]:left-[calc(var(--sidebar-width)*-1)]'
                            : 'right-0 group-data-[collapsible=offcanvas]:right-[calc(var(--sidebar-width)*-1)]',
                        // Adjust the padding for floating and inset variants.
                        variant === 'floating' || variant === 'inset'
                            ? 'p-2 group-data-[collapsible=icon]:w-[calc(var(--sidebar-width-icon)_+_theme(spacing.4)_+2px)]'
                            : 'group-data-[collapsible=icon]:w-[--sidebar-width-icon] group-data-[side=left]:border-r group-data-[side=right]:border-l',
                        className
                    )}
                    {...props}
                >
                    <div
                        data-sidebar="sidebar"
                        className="flex size-full flex-col bg-sidebar group-data-[variant=floating]:rounded-lg group-data-[variant=floating]:border group-data-[variant=floating]:border-sidebar-border group-data-[variant=floating]:shadow"
                    >
                        {children}
                    </div>
                </div>
            </div>
        );
    }
);
Sidebar.displayName = 'Sidebar';

const SidebarTrigger = React.forwardRef<
    React.ElementRef<typeof Button>,
    React.ComponentProps<typeof Button>
>(({ className, onClick, ...props }, ref) => {
    const { toggleSidebar } = useSidebar();

    return (
        <Button
            ref={ref}
            data-sidebar="trigger"
            variant="ghost"
            size="icon"
            className={cn('h-7 w-7', className)}
            onClick={(event) => {
                onClick?.(event);
                toggleSidebar();
            }}
            {...props}
        >
            <ViewVerticalIcon />
            <span className="sr-only">Toggle Sidebar</span>
        </Button>
    );
});
SidebarTrigger.displayName = 'SidebarTrigger';

const SidebarRail = React.forwardRef<
    HTMLButtonElement,
    React.ComponentProps<'button'>
>(({ className, ...props }, ref) => {
    const { toggleSidebar } = useSidebar();

    return (
        <button
            ref={ref}
            data-sidebar="rail"
            aria-label="Toggle Sidebar"
            tabIndex={-1}
            onClick={toggleSidebar}
            title="Toggle Sidebar"
            className={cn(
                'absolute inset-y-0 z-20 hidden w-4 -translate-x-1/2 transition-all ease-linear after:absolute after:inset-y-0 after:left-1/2 after:w-[2px] hover:after:bg-sidebar-border group-data-[side=left]:-right-4 group-data-[side=right]:left-0 sm:flex',
                '[[data-side=left]_&]:cursor-w-resize [[data-side=right]_&]:cursor-e-resize',
                '[[data-side=left][data-state=collapsed]_&]:cursor-e-resize [[data-side=right][data-state=collapsed]_&]:cursor-w-resize',
                'group-data-[collapsible=offcanvas]:translate-x-0 group-data-[collapsible=offcanvas]:after:left-full group-data-[collapsible=offcanvas]:hover:bg-sidebar',
                '[[data-side=left][data-collapsible=offcanvas]_&]:-right-2',
                '[[data-side=right][data-collapsible=offcanvas]_&]:-left-2',
                className
            )}
            {...props}
        />
    );
});
SidebarRail.displayName = 'SidebarRail';

const SidebarInset = React.forwardRef<
    HTMLDivElement,
    React.ComponentProps<'main'>
>(({ className, ...props }, ref) => {
    return (
        <main
            ref={ref}
            className={cn(
                'relative flex w-full flex-1 flex-col bg-background',
                'md:peer-data-[variant=inset]:m-2 md:peer-data-[state=collapsed]:peer-data-[variant=inset]:ml-2 md:peer-data-[variant=inset]:ml-0 md:peer-data-[variant=inset]:rounded-xl md:peer-data-[variant=inset]:shadow',
                className
            )}
            {...props}
        />
    );
});
SidebarInset.displayName = 'SidebarInset';

const SidebarInput = React.forwardRef<
    React.ElementRef<typeof Input>,
    React.ComponentProps<typeof Input>
>(({ className, ...props }, ref) => {
    return (
        <Input
            ref={ref}
            data-sidebar="input"
            className={cn(
                'h-8 w-full bg-background shadow-none focus-visible:ring-2 focus-visible:ring-sidebar-ring',
                className
            )}
            {...props}
        />
    );
});
SidebarInput.displayName = 'SidebarInput';

const SidebarHeader = React.forwardRef<
    HTMLDivElement,
    React.ComponentProps<'div'>
>(({ className, ...props }, ref) => {
    return (
        <div
            ref={ref}
            data-sidebar="header"
            className={cn('flex flex-col gap-2 p-2', className)}
|
||||
{...props}
|
||||
/>
|
||||
);
|
||||
});
|
||||
SidebarHeader.displayName = 'SidebarHeader';
|
||||
|
||||
const SidebarFooter = React.forwardRef<
|
||||
HTMLDivElement,
|
||||
React.ComponentProps<'div'>
|
||||
>(({ className, ...props }, ref) => {
|
||||
return (
|
||||
<div
|
||||
ref={ref}
|
||||
data-sidebar="footer"
|
||||
className={cn('flex flex-col gap-2 p-2', className)}
|
||||
{...props}
|
||||
/>
|
||||
);
|
||||
});
|
||||
SidebarFooter.displayName = 'SidebarFooter';
|
||||
|
||||
const SidebarSeparator = React.forwardRef<
|
||||
React.ElementRef<typeof Separator>,
|
||||
React.ComponentProps<typeof Separator>
|
||||
>(({ className, ...props }, ref) => {
|
||||
return (
|
||||
<Separator
|
||||
ref={ref}
|
||||
data-sidebar="separator"
|
||||
className={cn('mx-2 w-auto bg-sidebar-border', className)}
|
||||
{...props}
|
||||
/>
|
||||
);
|
||||
});
|
||||
SidebarSeparator.displayName = 'SidebarSeparator';
|
||||
|
||||
const SidebarContent = React.forwardRef<
|
||||
HTMLDivElement,
|
||||
React.ComponentProps<'div'>
|
||||
>(({ className, ...props }, ref) => {
|
||||
return (
|
||||
<div
|
||||
ref={ref}
|
||||
data-sidebar="content"
|
||||
className={cn(
|
||||
'flex min-h-0 flex-1 flex-col gap-2 overflow-auto group-data-[collapsible=icon]:overflow-hidden',
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
);
|
||||
});
|
||||
SidebarContent.displayName = 'SidebarContent';
|
||||
|
||||
const SidebarGroup = React.forwardRef<
|
||||
HTMLDivElement,
|
||||
React.ComponentProps<'div'>
|
||||
>(({ className, ...props }, ref) => {
|
||||
return (
|
||||
<div
|
||||
ref={ref}
|
||||
data-sidebar="group"
|
||||
className={cn(
|
||||
'relative flex w-full min-w-0 flex-col p-2',
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
);
|
||||
});
|
||||
SidebarGroup.displayName = 'SidebarGroup';
|
||||
|
||||
const SidebarGroupLabel = React.forwardRef<
|
||||
HTMLDivElement,
|
||||
React.ComponentProps<'div'> & { asChild?: boolean }
|
||||
>(({ className, asChild = false, ...props }, ref) => {
|
||||
const Comp = asChild ? Slot : 'div';
|
||||
|
||||
return (
|
||||
<Comp
|
||||
ref={ref}
|
||||
data-sidebar="group-label"
|
||||
className={cn(
|
||||
'flex h-8 shrink-0 items-center rounded-md px-2 text-xs font-medium text-sidebar-foreground/70 outline-none ring-sidebar-ring transition-[margin,opacity] duration-200 ease-linear focus-visible:ring-2 [&>svg]:size-4 [&>svg]:shrink-0',
|
||||
'group-data-[collapsible=icon]:-mt-8 group-data-[collapsible=icon]:opacity-0',
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
);
|
||||
});
|
||||
SidebarGroupLabel.displayName = 'SidebarGroupLabel';
|
||||
|
||||
const SidebarGroupAction = React.forwardRef<
|
||||
HTMLButtonElement,
|
||||
React.ComponentProps<'button'> & { asChild?: boolean }
|
||||
>(({ className, asChild = false, ...props }, ref) => {
|
||||
const Comp = asChild ? Slot : 'button';
|
||||
|
||||
return (
|
||||
<Comp
|
||||
ref={ref}
|
||||
data-sidebar="group-action"
|
||||
className={cn(
|
||||
'absolute right-3 top-3.5 flex aspect-square w-5 items-center justify-center rounded-md p-0 text-sidebar-foreground outline-none ring-sidebar-ring transition-transform hover:bg-sidebar-accent hover:text-sidebar-accent-foreground focus-visible:ring-2 [&>svg]:size-4 [&>svg]:shrink-0',
|
||||
// Increases the hit area of the button on mobile.
|
||||
'after:absolute after:-inset-2 after:md:hidden',
|
||||
'group-data-[collapsible=icon]:hidden',
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
);
|
||||
});
|
||||
SidebarGroupAction.displayName = 'SidebarGroupAction';
|
||||
|
||||
const SidebarGroupContent = React.forwardRef<
|
||||
HTMLDivElement,
|
||||
React.ComponentProps<'div'>
|
||||
>(({ className, ...props }, ref) => (
|
||||
<div
|
||||
ref={ref}
|
||||
data-sidebar="group-content"
|
||||
className={cn('w-full text-sm', className)}
|
||||
{...props}
|
||||
/>
|
||||
));
|
||||
SidebarGroupContent.displayName = 'SidebarGroupContent';
|
||||
|
||||
const SidebarMenu = React.forwardRef<
|
||||
HTMLUListElement,
|
||||
React.ComponentProps<'ul'>
|
||||
>(({ className, ...props }, ref) => (
|
||||
<ul
|
||||
ref={ref}
|
||||
data-sidebar="menu"
|
||||
className={cn('flex w-full min-w-0 flex-col gap-1', className)}
|
||||
{...props}
|
||||
/>
|
||||
));
|
||||
SidebarMenu.displayName = 'SidebarMenu';
|
||||
|
||||
const SidebarMenuItem = React.forwardRef<
|
||||
HTMLLIElement,
|
||||
React.ComponentProps<'li'>
|
||||
>(({ className, ...props }, ref) => (
|
||||
<li
|
||||
ref={ref}
|
||||
data-sidebar="menu-item"
|
||||
className={cn('group/menu-item relative', className)}
|
||||
{...props}
|
||||
/>
|
||||
));
|
||||
SidebarMenuItem.displayName = 'SidebarMenuItem';
|
||||
|
||||
const sidebarMenuButtonVariants = cva(
|
||||
'peer/menu-button flex w-full items-center gap-2 overflow-hidden rounded-md p-2 text-left text-sm outline-none ring-sidebar-ring transition-[width,height,padding] hover:bg-sidebar-accent hover:text-sidebar-accent-foreground focus-visible:ring-2 active:bg-sidebar-accent active:text-sidebar-accent-foreground disabled:pointer-events-none disabled:opacity-50 group-has-[[data-sidebar=menu-action]]/menu-item:pr-8 aria-disabled:pointer-events-none aria-disabled:opacity-50 data-[active=true]:bg-sidebar-accent data-[active=true]:font-medium data-[active=true]:text-sidebar-accent-foreground data-[state=open]:hover:bg-sidebar-accent data-[state=open]:hover:text-sidebar-accent-foreground group-data-[collapsible=icon]:!size-8 group-data-[collapsible=icon]:!p-2 [&>span:last-child]:truncate [&>svg]:size-4 [&>svg]:shrink-0',
|
||||
{
|
||||
variants: {
|
||||
variant: {
|
||||
default:
|
||||
'hover:bg-sidebar-accent hover:text-sidebar-accent-foreground',
|
||||
outline:
|
||||
'bg-background shadow-[0_0_0_1px_hsl(var(--sidebar-border))] hover:bg-sidebar-accent hover:text-sidebar-accent-foreground hover:shadow-[0_0_0_1px_hsl(var(--sidebar-accent))]',
|
||||
},
|
||||
size: {
|
||||
default: 'h-8 text-sm',
|
||||
sm: 'h-7 text-xs',
|
||||
lg: 'h-12 text-sm group-data-[collapsible=icon]:!p-0',
|
||||
},
|
||||
},
|
||||
defaultVariants: {
|
||||
variant: 'default',
|
||||
size: 'default',
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
const SidebarMenuButton = React.forwardRef<
|
||||
HTMLButtonElement,
|
||||
React.ComponentProps<'button'> & {
|
||||
asChild?: boolean;
|
||||
isActive?: boolean;
|
||||
tooltip?: string | React.ComponentProps<typeof TooltipContent>;
|
||||
} & VariantProps<typeof sidebarMenuButtonVariants>
|
||||
>(
|
||||
(
|
||||
{
|
||||
asChild = false,
|
||||
isActive = false,
|
||||
variant = 'default',
|
||||
size = 'default',
|
||||
tooltip,
|
||||
className,
|
||||
...props
|
||||
},
|
||||
ref
|
||||
) => {
|
||||
const Comp = asChild ? Slot : 'button';
|
||||
const { isMobile, state } = useSidebar();
|
||||
|
||||
const button = (
|
||||
<Comp
|
||||
ref={ref}
|
||||
data-sidebar="menu-button"
|
||||
data-size={size}
|
||||
data-active={isActive}
|
||||
className={cn(
|
||||
sidebarMenuButtonVariants({ variant, size }),
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
);
|
||||
|
||||
if (!tooltip) {
|
||||
return button;
|
||||
}
|
||||
|
||||
if (typeof tooltip === 'string') {
|
||||
tooltip = {
|
||||
children: tooltip,
|
||||
};
|
||||
}
|
||||
|
||||
return (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>{button}</TooltipTrigger>
|
||||
<TooltipContent
|
||||
side="right"
|
||||
align="center"
|
||||
hidden={state !== 'collapsed' || isMobile}
|
||||
{...tooltip}
|
||||
/>
|
||||
</Tooltip>
|
||||
);
|
||||
}
|
||||
);
|
||||
SidebarMenuButton.displayName = 'SidebarMenuButton';
|
||||
|
||||
const SidebarMenuAction = React.forwardRef<
|
||||
HTMLButtonElement,
|
||||
React.ComponentProps<'button'> & {
|
||||
asChild?: boolean;
|
||||
showOnHover?: boolean;
|
||||
}
|
||||
>(({ className, asChild = false, showOnHover = false, ...props }, ref) => {
|
||||
const Comp = asChild ? Slot : 'button';
|
||||
|
||||
return (
|
||||
<Comp
|
||||
ref={ref}
|
||||
data-sidebar="menu-action"
|
||||
className={cn(
|
||||
'absolute right-1 top-1.5 flex aspect-square w-5 items-center justify-center rounded-md p-0 text-sidebar-foreground outline-none ring-sidebar-ring transition-transform hover:bg-sidebar-accent hover:text-sidebar-accent-foreground focus-visible:ring-2 peer-hover/menu-button:text-sidebar-accent-foreground [&>svg]:size-4 [&>svg]:shrink-0',
|
||||
// Increases the hit area of the button on mobile.
|
||||
'after:absolute after:-inset-2 after:md:hidden',
|
||||
'peer-data-[size=sm]/menu-button:top-1',
|
||||
'peer-data-[size=default]/menu-button:top-1.5',
|
||||
'peer-data-[size=lg]/menu-button:top-2.5',
|
||||
'group-data-[collapsible=icon]:hidden',
|
||||
showOnHover &&
|
||||
'group-focus-within/menu-item:opacity-100 group-hover/menu-item:opacity-100 data-[state=open]:opacity-100 peer-data-[active=true]/menu-button:text-sidebar-accent-foreground md:opacity-0',
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
);
|
||||
});
|
||||
SidebarMenuAction.displayName = 'SidebarMenuAction';
|
||||
|
||||
const SidebarMenuBadge = React.forwardRef<
|
||||
HTMLDivElement,
|
||||
React.ComponentProps<'div'>
|
||||
>(({ className, ...props }, ref) => (
|
||||
<div
|
||||
ref={ref}
|
||||
data-sidebar="menu-badge"
|
||||
className={cn(
|
||||
'pointer-events-none absolute right-1 flex h-5 min-w-5 select-none items-center justify-center rounded-md px-1 text-xs font-medium tabular-nums text-sidebar-foreground',
|
||||
'peer-hover/menu-button:text-sidebar-accent-foreground peer-data-[active=true]/menu-button:text-sidebar-accent-foreground',
|
||||
'peer-data-[size=sm]/menu-button:top-1',
|
||||
'peer-data-[size=default]/menu-button:top-1.5',
|
||||
'peer-data-[size=lg]/menu-button:top-2.5',
|
||||
'group-data-[collapsible=icon]:hidden',
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
));
|
||||
SidebarMenuBadge.displayName = 'SidebarMenuBadge';
|
||||
|
||||
const SidebarMenuSkeleton = React.forwardRef<
|
||||
HTMLDivElement,
|
||||
React.ComponentProps<'div'> & {
|
||||
showIcon?: boolean;
|
||||
}
|
||||
>(({ className, showIcon = false, ...props }, ref) => {
|
||||
// Random width between 50% and 90%.
|
||||
const width = React.useMemo(() => {
|
||||
return `${Math.floor(Math.random() * 40) + 50}%`;
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<div
|
||||
ref={ref}
|
||||
data-sidebar="menu-skeleton"
|
||||
className={cn(
|
||||
'flex h-8 items-center gap-2 rounded-md px-2',
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
>
|
||||
{showIcon && (
|
||||
<Skeleton
|
||||
className="size-4 rounded-md"
|
||||
data-sidebar="menu-skeleton-icon"
|
||||
/>
|
||||
)}
|
||||
<Skeleton
|
||||
className="h-4 max-w-[--skeleton-width] flex-1"
|
||||
data-sidebar="menu-skeleton-text"
|
||||
style={
|
||||
{
|
||||
'--skeleton-width': width,
|
||||
} as React.CSSProperties
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
});
|
||||
SidebarMenuSkeleton.displayName = 'SidebarMenuSkeleton';
|
||||
|
||||
const SidebarMenuSub = React.forwardRef<
|
||||
HTMLUListElement,
|
||||
React.ComponentProps<'ul'>
|
||||
>(({ className, ...props }, ref) => (
|
||||
<ul
|
||||
ref={ref}
|
||||
data-sidebar="menu-sub"
|
||||
className={cn(
|
||||
'mx-3.5 flex min-w-0 translate-x-px flex-col gap-1 border-l border-sidebar-border px-2.5 py-0.5',
|
||||
'group-data-[collapsible=icon]:hidden',
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
));
|
||||
SidebarMenuSub.displayName = 'SidebarMenuSub';
|
||||
|
||||
const SidebarMenuSubItem = React.forwardRef<
|
||||
HTMLLIElement,
|
||||
React.ComponentProps<'li'>
|
||||
>(({ ...props }, ref) => <li ref={ref} {...props} />);
|
||||
SidebarMenuSubItem.displayName = 'SidebarMenuSubItem';
|
||||
|
||||
const SidebarMenuSubButton = React.forwardRef<
|
||||
HTMLAnchorElement,
|
||||
React.ComponentProps<'a'> & {
|
||||
asChild?: boolean;
|
||||
size?: 'sm' | 'md';
|
||||
isActive?: boolean;
|
||||
}
|
||||
>(({ asChild = false, size = 'md', isActive, className, ...props }, ref) => {
|
||||
const Comp = asChild ? Slot : 'a';
|
||||
|
||||
return (
|
||||
<Comp
|
||||
ref={ref}
|
||||
data-sidebar="menu-sub-button"
|
||||
data-size={size}
|
||||
data-active={isActive}
|
||||
className={cn(
|
||||
'flex h-7 min-w-0 -translate-x-px items-center gap-2 overflow-hidden rounded-md px-2 text-sidebar-foreground outline-none ring-sidebar-ring hover:bg-sidebar-accent hover:text-sidebar-accent-foreground focus-visible:ring-2 active:bg-sidebar-accent active:text-sidebar-accent-foreground disabled:pointer-events-none disabled:opacity-50 aria-disabled:pointer-events-none aria-disabled:opacity-50 [&>span:last-child]:truncate [&>svg]:size-4 [&>svg]:shrink-0 [&>svg]:text-sidebar-accent-foreground',
|
||||
'data-[active=true]:bg-sidebar-accent data-[active=true]:text-sidebar-accent-foreground',
|
||||
size === 'sm' && 'text-xs',
|
||||
size === 'md' && 'text-sm',
|
||||
'group-data-[collapsible=icon]:hidden',
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
);
|
||||
});
|
||||
SidebarMenuSubButton.displayName = 'SidebarMenuSubButton';
|
||||
|
||||
export {
|
||||
Sidebar,
|
||||
SidebarContent,
|
||||
SidebarFooter,
|
||||
SidebarGroup,
|
||||
SidebarGroupAction,
|
||||
SidebarGroupContent,
|
||||
SidebarGroupLabel,
|
||||
SidebarHeader,
|
||||
SidebarInput,
|
||||
SidebarInset,
|
||||
SidebarMenu,
|
||||
SidebarMenuAction,
|
||||
SidebarMenuBadge,
|
||||
SidebarMenuButton,
|
||||
SidebarMenuItem,
|
||||
SidebarMenuSkeleton,
|
||||
SidebarMenuSub,
|
||||
SidebarMenuSubButton,
|
||||
SidebarMenuSubItem,
|
||||
SidebarProvider,
|
||||
SidebarRail,
|
||||
SidebarSeparator,
|
||||
SidebarTrigger,
|
||||
SidebarContext,
|
||||
};
|
||||
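For context, a minimal sketch of how these exported primitives could compose into an app shell. The AppShell component, its menu labels, and the import path are illustrative assumptions, not part of this diff:

import React from 'react';
import {
    Sidebar,
    SidebarContent,
    SidebarGroup,
    SidebarGroupContent,
    SidebarGroupLabel,
    SidebarInset,
    SidebarMenu,
    SidebarMenuButton,
    SidebarMenuItem,
    SidebarProvider,
    SidebarTrigger,
} from '@/components/sidebar/sidebar';

// Hypothetical app shell: the provider owns open/collapsed state, Sidebar
// renders the panel itself, and SidebarInset hosts the main content area.
export const AppShell: React.FC<React.PropsWithChildren> = ({ children }) => (
    <SidebarProvider>
        <Sidebar side="left" collapsible="icon">
            <SidebarContent>
                <SidebarGroup>
                    <SidebarGroupLabel>Diagrams</SidebarGroupLabel>
                    <SidebarGroupContent>
                        <SidebarMenu>
                            <SidebarMenuItem>
                                <SidebarMenuButton tooltip="All diagrams">
                                    <span>All diagrams</span>
                                </SidebarMenuButton>
                            </SidebarMenuItem>
                        </SidebarMenu>
                    </SidebarGroupContent>
                </SidebarGroup>
            </SidebarContent>
        </Sidebar>
        <SidebarInset>
            <SidebarTrigger />
            {children}
        </SidebarInset>
    </SidebarProvider>
);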
src/components/sidebar/use-sidebar.tsx (new file, 11 lines)
@@ -0,0 +1,11 @@
import React from 'react';
import { SidebarContext } from './sidebar';

export const useSidebar = () => {
    const context = React.useContext(SidebarContext);
    if (!context) {
        throw new Error('useSidebar must be used within a SidebarProvider.');
    }

    return context;
};
src/components/skeleton/skeleton.tsx (new file, 16 lines)
@@ -0,0 +1,16 @@
import React from 'react';
import { cn } from '@/lib/utils';

function Skeleton({
    className,
    ...props
}: React.HTMLAttributes<HTMLDivElement>) {
    return (
        <div
            className={cn('animate-pulse rounded-md bg-primary/10', className)}
            {...props}
        />
    );
}

export { Skeleton };
@@ -22,6 +22,8 @@ import { defaultSchemas } from '@/lib/data/default-schemas';
|
||||
import { useEventEmitter } from 'ahooks';
|
||||
import type { DBDependency } from '@/lib/domain/db-dependency';
|
||||
import { storageInitialValue } from '../storage-context/storage-context';
|
||||
import { useDiff } from '../diff-context/use-diff';
|
||||
import type { DiffCalculatedEvent } from '../diff-context/diff-context';
|
||||
|
||||
export interface ChartDBProviderProps {
|
||||
diagram?: Diagram;
|
||||
@@ -30,7 +32,8 @@ export interface ChartDBProviderProps {
|
||||
|
||||
export const ChartDBProvider: React.FC<
|
||||
React.PropsWithChildren<ChartDBProviderProps>
|
||||
> = ({ children, diagram, readonly }) => {
|
||||
> = ({ children, diagram, readonly: readonlyProp }) => {
|
||||
const { hasDiff } = useDiff();
|
||||
let db = useStorage();
|
||||
const events = useEventEmitter<ChartDBEvent>();
|
||||
const { setSchemasFilter, schemasFilter } = useLocalConfig();
|
||||
@@ -53,9 +56,33 @@ export const ChartDBProvider: React.FC<
|
||||
const [dependencies, setDependencies] = useState<DBDependency[]>(
|
||||
diagram?.dependencies ?? []
|
||||
);
|
||||
const { events: diffEvents } = useDiff();
|
||||
|
||||
const diffCalculatedHandler = useCallback((event: DiffCalculatedEvent) => {
|
||||
const { tablesAdded, fieldsAdded, relationshipsAdded } = event.data;
|
||||
setTables((tables) =>
|
||||
[...tables, ...(tablesAdded ?? [])].map((table) => {
|
||||
const fields = fieldsAdded.get(table.id);
|
||||
return fields
|
||||
? { ...table, fields: [...table.fields, ...fields] }
|
||||
: table;
|
||||
})
|
||||
);
|
||||
setRelationships((relationships) => [
|
||||
...relationships,
|
||||
...(relationshipsAdded ?? []),
|
||||
]);
|
||||
}, []);
|
||||
|
||||
diffEvents.useSubscription(diffCalculatedHandler);
|
||||
|
||||
const defaultSchemaName = defaultSchemas[databaseType];
|
||||
|
||||
const readonly = useMemo(
|
||||
() => readonlyProp ?? hasDiff ?? false,
|
||||
[readonlyProp, hasDiff]
|
||||
);
|
||||
|
||||
if (readonly) {
|
||||
db = storageInitialValue;
|
||||
}
|
||||
|
||||
@@ -47,10 +47,6 @@ export interface DialogContext {
|
||||
openStarUsDialog: () => void;
|
||||
closeStarUsDialog: () => void;
|
||||
|
||||
// Buckle dialog
|
||||
openBuckleDialog: () => void;
|
||||
closeBuckleDialog: () => void;
|
||||
|
||||
// Export image dialog
|
||||
openExportImageDialog: (
|
||||
params: Omit<ExportImageDialogProps, 'dialog'>
|
||||
@@ -97,8 +93,6 @@ export const dialogContext = createContext<DialogContext>({
|
||||
closeExportDiagramDialog: emptyFn,
|
||||
openImportDiagramDialog: emptyFn,
|
||||
closeImportDiagramDialog: emptyFn,
|
||||
openBuckleDialog: emptyFn,
|
||||
closeBuckleDialog: emptyFn,
|
||||
openImportDBMLDialog: emptyFn,
|
||||
closeImportDBMLDialog: emptyFn,
|
||||
});
|
||||
|
||||
@@ -19,7 +19,6 @@ import type { ExportImageDialogProps } from '@/dialogs/export-image-dialog/expor
|
||||
import { ExportImageDialog } from '@/dialogs/export-image-dialog/export-image-dialog';
|
||||
import { ExportDiagramDialog } from '@/dialogs/export-diagram-dialog/export-diagram-dialog';
|
||||
import { ImportDiagramDialog } from '@/dialogs/import-diagram-dialog/import-diagram-dialog';
|
||||
import { BuckleDialog } from '@/dialogs/buckle-dialog/buckle-dialog';
|
||||
import type { ImportDBMLDialogProps } from '@/dialogs/import-dbml-dialog/import-dbml-dialog';
|
||||
import { ImportDBMLDialog } from '@/dialogs/import-dbml-dialog/import-dbml-dialog';
|
||||
|
||||
@@ -54,7 +53,6 @@ export const DialogProvider: React.FC<React.PropsWithChildren> = ({
|
||||
);
|
||||
|
||||
const [openStarUsDialog, setOpenStarUsDialog] = useState(false);
|
||||
const [openBuckleDialog, setOpenBuckleDialog] = useState(false);
|
||||
|
||||
// Export image dialog
|
||||
const [openExportImageDialog, setOpenExportImageDialog] = useState(false);
|
||||
@@ -147,8 +145,6 @@ export const DialogProvider: React.FC<React.PropsWithChildren> = ({
|
||||
closeTableSchemaDialog: () => setOpenTableSchemaDialog(false),
|
||||
openStarUsDialog: () => setOpenStarUsDialog(true),
|
||||
closeStarUsDialog: () => setOpenStarUsDialog(false),
|
||||
closeBuckleDialog: () => setOpenBuckleDialog(false),
|
||||
openBuckleDialog: () => setOpenBuckleDialog(true),
|
||||
closeExportImageDialog: () => setOpenExportImageDialog(false),
|
||||
openExportImageDialog: openExportImageDialogHandler,
|
||||
openExportDiagramDialog: () => setOpenExportDiagramDialog(true),
|
||||
@@ -193,7 +189,6 @@ export const DialogProvider: React.FC<React.PropsWithChildren> = ({
|
||||
/>
|
||||
<ExportDiagramDialog dialog={{ open: openExportDiagramDialog }} />
|
||||
<ImportDiagramDialog dialog={{ open: openImportDiagramDialog }} />
|
||||
<BuckleDialog dialog={{ open: openBuckleDialog }} />
|
||||
<ImportDBMLDialog
|
||||
dialog={{ open: openImportDBMLDialog }}
|
||||
{...importDBMLDialogParams}
|
||||
|
||||
src/context/diff-context/diff-check/diff-check.ts (new file, 455 lines)
@@ -0,0 +1,455 @@
|
||||
import type { Diagram } from '@/lib/domain/diagram';
|
||||
import type { DBField } from '@/lib/domain/db-field';
|
||||
import type { DBIndex } from '@/lib/domain/db-index';
|
||||
import type { ChartDBDiff, DiffMap, DiffObject } from '@/lib/domain/diff/diff';
|
||||
import type { FieldDiffAttribute } from '@/lib/domain/diff/field-diff';
|
||||
|
||||
export function getDiffMapKey({
|
||||
diffObject,
|
||||
objectId,
|
||||
attribute,
|
||||
}: {
|
||||
diffObject: DiffObject;
|
||||
objectId: string;
|
||||
attribute?: string;
|
||||
}): string {
|
||||
return attribute
|
||||
? `${diffObject}-${attribute}-${objectId}`
|
||||
: `${diffObject}-${objectId}`;
|
||||
}
|
||||
|
||||
export function generateDiff({
|
||||
diagram,
|
||||
newDiagram,
|
||||
}: {
|
||||
diagram: Diagram;
|
||||
newDiagram: Diagram;
|
||||
}): {
|
||||
diffMap: DiffMap;
|
||||
changedTables: Map<string, boolean>;
|
||||
changedFields: Map<string, boolean>;
|
||||
} {
|
||||
const newDiffs = new Map<string, ChartDBDiff>();
|
||||
const changedTables = new Map<string, boolean>();
|
||||
const changedFields = new Map<string, boolean>();
|
||||
|
||||
// Compare tables
|
||||
compareTables({ diagram, newDiagram, diffMap: newDiffs, changedTables });
|
||||
|
||||
// Compare fields and indexes for matching tables
|
||||
compareTableContents({
|
||||
diagram,
|
||||
newDiagram,
|
||||
diffMap: newDiffs,
|
||||
changedTables,
|
||||
changedFields,
|
||||
});
|
||||
|
||||
// Compare relationships
|
||||
compareRelationships({ diagram, newDiagram, diffMap: newDiffs });
|
||||
|
||||
return { diffMap: newDiffs, changedTables, changedFields };
|
||||
}
|
||||
|
||||
// Compare tables between diagrams
|
||||
function compareTables({
|
||||
diagram,
|
||||
newDiagram,
|
||||
diffMap,
|
||||
changedTables,
|
||||
}: {
|
||||
diagram: Diagram;
|
||||
newDiagram: Diagram;
|
||||
diffMap: DiffMap;
|
||||
changedTables: Map<string, boolean>;
|
||||
}) {
|
||||
const oldTables = diagram.tables || [];
|
||||
const newTables = newDiagram.tables || [];
|
||||
|
||||
// Check for added tables
|
||||
for (const newTable of newTables) {
|
||||
if (!oldTables.find((t) => t.id === newTable.id)) {
|
||||
diffMap.set(
|
||||
getDiffMapKey({ diffObject: 'table', objectId: newTable.id }),
|
||||
{
|
||||
object: 'table',
|
||||
type: 'added',
|
||||
tableAdded: newTable,
|
||||
}
|
||||
);
|
||||
changedTables.set(newTable.id, true);
|
||||
}
|
||||
}
|
||||
|
||||
// Check for removed tables
|
||||
for (const oldTable of oldTables) {
|
||||
if (!newTables.find((t) => t.id === oldTable.id)) {
|
||||
diffMap.set(
|
||||
getDiffMapKey({ diffObject: 'table', objectId: oldTable.id }),
|
||||
{
|
||||
object: 'table',
|
||||
type: 'removed',
|
||||
tableId: oldTable.id,
|
||||
}
|
||||
);
|
||||
changedTables.set(oldTable.id, true);
|
||||
}
|
||||
}
|
||||
|
||||
// Check for table name, comments and color changes
|
||||
for (const oldTable of oldTables) {
|
||||
const newTable = newTables.find((t) => t.id === oldTable.id);
|
||||
|
||||
if (!newTable) continue;
|
||||
|
||||
if (oldTable.name !== newTable.name) {
|
||||
diffMap.set(
|
||||
getDiffMapKey({
|
||||
diffObject: 'table',
|
||||
objectId: oldTable.id,
|
||||
attribute: 'name',
|
||||
}),
|
||||
{
|
||||
object: 'table',
|
||||
type: 'changed',
|
||||
tableId: oldTable.id,
|
||||
attribute: 'name',
|
||||
newValue: newTable.name,
|
||||
oldValue: oldTable.name,
|
||||
}
|
||||
);
|
||||
|
||||
changedTables.set(oldTable.id, true);
|
||||
}
|
||||
|
||||
if (
|
||||
(oldTable.comments || newTable.comments) &&
|
||||
oldTable.comments !== newTable.comments
|
||||
) {
|
||||
diffMap.set(
|
||||
getDiffMapKey({
|
||||
diffObject: 'table',
|
||||
objectId: oldTable.id,
|
||||
attribute: 'comments',
|
||||
}),
|
||||
{
|
||||
object: 'table',
|
||||
type: 'changed',
|
||||
tableId: oldTable.id,
|
||||
attribute: 'comments',
|
||||
newValue: newTable.comments,
|
||||
oldValue: oldTable.comments,
|
||||
}
|
||||
);
|
||||
|
||||
changedTables.set(oldTable.id, true);
|
||||
}
|
||||
|
||||
if (oldTable.color !== newTable.color) {
|
||||
diffMap.set(
|
||||
getDiffMapKey({
|
||||
diffObject: 'table',
|
||||
objectId: oldTable.id,
|
||||
attribute: 'color',
|
||||
}),
|
||||
{
|
||||
object: 'table',
|
||||
type: 'changed',
|
||||
tableId: oldTable.id,
|
||||
attribute: 'color',
|
||||
newValue: newTable.color,
|
||||
oldValue: oldTable.color,
|
||||
}
|
||||
);
|
||||
|
||||
changedTables.set(oldTable.id, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Compare fields and indexes for matching tables
|
||||
function compareTableContents({
|
||||
diagram,
|
||||
newDiagram,
|
||||
diffMap,
|
||||
changedTables,
|
||||
changedFields,
|
||||
}: {
|
||||
diagram: Diagram;
|
||||
newDiagram: Diagram;
|
||||
diffMap: DiffMap;
|
||||
changedTables: Map<string, boolean>;
|
||||
changedFields: Map<string, boolean>;
|
||||
}) {
|
||||
const oldTables = diagram.tables || [];
|
||||
const newTables = newDiagram.tables || [];
|
||||
|
||||
// For each table that exists in both diagrams
|
||||
for (const oldTable of oldTables) {
|
||||
const newTable = newTables.find((t) => t.id === oldTable.id);
|
||||
if (!newTable) continue;
|
||||
|
||||
// Compare fields
|
||||
compareFields({
|
||||
tableId: oldTable.id,
|
||||
oldFields: oldTable.fields,
|
||||
newFields: newTable.fields,
|
||||
diffMap,
|
||||
changedTables,
|
||||
changedFields,
|
||||
});
|
||||
|
||||
// Compare indexes
|
||||
compareIndexes({
|
||||
tableId: oldTable.id,
|
||||
oldIndexes: oldTable.indexes,
|
||||
newIndexes: newTable.indexes,
|
||||
diffMap,
|
||||
changedTables,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Compare fields between tables
|
||||
function compareFields({
|
||||
tableId,
|
||||
oldFields,
|
||||
newFields,
|
||||
diffMap,
|
||||
changedTables,
|
||||
changedFields,
|
||||
}: {
|
||||
tableId: string;
|
||||
oldFields: DBField[];
|
||||
newFields: DBField[];
|
||||
diffMap: DiffMap;
|
||||
changedTables: Map<string, boolean>;
|
||||
changedFields: Map<string, boolean>;
|
||||
}) {
|
||||
// Check for added fields
|
||||
for (const newField of newFields) {
|
||||
if (!oldFields.find((f) => f.id === newField.id)) {
|
||||
diffMap.set(
|
||||
getDiffMapKey({
|
||||
diffObject: 'field',
|
||||
objectId: newField.id,
|
||||
}),
|
||||
{
|
||||
object: 'field',
|
||||
type: 'added',
|
||||
newField,
|
||||
tableId,
|
||||
}
|
||||
);
|
||||
changedTables.set(tableId, true);
|
||||
changedFields.set(newField.id, true);
|
||||
}
|
||||
}
|
||||
|
||||
// Check for removed fields
|
||||
for (const oldField of oldFields) {
|
||||
if (!newFields.find((f) => f.id === oldField.id)) {
|
||||
diffMap.set(
|
||||
getDiffMapKey({
|
||||
diffObject: 'field',
|
||||
objectId: oldField.id,
|
||||
}),
|
||||
{
|
||||
object: 'field',
|
||||
type: 'removed',
|
||||
fieldId: oldField.id,
|
||||
tableId,
|
||||
}
|
||||
);
|
||||
|
||||
changedTables.set(tableId, true);
|
||||
changedFields.set(oldField.id, true);
|
||||
}
|
||||
}
|
||||
|
||||
// Check for field changes
|
||||
for (const oldField of oldFields) {
|
||||
const newField = newFields.find((f) => f.id === oldField.id);
|
||||
if (!newField) continue;
|
||||
|
||||
// Compare basic field properties
|
||||
compareFieldProperties({
|
||||
tableId,
|
||||
oldField,
|
||||
newField,
|
||||
diffMap,
|
||||
changedTables,
|
||||
changedFields,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Compare field properties
|
||||
function compareFieldProperties({
|
||||
tableId,
|
||||
oldField,
|
||||
newField,
|
||||
diffMap,
|
||||
changedTables,
|
||||
changedFields,
|
||||
}: {
|
||||
tableId: string;
|
||||
oldField: DBField;
|
||||
newField: DBField;
|
||||
diffMap: DiffMap;
|
||||
changedTables: Map<string, boolean>;
|
||||
changedFields: Map<string, boolean>;
|
||||
}) {
|
||||
const changedAttributes: FieldDiffAttribute[] = [];
|
||||
|
||||
if (oldField.name !== newField.name) {
|
||||
changedAttributes.push('name');
|
||||
}
|
||||
|
||||
if (oldField.type.id !== newField.type.id) {
|
||||
changedAttributes.push('type');
|
||||
}
|
||||
|
||||
if (oldField.primaryKey !== newField.primaryKey) {
|
||||
changedAttributes.push('primaryKey');
|
||||
}
|
||||
|
||||
if (oldField.unique !== newField.unique) {
|
||||
changedAttributes.push('unique');
|
||||
}
|
||||
|
||||
if (oldField.nullable !== newField.nullable) {
|
||||
changedAttributes.push('nullable');
|
||||
}
|
||||
|
||||
if (
|
||||
(newField.comments || oldField.comments) &&
|
||||
oldField.comments !== newField.comments
|
||||
) {
|
||||
changedAttributes.push('comments');
|
||||
}
|
||||
|
||||
if (changedAttributes.length > 0) {
|
||||
for (const attribute of changedAttributes) {
|
||||
diffMap.set(
|
||||
getDiffMapKey({
|
||||
diffObject: 'field',
|
||||
objectId: oldField.id,
|
||||
attribute,
|
||||
}),
|
||||
{
|
||||
object: 'field',
|
||||
type: 'changed',
|
||||
fieldId: oldField.id,
|
||||
tableId,
|
||||
attribute,
|
||||
oldValue: oldField[attribute] ?? '',
|
||||
newValue: newField[attribute] ?? '',
|
||||
}
|
||||
);
|
||||
}
|
||||
changedTables.set(tableId, true);
|
||||
changedFields.set(oldField.id, true);
|
||||
}
|
||||
}
|
||||
|
||||
// Compare indexes between tables
|
||||
function compareIndexes({
|
||||
tableId,
|
||||
oldIndexes,
|
||||
newIndexes,
|
||||
diffMap,
|
||||
changedTables,
|
||||
}: {
|
||||
tableId: string;
|
||||
oldIndexes: DBIndex[];
|
||||
newIndexes: DBIndex[];
|
||||
diffMap: DiffMap;
|
||||
changedTables: Map<string, boolean>;
|
||||
}) {
|
||||
// Check for added indexes
|
||||
for (const newIndex of newIndexes) {
|
||||
if (!oldIndexes.find((i) => i.id === newIndex.id)) {
|
||||
diffMap.set(
|
||||
getDiffMapKey({
|
||||
diffObject: 'index',
|
||||
objectId: newIndex.id,
|
||||
}),
|
||||
{
|
||||
object: 'index',
|
||||
type: 'added',
|
||||
newIndex,
|
||||
tableId,
|
||||
}
|
||||
);
|
||||
changedTables.set(tableId, true);
|
||||
}
|
||||
}
|
||||
|
||||
// Check for removed indexes
|
||||
for (const oldIndex of oldIndexes) {
|
||||
if (!newIndexes.find((i) => i.id === oldIndex.id)) {
|
||||
diffMap.set(
|
||||
getDiffMapKey({
|
||||
diffObject: 'index',
|
||||
objectId: oldIndex.id,
|
||||
}),
|
||||
{
|
||||
object: 'index',
|
||||
type: 'removed',
|
||||
indexId: oldIndex.id,
|
||||
tableId,
|
||||
}
|
||||
);
|
||||
changedTables.set(tableId, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Compare relationships between diagrams
|
||||
function compareRelationships({
|
||||
diagram,
|
||||
newDiagram,
|
||||
diffMap,
|
||||
}: {
|
||||
diagram: Diagram;
|
||||
newDiagram: Diagram;
|
||||
diffMap: DiffMap;
|
||||
}) {
|
||||
const oldRelationships = diagram.relationships || [];
|
||||
const newRelationships = newDiagram.relationships || [];
|
||||
|
||||
// Check for added relationships
|
||||
for (const newRelationship of newRelationships) {
|
||||
if (!oldRelationships.find((r) => r.id === newRelationship.id)) {
|
||||
diffMap.set(
|
||||
getDiffMapKey({
|
||||
diffObject: 'relationship',
|
||||
objectId: newRelationship.id,
|
||||
}),
|
||||
{
|
||||
object: 'relationship',
|
||||
type: 'added',
|
||||
newRelationship,
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Check for removed relationships
|
||||
for (const oldRelationship of oldRelationships) {
|
||||
if (!newRelationships.find((r) => r.id === oldRelationship.id)) {
|
||||
diffMap.set(
|
||||
getDiffMapKey({
|
||||
diffObject: 'relationship',
|
||||
objectId: oldRelationship.id,
|
||||
}),
|
||||
{
|
||||
object: 'relationship',
|
||||
type: 'removed',
|
||||
relationshipId: oldRelationship.id,
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
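As a rough usage sketch of the checker above (the summarizeDiff wrapper and its logging are hypothetical, and the relative import assumes the caller lives next to the diff-check folder): generateDiff returns a flat DiffMap keyed via getDiffMapKey, plus per-table and per-field change maps.

import { generateDiff, getDiffMapKey } from './diff-check/diff-check';
import type { Diagram } from '@/lib/domain/diagram';

// Hypothetical caller: both diagrams come from elsewhere, e.g. the current
// editor state vs. a diagram produced by a fresh DDL import.
function summarizeDiff(diagram: Diagram, newDiagram: Diagram) {
    const { diffMap, changedTables, changedFields } = generateDiff({
        diagram,
        newDiagram,
    });

    // Entries are looked up by the composite key built from object type and id.
    const firstTableId = newDiagram.tables?.[0]?.id;
    if (firstTableId) {
        const entry = diffMap.get(
            getDiffMapKey({ diffObject: 'table', objectId: firstTableId })
        );
        console.log('first table diff:', entry?.type ?? 'unchanged');
    }

    console.log(
        `${changedTables.size} tables and ${changedFields.size} fields changed`
    );
}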
src/context/diff-context/diff-context.tsx (new file, 79 lines)
@@ -0,0 +1,79 @@
|
||||
import { createContext } from 'react';
|
||||
import type { Diagram } from '@/lib/domain/diagram';
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import type { EventEmitter } from 'ahooks/lib/useEventEmitter';
|
||||
import type { DBField } from '@/lib/domain/db-field';
|
||||
import type { DataType } from '@/lib/data/data-types/data-types';
|
||||
import type { DBRelationship } from '@/lib/domain/db-relationship';
|
||||
import type { DiffMap } from '@/lib/domain/diff/diff';
|
||||
|
||||
export type DiffEventType = 'diff_calculated';
|
||||
|
||||
export type DiffEventBase<T extends DiffEventType, D> = {
|
||||
action: T;
|
||||
data: D;
|
||||
};
|
||||
|
||||
export type DiffCalculatedData = {
|
||||
tablesAdded: DBTable[];
|
||||
fieldsAdded: Map<string, DBField[]>;
|
||||
relationshipsAdded: DBRelationship[];
|
||||
};
|
||||
|
||||
export type DiffCalculatedEvent = DiffEventBase<
|
||||
'diff_calculated',
|
||||
DiffCalculatedData
|
||||
>;
|
||||
|
||||
export type DiffEvent = DiffCalculatedEvent;
|
||||
|
||||
export interface DiffContext {
|
||||
newDiagram: Diagram | null;
|
||||
originalDiagram: Diagram | null;
|
||||
diffMap: DiffMap;
|
||||
hasDiff: boolean;
|
||||
|
||||
calculateDiff: ({
|
||||
diagram,
|
||||
newDiagram,
|
||||
}: {
|
||||
diagram: Diagram;
|
||||
newDiagram: Diagram;
|
||||
}) => void;
|
||||
|
||||
// table diff
|
||||
checkIfTableHasChange: ({ tableId }: { tableId: string }) => boolean;
|
||||
checkIfNewTable: ({ tableId }: { tableId: string }) => boolean;
|
||||
checkIfTableRemoved: ({ tableId }: { tableId: string }) => boolean;
|
||||
getTableNewName: ({ tableId }: { tableId: string }) => string | null;
|
||||
getTableNewColor: ({ tableId }: { tableId: string }) => string | null;
|
||||
|
||||
// field diff
|
||||
checkIfFieldHasChange: ({
|
||||
tableId,
|
||||
fieldId,
|
||||
}: {
|
||||
tableId: string;
|
||||
fieldId: string;
|
||||
}) => boolean;
|
||||
checkIfFieldRemoved: ({ fieldId }: { fieldId: string }) => boolean;
|
||||
checkIfNewField: ({ fieldId }: { fieldId: string }) => boolean;
|
||||
getFieldNewName: ({ fieldId }: { fieldId: string }) => string | null;
|
||||
getFieldNewType: ({ fieldId }: { fieldId: string }) => DataType | null;
|
||||
|
||||
// relationship diff
|
||||
checkIfNewRelationship: ({
|
||||
relationshipId,
|
||||
}: {
|
||||
relationshipId: string;
|
||||
}) => boolean;
|
||||
checkIfRelationshipRemoved: ({
|
||||
relationshipId,
|
||||
}: {
|
||||
relationshipId: string;
|
||||
}) => boolean;
|
||||
|
||||
events: EventEmitter<DiffEvent>;
|
||||
}
|
||||
|
||||
export const diffContext = createContext<DiffContext | undefined>(undefined);
|
||||
src/context/diff-context/diff-provider.tsx (new file, 373 lines)
@@ -0,0 +1,373 @@
|
||||
import React, { useCallback } from 'react';
|
||||
import type {
|
||||
DiffCalculatedData,
|
||||
DiffContext,
|
||||
DiffEvent,
|
||||
} from './diff-context';
|
||||
import { diffContext } from './diff-context';
|
||||
|
||||
import { generateDiff, getDiffMapKey } from './diff-check/diff-check';
|
||||
import type { Diagram } from '@/lib/domain/diagram';
|
||||
import { useEventEmitter } from 'ahooks';
|
||||
import type { DBField } from '@/lib/domain/db-field';
|
||||
import type { DataType } from '@/lib/data/data-types/data-types';
|
||||
import type { DBRelationship } from '@/lib/domain/db-relationship';
|
||||
import type { ChartDBDiff, DiffMap } from '@/lib/domain/diff/diff';
|
||||
|
||||
export const DiffProvider: React.FC<React.PropsWithChildren> = ({
|
||||
children,
|
||||
}) => {
|
||||
const [newDiagram, setNewDiagram] = React.useState<Diagram | null>(null);
|
||||
const [originalDiagram, setOriginalDiagram] =
|
||||
React.useState<Diagram | null>(null);
|
||||
const [diffMap, setDiffMap] = React.useState<DiffMap>(
|
||||
new Map<string, ChartDBDiff>()
|
||||
);
|
||||
const [tablesChanged, setTablesChanged] = React.useState<
|
||||
Map<string, boolean>
|
||||
>(new Map<string, boolean>());
|
||||
const [fieldsChanged, setFieldsChanged] = React.useState<
|
||||
Map<string, boolean>
|
||||
>(new Map<string, boolean>());
|
||||
|
||||
const events = useEventEmitter<DiffEvent>();
|
||||
|
||||
const generateNewFieldsMap = useCallback(
|
||||
({
|
||||
diffMap,
|
||||
newDiagram,
|
||||
}: {
|
||||
diffMap: DiffMap;
|
||||
newDiagram: Diagram;
|
||||
}) => {
|
||||
const newFieldsMap = new Map<string, DBField[]>();
|
||||
|
||||
diffMap.forEach((diff) => {
|
||||
if (diff.object === 'field' && diff.type === 'added') {
|
||||
const field = newDiagram?.tables
|
||||
?.find((table) => table.id === diff.tableId)
|
||||
?.fields.find((f) => f.id === diff.newField.id);
|
||||
|
||||
if (field) {
|
||||
newFieldsMap.set(diff.tableId, [
|
||||
...(newFieldsMap.get(diff.tableId) ?? []),
|
||||
field,
|
||||
]);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return newFieldsMap;
|
||||
},
|
||||
[]
|
||||
);
|
||||
|
||||
const findNewRelationships = useCallback(
|
||||
({
|
||||
diffMap,
|
||||
newDiagram,
|
||||
}: {
|
||||
diffMap: DiffMap;
|
||||
newDiagram: Diagram;
|
||||
}) => {
|
||||
const relationships: DBRelationship[] = [];
|
||||
diffMap.forEach((diff) => {
|
||||
if (diff.object === 'relationship' && diff.type === 'added') {
|
||||
const relationship = newDiagram?.relationships?.find(
|
||||
(rel) => rel.id === diff.newRelationship.id
|
||||
);
|
||||
|
||||
if (relationship) {
|
||||
relationships.push(relationship);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return relationships;
|
||||
},
|
||||
[]
|
||||
);
|
||||
|
||||
const generateDiffCalculatedData = useCallback(
|
||||
({
|
||||
newDiagram,
|
||||
diffMap,
|
||||
}: {
|
||||
newDiagram: Diagram;
|
||||
diffMap: DiffMap;
|
||||
}): DiffCalculatedData => {
|
||||
return {
|
||||
tablesAdded:
|
||||
newDiagram?.tables?.filter((table) => {
|
||||
const tableKey = getDiffMapKey({
|
||||
diffObject: 'table',
|
||||
objectId: table.id,
|
||||
});
|
||||
|
||||
return (
|
||||
diffMap.has(tableKey) &&
|
||||
diffMap.get(tableKey)?.type === 'added'
|
||||
);
|
||||
}) ?? [],
|
||||
|
||||
fieldsAdded: generateNewFieldsMap({
|
||||
diffMap: diffMap,
|
||||
newDiagram: newDiagram,
|
||||
}),
|
||||
relationshipsAdded: findNewRelationships({
|
||||
diffMap: diffMap,
|
||||
newDiagram: newDiagram,
|
||||
}),
|
||||
};
|
||||
},
|
||||
[findNewRelationships, generateNewFieldsMap]
|
||||
);
|
||||
|
||||
const calculateDiff: DiffContext['calculateDiff'] = useCallback(
|
||||
({ diagram, newDiagram: newDiagramArg }) => {
|
||||
const {
|
||||
diffMap: newDiffs,
|
||||
changedTables: newChangedTables,
|
||||
changedFields: newChangedFields,
|
||||
} = generateDiff({ diagram, newDiagram: newDiagramArg });
|
||||
|
||||
setDiffMap(newDiffs);
|
||||
setTablesChanged(newChangedTables);
|
||||
setFieldsChanged(newChangedFields);
|
||||
setNewDiagram(newDiagramArg);
|
||||
setOriginalDiagram(diagram);
|
||||
|
||||
events.emit({
|
||||
action: 'diff_calculated',
|
||||
data: generateDiffCalculatedData({
|
||||
diffMap: newDiffs,
|
||||
newDiagram: newDiagramArg,
|
||||
}),
|
||||
});
|
||||
},
|
||||
[setDiffMap, events, generateDiffCalculatedData]
|
||||
);
|
||||
|
||||
const getTableNewName = useCallback<DiffContext['getTableNewName']>(
|
||||
({ tableId }) => {
|
||||
const tableNameKey = getDiffMapKey({
|
||||
diffObject: 'table',
|
||||
objectId: tableId,
|
||||
attribute: 'name',
|
||||
});
|
||||
|
||||
if (diffMap.has(tableNameKey)) {
|
||||
const diff = diffMap.get(tableNameKey);
|
||||
|
||||
if (diff?.type === 'changed') {
|
||||
return diff.newValue as string;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
[diffMap]
|
||||
);
|
||||
|
||||
const getTableNewColor = useCallback<DiffContext['getTableNewColor']>(
|
||||
({ tableId }) => {
|
||||
const tableColorKey = getDiffMapKey({
|
||||
diffObject: 'table',
|
||||
objectId: tableId,
|
||||
attribute: 'color',
|
||||
});
|
||||
|
||||
if (diffMap.has(tableColorKey)) {
|
||||
const diff = diffMap.get(tableColorKey);
|
||||
|
||||
if (diff?.type === 'changed') {
|
||||
return diff.newValue as string;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
},
|
||||
[diffMap]
|
||||
);
|
||||
|
||||
const checkIfTableHasChange = useCallback<
|
||||
DiffContext['checkIfTableHasChange']
|
||||
>(({ tableId }) => tablesChanged.get(tableId) ?? false, [tablesChanged]);
|
||||
|
||||
const checkIfNewTable = useCallback<DiffContext['checkIfNewTable']>(
|
||||
({ tableId }) => {
|
||||
const tableKey = getDiffMapKey({
|
||||
diffObject: 'table',
|
||||
objectId: tableId,
|
||||
});
|
||||
|
||||
return (
|
||||
diffMap.has(tableKey) && diffMap.get(tableKey)?.type === 'added'
|
||||
);
|
||||
},
|
||||
[diffMap]
|
||||
);
|
||||
|
||||
const checkIfTableRemoved = useCallback<DiffContext['checkIfTableRemoved']>(
|
||||
({ tableId }) => {
|
||||
const tableKey = getDiffMapKey({
|
||||
diffObject: 'table',
|
||||
objectId: tableId,
|
||||
});
|
||||
|
||||
return (
|
||||
diffMap.has(tableKey) &&
|
||||
diffMap.get(tableKey)?.type === 'removed'
|
||||
);
|
||||
},
|
||||
[diffMap]
|
||||
);
|
||||
|
||||
const checkIfFieldHasChange = useCallback<
|
||||
DiffContext['checkIfFieldHasChange']
|
||||
>(
|
||||
({ fieldId }) => {
|
||||
return fieldsChanged.get(fieldId) ?? false;
|
||||
},
|
||||
[fieldsChanged]
|
||||
);
|
||||
|
||||
const checkIfFieldRemoved = useCallback<DiffContext['checkIfFieldRemoved']>(
|
||||
({ fieldId }) => {
|
||||
const fieldKey = getDiffMapKey({
|
||||
diffObject: 'field',
|
||||
objectId: fieldId,
|
||||
});
|
||||
|
||||
return (
|
||||
diffMap.has(fieldKey) &&
|
||||
diffMap.get(fieldKey)?.type === 'removed'
|
||||
);
|
||||
},
|
||||
[diffMap]
|
||||
);
|
||||
|
||||
const checkIfNewField = useCallback<DiffContext['checkIfNewField']>(
|
||||
({ fieldId }) => {
|
||||
const fieldKey = getDiffMapKey({
|
||||
diffObject: 'field',
|
||||
objectId: fieldId,
|
||||
});
|
||||
|
||||
return (
|
||||
diffMap.has(fieldKey) && diffMap.get(fieldKey)?.type === 'added'
|
||||
);
|
||||
},
|
||||
[diffMap]
|
||||
);
|
||||
|
||||
const getFieldNewName = useCallback<DiffContext['getFieldNewName']>(
|
||||
({ fieldId }) => {
|
||||
const fieldKey = getDiffMapKey({
|
||||
diffObject: 'field',
|
||||
objectId: fieldId,
|
||||
attribute: 'name',
|
||||
});
|
||||
|
||||
if (diffMap.has(fieldKey)) {
|
||||
const diff = diffMap.get(fieldKey);
|
||||
|
||||
if (diff?.type === 'changed') {
|
||||
return diff.newValue as string;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
[diffMap]
|
||||
);
|
||||
|
||||
const getFieldNewType = useCallback<DiffContext['getFieldNewType']>(
|
||||
({ fieldId }) => {
|
||||
const fieldKey = getDiffMapKey({
|
||||
diffObject: 'field',
|
||||
objectId: fieldId,
|
||||
attribute: 'type',
|
||||
});
|
||||
|
||||
if (diffMap.has(fieldKey)) {
|
||||
const diff = diffMap.get(fieldKey);
|
||||
|
||||
if (diff?.type === 'changed') {
|
||||
return diff.newValue as DataType;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
[diffMap]
|
||||
);
|
||||
|
||||
const checkIfNewRelationship = useCallback<
|
||||
DiffContext['checkIfNewRelationship']
|
||||
>(
|
||||
({ relationshipId }) => {
|
||||
const relationshipKey = getDiffMapKey({
|
||||
diffObject: 'relationship',
|
||||
objectId: relationshipId,
|
||||
});
|
||||
|
||||
return (
|
||||
diffMap.has(relationshipKey) &&
|
||||
diffMap.get(relationshipKey)?.type === 'added'
|
||||
);
|
||||
},
|
||||
[diffMap]
|
||||
);
|
||||
|
||||
const checkIfRelationshipRemoved = useCallback<
|
||||
DiffContext['checkIfRelationshipRemoved']
|
||||
>(
|
||||
({ relationshipId }) => {
|
||||
const relationshipKey = getDiffMapKey({
|
||||
diffObject: 'relationship',
|
||||
objectId: relationshipId,
|
||||
});
|
||||
|
||||
return (
|
||||
diffMap.has(relationshipKey) &&
|
||||
diffMap.get(relationshipKey)?.type === 'removed'
|
||||
);
|
||||
},
|
||||
[diffMap]
|
||||
);
|
||||
|
||||
return (
|
||||
<diffContext.Provider
|
||||
value={{
|
||||
newDiagram,
|
||||
originalDiagram,
|
||||
diffMap,
|
||||
hasDiff: diffMap.size > 0,
|
||||
|
||||
calculateDiff,
|
||||
|
||||
// table diff
|
||||
getTableNewName,
|
||||
checkIfNewTable,
|
||||
checkIfTableRemoved,
|
||||
checkIfTableHasChange,
|
||||
getTableNewColor,
|
||||
|
||||
// field diff
|
||||
checkIfFieldHasChange,
|
||||
checkIfFieldRemoved,
|
||||
checkIfNewField,
|
||||
getFieldNewName,
|
||||
getFieldNewType,
|
||||
|
||||
// relationship diff
|
||||
checkIfNewRelationship,
|
||||
checkIfRelationshipRemoved,
|
||||
|
||||
events,
|
||||
}}
|
||||
>
|
||||
{children}
|
||||
</diffContext.Provider>
|
||||
);
|
||||
};
|
||||
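A hedged example of how the provider might be wired up together with the useDiff hook defined just below; the CompareButton component and the alias import paths are assumptions for illustration only.

import React from 'react';
import type { Diagram } from '@/lib/domain/diagram';
import { DiffProvider } from '@/context/diff-context/diff-provider';
import { useDiff } from '@/context/diff-context/use-diff';

// Hypothetical consumer: computes the diff between the current diagram and an
// imported one, then reflects whether any change was detected.
const CompareButton: React.FC<{ current: Diagram; imported: Diagram }> = ({
    current,
    imported,
}) => {
    const { calculateDiff, hasDiff } = useDiff();

    return (
        <button
            onClick={() =>
                calculateDiff({ diagram: current, newDiagram: imported })
            }
        >
            {hasDiff ? 'Changes detected' : 'Compare'}
        </button>
    );
};

// The provider must wrap any component that calls useDiff.
export const DiffExample: React.FC<{ current: Diagram; imported: Diagram }> = (
    props
) => (
    <DiffProvider>
        <CompareButton {...props} />
    </DiffProvider>
);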
src/context/diff-context/use-diff.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
import { useContext } from 'react';
import { diffContext } from './diff-context';

export const useDiff = () => {
    const context = useContext(diffContext);
    if (context === undefined) {
        throw new Error('useDiff must be used within a DiffProvider');
    }
    return context;
};
@@ -7,6 +7,7 @@ export enum KeyboardShortcutAction {
|
||||
SAVE_DIAGRAM = 'save_diagram',
|
||||
TOGGLE_SIDE_PANEL = 'toggle_side_panel',
|
||||
SHOW_ALL = 'show_all',
|
||||
TOGGLE_THEME = 'toggle_theme',
|
||||
}
|
||||
|
||||
export interface KeyboardShortcut {
|
||||
@@ -63,6 +64,13 @@ export const keyboardShortcuts: Record<
|
||||
keyCombinationMac: 'meta+0',
|
||||
keyCombinationWin: 'ctrl+0',
|
||||
},
|
||||
[KeyboardShortcutAction.TOGGLE_THEME]: {
|
||||
action: KeyboardShortcutAction.TOGGLE_THEME,
|
||||
keyCombinationLabelMac: '⌘M',
|
||||
keyCombinationLabelWin: 'Ctrl+M',
|
||||
keyCombinationMac: 'meta+m',
|
||||
keyCombinationWin: 'ctrl+m',
|
||||
},
|
||||
};
|
||||
|
||||
export interface KeyboardShortcutForOS {
|
||||
|
||||
@@ -30,12 +30,6 @@ export interface LocalConfigContext {
|
||||
starUsDialogLastOpen: number;
|
||||
setStarUsDialogLastOpen: (lastOpen: number) => void;
|
||||
|
||||
buckleWaitlistOpened: boolean;
|
||||
setBuckleWaitlistOpened: (githubRepoOpened: boolean) => void;
|
||||
|
||||
buckleDialogLastOpen: number;
|
||||
setBuckleDialogLastOpen: (lastOpen: number) => void;
|
||||
|
||||
showDependenciesOnCanvas: boolean;
|
||||
setShowDependenciesOnCanvas: (showDependenciesOnCanvas: boolean) => void;
|
||||
|
||||
@@ -53,7 +47,7 @@ export const LocalConfigContext = createContext<LocalConfigContext>({
|
||||
schemasFilter: {},
|
||||
setSchemasFilter: emptyFn,
|
||||
|
||||
showCardinality: false,
|
||||
showCardinality: true,
|
||||
setShowCardinality: emptyFn,
|
||||
|
||||
hideMultiSchemaNotification: false,
|
||||
@@ -65,12 +59,6 @@ export const LocalConfigContext = createContext<LocalConfigContext>({
|
||||
starUsDialogLastOpen: 0,
|
||||
setStarUsDialogLastOpen: emptyFn,
|
||||
|
||||
buckleWaitlistOpened: false,
|
||||
setBuckleWaitlistOpened: emptyFn,
|
||||
|
||||
buckleDialogLastOpen: 0,
|
||||
setBuckleDialogLastOpen: emptyFn,
|
||||
|
||||
showDependenciesOnCanvas: false,
|
||||
setShowDependenciesOnCanvas: emptyFn,
|
||||
|
||||
|
||||
@@ -10,8 +10,6 @@ const showCardinalityKey = 'show_cardinality';
|
||||
const hideMultiSchemaNotificationKey = 'hide_multi_schema_notification';
|
||||
const githubRepoOpenedKey = 'github_repo_opened';
|
||||
const starUsDialogLastOpenKey = 'star_us_dialog_last_open';
|
||||
const buckleWaitlistOpenedKey = 'buckle_waitlist_opened';
|
||||
const buckleDialogLastOpenKey = 'buckle_dialog_last_open';
|
||||
const showDependenciesOnCanvasKey = 'show_dependencies_on_canvas';
|
||||
const showMiniMapOnCanvasKey = 'show_minimap_on_canvas';
|
||||
|
||||
@@ -33,7 +31,7 @@ export const LocalConfigProvider: React.FC<React.PropsWithChildren> = ({
|
||||
);
|
||||
|
||||
const [showCardinality, setShowCardinality] = React.useState<boolean>(
|
||||
(localStorage.getItem(showCardinalityKey) || 'false') === 'true'
|
||||
(localStorage.getItem(showCardinalityKey) || 'true') === 'true'
|
||||
);
|
||||
|
||||
const [hideMultiSchemaNotification, setHideMultiSchemaNotification] =
|
||||
@@ -51,17 +49,6 @@ export const LocalConfigProvider: React.FC<React.PropsWithChildren> = ({
|
||||
parseInt(localStorage.getItem(starUsDialogLastOpenKey) || '0')
|
||||
);
|
||||
|
||||
const [buckleWaitlistOpened, setBuckleWaitlistOpened] =
|
||||
React.useState<boolean>(
|
||||
(localStorage.getItem(buckleWaitlistOpenedKey) || 'false') ===
|
||||
'true'
|
||||
);
|
||||
|
||||
const [buckleDialogLastOpen, setBuckleDialogLastOpen] =
|
||||
React.useState<number>(
|
||||
parseInt(localStorage.getItem(buckleDialogLastOpenKey) || '0')
|
||||
);
|
||||
|
||||
const [showDependenciesOnCanvas, setShowDependenciesOnCanvas] =
|
||||
React.useState<boolean>(
|
||||
(localStorage.getItem(showDependenciesOnCanvasKey) || 'false') ===
|
||||
@@ -84,20 +71,6 @@ export const LocalConfigProvider: React.FC<React.PropsWithChildren> = ({
|
||||
localStorage.setItem(githubRepoOpenedKey, githubRepoOpened.toString());
|
||||
}, [githubRepoOpened]);
|
||||
|
||||
useEffect(() => {
|
||||
localStorage.setItem(
|
||||
buckleDialogLastOpenKey,
|
||||
buckleDialogLastOpen.toString()
|
||||
);
|
||||
}, [buckleDialogLastOpen]);
|
||||
|
||||
useEffect(() => {
|
||||
localStorage.setItem(
|
||||
buckleWaitlistOpenedKey,
|
||||
buckleWaitlistOpened.toString()
|
||||
);
|
||||
}, [buckleWaitlistOpened]);
|
||||
|
||||
useEffect(() => {
|
||||
localStorage.setItem(
|
||||
hideMultiSchemaNotificationKey,
|
||||
@@ -154,10 +127,6 @@ export const LocalConfigProvider: React.FC<React.PropsWithChildren> = ({
|
||||
setStarUsDialogLastOpen,
|
||||
showDependenciesOnCanvas,
|
||||
setShowDependenciesOnCanvas,
|
||||
setBuckleDialogLastOpen,
|
||||
buckleDialogLastOpen,
|
||||
buckleWaitlistOpened,
|
||||
setBuckleWaitlistOpened,
|
||||
showMiniMapOnCanvas,
|
||||
setShowMiniMapOnCanvas,
|
||||
}}
|
||||
|
||||
@@ -1,8 +1,13 @@
|
||||
import React, { useEffect, useState } from 'react';
|
||||
import React, { useEffect, useState, useCallback } from 'react';
|
||||
import type { EffectiveTheme } from './theme-context';
|
||||
import { ThemeContext } from './theme-context';
|
||||
import { useMediaQuery } from 'react-responsive';
|
||||
import { useLocalConfig } from '@/hooks/use-local-config';
|
||||
import { useHotkeys } from 'react-hotkeys-hook';
|
||||
import {
|
||||
KeyboardShortcutAction,
|
||||
keyboardShortcutsForOS,
|
||||
} from '../keyboard-shortcuts-context/keyboard-shortcuts';
|
||||
|
||||
export const ThemeProvider: React.FC<React.PropsWithChildren> = ({
|
||||
children,
|
||||
@@ -29,6 +34,24 @@ export const ThemeProvider: React.FC<React.PropsWithChildren> = ({
|
||||
}
|
||||
}, [effectiveTheme]);
|
||||
|
||||
const handleThemeToggle = useCallback(() => {
|
||||
if (theme === 'system') {
|
||||
setTheme(effectiveTheme === 'dark' ? 'light' : 'dark');
|
||||
} else {
|
||||
setTheme(theme === 'dark' ? 'light' : 'dark');
|
||||
}
|
||||
}, [theme, effectiveTheme, setTheme]);
|
||||
|
||||
useHotkeys(
|
||||
keyboardShortcutsForOS[KeyboardShortcutAction.TOGGLE_THEME]
|
||||
.keyCombination,
|
||||
handleThemeToggle,
|
||||
{
|
||||
preventDefault: true,
|
||||
},
|
||||
[handleThemeToggle]
|
||||
);
|
||||
|
||||
return (
|
||||
<ThemeContext.Provider value={{ theme, setTheme, effectiveTheme }}>
|
||||
{children}
|
||||
|
||||
@@ -1,80 +0,0 @@
|
||||
import React, { useCallback, useEffect } from 'react';
|
||||
import { useDialog } from '@/hooks/use-dialog';
|
||||
import {
|
||||
Dialog,
|
||||
DialogClose,
|
||||
DialogContent,
|
||||
DialogDescription,
|
||||
DialogFooter,
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from '@/components/dialog/dialog';
|
||||
import { Button } from '@/components/button/button';
|
||||
import type { BaseDialogProps } from '../common/base-dialog-props';
|
||||
import { useLocalConfig } from '@/hooks/use-local-config';
|
||||
import { useTheme } from '@/hooks/use-theme';
|
||||
|
||||
export interface BuckleDialogProps extends BaseDialogProps {}
|
||||
|
||||
export const BuckleDialog: React.FC<BuckleDialogProps> = ({ dialog }) => {
|
||||
const { setBuckleWaitlistOpened } = useLocalConfig();
|
||||
const { effectiveTheme } = useTheme();
|
||||
|
||||
useEffect(() => {
|
||||
if (!dialog.open) return;
|
||||
}, [dialog.open]);
|
||||
const { closeBuckleDialog } = useDialog();
|
||||
|
||||
const handleConfirm = useCallback(() => {
|
||||
setBuckleWaitlistOpened(true);
|
||||
window.open('https://waitlist.buckle.dev', '_blank');
|
||||
}, [setBuckleWaitlistOpened]);
|
||||
|
||||
return (
|
||||
<Dialog
|
||||
{...dialog}
|
||||
onOpenChange={(open) => {
|
||||
if (!open) {
|
||||
closeBuckleDialog();
|
||||
}
|
||||
}}
|
||||
>
|
||||
<DialogContent
|
||||
className="flex flex-col"
|
||||
showClose={false}
|
||||
onInteractOutside={(e) => {
|
||||
e.preventDefault();
|
||||
}}
|
||||
>
|
||||
<DialogHeader>
|
||||
<DialogTitle className="hidden" />
|
||||
<DialogDescription className="hidden" />
|
||||
</DialogHeader>
|
||||
<div className="flex w-full flex-col items-center">
|
||||
<img
|
||||
src={
|
||||
effectiveTheme === 'light'
|
||||
? '/buckle-animated.gif'
|
||||
: '/buckle.png'
|
||||
}
|
||||
className="h-16"
|
||||
/>
|
||||
<div className="mt-6 text-center text-base">
|
||||
We've been working on something big -{' '}
|
||||
<span className="font-semibold">Ready to explore?</span>
|
||||
</div>
|
||||
</div>
|
||||
<DialogFooter className="flex gap-1 md:justify-between">
|
||||
<DialogClose asChild>
|
||||
<Button variant="secondary">Not now</Button>
|
||||
</DialogClose>
|
||||
<DialogClose asChild>
|
||||
<Button onClick={handleConfirm}>
|
||||
Try ChartDB v2.0!
|
||||
</Button>
|
||||
</DialogClose>
|
||||
</DialogFooter>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
);
|
||||
};
|
||||
@@ -1,4 +1,4 @@
|
||||
import React, { useCallback, useEffect, useState } from 'react';
|
||||
import React, { Suspense, useCallback, useEffect, useState } from 'react';
|
||||
import { Button } from '@/components/button/button';
|
||||
import {
|
||||
DialogClose,
|
||||
@@ -8,31 +8,10 @@ import {
|
||||
DialogInternalContent,
|
||||
DialogTitle,
|
||||
} from '@/components/dialog/dialog';
|
||||
import { ToggleGroup, ToggleGroupItem } from '@/components/toggle/toggle-group';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import { databaseSecondaryLogoMap } from '@/lib/databases';
|
||||
import { CodeSnippet } from '@/components/code-snippet/code-snippet';
|
||||
import { Textarea } from '@/components/textarea/textarea';
|
||||
import type { DatabaseType } from '@/lib/domain/database-type';
|
||||
import { Editor } from '@/components/code-snippet/code-snippet';
|
||||
import type { DatabaseEdition } from '@/lib/domain/database-edition';
|
||||
import {
|
||||
databaseEditionToImageMap,
|
||||
databaseEditionToLabelMap,
|
||||
databaseTypeToEditionMap,
|
||||
} from '@/lib/domain/database-edition';
|
||||
import {
|
||||
Avatar,
|
||||
AvatarFallback,
|
||||
AvatarImage,
|
||||
} from '@/components/avatar/avatar';
|
||||
import { SSMSInfo } from './ssms-info/ssms-info';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Tabs, TabsList, TabsTrigger } from '@/components/tabs/tabs';
|
||||
import type { DatabaseClient } from '@/lib/domain/database-clients';
|
||||
import {
|
||||
databaseClientToLabelMap,
|
||||
databaseTypeToClientsMap,
|
||||
} from '@/lib/domain/database-clients';
|
||||
import type { ImportMetadataScripts } from '@/lib/data/import-metadata/scripts/scripts';
|
||||
import { ZoomableImage } from '@/components/zoomable-image/zoomable-image';
|
||||
import { useBreakpoint } from '@/hooks/use-breakpoint';
|
||||
import { Spinner } from '@/components/spinner/spinner';
|
||||
@@ -40,6 +19,17 @@ import {
|
||||
fixMetadataJson,
|
||||
isStringMetadataJson,
|
||||
} from '@/lib/data/import-metadata/utils';
|
||||
import {
|
||||
ResizableHandle,
|
||||
ResizablePanel,
|
||||
ResizablePanelGroup,
|
||||
} from '@/components/resizable/resizable';
|
||||
import { useTheme } from '@/hooks/use-theme';
|
||||
import type { OnChange } from '@monaco-editor/react';
|
||||
import { useDebounce } from '@/hooks/use-debounce-v2';
|
||||
import { InstructionsSection } from './instructions-section/instructions-section';
|
||||
import { parseSQLError } from '@/lib/data/sql-import';
|
||||
import type { editor } from 'monaco-editor';
|
||||
|
||||
const errorScriptOutputMessage =
|
||||
'Invalid JSON. Please correct it or contact us at chartdb.io@gmail.com for help.';
|
||||
@@ -57,6 +47,8 @@ export interface ImportDatabaseProps {
|
||||
>;
|
||||
keepDialogAfterImport?: boolean;
|
||||
title: string;
|
||||
importMethod: 'query' | 'ddl';
|
||||
setImportMethod: (method: 'query' | 'ddl') => void;
|
||||
}
|
||||
|
||||
export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
@@ -70,34 +62,51 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
setDatabaseEdition,
|
||||
keepDialogAfterImport,
|
||||
title,
|
||||
importMethod,
|
||||
setImportMethod,
|
||||
}) => {
|
||||
const databaseClients = databaseTypeToClientsMap[databaseType];
|
||||
const { effectiveTheme } = useTheme();
|
||||
const [errorMessage, setErrorMessage] = useState('');
|
||||
const [databaseClient, setDatabaseClient] = useState<
|
||||
DatabaseClient | undefined
|
||||
>();
|
||||
const { t } = useTranslation();
|
||||
const [importMetadataScripts, setImportMetadataScripts] =
|
||||
useState<ImportMetadataScripts | null>(null);
|
||||
|
||||
const { t } = useTranslation();
|
||||
const { isSm: isDesktop } = useBreakpoint('sm');
|
||||
|
||||
const [showCheckJsonButton, setShowCheckJsonButton] = useState(false);
|
||||
const [isCheckingJson, setIsCheckingJson] = useState(false);
|
||||
|
||||
const [showSSMSInfoDialog, setShowSSMSInfoDialog] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
const loadScripts = async () => {
|
||||
const { importMetadataScripts } = await import(
|
||||
'@/lib/data/import-metadata/scripts/scripts'
|
||||
);
|
||||
setImportMetadataScripts(importMetadataScripts);
|
||||
};
|
||||
loadScripts();
|
||||
}, []);
|
||||
setScriptResult('');
|
||||
setErrorMessage('');
|
||||
setShowCheckJsonButton(false);
|
||||
}, [importMethod, setScriptResult]);
|
||||
|
||||
// Check if the ddl is valid
|
||||
useEffect(() => {
|
||||
if (importMethod !== 'ddl') {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!scriptResult.trim()) return;
|
||||
|
||||
parseSQLError({
|
||||
sqlContent: scriptResult,
|
||||
sourceDatabaseType: databaseType,
|
||||
}).then((result) => {
|
||||
if (result.success) {
|
||||
setErrorMessage('');
|
||||
} else if (!result.success && result.error) {
|
||||
setErrorMessage(result.error);
|
||||
}
|
||||
});
|
||||
}, [importMethod, scriptResult, databaseType]);
|
||||
|
||||
// Check if the script result is a valid JSON
|
||||
useEffect(() => {
|
||||
if (importMethod !== 'query') {
|
||||
return;
|
||||
}
|
||||
|
||||
if (scriptResult.trim().length === 0) {
|
||||
setErrorMessage('');
|
||||
setShowCheckJsonButton(false);
|
||||
@@ -117,7 +126,7 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
setErrorMessage(errorScriptOutputMessage);
|
||||
setShowCheckJsonButton(false);
|
||||
}
|
||||
}, [scriptResult]);
|
||||
}, [scriptResult, importMethod]);
|
||||
|
||||
const handleImport = useCallback(() => {
|
||||
if (errorMessage.length === 0 && scriptResult.trim().length !== 0) {
|
||||
@@ -125,19 +134,20 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
}
|
||||
}, [errorMessage.length, onImport, scriptResult]);
|
||||
|
||||
const handleInputChange = useCallback(
|
||||
(e: React.ChangeEvent<HTMLTextAreaElement>) => {
|
||||
const inputValue = e.target.value;
|
||||
setScriptResult(inputValue);
|
||||
const handleInputChange: OnChange = useCallback(
|
||||
(inputValue) => {
|
||||
setScriptResult(inputValue ?? '');
|
||||
|
||||
// Automatically open SSMS info when input length is exactly 65535
|
||||
if (inputValue.length === 65535) {
|
||||
if ((inputValue ?? '').length === 65535) {
|
||||
setShowSSMSInfoDialog(true);
|
||||
}
|
||||
},
|
||||
[setScriptResult]
|
||||
);
|
||||
|
||||
const debouncedHandleInputChange = useDebounce(handleInputChange, 500);
|
||||
|
||||
const handleCheckJson = useCallback(async () => {
|
||||
setIsCheckingJson(true);
|
||||
|
||||
@@ -155,6 +165,17 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
setIsCheckingJson(false);
|
||||
}, [scriptResult, setScriptResult]);
|
||||
|
||||
const handleEditorDidMount = useCallback(
|
||||
(editor: editor.IStandaloneCodeEditor) => {
|
||||
editor.onDidPaste(() => {
|
||||
setTimeout(() => {
|
||||
editor.getAction('editor.action.formatDocument')?.run();
|
||||
}, 0);
|
||||
});
|
||||
},
|
||||
[]
|
||||
);
|
||||
|
||||
const renderHeader = useCallback(() => {
|
||||
return (
|
||||
<DialogHeader>
|
||||
@@ -164,228 +185,156 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
);
|
||||
}, [title]);
|
||||
|
||||
const renderInstructions = useCallback(
|
||||
() => (
|
||||
<InstructionsSection
|
||||
databaseType={databaseType}
|
||||
importMethod={importMethod}
|
||||
setDatabaseEdition={setDatabaseEdition}
|
||||
setImportMethod={setImportMethod}
|
||||
databaseEdition={databaseEdition}
|
||||
setShowSSMSInfoDialog={setShowSSMSInfoDialog}
|
||||
showSSMSInfoDialog={showSSMSInfoDialog}
|
||||
/>
|
||||
),
|
||||
[
|
||||
databaseType,
|
||||
importMethod,
|
||||
setDatabaseEdition,
|
||||
setImportMethod,
|
||||
databaseEdition,
|
||||
setShowSSMSInfoDialog,
|
||||
showSSMSInfoDialog,
|
||||
]
|
||||
);
|
||||
|
||||
const renderOutputTextArea = useCallback(
|
||||
() => (
|
||||
<div className="flex size-full flex-col gap-1 overflow-hidden rounded-md border p-1">
|
||||
<div className="w-full text-center text-xs text-muted-foreground">
|
||||
{importMethod === 'query'
|
||||
? 'Smart Query Output'
|
||||
: 'SQL DDL'}
|
||||
</div>
|
||||
<div className="flex-1 overflow-hidden">
|
||||
<Suspense fallback={<Spinner />}>
|
||||
<Editor
|
||||
value={scriptResult}
|
||||
onChange={debouncedHandleInputChange}
|
||||
language={importMethod === 'query' ? 'json' : 'sql'}
|
||||
loading={<Spinner />}
|
||||
onMount={handleEditorDidMount}
|
||||
theme={
|
||||
effectiveTheme === 'dark'
|
||||
? 'dbml-dark'
|
||||
: 'dbml-light'
|
||||
}
|
||||
options={{
|
||||
formatOnPaste: true,
|
||||
minimap: { enabled: false },
|
||||
scrollBeyondLastLine: false,
|
||||
automaticLayout: true,
|
||||
glyphMargin: false,
|
||||
lineNumbers: 'on',
|
||||
guides: {
|
||||
indentation: false,
|
||||
},
|
||||
folding: true,
|
||||
lineNumbersMinChars: 3,
|
||||
renderValidationDecorations: 'off',
|
||||
lineDecorationsWidth: 0,
|
||||
overviewRulerBorder: false,
|
||||
overviewRulerLanes: 0,
|
||||
hideCursorInOverviewRuler: true,
|
||||
contextmenu: false,
|
||||
|
||||
scrollbar: {
|
||||
vertical: 'hidden',
|
||||
horizontal: 'hidden',
|
||||
alwaysConsumeMouseWheel: false,
|
||||
},
|
||||
}}
|
||||
className="size-full min-h-40"
|
||||
/>
|
||||
</Suspense>
|
||||
</div>
|
||||
|
||||
{showCheckJsonButton || errorMessage ? (
|
||||
<div className="mt-2 flex shrink-0 items-center gap-2">
|
||||
{showCheckJsonButton ? (
|
||||
<Button
|
||||
type="button"
|
||||
variant="outline"
|
||||
size="sm"
|
||||
onClick={handleCheckJson}
|
||||
disabled={isCheckingJson}
|
||||
className="h-7"
|
||||
>
|
||||
{isCheckingJson ? (
|
||||
<Spinner size="small" />
|
||||
) : (
|
||||
t(
|
||||
'new_diagram_dialog.import_database.check_script_result'
|
||||
)
|
||||
)}
|
||||
</Button>
|
||||
) : (
|
||||
<p className="text-xs text-red-700">
|
||||
{errorMessage}
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
) : null}
|
||||
</div>
|
||||
),
|
||||
[
|
||||
errorMessage,
|
||||
scriptResult,
|
||||
importMethod,
|
||||
effectiveTheme,
|
||||
debouncedHandleInputChange,
|
||||
handleEditorDidMount,
|
||||
showCheckJsonButton,
|
||||
isCheckingJson,
|
||||
handleCheckJson,
|
||||
t,
|
||||
]
|
||||
);
|
||||
|
||||
const renderContent = useCallback(() => {
|
||||
return (
|
||||
<DialogInternalContent>
|
||||
<div className="flex w-full flex-1 flex-col gap-6">
|
||||
{databaseTypeToEditionMap[databaseType].length > 0 ? (
|
||||
<div className="flex flex-col gap-1 md:flex-row">
|
||||
<p className="text-sm leading-6 text-muted-foreground">
|
||||
{t(
|
||||
'new_diagram_dialog.import_database.database_edition'
|
||||
)}
|
||||
</p>
|
||||
<ToggleGroup
|
||||
type="single"
|
||||
className="ml-1 flex-wrap gap-2"
|
||||
value={
|
||||
!databaseEdition
|
||||
? 'regular'
|
||||
: databaseEdition
|
||||
}
|
||||
onValueChange={(value) => {
|
||||
setDatabaseEdition(
|
||||
value === 'regular'
|
||||
? undefined
|
||||
: (value as DatabaseEdition)
|
||||
);
|
||||
}}
|
||||
>
|
||||
<ToggleGroupItem
|
||||
value="regular"
|
||||
variant="outline"
|
||||
className="h-6 gap-1 p-0 px-2 shadow-none"
|
||||
>
|
||||
<Avatar className="size-4 rounded-none">
|
||||
<AvatarImage
|
||||
src={
|
||||
databaseSecondaryLogoMap[
|
||||
databaseType
|
||||
]
|
||||
}
|
||||
alt="Regular"
|
||||
/>
|
||||
<AvatarFallback>Regular</AvatarFallback>
|
||||
</Avatar>
|
||||
Regular
|
||||
</ToggleGroupItem>
|
||||
{databaseTypeToEditionMap[databaseType].map(
|
||||
(edition) => (
|
||||
<ToggleGroupItem
|
||||
value={edition}
|
||||
key={edition}
|
||||
variant="outline"
|
||||
className="h-6 gap-1 p-0 px-2 shadow-none"
|
||||
>
|
||||
<Avatar className="size-4">
|
||||
<AvatarImage
|
||||
src={
|
||||
databaseEditionToImageMap[
|
||||
edition
|
||||
]
|
||||
}
|
||||
alt={
|
||||
databaseEditionToLabelMap[
|
||||
edition
|
||||
]
|
||||
}
|
||||
/>
|
||||
<AvatarFallback>
|
||||
{
|
||||
databaseEditionToLabelMap[
|
||||
edition
|
||||
]
|
||||
}
|
||||
</AvatarFallback>
|
||||
</Avatar>
|
||||
{databaseEditionToLabelMap[edition]}
|
||||
</ToggleGroupItem>
|
||||
)
|
||||
)}
|
||||
</ToggleGroup>
|
||||
</div>
|
||||
) : null}
|
||||
<div className="flex flex-col gap-1">
|
||||
<div className="flex flex-col gap-1 text-sm text-muted-foreground md:flex-row md:justify-between">
|
||||
<div>
|
||||
1.{' '}
|
||||
{t('new_diagram_dialog.import_database.step_1')}
|
||||
</div>
|
||||
{databaseType === DatabaseType.SQL_SERVER && (
|
||||
<SSMSInfo
|
||||
open={showSSMSInfoDialog}
|
||||
setOpen={setShowSSMSInfoDialog}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
{databaseTypeToClientsMap[databaseType].length > 0 ? (
|
||||
<Tabs
|
||||
value={
|
||||
!databaseClient
|
||||
? 'dbclient'
|
||||
: databaseClient
|
||||
}
|
||||
onValueChange={(value) => {
|
||||
setDatabaseClient(
|
||||
value === 'dbclient'
|
||||
? undefined
|
||||
: (value as DatabaseClient)
|
||||
);
|
||||
}}
|
||||
>
|
||||
<div className="flex flex-1">
|
||||
<TabsList className="h-8 justify-start rounded-none rounded-t-sm ">
|
||||
<TabsTrigger
|
||||
value="dbclient"
|
||||
className="h-6 w-20"
|
||||
>
|
||||
DB Client
|
||||
</TabsTrigger>
|
||||
|
||||
{databaseClients?.map((client) => (
|
||||
<TabsTrigger
|
||||
key={client}
|
||||
value={client}
|
||||
className="h-6 !w-20"
|
||||
>
|
||||
{
|
||||
databaseClientToLabelMap[
|
||||
client
|
||||
]
|
||||
}
|
||||
</TabsTrigger>
|
||||
)) ?? []}
|
||||
</TabsList>
|
||||
</div>
|
||||
<CodeSnippet
|
||||
className="h-40 w-full"
|
||||
loading={!importMetadataScripts}
|
||||
code={
|
||||
importMetadataScripts?.[databaseType]?.(
|
||||
{
|
||||
databaseEdition,
|
||||
databaseClient,
|
||||
}
|
||||
) ?? ''
|
||||
}
|
||||
language={databaseClient ? 'shell' : 'sql'}
|
||||
/>
|
||||
</Tabs>
|
||||
) : (
|
||||
<CodeSnippet
|
||||
className="h-40 w-full flex-auto"
|
||||
loading={!importMetadataScripts}
|
||||
code={
|
||||
importMetadataScripts?.[databaseType]?.({
|
||||
databaseEdition,
|
||||
}) ?? ''
|
||||
}
|
||||
language="sql"
|
||||
/>
|
||||
)}
|
||||
{isDesktop ? (
|
||||
<ResizablePanelGroup
|
||||
direction={isDesktop ? 'horizontal' : 'vertical'}
|
||||
className="min-h-[500px] md:min-h-fit"
|
||||
>
|
||||
<ResizablePanel
|
||||
defaultSize={25}
|
||||
minSize={25}
|
||||
maxSize={99}
|
||||
className="min-h-fit rounded-md bg-gradient-to-b from-slate-50 to-slate-100 p-2 dark:from-slate-900 dark:to-slate-800 md:min-h-fit md:min-w-[350px] md:rounded-l-md md:p-2"
|
||||
>
|
||||
{renderInstructions()}
|
||||
</ResizablePanel>
|
||||
<ResizableHandle withHandle />
|
||||
<ResizablePanel className="min-h-40 py-2 md:px-2 md:py-0">
|
||||
{renderOutputTextArea()}
|
||||
</ResizablePanel>
|
||||
</ResizablePanelGroup>
|
||||
) : (
|
||||
<div className="flex flex-col gap-2">
|
||||
{renderInstructions()}
|
||||
{renderOutputTextArea()}
|
||||
</div>
|
||||
<div className="flex h-48 flex-col gap-1">
|
||||
<p className="text-sm text-muted-foreground">
|
||||
2. {t('new_diagram_dialog.import_database.step_2')}
|
||||
</p>
|
||||
<Textarea
|
||||
className="w-full flex-1 rounded-md bg-muted p-2 text-sm"
|
||||
placeholder={t(
|
||||
'new_diagram_dialog.import_database.script_results_placeholder'
|
||||
)}
|
||||
value={scriptResult}
|
||||
onChange={handleInputChange}
|
||||
/>
|
||||
{showCheckJsonButton || errorMessage ? (
|
||||
<div className="mt-2 flex items-center gap-2">
|
||||
{showCheckJsonButton ? (
|
||||
<Button
|
||||
type="button"
|
||||
variant="outline"
|
||||
size="sm"
|
||||
onClick={handleCheckJson}
|
||||
disabled={isCheckingJson}
|
||||
>
|
||||
{isCheckingJson ? (
|
||||
<Spinner size="small" />
|
||||
) : (
|
||||
t(
|
||||
'new_diagram_dialog.import_database.check_script_result'
|
||||
)
|
||||
)}
|
||||
</Button>
|
||||
) : (
|
||||
<p className="text-sm text-red-700">
|
||||
{errorMessage}
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
) : null}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</DialogInternalContent>
|
||||
);
|
||||
}, [
|
||||
databaseEdition,
|
||||
databaseType,
|
||||
errorMessage,
|
||||
handleInputChange,
|
||||
scriptResult,
|
||||
setDatabaseEdition,
|
||||
databaseClients,
|
||||
databaseClient,
|
||||
importMetadataScripts,
|
||||
t,
|
||||
showCheckJsonButton,
|
||||
isCheckingJson,
|
||||
handleCheckJson,
|
||||
showSSMSInfoDialog,
|
||||
setShowSSMSInfoDialog,
|
||||
]);
|
||||
}, [renderOutputTextArea, renderInstructions, isDesktop]);
|
||||
|
||||
const renderFooter = useCallback(() => {
|
||||
return (
|
||||
<DialogFooter className="mt-4 flex !justify-between gap-2">
|
||||
<DialogFooter className="flex !justify-between gap-2">
|
||||
<div className="flex flex-col-reverse sm:flex-row sm:justify-end sm:space-x-2">
|
||||
{goBack && (
|
||||
<Button
|
||||
|
||||
@@ -0,0 +1,178 @@
|
||||
import React from 'react';
|
||||
import logo from '@/assets/logo-2.png';
|
||||
import { ToggleGroup, ToggleGroupItem } from '@/components/toggle/toggle-group';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import { databaseSecondaryLogoMap } from '@/lib/databases';
|
||||
import type { DatabaseEdition } from '@/lib/domain/database-edition';
|
||||
import {
|
||||
databaseEditionToImageMap,
|
||||
databaseEditionToLabelMap,
|
||||
databaseTypeToEditionMap,
|
||||
} from '@/lib/domain/database-edition';
|
||||
import {
|
||||
Avatar,
|
||||
AvatarFallback,
|
||||
AvatarImage,
|
||||
} from '@/components/avatar/avatar';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Code } from 'lucide-react';
|
||||
import { SmartQueryInstructions } from './instructions/smart-query-instructions';
|
||||
import { DDLInstructions } from './instructions/ddl-instructions';
|
||||
|
||||
const DatabasesWithoutDDLInstructions: DatabaseType[] = [
|
||||
DatabaseType.CLICKHOUSE,
|
||||
];
|
||||
|
||||
export interface InstructionsSectionProps {
|
||||
databaseType: DatabaseType;
|
||||
databaseEdition?: DatabaseEdition;
|
||||
setDatabaseEdition: React.Dispatch<
|
||||
React.SetStateAction<DatabaseEdition | undefined>
|
||||
>;
|
||||
importMethod: 'query' | 'ddl';
|
||||
setImportMethod: (method: 'query' | 'ddl') => void;
|
||||
showSSMSInfoDialog: boolean;
|
||||
setShowSSMSInfoDialog: (show: boolean) => void;
|
||||
}
|
||||
|
||||
export const InstructionsSection: React.FC<InstructionsSectionProps> = ({
|
||||
databaseType,
|
||||
databaseEdition,
|
||||
setDatabaseEdition,
|
||||
importMethod,
|
||||
setImportMethod,
|
||||
setShowSSMSInfoDialog,
|
||||
showSSMSInfoDialog,
|
||||
}) => {
|
||||
const { t } = useTranslation();
|
||||
|
||||
return (
|
||||
<div className="flex w-full flex-1 flex-col gap-4">
|
||||
{databaseTypeToEditionMap[databaseType].length > 0 ? (
|
||||
<div className="flex flex-col gap-1">
|
||||
<p className="text-sm leading-6 text-primary">
|
||||
{t(
|
||||
'new_diagram_dialog.import_database.database_edition'
|
||||
)}
|
||||
</p>
|
||||
<ToggleGroup
|
||||
type="single"
|
||||
className="ml-1 flex-wrap justify-start gap-2"
|
||||
value={!databaseEdition ? 'regular' : databaseEdition}
|
||||
onValueChange={(value) => {
|
||||
setDatabaseEdition(
|
||||
value === 'regular'
|
||||
? undefined
|
||||
: (value as DatabaseEdition)
|
||||
);
|
||||
}}
|
||||
>
|
||||
<ToggleGroupItem
|
||||
value="regular"
|
||||
variant="outline"
|
||||
className="h-6 gap-1 p-0 px-2 shadow-none data-[state=on]:bg-slate-200 dark:data-[state=on]:bg-slate-700"
|
||||
>
|
||||
<Avatar className="size-4 rounded-none">
|
||||
<AvatarImage
|
||||
src={databaseSecondaryLogoMap[databaseType]}
|
||||
alt="Regular"
|
||||
/>
|
||||
<AvatarFallback>Regular</AvatarFallback>
|
||||
</Avatar>
|
||||
Regular
|
||||
</ToggleGroupItem>
|
||||
{databaseTypeToEditionMap[databaseType].map(
|
||||
(edition) => (
|
||||
<ToggleGroupItem
|
||||
value={edition}
|
||||
key={edition}
|
||||
variant="outline"
|
||||
className="h-6 gap-1 p-0 px-2 shadow-none data-[state=on]:bg-slate-200 dark:data-[state=on]:bg-slate-700"
|
||||
>
|
||||
<Avatar className="size-4">
|
||||
<AvatarImage
|
||||
src={
|
||||
databaseEditionToImageMap[
|
||||
edition
|
||||
]
|
||||
}
|
||||
alt={
|
||||
databaseEditionToLabelMap[
|
||||
edition
|
||||
]
|
||||
}
|
||||
/>
|
||||
<AvatarFallback>
|
||||
{databaseEditionToLabelMap[edition]}
|
||||
</AvatarFallback>
|
||||
</Avatar>
|
||||
{databaseEditionToLabelMap[edition]}
|
||||
</ToggleGroupItem>
|
||||
)
|
||||
)}
|
||||
</ToggleGroup>
|
||||
</div>
|
||||
) : null}
|
||||
|
||||
{DatabasesWithoutDDLInstructions.includes(databaseType) ? null : (
|
||||
<div className="flex flex-col gap-1">
|
||||
<p className="text-sm leading-6 text-primary">
|
||||
How would you like to import?
|
||||
</p>
|
||||
<ToggleGroup
|
||||
type="single"
|
||||
className="ml-1 flex-wrap justify-start gap-2"
|
||||
value={importMethod}
|
||||
onValueChange={(value) => {
|
||||
let selectedImportMethod: 'query' | 'ddl' = 'query';
|
||||
if (value) {
|
||||
selectedImportMethod = value as 'query' | 'ddl';
|
||||
}
|
||||
|
||||
setImportMethod(selectedImportMethod);
|
||||
}}
|
||||
>
|
||||
<ToggleGroupItem
|
||||
value="query"
|
||||
variant="outline"
|
||||
className="h-6 gap-1 p-0 px-2 shadow-none data-[state=on]:bg-slate-200 dark:data-[state=on]:bg-slate-700"
|
||||
>
|
||||
<Avatar className="h-3 w-4 rounded-none">
|
||||
<AvatarImage src={logo} alt="query" />
|
||||
<AvatarFallback>Query</AvatarFallback>
|
||||
</Avatar>
|
||||
Smart Query
|
||||
</ToggleGroupItem>
|
||||
<ToggleGroupItem
|
||||
value="ddl"
|
||||
variant="outline"
|
||||
className="h-6 gap-1 p-0 px-2 shadow-none data-[state=on]:bg-slate-200 dark:data-[state=on]:bg-slate-700"
|
||||
>
|
||||
<Avatar className="size-4 rounded-none">
|
||||
<Code size={16} />
|
||||
</Avatar>
|
||||
DDL
|
||||
</ToggleGroupItem>
|
||||
</ToggleGroup>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="flex flex-col gap-2">
|
||||
<div className="text-sm font-semibold">Instructions:</div>
|
||||
{importMethod === 'query' ? (
|
||||
<SmartQueryInstructions
|
||||
databaseType={databaseType}
|
||||
databaseEdition={databaseEdition}
|
||||
showSSMSInfoDialog={showSSMSInfoDialog}
|
||||
setShowSSMSInfoDialog={setShowSSMSInfoDialog}
|
||||
/>
|
||||
) : (
|
||||
<DDLInstructions
|
||||
databaseType={databaseType}
|
||||
databaseEdition={databaseEdition}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -0,0 +1,48 @@
|
||||
import React from 'react';
|
||||
import { CodeSnippet } from '@/components/code-snippet/code-snippet';
|
||||
|
||||
export interface DDLInstructionStepProps {
|
||||
index: number;
|
||||
text: string;
|
||||
code?: string;
|
||||
example?: string;
|
||||
}
|
||||
|
||||
export const DDLInstructionStep: React.FC<DDLInstructionStepProps> = ({
|
||||
index,
|
||||
text,
|
||||
code,
|
||||
example,
|
||||
}) => {
|
||||
return (
|
||||
<div className="flex flex-col gap-1">
|
||||
<div className="flex flex-col gap-1 text-sm text-primary">
|
||||
<div>
|
||||
<span className="font-medium">{index}.</span> {text}
|
||||
</div>
|
||||
|
||||
{code ? (
|
||||
<div className="h-[60px]">
|
||||
<CodeSnippet
|
||||
className="h-full"
|
||||
code={code}
|
||||
language={'shell'}
|
||||
/>
|
||||
</div>
|
||||
) : null}
|
||||
{example ? (
|
||||
<>
|
||||
<div className="my-2">Example:</div>
|
||||
<div className="h-[60px]">
|
||||
<CodeSnippet
|
||||
className="h-full"
|
||||
code={example}
|
||||
language={'shell'}
|
||||
/>
|
||||
</div>
|
||||
</>
|
||||
) : null}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -0,0 +1,117 @@
|
||||
import React from 'react';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import type { DatabaseEdition } from '@/lib/domain/database-edition';
|
||||
import { DDLInstructionStep } from './ddl-instruction-step';
|
||||
|
||||
interface DDLInstruction {
|
||||
text: string;
|
||||
code?: string;
|
||||
example?: string;
|
||||
}
|
||||
|
||||
const DDLInstructionsMap: Record<DatabaseType, DDLInstruction[]> = {
|
||||
[DatabaseType.GENERIC]: [],
|
||||
[DatabaseType.MYSQL]: [
|
||||
{
|
||||
text: 'Install mysqldump.',
|
||||
},
|
||||
{
|
||||
text: 'Execute the following command in your terminal (prefix with sudo on Linux if needed):',
|
||||
code: `mysqldump -h <host> -u <username>\n-P <port> -p --no-data\n<database_name> > <output_path>`,
|
||||
example: `mysqldump -h localhost -u root -P\n3306 -p --no-data my_db >\nschema_export.sql`,
|
||||
},
|
||||
{
|
||||
text: 'Open the exported SQL file, copy its contents, and paste them here.',
|
||||
},
|
||||
],
|
||||
[DatabaseType.POSTGRESQL]: [
|
||||
{
|
||||
text: 'Install pg_dump.',
|
||||
},
|
||||
{
|
||||
text: 'Execute the following command in your terminal (prefix with sudo on Linux if needed):',
|
||||
code: `pg_dump -h <host> -p <port> -d <database_name> \n -U <username> -s -F p -E UTF-8 \n -f <output_file_path>`,
|
||||
example: `pg_dump -h localhost -p 5432 -d my_db \n -U postgres -s -F p -E UTF-8 \n -f schema_export.sql`,
|
||||
},
|
||||
{
|
||||
text: 'Open the exported SQL file, copy its contents, and paste them here.',
|
||||
},
|
||||
],
|
||||
[DatabaseType.SQLITE]: [
|
||||
{
|
||||
text: 'Install sqlite3.',
|
||||
},
|
||||
{
|
||||
text: 'Execute the following command in your terminal:',
|
||||
code: `sqlite3 <database_file_path>\n.dump > <output_file_path>`,
|
||||
example: `sqlite3 my_db.db\n.dump > schema_export.sql`,
|
||||
},
|
||||
{
|
||||
text: 'Open the exported SQL file, copy its contents, and paste them here.',
|
||||
},
|
||||
],
|
||||
[DatabaseType.SQL_SERVER]: [
|
||||
{
|
||||
text: 'Download and install SQL Server Management Studio (SSMS).',
|
||||
},
|
||||
{
|
||||
text: 'Connect to your SQL Server instance using SSMS.',
|
||||
},
|
||||
{
|
||||
text: 'Right-click on the database you want to export and select Script Database as > CREATE To > New Query Editor Window.',
|
||||
},
|
||||
{
|
||||
text: 'Copy the generated script and paste it here.',
|
||||
},
|
||||
],
|
||||
[DatabaseType.CLICKHOUSE]: [],
|
||||
[DatabaseType.COCKROACHDB]: [
|
||||
{
|
||||
text: 'Install pg_dump.',
|
||||
},
|
||||
{
|
||||
text: 'Execute the following command in your terminal (prefix with sudo on Linux if needed):',
|
||||
code: `pg_dump -h <host> -p <port> -d <database_name> \n -U <username> -s -F p -E UTF-8 \n -f <output_file_path>`,
|
||||
example: `pg_dump -h localhost -p 5432 -d my_db \n -U postgres -s -F p -E UTF-8 \n -f schema_export.sql`,
|
||||
},
|
||||
{
|
||||
text: 'Open the exported SQL file, copy its contents, and paste them here.',
|
||||
},
|
||||
],
|
||||
[DatabaseType.MARIADB]: [
|
||||
{
|
||||
text: 'Install mysqldump.',
|
||||
},
|
||||
{
|
||||
text: 'Execute the following command in your terminal (prefix with sudo on Linux if needed):',
|
||||
code: `mysqldump -h <host> -u <username>\n-P <port> -p --no-data\n<database_name> > <output_path>`,
|
||||
example: `mysqldump -h localhost -u root -P\n3306 -p --no-data my_db >\nschema_export.sql`,
|
||||
},
|
||||
{
|
||||
text: 'Open the exported SQL file, copy its contents, and paste them here.',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
export interface DDLInstructionsProps {
|
||||
databaseType: DatabaseType;
|
||||
databaseEdition?: DatabaseEdition;
|
||||
}
|
||||
|
||||
export const DDLInstructions: React.FC<DDLInstructionsProps> = ({
|
||||
databaseType,
|
||||
}) => {
|
||||
return (
|
||||
<>
|
||||
{DDLInstructionsMap[databaseType].map((instruction, index) => (
|
||||
<DDLInstructionStep
|
||||
key={index}
|
||||
index={index + 1}
|
||||
text={instruction.text}
|
||||
code={instruction.code}
|
||||
example={instruction.example}
|
||||
/>
|
||||
))}
|
||||
</>
|
||||
);
|
||||
};
|
||||
@@ -0,0 +1,147 @@
|
||||
import React, { useEffect, useMemo, useState } from 'react';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import { CodeSnippet } from '@/components/code-snippet/code-snippet';
|
||||
import type { DatabaseEdition } from '@/lib/domain/database-edition';
|
||||
import { SSMSInfo } from './ssms-info/ssms-info';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Tabs, TabsList, TabsTrigger } from '@/components/tabs/tabs';
|
||||
import type { DatabaseClient } from '@/lib/domain/database-clients';
|
||||
import { minimizeQuery } from '@/lib/data/import-metadata/scripts/minimize-script';
|
||||
import {
|
||||
databaseClientToLabelMap,
|
||||
databaseTypeToClientsMap,
|
||||
databaseEditionToClientsMap,
|
||||
} from '@/lib/domain/database-clients';
|
||||
import type { ImportMetadataScripts } from '@/lib/data/import-metadata/scripts/scripts';
|
||||
|
||||
export interface SmartQueryInstructionsProps {
|
||||
databaseType: DatabaseType;
|
||||
databaseEdition?: DatabaseEdition;
|
||||
showSSMSInfoDialog: boolean;
|
||||
setShowSSMSInfoDialog: (show: boolean) => void;
|
||||
}
|
||||
|
||||
export const SmartQueryInstructions: React.FC<SmartQueryInstructionsProps> = ({
|
||||
databaseType,
|
||||
databaseEdition,
|
||||
showSSMSInfoDialog,
|
||||
setShowSSMSInfoDialog,
|
||||
}) => {
|
||||
const databaseClients = useMemo(
|
||||
() => [
|
||||
...databaseTypeToClientsMap[databaseType],
|
||||
...(databaseEdition
|
||||
? databaseEditionToClientsMap[databaseEdition]
|
||||
: []),
|
||||
],
|
||||
[databaseType, databaseEdition]
|
||||
);
|
||||
const [databaseClient, setDatabaseClient] = useState<
|
||||
DatabaseClient | undefined
|
||||
>();
|
||||
const { t } = useTranslation();
|
||||
const [importMetadataScripts, setImportMetadataScripts] =
|
||||
useState<ImportMetadataScripts | null>(null);
|
||||
|
||||
const code = useMemo(
|
||||
() =>
|
||||
(databaseClients.length > 0
|
||||
? importMetadataScripts?.[databaseType]?.({
|
||||
databaseEdition,
|
||||
databaseClient,
|
||||
})
|
||||
: importMetadataScripts?.[databaseType]?.({
|
||||
databaseEdition,
|
||||
})) ?? '',
|
||||
[
|
||||
databaseType,
|
||||
databaseEdition,
|
||||
databaseClients,
|
||||
importMetadataScripts,
|
||||
databaseClient,
|
||||
]
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
const loadScripts = async () => {
|
||||
const { importMetadataScripts } = await import(
|
||||
'@/lib/data/import-metadata/scripts/scripts'
|
||||
);
|
||||
setImportMetadataScripts(importMetadataScripts);
|
||||
};
|
||||
loadScripts();
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="flex flex-col gap-1">
|
||||
<div className="flex flex-col gap-1 text-sm text-primary">
|
||||
<div>
|
||||
<span className="font-medium">1.</span>{' '}
|
||||
{t('new_diagram_dialog.import_database.step_1')}
|
||||
</div>
|
||||
{databaseType === DatabaseType.SQL_SERVER && (
|
||||
<SSMSInfo
|
||||
open={showSSMSInfoDialog}
|
||||
setOpen={setShowSSMSInfoDialog}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
{databaseClients.length > 0 ? (
|
||||
<Tabs
|
||||
value={!databaseClient ? 'dbclient' : databaseClient}
|
||||
onValueChange={(value) => {
|
||||
setDatabaseClient(
|
||||
value === 'dbclient'
|
||||
? undefined
|
||||
: (value as DatabaseClient)
|
||||
);
|
||||
}}
|
||||
>
|
||||
<div className="flex flex-1">
|
||||
<TabsList className="h-8 justify-start rounded-none rounded-t-sm ">
|
||||
<TabsTrigger
|
||||
value="dbclient"
|
||||
className="h-6 w-20"
|
||||
>
|
||||
DB Client
|
||||
</TabsTrigger>
|
||||
|
||||
{databaseClients?.map((client) => (
|
||||
<TabsTrigger
|
||||
key={client}
|
||||
value={client}
|
||||
className="h-6 !w-20"
|
||||
>
|
||||
{databaseClientToLabelMap[client]}
|
||||
</TabsTrigger>
|
||||
)) ?? []}
|
||||
</TabsList>
|
||||
</div>
|
||||
<CodeSnippet
|
||||
className="h-40 w-full md:h-[200px]"
|
||||
loading={!importMetadataScripts}
|
||||
code={minimizeQuery(code)}
|
||||
codeToCopy={code}
|
||||
language={databaseClient ? 'shell' : 'sql'}
|
||||
/>
|
||||
</Tabs>
|
||||
) : (
|
||||
<CodeSnippet
|
||||
className="h-40 w-full flex-auto md:h-[200px]"
|
||||
loading={!importMetadataScripts}
|
||||
code={minimizeQuery(code)}
|
||||
codeToCopy={code}
|
||||
language="sql"
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
<div className="flex flex-col gap-1">
|
||||
<p className="text-sm text-primary">
|
||||
<span className="font-medium">2.</span>{' '}
|
||||
{t('new_diagram_dialog.import_database.step_2')}
|
||||
</p>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
};
|
||||
@@ -17,6 +17,7 @@ import { CreateDiagramDialogStep } from './create-diagram-dialog-step';
|
||||
import { ImportDatabase } from '../common/import-database/import-database';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import type { BaseDialogProps } from '../common/base-dialog-props';
|
||||
import { sqlImportToDiagram } from '@/lib/data/sql-import';
|
||||
|
||||
export interface CreateDiagramDialogProps extends BaseDialogProps {}
|
||||
|
||||
@@ -25,6 +26,7 @@ export const CreateDiagramDialog: React.FC<CreateDiagramDialogProps> = ({
|
||||
}) => {
|
||||
const { diagramId } = useChartDB();
|
||||
const { t } = useTranslation();
|
||||
const [importMethod, setImportMethod] = useState<'query' | 'ddl'>('query');
|
||||
const [databaseType, setDatabaseType] = useState<DatabaseType>(
|
||||
DatabaseType.GENERIC
|
||||
);
|
||||
@@ -41,6 +43,11 @@ export const CreateDiagramDialog: React.FC<CreateDiagramDialogProps> = ({
|
||||
const [diagramNumber, setDiagramNumber] = useState<number>(1);
|
||||
const navigate = useNavigate();
|
||||
|
||||
useEffect(() => {
|
||||
setDatabaseEdition(undefined);
|
||||
setImportMethod('query');
|
||||
}, [databaseType]);
|
||||
|
||||
useEffect(() => {
|
||||
const fetchDiagrams = async () => {
|
||||
const diagrams = await listDiagrams();
|
||||
@@ -54,29 +61,41 @@ export const CreateDiagramDialog: React.FC<CreateDiagramDialogProps> = ({
|
||||
setDatabaseType(DatabaseType.GENERIC);
|
||||
setDatabaseEdition(undefined);
|
||||
setScriptResult('');
|
||||
setImportMethod('query');
|
||||
}, [dialog.open]);
|
||||
|
||||
const hasExistingDiagram = (diagramId ?? '').trim().length !== 0;
|
||||
|
||||
const importNewDiagram = useCallback(async () => {
|
||||
const databaseMetadata: DatabaseMetadata =
|
||||
loadDatabaseMetadata(scriptResult);
|
||||
let diagram: Diagram | undefined;
|
||||
|
||||
const diagram = await loadFromDatabaseMetadata({
|
||||
databaseType,
|
||||
databaseMetadata,
|
||||
diagramNumber,
|
||||
databaseEdition:
|
||||
databaseEdition?.trim().length === 0
|
||||
? undefined
|
||||
: databaseEdition,
|
||||
});
|
||||
if (importMethod === 'ddl') {
|
||||
diagram = await sqlImportToDiagram({
|
||||
sqlContent: scriptResult,
|
||||
sourceDatabaseType: databaseType,
|
||||
targetDatabaseType: databaseType,
|
||||
});
|
||||
} else {
|
||||
const databaseMetadata: DatabaseMetadata =
|
||||
loadDatabaseMetadata(scriptResult);
|
||||
|
||||
diagram = await loadFromDatabaseMetadata({
|
||||
databaseType,
|
||||
databaseMetadata,
|
||||
diagramNumber,
|
||||
databaseEdition:
|
||||
databaseEdition?.trim().length === 0
|
||||
? undefined
|
||||
: databaseEdition,
|
||||
});
|
||||
}
|
||||
|
||||
await addDiagram({ diagram });
|
||||
await updateConfig({ defaultDiagramId: diagram.id });
|
||||
closeCreateDiagramDialog();
|
||||
navigate(`/diagrams/${diagram.id}`);
|
||||
}, [
|
||||
importMethod,
|
||||
databaseType,
|
||||
addDiagram,
|
||||
databaseEdition,
|
||||
@@ -133,7 +152,7 @@ export const CreateDiagramDialog: React.FC<CreateDiagramDialogProps> = ({
|
||||
}}
|
||||
>
|
||||
<DialogContent
|
||||
className="flex max-h-screen w-[90vw] max-w-[90vw] flex-col overflow-y-auto md:overflow-visible lg:max-w-[60vw] xl:lg:max-w-lg xl:min-w-[45vw]"
|
||||
className="flex max-h-dvh w-full flex-col md:max-w-[900px]"
|
||||
showClose={hasExistingDiagram}
|
||||
>
|
||||
{step === CreateDiagramDialogStep.SELECT_DATABASE ? (
|
||||
@@ -159,6 +178,8 @@ export const CreateDiagramDialog: React.FC<CreateDiagramDialogProps> = ({
|
||||
}
|
||||
setScriptResult={setScriptResult}
|
||||
title={t('new_diagram_dialog.import_database.title')}
|
||||
importMethod={importMethod}
|
||||
setImportMethod={setImportMethod}
|
||||
/>
|
||||
)}
|
||||
</DialogContent>
|
||||
|
||||
@@ -87,7 +87,12 @@ export const ExportSQLDialog: React.FC<ExportSQLDialogProps> = ({
|
||||
};
|
||||
|
||||
if (targetDatabaseType === DatabaseType.GENERIC) {
|
||||
return Promise.resolve(exportBaseSQL(filteredDiagram));
|
||||
return Promise.resolve(
|
||||
exportBaseSQL({
|
||||
diagram: filteredDiagram,
|
||||
targetDatabaseType,
|
||||
})
|
||||
);
|
||||
} else {
|
||||
return exportSQL(filteredDiagram, targetDatabaseType, {
|
||||
stream: true,
|
||||
|
||||
@@ -6,6 +6,7 @@ import { ImportDatabase } from '../common/import-database/import-database';
|
||||
import type { DatabaseEdition } from '@/lib/domain/database-edition';
|
||||
import type { DatabaseMetadata } from '@/lib/data/import-metadata/metadata-types/database-metadata';
|
||||
import { loadDatabaseMetadata } from '@/lib/data/import-metadata/metadata-types/database-metadata';
|
||||
import type { Diagram } from '@/lib/domain/diagram';
|
||||
import { loadFromDatabaseMetadata } from '@/lib/domain/diagram';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import { useRedoUndoStack } from '@/hooks/use-redo-undo-stack';
|
||||
@@ -13,6 +14,7 @@ import { Trans, useTranslation } from 'react-i18next';
|
||||
import { useReactFlow } from '@xyflow/react';
|
||||
import type { BaseDialogProps } from '../common/base-dialog-props';
|
||||
import { useAlert } from '@/context/alert-context/alert-context';
|
||||
import { sqlImportToDiagram } from '@/lib/data/sql-import';
|
||||
|
||||
export interface ImportDatabaseDialogProps extends BaseDialogProps {
|
||||
databaseType: DatabaseType;
|
||||
@@ -22,6 +24,7 @@ export const ImportDatabaseDialog: React.FC<ImportDatabaseDialogProps> = ({
|
||||
dialog,
|
||||
databaseType,
|
||||
}) => {
|
||||
const [importMethod, setImportMethod] = useState<'query' | 'ddl'>('query');
|
||||
const { closeImportDatabaseDialog } = useDialog();
|
||||
const { showAlert } = useAlert();
|
||||
const {
|
||||
@@ -43,6 +46,10 @@ export const ImportDatabaseDialog: React.FC<ImportDatabaseDialogProps> = ({
|
||||
DatabaseEdition | undefined
|
||||
>();
|
||||
|
||||
useEffect(() => {
|
||||
setDatabaseEdition(undefined);
|
||||
}, [databaseType]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!dialog.open) return;
|
||||
setDatabaseEdition(undefined);
|
||||
@@ -50,17 +57,27 @@ export const ImportDatabaseDialog: React.FC<ImportDatabaseDialogProps> = ({
|
||||
}, [dialog.open]);
|
||||
|
||||
const importDatabase = useCallback(async () => {
|
||||
const databaseMetadata: DatabaseMetadata =
|
||||
loadDatabaseMetadata(scriptResult);
|
||||
let diagram: Diagram | undefined;
|
||||
|
||||
const diagram = await loadFromDatabaseMetadata({
|
||||
databaseType,
|
||||
databaseMetadata,
|
||||
databaseEdition:
|
||||
databaseEdition?.trim().length === 0
|
||||
? undefined
|
||||
: databaseEdition,
|
||||
});
|
||||
if (importMethod === 'ddl') {
|
||||
diagram = await sqlImportToDiagram({
|
||||
sqlContent: scriptResult,
|
||||
sourceDatabaseType: databaseType,
|
||||
targetDatabaseType: databaseType,
|
||||
});
|
||||
} else {
|
||||
const databaseMetadata: DatabaseMetadata =
|
||||
loadDatabaseMetadata(scriptResult);
|
||||
|
||||
diagram = await loadFromDatabaseMetadata({
|
||||
databaseType,
|
||||
databaseMetadata,
|
||||
databaseEdition:
|
||||
databaseEdition?.trim().length === 0
|
||||
? undefined
|
||||
: databaseEdition,
|
||||
});
|
||||
}
|
||||
|
||||
const tableIdsToRemove = tables
|
||||
.filter((table) =>
|
||||
@@ -304,6 +321,7 @@ export const ImportDatabaseDialog: React.FC<ImportDatabaseDialogProps> = ({
|
||||
|
||||
closeImportDatabaseDialog();
|
||||
}, [
|
||||
importMethod,
|
||||
databaseEdition,
|
||||
currentDatabaseType,
|
||||
updateDatabaseType,
|
||||
@@ -333,7 +351,7 @@ export const ImportDatabaseDialog: React.FC<ImportDatabaseDialogProps> = ({
|
||||
}}
|
||||
>
|
||||
<DialogContent
|
||||
className="flex max-h-screen w-[90vw] flex-col overflow-y-auto md:overflow-visible xl:min-w-[45vw]"
|
||||
className="flex max-h-screen w-full flex-col md:max-w-[900px]"
|
||||
showClose
|
||||
>
|
||||
<ImportDatabase
|
||||
@@ -345,6 +363,8 @@ export const ImportDatabaseDialog: React.FC<ImportDatabaseDialogProps> = ({
|
||||
setScriptResult={setScriptResult}
|
||||
keepDialogAfterImport
|
||||
title={t('import_database_dialog.title', { diagramName })}
|
||||
importMethod={importMethod}
|
||||
setImportMethod={setImportMethod}
|
||||
/>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
|
||||
@@ -30,6 +30,14 @@
|
||||
--chart-4: 43 74% 66%;
|
||||
--chart-5: 27 87% 67%;
|
||||
--subtitle: 215.3 19.3% 34.5%;
|
||||
--sidebar-background: 0 0% 98%;
|
||||
--sidebar-foreground: 240 5.3% 26.1%;
|
||||
--sidebar-primary: 240 5.9% 10%;
|
||||
--sidebar-primary-foreground: 0 0% 98%;
|
||||
--sidebar-accent: 240 4.8% 95.9%;
|
||||
--sidebar-accent-foreground: 240 5.9% 10%;
|
||||
--sidebar-border: 220 13% 91%;
|
||||
--sidebar-ring: 217.2 91.2% 59.8%;
|
||||
}
|
||||
|
||||
.dark {
|
||||
@@ -58,6 +66,14 @@
|
||||
--chart-4: 280 65% 60%;
|
||||
--chart-5: 340 75% 55%;
|
||||
--subtitle: 212.7 26.8% 83.9%;
|
||||
--sidebar-background: 240 5.9% 10%;
|
||||
--sidebar-foreground: 240 4.8% 95.9%;
|
||||
--sidebar-primary: 224.3 76.3% 48%;
|
||||
--sidebar-primary-foreground: 0 0% 100%;
|
||||
--sidebar-accent: 240 3.7% 15.9%;
|
||||
--sidebar-accent-foreground: 240 4.8% 95.9%;
|
||||
--sidebar-border: 240 3.7% 15.9%;
|
||||
--sidebar-ring: 217.2 91.2% 59.8%;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
src/hooks/use-debounce-v2.ts (new file, 47 lines)
@@ -0,0 +1,47 @@
import { useEffect, useRef, useCallback } from 'react';
import { debounce as utilsDebounce } from '@/lib/utils';

interface DebouncedFunction {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    (...args: any[]): void;
    cancel?: () => void;
}

/**
 * A hook that returns a debounced version of the provided function.
 * The debounced function will only be called after the specified delay
 * has passed without the function being called again.
 *
 * @param callback The function to debounce
 * @param delay The delay in milliseconds
 * @returns A debounced version of the callback
 */

// eslint-disable-next-line @typescript-eslint/no-explicit-any
export function useDebounce<T extends (...args: any[]) => any>(
    callback: T,
    delay: number
): (...args: Parameters<T>) => void {
    // Use a ref to store the debounced function
    const debouncedFnRef = useRef<DebouncedFunction>();

    // Update the debounced function when dependencies change
    useEffect(() => {
        // Create the debounced function
        debouncedFnRef.current = utilsDebounce(callback, delay);

        // Clean up when component unmounts or dependencies change
        return () => {
            if (debouncedFnRef.current?.cancel) {
                debouncedFnRef.current.cancel();
            }
        };
    }, [callback, delay]);

    // Create a stable callback that uses the ref
    const debouncedCallback = useCallback((...args: Parameters<T>) => {
        debouncedFnRef.current?.(...args);
    }, []);

    return debouncedCallback;
}
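A minimal usage sketch of the hook above, not part of this changeset: the SearchBox component, its input markup, and the 300 ms delay are illustrative assumptions. Because the debounced function lives in a ref and the returned callback has an empty dependency list, its identity stays stable across renders, which is what lets the import dialog pass its debounced handler straight to the editor's onChange.

import React, { useCallback, useState } from 'react';
import { useDebounce } from '@/hooks/use-debounce-v2';

export const SearchBox: React.FC = () => {
    const [query, setQuery] = useState('');

    // Hypothetical handler: runs only once typing has paused for 300 ms
    const runSearch = useCallback((value: string) => {
        console.log('searching for', value);
    }, []);

    const debouncedSearch = useDebounce(runSearch, 300);

    return (
        <input
            value={query}
            onChange={(e) => {
                setQuery(e.target.value);
                debouncedSearch(e.target.value);
            }}
        />
    );
};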
src/hooks/use-mobile.tsx (new file, 23 lines)
@@ -0,0 +1,23 @@
import * as React from 'react';

const MOBILE_BREAKPOINT = 768;

export function useIsMobile() {
    const [isMobile, setIsMobile] = React.useState<boolean | undefined>(
        undefined
    );

    React.useEffect(() => {
        const mql = window.matchMedia(
            `(max-width: ${MOBILE_BREAKPOINT - 1}px)`
        );
        const onChange = () => {
            setIsMobile(window.innerWidth < MOBILE_BREAKPOINT);
        };
        mql.addEventListener('change', onChange);
        setIsMobile(window.innerWidth < MOBILE_BREAKPOINT);
        return () => mql.removeEventListener('change', onChange);
    }, []);

    return !!isMobile;
}
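A short usage sketch for useIsMobile, again illustrative rather than part of the diff (the Toolbar component and its labels are assumptions): the hook returns false on the first render because state starts as undefined, then tracks the 768px media query.

import React from 'react';
import { useIsMobile } from '@/hooks/use-mobile';

export const Toolbar: React.FC = () => {
    // true below the 768px breakpoint, false at or above it
    const isMobile = useIsMobile();

    return <div>{isMobile ? 'Compact toolbar' : 'Full toolbar'}</div>;
};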
@@ -42,9 +42,7 @@ export const ar: LanguageTranslation = {
|
||||
help: {
|
||||
help: 'مساعدة',
|
||||
docs_website: 'الوثائق',
|
||||
visit_website: 'ChartDB قم بزيارة',
|
||||
join_discord: 'Discord انضم إلينا على',
|
||||
schedule_a_call: '!تحدث معنا',
|
||||
join_discord: 'انضم إلينا على Discord',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -151,6 +149,8 @@ export const ar: LanguageTranslation = {
|
||||
comments: 'تعليقات',
|
||||
no_comments: 'لا يوجد تعليقات',
|
||||
delete_field: 'حذف الحقل',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'خصائص الفهرس',
|
||||
|
||||
@@ -43,9 +43,7 @@ export const bn: LanguageTranslation = {
|
||||
help: {
|
||||
help: 'সাহায্য',
|
||||
docs_website: 'ডকুমেন্টেশন',
|
||||
visit_website: 'ChartDB ওয়েবসাইটে যান',
|
||||
join_discord: 'আমাদের Discord-এ যোগ দিন',
|
||||
schedule_a_call: 'আমাদের সাথে কথা বলুন!',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -152,6 +150,8 @@ export const bn: LanguageTranslation = {
|
||||
comments: 'মন্তব্য',
|
||||
no_comments: 'কোনো মন্তব্য নেই',
|
||||
delete_field: 'ফিল্ড মুছুন',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'ইনডেক্স কর্ম',
|
||||
|
||||
@@ -43,9 +43,7 @@ export const de: LanguageTranslation = {
|
||||
help: {
|
||||
help: 'Hilfe',
|
||||
docs_website: 'Dokumentation',
|
||||
visit_website: 'ChartDB Webseite',
|
||||
join_discord: 'Auf Discord beitreten',
|
||||
schedule_a_call: 'Gespräch vereinbaren',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -153,6 +151,8 @@ export const de: LanguageTranslation = {
|
||||
comments: 'Kommentare',
|
||||
no_comments: 'Keine Kommentare',
|
||||
delete_field: 'Feld löschen',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Indexattribute',
|
||||
|
||||
@@ -41,9 +41,7 @@ export const en = {
|
||||
help: {
|
||||
help: 'Help',
|
||||
docs_website: 'Docs',
|
||||
visit_website: 'Visit ChartDB',
|
||||
join_discord: 'Join us on Discord',
|
||||
schedule_a_call: 'Talk with us!',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -145,6 +143,7 @@ export const en = {
|
||||
field_actions: {
|
||||
title: 'Field Attributes',
|
||||
unique: 'Unique',
|
||||
character_length: 'Max Length',
|
||||
comments: 'Comments',
|
||||
no_comments: 'No comments',
|
||||
delete_field: 'Delete Field',
|
||||
@@ -233,7 +232,7 @@ export const en = {
|
||||
title: 'Import your Database',
|
||||
database_edition: 'Database Edition:',
|
||||
step_1: 'Run this script in your database:',
|
||||
step_2: 'Paste the script result here:',
|
||||
step_2: 'Paste the script result into this modal.',
|
||||
script_results_placeholder: 'Script results here...',
|
||||
ssms_instructions: {
|
||||
button_text: 'SSMS Instructions',
|
||||
|
||||
@@ -42,9 +42,7 @@ export const es: LanguageTranslation = {
|
||||
help: {
|
||||
help: 'Ayuda',
|
||||
docs_website: 'Documentación',
|
||||
visit_website: 'Visitar ChartDB',
|
||||
join_discord: 'Únete a nosotros en Discord',
|
||||
schedule_a_call: '¡Habla con nosotros!',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -142,6 +140,8 @@ export const es: LanguageTranslation = {
|
||||
comments: 'Comentarios',
|
||||
no_comments: 'Sin comentarios',
|
||||
delete_field: 'Eliminar Campo',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Atributos del Índice',
|
||||
|
||||
@@ -41,9 +41,7 @@ export const fr: LanguageTranslation = {
|
||||
help: {
|
||||
help: 'Aide',
|
||||
docs_website: 'Documentation',
|
||||
visit_website: 'Visitez ChartDB',
|
||||
join_discord: 'Rejoignez-nous sur Discord',
|
||||
schedule_a_call: 'Parlez avec nous !',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -140,6 +138,8 @@ export const fr: LanguageTranslation = {
|
||||
comments: 'Commentaires',
|
||||
no_comments: 'Pas de commentaires',
|
||||
delete_field: 'Supprimer le Champ',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
title: "Attributs de l'Index",
|
||||
|
||||
@@ -43,9 +43,7 @@ export const gu: LanguageTranslation = {
|
||||
help: {
|
||||
help: 'મદદ',
|
||||
docs_website: 'દસ્તાવેજીકરણ',
|
||||
visit_website: 'ChartDB વેબસાઇટ પર જાઓ',
|
||||
join_discord: 'અમારા Discordમાં જોડાઓ',
|
||||
schedule_a_call: 'અમારી સાથે વાત કરો!',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -153,6 +151,8 @@ export const gu: LanguageTranslation = {
|
||||
comments: 'ટિપ્પણીઓ',
|
||||
no_comments: 'કોઈ ટિપ્પણીઓ નથી',
|
||||
delete_field: 'ફીલ્ડ કાઢી નાખો',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'ઇન્ડેક્સ લક્ષણો',
|
||||
|
||||
@@ -42,9 +42,7 @@ export const hi: LanguageTranslation = {
|
||||
help: {
|
||||
help: 'मदद',
|
||||
docs_website: 'દસ્તાવેજીકરણ',
|
||||
visit_website: 'ChartDB वेबसाइट पर जाएँ',
|
||||
join_discord: 'हमसे Discord पर जुड़ें',
|
||||
schedule_a_call: 'हमसे बात करें!',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -152,6 +150,8 @@ export const hi: LanguageTranslation = {
|
||||
comments: 'टिप्पणियाँ',
|
||||
no_comments: 'कोई टिप्पणी नहीं',
|
||||
delete_field: 'फ़ील्ड हटाएँ',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'सूचकांक विशेषताएँ',
|
||||
|
||||
@@ -41,10 +41,8 @@ export const id_ID: LanguageTranslation = {
|
||||
},
|
||||
help: {
|
||||
help: 'Bantuan',
|
||||
docs_website: 'દસ્તાવેજીકરણ',
|
||||
visit_website: 'Kunjungi ChartDB',
|
||||
docs_website: 'Dokumentasi',
|
||||
join_discord: 'Bergabunglah di Discord kami',
|
||||
schedule_a_call: 'Berbicara dengan kami!',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -151,6 +149,8 @@ export const id_ID: LanguageTranslation = {
|
||||
comments: 'Komentar',
|
||||
no_comments: 'Tidak ada komentar',
|
||||
delete_field: 'Hapus Kolom',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Atribut Indeks',
|
||||
|
||||
@@ -44,9 +44,7 @@ export const ja: LanguageTranslation = {
|
||||
help: {
|
||||
help: 'ヘルプ',
|
||||
docs_website: 'ドキュメント',
|
||||
visit_website: 'ChartDBにアクセス',
|
||||
join_discord: 'Discordに参加',
|
||||
schedule_a_call: '話しかけてください!',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -155,6 +153,8 @@ export const ja: LanguageTranslation = {
|
||||
comments: 'コメント',
|
||||
no_comments: 'コメントがありません',
|
||||
delete_field: 'フィールドを削除',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'インデックス属性',
|
||||
|
||||
@@ -42,9 +42,7 @@ export const ko_KR: LanguageTranslation = {
|
||||
help: {
|
||||
help: '도움말',
|
||||
docs_website: '선적 서류 비치',
|
||||
visit_website: 'ChartDB 사이트 방문',
|
||||
join_discord: 'Discord 가입',
|
||||
schedule_a_call: 'Talk with us!',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -151,6 +149,8 @@ export const ko_KR: LanguageTranslation = {
|
||||
comments: '주석',
|
||||
no_comments: '주석 없음',
|
||||
delete_field: '필드 삭제',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
title: '인덱스 속성',
|
||||
|
||||
@@ -43,9 +43,7 @@ export const mr: LanguageTranslation = {
|
||||
help: {
|
||||
help: 'मदत',
|
||||
docs_website: 'दस्तऐवजीकरण',
|
||||
visit_website: 'ChartDB ला भेट द्या',
|
||||
join_discord: 'आमच्या डिस्कॉर्डमध्ये सामील व्हा',
|
||||
schedule_a_call: 'आमच्याशी बोला!',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -154,6 +152,8 @@ export const mr: LanguageTranslation = {
|
||||
comments: 'टिप्पण्या',
|
||||
no_comments: 'कोणत्याही टिप्पणी नाहीत',
|
||||
delete_field: 'फील्ड हटवा',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'इंडेक्स गुणधर्म',
|
||||
|
||||
@@ -43,9 +43,7 @@ export const ne: LanguageTranslation = {
|
||||
help: {
|
||||
help: 'मद्दत',
|
||||
docs_website: 'कागजात',
|
||||
visit_website: 'वेबसाइटमा जानुहोस्',
|
||||
join_discord: 'डिस्कोर्डमा सामिल हुनुहोस्',
|
||||
schedule_a_call: 'कल अनुसूची गर्नुहोस्',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -152,6 +150,8 @@ export const ne: LanguageTranslation = {
|
||||
comments: 'टिप्पणीहरू',
|
||||
no_comments: 'कुनै टिप्पणीहरू छैनन्',
|
||||
delete_field: 'क्षेत्र हटाउनुहोस्',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'सूचक विशेषताहरू',
|
||||
|
||||
@@ -43,9 +43,7 @@ export const pt_BR: LanguageTranslation = {
|
||||
help: {
|
||||
help: 'Ajuda',
|
||||
docs_website: 'Documentação',
|
||||
visit_website: 'Visitar ChartDB',
|
||||
join_discord: 'Junte-se a nós no Discord',
|
||||
schedule_a_call: 'Fale Conosco!',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -152,6 +150,8 @@ export const pt_BR: LanguageTranslation = {
|
||||
comments: 'Comentários',
|
||||
no_comments: 'Sem comentários',
|
||||
delete_field: 'Excluir Campo',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Atributos do Índice',
|
||||
|
||||
@@ -43,9 +43,7 @@ export const ru: LanguageTranslation = {
|
||||
help: {
|
||||
help: 'Помощь',
|
||||
docs_website: 'Документация',
|
||||
visit_website: 'Перейти на сайт ChartDB',
|
||||
join_discord: 'Присоединиться к сообществу в Discord',
|
||||
schedule_a_call: 'Поговорите с нами!',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -151,6 +149,8 @@ export const ru: LanguageTranslation = {
|
||||
comments: 'Комментарии',
|
||||
no_comments: 'Нет комментария',
|
||||
delete_field: 'Удалить поле',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Атрибуты индекса',
|
||||
|
||||
@@ -43,9 +43,7 @@ export const te: LanguageTranslation = {
|
||||
help: {
|
||||
help: 'సహాయం',
|
||||
docs_website: 'డాక్యుమెంటేషన్',
|
||||
visit_website: 'ChartDB సందర్శించండి',
|
||||
join_discord: 'డిస్కార్డ్లో మా నుంచి చేరండి',
|
||||
schedule_a_call: 'మాతో మాట్లాడండి!',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -152,6 +150,8 @@ export const te: LanguageTranslation = {
|
||||
comments: 'వ్యాఖ్యలు',
|
||||
no_comments: 'వ్యాఖ్యలు లేవు',
|
||||
delete_field: 'ఫీల్డ్ తొలగించు',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'ఇండెక్స్ గుణాలు',
|
||||
|
||||
@@ -43,9 +43,7 @@ export const tr: LanguageTranslation = {
|
||||
help: {
|
||||
help: 'Yardım',
|
||||
docs_website: 'Belgeleme',
|
||||
visit_website: "ChartDB'yi Ziyaret Et",
|
||||
join_discord: "Discord'a Katıl",
|
||||
schedule_a_call: 'Bize Ulaş!',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -151,6 +149,8 @@ export const tr: LanguageTranslation = {
|
||||
comments: 'Yorumlar',
|
||||
no_comments: 'Yorum yok',
|
||||
delete_field: 'Alanı Sil',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'İndeks Özellikleri',
|
||||
|
||||
@@ -41,9 +41,7 @@ export const uk: LanguageTranslation = {
|
||||
help: {
|
||||
help: 'Довідка',
|
||||
docs_website: 'Документація',
|
||||
visit_website: 'Сайт ChartDB',
|
||||
join_discord: 'Приєднуйтесь до нас в Діскорд',
|
||||
schedule_a_call: 'Забронювати зустріч!',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -150,6 +148,8 @@ export const uk: LanguageTranslation = {
|
||||
comments: 'Коментарі',
|
||||
no_comments: 'Немає коментарів',
|
||||
delete_field: 'Видалити поле',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Атрибути індексу',
|
||||
|
||||
@@ -42,9 +42,7 @@ export const vi: LanguageTranslation = {
|
||||
help: {
|
||||
help: 'Trợ giúp',
|
||||
docs_website: 'Tài liệu',
|
||||
visit_website: 'Truy cập ChartDB',
|
||||
join_discord: 'Tham gia Discord',
|
||||
schedule_a_call: 'Trò chuyện cùng chúng tôi!',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -151,6 +149,8 @@ export const vi: LanguageTranslation = {
|
||||
comments: 'Bình luận',
|
||||
no_comments: 'Không có bình luận',
|
||||
delete_field: 'Xóa trường',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'Thuộc tính chỉ mục',
|
||||
|
||||
@@ -42,9 +42,7 @@ export const zh_CN: LanguageTranslation = {
|
||||
help: {
|
||||
help: '帮助',
|
||||
docs_website: '文档',
|
||||
visit_website: '访问 ChartDB',
|
||||
join_discord: '在 Discord 上加入我们',
|
||||
schedule_a_call: '和我们交流!',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -148,6 +146,8 @@ export const zh_CN: LanguageTranslation = {
|
||||
comments: '注释',
|
||||
no_comments: '空',
|
||||
delete_field: '删除字段',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
title: '索引属性',
|
||||
|
||||
@@ -42,9 +42,7 @@ export const zh_TW: LanguageTranslation = {
|
||||
help: {
|
||||
help: '幫助',
|
||||
docs_website: '文件',
|
||||
visit_website: '訪問 ChartDB 網站',
|
||||
join_discord: '加入 Discord',
|
||||
schedule_a_call: '與我們聯絡!',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -148,6 +146,8 @@ export const zh_TW: LanguageTranslation = {
|
||||
comments: '註解',
|
||||
no_comments: '無註解',
|
||||
delete_field: '刪除欄位',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
title: '索引屬性',
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { DataType } from './data-types';
|
||||
import type { DataTypeData } from './data-types';
|
||||
|
||||
export const clickhouseDataTypes: readonly DataType[] = [
|
||||
export const clickhouseDataTypes: readonly DataTypeData[] = [
|
||||
// Numeric Types
|
||||
{ name: 'uint8', id: 'uint8' },
|
||||
{ name: 'uint16', id: 'uint16' },
|
||||
@@ -48,25 +48,41 @@ export const clickhouseDataTypes: readonly DataType[] = [
|
||||
{ name: 'mediumblob', id: 'mediumblob' },
|
||||
{ name: 'tinyblob', id: 'tinyblob' },
|
||||
{ name: 'blob', id: 'blob' },
|
||||
{ name: 'varchar', id: 'varchar' },
|
||||
{ name: 'char', id: 'char' },
|
||||
{ name: 'varchar', id: 'varchar', hasCharMaxLength: true },
|
||||
{ name: 'char', id: 'char', hasCharMaxLength: true },
|
||||
{ name: 'char large object', id: 'char_large_object' },
|
||||
{ name: 'char varying', id: 'char_varying' },
|
||||
{ name: 'char varying', id: 'char_varying', hasCharMaxLength: true },
|
||||
{ name: 'character large object', id: 'character_large_object' },
|
||||
{ name: 'character varying', id: 'character_varying' },
|
||||
{
|
||||
name: 'character varying',
|
||||
id: 'character_varying',
|
||||
hasCharMaxLength: true,
|
||||
},
|
||||
{ name: 'nchar large object', id: 'nchar_large_object' },
|
||||
{ name: 'nchar varying', id: 'nchar_varying' },
|
||||
{ name: 'nchar varying', id: 'nchar_varying', hasCharMaxLength: true },
|
||||
{
|
||||
name: 'national character large object',
|
||||
id: 'national_character_large_object',
|
||||
},
|
||||
{ name: 'national character varying', id: 'national_character_varying' },
|
||||
{ name: 'national char varying', id: 'national_char_varying' },
|
||||
{ name: 'national character', id: 'national_character' },
|
||||
{ name: 'national char', id: 'national_char' },
|
||||
{
|
||||
name: 'national character varying',
|
||||
id: 'national_character_varying',
|
||||
hasCharMaxLength: true,
|
||||
},
|
||||
{
|
||||
name: 'national char varying',
|
||||
id: 'national_char_varying',
|
||||
hasCharMaxLength: true,
|
||||
},
|
||||
{
|
||||
name: 'national character',
|
||||
id: 'national_character',
|
||||
hasCharMaxLength: true,
|
||||
},
|
||||
{ name: 'national char', id: 'national_char', hasCharMaxLength: true },
|
||||
{ name: 'binary large object', id: 'binary_large_object' },
|
||||
{ name: 'binary varying', id: 'binary_varying' },
|
||||
{ name: 'fixedstring', id: 'fixedstring' },
|
||||
{ name: 'binary varying', id: 'binary_varying', hasCharMaxLength: true },
|
||||
{ name: 'fixedstring', id: 'fixedstring', hasCharMaxLength: true },
|
||||
{ name: 'string', id: 'string' },
|
||||
|
||||
// Date Types
|
||||
|
||||
@@ -13,12 +13,16 @@ export interface DataType {
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface DataTypeData extends DataType {
|
||||
hasCharMaxLength?: boolean;
|
||||
}
|
||||
|
||||
export const dataTypeSchema: z.ZodType<DataType> = z.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
});
|
||||
|
||||
export const dataTypeMap: Record<DatabaseType, readonly DataType[]> = {
|
||||
export const dataTypeMap: Record<DatabaseType, readonly DataTypeData[]> = {
|
||||
[DatabaseType.GENERIC]: genericDataTypes,
|
||||
[DatabaseType.POSTGRESQL]: postgresDataTypes,
|
||||
[DatabaseType.MYSQL]: mysqlDataTypes,
|
||||
@@ -64,3 +68,21 @@ export function areFieldTypesCompatible(
|
||||
}
|
||||
|
||||
export const dataTypes = Object.values(dataTypeMap).flat();
|
||||
|
||||
export const dataTypeDataToDataType = (
|
||||
dataTypeData: DataTypeData
|
||||
): DataType => ({
|
||||
id: dataTypeData.id,
|
||||
name: dataTypeData.name,
|
||||
});
|
||||
|
||||
export const findDataTypeDataById = (
|
||||
id: string,
|
||||
databaseType?: DatabaseType
|
||||
): DataTypeData | undefined => {
|
||||
const dataTypesOptions = databaseType
|
||||
? dataTypeMap[databaseType]
|
||||
: dataTypes;
|
||||
|
||||
return dataTypesOptions.find((dataType) => dataType.id === id);
|
||||
};
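// Illustrative usage sketch (not part of the diff): how a consumer of this module
// might check whether a chosen type supports a character max length. Assumes the
// DatabaseType import already present in this file.
const varcharType = findDataTypeDataById('varchar', DatabaseType.POSTGRESQL);
if (varcharType?.hasCharMaxLength) {
    // e.g. show a "max length" input next to the type selector
}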
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
import type { DataType } from './data-types';
|
||||
import type { DataTypeData } from './data-types';
|
||||
|
||||
export const genericDataTypes: readonly DataType[] = [
|
||||
export const genericDataTypes: readonly DataTypeData[] = [
|
||||
{ name: 'bigint', id: 'bigint' },
|
||||
{ name: 'binary', id: 'binary' },
|
||||
{ name: 'binary', id: 'binary', hasCharMaxLength: true },
|
||||
{ name: 'blob', id: 'blob' },
|
||||
{ name: 'boolean', id: 'boolean' },
|
||||
{ name: 'char', id: 'char' },
|
||||
{ name: 'char', id: 'char', hasCharMaxLength: true },
|
||||
{ name: 'date', id: 'date' },
|
||||
{ name: 'datetime', id: 'datetime' },
|
||||
{ name: 'decimal', id: 'decimal' },
|
||||
@@ -22,6 +22,6 @@ export const genericDataTypes: readonly DataType[] = [
|
||||
{ name: 'time', id: 'time' },
|
||||
{ name: 'timestamp', id: 'timestamp' },
|
||||
{ name: 'uuid', id: 'uuid' },
|
||||
{ name: 'varbinary', id: 'varbinary' },
|
||||
{ name: 'varchar', id: 'varchar' },
|
||||
{ name: 'varbinary', id: 'varbinary', hasCharMaxLength: true },
|
||||
{ name: 'varchar', id: 'varchar', hasCharMaxLength: true },
|
||||
] as const;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { DataType } from './data-types';
|
||||
import type { DataTypeData } from './data-types';
|
||||
|
||||
export const mariadbDataTypes: readonly DataType[] = [
|
||||
export const mariadbDataTypes: readonly DataTypeData[] = [
|
||||
// Numeric Types
|
||||
{ name: 'tinyint', id: 'tinyint' },
|
||||
{ name: 'smallint', id: 'smallint' },
|
||||
@@ -23,10 +23,10 @@ export const mariadbDataTypes: readonly DataType[] = [
|
||||
{ name: 'year', id: 'year' },
|
||||
|
||||
// String Types
|
||||
{ name: 'char', id: 'char' },
|
||||
{ name: 'varchar', id: 'varchar' },
|
||||
{ name: 'binary', id: 'binary' },
|
||||
{ name: 'varbinary', id: 'varbinary' },
|
||||
{ name: 'char', id: 'char', hasCharMaxLength: true },
|
||||
{ name: 'varchar', id: 'varchar', hasCharMaxLength: true },
|
||||
{ name: 'binary', id: 'binary', hasCharMaxLength: true },
|
||||
{ name: 'varbinary', id: 'varbinary', hasCharMaxLength: true },
|
||||
{ name: 'tinyblob', id: 'tinyblob' },
|
||||
{ name: 'blob', id: 'blob' },
|
||||
{ name: 'mediumblob', id: 'mediumblob' },
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { DataType } from './data-types';
|
||||
import type { DataTypeData } from './data-types';
|
||||
|
||||
export const mysqlDataTypes: readonly DataType[] = [
|
||||
export const mysqlDataTypes: readonly DataTypeData[] = [
|
||||
// Numeric Types
|
||||
{ name: 'tinyint', id: 'tinyint' },
|
||||
{ name: 'smallint', id: 'smallint' },
|
||||
@@ -23,10 +23,10 @@ export const mysqlDataTypes: readonly DataType[] = [
|
||||
{ name: 'year', id: 'year' },
|
||||
|
||||
// String Types
|
||||
{ name: 'char', id: 'char' },
|
||||
{ name: 'varchar', id: 'varchar' },
|
||||
{ name: 'binary', id: 'binary' },
|
||||
{ name: 'varbinary', id: 'varbinary' },
|
||||
{ name: 'char', id: 'char', hasCharMaxLength: true },
|
||||
{ name: 'varchar', id: 'varchar', hasCharMaxLength: true },
|
||||
{ name: 'binary', id: 'binary', hasCharMaxLength: true },
|
||||
{ name: 'varbinary', id: 'varbinary', hasCharMaxLength: true },
|
||||
{ name: 'tinyblob', id: 'tinyblob' },
|
||||
{ name: 'blob', id: 'blob' },
|
||||
{ name: 'mediumblob', id: 'mediumblob' },
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { DataType } from './data-types';
|
||||
import type { DataTypeData } from './data-types';
|
||||
|
||||
export const postgresDataTypes: readonly DataType[] = [
|
||||
export const postgresDataTypes: readonly DataTypeData[] = [
|
||||
// Numeric Types
|
||||
{ name: 'smallint', id: 'smallint' },
|
||||
{ name: 'integer', id: 'integer' },
|
||||
@@ -15,9 +15,13 @@ export const postgresDataTypes: readonly DataType[] = [
|
||||
{ name: 'money', id: 'money' },
|
||||
|
||||
// Character Types
|
||||
{ name: 'char', id: 'char' },
|
||||
{ name: 'varchar', id: 'varchar' },
|
||||
{ name: 'character varying', id: 'character_varying' },
|
||||
{ name: 'char', id: 'char', hasCharMaxLength: true },
|
||||
{ name: 'varchar', id: 'varchar', hasCharMaxLength: true },
|
||||
{
|
||||
name: 'character varying',
|
||||
id: 'character_varying',
|
||||
hasCharMaxLength: true,
|
||||
},
|
||||
{ name: 'text', id: 'text' },
|
||||
|
||||
// Binary Data Types
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { DataType } from './data-types';
|
||||
import type { DataTypeData } from './data-types';
|
||||
|
||||
export const sqlServerDataTypes: readonly DataType[] = [
|
||||
export const sqlServerDataTypes: readonly DataTypeData[] = [
|
||||
// Exact Numerics
|
||||
{ name: 'bigint', id: 'bigint' },
|
||||
{ name: 'bit', id: 'bit' },
|
||||
@@ -25,18 +25,18 @@ export const sqlServerDataTypes: readonly DataType[] = [
|
||||
{ name: 'time', id: 'time' },
|
||||
|
||||
// Character Strings
|
||||
{ name: 'char', id: 'char' },
|
||||
{ name: 'varchar', id: 'varchar' },
|
||||
{ name: 'char', id: 'char', hasCharMaxLength: true },
|
||||
{ name: 'varchar', id: 'varchar', hasCharMaxLength: true },
|
||||
{ name: 'text', id: 'text' },
|
||||
|
||||
// Unicode Character Strings
|
||||
{ name: 'nchar', id: 'nchar' },
|
||||
{ name: 'nvarchar', id: 'nvarchar' },
|
||||
{ name: 'nchar', id: 'nchar', hasCharMaxLength: true },
|
||||
{ name: 'nvarchar', id: 'nvarchar', hasCharMaxLength: true },
|
||||
{ name: 'ntext', id: 'ntext' },
|
||||
|
||||
// Binary Strings
|
||||
{ name: 'binary', id: 'binary' },
|
||||
{ name: 'varbinary', id: 'varbinary' },
|
||||
{ name: 'binary', id: 'binary', hasCharMaxLength: true },
|
||||
{ name: 'varbinary', id: 'varbinary', hasCharMaxLength: true },
|
||||
{ name: 'image', id: 'image' },
|
||||
|
||||
// Other Data Types
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { DataType } from './data-types';
|
||||
import type { DataTypeData } from './data-types';
|
||||
|
||||
export const sqliteDataTypes: readonly DataType[] = [
|
||||
export const sqliteDataTypes: readonly DataTypeData[] = [
|
||||
// Numeric Types
|
||||
{ name: 'integer', id: 'integer' },
|
||||
{ name: 'real', id: 'real' },
|
||||
@@ -22,6 +22,6 @@ export const sqliteDataTypes: readonly DataType[] = [
|
||||
{ name: 'int', id: 'int' },
|
||||
{ name: 'float', id: 'float' },
|
||||
{ name: 'boolean', id: 'boolean' },
|
||||
{ name: 'varchar', id: 'varchar' },
|
||||
{ name: 'varchar', id: 'varchar', hasCharMaxLength: true },
|
||||
{ name: 'decimal', id: 'decimal' },
|
||||
] as const;
|
||||
|
||||
447
src/lib/data/export-metadata/export-per-type/mysql.ts
Normal file
@@ -0,0 +1,447 @@
|
||||
import {
|
||||
exportFieldComment,
|
||||
isFunction,
|
||||
isKeyword,
|
||||
strHasQuotes,
|
||||
} from './common';
|
||||
import type { Diagram } from '@/lib/domain/diagram';
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import type { DBField } from '@/lib/domain/db-field';
|
||||
import type { DBRelationship } from '@/lib/domain/db-relationship';
|
||||
|
||||
function parseMySQLDefault(field: DBField): string {
|
||||
if (!field.default) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const defaultValue = field.default.trim();
|
||||
|
||||
// Handle specific MySQL default values
|
||||
if (
|
||||
defaultValue.toLowerCase() === 'now()' ||
|
||||
defaultValue.toLowerCase() === 'current_timestamp'
|
||||
) {
|
||||
return 'CURRENT_TIMESTAMP';
|
||||
}
|
||||
|
||||
// Skip identity/autoincrement-style defaults - MySQL expresses auto-increment with the AUTO_INCREMENT keyword instead
|
||||
if (
|
||||
defaultValue.toLowerCase().includes('identity') ||
|
||||
defaultValue.toLowerCase().includes('autoincrement') ||
|
||||
defaultValue.includes('nextval')
|
||||
) {
|
||||
return ''; // MySQL handles this with AUTO_INCREMENT
|
||||
}
|
||||
|
||||
// If it's a function call, convert to MySQL equivalents
|
||||
if (isFunction(defaultValue)) {
|
||||
// Map common PostgreSQL/MSSQL functions to MySQL equivalents
|
||||
if (
|
||||
defaultValue.toLowerCase().includes('newid()') ||
|
||||
defaultValue.toLowerCase().includes('uuid()')
|
||||
) {
|
||||
return 'UUID()';
|
||||
}
|
||||
|
||||
// For functions we can't translate, return as is (MySQL might not support them)
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
// If it's a keyword, keep it as is
|
||||
if (isKeyword(defaultValue)) {
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
// If it already has quotes, keep it as is
|
||||
if (strHasQuotes(defaultValue)) {
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
// If it's a number, keep it as is
|
||||
if (/^-?\d+(\.\d+)?$/.test(defaultValue)) {
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
// For other cases, add quotes
|
||||
return `'${defaultValue.replace(/'/g, "''")}'`;
|
||||
}
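// Illustrative examples (not part of the diff) of the intended behaviour, assuming
// isFunction/isKeyword from './common' behave as their names suggest:
//   default "now()"          -> CURRENT_TIMESTAMP
//   default "nextval('seq')" -> ''        (left to AUTO_INCREMENT handling)
//   default "42"             -> 42
//   default "draft"          -> 'draft'   (quoted literal)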
|
||||
|
||||
// Map problematic types to MySQL compatible types
|
||||
function mapMySQLType(typeName: string): string {
|
||||
typeName = typeName.toLowerCase();
|
||||
|
||||
// Map common types to MySQL type system
|
||||
switch (typeName) {
|
||||
case 'int':
|
||||
case 'integer':
|
||||
return 'INT';
|
||||
|
||||
case 'smallint':
|
||||
return 'SMALLINT';
|
||||
|
||||
case 'bigint':
|
||||
return 'BIGINT';
|
||||
|
||||
case 'decimal':
|
||||
case 'numeric':
|
||||
return 'DECIMAL';
|
||||
|
||||
case 'float':
|
||||
return 'FLOAT';
|
||||
|
||||
case 'double':
|
||||
case 'real':
|
||||
return 'DOUBLE';
|
||||
|
||||
case 'char':
|
||||
case 'character':
|
||||
return 'CHAR';
|
||||
|
||||
case 'varchar':
|
||||
case 'character varying':
|
||||
case 'nvarchar':
|
||||
return 'VARCHAR';
|
||||
|
||||
case 'text':
|
||||
case 'ntext':
|
||||
return 'TEXT';
|
||||
|
||||
case 'longtext':
|
||||
return 'LONGTEXT';
|
||||
|
||||
case 'mediumtext':
|
||||
return 'MEDIUMTEXT';
|
||||
|
||||
case 'tinytext':
|
||||
return 'TINYTEXT';
|
||||
|
||||
case 'date':
|
||||
return 'DATE';
|
||||
|
||||
case 'datetime':
|
||||
case 'timestamp':
|
||||
case 'datetime2':
|
||||
return 'DATETIME';
|
||||
|
||||
case 'time':
|
||||
return 'TIME';
|
||||
|
||||
case 'blob':
|
||||
case 'binary':
|
||||
return 'BLOB';
|
||||
|
||||
case 'varbinary':
|
||||
return 'VARBINARY';
|
||||
|
||||
case 'bit':
|
||||
return 'BIT';
|
||||
|
||||
case 'boolean':
|
||||
case 'bool':
|
||||
return 'TINYINT(1)'; // MySQL uses TINYINT(1) for boolean
|
||||
|
||||
case 'enum':
|
||||
return 'VARCHAR(50)'; // Convert ENUM to VARCHAR instead of assuming values
|
||||
|
||||
case 'json':
|
||||
case 'jsonb':
|
||||
return 'JSON'; // MySQL has JSON type since 5.7.8
|
||||
|
||||
case 'uuid':
|
||||
return 'CHAR(36)'; // MySQL doesn't have a UUID type, use CHAR(36)
|
||||
|
||||
case 'geometry':
|
||||
case 'geography':
|
||||
return 'GEOMETRY'; // If MySQL has spatial extensions
|
||||
|
||||
case 'array':
|
||||
case 'user-defined':
|
||||
return 'JSON'; // Use JSON for complex types like arrays or user-defined
|
||||
}
|
||||
|
||||
// If type has array notation (ends with []), treat as JSON
|
||||
if (typeName.endsWith('[]')) {
|
||||
return 'JSON';
|
||||
}
|
||||
|
||||
// For any other types, default to original type
|
||||
return typeName;
|
||||
}
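// Illustrative examples (not part of the diff):
//   mapMySQLType('nvarchar') -> 'VARCHAR'
//   mapMySQLType('uuid')     -> 'CHAR(36)'
//   mapMySQLType('text[]')   -> 'JSON'   (array notation falls back to JSON)
//   mapMySQLType('point')    -> 'point'  (unknown types pass through unchanged)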
|
||||
|
||||
export function exportMySQL(diagram: Diagram): string {
|
||||
if (!diagram.tables || !diagram.relationships) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const tables = diagram.tables;
|
||||
const relationships = diagram.relationships;
|
||||
|
||||
// Start SQL script
|
||||
let sqlScript = '-- MySQL database export\n\n';
|
||||
|
||||
// MySQL DDL statements commit implicitly, but the transaction wrapper is kept for consistency with the other exporters
|
||||
sqlScript += 'START TRANSACTION;\n\n';
|
||||
|
||||
// Create databases (schemas) if they don't exist
|
||||
const schemas = new Set<string>();
|
||||
tables.forEach((table) => {
|
||||
if (table.schema) {
|
||||
schemas.add(table.schema);
|
||||
}
|
||||
});
|
||||
|
||||
schemas.forEach((schema) => {
|
||||
sqlScript += `CREATE DATABASE IF NOT EXISTS \`${schema}\`;\n`;
|
||||
});
|
||||
|
||||
if (schemas.size > 0) {
|
||||
sqlScript += '\n';
|
||||
}
|
||||
|
||||
// Generate table creation SQL
|
||||
sqlScript += tables
|
||||
.map((table: DBTable) => {
|
||||
// Skip views
|
||||
if (table.isView) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Use schema prefix if available
|
||||
const tableName = table.schema
|
||||
? `\`${table.schema}\`.\`${table.name}\``
|
||||
: `\`${table.name}\``;
|
||||
|
||||
// Get primary key fields
|
||||
const primaryKeyFields = table.fields.filter((f) => f.primaryKey);
|
||||
|
||||
return `${
|
||||
table.comments ? `-- ${table.comments}\n` : ''
|
||||
}CREATE TABLE IF NOT EXISTS ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `\`${field.name}\``;
|
||||
|
||||
// Handle type name - map to MySQL compatible types
|
||||
const typeName = mapMySQLType(field.type.name);
|
||||
|
||||
// Handle MySQL specific type formatting
|
||||
let typeWithSize = typeName;
|
||||
if (field.characterMaximumLength) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'varchar' ||
|
||||
typeName.toLowerCase() === 'char' ||
|
||||
typeName.toLowerCase() === 'varbinary'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.characterMaximumLength})`;
|
||||
}
|
||||
} else if (field.precision && field.scale) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision}, ${field.scale})`;
|
||||
}
|
||||
} else if (field.precision) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision})`;
|
||||
}
|
||||
}
|
||||
|
||||
// Set a default size for VARCHAR columns if not specified
|
||||
if (
|
||||
typeName.toLowerCase() === 'varchar' &&
|
||||
!field.characterMaximumLength
|
||||
) {
|
||||
typeWithSize = `${typeName}(255)`;
|
||||
}
|
||||
|
||||
const notNull = field.nullable ? '' : ' NOT NULL';
|
||||
|
||||
// Handle auto_increment - MySQL uses AUTO_INCREMENT keyword
|
||||
let autoIncrement = '';
|
||||
if (
|
||||
field.primaryKey &&
|
||||
(field.default?.toLowerCase().includes('identity') ||
|
||||
field.default
|
||||
?.toLowerCase()
|
||||
.includes('autoincrement') ||
|
||||
field.default?.includes('nextval'))
|
||||
) {
|
||||
autoIncrement = ' AUTO_INCREMENT';
|
||||
}
|
||||
|
||||
// Only add UNIQUE constraint if the field is not part of the primary key
|
||||
const unique =
|
||||
!field.primaryKey && field.unique ? ' UNIQUE' : '';
|
||||
|
||||
// Handle default value
|
||||
const defaultValue =
|
||||
field.default &&
|
||||
!field.default.toLowerCase().includes('identity') &&
|
||||
!field.default
|
||||
.toLowerCase()
|
||||
.includes('autoincrement') &&
|
||||
!field.default.includes('nextval')
|
||||
? ` DEFAULT ${parseMySQLDefault(field)}`
|
||||
: '';
|
||||
|
||||
// MySQL supports inline comments
|
||||
const comment = field.comments
|
||||
? ` COMMENT '${field.comments.replace(/'/g, "''")}'`
|
||||
: '';
|
||||
|
||||
return `${exportFieldComment(field.comments ?? '')} ${fieldName} ${typeWithSize}${notNull}${autoIncrement}${unique}${defaultValue}${comment}`;
|
||||
})
|
||||
.join(',\n')}${
|
||||
// Add PRIMARY KEY as table constraint
|
||||
primaryKeyFields.length > 0
|
||||
? `,\n PRIMARY KEY (${primaryKeyFields
|
||||
.map((f) => `\`${f.name}\``)
|
||||
.join(', ')})`
|
||||
: ''
|
||||
}\n)${
|
||||
// MySQL supports table comments
|
||||
table.comments
|
||||
? ` COMMENT='${table.comments.replace(/'/g, "''")}'`
|
||||
: ''
|
||||
};\n\n${
|
||||
// Add indexes - MySQL creates them separately from the table definition
|
||||
table.indexes
|
||||
.map((index) => {
|
||||
// Get the list of fields for this index
|
||||
const indexFields = index.fieldIds
|
||||
.map((fieldId) => {
|
||||
const field = table.fields.find(
|
||||
(f) => f.id === fieldId
|
||||
);
|
||||
return field ? field : null;
|
||||
})
|
||||
.filter(Boolean);
|
||||
|
||||
// Skip if this index exactly matches the primary key fields
|
||||
if (
|
||||
primaryKeyFields.length === indexFields.length &&
|
||||
primaryKeyFields.every((pk) =>
|
||||
indexFields.some(
|
||||
(field) => field && field.id === pk.id
|
||||
)
|
||||
)
|
||||
) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Create a unique index name by combining table name, field names, and a unique/non-unique indicator
|
||||
const fieldNamesForIndex = indexFields
|
||||
.map((field) => field?.name || '')
|
||||
.join('_');
|
||||
const uniqueIndicator = index.unique ? '_unique' : '';
|
||||
const indexName = `\`idx_${table.name}_${fieldNamesForIndex}${uniqueIndicator}\``;
|
||||
|
||||
// Get the properly quoted field names
|
||||
const indexFieldNames = indexFields
|
||||
.map((field) => (field ? `\`${field.name}\`` : ''))
|
||||
.filter(Boolean);
|
||||
|
||||
// Check for text/blob fields that need special handling
|
||||
const hasTextOrBlob = indexFields.some((field) => {
|
||||
const typeName =
|
||||
field?.type.name.toLowerCase() || '';
|
||||
return (
|
||||
typeName === 'text' ||
|
||||
typeName === 'mediumtext' ||
|
||||
typeName === 'longtext' ||
|
||||
typeName === 'blob'
|
||||
);
|
||||
});
|
||||
|
||||
// If there are TEXT/BLOB fields, a prefix length needs to be added
|
||||
const indexFieldsWithPrefix = hasTextOrBlob
|
||||
? indexFieldNames.map((name) => {
|
||||
const field = indexFields.find(
|
||||
(f) => `\`${f?.name}\`` === name
|
||||
);
|
||||
if (!field) return name;
|
||||
|
||||
const typeName =
|
||||
field.type.name.toLowerCase();
|
||||
if (
|
||||
typeName === 'text' ||
|
||||
typeName === 'mediumtext' ||
|
||||
typeName === 'longtext' ||
|
||||
typeName === 'blob'
|
||||
) {
|
||||
// Add a prefix length for TEXT/BLOB fields (required in MySQL)
|
||||
return `${name}(255)`;
|
||||
}
|
||||
return name;
|
||||
})
|
||||
: indexFieldNames;
|
||||
|
||||
return indexFieldNames.length > 0
|
||||
? `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName}\nON ${tableName} (${indexFieldsWithPrefix.join(', ')});\n`
|
||||
: '';
|
||||
})
|
||||
.filter(Boolean)
|
||||
.join('\n')
|
||||
}`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings (views)
|
||||
.join('\n');
|
||||
|
||||
// Generate foreign keys
|
||||
if (relationships.length > 0) {
|
||||
sqlScript += '\n-- Foreign key constraints\n\n';
|
||||
|
||||
sqlScript += relationships
|
||||
.map((r: DBRelationship) => {
|
||||
const sourceTable = tables.find(
|
||||
(t) => t.id === r.sourceTableId
|
||||
);
|
||||
const targetTable = tables.find(
|
||||
(t) => t.id === r.targetTableId
|
||||
);
|
||||
|
||||
if (
|
||||
!sourceTable ||
|
||||
!targetTable ||
|
||||
sourceTable.isView ||
|
||||
targetTable.isView
|
||||
) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const sourceField = sourceTable.fields.find(
|
||||
(f) => f.id === r.sourceFieldId
|
||||
);
|
||||
const targetField = targetTable.fields.find(
|
||||
(f) => f.id === r.targetFieldId
|
||||
);
|
||||
|
||||
if (!sourceField || !targetField) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const sourceTableName = sourceTable.schema
|
||||
? `\`${sourceTable.schema}\`.\`${sourceTable.name}\``
|
||||
: `\`${sourceTable.name}\``;
|
||||
const targetTableName = targetTable.schema
|
||||
? `\`${targetTable.schema}\`.\`${targetTable.name}\``
|
||||
: `\`${targetTable.name}\``;
|
||||
|
||||
// Create a descriptive constraint name
|
||||
const constraintName = `\`fk_${sourceTable.name}_${sourceField.name}\``;
|
||||
|
||||
// MySQL supports ON DELETE and ON UPDATE actions
|
||||
return `ALTER TABLE ${sourceTableName}\nADD CONSTRAINT ${constraintName} FOREIGN KEY(\`${sourceField.name}\`) REFERENCES ${targetTableName}(\`${targetField.name}\`)\nON UPDATE CASCADE ON DELETE RESTRICT;\n`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
// Commit transaction
|
||||
sqlScript += '\nCOMMIT;\n';
|
||||
|
||||
return sqlScript;
|
||||
}
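// Minimal usage sketch (not part of the diff), assuming a Diagram instance obtained
// elsewhere in the app (e.g. from storage or the editor context):
//   const ddl = exportMySQL(diagram);
//   // ddl starts with "-- MySQL database export" and wraps all statements
//   // in START TRANSACTION ... COMMIT.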
|
||||
364
src/lib/data/export-metadata/export-per-type/postgresql.ts
Normal file
@@ -0,0 +1,364 @@
|
||||
import {
|
||||
exportFieldComment,
|
||||
isFunction,
|
||||
isKeyword,
|
||||
strHasQuotes,
|
||||
} from './common';
|
||||
import type { Diagram } from '@/lib/domain/diagram';
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import type { DBField } from '@/lib/domain/db-field';
|
||||
import type { DBRelationship } from '@/lib/domain/db-relationship';
|
||||
|
||||
function parsePostgresDefault(field: DBField): string {
|
||||
if (!field.default) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const defaultValue = field.default.trim();
|
||||
|
||||
// Handle specific PostgreSQL default values
|
||||
if (defaultValue.toLowerCase() === 'now()') {
|
||||
return 'CURRENT_TIMESTAMP';
|
||||
}
|
||||
|
||||
// Handle PostgreSQL functions for JSON/JSONB types
|
||||
if (
|
||||
(field.type.name.toLowerCase() === 'json' ||
|
||||
field.type.name.toLowerCase() === 'jsonb') &&
|
||||
(defaultValue.includes('json_build_object') ||
|
||||
defaultValue.includes('jsonb_build_object') ||
|
||||
defaultValue.includes('json_build_array') ||
|
||||
defaultValue.includes('jsonb_build_array') ||
|
||||
defaultValue.includes('to_json') ||
|
||||
defaultValue.includes('to_jsonb'))
|
||||
) {
|
||||
// Remove any enclosing quotes and return the function call as is
|
||||
return defaultValue.replace(/^'(.*)'$/, '$1').replace(/''/, "'");
|
||||
}
|
||||
|
||||
// Handle nextval sequences for PostgreSQL
|
||||
if (defaultValue.includes('nextval')) {
|
||||
return defaultValue; // Keep it as is for PostgreSQL
|
||||
}
|
||||
|
||||
// If it's a function call, keep it as is
|
||||
if (isFunction(defaultValue)) {
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
// If it's a keyword, keep it as is
|
||||
if (isKeyword(defaultValue)) {
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
// If it already has quotes, keep it as is
|
||||
if (strHasQuotes(defaultValue)) {
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
// If it's a number, keep it as is
|
||||
if (/^-?\d+(\.\d+)?$/.test(defaultValue)) {
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
// For other cases, add quotes
|
||||
return `'${defaultValue.replace(/'/g, "''")}'`;
|
||||
}
|
||||
|
||||
// Map problematic types to PostgreSQL compatible types
|
||||
function mapPostgresType(typeName: string, fieldName: string): string {
|
||||
typeName = typeName.toLowerCase();
|
||||
fieldName = fieldName.toLowerCase();
|
||||
|
||||
// Handle known problematic types
|
||||
if (typeName === 'user-defined') {
|
||||
return 'jsonb'; // Default fallback for user-defined types
|
||||
}
|
||||
|
||||
// Handle generic "array" type (when not specified as array of what)
|
||||
if (typeName === 'array') {
|
||||
return 'text[]'; // Default to text array
|
||||
}
|
||||
|
||||
// Handle array type notation
|
||||
if (typeName.endsWith('[]')) {
|
||||
const baseType = mapPostgresType(typeName.slice(0, -2), fieldName);
|
||||
return `${baseType}[]`;
|
||||
}
|
||||
|
||||
// Default case: return the type as is
|
||||
return typeName;
|
||||
}
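// Illustrative examples (not part of the diff):
//   mapPostgresType('user-defined', 'payload') -> 'jsonb'
//   mapPostgresType('array', 'tags')           -> 'text[]'
//   mapPostgresType('uuid[]', 'ids')           -> 'uuid[]' (base type mapped recursively)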
|
||||
|
||||
export function exportPostgreSQL(diagram: Diagram): string {
|
||||
if (!diagram.tables || !diagram.relationships) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const tables = diagram.tables;
|
||||
const relationships = diagram.relationships;
|
||||
|
||||
// Create CREATE SCHEMA statements for all schemas
|
||||
let sqlScript = '';
|
||||
const schemas = new Set<string>();
|
||||
|
||||
tables.forEach((table) => {
|
||||
if (table.schema) {
|
||||
schemas.add(table.schema);
|
||||
}
|
||||
});
|
||||
|
||||
// Add schema creation statements
|
||||
schemas.forEach((schema) => {
|
||||
sqlScript += `CREATE SCHEMA IF NOT EXISTS "${schema}";\n`;
|
||||
});
|
||||
sqlScript += '\n';
|
||||
|
||||
// Add sequence creation statements
|
||||
const sequences = new Set<string>();
|
||||
|
||||
tables.forEach((table) => {
|
||||
table.fields.forEach((field) => {
|
||||
if (field.default) {
|
||||
// Match nextval('schema.sequence_name') or nextval('sequence_name')
|
||||
const match = field.default.match(
|
||||
/nextval\('([^']+)'(?:::[^)]+)?\)/
|
||||
);
|
||||
if (match) {
|
||||
sequences.add(match[1]);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
sequences.forEach((sequence) => {
|
||||
sqlScript += `CREATE SEQUENCE IF NOT EXISTS ${sequence};\n`;
|
||||
});
|
||||
sqlScript += '\n';
|
||||
|
||||
// Generate table creation SQL
|
||||
sqlScript += tables
|
||||
.map((table: DBTable) => {
|
||||
// Skip views
|
||||
if (table.isView) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const tableName = table.schema
|
||||
? `"${table.schema}"."${table.name}"`
|
||||
: `"${table.name}"`;
|
||||
|
||||
// Get primary key fields
|
||||
const primaryKeyFields = table.fields.filter((f) => f.primaryKey);
|
||||
|
||||
return `${
|
||||
table.comments ? `-- ${table.comments}\n` : ''
|
||||
}CREATE TABLE ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `"${field.name}"`;
|
||||
|
||||
// Handle type name - map problematic types to PostgreSQL compatible types
|
||||
const typeName = mapPostgresType(
|
||||
field.type.name,
|
||||
field.name
|
||||
);
|
||||
|
||||
// Handle PostgreSQL specific type formatting
|
||||
let typeWithSize = typeName;
|
||||
if (field.characterMaximumLength) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'varchar' ||
|
||||
typeName.toLowerCase() === 'character varying' ||
|
||||
typeName.toLowerCase() === 'char' ||
|
||||
typeName.toLowerCase() === 'character'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.characterMaximumLength})`;
|
||||
}
|
||||
} else if (field.precision && field.scale) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision}, ${field.scale})`;
|
||||
}
|
||||
} else if (field.precision) {
|
||||
if (
|
||||
typeName.toLowerCase() === 'decimal' ||
|
||||
typeName.toLowerCase() === 'numeric'
|
||||
) {
|
||||
typeWithSize = `${typeName}(${field.precision})`;
|
||||
}
|
||||
}
|
||||
|
||||
// Handle array types (check if the type name ends with '[]')
|
||||
if (typeName.endsWith('[]')) {
|
||||
typeWithSize = typeWithSize.replace('[]', '') + '[]';
|
||||
}
|
||||
|
||||
const notNull = field.nullable ? '' : ' NOT NULL';
|
||||
|
||||
// Handle identity generation
|
||||
let identity = '';
|
||||
if (field.default && field.default.includes('nextval')) {
|
||||
// PostgreSQL already handles this with DEFAULT nextval()
|
||||
} else if (
|
||||
field.default &&
|
||||
field.default.toLowerCase().includes('identity')
|
||||
) {
|
||||
identity = ' GENERATED BY DEFAULT AS IDENTITY';
|
||||
}
|
||||
|
||||
// Only add UNIQUE constraint if the field is not part of the primary key
|
||||
// This avoids redundant uniqueness constraints
|
||||
const unique =
|
||||
!field.primaryKey && field.unique ? ' UNIQUE' : '';
|
||||
|
||||
// Handle default value using PostgreSQL specific parser
|
||||
const defaultValue =
|
||||
field.default &&
|
||||
!field.default.toLowerCase().includes('identity')
|
||||
? ` DEFAULT ${parsePostgresDefault(field)}`
|
||||
: '';
|
||||
|
||||
// Do not add PRIMARY KEY as a column constraint - will add as table constraint
|
||||
return `${exportFieldComment(field.comments ?? '')} ${fieldName} ${typeWithSize}${notNull}${identity}${unique}${defaultValue}`;
|
||||
})
|
||||
.join(',\n')}${
|
||||
primaryKeyFields.length > 0
|
||||
? `,\n PRIMARY KEY (${primaryKeyFields
|
||||
.map((f) => `"${f.name}"`)
|
||||
.join(', ')})`
|
||||
: ''
|
||||
}\n);\n\n${
|
||||
// Add table comments
|
||||
table.comments
|
||||
? `COMMENT ON TABLE ${tableName} IS '${table.comments.replace(/'/g, "''")}';\n\n`
|
||||
: ''
|
||||
}${
|
||||
// Add column comments
|
||||
table.fields
|
||||
.filter((f) => f.comments)
|
||||
.map(
|
||||
(f) =>
|
||||
`COMMENT ON COLUMN ${tableName}."${f.name}" IS '${f.comments?.replace(/'/g, "''")}';\n`
|
||||
)
|
||||
.join('')
|
||||
}\n${
|
||||
// Add indexes only for non-primary key fields or composite indexes
|
||||
// This avoids duplicate indexes on primary key columns
|
||||
table.indexes
|
||||
.map((index) => {
|
||||
// Get the list of fields for this index
|
||||
const indexFields = index.fieldIds
|
||||
.map((fieldId) => {
|
||||
const field = table.fields.find(
|
||||
(f) => f.id === fieldId
|
||||
);
|
||||
return field ? field : null;
|
||||
})
|
||||
.filter(Boolean);
|
||||
|
||||
// Skip if this index exactly matches the primary key fields
|
||||
// This prevents creating redundant indexes
|
||||
if (
|
||||
primaryKeyFields.length === indexFields.length &&
|
||||
primaryKeyFields.every((pk) =>
|
||||
indexFields.some(
|
||||
(field) => field && field.id === pk.id
|
||||
)
|
||||
)
|
||||
) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Create unique index name using table name and index name
|
||||
// This ensures index names are unique across the database
|
||||
const safeTableName = table.name.replace(
|
||||
/[^a-zA-Z0-9_]/g,
|
||||
'_'
|
||||
);
|
||||
const safeIndexName = index.name.replace(
|
||||
/[^a-zA-Z0-9_]/g,
|
||||
'_'
|
||||
);
|
||||
|
||||
// Limit index name length to avoid PostgreSQL's 63-character identifier limit
|
||||
let combinedName = `${safeTableName}_${safeIndexName}`;
|
||||
if (combinedName.length > 60) {
|
||||
// If too long, use just the index name or a truncated version
|
||||
combinedName =
|
||||
safeIndexName.length > 60
|
||||
? safeIndexName.substring(0, 60)
|
||||
: safeIndexName;
|
||||
}
|
||||
|
||||
const indexName = `"${combinedName}"`;
|
||||
|
||||
// Get the properly quoted field names
|
||||
const indexFieldNames = indexFields
|
||||
.map((field) => (field ? `"${field.name}"` : ''))
|
||||
.filter(Boolean);
|
||||
|
||||
return indexFieldNames.length > 0
|
||||
? `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName}\nON ${tableName} (${indexFieldNames.join(', ')});\n\n`
|
||||
: '';
|
||||
})
|
||||
.filter(Boolean)
|
||||
.join('')
|
||||
}`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings (views)
|
||||
.join('\n');
|
||||
|
||||
// Generate foreign keys
|
||||
sqlScript += `\n${relationships
|
||||
.map((r: DBRelationship) => {
|
||||
const sourceTable = tables.find((t) => t.id === r.sourceTableId);
|
||||
const targetTable = tables.find((t) => t.id === r.targetTableId);
|
||||
|
||||
if (
|
||||
!sourceTable ||
|
||||
!targetTable ||
|
||||
sourceTable.isView ||
|
||||
targetTable.isView
|
||||
) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const sourceField = sourceTable.fields.find(
|
||||
(f) => f.id === r.sourceFieldId
|
||||
);
|
||||
const targetField = targetTable.fields.find(
|
||||
(f) => f.id === r.targetFieldId
|
||||
);
|
||||
|
||||
if (!sourceField || !targetField) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const sourceTableName = sourceTable.schema
|
||||
? `"${sourceTable.schema}"."${sourceTable.name}"`
|
||||
: `"${sourceTable.name}"`;
|
||||
const targetTableName = targetTable.schema
|
||||
? `"${targetTable.schema}"."${targetTable.name}"`
|
||||
: `"${targetTable.name}"`;
|
||||
|
||||
// Create a unique constraint name by combining table and field names
|
||||
// Ensure it stays within PostgreSQL's 63-character limit for identifiers
|
||||
// and doesn't get truncated in a way that breaks SQL syntax
|
||||
const baseName = `fk_${sourceTable.name}_${sourceField.name}_${targetTable.name}_${targetField.name}`;
|
||||
// Limit to 60 chars (63 minus quotes) to ensure the whole identifier stays within limits
|
||||
const safeConstraintName =
|
||||
baseName.length > 60
|
||||
? baseName.substring(0, 60).replace(/[^a-zA-Z0-9_]/g, '_')
|
||||
: baseName.replace(/[^a-zA-Z0-9_]/g, '_');
|
||||
|
||||
const constraintName = `"${safeConstraintName}"`;
|
||||
|
||||
return `ALTER TABLE ${sourceTableName}\nADD CONSTRAINT ${constraintName} FOREIGN KEY("${sourceField.name}") REFERENCES ${targetTableName}("${targetField.name}");\n`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings
|
||||
.join('\n')}`;
|
||||
|
||||
return sqlScript;
|
||||
}
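// Sketch of the constraint naming above (not part of the diff): a hypothetical
// relationship orders.customer_id -> customers.id produces
//   ADD CONSTRAINT "fk_orders_customer_id_customers_id" FOREIGN KEY("customer_id") ...
// and names longer than 60 characters are truncated to respect PostgreSQL's
// 63-character identifier limit.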
|
||||
358
src/lib/data/export-metadata/export-per-type/sqlite.ts
Normal file
@@ -0,0 +1,358 @@
|
||||
import {
|
||||
exportFieldComment,
|
||||
isFunction,
|
||||
isKeyword,
|
||||
strHasQuotes,
|
||||
} from './common';
|
||||
import type { Diagram } from '@/lib/domain/diagram';
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import type { DBField } from '@/lib/domain/db-field';
|
||||
import type { DBRelationship } from '@/lib/domain/db-relationship';
|
||||
|
||||
function parseSQLiteDefault(field: DBField): string {
|
||||
if (!field.default) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const defaultValue = field.default.trim();
|
||||
|
||||
// Handle specific SQLite default values
|
||||
if (
|
||||
defaultValue.toLowerCase() === 'now()' ||
|
||||
defaultValue.toLowerCase() === 'current_timestamp'
|
||||
) {
|
||||
return 'CURRENT_TIMESTAMP';
|
||||
}
|
||||
|
||||
// Handle SQLite auto-increment
|
||||
if (
|
||||
defaultValue.toLowerCase().includes('identity') ||
|
||||
defaultValue.toLowerCase().includes('autoincrement') ||
|
||||
defaultValue.includes('nextval')
|
||||
) {
|
||||
return ''; // SQLite handles this differently with INTEGER PRIMARY KEY AUTOINCREMENT
|
||||
}
|
||||
|
||||
// If it's a function call, convert to SQLite equivalents
|
||||
if (isFunction(defaultValue)) {
|
||||
// Map common PostgreSQL/MSSQL functions to SQLite equivalents
|
||||
if (
|
||||
defaultValue.toLowerCase().includes('newid()') ||
|
||||
defaultValue.toLowerCase().includes('uuid()')
|
||||
) {
|
||||
return 'lower(hex(randomblob(16)))';
|
||||
}
|
||||
|
||||
// For functions we can't translate, return as is (SQLite might not support them)
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
// If it's a keyword, keep it as is
|
||||
if (isKeyword(defaultValue)) {
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
// If it already has quotes, keep it as is
|
||||
if (strHasQuotes(defaultValue)) {
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
// If it's a number, keep it as is
|
||||
if (/^-?\d+(\.\d+)?$/.test(defaultValue)) {
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
// For other cases, add quotes
|
||||
return `'${defaultValue.replace(/'/g, "''")}'`;
|
||||
}
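// Illustrative examples (not part of the diff), assuming isFunction recognises call syntax:
//   default "now()"    -> CURRENT_TIMESTAMP
//   default "newid()"  -> lower(hex(randomblob(16)))
//   default "draft"    -> 'draft'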
|
||||
|
||||
// Map problematic types to SQLite compatible types
|
||||
function mapSQLiteType(typeName: string, isPrimaryKey: boolean): string {
|
||||
typeName = typeName.toLowerCase();
|
||||
|
||||
// Special handling for primary key integer columns (autoincrement requires INTEGER PRIMARY KEY)
|
||||
if (isPrimaryKey && (typeName === 'integer' || typeName === 'int')) {
|
||||
return 'INTEGER'; // Must be uppercase for SQLite to recognize it for AUTOINCREMENT
|
||||
}
|
||||
|
||||
// Map common types to SQLite's simplified type system
|
||||
switch (typeName) {
|
||||
case 'int':
|
||||
case 'smallint':
|
||||
case 'tinyint':
|
||||
case 'mediumint':
|
||||
case 'bigint':
|
||||
return 'INTEGER';
|
||||
|
||||
case 'decimal':
|
||||
case 'numeric':
|
||||
case 'float':
|
||||
case 'double':
|
||||
case 'real':
|
||||
return 'REAL';
|
||||
|
||||
case 'char':
|
||||
case 'nchar':
|
||||
case 'varchar':
|
||||
case 'nvarchar':
|
||||
case 'text':
|
||||
case 'ntext':
|
||||
case 'character varying':
|
||||
case 'character':
|
||||
return 'TEXT';
|
||||
|
||||
case 'date':
|
||||
case 'datetime':
|
||||
case 'timestamp':
|
||||
case 'datetime2':
|
||||
return 'TEXT'; // SQLite doesn't have dedicated date types
|
||||
|
||||
case 'blob':
|
||||
case 'binary':
|
||||
case 'varbinary':
|
||||
case 'image':
|
||||
return 'BLOB';
|
||||
|
||||
case 'bit':
|
||||
case 'boolean':
|
||||
return 'INTEGER'; // SQLite doesn't have a boolean type, use INTEGER
|
||||
|
||||
case 'user-defined':
|
||||
case 'json':
|
||||
case 'jsonb':
|
||||
return 'TEXT'; // Store as JSON text
|
||||
|
||||
case 'array':
|
||||
return 'TEXT'; // Store as serialized array text
|
||||
|
||||
case 'geometry':
|
||||
case 'geography':
|
||||
return 'BLOB'; // Store spatial data as BLOB in SQLite
|
||||
}
|
||||
|
||||
// If type has array notation (ends with []), treat as TEXT
|
||||
if (typeName.endsWith('[]')) {
|
||||
return 'TEXT';
|
||||
}
|
||||
|
||||
// For any other types, keep the original type name (SQLite's type affinity accepts arbitrary type names)
|
||||
return typeName;
|
||||
}
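// Illustrative examples (not part of the diff):
//   mapSQLiteType('bigint', false)  -> 'INTEGER'
//   mapSQLiteType('varchar', false) -> 'TEXT'
//   mapSQLiteType('int', true)      -> 'INTEGER' (eligible for AUTOINCREMENT)
//   mapSQLiteType('uuid', false)    -> 'uuid'    (unknown names pass through)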
|
||||
|
||||
export function exportSQLite(diagram: Diagram): string {
|
||||
if (!diagram.tables || !diagram.relationships) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const tables = diagram.tables;
|
||||
const relationships = diagram.relationships;
|
||||
|
||||
// Start SQL script - SQLite doesn't use schemas, so we skip schema creation
|
||||
let sqlScript = '-- SQLite database export\n\n';
|
||||
|
||||
// Begin transaction for faster import
|
||||
sqlScript += 'BEGIN TRANSACTION;\n\n';
|
||||
|
||||
// SQLite doesn't have sequences, so we skip sequence creation
|
||||
|
||||
// SQLite system tables that should be skipped
|
||||
const sqliteSystemTables = [
|
||||
'sqlite_sequence',
|
||||
'sqlite_stat1',
|
||||
'sqlite_stat2',
|
||||
'sqlite_stat3',
|
||||
'sqlite_stat4',
|
||||
'sqlite_master',
|
||||
];
|
||||
|
||||
// Generate table creation SQL
|
||||
sqlScript += tables
|
||||
.map((table: DBTable) => {
|
||||
// Skip views
|
||||
if (table.isView) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Skip SQLite system tables
|
||||
if (sqliteSystemTables.includes(table.name.toLowerCase())) {
|
||||
return `-- Skipping SQLite system table: "${table.name}"\n`;
|
||||
}
|
||||
|
||||
// SQLite doesn't use schema prefixes, so we use just the table name
|
||||
// Include the schema in a comment if it exists
|
||||
const schemaComment = table.schema
|
||||
? `-- Original schema: ${table.schema}\n`
|
||||
: '';
|
||||
const tableName = `"${table.name}"`;
|
||||
|
||||
// Get primary key fields
|
||||
const primaryKeyFields = table.fields.filter((f) => f.primaryKey);
|
||||
|
||||
// Check if this is a single-column INTEGER PRIMARY KEY (for AUTOINCREMENT)
|
||||
const singleIntegerPrimaryKey =
|
||||
primaryKeyFields.length === 1 &&
|
||||
(primaryKeyFields[0].type.name.toLowerCase() === 'integer' ||
|
||||
primaryKeyFields[0].type.name.toLowerCase() === 'int');
|
||||
|
||||
return `${schemaComment}${
|
||||
table.comments ? `-- ${table.comments}\n` : ''
|
||||
}CREATE TABLE IF NOT EXISTS ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `"${field.name}"`;
|
||||
|
||||
// Handle type name - map to SQLite compatible types
|
||||
const typeName = mapSQLiteType(
|
||||
field.type.name,
|
||||
field.primaryKey
|
||||
);
|
||||
|
||||
// SQLite ignores length specifiers, so we don't add them
|
||||
// We'll keep this simple without size info
|
||||
const typeWithoutSize = typeName;
|
||||
|
||||
const notNull = field.nullable ? '' : ' NOT NULL';
|
||||
|
||||
// Handle autoincrement - only works with INTEGER PRIMARY KEY
|
||||
let autoIncrement = '';
|
||||
if (
|
||||
field.primaryKey &&
|
||||
singleIntegerPrimaryKey &&
|
||||
(field.default?.toLowerCase().includes('identity') ||
|
||||
field.default
|
||||
?.toLowerCase()
|
||||
.includes('autoincrement') ||
|
||||
field.default?.includes('nextval'))
|
||||
) {
|
||||
autoIncrement = ' AUTOINCREMENT';
|
||||
}
|
||||
|
||||
// Only add UNIQUE constraint if the field is not part of the primary key
|
||||
const unique =
|
||||
!field.primaryKey && field.unique ? ' UNIQUE' : '';
|
||||
|
||||
// Handle default value - Special handling for datetime() function
|
||||
let defaultValue = '';
|
||||
if (
|
||||
field.default &&
|
||||
!field.default.toLowerCase().includes('identity') &&
|
||||
!field.default
|
||||
.toLowerCase()
|
||||
.includes('autoincrement') &&
|
||||
!field.default.includes('nextval')
|
||||
) {
|
||||
// Special handling for quoted defaults like datetime(''now'') - map them to CURRENT_TIMESTAMP
|
||||
if (field.default.includes("datetime(''now'')")) {
|
||||
defaultValue = ' DEFAULT CURRENT_TIMESTAMP';
|
||||
} else {
|
||||
defaultValue = ` DEFAULT ${parseSQLiteDefault(field)}`;
|
||||
}
|
||||
}
|
||||
|
||||
// Add PRIMARY KEY inline only for single INTEGER primary key
|
||||
const primaryKey =
|
||||
field.primaryKey && singleIntegerPrimaryKey
|
||||
? ' PRIMARY KEY' + autoIncrement
|
||||
: '';
|
||||
|
||||
return `${exportFieldComment(field.comments ?? '')} ${fieldName} ${typeWithoutSize}${primaryKey}${notNull}${unique}${defaultValue}`;
|
||||
})
|
||||
.join(',\n')}${
|
||||
// Add PRIMARY KEY as table constraint for composite primary keys or non-INTEGER primary keys
|
||||
primaryKeyFields.length > 0 && !singleIntegerPrimaryKey
|
||||
? `,\n PRIMARY KEY (${primaryKeyFields
|
||||
.map((f) => `"${f.name}"`)
|
||||
.join(', ')})`
|
||||
: ''
|
||||
}\n);\n\n${
|
||||
// Add indexes - SQLite doesn't support indexes in CREATE TABLE
|
||||
table.indexes
|
||||
.map((index) => {
|
||||
// Skip indexes that exactly match the primary key
|
||||
const indexFields = index.fieldIds
|
||||
.map((fieldId) => {
|
||||
const field = table.fields.find(
|
||||
(f) => f.id === fieldId
|
||||
);
|
||||
return field ? field : null;
|
||||
})
|
||||
.filter(Boolean);
|
||||
|
||||
// Get the properly quoted field names
|
||||
const indexFieldNames = indexFields
|
||||
.map((field) => (field ? `"${field.name}"` : ''))
|
||||
.filter(Boolean);
|
||||
|
||||
// Skip if this index exactly matches the primary key fields
|
||||
if (
|
||||
primaryKeyFields.length === indexFields.length &&
|
||||
primaryKeyFields.every((pk) =>
|
||||
indexFields.some(
|
||||
(field) => field && field.id === pk.id
|
||||
)
|
||||
)
|
||||
) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Create safe index name
|
||||
const safeIndexName = `${table.name}_${index.name}`
|
||||
.replace(/[^a-zA-Z0-9_]/g, '_')
|
||||
.substring(0, 60);
|
||||
|
||||
return indexFieldNames.length > 0
|
||||
? `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX IF NOT EXISTS "${safeIndexName}"\nON ${tableName} (${indexFieldNames.join(', ')});\n`
|
||||
: '';
|
||||
})
|
||||
.filter(Boolean)
|
||||
.join('\n')
|
||||
}`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings (views)
|
||||
.join('\n');
|
||||
|
||||
// Foreign keys
// SQLite cannot add constraints to an existing table with ALTER TABLE,
// so the equivalent ALTER TABLE statements are emitted below as comments for reference only
|
||||
|
||||
if (relationships.length > 0) {
|
||||
sqlScript += '\n-- Foreign key constraints\n';
|
||||
sqlScript +=
|
||||
'-- Note: SQLite requires foreign_keys pragma to be enabled:\n';
|
||||
sqlScript += '-- PRAGMA foreign_keys = ON;\n\n';
|
||||
|
||||
relationships.forEach((r: DBRelationship) => {
|
||||
const sourceTable = tables.find((t) => t.id === r.sourceTableId);
|
||||
const targetTable = tables.find((t) => t.id === r.targetTableId);
|
||||
|
||||
if (
|
||||
!sourceTable ||
|
||||
!targetTable ||
|
||||
sourceTable.isView ||
|
||||
targetTable.isView ||
|
||||
sqliteSystemTables.includes(sourceTable.name.toLowerCase()) ||
|
||||
sqliteSystemTables.includes(targetTable.name.toLowerCase())
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
const sourceField = sourceTable.fields.find(
|
||||
(f) => f.id === r.sourceFieldId
|
||||
);
|
||||
const targetField = targetTable.fields.find(
|
||||
(f) => f.id === r.targetFieldId
|
||||
);
|
||||
|
||||
if (!sourceField || !targetField) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Create commented out version of what would be ALTER TABLE statement
|
||||
sqlScript += `-- ALTER TABLE "${sourceTable.name}" ADD CONSTRAINT "fk_${sourceTable.name}_${sourceField.name}" FOREIGN KEY("${sourceField.name}") REFERENCES "${targetTable.name}"("${targetField.name}");\n`;
|
||||
});
|
||||
}
|
||||
|
||||
// Commit transaction
|
||||
sqlScript += '\nCOMMIT;\n';
|
||||
|
||||
return sqlScript;
|
||||
}
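// Minimal usage sketch (not part of the diff):
//   const ddl = exportSQLite(diagram);
//   // Foreign keys appear only as commented ALTER TABLE statements, since SQLite
//   // cannot add constraints to an existing table via ALTER TABLE.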
|
||||
@@ -5,16 +5,39 @@ import type { DBTable } from '@/lib/domain/db-table';
|
||||
import type { DataType } from '../data-types/data-types';
|
||||
import { generateCacheKey, getFromCache, setInCache } from './export-sql-cache';
|
||||
import { exportMSSQL } from './export-per-type/mssql';
|
||||
import { exportPostgreSQL } from './export-per-type/postgresql';
|
||||
import { exportSQLite } from './export-per-type/sqlite';
|
||||
import { exportMySQL } from './export-per-type/mysql';
|
||||
|
||||
export const exportBaseSQL = (diagram: Diagram): string => {
|
||||
export const exportBaseSQL = ({
|
||||
diagram,
|
||||
targetDatabaseType,
|
||||
isDBMLFlow = false,
|
||||
}: {
|
||||
diagram: Diagram;
|
||||
targetDatabaseType: DatabaseType;
|
||||
isDBMLFlow?: boolean;
|
||||
}): string => {
|
||||
const { tables, relationships } = diagram;
|
||||
|
||||
if (!tables || tables.length === 0) {
|
||||
return '';
|
||||
}
|
||||
|
||||
if (diagram.databaseType === DatabaseType.SQL_SERVER) {
|
||||
return exportMSSQL(diagram);
|
||||
if (!isDBMLFlow && diagram.databaseType === targetDatabaseType) {
|
||||
switch (diagram.databaseType) {
|
||||
case DatabaseType.SQL_SERVER:
|
||||
return exportMSSQL(diagram);
|
||||
case DatabaseType.POSTGRESQL:
|
||||
return exportPostgreSQL(diagram);
|
||||
case DatabaseType.SQLITE:
|
||||
return exportSQLite(diagram);
|
||||
case DatabaseType.MYSQL:
|
||||
case DatabaseType.MARIADB:
|
||||
return exportMySQL(diagram);
|
||||
default:
|
||||
return exportPostgreSQL(diagram);
|
||||
}
|
||||
}
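// Example (illustrative, not part of the diff): exportBaseSQL({ diagram,
// targetDatabaseType: DatabaseType.POSTGRESQL }) returns the native exportPostgreSQL
// output when the diagram is already PostgreSQL; otherwise execution falls through
// to the generic SQL builder below (as it also does for the DBML flow).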
|
||||
|
||||
// Filter out the tables that are views
|
||||
@@ -72,6 +95,11 @@ export const exportBaseSQL = (diagram: Diagram): string => {
|
||||
table.fields.forEach((field, index) => {
|
||||
let typeName = field.type.name;
|
||||
|
||||
// Handle ENUM type
|
||||
if (typeName.toLowerCase() === 'enum') {
|
||||
typeName = 'varchar';
|
||||
}
|
||||
|
||||
// Temp fix for 'array' to be text[]
|
||||
if (typeName.toLowerCase() === 'array') {
|
||||
typeName = 'text[]';
|
||||
@@ -115,8 +143,22 @@ export const exportBaseSQL = (diagram: Diagram): string => {
|
||||
|
||||
// Remove the type cast part after :: if it exists
|
||||
if (fieldDefault.includes('::')) {
|
||||
const endedWithParentheses = fieldDefault.endsWith(')');
|
||||
fieldDefault = fieldDefault.split('::')[0];
|
||||
|
||||
if (
|
||||
(fieldDefault.startsWith('(') &&
|
||||
!fieldDefault.endsWith(')')) ||
|
||||
endedWithParentheses
|
||||
) {
|
||||
fieldDefault += ')';
|
||||
}
|
||||
}
|
||||
|
||||
if (fieldDefault === `('now')`) {
|
||||
fieldDefault = `now()`;
|
||||
}
|
||||
|
||||
sqlScript += ` DEFAULT ${fieldDefault}`;
|
||||
}
|
||||
|
||||
@@ -230,8 +272,12 @@ export const exportSQL = async (
|
||||
signal?: AbortSignal;
|
||||
}
|
||||
): Promise<string> => {
|
||||
const sqlScript = exportBaseSQL(diagram);
|
||||
if (databaseType === DatabaseType.SQL_SERVER) {
|
||||
const sqlScript = exportBaseSQL({
|
||||
diagram,
|
||||
targetDatabaseType: databaseType,
|
||||
});
|
||||
|
||||
if (databaseType === diagram.databaseType) {
|
||||
return sqlScript;
|
||||
}
|
||||
|
||||
@@ -395,7 +441,7 @@ const generateSQLPrompt = (databaseType: DatabaseType, sqlScript: string) => {
|
||||
- **Sequence Creation**: Use \`CREATE SEQUENCE IF NOT EXISTS\` for sequence creation.
|
||||
- **Table and Index Creation**: Use \`CREATE TABLE IF NOT EXISTS\` and \`CREATE INDEX IF NOT EXISTS\` to avoid errors if the object already exists.
|
||||
- **Serial and Identity Columns**: For auto-increment columns, use \`SERIAL\` or \`GENERATED BY DEFAULT AS IDENTITY\`.
|
||||
- **Conditional Statements**: Utilize PostgreSQL’s support for \`IF NOT EXISTS\` in relevant \`CREATE\` statements.
|
||||
- **Conditional Statements**: Utilize PostgreSQL's support for \`IF NOT EXISTS\` in relevant \`CREATE\` statements.
|
||||
`,
|
||||
mysql: `
|
||||
- **Table Creation**: Use \`CREATE TABLE IF NOT EXISTS\` for creating tables. While creating the table structure, ensure that all foreign key columns use the correct data types as determined in the foreign key review.
|
||||
@@ -415,7 +461,7 @@ const generateSQLPrompt = (databaseType: DatabaseType, sqlScript: string) => {
|
||||
sql_server: `
|
||||
- **Sequence Creation**: Use \`CREATE SEQUENCE\` without \`IF NOT EXISTS\`, and employ conditional logic (\`IF NOT EXISTS\`) to check for sequence existence before creation.
|
||||
- **Identity Columns**: Always prefer using the \`IDENTITY\` keyword (e.g., \`INT IDENTITY(1,1)\`) for auto-incrementing primary key columns when possible.
|
||||
- **Conditional Logic**: Use a conditional block like \`IF NOT EXISTS (SELECT * FROM sys.objects WHERE ...)\` since SQL Server doesn’t support \`IF NOT EXISTS\` directly in \`CREATE\` statements.
|
||||
- **Conditional Logic**: Use a conditional block like \`IF NOT EXISTS (SELECT * FROM sys.objects WHERE ...)\` since SQL Server doesn't support \`IF NOT EXISTS\` directly in \`CREATE\` statements.
|
||||
- **Avoid Unsupported Syntax**: Ensure the script does not include unsupported statements like \`CREATE TABLE IF NOT EXISTS\`.
|
||||
|
||||
**Reminder**: Ensure all column names that conflict with reserved keywords or data types (e.g., key, primary, column, table), escape the column name by enclosing it.
|
||||
@@ -449,7 +495,7 @@ const generateSQLPrompt = (databaseType: DatabaseType, sqlScript: string) => {
|
||||
- **Sequence Creation**: Use \`CREATE SEQUENCE IF NOT EXISTS\` for sequence creation.
|
||||
- **Table and Index Creation**: Use \`CREATE TABLE IF NOT EXISTS\` and \`CREATE INDEX IF NOT EXISTS\` to avoid errors if the object already exists.
|
||||
- **Serial and Identity Columns**: For auto-increment columns, use \`SERIAL\` or \`GENERATED BY DEFAULT AS IDENTITY\`.
|
||||
- **Conditional Statements**: Utilize PostgreSQL’s support for \`IF NOT EXISTS\` in relevant \`CREATE\` statements.
|
||||
- **Conditional Statements**: Utilize PostgreSQL's support for \`IF NOT EXISTS\` in relevant \`CREATE\` statements.
|
||||
`,
|
||||
};
|
||||
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
import { z } from 'zod';
|
||||
|
||||
export interface ColumnInfo {
|
||||
schema: string;
|
||||
table: string;
|
||||
name: string;
|
||||
type: string;
|
||||
ordinal_position: number;
|
||||
nullable: boolean;
|
||||
nullable: boolean | number;
|
||||
character_maximum_length?: string | null; // The maximum length of the column (if applicable), nullable
|
||||
precision?: {
|
||||
precision: number | null; // The precision for numeric types
|
||||
@@ -14,3 +16,23 @@ export interface ColumnInfo {
|
||||
collation?: string | null;
|
||||
comment?: string | null;
|
||||
}
|
||||
|
||||
export const ColumnInfoSchema: z.ZodType<ColumnInfo> = z.object({
|
||||
schema: z.string(),
|
||||
table: z.string(),
|
||||
name: z.string(),
|
||||
type: z.string(),
|
||||
ordinal_position: z.number(),
|
||||
nullable: z.union([z.boolean(), z.number()]),
|
||||
character_maximum_length: z.string().nullable().optional(),
|
||||
precision: z
|
||||
.object({
|
||||
precision: z.number().nullable(),
|
||||
scale: z.number().nullable(),
|
||||
})
|
||||
.nullable()
|
||||
.optional(),
|
||||
default: z.string().nullable().optional(),
|
||||
collation: z.string().nullable().optional(),
|
||||
comment: z.string().nullable().optional(),
|
||||
});
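// Illustrative sketch (not part of the diff): validating a single column row.
// The boolean-or-number union on `nullable` is what lets rows reporting 0/1 pass.
//   ColumnInfoSchema.safeParse({
//       schema: 'public', table: 'users', name: 'id', type: 'integer',
//       ordinal_position: 1, nullable: 0,
//   }).success // -> true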
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
import type { ForeignKeyInfo } from './foreign-key-info';
|
||||
import type { PrimaryKeyInfo } from './primary-key-info';
|
||||
import type { ColumnInfo } from './column-info';
|
||||
import type { IndexInfo } from './index-info';
|
||||
import type { TableInfo } from './table-info';
|
||||
import type { ViewInfo } from './view-info';
|
||||
import { z } from 'zod';
|
||||
import { ForeignKeyInfoSchema, type ForeignKeyInfo } from './foreign-key-info';
|
||||
import { PrimaryKeyInfoSchema, type PrimaryKeyInfo } from './primary-key-info';
|
||||
import { ColumnInfoSchema, type ColumnInfo } from './column-info';
|
||||
import { IndexInfoSchema, type IndexInfo } from './index-info';
|
||||
import { TableInfoSchema, type TableInfo } from './table-info';
|
||||
import { ViewInfoSchema, type ViewInfo } from './view-info';
|
||||
|
||||
export interface DatabaseMetadata {
|
||||
fk_info: ForeignKeyInfo[];
|
||||
pk_info: PrimaryKeyInfo[];
|
||||
@@ -15,16 +17,26 @@ export interface DatabaseMetadata {
|
||||
version: string;
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
export const isDatabaseMetadata = (obj: any): boolean => {
|
||||
return (
|
||||
Array.isArray(obj.fk_info) &&
|
||||
Array.isArray(obj.pk_info) &&
|
||||
Array.isArray(obj.columns) &&
|
||||
Array.isArray(obj.indexes) &&
|
||||
Array.isArray(obj.tables) &&
|
||||
Array.isArray(obj.views)
|
||||
);
|
||||
export const DatabaseMetadataSchema: z.ZodType<DatabaseMetadata> = z.object({
|
||||
fk_info: z.array(ForeignKeyInfoSchema),
|
||||
pk_info: z.array(PrimaryKeyInfoSchema),
|
||||
columns: z.array(ColumnInfoSchema),
|
||||
indexes: z.array(IndexInfoSchema),
|
||||
tables: z.array(TableInfoSchema),
|
||||
views: z.array(ViewInfoSchema),
|
||||
database_name: z.string(),
|
||||
version: z.string(),
|
||||
});
|
||||
|
||||
export const isDatabaseMetadata = (obj: unknown): boolean => {
|
||||
const parsedObject = DatabaseMetadataSchema.safeParse(obj);
|
||||
|
||||
if (!parsedObject.success) {
|
||||
console.error(parsedObject.error);
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
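For illustration, a minimal sketch (not part of the diff) of how the stricter Zod validation above might be used when importing pasted metadata; parseMetadataJson and rawJson are hypothetical names:

// Hypothetical usage sketch, assuming the exports defined above.
const parseMetadataJson = (rawJson: string): DatabaseMetadata | null => {
    let candidate: unknown;
    try {
        candidate = JSON.parse(rawJson);
    } catch (error) {
        console.error('Metadata is not valid JSON', error);
        return null;
    }
    const result = DatabaseMetadataSchema.safeParse(candidate);
    if (!result.success) {
        console.error(result.error); // same strict failure path as isDatabaseMetadata
        return null;
    }
    return result.data;
};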
|
||||
export const loadDatabaseMetadata = (jsonString: string): DatabaseMetadata => {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import { z } from 'zod';
|
||||
|
||||
export interface ForeignKeyInfo {
|
||||
schema: string;
|
||||
table: string;
|
||||
@@ -8,3 +10,14 @@ export interface ForeignKeyInfo {
|
||||
reference_column: string;
|
||||
fk_def: string;
|
||||
}
|
||||
|
||||
export const ForeignKeyInfoSchema: z.ZodType<ForeignKeyInfo> = z.object({
|
||||
schema: z.string(),
|
||||
table: z.string(),
|
||||
column: z.string(),
|
||||
foreign_key_name: z.string(),
|
||||
reference_schema: z.string().optional(),
|
||||
reference_table: z.string(),
|
||||
reference_column: z.string(),
|
||||
fk_def: z.string(),
|
||||
});
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { schemaNameToDomainSchemaName } from '@/lib/domain/db-schema';
|
||||
import type { TableInfo } from './table-info';
|
||||
import { z } from 'zod';
|
||||
|
||||
export interface IndexInfo {
|
||||
schema: string;
|
||||
@@ -7,14 +8,26 @@ export interface IndexInfo {
|
||||
name: string;
|
||||
column: string;
|
||||
index_type: string;
|
||||
cardinality: number;
|
||||
size: number;
|
||||
unique: boolean;
|
||||
is_partial_index: boolean;
|
||||
cardinality?: number | null;
|
||||
size?: number | null;
|
||||
unique: boolean | number;
|
||||
direction: string;
|
||||
column_position: number;
|
||||
}
|
||||
|
||||
export const IndexInfoSchema: z.ZodType<IndexInfo> = z.object({
|
||||
schema: z.string(),
|
||||
table: z.string(),
|
||||
name: z.string(),
|
||||
column: z.string(),
|
||||
index_type: z.string(),
|
||||
cardinality: z.number().nullable().optional(),
|
||||
size: z.number().nullable().optional(),
|
||||
unique: z.union([z.boolean(), z.number()]),
|
||||
direction: z.string(),
|
||||
column_position: z.number(),
|
||||
});
|
||||
|
||||
export type AggregatedIndexInfo = Omit<IndexInfo, 'column'> & {
|
||||
columns: { name: string; position: number }[];
|
||||
};
|
||||
|
||||
@@ -1,6 +1,15 @@
|
||||
import { z } from 'zod';
|
||||
|
||||
export interface PrimaryKeyInfo {
|
||||
schema: string;
|
||||
table: string;
|
||||
column: string;
|
||||
pk_def: string;
|
||||
}
|
||||
|
||||
export const PrimaryKeyInfoSchema: z.ZodType<PrimaryKeyInfo> = z.object({
|
||||
schema: z.string(),
|
||||
table: z.string(),
|
||||
column: z.string(),
|
||||
pk_def: z.string(),
|
||||
});
|
||||
|
||||
@@ -1,9 +1,21 @@
|
||||
import { z } from 'zod';
|
||||
|
||||
export interface TableInfo {
|
||||
schema: string;
|
||||
table: string;
|
||||
rows: number;
|
||||
type: string;
|
||||
engine: string;
|
||||
collation: string;
|
||||
rows?: number;
|
||||
type?: string;
|
||||
engine?: string;
|
||||
collation?: string;
|
||||
comment?: string;
|
||||
}
|
||||
|
||||
export const TableInfoSchema: z.ZodType<TableInfo> = z.object({
|
||||
schema: z.string(),
|
||||
table: z.string(),
|
||||
rows: z.number().optional(),
|
||||
type: z.string().optional(),
|
||||
engine: z.string().optional(),
|
||||
collation: z.string().optional(),
|
||||
comment: z.string().optional(),
|
||||
});
|
||||
|
||||
@@ -1,5 +1,13 @@
|
||||
import { z } from 'zod';
|
||||
|
||||
export interface ViewInfo {
|
||||
schema: string;
|
||||
view_name: string;
|
||||
view_definition?: string;
|
||||
}
|
||||
|
||||
export const ViewInfoSchema: z.ZodType<ViewInfo> = z.object({
|
||||
schema: z.string(),
|
||||
view_name: z.string(),
|
||||
view_definition: z.string().optional(),
|
||||
});
|
||||
|
||||
@@ -4,7 +4,7 @@ cols AS (
|
||||
concat('{"schema":"', col_tuple.1, '"',
|
||||
',"table":"', col_tuple.2, '"',
|
||||
',"name":"', col_tuple.3, '"',
|
||||
',"ordinal_position":"', toString(col_tuple.4), '"',
|
||||
',"ordinal_position":', toString(col_tuple.4),
|
||||
',"type":"', col_tuple.5, '"',
|
||||
',"nullable":"', if(col_tuple.6 = 'NULLABLE', 'true', 'false'), '"',
|
||||
',"default":"', if(col_tuple.7 = '', 'null', col_tuple.7), '"',
|
||||
|
||||
@@ -96,8 +96,7 @@ indexes_cols AS (
|
||||
(CASE WHEN i.indisunique = TRUE THEN 'true' ELSE 'false' END) AS is_unique,
|
||||
irel.reltuples AS cardinality,
|
||||
1 + Array_position(i.indkey, a.attnum) AS column_position,
|
||||
CASE o.OPTION & 1 WHEN 1 THEN 'DESC' ELSE 'ASC' END AS direction,
|
||||
CASE WHEN indpred IS NOT NULL THEN 'true' ELSE 'false' END AS is_partial_index
|
||||
CASE o.OPTION & 1 WHEN 1 THEN 'DESC' ELSE 'ASC' END AS direction
|
||||
FROM pg_index AS i
|
||||
JOIN pg_class AS trel ON trel.oid = i.indrelid
|
||||
JOIN pg_namespace AS tnsp ON trel.relnamespace = tnsp.oid
|
||||
@@ -114,8 +113,8 @@ cols AS (
|
||||
SELECT array_to_string(array_agg(CONCAT('{"schema":"', cols.table_schema::TEXT,
|
||||
'","table":"', cols.table_name::TEXT,
|
||||
'","name":"', cols.column_name::TEXT,
|
||||
'","ordinal_position":"', cols.ordinal_position::TEXT,
|
||||
'","type":"', LOWER(replace(cols.data_type::TEXT, '"', '')),
|
||||
'","ordinal_position":', cols.ordinal_position::TEXT,
|
||||
',"type":"', LOWER(replace(cols.data_type::TEXT, '"', '')),
|
||||
'","character_maximum_length":"', COALESCE(cols.character_maximum_length::TEXT, 'null'),
|
||||
'","precision":',
|
||||
CASE
|
||||
@@ -124,7 +123,7 @@ cols AS (
|
||||
',"scale":', COALESCE(cols.numeric_scale::TEXT, 'null'), '}')
|
||||
ELSE 'null'
|
||||
END,
|
||||
',"nullable":', CASE WHEN (cols.IS_NULLABLE = 'YES') THEN 'true' ELSE 'false' END::TEXT,
|
||||
',"nullable":', CASE WHEN (cols.IS_NULLABLE = 'YES') THEN true ELSE false END::TEXT,
|
||||
',"default":"', COALESCE(replace(replace(cols.column_default::TEXT, '"', '\\"'), '\\x', '\\\\x'), ''),
|
||||
'","collation":"', COALESCE(cols.COLLATION_NAME::TEXT, ''),
|
||||
'","comment":"', COALESCE(replace(replace(dsc.description::TEXT, '"', '\\"'), '\\x', '\\\\x'), ''),
|
||||
@@ -146,7 +145,6 @@ cols AS (
|
||||
'","cardinality":', COALESCE(cardinality::TEXT, '0'),
|
||||
',"size":', COALESCE(index_size::TEXT, 'null'),
|
||||
',"unique":', is_unique::TEXT,
|
||||
',"is_partial_index":', is_partial_index::TEXT,
|
||||
',"column_position":', column_position::TEXT,
|
||||
',"direction":"', LOWER(direction::TEXT),
|
||||
'"}')), ',') AS indexes_metadata
|
||||
|
||||
@@ -74,8 +74,8 @@ export const mariaDBQuery = `WITH fk_info as (
|
||||
',"scale":', IFNULL(cols.numeric_scale, 'null'), '}')
|
||||
ELSE 'null'
|
||||
END,
|
||||
',"ordinal_position":"', cols.ordinal_position,
|
||||
'","nullable":', IF(cols.is_nullable = 'YES', 'true', 'false'),
|
||||
',"ordinal_position":', cols.ordinal_position,
|
||||
',"nullable":', IF(cols.is_nullable = 'YES', 'true', 'false'),
|
||||
',"default":"', IFNULL(REPLACE(REPLACE(cols.column_default, '\\\\', ''), '"', '\\"'), ''),
|
||||
'","collation":"', IFNULL(cols.collation_name, ''), '"}'
|
||||
)))))
|
||||
@@ -88,7 +88,7 @@ export const mariaDBQuery = `WITH fk_info as (
|
||||
AND (0x00) IN (@indexes:=CONCAT_WS(',', @indexes, CONCAT('{"schema":"',indexes.table_schema,
|
||||
'","table":"',indexes.table_name,
|
||||
'","name":"', indexes.index_name,
|
||||
'","size":"',
|
||||
'","size":',
|
||||
(SELECT IFNULL(SUM(stat_value * @@innodb_page_size), -1) AS size_in_bytes
|
||||
FROM mysql.innodb_index_stats
|
||||
WHERE stat_name = 'size'
|
||||
@@ -96,11 +96,12 @@ export const mariaDBQuery = `WITH fk_info as (
|
||||
AND index_name = indexes.index_name
|
||||
AND TABLE_NAME = indexes.table_name
|
||||
AND database_name = indexes.table_schema),
|
||||
'","column":"', indexes.column_name,
|
||||
',"column":"', indexes.column_name,
|
||||
'","index_type":"', LOWER(indexes.index_type),
|
||||
'","cardinality":', indexes.cardinality,
|
||||
',"direction":"', (CASE WHEN indexes.collation = 'D' THEN 'desc' ELSE 'asc' END),
|
||||
'","unique":', IF(indexes.non_unique = 1, 'false', 'true'), '}')))))
|
||||
'","column_position":', indexes.seq_in_index,
|
||||
',"unique":', IF(indexes.non_unique = 1, 'false', 'true'), '}')))))
|
||||
), tbls as
|
||||
(
|
||||
(SELECT (@tbls:=NULL),
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
export const minimizeQuery = (query: string) => {
|
||||
if (!query) return '';
|
||||
|
||||
// Split into lines, trim leading spaces from each line, then rejoin
|
||||
return query
|
||||
.replace(/\s+/g, ' ') // Replace multiple spaces with a single space
|
||||
.replace(/\s*;\s*/g, ';') // Remove spaces around semicolons
|
||||
.trim(); // Remove leading and trailing spaces
|
||||
.split('\n')
|
||||
.map((line) => line.replace(/^\s+/, '')) // Remove only leading spaces
|
||||
.join('\n');
|
||||
};
|
||||
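A hedged example (not from the source) of the reworked minimizeQuery: leading indentation is stripped from every line while line breaks are preserved, so pasted scripts stay readable:

// Example usage of the new minimizeQuery behavior.
const indented = `SELECT *
        FROM users
        WHERE id = 1;`;
minimizeQuery(indented);
// Returns:
// SELECT *
// FROM users
// WHERE id = 1;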
|
||||
@@ -84,8 +84,8 @@ export const getMySQLQuery = (
|
||||
',"scale":', IFNULL(cols.numeric_scale, 'null'), '}')
|
||||
ELSE 'null'
|
||||
END,
|
||||
',"ordinal_position":"', cols.ordinal_position,
|
||||
'","nullable":', IF(cols.is_nullable = 'YES', 'true', 'false'),
|
||||
',"ordinal_position":', cols.ordinal_position,
|
||||
',"nullable":', IF(cols.is_nullable = 'YES', 'true', 'false'),
|
||||
',"default":"', IFNULL(REPLACE(REPLACE(cols.column_default, '\\\\', ''), '"', 'ֿֿֿ\\"'), ''),
|
||||
'","collation":"', IFNULL(cols.collation_name, ''), '"}'
|
||||
)))))
|
||||
@@ -98,7 +98,7 @@ export const getMySQLQuery = (
|
||||
AND (0x00) IN (@indexes:=CONCAT_WS(',', @indexes, CONCAT('{"schema":"',indexes.table_schema,
|
||||
'","table":"',indexes.table_name,
|
||||
'","name":"', indexes.index_name,
|
||||
'","size":"',
|
||||
'","size":',
|
||||
(SELECT IFNULL(SUM(stat_value * @@innodb_page_size), -1) AS size_in_bytes
|
||||
FROM mysql.innodb_index_stats
|
||||
WHERE stat_name = 'size'
|
||||
@@ -106,7 +106,7 @@ export const getMySQLQuery = (
|
||||
AND index_name = indexes.index_name
|
||||
AND TABLE_NAME = indexes.table_name
|
||||
AND database_name = indexes.table_schema),
|
||||
'","column":"', indexes.column_name,
|
||||
',"column":"', indexes.column_name,
|
||||
'","index_type":"', LOWER(indexes.index_type),
|
||||
'","cardinality":', indexes.cardinality,
|
||||
',"direction":"', (CASE WHEN indexes.collation = 'D' THEN 'desc' ELSE 'asc' END),
|
||||
@@ -209,8 +209,8 @@ export const getMySQLQuery = (
|
||||
IF(cols.data_type IN ('decimal', 'numeric'),
|
||||
CONCAT('{"precision":', IFNULL(cols.numeric_precision, 'null'),
|
||||
',"scale":', IFNULL(cols.numeric_scale, 'null'), '}'), 'null'),
|
||||
',"ordinal_position":"', cols.ordinal_position,
|
||||
'","nullable":', IF(cols.is_nullable = 'YES', 'true', 'false'),
|
||||
',"ordinal_position":', cols.ordinal_position,
|
||||
',"nullable":', IF(cols.is_nullable = 'YES', 'true', 'false'),
|
||||
',"default":"', IFNULL(REPLACE(REPLACE(cols.column_default, '\\\\', ''), '"', '\\"'), ''),
|
||||
'","collation":"', IFNULL(cols.collation_name, ''), '"}')
|
||||
) FROM (
|
||||
@@ -233,7 +233,7 @@ export const getMySQLQuery = (
|
||||
CONCAT('{"schema":"', cast(idx.table_schema as CHAR),
|
||||
'","table":"', idx.table_name,
|
||||
'","name":"', idx.index_name,
|
||||
'","size":"', IFNULL(
|
||||
'","size":', IFNULL(
|
||||
(SELECT SUM(stat_value * @@innodb_page_size)
|
||||
FROM mysql.innodb_index_stats
|
||||
WHERE stat_name = 'size'
|
||||
@@ -241,7 +241,7 @@ export const getMySQLQuery = (
|
||||
AND index_name = idx.index_name
|
||||
AND TABLE_NAME = idx.table_name
|
||||
AND database_name = idx.table_schema), -1),
|
||||
'","column":"', idx.column_name,
|
||||
',"column":"', idx.column_name,
|
||||
'","index_type":"', LOWER(idx.index_type),
|
||||
'","cardinality":', idx.cardinality,
|
||||
',"direction":"', (CASE WHEN idx.collation = 'D' THEN 'desc' ELSE 'asc' END),
|
||||
|
||||
@@ -147,8 +147,7 @@ indexes_cols AS (
|
||||
(CASE WHEN i.indisunique = TRUE THEN 'true' ELSE 'false' END) AS is_unique,
|
||||
irel.reltuples AS cardinality,
|
||||
1 + Array_position(i.indkey, a.attnum) AS column_position,
|
||||
CASE o.OPTION & 1 WHEN 1 THEN 'DESC' ELSE 'ASC' END AS direction,
|
||||
CASE WHEN indpred IS NOT NULL THEN 'true' ELSE 'false' END AS is_partial_index
|
||||
CASE o.OPTION & 1 WHEN 1 THEN 'DESC' ELSE 'ASC' END AS direction
|
||||
FROM pg_index AS i
|
||||
JOIN pg_class AS trel ON trel.oid = i.indrelid
|
||||
JOIN pg_namespace AS tnsp ON trel.relnamespace = tnsp.oid
|
||||
@@ -165,8 +164,8 @@ cols AS (
|
||||
SELECT array_to_string(array_agg(CONCAT('{"schema":"', cols.table_schema,
|
||||
'","table":"', cols.table_name,
|
||||
'","name":"', cols.column_name,
|
||||
'","ordinal_position":"', cols.ordinal_position,
|
||||
'","type":"', LOWER(replace(cols.data_type, '"', '')),
|
||||
'","ordinal_position":', cols.ordinal_position,
|
||||
',"type":"', LOWER(replace(cols.data_type, '"', '')),
|
||||
'","character_maximum_length":"', COALESCE(cols.character_maximum_length::text, 'null'),
|
||||
'","precision":',
|
||||
CASE
|
||||
@@ -203,7 +202,6 @@ cols AS (
|
||||
'","cardinality":', cardinality,
|
||||
',"size":', index_size,
|
||||
',"unique":', is_unique,
|
||||
',"is_partial_index":', is_partial_index,
|
||||
',"column_position":', column_position,
|
||||
',"direction":"', LOWER(direction),
|
||||
'"}')), ',') AS indexes_metadata
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import { getPostgresQuery } from './postgres-script';
|
||||
import { getMySQLQuery } from './mysql-script';
|
||||
import { sqliteQuery } from './sqlite-script';
|
||||
import { getSQLiteQuery } from './sqlite-script';
|
||||
import { getSqlServerQuery } from './sqlserver-script';
|
||||
import { mariaDBQuery } from './maria-script';
|
||||
import type { DatabaseEdition } from '@/lib/domain/database-edition';
|
||||
@@ -21,7 +21,7 @@ export const importMetadataScripts: ImportMetadataScripts = {
|
||||
[DatabaseType.GENERIC]: () => '',
|
||||
[DatabaseType.POSTGRESQL]: getPostgresQuery,
|
||||
[DatabaseType.MYSQL]: getMySQLQuery,
|
||||
[DatabaseType.SQLITE]: () => sqliteQuery,
|
||||
[DatabaseType.SQLITE]: getSQLiteQuery,
|
||||
[DatabaseType.SQL_SERVER]: getSqlServerQuery,
|
||||
[DatabaseType.MARIADB]: () => mariaDBQuery,
|
||||
[DatabaseType.CLICKHOUSE]: () => clickhouseQuery,
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
export const sqliteQuery = `WITH fk_info AS (
|
||||
import { DatabaseEdition } from '@/lib/domain/database-edition';
|
||||
import { DatabaseClient } from '@/lib/domain/database-clients';
|
||||
|
||||
const sqliteQuery = `${`/* Standard SQLite */`}
|
||||
WITH fk_info AS (
|
||||
SELECT
|
||||
json_group_array(
|
||||
json_object(
|
||||
@@ -56,9 +60,9 @@ export const sqliteQuery = `WITH fk_info AS (
|
||||
'name', idx.name,
|
||||
'column', ic.name,
|
||||
'index_type', 'B-TREE', -- SQLite uses B-Trees for indexing
|
||||
'cardinality', '', -- SQLite does not provide cardinality
|
||||
'size', '', -- SQLite does not provide index size
|
||||
'unique', (CASE WHEN idx."unique" = 1 THEN 'true' ELSE 'false' END),
|
||||
'cardinality', null, -- SQLite does not provide cardinality
|
||||
'size', null, -- SQLite does not provide index size
|
||||
'unique', (CASE WHEN idx."unique" = 1 THEN true ELSE false END),
|
||||
'direction', '', -- SQLite does not provide direction info
|
||||
'column_position', ic.seqno + 1 -- Adding 1 to convert from zero-based to one-based index
|
||||
)
|
||||
@@ -103,12 +107,12 @@ export const sqliteQuery = `WITH fk_info AS (
|
||||
CASE
|
||||
WHEN instr(p.type, '(') > 0 THEN
|
||||
json_object(
|
||||
'precision', substr(p.type, instr(p.type, '(') + 1, instr(p.type, ',') - instr(p.type, '(') - 1),
|
||||
'scale', substr(p.type, instr(p.type, ',') + 1, instr(p.type, ')') - instr(p.type, ',') - 1)
|
||||
'precision', CAST(substr(p.type, instr(p.type, '(') + 1, instr(p.type, ',') - instr(p.type, '(') - 1) AS INTEGER),
|
||||
'scale', CAST(substr(p.type, instr(p.type, ',') + 1, instr(p.type, ')') - instr(p.type, ',') - 1) AS INTEGER)
|
||||
)
|
||||
ELSE 'null'
|
||||
ELSE null
|
||||
END
|
||||
ELSE 'null'
|
||||
ELSE null
|
||||
END,
|
||||
'default', COALESCE(REPLACE(p.dflt_value, '"', '\\"'), '')
|
||||
)
|
||||
@@ -163,3 +167,225 @@ replace(replace(replace(
|
||||
'\\"', '"'),'"[', '['), ']"', ']'
|
||||
) AS metadata_json_to_import;
|
||||
`;
|
||||
|
||||
const cloudflareD1Query = `${`/* Cloudflare D1 SQLite */`}
|
||||
WITH fk_info AS (
|
||||
SELECT
|
||||
json_group_array(
|
||||
json_object(
|
||||
'schema', '',
|
||||
'table', m.name,
|
||||
'column', fk.[from],
|
||||
'foreign_key_name',
|
||||
'fk_' || m.name || '_' || fk.[from] || '_' || fk.[table] || '_' || fk.[to],
|
||||
'reference_schema', '',
|
||||
'reference_table', fk.[table],
|
||||
'reference_column', fk.[to],
|
||||
'fk_def',
|
||||
'FOREIGN KEY (' || fk.[from] || ') REFERENCES ' || fk.[table] || '(' || fk.[to] || ')' ||
|
||||
' ON UPDATE ' || fk.on_update || ' ON DELETE ' || fk.on_delete
|
||||
)
|
||||
) AS fk_metadata
|
||||
FROM
|
||||
sqlite_master m
|
||||
JOIN
|
||||
pragma_foreign_key_list(m.name) fk
|
||||
ON
|
||||
m.type = 'table'
|
||||
WHERE
|
||||
m.name NOT LIKE '\\_cf\\_%' ESCAPE '\\'
|
||||
), pk_info AS (
|
||||
SELECT
|
||||
json_group_array(
|
||||
json_object(
|
||||
'schema', '',
|
||||
'table', pk.table_name,
|
||||
'field_count', pk.field_count,
|
||||
'column', pk.pk_column,
|
||||
'pk_def', 'PRIMARY KEY (' || pk.pk_column || ')'
|
||||
)
|
||||
) AS pk_metadata
|
||||
FROM
|
||||
(
|
||||
SELECT
|
||||
m.name AS table_name,
|
||||
COUNT(p.name) AS field_count,
|
||||
GROUP_CONCAT(p.name) AS pk_column
|
||||
FROM
|
||||
sqlite_master m
|
||||
JOIN
|
||||
pragma_table_info(m.name) p
|
||||
ON
|
||||
m.type = 'table' AND p.pk > 0
|
||||
WHERE
|
||||
m.name NOT LIKE '\\_cf\\_%' ESCAPE '\\'
|
||||
GROUP BY
|
||||
m.name
|
||||
) pk
|
||||
), indexes_metadata AS (
|
||||
SELECT
|
||||
json_group_array(
|
||||
json_object(
|
||||
'schema', '',
|
||||
'table', m.name,
|
||||
'name', idx.name,
|
||||
'column', ic.name,
|
||||
'index_type', 'B-TREE',
|
||||
'cardinality', null,
|
||||
'size', null,
|
||||
'unique', (CASE WHEN idx.[unique] = 1 THEN true ELSE false END),
|
||||
'direction', '',
|
||||
'column_position', ic.seqno + 1
|
||||
)
|
||||
) AS indexes_metadata
|
||||
FROM
|
||||
sqlite_master m
|
||||
JOIN
|
||||
pragma_index_list(m.name) idx
|
||||
ON
|
||||
m.type = 'table'
|
||||
JOIN
|
||||
pragma_index_info(idx.name) ic
|
||||
WHERE
|
||||
m.name NOT LIKE '\\_cf\\_%' ESCAPE '\\'
|
||||
), cols AS (
|
||||
SELECT
|
||||
json_group_array(
|
||||
json_object(
|
||||
'schema', '',
|
||||
'table', m.name,
|
||||
'name', p.name,
|
||||
'type',
|
||||
CASE
|
||||
WHEN INSTR(LOWER(p.type), '(') > 0 THEN
|
||||
SUBSTR(LOWER(p.type), 1, INSTR(LOWER(p.type), '(') - 1)
|
||||
ELSE LOWER(p.type)
|
||||
END,
|
||||
'ordinal_position', p.cid,
|
||||
'nullable', (CASE WHEN p.[notnull] = 0 THEN true ELSE false END),
|
||||
'collation', '',
|
||||
'character_maximum_length',
|
||||
CASE
|
||||
WHEN LOWER(p.type) LIKE 'char%' OR LOWER(p.type) LIKE 'varchar%' THEN
|
||||
CASE
|
||||
WHEN INSTR(p.type, '(') > 0 THEN
|
||||
REPLACE(SUBSTR(p.type, INSTR(p.type, '(') + 1, LENGTH(p.type) - INSTR(p.type, '(') - 1), ')', '')
|
||||
ELSE 'null'
|
||||
END
|
||||
ELSE 'null'
|
||||
END,
|
||||
'precision',
|
||||
CASE
|
||||
WHEN LOWER(p.type) LIKE 'decimal%' OR LOWER(p.type) LIKE 'numeric%' THEN
|
||||
CASE
|
||||
WHEN instr(p.type, '(') > 0 THEN
|
||||
json_object(
|
||||
'precision', CAST(substr(p.type, instr(p.type, '(') + 1, instr(p.type, ',') - instr(p.type, '(') - 1) AS INTEGER),
|
||||
'scale', CAST(substr(p.type, instr(p.type, ',') + 1, instr(p.type, ')') - instr(p.type, ',') - 1) AS INTEGER)
|
||||
)
|
||||
ELSE null
|
||||
END
|
||||
ELSE null
|
||||
END,
|
||||
'default', COALESCE(REPLACE(p.dflt_value, '"', '\\"'), '')
|
||||
)
|
||||
) AS cols_metadata
|
||||
FROM
|
||||
sqlite_master m
|
||||
JOIN
|
||||
pragma_table_info(m.name) p
|
||||
ON
|
||||
m.type in ('table', 'view')
|
||||
WHERE
|
||||
m.name NOT LIKE '\\_cf\\_%' ESCAPE '\\'
|
||||
), tbls AS (
|
||||
SELECT
|
||||
json_group_array(
|
||||
json_object(
|
||||
'schema', '',
|
||||
'table', m.name,
|
||||
'rows', -1,
|
||||
'type', 'table',
|
||||
'engine', '',
|
||||
'collation', ''
|
||||
)
|
||||
) AS tbls_metadata
|
||||
FROM
|
||||
sqlite_master m
|
||||
WHERE
|
||||
m.type in ('table', 'view') AND m.name NOT LIKE '\\_cf\\_%' ESCAPE '\\'
|
||||
), views AS (
|
||||
SELECT
|
||||
json_group_array(
|
||||
json_object(
|
||||
'schema', '',
|
||||
'view_name', m.name
|
||||
)
|
||||
) AS views_metadata
|
||||
FROM
|
||||
sqlite_master m
|
||||
WHERE
|
||||
m.type = 'view' AND m.name NOT LIKE '\\_cf\\_%' ESCAPE '\\'
|
||||
)
|
||||
SELECT
|
||||
replace(replace(replace(
|
||||
json_object(
|
||||
'fk_info', (SELECT fk_metadata FROM fk_info),
|
||||
'pk_info', (SELECT pk_metadata FROM pk_info),
|
||||
'columns', (SELECT cols_metadata FROM cols),
|
||||
'indexes', (SELECT indexes_metadata FROM indexes_metadata),
|
||||
'tables', (SELECT tbls_metadata FROM tbls),
|
||||
'views', (SELECT views_metadata FROM views),
|
||||
'database_name', 'sqlite',
|
||||
'version', ''
|
||||
),
|
||||
'\\"', '"'),'"[', '['), ']"', ']'
|
||||
) AS metadata_json_to_import;
|
||||
`;
|
||||
|
||||
// Generate Wrangler CLI command wrapper around the D1 query
|
||||
const generateWranglerCommand = (): string => {
|
||||
return `# Cloudflare D1 (via Wrangler CLI) Import Script
|
||||
# ------------------------------------------------------
|
||||
# This query will extract your D1 database schema using Cloudflare's Wrangler CLI
|
||||
#
|
||||
# Prerequisites:
|
||||
# 1. Install Wrangler CLI if you haven't already: npm install -g wrangler
|
||||
# 2. Login to your Cloudflare account: wrangler login
|
||||
# 3. Make sure that your wrangler.jsonc or wrangler.toml file has the following:
|
||||
# [d1_databases]
|
||||
# [d1_databases.DB]
|
||||
# database_name = "YOUR_DB_NAME"
|
||||
# database_id = "YOUR_DB_ID"
|
||||
# 4. Replace YOUR_DB_NAME with your actual D1 database name
|
||||
# 5. Replace YOUR_DB_ID with your actual D1 database ID
|
||||
|
||||
# Step 1: Write the query to a file
|
||||
wrangler d1 execute YOUR_DB_NAME --command $'WITH fk_info AS ( SELECT json_group_array( json_object( \\'schema\\', \\'\\', \\'table\\', m.name, \\'column\\', fk.[from], \\'foreign_key_name\\', \\'fk_\\' || m.name || \\'_\\' || fk.[from] || \\'_\\' || fk.[table] || \\'_\\' || fk.[to], \\'reference_schema\\', \\'\\', \\'reference_table\\', fk.[table], \\'reference_column\\', fk.[to], \\'fk_def\\', \\'FOREIGN KEY (\\' || fk.[from] || \\') REFERENCES \\' || fk.[table] || \\'(\\' || fk.[to] || \\')\\' || \\' ON UPDATE \\' || fk.on_update || \\' ON DELETE \\' || fk.on_delete ) ) AS fk_metadata FROM sqlite_master m JOIN pragma_foreign_key_list(m.name) fk ON m.type = \\'table\\' WHERE m.name NOT LIKE \\'\\\\_cf\\\\_%\\' ESCAPE \\'\\\\\\' ), pk_info AS ( SELECT json_group_array( json_object( \\'schema\\', \\'\\', \\'table\\', pk.table_name, \\'field_count\\', pk.field_count, \\'column\\', pk.pk_column, \\'pk_def\\', \\'PRIMARY KEY (\\' || pk.pk_column || \\')\\' ) ) AS pk_metadata FROM ( SELECT m.name AS table_name, COUNT(p.name) AS field_count, GROUP_CONCAT(p.name) AS pk_column FROM sqlite_master m JOIN pragma_table_info(m.name) p ON m.type = \\'table\\' AND p.pk > 0 WHERE m.name NOT LIKE \\'\\\\_cf\\\\_%\\' ESCAPE \\'\\\\\\' GROUP BY m.name ) pk ), indexes_metadata AS ( SELECT json_group_array( json_object( \\'schema\\', \\'\\', \\'table\\', m.name, \\'name\\', idx.name, \\'column\\', ic.name, \\'index_type\\', \\'B-TREE\\', \\'cardinality\\', \\'\\', \\'size\\', null, \\'unique\\', CASE WHEN idx.[unique] = 1 THEN true ELSE false END, \\'direction\\', \\'\\', \\'column_position\\', ic.seqno + 1 ) ) AS indexes_metadata FROM sqlite_master m JOIN pragma_index_list(m.name) idx ON m.type = \\'table\\' JOIN pragma_index_info(idx.name) ic WHERE m.name NOT LIKE \\'\\\\_cf\\\\_%\\' ESCAPE \\'\\\\\\' ), cols AS ( SELECT json_group_array( json_object( \\'schema\\', \\'\\', \\'table\\', m.name, \\'name\\', p.name, \\'type\\', CASE WHEN INSTR(LOWER(p.type), \\'(\\') > 0 THEN SUBSTR(LOWER(p.type), 1, INSTR(LOWER(p.type), \\'(\\') - 1) ELSE LOWER(p.type) END, \\'ordinal_position\\', p.cid, \\'nullable\\', CASE WHEN p.[notnull] = 0 THEN true ELSE false END, \\'collation\\', \\'\\', \\'character_maximum_length\\', CASE WHEN LOWER(p.type) LIKE \\'char%\\' OR LOWER(p.type) LIKE \\'varchar%\\' THEN CASE WHEN INSTR(p.type, \\'(\\') > 0 THEN REPLACE( SUBSTR(p.type, INSTR(p.type, \\'(\\') + 1, LENGTH(p.type) - INSTR(p.type, \\'(\\') - 1), \\')\\', \\'\\' ) ELSE \\'null\\' END ELSE \\'null\\' END, \\'precision\\', CASE WHEN LOWER(p.type) LIKE \\'decimal%\\' OR LOWER(p.type) LIKE \\'numeric%\\' THEN CASE WHEN instr(p.type, \\'(\\') > 0 THEN json_object( \\'precision\\', CAST(substr(p.type, instr(p.type, \\'(\\') + 1, instr(p.type, \\',\\') - instr(p.type, \\'(\\') - 1) as INTIGER), \\'scale\\', CAST(substr(p.type, instr(p.type, \\',\\') + 1, instr(p.type, \\')\\') - instr(p.type, \\',\\') - 1) AS INTIGER) ) ELSE null END ELSE null END, \\'default\\', COALESCE(REPLACE(p.dflt_value, \\'"\\', \\'\\\\\\"\\'), \\'\\') ) ) AS cols_metadata FROM sqlite_master m JOIN pragma_table_info(m.name) p ON m.type in (\\'table\\', \\'view\\') WHERE m.name NOT LIKE \\'\\\\_cf\\\\_%\\' ESCAPE \\'\\\\\\' ), tbls AS ( SELECT json_group_array( json_object( \\'schema\\', \\'\\', \\'table\\', m.name, \\'rows\\', -1, \\'type\\', \\'table\\', \\'engine\\', \\'\\', \\'collation\\', \\'\\' ) ) AS tbls_metadata FROM sqlite_master m WHERE m.type in (\\'table\\', \\'view\\') AND m.name NOT LIKE \\'\\\\_cf\\\\_%\\' ESCAPE \\'\\\\\\' ), views AS ( SELECT 
json_group_array( json_object( \\'schema\\', \\'\\', \\'view_name\\', m.name ) ) AS views_metadata FROM sqlite_master m WHERE m.type = \\'view\\' AND m.name NOT LIKE \\'\\\\_cf\\\\_%\\' ESCAPE \\'\\\\\\' ) SELECT json_object( \\'fk_info\\', json((SELECT fk_metadata FROM fk_info)), \\'pk_info\\', json((SELECT pk_metadata FROM pk_info)), \\'columns\\', json((SELECT cols_metadata FROM cols)), \\'indexes\\', json((SELECT indexes_metadata FROM indexes_metadata)), \\'tables\\', json((SELECT tbls_metadata FROM tbls)), \\'views\\', json((SELECT views_metadata FROM views)), \\'database_name\\', \\'sqlite\\', \\'version\\', \\'\\' ) AS metadata_json_to_import;' --remote
|
||||
|
||||
# Step 2: Copy the output of the command above and paste it into app.chartdb.io
|
||||
`;
|
||||
};
|
||||
|
||||
export const getSQLiteQuery = (
|
||||
options: {
|
||||
databaseEdition?: DatabaseEdition;
|
||||
databaseClient?: DatabaseClient;
|
||||
} = {}
|
||||
): string => {
|
||||
// For Cloudflare D1 edition, return the D1 script
|
||||
if (options.databaseEdition === DatabaseEdition.SQLITE_CLOUDFLARE_D1) {
|
||||
// Generate the Wrangler CLI command based on client
|
||||
const isWranglerClient =
|
||||
options?.databaseClient === DatabaseClient.SQLITE_WRANGLER;
|
||||
|
||||
if (isWranglerClient) {
|
||||
return generateWranglerCommand();
|
||||
}
|
||||
|
||||
return cloudflareD1Query;
|
||||
}
|
||||
|
||||
// Default SQLite script
|
||||
return sqliteQuery;
|
||||
};
|
||||
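A brief usage sketch (illustrative only) of how the options select the script variant:

// Standard SQLite metadata query.
const standard = getSQLiteQuery();
// Cloudflare D1 variant of the SQL query.
const d1 = getSQLiteQuery({
    databaseEdition: DatabaseEdition.SQLITE_CLOUDFLARE_D1,
});
// Wrangler CLI command wrapper for D1.
const wranglerCommand = getSQLiteQuery({
    databaseEdition: DatabaseEdition.SQLITE_CLOUDFLARE_D1,
    databaseClient: DatabaseClient.SQLITE_WRANGLER,
});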
|
||||
@@ -68,12 +68,12 @@ cols AS (
|
||||
'", "name": "' + STRING_ESCAPE(COALESCE(REPLACE(cols.COLUMN_NAME, '"', ''), ''), 'json') +
|
||||
'", "ordinal_position": ' + CAST(cols.ORDINAL_POSITION AS NVARCHAR(MAX)) +
|
||||
', "type": "' + STRING_ESCAPE(LOWER(cols.DATA_TYPE), 'json') +
|
||||
'", "character_maximum_length": ' +
|
||||
'", "character_maximum_length": "' +
|
||||
CASE
|
||||
WHEN cols.CHARACTER_MAXIMUM_LENGTH IS NULL THEN 'null'
|
||||
ELSE CAST(cols.CHARACTER_MAXIMUM_LENGTH AS NVARCHAR(MAX))
|
||||
END +
|
||||
', "precision": ' +
|
||||
'", "precision": ' +
|
||||
CASE
|
||||
WHEN cols.DATA_TYPE IN ('numeric', 'decimal')
|
||||
THEN '{"precision":' + COALESCE(CAST(cols.NUMERIC_PRECISION AS NVARCHAR(MAX)), 'null') +
|
||||
@@ -270,12 +270,12 @@ cols AS (
|
||||
'", "name": "' + STRING_ESCAPE(COALESCE(REPLACE(cols.COLUMN_NAME, '"', ''), ''), 'json') +
|
||||
'", "ordinal_position": ' + CAST(cols.ORDINAL_POSITION AS NVARCHAR(MAX)) +
|
||||
', "type": "' + STRING_ESCAPE(LOWER(cols.DATA_TYPE), 'json') +
|
||||
'", "character_maximum_length": ' +
|
||||
'", "character_maximum_length": "' +
|
||||
CASE
|
||||
WHEN cols.CHARACTER_MAXIMUM_LENGTH IS NULL THEN 'null'
|
||||
ELSE CAST(cols.CHARACTER_MAXIMUM_LENGTH AS NVARCHAR(MAX))
|
||||
END +
|
||||
', "precision": ' +
|
||||
'", "precision": ' +
|
||||
CASE
|
||||
WHEN cols.DATA_TYPE IN ('numeric', 'decimal')
|
||||
THEN '{"precision":' + COALESCE(CAST(cols.NUMERIC_PRECISION AS NVARCHAR(MAX)), 'null') +
|
||||
|
||||
src/lib/data/sql-import/common.ts (new file, 504 lines)
@@ -0,0 +1,504 @@
|
||||
import type { Diagram } from '@/lib/domain/diagram';
|
||||
import { generateDiagramId, generateId } from '@/lib/utils';
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import type { Cardinality, DBRelationship } from '@/lib/domain/db-relationship';
|
||||
import type { DBField } from '@/lib/domain/db-field';
|
||||
import type { DataType } from '@/lib/data/data-types/data-types';
|
||||
import { genericDataTypes } from '@/lib/data/data-types/generic-data-types';
|
||||
import { randomColor } from '@/lib/colors';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
|
||||
// Common interfaces for SQL entities
|
||||
export interface SQLColumn {
|
||||
name: string;
|
||||
type: string;
|
||||
nullable: boolean;
|
||||
primaryKey: boolean;
|
||||
unique: boolean;
|
||||
typeArgs?: {
|
||||
length?: number;
|
||||
precision?: number;
|
||||
scale?: number;
|
||||
};
|
||||
comment?: string;
|
||||
default?: string;
|
||||
increment?: boolean;
|
||||
}
|
||||
|
||||
export interface SQLTable {
|
||||
id: string;
|
||||
name: string;
|
||||
schema?: string;
|
||||
columns: SQLColumn[];
|
||||
indexes: SQLIndex[];
|
||||
comment?: string;
|
||||
order: number;
|
||||
}
|
||||
|
||||
export interface SQLIndex {
|
||||
name: string;
|
||||
columns: string[];
|
||||
unique: boolean;
|
||||
}
|
||||
|
||||
export interface SQLForeignKey {
|
||||
name: string;
|
||||
sourceTable: string;
|
||||
sourceSchema?: string;
|
||||
sourceColumn: string;
|
||||
targetTable: string;
|
||||
targetSchema?: string;
|
||||
targetColumn: string;
|
||||
sourceTableId: string;
|
||||
targetTableId: string;
|
||||
updateAction?: string;
|
||||
deleteAction?: string;
|
||||
}
|
||||
|
||||
export interface SQLParserResult {
|
||||
tables: SQLTable[];
|
||||
relationships: SQLForeignKey[];
|
||||
types?: SQLCustomType[];
|
||||
enums?: SQLEnumType[];
|
||||
}
|
||||
|
||||
// Define more specific types for SQL AST nodes
|
||||
export interface SQLASTNode {
|
||||
type: string;
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
export interface SQLBinaryExpr extends SQLASTNode {
|
||||
type: 'binary_expr';
|
||||
left: SQLASTNode;
|
||||
right: SQLASTNode;
|
||||
operator: string;
|
||||
}
|
||||
|
||||
export interface SQLFunctionNode extends SQLASTNode {
|
||||
type: 'function';
|
||||
name: string;
|
||||
args?: {
|
||||
value: SQLASTArg[];
|
||||
};
|
||||
}
|
||||
|
||||
export interface SQLColumnRef extends SQLASTNode {
|
||||
type: 'column_ref';
|
||||
column: string;
|
||||
table?: string;
|
||||
}
|
||||
|
||||
export interface SQLExprList extends SQLASTNode {
|
||||
type: 'expr_list';
|
||||
value: Array<{ value: string | number }>;
|
||||
}
|
||||
|
||||
export interface SQLStringLiteral extends SQLASTNode {
|
||||
type: 'single_quote_string' | 'double_quote_string';
|
||||
value: string;
|
||||
}
|
||||
|
||||
export type SQLASTArg =
|
||||
| SQLColumnRef
|
||||
| SQLStringLiteral
|
||||
| { type: string; value: string | number };
|
||||
|
||||
export interface SQLCustomType {
|
||||
name: string;
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
export interface SQLEnumType {
|
||||
name: string;
|
||||
values: string[];
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
// Helper functions for SQL dialect handling
|
||||
export function quoteIdentifier(str: string, dbType: DatabaseType): string {
|
||||
switch (dbType) {
|
||||
case DatabaseType.MYSQL:
|
||||
case DatabaseType.MARIADB:
|
||||
return `\`${str}\``;
|
||||
case DatabaseType.POSTGRESQL:
|
||||
case DatabaseType.SQLITE:
|
||||
return `"${str}"`;
|
||||
case DatabaseType.SQL_SERVER:
|
||||
return `[${str}]`;
|
||||
default:
|
||||
return str;
|
||||
}
|
||||
}
|
||||
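For clarity, a few illustrative calls showing the dialect-specific quoting (results shown as comments):

quoteIdentifier('order', DatabaseType.MYSQL); // `order`
quoteIdentifier('order', DatabaseType.POSTGRESQL); // "order"
quoteIdentifier('order', DatabaseType.SQL_SERVER); // [order]
quoteIdentifier('order', DatabaseType.GENERIC); // order (unquoted fallback)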
|
||||
export function buildSQLFromAST(
|
||||
ast: SQLASTNode | null | undefined,
|
||||
dbType: DatabaseType = DatabaseType.GENERIC
|
||||
): string {
|
||||
if (!ast) return '';
|
||||
|
||||
if (ast.type === 'binary_expr') {
|
||||
const expr = ast as SQLBinaryExpr;
|
||||
const leftSQL = buildSQLFromAST(expr.left, dbType);
|
||||
const rightSQL = buildSQLFromAST(expr.right, dbType);
|
||||
return `${leftSQL} ${expr.operator} ${rightSQL}`;
|
||||
}
|
||||
|
||||
if (ast.type === 'function') {
|
||||
const func = ast as SQLFunctionNode;
|
||||
let expr = func.name;
|
||||
if (func.args) {
|
||||
expr +=
|
||||
'(' +
|
||||
func.args.value
|
||||
.map((v: SQLASTArg) => {
|
||||
if (v.type === 'column_ref')
|
||||
return quoteIdentifier(
|
||||
(v as SQLColumnRef).column,
|
||||
dbType
|
||||
);
|
||||
if (
|
||||
v.type === 'single_quote_string' ||
|
||||
v.type === 'double_quote_string'
|
||||
)
|
||||
return "'" + (v as SQLStringLiteral).value + "'";
|
||||
return v.value;
|
||||
})
|
||||
.join(', ') +
|
||||
')';
|
||||
}
|
||||
return expr;
|
||||
} else if (ast.type === 'column_ref') {
|
||||
return quoteIdentifier((ast as SQLColumnRef).column, dbType);
|
||||
} else if (ast.type === 'expr_list') {
|
||||
return (ast as SQLExprList).value.map((v) => v.value).join(' AND ');
|
||||
} else {
|
||||
const valueNode = ast as { type: string; value: string | number };
|
||||
return typeof valueNode.value === 'string'
|
||||
? "'" + valueNode.value + "'"
|
||||
: String(valueNode.value);
|
||||
}
|
||||
}
|
||||
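A small illustrative input (hand-built, not produced by the parser) showing how a default-value expression is rebuilt:

// Rebuild "price" + 1 from a minimal AST node.
const expr: SQLBinaryExpr = {
    type: 'binary_expr',
    operator: '+',
    left: { type: 'column_ref', column: 'price' } as SQLColumnRef,
    right: { type: 'number', value: 1 },
};
buildSQLFromAST(expr, DatabaseType.POSTGRESQL); // '"price" + 1'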
|
||||
// Helper to determine cardinality of relationships
|
||||
export function determineCardinality(
|
||||
isSourceUnique: boolean,
|
||||
isTargetUnique: boolean
|
||||
): { sourceCardinality: Cardinality; targetCardinality: Cardinality } {
|
||||
if (isSourceUnique && isTargetUnique) {
|
||||
return {
|
||||
sourceCardinality: 'one' as Cardinality,
|
||||
targetCardinality: 'one' as Cardinality,
|
||||
};
|
||||
} else if (isSourceUnique) {
|
||||
return {
|
||||
sourceCardinality: 'one' as Cardinality,
|
||||
targetCardinality: 'many' as Cardinality,
|
||||
};
|
||||
} else if (isTargetUnique) {
|
||||
return {
|
||||
sourceCardinality: 'many' as Cardinality,
|
||||
targetCardinality: 'one' as Cardinality,
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
sourceCardinality: 'many' as Cardinality,
|
||||
targetCardinality: 'many' as Cardinality,
|
||||
};
|
||||
}
|
||||
}
|
||||
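For example (illustrative), a non-unique foreign-key column referencing a unique primary-key column yields a many-to-one relationship:

const { sourceCardinality, targetCardinality } = determineCardinality(false, true);
// sourceCardinality === 'many', targetCardinality === 'one'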
|
||||
// Map SQL data type to generic data type in our system
|
||||
export function mapSQLTypeToGenericType(sqlType: string): DataType {
|
||||
const normalizedType = sqlType.toLowerCase().replace(/\(.*\)/, '');
|
||||
const matchedType = genericDataTypes.find((t) => t.id === normalizedType);
|
||||
if (matchedType) return matchedType;
|
||||
|
||||
const typeMap: Record<string, string> = {
|
||||
int: 'integer',
|
||||
integer: 'integer',
|
||||
smallint: 'smallint',
|
||||
bigint: 'bigint',
|
||||
decimal: 'decimal',
|
||||
numeric: 'numeric',
|
||||
float: 'float',
|
||||
double: 'double',
|
||||
varchar: 'varchar',
|
||||
'character varying': 'varchar',
|
||||
char: 'char',
|
||||
character: 'char',
|
||||
text: 'text',
|
||||
boolean: 'boolean',
|
||||
bool: 'boolean',
|
||||
timestamp: 'timestamp',
|
||||
datetime: 'timestamp',
|
||||
date: 'date',
|
||||
time: 'time',
|
||||
json: 'json',
|
||||
jsonb: 'json',
|
||||
};
|
||||
|
||||
const mappedType = typeMap[normalizedType];
|
||||
if (mappedType) {
|
||||
const foundType = genericDataTypes.find((t) => t.id === mappedType);
|
||||
if (foundType) return foundType;
|
||||
}
|
||||
|
||||
return genericDataTypes.find((t) => t.id === 'varchar')!;
|
||||
}
|
||||
|
||||
// Type affinity definitions for different database dialects
|
||||
export const typeAffinity: Record<string, Record<string, string>> = {
|
||||
[DatabaseType.POSTGRESQL]: {
|
||||
INT: 'INTEGER',
|
||||
INTEGER: 'INTEGER',
|
||||
MEDIUMINT: 'INTEGER',
|
||||
BIT: 'BOOLEAN',
|
||||
},
|
||||
[DatabaseType.MYSQL]: {
|
||||
INT: 'INTEGER',
|
||||
INTEGER: 'INTEGER',
|
||||
BOOL: 'BOOLEAN',
|
||||
BOOLEAN: 'TINYINT',
|
||||
},
|
||||
[DatabaseType.MARIADB]: {
|
||||
INT: 'INTEGER',
|
||||
INTEGER: 'INTEGER',
|
||||
BOOL: 'BOOLEAN',
|
||||
BOOLEAN: 'TINYINT',
|
||||
},
|
||||
[DatabaseType.SQL_SERVER]: {
|
||||
INT: 'INTEGER',
|
||||
INTEGER: 'INT',
|
||||
BOOL: 'BIT',
|
||||
BOOLEAN: 'BIT',
|
||||
},
|
||||
[DatabaseType.SQLITE]: {
|
||||
INT: 'INTEGER',
|
||||
BOOL: 'INTEGER',
|
||||
BOOLEAN: 'INTEGER',
|
||||
},
|
||||
[DatabaseType.GENERIC]: {
|
||||
INTEGER: 'integer',
|
||||
INT: 'integer',
|
||||
MEDIUMINT: 'integer',
|
||||
BIT: 'boolean',
|
||||
VARCHAR: 'varchar',
|
||||
'CHARACTER VARYING': 'varchar',
|
||||
CHAR: 'char',
|
||||
CHARACTER: 'char',
|
||||
TEXT: 'text',
|
||||
BOOLEAN: 'boolean',
|
||||
BOOL: 'boolean',
|
||||
TIMESTAMP: 'timestamp',
|
||||
DATETIME: 'timestamp',
|
||||
DATE: 'date',
|
||||
TIME: 'time',
|
||||
JSON: 'json',
|
||||
JSONB: 'json',
|
||||
DECIMAL: 'decimal',
|
||||
NUMERIC: 'numeric',
|
||||
FLOAT: 'float',
|
||||
DOUBLE: 'double',
|
||||
BIGINT: 'bigint',
|
||||
SMALLINT: 'smallint',
|
||||
},
|
||||
};
|
||||
|
||||
// For safe type conversions
|
||||
export function getTypeAffinity(
|
||||
databaseType: DatabaseType,
|
||||
sqlType: string
|
||||
): string {
|
||||
if (!sqlType) return 'varchar';
|
||||
|
||||
const normalizedType = sqlType.toUpperCase();
|
||||
const dialectAffinity =
|
||||
typeAffinity[databaseType] || typeAffinity[DatabaseType.GENERIC];
|
||||
return dialectAffinity[normalizedType] || sqlType.toLowerCase();
|
||||
}
|
||||
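A few illustrative lookups (results as comments), including the lower-cased fallback when no affinity entry exists:

getTypeAffinity(DatabaseType.SQL_SERVER, 'BOOLEAN'); // 'BIT'
getTypeAffinity(DatabaseType.SQLITE, 'bool'); // 'INTEGER'
getTypeAffinity(DatabaseType.POSTGRESQL, 'MEDIUMINT'); // 'INTEGER'
getTypeAffinity(DatabaseType.POSTGRESQL, 'uuid'); // 'uuid' (no entry, falls back to lower case)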
|
||||
// Convert SQLParserResult to ChartDB Diagram structure
|
||||
export function convertToChartDBDiagram(
|
||||
parserResult: SQLParserResult,
|
||||
sourceDatabaseType: DatabaseType,
|
||||
targetDatabaseType: DatabaseType
|
||||
): Diagram {
|
||||
// Create a mapping of old table IDs to new ones
|
||||
const tableIdMapping = new Map<string, string>();
|
||||
|
||||
// Convert SQL tables to ChartDB tables
|
||||
const tables: DBTable[] = parserResult.tables.map((table, index) => {
|
||||
const row = Math.floor(index / 4);
|
||||
const col = index % 4;
|
||||
const tableSpacing = 300;
|
||||
const newId = generateId();
|
||||
tableIdMapping.set(table.id, newId);
|
||||
|
||||
// Create fields from columns
|
||||
const fields: DBField[] = table.columns.map((column) => {
|
||||
const field: DBField = {
|
||||
id: generateId(),
|
||||
name: column.name,
|
||||
type: mapSQLTypeToGenericType(column.type),
|
||||
nullable: column.nullable,
|
||||
primaryKey: column.primaryKey,
|
||||
unique: column.unique,
|
||||
default: column.default || '',
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
|
||||
// Add type arguments if present
|
||||
if (column.typeArgs) {
|
||||
// Transfer length for varchar/char types
|
||||
if (
|
||||
column.typeArgs.length !== undefined &&
|
||||
(field.type.id === 'varchar' || field.type.id === 'char')
|
||||
) {
|
||||
field.characterMaximumLength =
|
||||
column.typeArgs.length.toString();
|
||||
}
|
||||
|
||||
// Transfer precision/scale for numeric types
|
||||
if (
|
||||
column.typeArgs.precision !== undefined &&
|
||||
(field.type.id === 'numeric' || field.type.id === 'decimal')
|
||||
) {
|
||||
field.precision = column.typeArgs.precision;
|
||||
field.scale = column.typeArgs.scale;
|
||||
}
|
||||
}
|
||||
|
||||
return field;
|
||||
});
|
||||
|
||||
// Create indexes
|
||||
const indexes = table.indexes.map((sqlIndex) => {
|
||||
const fieldIds = sqlIndex.columns.map((columnName) => {
|
||||
const field = fields.find((f) => f.name === columnName);
|
||||
if (!field) {
|
||||
throw new Error(
|
||||
`Index references non-existent column: ${columnName}`
|
||||
);
|
||||
}
|
||||
return field.id;
|
||||
});
|
||||
|
||||
return {
|
||||
id: generateId(),
|
||||
name: sqlIndex.name,
|
||||
fieldIds,
|
||||
unique: sqlIndex.unique,
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
});
|
||||
|
||||
return {
|
||||
id: newId,
|
||||
name: table.name,
|
||||
schema: table.schema || '',
|
||||
order: index,
|
||||
fields,
|
||||
indexes,
|
||||
x: col * tableSpacing,
|
||||
y: row * tableSpacing,
|
||||
color: randomColor(),
|
||||
isView: false,
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
});
|
||||
|
||||
// Process relationships
|
||||
const relationships: DBRelationship[] = [];
|
||||
|
||||
parserResult.relationships.forEach((rel) => {
|
||||
// First try to find the table with exact schema match
|
||||
let sourceTable = tables.find(
|
||||
(t) => t.name === rel.sourceTable && rel.sourceSchema === t.schema
|
||||
);
|
||||
|
||||
// If not found, try without schema requirements
|
||||
if (!sourceTable) {
|
||||
sourceTable = tables.find((t) => t.name === rel.sourceTable);
|
||||
}
|
||||
|
||||
// Similar approach for target table
|
||||
let targetTable = tables.find(
|
||||
(t) => t.name === rel.targetTable && rel.targetSchema === t.schema
|
||||
);
|
||||
|
||||
// If not found, try without schema requirements
|
||||
if (!targetTable) {
|
||||
targetTable = tables.find((t) => t.name === rel.targetTable);
|
||||
}
|
||||
|
||||
if (!sourceTable || !targetTable) {
|
||||
console.warn('Relationship refers to non-existent table:', {
|
||||
sourceTable: rel.sourceTable,
|
||||
sourceSchema: rel.sourceSchema,
|
||||
targetTable: rel.targetTable,
|
||||
targetSchema: rel.targetSchema,
|
||||
availableTables: tables.map(
|
||||
(t) => `${t.schema || ''}.${t.name}`
|
||||
),
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const sourceTableId = tableIdMapping.get(rel.sourceTableId);
|
||||
const targetTableId = tableIdMapping.get(rel.targetTableId);
|
||||
|
||||
if (!sourceTableId || !targetTableId) {
|
||||
console.warn('Could not find mapped table IDs for relationship');
|
||||
return;
|
||||
}
|
||||
|
||||
const sourceField = sourceTable.fields.find(
|
||||
(f) => f.name === rel.sourceColumn
|
||||
);
|
||||
const targetField = targetTable.fields.find(
|
||||
(f) => f.name === rel.targetColumn
|
||||
);
|
||||
|
||||
if (!sourceField || !targetField) {
|
||||
console.warn('Relationship refers to non-existent field:', {
|
||||
sourceTable: rel.sourceTable,
|
||||
sourceField: rel.sourceColumn,
|
||||
targetTable: rel.targetTable,
|
||||
targetField: rel.targetColumn,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const { sourceCardinality, targetCardinality } = determineCardinality(
|
||||
sourceField.unique || sourceField.primaryKey,
|
||||
targetField.unique || targetField.primaryKey
|
||||
);
|
||||
|
||||
relationships.push({
|
||||
id: generateId(),
|
||||
name: rel.name,
|
||||
sourceSchema: sourceTable.schema,
|
||||
targetSchema: targetTable.schema,
|
||||
sourceTableId: sourceTableId,
|
||||
targetTableId: targetTableId,
|
||||
sourceFieldId: sourceField.id,
|
||||
targetFieldId: targetField.id,
|
||||
sourceCardinality,
|
||||
targetCardinality,
|
||||
createdAt: Date.now(),
|
||||
});
|
||||
});
|
||||
|
||||
const diagram = {
|
||||
id: generateDiagramId(),
|
||||
name: `SQL Import (${sourceDatabaseType})`,
|
||||
databaseType: targetDatabaseType,
|
||||
tables,
|
||||
relationships,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
return diagram;
|
||||
}
|
||||
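A hedged end-to-end sketch of how the converter might be wired to a dialect importer; fromMySQL is defined later in this change set, and the target database type here is an arbitrary choice:

// Hypothetical wiring of a dialect importer to the converter.
async function importDiagramFromSQL(sqlScript: string): Promise<Diagram> {
    const parsed = await fromMySQL(sqlScript);
    return convertToChartDBDiagram(
        parsed,
        DatabaseType.MYSQL, // dialect the script was written for
        DatabaseType.MYSQL // database type of the resulting diagram
    );
}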
src/lib/data/sql-import/dialect-importers/mysql/mysql-common.ts (new file, 239 lines)
@@ -0,0 +1,239 @@
|
||||
export const parserOpts = {
|
||||
database: 'MySQL', // Set dialect to MySQL
|
||||
};
|
||||
|
||||
// Define interfaces for AST nodes - Fixed no-explicit-any issues
|
||||
export interface SQLAstNode {
|
||||
type: string;
|
||||
keyword?: string;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
[key: string]: any; // Need to keep any here for compatibility with Parser's AST
|
||||
}
|
||||
|
||||
// Define a minimal interface for table objects used in helper functions
|
||||
export interface TableLike {
|
||||
id: string;
|
||||
name: string;
|
||||
schema?: string;
|
||||
columns: unknown[];
|
||||
indexes: unknown[];
|
||||
}
|
||||
|
||||
export interface TableReference {
|
||||
table?: string;
|
||||
schema?: string;
|
||||
db?: string; // Support for PostgreSQL AST compatibility
|
||||
}
|
||||
|
||||
export interface ColumnReference {
|
||||
column?:
|
||||
| string
|
||||
| { value?: string; expr?: { value?: string; type?: string } };
|
||||
expr?: { value?: string; type?: string };
|
||||
value?: string;
|
||||
type?: string;
|
||||
}
|
||||
|
||||
export interface ColumnDefinition {
|
||||
resource: string;
|
||||
column: string | ColumnReference;
|
||||
definition?: {
|
||||
dataType?: string;
|
||||
constraint?: string;
|
||||
length?: number;
|
||||
precision?: number;
|
||||
scale?: number;
|
||||
};
|
||||
primary_key?: string;
|
||||
nullable?: { type?: string };
|
||||
unique?: string;
|
||||
default_val?: SQLAstNode;
|
||||
auto_increment?: string;
|
||||
}
|
||||
|
||||
export interface ConstraintDefinition {
|
||||
resource: string;
|
||||
constraint_type: string;
|
||||
constraint_name?: string;
|
||||
definition?: Array<ColumnReference> | { columns?: string[] };
|
||||
columns?: string[];
|
||||
reference_definition?: ReferenceDefinition;
|
||||
reference?: ReferenceDefinition;
|
||||
}
|
||||
|
||||
export interface ReferenceDefinition {
|
||||
table?: string | TableReference | TableReference[];
|
||||
columns?: Array<ColumnReference | string> | string[];
|
||||
definition?: Array<ColumnReference>;
|
||||
on_update?: string;
|
||||
on_delete?: string;
|
||||
}
|
||||
|
||||
export interface CreateTableStatement extends SQLAstNode {
|
||||
table: TableReference | TableReference[];
|
||||
create_definitions?: Array<ColumnDefinition | ConstraintDefinition>;
|
||||
comment?: string;
|
||||
}
|
||||
|
||||
export interface CreateIndexStatement extends SQLAstNode {
|
||||
table: TableReference | TableReference[] | string;
|
||||
index?: string;
|
||||
index_name?: string;
|
||||
index_type?: string;
|
||||
unique?: boolean;
|
||||
columns?: Array<ColumnReference>;
|
||||
index_columns?: Array<{ column?: ColumnReference } | ColumnReference>;
|
||||
}
|
||||
|
||||
export interface AlterTableConstraintDefinition extends ConstraintDefinition {
|
||||
constraint?: string;
|
||||
}
|
||||
|
||||
export interface AlterTableExprItem {
|
||||
action: string;
|
||||
resource?: string;
|
||||
type?: string;
|
||||
constraint?: { constraint_type?: string };
|
||||
create_definitions?:
|
||||
| AlterTableConstraintDefinition
|
||||
| {
|
||||
constraint_type?: string;
|
||||
definition?: Array<ColumnReference>;
|
||||
constraint?: string;
|
||||
reference_definition?: ReferenceDefinition;
|
||||
resource?: string;
|
||||
};
|
||||
}
|
||||
|
||||
export interface AlterTableStatement extends SQLAstNode {
|
||||
table: TableReference[] | TableReference | string;
|
||||
expr: AlterTableExprItem[];
|
||||
}
|
||||
|
||||
// Define type for column type arguments
|
||||
export interface TypeArgs {
|
||||
length?: number;
|
||||
precision?: number;
|
||||
scale?: number;
|
||||
}
|
||||
|
||||
// Helper to extract column name from different AST formats
|
||||
export function extractColumnName(
|
||||
columnObj: string | ColumnReference | undefined
|
||||
): string {
|
||||
if (!columnObj) return '';
|
||||
|
||||
// Handle different formats based on actual AST structure
|
||||
if (typeof columnObj === 'string') return columnObj;
|
||||
|
||||
if (typeof columnObj === 'object') {
|
||||
// Direct column property
|
||||
if (columnObj.column) {
|
||||
if (typeof columnObj.column === 'string') return columnObj.column;
|
||||
if (typeof columnObj.column === 'object') {
|
||||
// Handle nested value property
|
||||
if (columnObj.column.value) return columnObj.column.value;
|
||||
// Handle expression property with value
|
||||
if (columnObj.column.expr?.value)
|
||||
return columnObj.column.expr.value;
|
||||
// Handle double_quote_string type which is common in PostgreSQL
|
||||
if (columnObj.column.expr?.type === 'double_quote_string')
|
||||
return columnObj.column.expr.value || '';
|
||||
// Direct access to expr
|
||||
if (columnObj.column.expr?.type === 'default')
|
||||
return columnObj.column.expr.value || '';
|
||||
}
|
||||
}
|
||||
|
||||
// Direct expr property
|
||||
if (columnObj.expr) {
|
||||
if (columnObj.expr.type === 'default')
|
||||
return columnObj.expr.value || '';
|
||||
if (columnObj.expr.type === 'double_quote_string')
|
||||
return columnObj.expr.value || '';
|
||||
if (columnObj.expr.value) return columnObj.expr.value;
|
||||
}
|
||||
|
||||
// Direct value property
|
||||
if (columnObj.value) return columnObj.value;
|
||||
}
|
||||
|
||||
console.warn('Could not extract column name from:', columnObj);
|
||||
return '';
|
||||
}
|
||||
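A few illustrative inputs (results as comments) covering the shapes the SQL parser's AST can emit:

extractColumnName('id'); // 'id'
extractColumnName({ column: 'user_id' }); // 'user_id'
extractColumnName({
    column: { expr: { type: 'double_quote_string', value: 'Name' } },
}); // 'Name'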
|
||||
// Helper function to extract type arguments from column definition
|
||||
export function getTypeArgs(
|
||||
definition: ColumnDefinition['definition'] | undefined
|
||||
): TypeArgs {
|
||||
const typeArgs: TypeArgs = {};
|
||||
|
||||
if (!definition) return typeArgs;
|
||||
|
||||
if (definition.length !== undefined) {
|
||||
typeArgs.length = definition.length;
|
||||
}
|
||||
|
||||
if (definition.scale !== undefined && definition.precision !== undefined) {
|
||||
typeArgs.precision = definition.precision;
|
||||
typeArgs.scale = definition.scale;
|
||||
}
|
||||
|
||||
return typeArgs;
|
||||
}
|
||||
|
||||
// Helper function to find a table with consistent schema handling
|
||||
export function findTableWithSchemaSupport(
|
||||
tables: TableLike[],
|
||||
tableName: string,
|
||||
schemaName?: string
|
||||
): TableLike | undefined {
|
||||
// Default to public schema if none provided
|
||||
const effectiveSchema = schemaName || 'public';
|
||||
|
||||
// First try with exact schema match
|
||||
let table = tables.find(
|
||||
(t) => t.name === tableName && t.schema === effectiveSchema
|
||||
);
|
||||
|
||||
// If not found with schema, try with the legacy schema match
|
||||
if (!table && schemaName) {
|
||||
table = tables.find(
|
||||
(t) => t.name === tableName && t.schema === schemaName
|
||||
);
|
||||
}
|
||||
|
||||
// If still not found with schema, try any match on the table name
|
||||
if (!table) {
|
||||
table = tables.find((t) => t.name === tableName);
|
||||
}
|
||||
|
||||
return table;
|
||||
}
|
||||
|
||||
// Helper function to find table ID with schema support
|
||||
export function getTableIdWithSchemaSupport(
|
||||
tableMap: Record<string, string>,
|
||||
tableName: string,
|
||||
schemaName?: string
|
||||
): string | undefined {
|
||||
// Default to public schema if none provided
|
||||
const effectiveSchema = schemaName || 'public';
|
||||
|
||||
// First try with schema
|
||||
const tableKey = `${effectiveSchema}.${tableName}`;
|
||||
let tableId = tableMap[tableKey];
|
||||
|
||||
// If not found with the effective schema, try with the original schema if different
|
||||
if (!tableId && schemaName && schemaName !== effectiveSchema) {
|
||||
const originalSchemaKey = `${schemaName}.${tableName}`;
|
||||
tableId = tableMap[originalSchemaKey];
|
||||
}
|
||||
|
||||
// If still not found with schema, try without schema
|
||||
if (!tableId) {
|
||||
tableId = tableMap[tableName];
|
||||
}
|
||||
|
||||
return tableId;
|
||||
}
|
||||
src/lib/data/sql-import/dialect-importers/mysql/mysql.ts (new file, 929 lines)
@@ -0,0 +1,929 @@
|
||||
import { generateId } from '@/lib/utils';
|
||||
import type {
|
||||
SQLParserResult,
|
||||
SQLTable,
|
||||
SQLColumn,
|
||||
SQLIndex,
|
||||
SQLForeignKey,
|
||||
} from '../../common';
|
||||
import { buildSQLFromAST } from '../../common';
|
||||
import type {
|
||||
ColumnDefinition,
|
||||
ConstraintDefinition,
|
||||
CreateTableStatement,
|
||||
TableReference,
|
||||
} from './mysql-common';
|
||||
import { parserOpts, extractColumnName, getTypeArgs } from './mysql-common';
|
||||
|
||||
// Interface for pending foreign keys that need to be processed later
|
||||
interface PendingForeignKey {
|
||||
name: string;
|
||||
sourceTable: string;
|
||||
sourceTableId: string;
|
||||
sourceColumns: string[];
|
||||
targetTable: string;
|
||||
targetColumns: string[];
|
||||
updateAction?: string;
|
||||
deleteAction?: string;
|
||||
}
|
||||
|
||||
// Helper to extract statements from a MySQL dump
|
||||
function extractStatements(sqlContent: string): string[] {
|
||||
const statements: string[] = [];
|
||||
let currentStatement = '';
|
||||
const lines = sqlContent.split('\n');
|
||||
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i].trim();
|
||||
|
||||
// Skip comments and empty lines
|
||||
if (line.startsWith('--') || line === '') {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Add line to current statement
|
||||
currentStatement += line + ' ';
|
||||
|
||||
// If line ends with semicolon, consider statement complete
|
||||
if (line.endsWith(';')) {
|
||||
statements.push(currentStatement.trim());
|
||||
currentStatement = '';
|
||||
}
|
||||
}
|
||||
|
||||
// Handle any remaining statement
|
||||
if (currentStatement.trim()) {
|
||||
statements.push(currentStatement.trim());
|
||||
}
|
||||
|
||||
return statements;
|
||||
}
|
||||
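For illustration (not from the source): comments and blank lines are skipped, and statements are split on the terminating semicolon:

const statements = extractStatements(`
-- users table
CREATE TABLE users (id INT PRIMARY KEY);
CREATE INDEX idx_users_id ON users (id);
`);
// statements.length === 2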
|
||||
// Function to extract columns from a CREATE TABLE statement using regex
|
||||
function extractColumnsFromCreateTable(statement: string): SQLColumn[] {
|
||||
const columns: SQLColumn[] = [];
|
||||
|
||||
// Extract everything between the first opening and last closing parenthesis
|
||||
const columnMatch = statement.match(/CREATE\s+TABLE.*?\((.*)\)[^)]*;$/s);
|
||||
if (!columnMatch || !columnMatch[1]) {
|
||||
return columns;
|
||||
}
|
||||
|
||||
const columnDefs = columnMatch[1].trim();
|
||||
// Split by commas, but not those within parentheses (for nested type definitions)
|
||||
const columnLines = columnDefs.split(/,(?![^(]*\))/);
|
||||
|
||||
for (const columnLine of columnLines) {
|
||||
const line = columnLine.trim();
|
||||
// Skip constraints at the table level
|
||||
if (
|
||||
line.toUpperCase().startsWith('CONSTRAINT') ||
|
||||
line.toUpperCase().startsWith('PRIMARY KEY') ||
|
||||
line.toUpperCase().startsWith('FOREIGN KEY') ||
|
||||
line.toUpperCase().startsWith('UNIQUE')
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Extract column name and definition
|
||||
const columnNameMatch = line.match(/^"?([^"\s]+)"?\s+(.+)$/);
|
||||
if (columnNameMatch) {
|
||||
const columnName = columnNameMatch[1];
|
||||
const definition = columnNameMatch[2];
|
||||
|
||||
// Determine if column is nullable
|
||||
const nullable = !definition.toUpperCase().includes('NOT NULL');
|
||||
|
||||
// Determine if column is primary key
|
||||
const primaryKey = definition.toUpperCase().includes('PRIMARY KEY');
|
||||
|
||||
// Extract data type
|
||||
const typeMatch = definition.match(/^([^\s(]+)(?:\(([^)]+)\))?/);
|
||||
const dataType = typeMatch ? typeMatch[1] : '';
|
||||
|
||||
columns.push({
|
||||
name: columnName,
|
||||
type: dataType,
|
||||
nullable,
|
||||
primaryKey,
|
||||
unique: definition.toUpperCase().includes('UNIQUE'),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return columns;
|
||||
}
|
||||
|
||||
// Process PostgreSQL pg_dump CREATE INDEX statements
|
||||
function processCreateIndexStatement(
|
||||
statement: string,
|
||||
tableMap: Record<string, string>,
|
||||
tables: SQLTable[]
|
||||
): void {
|
||||
if (
|
||||
!statement.startsWith('CREATE INDEX') &&
|
||||
!statement.startsWith('CREATE UNIQUE INDEX')
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Determine if the index is unique
|
||||
const isUnique = statement.startsWith('CREATE UNIQUE INDEX');
|
||||
|
||||
// Extract index name
|
||||
const indexNameRegex = /CREATE (?:UNIQUE )?INDEX\s+"?([^"\s]+)"?/i;
|
||||
const indexNameMatch = statement.match(indexNameRegex);
|
||||
const indexName = indexNameMatch ? indexNameMatch[1] : '';
|
||||
|
||||
if (!indexName) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Extract table name and schema
|
||||
const tableRegex = /ON\s+(?:"?([^"\s.]+)"?\.)?(?:"?([^"\s.(]+)"?)/i;
|
||||
const tableMatch = statement.match(tableRegex);
|
||||
|
||||
if (!tableMatch) {
|
||||
return;
|
||||
}
|
||||
|
||||
const tableSchema = tableMatch[1] || '';
|
||||
const tableName = tableMatch[2];
|
||||
|
||||
// Extract index columns
|
||||
const columnsRegex = /\(\s*([^)]+)\)/i;
|
||||
const columnsMatch = statement.match(columnsRegex);
|
||||
|
||||
if (!columnsMatch) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Parse columns (handle function-based indexes, etc.)
|
||||
const columnsStr = columnsMatch[1];
|
||||
// This is a simplified approach - advanced indexes may need more complex parsing
|
||||
const indexColumns = columnsStr.split(',').map((col) => {
|
||||
// Extract basic column name, handling possible expressions
|
||||
const colName = col
|
||||
.trim()
|
||||
.replace(/^"(.*)"$/, '$1')
|
||||
.replace(/^\s*"?([^"\s(]+)"?\s*.*$/, '$1'); // Get just the column name part
|
||||
return colName;
|
||||
});
|
||||
|
||||
if (indexColumns.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Find the table
|
||||
const tableKey = `${tableSchema ? tableSchema + '.' : ''}${tableName}`;
|
||||
const tableId = tableMap[tableKey];
|
||||
|
||||
if (!tableId) {
|
||||
return;
|
||||
}
|
||||
|
||||
const table = tables.find((t) => t.id === tableId);
|
||||
if (!table) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if a similar index already exists (to avoid duplicates)
|
||||
const existingIndex = table.indexes.find(
|
||||
(idx) =>
|
||||
idx.name === indexName ||
|
||||
(idx.columns.length === indexColumns.length &&
|
||||
idx.columns.every((col, i) => col === indexColumns[i]))
|
||||
);
|
||||
|
||||
if (!existingIndex) {
|
||||
table.indexes.push({
|
||||
name: indexName,
|
||||
columns: indexColumns,
|
||||
unique: isUnique,
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error processing CREATE INDEX statement:', error);
|
||||
}
|
||||
}
|
||||
|
||||
export async function fromMySQL(sqlContent: string): Promise<SQLParserResult> {
|
||||
const tables: SQLTable[] = [];
|
||||
const relationships: SQLForeignKey[] = [];
|
||||
const tableMap: Record<string, string> = {}; // Maps table name to its ID
|
||||
const pendingForeignKeys: PendingForeignKey[] = []; // Store FKs that reference tables not yet created
|
||||
|
||||
try {
|
||||
// Extract SQL statements from the dump
|
||||
const statements = extractStatements(sqlContent);
|
||||
|
||||
// First pass: process CREATE TABLE statements
|
||||
for (const statement of statements) {
|
||||
const trimmedStmt = statement.trim();
|
||||
// Process only CREATE TABLE statements
|
||||
if (trimmedStmt.toUpperCase().startsWith('CREATE TABLE')) {
|
||||
try {
|
||||
const { Parser } = await import('node-sql-parser');
|
||||
const parser = new Parser();
|
||||
// Parse with SQL parser
|
||||
const ast = parser.astify(trimmedStmt, parserOpts);
|
||||
if (
|
||||
Array.isArray(ast) &&
|
||||
ast.length > 0 &&
|
||||
ast[0].type === 'create' &&
|
||||
ast[0].keyword === 'table'
|
||||
) {
|
||||
const createTableStmt = ast[0] as CreateTableStatement;
|
||||
|
||||
// Extract table name
|
||||
let tableName = '';
|
||||
if (typeof createTableStmt.table === 'object') {
|
||||
if (
|
||||
Array.isArray(createTableStmt.table) &&
|
||||
createTableStmt.table.length > 0
|
||||
) {
|
||||
tableName =
|
||||
createTableStmt.table[0].table || '';
|
||||
} else {
|
||||
const tableObj =
|
||||
createTableStmt.table as TableReference;
|
||||
tableName = tableObj.table || '';
|
||||
}
|
||||
} else if (typeof createTableStmt.table === 'string') {
|
||||
tableName = createTableStmt.table;
|
||||
}
|
||||
|
||||
// Remove backticks from table name
|
||||
tableName = tableName.replace(/`/g, '');
|
||||
|
||||
if (tableName) {
|
||||
// Generate table ID
|
||||
const tableId = generateId();
|
||||
|
||||
// Handle database-qualified table names
|
||||
const tableNameParts = tableName.split('.');
|
||||
let database = '';
|
||||
let simpleTableName = tableName;
|
||||
|
||||
if (tableNameParts.length > 1) {
|
||||
database = tableNameParts[0];
|
||||
simpleTableName = tableNameParts[1];
|
||||
// Store with fully qualified name (for ALTER TABLE references)
|
||||
tableMap[tableName] = tableId;
|
||||
// Also store with just table name (for simpler lookups)
|
||||
tableMap[simpleTableName] = tableId;
|
||||
} else {
|
||||
tableMap[tableName] = tableId;
|
||||
}
|
||||
|
||||
// Process columns
|
||||
const columns: SQLColumn[] = [];
|
||||
const indexes: SQLIndex[] = [];
|
||||
|
||||
if (
|
||||
createTableStmt.create_definitions &&
|
||||
Array.isArray(
|
||||
createTableStmt.create_definitions
|
||||
)
|
||||
) {
|
||||
createTableStmt.create_definitions.forEach(
|
||||
(
|
||||
def:
|
||||
| ColumnDefinition
|
||||
| ConstraintDefinition
|
||||
) => {
|
||||
if (def.resource === 'column') {
|
||||
const columnDef =
|
||||
def as ColumnDefinition;
|
||||
let columnName = extractColumnName(
|
||||
columnDef.column
|
||||
);
|
||||
|
||||
// Remove backticks
|
||||
columnName = columnName.replace(
|
||||
/`/g,
|
||||
''
|
||||
);
|
||||
const dataType =
|
||||
columnDef.definition
|
||||
?.dataType || '';
|
||||
|
||||
// Check column constraints
|
||||
const isPrimaryKey =
|
||||
columnDef.primary_key ===
|
||||
'primary key' ||
|
||||
columnDef.definition
|
||||
?.constraint ===
|
||||
'primary key';
|
||||
|
||||
const isAutoIncrement =
|
||||
columnDef.auto_increment ===
|
||||
'auto_increment';
|
||||
|
||||
columns.push({
|
||||
name: columnName,
|
||||
type: dataType,
|
||||
nullable:
|
||||
columnDef.nullable?.type !==
|
||||
'not null',
|
||||
primaryKey: isPrimaryKey,
|
||||
unique:
|
||||
columnDef.unique ===
|
||||
'unique' ||
|
||||
columnDef.definition
|
||||
?.constraint ===
|
||||
'unique',
|
||||
typeArgs: getTypeArgs(
|
||||
columnDef.definition
|
||||
),
|
||||
default: columnDef.default_val
|
||||
? buildSQLFromAST(
|
||||
columnDef.default_val
|
||||
)
|
||||
: undefined,
|
||||
increment: isAutoIncrement,
|
||||
});
|
||||
} else if (
|
||||
def.resource === 'constraint'
|
||||
) {
|
||||
const constraintDef =
|
||||
def as ConstraintDefinition;
|
||||
|
||||
// Handle PRIMARY KEY constraint
|
||||
if (
|
||||
constraintDef.constraint_type ===
|
||||
'primary key'
|
||||
) {
|
||||
if (
|
||||
Array.isArray(
|
||||
constraintDef.definition
|
||||
)
|
||||
) {
|
||||
const pkColumns =
|
||||
constraintDef.definition
|
||||
.filter(
|
||||
(colDef) =>
|
||||
typeof colDef ===
|
||||
'object' &&
|
||||
'type' in
|
||||
colDef &&
|
||||
colDef.type ===
|
||||
'column_ref'
|
||||
)
|
||||
.map((colDef) =>
|
||||
extractColumnName(
|
||||
colDef
|
||||
).replace(
|
||||
/`/g,
|
||||
''
|
||||
)
|
||||
);
|
||||
|
||||
// Mark columns as PK
|
||||
for (const colName of pkColumns) {
|
||||
const col =
|
||||
columns.find(
|
||||
(c) =>
|
||||
c.name ===
|
||||
colName
|
||||
);
|
||||
if (col) {
|
||||
col.primaryKey =
|
||||
true;
|
||||
}
|
||||
}
|
||||
|
||||
// Add PK index
|
||||
if (pkColumns.length > 0) {
|
||||
indexes.push({
|
||||
name: `pk_${tableName}`,
|
||||
columns: pkColumns,
|
||||
unique: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
// Handle UNIQUE constraint
|
||||
else if (
|
||||
constraintDef.constraint_type ===
|
||||
'unique'
|
||||
) {
|
||||
const uniqueColumns =
|
||||
Array.isArray(
|
||||
constraintDef.definition
|
||||
)
|
||||
? constraintDef.definition.map(
|
||||
(colDef) =>
|
||||
extractColumnName(
|
||||
colDef
|
||||
).replace(
|
||||
/`/g,
|
||||
''
|
||||
)
|
||||
)
|
||||
: (
|
||||
constraintDef
|
||||
.definition
|
||||
?.columns ||
|
||||
[]
|
||||
).map((col) =>
|
||||
typeof col ===
|
||||
'string'
|
||||
? col.replace(
|
||||
/`/g,
|
||||
''
|
||||
)
|
||||
: extractColumnName(
|
||||
col
|
||||
).replace(
|
||||
/`/g,
|
||||
''
|
||||
)
|
||||
);
|
||||
|
||||
if (uniqueColumns.length > 0) {
|
||||
indexes.push({
|
||||
name: constraintDef.constraint_name
|
||||
? constraintDef.constraint_name.replace(
|
||||
/`/g,
|
||||
''
|
||||
)
|
||||
: `${tableName}_${uniqueColumns[0]}_key`,
|
||||
columns: uniqueColumns,
|
||||
unique: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
// Handle FOREIGN KEY constraints
|
||||
else if (
|
||||
constraintDef.constraint_type ===
|
||||
'foreign key' ||
|
||||
constraintDef.constraint_type ===
|
||||
'FOREIGN KEY'
|
||||
) {
|
||||
// Extract source columns
|
||||
let sourceColumns: string[] =
|
||||
[];
|
||||
if (
|
||||
Array.isArray(
|
||||
constraintDef.definition
|
||||
)
|
||||
) {
|
||||
sourceColumns =
|
||||
constraintDef.definition.map(
|
||||
(col) => {
|
||||
const colName =
|
||||
extractColumnName(
|
||||
col
|
||||
).replace(
|
||||
/`/g,
|
||||
''
|
||||
);
|
||||
return colName;
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// Process reference info (target table/columns)
|
||||
const reference =
|
||||
constraintDef.reference_definition ||
|
||||
constraintDef.reference;
|
||||
|
||||
if (
|
||||
reference &&
|
||||
sourceColumns.length > 0
|
||||
) {
|
||||
// Extract target table
|
||||
let targetTable = '';
|
||||
if (reference.table) {
|
||||
if (
|
||||
typeof reference.table ===
|
||||
'object'
|
||||
) {
|
||||
if (
|
||||
Array.isArray(
|
||||
reference.table
|
||||
) &&
|
||||
reference.table
|
||||
.length > 0
|
||||
) {
|
||||
targetTable =
|
||||
reference
|
||||
.table[0]
|
||||
.table ||
|
||||
'';
|
||||
} else {
|
||||
const tableRef =
|
||||
reference.table as TableReference;
|
||||
targetTable =
|
||||
tableRef.table ||
|
||||
'';
|
||||
}
|
||||
} else {
|
||||
targetTable =
|
||||
reference.table as string;
|
||||
}
|
||||
|
||||
// Remove backticks
|
||||
targetTable =
|
||||
targetTable.replace(
|
||||
/`/g,
|
||||
''
|
||||
);
|
||||
}
|
||||
|
||||
// Extract target columns
|
||||
let targetColumns: string[] =
|
||||
[];
|
||||
if (
|
||||
reference.columns &&
|
||||
Array.isArray(
|
||||
reference.columns
|
||||
)
|
||||
) {
|
||||
targetColumns =
|
||||
reference.columns.map(
|
||||
(col) => {
|
||||
const colName =
|
||||
typeof col ===
|
||||
'string'
|
||||
? col.replace(
|
||||
/`/g,
|
||||
''
|
||||
)
|
||||
: extractColumnName(
|
||||
col
|
||||
).replace(
|
||||
/`/g,
|
||||
''
|
||||
);
|
||||
return colName;
|
||||
}
|
||||
);
|
||||
} else if (
|
||||
reference.definition &&
|
||||
Array.isArray(
|
||||
reference.definition
|
||||
)
|
||||
) {
|
||||
targetColumns =
|
||||
reference.definition.map(
|
||||
(col) => {
|
||||
const colName =
|
||||
extractColumnName(
|
||||
col
|
||||
).replace(
|
||||
/`/g,
|
||||
''
|
||||
);
|
||||
return colName;
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// Add relationships for matching columns
|
||||
if (
|
||||
targetTable &&
|
||||
targetColumns.length > 0
|
||||
) {
|
||||
const targetTableId =
|
||||
tableMap[
|
||||
targetTable
|
||||
];
|
||||
|
||||
if (!targetTableId) {
|
||||
// Store for later processing (after all tables are created)
|
||||
const pendingFk: PendingForeignKey =
|
||||
{
|
||||
name: constraintDef.constraint_name
|
||||
? constraintDef.constraint_name.replace(
|
||||
/`/g,
|
||||
''
|
||||
)
|
||||
: `${tableName}_${sourceColumns[0]}_fkey`,
|
||||
sourceTable:
|
||||
tableName,
|
||||
sourceTableId:
|
||||
tableId,
|
||||
sourceColumns,
|
||||
targetTable,
|
||||
targetColumns,
|
||||
updateAction:
|
||||
reference.on_update,
|
||||
deleteAction:
|
||||
reference.on_delete,
|
||||
};
|
||||
pendingForeignKeys.push(
|
||||
pendingFk
|
||||
);
|
||||
} else {
|
||||
// Create foreign key relationships
|
||||
for (
|
||||
let i = 0;
|
||||
i <
|
||||
Math.min(
|
||||
sourceColumns.length,
|
||||
targetColumns.length
|
||||
);
|
||||
i++
|
||||
) {
|
||||
const fk: SQLForeignKey =
|
||||
{
|
||||
name: constraintDef.constraint_name
|
||||
? constraintDef.constraint_name.replace(
|
||||
/`/g,
|
||||
''
|
||||
)
|
||||
: `${tableName}_${sourceColumns[i]}_fkey`,
|
||||
sourceTable:
|
||||
tableName,
|
||||
sourceColumn:
|
||||
sourceColumns[
|
||||
i
|
||||
],
|
||||
targetTable,
|
||||
targetColumn:
|
||||
targetColumns[
|
||||
i
|
||||
],
|
||||
sourceTableId:
|
||||
tableId,
|
||||
targetTableId,
|
||||
updateAction:
|
||||
reference.on_update,
|
||||
deleteAction:
|
||||
reference.on_delete,
|
||||
};
|
||||
|
||||
relationships.push(
|
||||
fk
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
} else {
|
||||
// If parser fails, try regex-based extraction as fallback
|
||||
const extractedColumns =
|
||||
extractColumnsFromCreateTable(trimmedStmt);
|
||||
if (extractedColumns.length > 0) {
|
||||
columns.push(...extractedColumns);
|
||||
}
|
||||
}
|
||||
|
||||
// Create and store the table
|
||||
tables.push({
|
||||
id: tableId,
|
||||
name: simpleTableName || tableName,
|
||||
schema: database || undefined,
|
||||
columns,
|
||||
indexes,
|
||||
order: tables.length,
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (parseError) {
|
||||
console.error(
|
||||
'Error parsing CREATE TABLE statement:',
|
||||
parseError
|
||||
);
|
||||
|
||||
// Error handling without logging
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Second pass: process CREATE INDEX statements
|
||||
for (const statement of statements) {
|
||||
const trimmedStmt = statement.trim();
|
||||
if (
|
||||
trimmedStmt.toUpperCase().startsWith('CREATE INDEX') ||
|
||||
trimmedStmt.toUpperCase().startsWith('CREATE UNIQUE INDEX')
|
||||
) {
|
||||
processCreateIndexStatement(trimmedStmt, tableMap, tables);
|
||||
}
|
||||
}
|
||||
|
||||
// Third pass: process ALTER TABLE statements for foreign keys
|
||||
for (const statement of statements) {
|
||||
const trimmedStmt = statement.trim();
|
||||
if (
|
||||
trimmedStmt.toUpperCase().startsWith('ALTER TABLE') &&
|
||||
trimmedStmt.toUpperCase().includes('FOREIGN KEY')
|
||||
) {
|
||||
try {
|
||||
// Extract table name and schema
|
||||
const tableRegex =
|
||||
/ALTER TABLE\s+(?:`?([^`\s.]+)`?\.)?`?([^`\s.(]+)`?\s+/i;
|
||||
const tableMatch = statement.match(tableRegex);
|
||||
|
||||
if (!tableMatch) continue;
|
||||
|
||||
const databaseName = tableMatch[1] || '';
|
||||
const sourceTable = tableMatch[2];
|
||||
|
||||
// Look for source table in tableMap - try with and without database prefix
|
||||
let sourceTableId = tableMap[sourceTable];
|
||||
if (!sourceTableId && databaseName) {
|
||||
sourceTableId =
|
||||
tableMap[`${databaseName}.${sourceTable}`];
|
||||
}
|
||||
if (!sourceTableId) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Extract constraint name if it exists
|
||||
let constraintName = '';
|
||||
const constraintMatch = statement.match(
|
||||
/ADD CONSTRAINT\s+`?([^`\s(]+)`?\s+/i
|
||||
);
|
||||
if (constraintMatch) {
|
||||
constraintName = constraintMatch[1].replace(/`/g, '');
|
||||
}
|
||||
|
||||
// Extract source columns
|
||||
const sourceColMatch = statement.match(
|
||||
/FOREIGN KEY\s*\(([^)]+)\)/i
|
||||
);
|
||||
if (!sourceColMatch) continue;
|
||||
|
||||
const sourceColumns = sourceColMatch[1]
|
||||
.split(',')
|
||||
.map((col) => col.trim().replace(/`/g, ''));
|
||||
|
||||
// Extract target table and columns
|
||||
const targetMatch = statement.match(
|
||||
/REFERENCES\s+(?:`?([^`\s.]+)`?\.)?`?([^`\s(]+)`?\s*\(([^)]+)\)/i
|
||||
);
|
||||
if (!targetMatch) continue;
|
||||
|
||||
const targetDatabase = targetMatch[1] || '';
|
||||
const targetTable = targetMatch[2];
|
||||
const targetColumns = targetMatch[3]
|
||||
.split(',')
|
||||
.map((col) => col.trim().replace(/`/g, ''));
|
||||
|
||||
// Try to find target table with and without database prefix
|
||||
let targetTableId = tableMap[targetTable];
|
||||
if (!targetTableId && targetDatabase) {
|
||||
targetTableId =
|
||||
tableMap[`${targetDatabase}.${targetTable}`];
|
||||
}
|
||||
|
||||
if (!targetTableId) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Extract ON DELETE and ON UPDATE actions
|
||||
let updateAction: string | undefined;
|
||||
let deleteAction: string | undefined;
|
||||
|
||||
const onDeleteMatch = statement.match(
|
||||
/ON DELETE\s+([A-Z\s]+?)(?=\s+ON|\s*$)/i
|
||||
);
|
||||
if (onDeleteMatch) {
|
||||
deleteAction = onDeleteMatch[1].trim();
|
||||
}
|
||||
|
||||
const onUpdateMatch = statement.match(
|
||||
/ON UPDATE\s+([A-Z\s]+?)(?=\s+ON|\s*$)/i
|
||||
);
|
||||
if (onUpdateMatch) {
|
||||
updateAction = onUpdateMatch[1].trim();
|
||||
}
|
||||
|
||||
// Create the foreign key relationships
|
||||
for (
|
||||
let i = 0;
|
||||
i <
|
||||
Math.min(sourceColumns.length, targetColumns.length);
|
||||
i++
|
||||
) {
|
||||
const fk: SQLForeignKey = {
|
||||
name:
|
||||
constraintName ||
|
||||
`${sourceTable}_${sourceColumns[i]}_fkey`,
|
||||
sourceTable,
|
||||
sourceColumn: sourceColumns[i],
|
||||
targetTable,
|
||||
targetColumn: targetColumns[i],
|
||||
sourceTableId,
|
||||
targetTableId,
|
||||
updateAction,
|
||||
deleteAction,
|
||||
};
|
||||
|
||||
relationships.push(fk);
|
||||
}
|
||||
} catch (fkError) {
|
||||
console.error(
|
||||
'Error processing foreign key in ALTER TABLE:',
|
||||
fkError
|
||||
);
|
||||
|
||||
// Error handling without logging
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// After processing all tables, process pending foreign keys:
|
||||
if (pendingForeignKeys.length > 0) {
|
||||
for (const pendingFk of pendingForeignKeys) {
|
||||
// Try with and without database prefix
|
||||
let targetTableId = tableMap[pendingFk.targetTable];
|
||||
|
||||
// Try to extract database if the target table has a database prefix
|
||||
const targetTableParts = pendingFk.targetTable.split('.');
|
||||
if (!targetTableId && targetTableParts.length > 1) {
|
||||
const tableName = targetTableParts[1];
|
||||
targetTableId = tableMap[tableName];
|
||||
}
|
||||
|
||||
if (!targetTableId) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Create foreign key relationships
|
||||
for (
|
||||
let i = 0;
|
||||
i <
|
||||
Math.min(
|
||||
pendingFk.sourceColumns.length,
|
||||
pendingFk.targetColumns.length
|
||||
);
|
||||
i++
|
||||
) {
|
||||
const fk: SQLForeignKey = {
|
||||
name:
|
||||
pendingFk.name ||
|
||||
`${pendingFk.sourceTable}_${pendingFk.sourceColumns[i]}_fkey`,
|
||||
sourceTable: pendingFk.sourceTable,
|
||||
sourceColumn: pendingFk.sourceColumns[i],
|
||||
targetTable: pendingFk.targetTable,
|
||||
targetColumn: pendingFk.targetColumns[i],
|
||||
sourceTableId: pendingFk.sourceTableId,
|
||||
targetTableId,
|
||||
updateAction: pendingFk.updateAction,
|
||||
deleteAction: pendingFk.deleteAction,
|
||||
};
|
||||
|
||||
relationships.push(fk);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { tables, relationships };
|
||||
} catch (error) {
|
||||
console.error('Error in MySQL dump parser:', error);
|
||||
|
||||
throw new Error(
|
||||
`Error parsing MySQL dump: ${(error as Error).message}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export function isMySQLFormat(sqlContent: string): boolean {
|
||||
// Common patterns in MySQL dumps
|
||||
const mysqlDumpPatterns = [
|
||||
/START TRANSACTION/i,
|
||||
/CREATE TABLE.*IF NOT EXISTS/i,
|
||||
/ENGINE\s*=\s*(?:InnoDB|MyISAM|MEMORY|ARCHIVE)/i,
|
||||
/DEFAULT CHARSET\s*=\s*(?:utf8|latin1)/i,
|
||||
/COLLATE\s*=\s*(?:utf8_general_ci|latin1_swedish_ci)/i,
|
||||
/AUTO_INCREMENT\s*=\s*\d+/i,
|
||||
/ALTER TABLE.*ADD CONSTRAINT.*FOREIGN KEY/i,
|
||||
/-- (MySQL|MariaDB) dump/i,
|
||||
];
|
||||
|
||||
// Look for backticks around identifiers (common in MySQL)
|
||||
const hasBackticks = /`[^`]+`/.test(sqlContent);
|
||||
|
||||
// Check for MySQL specific comments
|
||||
const hasMysqlComments =
|
||||
/-- MySQL dump|-- Host:|-- Server version:|-- Dump completed on/.test(
|
||||
sqlContent
|
||||
);
|
||||
|
||||
// If there are MySQL specific comments, it's likely a MySQL dump
|
||||
if (hasMysqlComments) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Count how many MySQL patterns are found
|
||||
let patternCount = 0;
|
||||
for (const pattern of mysqlDumpPatterns) {
|
||||
if (pattern.test(sqlContent)) {
|
||||
patternCount++;
|
||||
}
|
||||
}
|
||||
|
||||
// If the SQL has backticks and at least a few MySQL patterns, it's likely MySQL
|
||||
const isLikelyMysql = hasBackticks && patternCount >= 2;
|
||||
|
||||
return isLikelyMysql;
|
||||
}
|
||||
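A minimal usage sketch for the importer above. Only isMySQLFormat and fromMySQL come from this file; the relative import path, the importDump helper, and the logging are hypothetical and shown only to illustrate the intended call order (detect the dialect, then parse).

    // Hypothetical driver, assuming the module is importable as './mysql'.
    import { fromMySQL, isMySQLFormat } from './mysql';

    export async function importDump(sqlContent: string) {
        if (!isMySQLFormat(sqlContent)) {
            throw new Error('Input does not look like a MySQL/MariaDB dump');
        }
        // fromMySQL is async because node-sql-parser is imported lazily.
        const { tables, relationships } = await fromMySQL(sqlContent);
        console.log(
            `Parsed ${tables.length} tables and ${relationships.length} foreign keys`
        );
        return { tables, relationships };
    }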
@@ -0,0 +1,250 @@
export const parserOpts = { database: 'postgresql' };

// Define interfaces for AST nodes - Fixed no-explicit-any issues
export interface SQLAstNode {
    type: string;
    keyword?: string;
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    [key: string]: any; // Need to keep any here for compatibility with Parser's AST
}

// Define a minimal interface for table objects used in helper functions
export interface TableLike {
    id: string;
    name: string;
    schema?: string;
    columns: unknown[];
    indexes: unknown[];
}

export interface TableReference {
    table?: string;
    schema?: string;
    db?: string; // Support for PostgreSQL AST compatibility
}

export interface ColumnReference {
    column?:
        | string
        | { value?: string; expr?: { value?: string; type?: string } };
    expr?: { value?: string; type?: string };
    value?: string;
    type?: string;
}

export interface ColumnDefinition {
    resource: string;
    column: string | ColumnReference;
    definition?: {
        dataType?: string;
        constraint?: string;
        length?: number;
        precision?: number;
        scale?: number;
    };
    primary_key?: string;
    nullable?: { type?: string };
    unique?: string;
    default_val?: SQLAstNode;
    auto_increment?: string;
}

export interface ConstraintDefinition {
    resource: string;
    constraint_type: string;
    constraint_name?: string;
    definition?: Array<ColumnReference> | { columns?: string[] };
    columns?: string[];
    reference_definition?: ReferenceDefinition;
    reference?: ReferenceDefinition;
}

export interface ReferenceDefinition {
    table?: string | TableReference | TableReference[];
    columns?: Array<ColumnReference | string> | string[];
    definition?: Array<ColumnReference>;
    on_update?: string;
    on_delete?: string;
}

export interface CreateTableStatement extends SQLAstNode {
    table: TableReference | TableReference[];
    create_definitions?: Array<ColumnDefinition | ConstraintDefinition>;
    comment?: string;
}

export interface CreateIndexStatement extends SQLAstNode {
    table: TableReference | TableReference[] | string;
    index?: string;
    index_name?: string;
    index_type?: string;
    unique?: boolean;
    columns?: Array<ColumnReference>;
    index_columns?: Array<{ column?: ColumnReference } | ColumnReference>;
}

export interface AlterTableConstraintDefinition extends ConstraintDefinition {
    constraint?: string;
}

export interface AlterTableExprItem {
    action: string;
    resource?: string;
    type?: string;
    constraint?: { constraint_type?: string };
    create_definitions?:
        | AlterTableConstraintDefinition
        | {
              constraint_type?: string;
              definition?: Array<ColumnReference>;
              constraint?: string;
              reference_definition?: ReferenceDefinition;
              resource?: string;
          };
}

export interface AlterTableStatement extends SQLAstNode {
    table: TableReference[] | TableReference | string;
    expr: AlterTableExprItem[];
}

// Define type for column type arguments
export interface TypeArgs {
    length?: number;
    precision?: number;
    scale?: number;
}

// Helper to extract column name from different AST formats
export function extractColumnName(
    columnObj: string | ColumnReference | undefined
): string {
    if (!columnObj) return '';

    // Handle different formats based on actual AST structure
    if (typeof columnObj === 'string') return columnObj;

    if (typeof columnObj === 'object') {
        // Direct column property
        if (columnObj.column) {
            if (typeof columnObj.column === 'string') return columnObj.column;
            if (typeof columnObj.column === 'object') {
                // Handle nested value property
                if (columnObj.column.value) return columnObj.column.value;
                // Handle expression property with value
                if (columnObj.column.expr?.value)
                    return columnObj.column.expr.value;
                // Handle double_quote_string type which is common in PostgreSQL
                if (columnObj.column.expr?.type === 'double_quote_string')
                    return columnObj.column.expr.value || '';
                // Direct access to expr
                if (columnObj.column.expr?.type === 'default')
                    return columnObj.column.expr.value || '';
            }
        }

        // Direct expr property
        if (columnObj.expr) {
            if (columnObj.expr.type === 'default')
                return columnObj.expr.value || '';
            if (columnObj.expr.type === 'double_quote_string')
                return columnObj.expr.value || '';
            if (columnObj.expr.value) return columnObj.expr.value;
        }

        // Direct value property
        if (columnObj.value) return columnObj.value;
    }

    return '';
}

// Helper function to extract type arguments from column definition
export function getTypeArgs(
    definition: ColumnDefinition['definition'] | undefined
): TypeArgs {
    const typeArgs: TypeArgs = {};

    if (!definition) return typeArgs;

    if (definition.length !== undefined) {
        typeArgs.length = definition.length;
    }

    if (definition.scale !== undefined && definition.precision !== undefined) {
        typeArgs.precision = definition.precision;
        typeArgs.scale = definition.scale;
    }

    return typeArgs;
}

// Helper function to find a table with consistent schema handling
export function findTableWithSchemaSupport(
    tables: TableLike[],
    tableName: string,
    schemaName?: string
): TableLike | undefined {
    // Default to public schema if none provided
    const effectiveSchema = schemaName || 'public';

    // First try with exact schema match
    let table = tables.find(
        (t) => t.name === tableName && t.schema === effectiveSchema
    );

    // If not found with schema, try with the legacy schema match
    if (!table && schemaName) {
        table = tables.find(
            (t) => t.name === tableName && t.schema === schemaName
        );
    }

    // If still not found with schema, try any match on the table name
    if (!table) {
        table = tables.find((t) => t.name === tableName);
        if (table) {
            console.log(
                `Found table ${tableName} without schema match, source schema: ${effectiveSchema}, table schema: ${table.schema}`
            );
        }
    }

    return table;
}

// Helper function to find table ID with schema support
export function getTableIdWithSchemaSupport(
    tableMap: Record<string, string>,
    tableName: string,
    schemaName?: string
): string | undefined {
    // Default to public schema if none provided
    const effectiveSchema = schemaName || 'public';

    // First try with schema
    const tableKey = `${effectiveSchema}.${tableName}`;
    let tableId = tableMap[tableKey];

    // If not found with the effective schema, try with the original schema if different
    if (!tableId && schemaName && schemaName !== effectiveSchema) {
        const originalSchemaKey = `${schemaName}.${tableName}`;
        tableId = tableMap[originalSchemaKey];
    }

    // If still not found with schema, try without schema
    if (!tableId) {
        tableId = tableMap[tableName];
        if (tableId) {
            console.log(
                `Found table ID for ${tableName} without schema match, source schema: ${effectiveSchema}`
            );
        } else {
            console.warn(
                `No table ID found for ${tableName} with schema ${effectiveSchema}`
            );
        }
    }

    return tableId;
}
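A short illustration of the AST shapes the helpers above normalize. The object literals are hand-written stand-ins for node-sql-parser output, and the module specifier is an assumption since the diff omits this file's name.

    import { extractColumnName, getTypeArgs } from './postgresql-common'; // path assumed

    // Plain string and nested expression forms both resolve to the column name.
    extractColumnName('email'); // 'email'
    extractColumnName({
        column: { expr: { type: 'double_quote_string', value: 'userId' } },
    }); // 'userId'

    // Type arguments for varchar(255) and numeric(10, 2) style definitions.
    getTypeArgs({ dataType: 'VARCHAR', length: 255 }); // { length: 255 }
    getTypeArgs({ dataType: 'NUMERIC', precision: 10, scale: 2 }); // { precision: 10, scale: 2 }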
@@ -0,0 +1,781 @@
import { generateId } from '@/lib/utils';
import type {
    SQLParserResult,
    SQLTable,
    SQLColumn,
    SQLIndex,
    SQLForeignKey,
} from '../../common';
import { buildSQLFromAST } from '../../common';
import type {
    ColumnDefinition,
    ConstraintDefinition,
    CreateTableStatement,
    TableReference,
} from './postgresql-common';
import {
    parserOpts,
    extractColumnName,
    getTypeArgs,
} from './postgresql-common';

// Helper to extract statements from PostgreSQL dump
function extractStatements(sqlContent: string): string[] {
    const statements: string[] = [];
    let currentStatement = '';
    const lines = sqlContent.split('\n');

    for (let i = 0; i < lines.length; i++) {
        const line = lines[i].trim();

        // Skip comments and empty lines
        if (line.startsWith('--') || line === '') {
            continue;
        }

        // Add line to current statement
        currentStatement += line + ' ';

        // If line ends with semicolon, consider statement complete
        if (line.endsWith(';')) {
            statements.push(currentStatement.trim());
            currentStatement = '';
        }
    }

    // Handle any remaining statement
    if (currentStatement.trim()) {
        statements.push(currentStatement.trim());
    }

    return statements;
}

// Process PostgreSQL pg_dump foreign key constraints
function processForeignKeyConstraint(
    statement: string,
    tableMap: Record<string, string>,
    relationships: SQLForeignKey[]
): void {
    // Only process statements that look like foreign key constraints
    if (
        !statement.includes('ADD CONSTRAINT') ||
        !statement.includes('FOREIGN KEY') ||
        !statement.includes('REFERENCES')
    ) {
        return;
    }

    try {
        // Extract source table info - find between ALTER TABLE and ADD CONSTRAINT
        // This regex handles:
        // - ALTER TABLE ONLY schema.table
        // - ALTER TABLE ONLY "schema"."table"
        // - ALTER TABLE schema.table
        // - ALTER TABLE "schema"."table"
        const tableRegex =
            /ALTER TABLE(?:\s+ONLY)?\s+(?:"?([^"\s.]+)"?\.)?(?:"?([^"\s.(]+)"?)/i;
        const tableMatch = statement.match(tableRegex);

        if (!tableMatch) {
            return;
        }

        // Extract source schema and table name
        const sourceSchema = tableMatch[1] || '';
        const sourceTable = tableMatch[2];

        // Find constraint name
        const constraintRegex = /ADD CONSTRAINT\s+"?([^"\s]+)"?\s+FOREIGN KEY/i;
        const constraintMatch = statement.match(constraintRegex);
        const constraintName = constraintMatch ? constraintMatch[1] : '';

        // Extract source columns - handles either quoted or unquoted column names
        // This regex captures columns in format: FOREIGN KEY (col1, col2, ...)
        const sourceColRegex = /FOREIGN KEY\s+\(\s*([^)]+)\)/i;
        const sourceColMatch = statement.match(sourceColRegex);

        if (!sourceColMatch) {
            return;
        }

        // Parse the captured group to extract all columns
        const sourceColumnsPart = sourceColMatch[1];
        const sourceColumns = sourceColumnsPart.split(',').map((col) =>
            col
                .trim()
                .replace(/^"(.*)"$/, '$1')
                .replace(/^\s*"?([^"\s]+)"?\s*$/, '$1')
        );

        // Extract target table and columns
        // This regex handles: REFERENCES schema.table (col1, col2, ...)
        const targetRegex =
            /REFERENCES\s+(?:"?([^"\s.]+)"?\.)?(?:"?([^"\s.(]+)"?)\s*\(\s*([^)]+)\)/i;
        const targetMatch = statement.match(targetRegex);

        if (!targetMatch) {
            return;
        }

        // Extract target schema, table and columns
        const targetSchema = targetMatch[1] || '';
        const targetTable = targetMatch[2];

        // Parse the captured group to extract all target columns
        const targetColumnsPart = targetMatch[3];
        const targetColumns = targetColumnsPart.split(',').map((col) =>
            col
                .trim()
                .replace(/^"(.*)"$/, '$1')
                .replace(/^\s*"?([^"\s]+)"?\s*$/, '$1')
        );

        // Extract ON DELETE and ON UPDATE actions
        const deleteActionRegex = /ON DELETE\s+([A-Z\s]+?)(?:\s+ON|;|\s*$)/i;
        const deleteActionMatch = statement.match(deleteActionRegex);
        const deleteAction = deleteActionMatch
            ? deleteActionMatch[1].trim()
            : undefined;

        const updateActionRegex = /ON UPDATE\s+([A-Z\s]+?)(?:\s+ON|;|\s*$)/i;
        const updateActionMatch = statement.match(updateActionRegex);
        const updateAction = updateActionMatch
            ? updateActionMatch[1].trim()
            : undefined;

        // Look up table IDs
        const sourceTableKey = `${sourceSchema ? sourceSchema + '.' : ''}${sourceTable}`;
        const sourceTableId = tableMap[sourceTableKey];

        const targetTableKey = `${targetSchema ? targetSchema + '.' : ''}${targetTable}`;
        const targetTableId = tableMap[targetTableKey];

        if (!sourceTableId || !targetTableId) {
            return;
        }

        // Create relationships for each column pair
        for (
            let i = 0;
            i < Math.min(sourceColumns.length, targetColumns.length);
            i++
        ) {
            const relationship: SQLForeignKey = {
                name:
                    constraintName || `${sourceTable}_${sourceColumns[i]}_fkey`,
                sourceTable,
                sourceSchema,
                sourceColumn: sourceColumns[i],
                targetTable,
                targetSchema,
                targetColumn: targetColumns[i],
                sourceTableId,
                targetTableId,
                updateAction,
                deleteAction,
            };

            relationships.push(relationship);
        }
    } catch (error) {
        console.error('Error processing foreign key constraint:', error);
    }
}

// Function to extract columns from a CREATE TABLE statement using regex
function extractColumnsFromCreateTable(statement: string): SQLColumn[] {
    const columns: SQLColumn[] = [];

    // Extract everything between the first opening and last closing parenthesis
    const columnMatch = statement.match(/CREATE\s+TABLE.*?\((.*)\)[^)]*;$/s);
    if (!columnMatch || !columnMatch[1]) {
        return columns;
    }

    const columnDefs = columnMatch[1].trim();
    // Split by commas, but not those within parentheses (for nested type definitions)
    const columnLines = columnDefs.split(/,(?![^(]*\))/);

    for (const columnLine of columnLines) {
        const line = columnLine.trim();
        // Skip constraints at the table level
        if (
            line.toUpperCase().startsWith('CONSTRAINT') ||
            line.toUpperCase().startsWith('PRIMARY KEY') ||
            line.toUpperCase().startsWith('FOREIGN KEY') ||
            line.toUpperCase().startsWith('UNIQUE')
        ) {
            continue;
        }

        // Extract column name and definition
        const columnNameMatch = line.match(/^"?([^"\s]+)"?\s+(.+)$/);
        if (columnNameMatch) {
            const columnName = columnNameMatch[1];
            const definition = columnNameMatch[2];

            // Determine if column is nullable
            const nullable = !definition.toUpperCase().includes('NOT NULL');

            // Determine if column is primary key
            const primaryKey = definition.toUpperCase().includes('PRIMARY KEY');

            // Extract data type
            const typeMatch = definition.match(/^([^\s(]+)(?:\(([^)]+)\))?/);
            const dataType = typeMatch ? typeMatch[1] : '';

            columns.push({
                name: columnName,
                type: dataType,
                nullable,
                primaryKey,
                unique: definition.toUpperCase().includes('UNIQUE'),
            });
        }
    }

    return columns;
}

// Process PostgreSQL pg_dump primary key constraints
function processPrimaryKeyConstraint(
    statement: string,
    tableMap: Record<string, string>,
    tables: SQLTable[]
): void {
    // Only process statements that look like primary key constraints
    if (
        !statement.includes('ADD CONSTRAINT') ||
        !statement.includes('PRIMARY KEY')
    ) {
        return;
    }

    try {
        // Extract source table info - similar pattern as FK extraction
        const tableRegex =
            /ALTER TABLE(?:\s+ONLY)?\s+(?:"?([^"\s.]+)"?\.)?(?:"?([^"\s.(]+)"?)/i;
        const tableMatch = statement.match(tableRegex);

        if (!tableMatch) {
            return;
        }

        // Extract source schema and table name
        const sourceSchema = tableMatch[1] || '';
        const sourceTable = tableMatch[2];

        // Extract primary key columns
        const pkColRegex = /PRIMARY KEY\s+\(\s*([^)]+)\)/i;
        const pkColMatch = statement.match(pkColRegex);

        if (!pkColMatch) {
            return;
        }

        // Parse the captured group to extract all columns
        const pkColumnsPart = pkColMatch[1];
        const pkColumns = pkColumnsPart.split(',').map((col) =>
            col
                .trim()
                .replace(/^"(.*)"$/, '$1')
                .replace(/^\s*"?([^"\s]+)"?\s*$/, '$1')
        );

        // Find the table in our collection
        const tableKey = `${sourceSchema ? sourceSchema + '.' : ''}${sourceTable}`;
        const tableId = tableMap[tableKey];

        if (!tableId) {
            return;
        }

        // Find the table in our tables array
        const table = tables.find((t) => t.id === tableId);
        if (!table) {
            return;
        }

        // Mark columns as primary key
        pkColumns.forEach((colName) => {
            const column = table.columns.find((c) => c.name === colName);
            if (column) {
                column.primaryKey = true;
            }
        });

        // Add a primary key index if it doesn't exist
        if (pkColumns.length > 0) {
            const existingPkIndex = table.indexes.find(
                (idx) =>
                    idx.unique &&
                    idx.columns.length === pkColumns.length &&
                    idx.columns.every((col, i) => col === pkColumns[i])
            );

            if (!existingPkIndex) {
                const pkIndexName =
                    statement.match(
                        /ADD CONSTRAINT\s+"?([^"\s]+)"?\s+PRIMARY KEY/i
                    )?.[1] || `pk_${sourceTable}`;

                table.indexes.push({
                    name: pkIndexName,
                    columns: pkColumns,
                    unique: true,
                });
            }
        }
    } catch (error) {
        console.error('Error processing primary key constraint:', error);
    }
}

// Process PostgreSQL pg_dump unique constraints
function processUniqueConstraint(
    statement: string,
    tableMap: Record<string, string>,
    tables: SQLTable[]
): void {
    // Only process statements that look like unique constraints
    if (
        !statement.includes('ADD CONSTRAINT') ||
        !statement.includes('UNIQUE')
    ) {
        return;
    }

    try {
        // Extract source table info - similar pattern as other constraints
        const tableRegex =
            /ALTER TABLE(?:\s+ONLY)?\s+(?:"?([^"\s.]+)"?\.)?(?:"?([^"\s.(]+)"?)/i;
        const tableMatch = statement.match(tableRegex);

        if (!tableMatch) {
            return;
        }

        // Extract source schema and table name
        const sourceSchema = tableMatch[1] || '';
        const sourceTable = tableMatch[2];

        // Extract constraint name
        const constraintNameRegex = /ADD CONSTRAINT\s+"?([^"\s]+)"?\s+UNIQUE/i;
        const constraintNameMatch = statement.match(constraintNameRegex);
        const constraintName = constraintNameMatch
            ? constraintNameMatch[1]
            : `unique_${sourceTable}`;

        // Extract unique columns
        const uniqueColRegex = /UNIQUE\s+\(\s*([^)]+)\)/i;
        const uniqueColMatch = statement.match(uniqueColRegex);

        if (!uniqueColMatch) {
            return;
        }

        // Parse the captured group to extract all columns
        const uniqueColumnsPart = uniqueColMatch[1];
        const uniqueColumns = uniqueColumnsPart.split(',').map((col) =>
            col
                .trim()
                .replace(/^"(.*)"$/, '$1')
                .replace(/^\s*"?([^"\s]+)"?\s*$/, '$1')
        );

        // Find the table in our collection
        const tableKey = `${sourceSchema ? sourceSchema + '.' : ''}${sourceTable}`;
        const tableId = tableMap[tableKey];

        if (!tableId) {
            return;
        }

        // Find the table in our tables array
        const table = tables.find((t) => t.id === tableId);
        if (!table) {
            return;
        }

        // Mark columns as unique if it's a single column constraint
        if (uniqueColumns.length === 1) {
            const column = table.columns.find(
                (c) => c.name === uniqueColumns[0]
            );
            if (column) {
                column.unique = true;
            }
        }

        // Add a unique index if it doesn't exist
        if (uniqueColumns.length > 0) {
            const existingUniqueIndex = table.indexes.find(
                (idx) =>
                    idx.unique &&
                    idx.columns.length === uniqueColumns.length &&
                    idx.columns.every((col, i) => col === uniqueColumns[i])
            );

            if (!existingUniqueIndex) {
                table.indexes.push({
                    name: constraintName,
                    columns: uniqueColumns,
                    unique: true,
                });
            }
        }
    } catch (error) {
        console.error('Error processing unique constraint:', error);
    }
}

// Process PostgreSQL pg_dump CREATE INDEX statements
function processCreateIndexStatement(
    statement: string,
    tableMap: Record<string, string>,
    tables: SQLTable[]
): void {
    if (
        !statement.startsWith('CREATE INDEX') &&
        !statement.startsWith('CREATE UNIQUE INDEX')
    ) {
        return;
    }

    try {
        // Determine if the index is unique
        const isUnique = statement.startsWith('CREATE UNIQUE INDEX');

        // Extract index name
        const indexNameRegex = /CREATE (?:UNIQUE )?INDEX\s+"?([^"\s]+)"?/i;
        const indexNameMatch = statement.match(indexNameRegex);
        const indexName = indexNameMatch ? indexNameMatch[1] : '';

        if (!indexName) {
            return;
        }

        // Extract table name and schema
        const tableRegex = /ON\s+(?:"?([^"\s.]+)"?\.)?(?:"?([^"\s.(]+)"?)/i;
        const tableMatch = statement.match(tableRegex);

        if (!tableMatch) {
            return;
        }

        const tableSchema = tableMatch[1] || '';
        const tableName = tableMatch[2];

        // Extract index columns
        const columnsRegex = /\(\s*([^)]+)\)/i;
        const columnsMatch = statement.match(columnsRegex);

        if (!columnsMatch) {
            return;
        }

        // Parse columns (handle function-based indexes, etc.)
        const columnsStr = columnsMatch[1];
        // This is a simplified approach - advanced indexes may need more complex parsing
        const indexColumns = columnsStr.split(',').map((col) => {
            // Extract basic column name, handling possible expressions
            const colName = col
                .trim()
                .replace(/^"(.*)"$/, '$1')
                .replace(/^\s*"?([^"\s(]+)"?\s*.*$/, '$1'); // Get just the column name part
            return colName;
        });

        if (indexColumns.length === 0) {
            return;
        }

        // Find the table
        const tableKey = `${tableSchema ? tableSchema + '.' : ''}${tableName}`;
        const tableId = tableMap[tableKey];

        if (!tableId) {
            return;
        }

        const table = tables.find((t) => t.id === tableId);
        if (!table) {
            return;
        }

        // Check if a similar index already exists (to avoid duplicates)
        const existingIndex = table.indexes.find(
            (idx) =>
                idx.name === indexName ||
                (idx.columns.length === indexColumns.length &&
                    idx.columns.every((col, i) => col === indexColumns[i]))
        );

        if (!existingIndex) {
            table.indexes.push({
                name: indexName,
                columns: indexColumns,
                unique: isUnique,
            });
        }
    } catch (error) {
        console.error('Error processing CREATE INDEX statement:', error);
    }
}

// PostgreSQL dump-specific parsing logic - optimized for pg_dump output format
export async function fromPostgresDump(
    sqlContent: string
): Promise<SQLParserResult> {
    const tables: SQLTable[] = [];
    const relationships: SQLForeignKey[] = [];
    const tableMap: Record<string, string> = {}; // Maps table name to its ID

    // Extract statements for different types to process in the correct order
    const alterTableStatements: string[] = [];
    const createTableStatements: string[] = [];
    const createIndexStatements: string[] = [];

    // Split SQL dump into statements
    const statements = extractStatements(sqlContent);

    for (const statement of statements) {
        if (statement.trim().startsWith('CREATE TABLE')) {
            createTableStatements.push(statement);
        } else if (statement.trim().startsWith('CREATE INDEX')) {
            createIndexStatements.push(statement);
        } else if (statement.trim().startsWith('ALTER TABLE')) {
            alterTableStatements.push(statement);
        }
    }

    try {
        // Phase 1: Process CREATE TABLE statements individually
        for (const statement of createTableStatements) {
            try {
                const { Parser } = await import('node-sql-parser');
                const parser = new Parser();
                // Parse just this statement with the SQL parser
                const ast = parser.astify(statement, parserOpts);
                if (Array.isArray(ast) && ast.length > 0) {
                    const createTableStmt = ast[0] as CreateTableStatement;

                    // Extract table name and schema
                    let tableName = '';
                    let schemaName = '';

                    if (
                        createTableStmt.table &&
                        typeof createTableStmt.table === 'object'
                    ) {
                        // Handle array of tables if needed
                        if (
                            Array.isArray(createTableStmt.table) &&
                            createTableStmt.table.length > 0
                        ) {
                            const tableObj = createTableStmt.table[0];
                            tableName = tableObj.table || '';
                            schemaName = tableObj.schema || '';
                        } else {
                            // Direct object reference
                            const tableObj =
                                createTableStmt.table as TableReference;
                            tableName = tableObj.table || '';
                            schemaName = tableObj.schema || '';
                        }
                    }

                    if (!tableName) {
                        // Try to extract table name using regex for cases where the parser might fail
                        const tableNameMatch = statement.match(
                            /CREATE\s+TABLE\s+(?:ONLY\s+)?(?:(?:"?([^"\s.]+)"?\.)?"?([^"\s.(]+)"?)/i
                        );
                        if (tableNameMatch) {
                            schemaName = tableNameMatch[1] || '';
                            tableName = tableNameMatch[2];
                        } else {
                            continue;
                        }
                    }

                    // If schema is not in the AST, try to extract it from the SQL
                    if (!schemaName) {
                        // Look for schema in CREATE TABLE statement: CREATE TABLE schema.table (
                        const schemaMatch = statement.match(
                            /CREATE\s+TABLE\s+(?:ONLY\s+)?(?:"?([^"\s.]+)"?\.)/i
                        );
                        if (schemaMatch && schemaMatch[1]) {
                            schemaName = schemaMatch[1];
                        }
                    }

                    // Generate a unique ID for the table
                    const tableId = generateId();
                    const tableKey = `${schemaName ? schemaName + '.' : ''}${tableName}`;
                    tableMap[tableKey] = tableId;

                    // Process table columns
                    let columns: SQLColumn[] = [];
                    const indexes: SQLIndex[] = [];

                    // Try to extract columns from AST first
                    let columnsFromAst = false;
                    if (
                        createTableStmt.create_definitions &&
                        Array.isArray(createTableStmt.create_definitions)
                    ) {
                        createTableStmt.create_definitions.forEach(
                            (def: ColumnDefinition | ConstraintDefinition) => {
                                if (def.resource === 'column') {
                                    const columnDef = def as ColumnDefinition;
                                    const columnName = extractColumnName(
                                        columnDef.column
                                    );
                                    const dataType =
                                        columnDef.definition?.dataType || '';

                                    if (columnName) {
                                        const isPrimaryKey =
                                            columnDef.primary_key ===
                                                'primary key' ||
                                            columnDef.definition?.constraint ===
                                                'primary key';

                                        columns.push({
                                            name: columnName,
                                            type: dataType,
                                            nullable:
                                                columnDef.nullable?.type !==
                                                'not null',
                                            primaryKey: isPrimaryKey,
                                            unique:
                                                columnDef.unique === 'unique',
                                            typeArgs: getTypeArgs(
                                                columnDef.definition
                                            ),
                                            default: columnDef.default_val
                                                ? buildSQLFromAST(
                                                      columnDef.default_val
                                                  )
                                                : undefined,
                                            increment:
                                                columnDef.auto_increment ===
                                                'auto_increment',
                                        });
                                        columnsFromAst = true;
                                    }
                                }
                            }
                        );
                    }

                    // If we couldn't extract columns from AST, try regex approach
                    if (!columnsFromAst || columns.length === 0) {
                        columns = extractColumnsFromCreateTable(statement);
                    }

                    // Create and add the table object
                    const table: SQLTable = {
                        id: tableId,
                        name: tableName,
                        schema: schemaName,
                        columns,
                        indexes,
                        order: tables.length,
                    };

                    // Set comment if available
                    if (
                        'comment' in createTableStmt &&
                        typeof createTableStmt.comment === 'string'
                    ) {
                        table.comment = createTableStmt.comment;
                    }

                    tables.push(table);
                }
            } catch (error) {
                console.error('Error parsing CREATE TABLE statement:', error);

                // Fallback: extract table and columns using regex
                try {
                    const tableNameMatch = statement.match(
                        /CREATE\s+TABLE\s+(?:ONLY\s+)?(?:(?:"?([^"\s.]+)"?\.)?"?([^"\s.(]+)"?)/i
                    );
                    if (tableNameMatch) {
                        const schemaName = tableNameMatch[1] || '';
                        const tableName = tableNameMatch[2];

                        // Generate a unique ID for the table
                        const tableId = generateId();
                        const tableKey = `${schemaName ? schemaName + '.' : ''}${tableName}`;
                        tableMap[tableKey] = tableId;

                        // Extract columns using regex
                        const columns = extractColumnsFromCreateTable(statement);

                        // Create and add the table object
                        const table: SQLTable = {
                            id: tableId,
                            name: tableName,
                            schema: schemaName,
                            columns,
                            indexes: [],
                            order: tables.length,
                        };

                        tables.push(table);
                    }
                } catch (fallbackError) {
                    console.error(
                        'Fallback extraction also failed:',
                        fallbackError
                    );
                }
            }
        }

        // Phase 2: Process CREATE INDEX statements
        for (const statement of createIndexStatements) {
            processCreateIndexStatement(statement, tableMap, tables);
        }

        // Phase 3: First process PRIMARY KEY constraints
        for (const statement of alterTableStatements) {
            if (statement.includes('PRIMARY KEY')) {
                processPrimaryKeyConstraint(statement, tableMap, tables);
            }
        }

        // Phase 3.5: Then process UNIQUE constraints
        for (const statement of alterTableStatements) {
            if (
                statement.includes('UNIQUE') &&
                !statement.includes('PRIMARY KEY')
            ) {
                processUniqueConstraint(statement, tableMap, tables);
            }
        }

        // Phase 4: Then process FOREIGN KEY constraints
        for (const statement of alterTableStatements) {
            if (statement.includes('FOREIGN KEY')) {
                processForeignKeyConstraint(statement, tableMap, relationships);
            }
        }

        // Filter out relationships with missing IDs
        const validRelationships = relationships.filter(
            (rel) => rel.sourceTableId && rel.targetTableId
        );

        return { tables, relationships: validRelationships };
    } catch (error: unknown) {
        console.error('Error in PostgreSQL dump parser:', error);
        throw new Error(
            `Error parsing PostgreSQL dump: ${(error as Error).message}`
        );
    }
}
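To make the phase ordering in fromPostgresDump concrete, here is a tiny hand-written pg_dump-style input and a demo call. The sample SQL, the demo wrapper, and the import path are illustrative assumptions, not taken from the repository.

    import { fromPostgresDump } from './postgresql-dump'; // path assumed, the diff omits this file's name

    const sampleDump = `
    CREATE TABLE public.users (id integer NOT NULL, email text);
    CREATE TABLE public.posts (id integer NOT NULL, user_id integer);
    ALTER TABLE ONLY public.users ADD CONSTRAINT users_pkey PRIMARY KEY (id);
    ALTER TABLE ONLY public.posts ADD CONSTRAINT posts_user_id_fkey
        FOREIGN KEY (user_id) REFERENCES public.users (id) ON DELETE CASCADE;
    CREATE INDEX posts_user_id_idx ON public.posts (user_id);
    `;

    async function demo() {
        // Tables are registered in phase 1, so the PRIMARY KEY and FOREIGN KEY
        // constraints processed in the later phases can resolve their table IDs.
        const { tables, relationships } = await fromPostgresDump(sampleDump);
        console.log(tables.map((t) => `${t.schema}.${t.name}`), relationships);
    }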
@@ -0,0 +1,881 @@
|
||||
import { generateId } from '@/lib/utils';
|
||||
import type {
|
||||
SQLParserResult,
|
||||
SQLTable,
|
||||
SQLColumn,
|
||||
SQLIndex,
|
||||
SQLForeignKey,
|
||||
} from '../../common';
|
||||
import { buildSQLFromAST } from '../../common';
|
||||
import type {
|
||||
SQLAstNode,
|
||||
TableReference,
|
||||
ColumnReference,
|
||||
ColumnDefinition,
|
||||
ConstraintDefinition,
|
||||
CreateTableStatement,
|
||||
CreateIndexStatement,
|
||||
AlterTableExprItem,
|
||||
AlterTableStatement,
|
||||
} from './postgresql-common';
|
||||
import {
|
||||
parserOpts,
|
||||
extractColumnName,
|
||||
getTypeArgs,
|
||||
findTableWithSchemaSupport,
|
||||
getTableIdWithSchemaSupport,
|
||||
} from './postgresql-common';
|
||||
|
||||
// PostgreSQL-specific parsing logic
|
||||
export async function fromPostgres(
|
||||
sqlContent: string
|
||||
): Promise<SQLParserResult> {
|
||||
const tables: SQLTable[] = [];
|
||||
const relationships: SQLForeignKey[] = [];
|
||||
const tableMap: Record<string, string> = {}; // Maps table name to its ID
|
||||
|
||||
try {
|
||||
const { Parser } = await import('node-sql-parser');
|
||||
const parser = new Parser();
|
||||
// Parse the SQL DDL statements
|
||||
const ast = parser.astify(sqlContent, parserOpts);
|
||||
|
||||
if (!Array.isArray(ast)) {
|
||||
throw new Error('Failed to parse SQL DDL - AST is not an array');
|
||||
}
|
||||
|
||||
// Process each CREATE TABLE statement
|
||||
ast.forEach((stmt: SQLAstNode) => {
|
||||
if (stmt.type === 'create' && stmt.keyword === 'table') {
|
||||
// Extract table name and schema
|
||||
let tableName = '';
|
||||
let schemaName = '';
|
||||
|
||||
const createTableStmt = stmt as CreateTableStatement;
|
||||
|
||||
if (
|
||||
createTableStmt.table &&
|
||||
typeof createTableStmt.table === 'object'
|
||||
) {
|
||||
// Handle array of tables if needed
|
||||
if (
|
||||
Array.isArray(createTableStmt.table) &&
|
||||
createTableStmt.table.length > 0
|
||||
) {
|
||||
const tableObj = createTableStmt.table[0];
|
||||
tableName = tableObj.table || '';
|
||||
// Check for schema in both 'schema' and 'db' fields
|
||||
schemaName = tableObj.schema || tableObj.db || '';
|
||||
} else {
|
||||
// Direct object reference
|
||||
const tableObj =
|
||||
createTableStmt.table as TableReference;
|
||||
tableName = tableObj.table || '';
|
||||
// Check for schema in both 'schema' and 'db' fields
|
||||
schemaName = tableObj.schema || tableObj.db || '';
|
||||
}
|
||||
}
|
||||
|
||||
if (!tableName) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if tableName contains a schema prefix (schema.table)
|
||||
if (!schemaName && tableName.includes('.')) {
|
||||
const parts = tableName.split('.');
|
||||
schemaName = parts[0].replace(/"/g, '');
|
||||
tableName = parts[1].replace(/"/g, '');
|
||||
}
|
||||
|
||||
// If still no schema, ensure default schema is set to public
|
||||
if (!schemaName) {
|
||||
schemaName = 'public';
|
||||
}
|
||||
|
||||
// Generate a unique ID for the table
|
||||
const tableId = generateId();
|
||||
const tableKey = `${schemaName ? schemaName + '.' : ''}${tableName}`;
|
||||
tableMap[tableKey] = tableId;
|
||||
|
||||
// Process table columns
|
||||
const columns: SQLColumn[] = [];
|
||||
const indexes: SQLIndex[] = [];
|
||||
|
||||
                // Based on observed parser output: handle the different structure formats it can emit
|
||||
if (
|
||||
createTableStmt.create_definitions &&
|
||||
Array.isArray(createTableStmt.create_definitions)
|
||||
) {
|
||||
createTableStmt.create_definitions.forEach(
|
||||
(def: ColumnDefinition | ConstraintDefinition) => {
|
||||
// Process column definition
|
||||
if (def.resource === 'column') {
|
||||
const columnDef = def as ColumnDefinition;
|
||||
const columnName = extractColumnName(
|
||||
columnDef.column
|
||||
);
|
||||
const dataType =
|
||||
columnDef.definition?.dataType || '';
|
||||
|
||||
// Handle the column definition and add to columns array
|
||||
if (columnName) {
|
||||
// Check if the column has a PRIMARY KEY constraint inline
|
||||
const isPrimaryKey =
|
||||
columnDef.primary_key ===
|
||||
'primary key' ||
|
||||
columnDef.definition?.constraint ===
|
||||
'primary key';
|
||||
|
||||
columns.push({
|
||||
name: columnName,
|
||||
type: dataType,
|
||||
nullable:
|
||||
columnDef.nullable?.type !==
|
||||
'not null',
|
||||
primaryKey: isPrimaryKey,
|
||||
unique: columnDef.unique === 'unique',
|
||||
typeArgs: getTypeArgs(
|
||||
columnDef.definition
|
||||
),
|
||||
default: columnDef.default_val
|
||||
? buildSQLFromAST(
|
||||
columnDef.default_val
|
||||
)
|
||||
: undefined,
|
||||
increment:
|
||||
columnDef.auto_increment ===
|
||||
'auto_increment',
|
||||
});
|
||||
}
|
||||
} else if (def.resource === 'constraint') {
|
||||
// Handle constraint definitions
|
||||
const constraintDef =
|
||||
def as ConstraintDefinition;
|
||||
if (
|
||||
constraintDef.constraint_type ===
|
||||
'primary key'
|
||||
) {
|
||||
// Check if definition is an array (standalone PRIMARY KEY constraint)
|
||||
if (
|
||||
Array.isArray(constraintDef.definition)
|
||||
) {
|
||||
// Extract column names from the constraint definition
|
||||
for (const colDef of constraintDef.definition) {
|
||||
if (
|
||||
typeof colDef === 'object' &&
|
||||
'type' in colDef &&
|
||||
colDef.type === 'column_ref' &&
|
||||
'column' in colDef &&
|
||||
colDef.column
|
||||
) {
|
||||
const pkColumnName =
|
||||
extractColumnName(colDef);
|
||||
|
||||
// Find and mark the column as primary key
|
||||
const column = columns.find(
|
||||
(col) =>
|
||||
col.name ===
|
||||
pkColumnName
|
||||
);
|
||||
if (column) {
|
||||
column.primaryKey = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add a primary key index
|
||||
const pkColumnNames =
|
||||
constraintDef.definition
|
||||
.filter(
|
||||
(colDef: ColumnReference) =>
|
||||
typeof colDef ===
|
||||
'object' &&
|
||||
'type' in colDef &&
|
||||
colDef.type ===
|
||||
'column_ref' &&
|
||||
'column' in colDef &&
|
||||
colDef.column
|
||||
)
|
||||
.map(
|
||||
(colDef: ColumnReference) =>
|
||||
extractColumnName(
|
||||
colDef
|
||||
)
|
||||
);
|
||||
|
||||
if (pkColumnNames.length > 0) {
|
||||
indexes.push({
|
||||
name: `pk_${tableName}`,
|
||||
columns: pkColumnNames,
|
||||
unique: true,
|
||||
});
|
||||
}
|
||||
} else if (
|
||||
constraintDef.definition &&
|
||||
typeof constraintDef.definition ===
|
||||
'object' &&
|
||||
!Array.isArray(
|
||||
constraintDef.definition
|
||||
) &&
|
||||
'columns' in constraintDef.definition
|
||||
) {
|
||||
// Handle different format where columns are in def.definition.columns
|
||||
const colDefs =
|
||||
constraintDef.definition.columns ||
|
||||
[];
|
||||
for (const colName of colDefs) {
|
||||
// Find and mark the column as primary key
|
||||
const column = columns.find(
|
||||
(col) => col.name === colName
|
||||
);
|
||||
if (column) {
|
||||
column.primaryKey = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Add a primary key index
|
||||
if (colDefs.length > 0) {
|
||||
indexes.push({
|
||||
name: `pk_${tableName}`,
|
||||
columns: colDefs,
|
||||
unique: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
} else if (
|
||||
constraintDef.constraint_type ===
|
||||
'unique' &&
|
||||
constraintDef.definition &&
|
||||
typeof constraintDef.definition ===
|
||||
'object' &&
|
||||
!Array.isArray(constraintDef.definition) &&
|
||||
'columns' in constraintDef.definition
|
||||
) {
|
||||
// Handle unique constraint
|
||||
const columnDefs =
|
||||
constraintDef.definition.columns || [];
|
||||
columnDefs.forEach(
|
||||
(
|
||||
uniqueCol: string | ColumnReference
|
||||
) => {
|
||||
const colName =
|
||||
typeof uniqueCol === 'string'
|
||||
? uniqueCol
|
||||
: extractColumnName(
|
||||
uniqueCol
|
||||
);
|
||||
const col = columns.find(
|
||||
(c) => c.name === colName
|
||||
);
|
||||
if (col) {
|
||||
col.unique = true;
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
// Add as a unique index
|
||||
if (columnDefs.length > 0) {
|
||||
indexes.push({
|
||||
name:
|
||||
constraintDef.constraint_name ||
|
||||
`${tableName}_${
|
||||
typeof columnDefs[0] ===
|
||||
'string'
|
||||
? columnDefs[0]
|
||||
: extractColumnName(
|
||||
columnDefs[0] as ColumnReference
|
||||
)
|
||||
}_key`,
|
||||
columns: columnDefs.map(
|
||||
(
|
||||
col:
|
||||
| string
|
||||
| ColumnReference
|
||||
) =>
|
||||
typeof col === 'string'
|
||||
? col
|
||||
: extractColumnName(col)
|
||||
),
|
||||
unique: true,
|
||||
});
|
||||
}
|
||||
} else if (
|
||||
constraintDef.constraint_type ===
|
||||
'foreign key' ||
|
||||
constraintDef.constraint_type ===
|
||||
'FOREIGN KEY'
|
||||
) {
|
||||
// Handle foreign key directly at this level
|
||||
|
||||
// Extra code for this specific format
|
||||
let sourceColumns: string[] = [];
|
||||
if (
|
||||
constraintDef.definition &&
|
||||
Array.isArray(constraintDef.definition)
|
||||
) {
|
||||
sourceColumns =
|
||||
constraintDef.definition.map(
|
||||
(col: ColumnReference) => {
|
||||
const colName =
|
||||
extractColumnName(col);
|
||||
return colName;
|
||||
}
|
||||
);
|
||||
} else if (
|
||||
constraintDef.columns &&
|
||||
Array.isArray(constraintDef.columns)
|
||||
) {
|
||||
sourceColumns =
|
||||
constraintDef.columns.map(
|
||||
(
|
||||
col:
|
||||
| string
|
||||
| ColumnReference
|
||||
) => {
|
||||
const colName =
|
||||
typeof col === 'string'
|
||||
? col
|
||||
: extractColumnName(
|
||||
col
|
||||
);
|
||||
return colName;
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
const reference =
|
||||
constraintDef.reference_definition ||
|
||||
constraintDef.reference;
|
||||
if (reference && sourceColumns.length > 0) {
|
||||
// Process similar to the constraint resource case
|
||||
let targetTable = '';
|
||||
let targetSchema = '';
|
||||
|
||||
if (reference.table) {
|
||||
if (
|
||||
typeof reference.table ===
|
||||
'object'
|
||||
) {
|
||||
if (
|
||||
Array.isArray(
|
||||
reference.table
|
||||
) &&
|
||||
reference.table.length > 0
|
||||
) {
|
||||
targetTable =
|
||||
reference.table[0]
|
||||
.table || '';
|
||||
targetSchema =
|
||||
reference.table[0]
|
||||
.schema ||
|
||||
reference.table[0].db ||
|
||||
'';
|
||||
} else {
|
||||
const tableRef =
|
||||
reference.table as TableReference;
|
||||
targetTable =
|
||||
tableRef.table || '';
|
||||
targetSchema =
|
||||
tableRef.schema ||
|
||||
tableRef.db ||
|
||||
'';
|
||||
}
|
||||
} else {
|
||||
targetTable =
|
||||
reference.table as string;
|
||||
|
||||
// Check if targetTable contains a schema prefix (schema.table)
|
||||
if (targetTable.includes('.')) {
|
||||
const parts =
|
||||
targetTable.split('.');
|
||||
targetSchema =
|
||||
parts[0].replace(
|
||||
/"/g,
|
||||
''
|
||||
);
|
||||
targetTable =
|
||||
parts[1].replace(
|
||||
/"/g,
|
||||
''
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If no target schema was found, use default public schema
|
||||
if (!targetSchema) {
|
||||
targetSchema = 'public';
|
||||
}
|
||||
|
||||
let targetColumns: string[] = [];
|
||||
if (
|
||||
reference.columns &&
|
||||
Array.isArray(reference.columns)
|
||||
) {
|
||||
targetColumns =
|
||||
reference.columns.map(
|
||||
(
|
||||
col:
|
||||
| string
|
||||
| ColumnReference
|
||||
) => {
|
||||
const colName =
|
||||
typeof col ===
|
||||
'string'
|
||||
? col
|
||||
: extractColumnName(
|
||||
col
|
||||
);
|
||||
return colName;
|
||||
}
|
||||
);
|
||||
} else if (
|
||||
reference.definition &&
|
||||
Array.isArray(reference.definition)
|
||||
) {
|
||||
targetColumns =
|
||||
reference.definition.map(
|
||||
(col: ColumnReference) => {
|
||||
const colName =
|
||||
extractColumnName(
|
||||
col
|
||||
);
|
||||
return colName;
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// Create relationships
|
||||
if (
|
||||
targetColumns.length > 0 &&
|
||||
targetTable
|
||||
) {
|
||||
for (
|
||||
let i = 0;
|
||||
i <
|
||||
Math.min(
|
||||
sourceColumns.length,
|
||||
targetColumns.length
|
||||
);
|
||||
i++
|
||||
) {
|
||||
// Look up target table ID using the helper function
|
||||
const targetTableId =
|
||||
getTableIdWithSchemaSupport(
|
||||
tableMap,
|
||||
targetTable,
|
||||
targetSchema
|
||||
);
|
||||
|
||||
if (!targetTableId) {
|
||||
continue; // Skip this relationship if target table not found
|
||||
}
|
||||
|
||||
const fk: SQLForeignKey = {
|
||||
name:
|
||||
constraintDef.constraint_name ||
|
||||
`${tableName}_${sourceColumns[i]}_fkey`,
|
||||
sourceTable: tableName,
|
||||
sourceSchema: schemaName,
|
||||
sourceColumn:
|
||||
sourceColumns[i],
|
||||
targetTable,
|
||||
targetSchema,
|
||||
targetColumn:
|
||||
targetColumns[i],
|
||||
sourceTableId: tableId,
|
||||
targetTableId,
|
||||
updateAction:
|
||||
reference.on_update,
|
||||
deleteAction:
|
||||
reference.on_delete,
|
||||
};
|
||||
|
||||
relationships.push(fk);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// Create the table object
|
||||
const table: SQLTable = {
|
||||
id: tableId,
|
||||
name: tableName,
|
||||
schema: schemaName,
|
||||
columns,
|
||||
indexes,
|
||||
order: tables.length,
|
||||
};
|
||||
|
||||
// Set comment if available (if exists in the parser's output)
|
||||
if (
|
||||
'comment' in createTableStmt &&
|
||||
typeof createTableStmt.comment === 'string'
|
||||
) {
|
||||
table.comment = createTableStmt.comment;
|
||||
}
|
||||
|
||||
tables.push(table);
|
||||
} else if (stmt.type === 'create' && stmt.keyword === 'index') {
|
||||
// Handle CREATE INDEX statements
|
||||
const createIndexStmt = stmt as CreateIndexStatement;
|
||||
if (createIndexStmt.table) {
|
||||
// Extract table name and schema
|
||||
let tableName = '';
|
||||
let schemaName = '';
|
||||
|
||||
if (typeof createIndexStmt.table === 'string') {
|
||||
tableName = createIndexStmt.table;
|
||||
} else if (Array.isArray(createIndexStmt.table)) {
|
||||
if (createIndexStmt.table.length > 0) {
|
||||
tableName = createIndexStmt.table[0].table || '';
|
||||
schemaName = createIndexStmt.table[0].schema || '';
|
||||
}
|
||||
} else {
|
||||
// Direct object reference
|
||||
tableName = createIndexStmt.table.table || '';
|
||||
schemaName = createIndexStmt.table.schema || '';
|
||||
}
|
||||
|
||||
// Check if tableName contains a schema prefix (schema.table)
|
||||
if (!schemaName && tableName.includes('.')) {
|
||||
const parts = tableName.split('.');
|
||||
schemaName = parts[0].replace(/"/g, '');
|
||||
tableName = parts[1].replace(/"/g, '');
|
||||
}
|
||||
|
||||
// If still no schema, use public
|
||||
if (!schemaName) {
|
||||
schemaName = 'public';
|
||||
}
|
||||
|
||||
// Find the table in our collection using the helper function
|
||||
const table = findTableWithSchemaSupport(
|
||||
tables,
|
||||
tableName,
|
||||
schemaName
|
||||
);
|
||||
|
||||
if (table) {
|
||||
// Extract column names from index columns
|
||||
let columns: string[] = [];
|
||||
|
||||
// Check different possible structures for index columns
|
||||
if (
|
||||
createIndexStmt.columns &&
|
||||
Array.isArray(createIndexStmt.columns)
|
||||
) {
|
||||
// Some PostgreSQL parsers use 'columns'
|
||||
columns = createIndexStmt.columns
|
||||
.map((col: ColumnReference) =>
|
||||
extractColumnName(col)
|
||||
)
|
||||
.filter((col: string) => col !== '');
|
||||
} else if (
|
||||
createIndexStmt.index_columns &&
|
||||
Array.isArray(createIndexStmt.index_columns)
|
||||
) {
|
||||
// Other parsers use 'index_columns'
|
||||
columns = createIndexStmt.index_columns
|
||||
.map(
|
||||
(
|
||||
col:
|
||||
| { column?: ColumnReference }
|
||||
| ColumnReference
|
||||
) => {
|
||||
const colRef =
|
||||
'column' in col ? col.column : col;
|
||||
const colName = extractColumnName(
|
||||
colRef || col
|
||||
);
|
||||
return colName;
|
||||
}
|
||||
)
|
||||
.filter((col: string) => col !== '');
|
||||
}
|
||||
|
||||
if (columns.length > 0) {
|
||||
const indexName =
|
||||
createIndexStmt.index ||
|
||||
createIndexStmt.index_name ||
|
||||
`idx_${tableName}_${columns.join('_')}`;
|
||||
|
||||
table.indexes.push({
|
||||
name: indexName,
|
||||
columns,
|
||||
unique:
|
||||
createIndexStmt.index_type === 'unique' ||
|
||||
createIndexStmt.unique === true,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (stmt.type === 'alter' && stmt.keyword === 'table') {
|
||||
// Process ALTER TABLE statements for foreign keys
|
||||
const alterTableStmt = stmt as AlterTableStatement;
|
||||
if (
|
||||
alterTableStmt.table &&
|
||||
alterTableStmt.expr &&
|
||||
alterTableStmt.expr.length > 0
|
||||
) {
|
||||
// Fix the table name extraction - table is an array in ALTER TABLE statements
|
||||
let tableName = '';
|
||||
let schemaName = '';
|
||||
|
||||
if (
|
||||
Array.isArray(alterTableStmt.table) &&
|
||||
alterTableStmt.table.length > 0
|
||||
) {
|
||||
const tableObj = alterTableStmt.table[0];
|
||||
tableName = tableObj.table || '';
|
||||
// Check for schema in both 'schema' and 'db' fields
|
||||
schemaName = tableObj.schema || tableObj.db || '';
|
||||
} else if (typeof alterTableStmt.table === 'object') {
|
||||
const tableRef = alterTableStmt.table as TableReference;
|
||||
tableName = tableRef.table || '';
|
||||
// Check for schema in both 'schema' and 'db' fields
|
||||
schemaName = tableRef.schema || tableRef.db || '';
|
||||
} else {
|
||||
tableName = alterTableStmt.table;
|
||||
}
|
||||
|
||||
// Check if tableName contains a schema prefix (schema.table)
|
||||
if (!schemaName && tableName.includes('.')) {
|
||||
const parts = tableName.split('.');
|
||||
schemaName = parts[0].replace(/"/g, '');
|
||||
tableName = parts[1].replace(/"/g, '');
|
||||
}
|
||||
|
||||
// If still no schema, use default
|
||||
if (!schemaName) {
|
||||
schemaName = 'public';
|
||||
}
|
||||
|
||||
// Find this table in our collection using the helper function
|
||||
const table = findTableWithSchemaSupport(
|
||||
tables,
|
||||
tableName,
|
||||
schemaName
|
||||
);
|
||||
|
||||
if (!table) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Process each expression in the ALTER TABLE
|
||||
alterTableStmt.expr.forEach((expr: AlterTableExprItem) => {
|
||||
// Check multiple variations of constraint format
|
||||
if (expr.action === 'add' && expr.create_definitions) {
|
||||
// Check for foreign key constraint
|
||||
if (
|
||||
expr.create_definitions.constraint_type ===
|
||||
'FOREIGN KEY' ||
|
||||
expr.create_definitions.constraint_type ===
|
||||
'foreign key'
|
||||
) {
|
||||
const createDefs = expr.create_definitions;
|
||||
|
||||
// Extract source columns
|
||||
let sourceColumns: string[] = [];
|
||||
if (
|
||||
createDefs.definition &&
|
||||
Array.isArray(createDefs.definition)
|
||||
) {
|
||||
sourceColumns = createDefs.definition.map(
|
||||
(col: ColumnReference) => {
|
||||
const colName =
|
||||
extractColumnName(col);
|
||||
return colName;
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// Extract target table and schema
|
||||
const reference =
|
||||
createDefs.reference_definition;
|
||||
|
||||
// Declare target variables
|
||||
let targetTable = '';
|
||||
let targetSchema = '';
|
||||
let targetColumns: string[] = [];
|
||||
|
||||
if (reference && reference.table) {
|
||||
if (typeof reference.table === 'object') {
|
||||
if (
|
||||
Array.isArray(reference.table) &&
|
||||
reference.table.length > 0
|
||||
) {
|
||||
targetTable =
|
||||
reference.table[0].table || '';
|
||||
targetSchema =
|
||||
reference.table[0].schema ||
|
||||
reference.table[0].db ||
|
||||
'';
|
||||
} else {
|
||||
const tableRef =
|
||||
reference.table as TableReference;
|
||||
targetTable = tableRef.table || '';
|
||||
targetSchema =
|
||||
tableRef.schema ||
|
||||
tableRef.db ||
|
||||
'';
|
||||
}
|
||||
} else {
|
||||
targetTable = reference.table as string;
|
||||
|
||||
// Check if targetTable contains a schema prefix (schema.table)
|
||||
if (targetTable.includes('.')) {
|
||||
const parts =
|
||||
targetTable.split('.');
|
||||
targetSchema = parts[0].replace(
|
||||
/"/g,
|
||||
''
|
||||
);
|
||||
targetTable = parts[1].replace(
|
||||
/"/g,
|
||||
''
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If no target schema was found, use default schema
|
||||
if (!targetSchema) {
|
||||
targetSchema = 'public';
|
||||
}
|
||||
|
||||
// Extract target columns
|
||||
if (
|
||||
reference &&
|
||||
reference.definition &&
|
||||
Array.isArray(reference.definition)
|
||||
) {
|
||||
targetColumns = reference.definition.map(
|
||||
(col: ColumnReference) => {
|
||||
const colName =
|
||||
extractColumnName(col);
|
||||
return colName;
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// Create relationships
|
||||
if (
|
||||
sourceColumns.length > 0 &&
|
||||
targetTable &&
|
||||
targetColumns.length > 0
|
||||
) {
|
||||
for (
|
||||
let i = 0;
|
||||
i <
|
||||
Math.min(
|
||||
sourceColumns.length,
|
||||
targetColumns.length
|
||||
);
|
||||
i++
|
||||
) {
|
||||
// Look up source and target table IDs
|
||||
const sourceTableId =
|
||||
getTableIdWithSchemaSupport(
|
||||
tableMap,
|
||||
tableName,
|
||||
schemaName
|
||||
);
|
||||
const targetTableId =
|
||||
getTableIdWithSchemaSupport(
|
||||
tableMap,
|
||||
targetTable,
|
||||
targetSchema
|
||||
);
|
||||
|
||||
if (!sourceTableId) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!targetTableId) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Safe to access properties with null check
|
||||
const updateAction =
|
||||
reference?.on_update;
|
||||
const deleteAction =
|
||||
reference?.on_delete;
|
||||
|
||||
const fk: SQLForeignKey = {
|
||||
name:
|
||||
'constraint' in createDefs
|
||||
? createDefs.constraint ||
|
||||
`${tableName}_${sourceColumns[i]}_fkey`
|
||||
: `${tableName}_${sourceColumns[i]}_fkey`,
|
||||
sourceTable: tableName,
|
||||
sourceSchema: schemaName,
|
||||
sourceColumn: sourceColumns[i],
|
||||
targetTable,
|
||||
targetSchema,
|
||||
targetColumn: targetColumns[i],
|
||||
sourceTableId,
|
||||
targetTableId,
|
||||
updateAction,
|
||||
deleteAction,
|
||||
};
|
||||
|
||||
relationships.push(fk);
|
||||
}
|
||||
}
|
||||
} else if (
|
||||
'resource' in expr.create_definitions &&
|
||||
expr.create_definitions.resource ===
|
||||
'constraint'
|
||||
) {
|
||||
// For backward compatibility, keep the existing check
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Update table IDs in relationships and fix missing target table IDs
|
||||
relationships.forEach((rel) => {
|
||||
// Ensure schemas are set to 'public' if empty
|
||||
if (!rel.sourceSchema) rel.sourceSchema = 'public';
|
||||
if (!rel.targetSchema) rel.targetSchema = 'public';
|
||||
|
||||
// Only check/fix sourceTableId if not already set
|
||||
if (!rel.sourceTableId) {
|
||||
rel.sourceTableId =
|
||||
getTableIdWithSchemaSupport(
|
||||
tableMap,
|
||||
rel.sourceTable,
|
||||
rel.sourceSchema
|
||||
) || '';
|
||||
}
|
||||
|
||||
// Check/fix targetTableId if not already set
|
||||
if (!rel.targetTableId) {
|
||||
rel.targetTableId =
|
||||
getTableIdWithSchemaSupport(
|
||||
tableMap,
|
||||
rel.targetTable,
|
||||
rel.targetSchema
|
||||
) || '';
|
||||
}
|
||||
});
|
||||
|
||||
// Filter out relationships with missing source table IDs or target table IDs
|
||||
const validRelationships = relationships.filter(
|
||||
(rel) => rel.sourceTableId && rel.targetTableId
|
||||
);
|
||||
|
||||
return { tables, relationships: validRelationships };
|
||||
} catch (error: unknown) {
|
||||
throw new Error(
|
||||
`Error parsing PostgreSQL SQL: ${(error as Error).message}`
|
||||
);
|
||||
}
|
||||
}
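// A minimal usage sketch (illustration only, not part of the original module): feed a small
// DDL script to fromPostgres and read back the parsed tables plus the relationships that
// survived the sourceTableId/targetTableId filter. The schema below is hypothetical.
async function exampleFromPostgresUsage(): Promise<void> {
    const ddl = `
        CREATE TABLE public.users (id serial PRIMARY KEY, email varchar(255) UNIQUE);
        CREATE TABLE public.posts (
            id serial PRIMARY KEY,
            user_id integer,
            CONSTRAINT posts_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users (id)
        );
    `;
    const { tables, relationships } = await fromPostgres(ddl);
    console.log(tables.map((t) => `${t.schema}.${t.name}`)); // expected: ['public.users', 'public.posts']
    console.log(relationships.length); // expected: 1 (posts.user_id -> users.id)
}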
|
||||
@@ -0,0 +1,239 @@
|
||||
import type { SQLASTNode } from '../../common';
|
||||
|
||||
// Set up the SQL parser with SQLite dialect
|
||||
export const parserOpts = {
|
||||
database: 'sqlite',
|
||||
};
|
||||
|
||||
// Type definitions for SQLite AST nodes
|
||||
export interface TableReference {
|
||||
db?: string;
|
||||
schema?: string;
|
||||
table: string;
|
||||
}
|
||||
|
||||
export interface ColumnReference {
|
||||
expr?: {
|
||||
column?: string;
|
||||
table?: string;
|
||||
};
|
||||
column?: string;
|
||||
table?: string;
|
||||
}
|
||||
|
||||
export interface ColumnDefinition {
|
||||
column: ColumnReference;
|
||||
dataType: {
|
||||
dataType: string;
|
||||
length?: number | number[];
|
||||
suffix?: string[];
|
||||
};
|
||||
primary_key?: boolean;
|
||||
autoIncrement?: boolean;
|
||||
notNull?: boolean;
|
||||
unique?: boolean;
|
||||
default_val?: {
|
||||
value: string | number;
|
||||
type: string;
|
||||
};
|
||||
reference?: {
|
||||
table: TableReference;
|
||||
columns: ColumnReference[];
|
||||
};
|
||||
}
|
||||
|
||||
export interface ConstraintDefinition {
|
||||
constraint_type: string;
|
||||
columns?: ColumnReference[];
|
||||
definition?: {
|
||||
columns?: ColumnReference[];
|
||||
table?: TableReference;
|
||||
reference_columns?: ColumnReference[];
|
||||
reference?: {
|
||||
table: TableReference;
|
||||
columns: ColumnReference[];
|
||||
};
|
||||
};
|
||||
reference?: {
|
||||
table: TableReference;
|
||||
columns: ColumnReference[];
|
||||
};
|
||||
constraint_name?: string;
|
||||
}
|
||||
|
||||
export interface CreateTableStatement extends SQLASTNode {
|
||||
type: 'create';
|
||||
keyword: 'table';
|
||||
temporary?: boolean;
|
||||
table: TableReference | TableReference[];
|
||||
create_definitions?: (ColumnDefinition | ConstraintDefinition)[];
|
||||
as?: unknown;
|
||||
if_not_exists?: boolean;
|
||||
}
|
||||
|
||||
export interface CreateIndexStatement extends SQLASTNode {
|
||||
type: 'create';
|
||||
keyword: 'index';
|
||||
index: {
|
||||
name: string;
|
||||
type?: string;
|
||||
};
|
||||
table: TableReference | TableReference[];
|
||||
columns?: ColumnReference[];
|
||||
index_type?: string;
|
||||
unique?: boolean;
|
||||
concurrently?: boolean;
|
||||
if_not_exists?: boolean;
|
||||
}
|
||||
|
||||
export interface AlterTableExprItem {
|
||||
action: string;
|
||||
name?: ColumnReference;
|
||||
dataType?: {
|
||||
dataType: string;
|
||||
length?: number | number[];
|
||||
};
|
||||
expr?: {
|
||||
constraint_type?: string;
|
||||
columns?: ColumnReference[];
|
||||
reference?: {
|
||||
table: TableReference;
|
||||
columns: ColumnReference[];
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
export interface AlterTableStatement extends SQLASTNode {
|
||||
type: 'alter';
|
||||
table: TableReference;
|
||||
expr?: AlterTableExprItem[];
|
||||
}
|
||||
|
||||
/**
 * Helper function to extract column name from column reference
 */
export function extractColumnName(column: ColumnReference): string {
    if (typeof column === 'string') {
        return column;
    }

    if (column.column) {
        return column.column;
    }

    if (column.expr && column.expr.column) {
        return column.expr.column;
    }

    return '';
}

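// Illustration (assumption, not in the original file): the helper accepts both the flat
// { column } shape and the nested { expr: { column } } shape emitted by node-sql-parser.
console.log(extractColumnName({ column: 'user_id' })); // -> 'user_id'
console.log(extractColumnName({ expr: { column: 'user_id' } })); // -> 'user_id'
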
/**
 * Helper function to extract type arguments (e.g., size, precision, scale)
 */
export function getTypeArgs(dataType?: {
    dataType: string;
    length?: number | number[];
}): {
    size: number;
    precision?: number | undefined;
    scale?: number | undefined;
} {
    const result = {
        size: 0,
        precision: undefined as number | undefined,
        scale: undefined as number | undefined,
    };

    if (!dataType || !dataType.length) {
        return result;
    }

    if (typeof dataType.length === 'number') {
        result.size = dataType.length;
    } else if (Array.isArray(dataType.length)) {
        if (dataType.length.length >= 1) {
            result.size = dataType.length[0] as number;
        }
        if (dataType.length.length >= 2) {
            result.precision = dataType.length[0] as number;
            result.scale = dataType.length[1] as number;
        }
    }

    return result;
}
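// Illustration (assumption, not in the original file): a single length maps to `size`,
// while a [precision, scale] pair fills all three fields.
console.log(getTypeArgs({ dataType: 'VARCHAR', length: 255 }));
// -> { size: 255, precision: undefined, scale: undefined }
console.log(getTypeArgs({ dataType: 'DECIMAL', length: [10, 2] }));
// -> { size: 10, precision: 10, scale: 2 }
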
|
||||
|
||||
/**
|
||||
* Find a table in the collection that matches the table name and schema name
|
||||
*/
|
||||
export function findTableWithSchemaSupport(
|
||||
tables: { id: string; name: string; schema?: string }[],
|
||||
tableName: string,
|
||||
schemaName?: string
|
||||
): { id: string; name: string; schema?: string } | undefined {
|
||||
return tables.find(
|
||||
(t) =>
|
||||
t.name === tableName &&
|
||||
(!schemaName || t.schema === schemaName || !t.schema)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a table ID with schema support
|
||||
*/
|
||||
export function getTableIdWithSchemaSupport(
|
||||
tableName: string,
|
||||
schemaName?: string
|
||||
): string {
|
||||
return schemaName ? `${schemaName}.${tableName}` : tableName;
|
||||
}
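// Illustration (assumption, not in the original file): for SQLite the table "ID" is simply
// the qualified name, so lookups with and without a schema stay consistent.
console.log(getTableIdWithSchemaSupport('users')); // -> 'users'
console.log(getTableIdWithSchemaSupport('users', 'main')); // -> 'main.users'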
|
||||
|
||||
/**
|
||||
* Validates a foreign key relationship to ensure it refers to valid tables and columns
|
||||
*/
|
||||
export function isValidForeignKeyRelationship(
|
||||
relationship: {
|
||||
sourceTable: string;
|
||||
sourceColumn: string;
|
||||
targetTable: string;
|
||||
targetColumn: string;
|
||||
},
|
||||
tables: { id: string; name: string; schema?: string }[]
|
||||
): boolean {
|
||||
// Check for empty values
|
||||
if (
|
||||
!relationship.sourceTable ||
|
||||
!relationship.sourceColumn ||
|
||||
!relationship.targetTable ||
|
||||
!relationship.targetColumn
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check for SQL keywords that might have been mistakenly captured
|
||||
const invalidKeywords = [
|
||||
'CREATE',
|
||||
'TABLE',
|
||||
'FOREIGN',
|
||||
'KEY',
|
||||
'REFERENCES',
|
||||
'PRIMARY',
|
||||
];
|
||||
if (
|
||||
invalidKeywords.includes(relationship.sourceColumn.toUpperCase()) ||
|
||||
invalidKeywords.includes(relationship.targetColumn.toUpperCase())
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Source table must exist in our schema
|
||||
const sourceTableExists = tables.some(
|
||||
(t) => t.name === relationship.sourceTable
|
||||
);
|
||||
if (!sourceTableExists) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
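// Illustration (assumption, not in the original file): a relationship whose source column is
// really a captured SQL keyword is rejected even though the source table exists.
const exampleTables = [{ id: 'users', name: 'users' }];
console.log(
    isValidForeignKeyRelationship(
        {
            sourceTable: 'users',
            sourceColumn: 'KEY',
            targetTable: 'users',
            targetColumn: 'id',
        },
        exampleTables
    )
); // -> false, because 'KEY' is in the invalid-keyword list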
|
||||
src/lib/data/sql-import/dialect-importers/sqlite/sqlite.ts (new file, 584 lines)
@@ -0,0 +1,584 @@
|
||||
import type {
|
||||
SQLParserResult,
|
||||
SQLTable,
|
||||
SQLColumn,
|
||||
SQLIndex,
|
||||
SQLForeignKey,
|
||||
SQLASTNode,
|
||||
} from '../../common';
|
||||
import type {
|
||||
TableReference,
|
||||
ColumnReference,
|
||||
ColumnDefinition,
|
||||
ConstraintDefinition,
|
||||
CreateTableStatement,
|
||||
CreateIndexStatement,
|
||||
AlterTableStatement,
|
||||
} from './sqlite-common';
|
||||
import {
|
||||
parserOpts,
|
||||
extractColumnName,
|
||||
getTypeArgs,
|
||||
getTableIdWithSchemaSupport,
|
||||
isValidForeignKeyRelationship,
|
||||
} from './sqlite-common';
|
||||
|
||||
/**
|
||||
* SQLite-specific parsing logic
|
||||
*/
|
||||
export async function fromSQLite(sqlContent: string): Promise<SQLParserResult> {
|
||||
const tables: SQLTable[] = [];
|
||||
const relationships: SQLForeignKey[] = [];
|
||||
const tableMap: Record<string, string> = {}; // Maps table name to its ID
|
||||
|
||||
try {
|
||||
// Parse the SQL DDL statements
|
||||
const { Parser } = await import('node-sql-parser');
|
||||
const parser = new Parser();
|
||||
|
||||
const ast = parser.astify(
|
||||
sqlContent,
|
||||
parserOpts
|
||||
) as unknown as SQLASTNode[];
|
||||
|
||||
if (!Array.isArray(ast)) {
|
||||
throw new Error('Failed to parse SQL DDL - AST is not an array');
|
||||
}
|
||||
|
||||
// Process each statement
|
||||
ast.forEach((stmt: SQLASTNode) => {
|
||||
// Process CREATE TABLE statements
|
||||
if (stmt.type === 'create' && stmt.keyword === 'table') {
|
||||
processCreateTableStatement(
|
||||
stmt as CreateTableStatement,
|
||||
tables,
|
||||
relationships,
|
||||
tableMap
|
||||
);
|
||||
}
|
||||
// Process CREATE INDEX statements
|
||||
else if (stmt.type === 'create' && stmt.keyword === 'index') {
|
||||
processCreateIndexStatement(
|
||||
stmt as CreateIndexStatement,
|
||||
tables
|
||||
);
|
||||
}
|
||||
// Process ALTER TABLE statements
|
||||
else if (stmt.type === 'alter' && stmt.table) {
|
||||
processAlterTableStatement(stmt as AlterTableStatement, tables);
|
||||
}
|
||||
});
|
||||
|
||||
// Use regex to find foreign keys that the parser might have missed
|
||||
findForeignKeysUsingRegex(sqlContent, tableMap, relationships);
|
||||
|
||||
// Create placeholder tables for any missing referenced tables
|
||||
addPlaceholderTablesForFKReferences(tables, relationships, tableMap);
|
||||
|
||||
// Filter out any invalid relationships
|
||||
const validRelationships = relationships.filter((rel) => {
|
||||
return isValidForeignKeyRelationship(rel, tables);
|
||||
});
|
||||
|
||||
return { tables, relationships: validRelationships };
|
||||
} catch (error) {
|
||||
console.error('Error parsing SQLite SQL:', error);
|
||||
throw error;
|
||||
}
|
||||
}
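// A minimal usage sketch (illustration only, not part of the original module). The inline
// REFERENCES clause below should be picked up by the regex pass even when the parser misses
// it, so one relationship is expected for this hypothetical schema.
async function exampleFromSQLiteUsage(): Promise<void> {
    const ddl = `
        CREATE TABLE users (id INTEGER PRIMARY KEY, email TEXT UNIQUE);
        CREATE TABLE posts (
            id INTEGER PRIMARY KEY,
            user_id INTEGER REFERENCES users(id)
        );
    `;
    const { tables, relationships } = await fromSQLite(ddl);
    console.log(tables.map((t) => t.name)); // expected: ['users', 'posts']
    console.log(relationships.length); // expected: 1 (posts.user_id -> users.id)
}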
|
||||
|
||||
/**
|
||||
* Process a CREATE TABLE statement to extract table and column information
|
||||
*/
|
||||
function processCreateTableStatement(
|
||||
createTableStmt: CreateTableStatement,
|
||||
tables: SQLTable[],
|
||||
_: SQLForeignKey[],
|
||||
tableMap: Record<string, string>
|
||||
): void {
|
||||
// Extract table name and schema
|
||||
let tableName = '';
|
||||
let schemaName = '';
|
||||
|
||||
if (createTableStmt.table && typeof createTableStmt.table === 'object') {
|
||||
// Handle array of tables if needed
|
||||
if (
|
||||
Array.isArray(createTableStmt.table) &&
|
||||
createTableStmt.table.length > 0
|
||||
) {
|
||||
const tableObj = createTableStmt.table[0];
|
||||
tableName = tableObj.table || '';
|
||||
schemaName = tableObj.schema || '';
|
||||
} else {
|
||||
// Direct object reference
|
||||
const tableObj = createTableStmt.table as TableReference;
|
||||
tableName = tableObj.table || '';
|
||||
schemaName = tableObj.schema || '';
|
||||
}
|
||||
}
|
||||
|
||||
// Skip if table name is empty
|
||||
if (!tableName) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Generate a unique ID for the table
|
||||
const tableId = getTableIdWithSchemaSupport(tableName, schemaName);
|
||||
|
||||
// Store the table ID in the map for later reference
|
||||
tableMap[`${schemaName ? schemaName + '.' : ''}${tableName}`] = tableId;
|
||||
tableMap[tableName] = tableId; // Also store without schema for easier lookup
|
||||
|
||||
// Initialize column and index arrays
|
||||
const columns: SQLColumn[] = [];
|
||||
const indexes: SQLIndex[] = [];
|
||||
let primaryKeyColumns: string[] = [];
|
||||
|
||||
// Process column definitions and constraints
|
||||
if (
|
||||
createTableStmt.create_definitions &&
|
||||
Array.isArray(createTableStmt.create_definitions)
|
||||
) {
|
||||
createTableStmt.create_definitions.forEach((def) => {
|
||||
if ('column' in def) {
|
||||
// Process column definition
|
||||
const columnDef = def as ColumnDefinition;
|
||||
const columnName = extractColumnName(columnDef.column);
|
||||
|
||||
if (!columnName) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Extract type information - handle nullable dataType field
|
||||
let typeName = 'text'; // Default to text if no type specified
|
||||
const typeArgs = {
|
||||
length: undefined as number | undefined,
|
||||
precision: undefined as number | undefined,
|
||||
scale: undefined as number | undefined,
|
||||
};
|
||||
|
||||
if (columnDef.dataType) {
|
||||
typeName = columnDef.dataType.dataType || 'text';
|
||||
const args = getTypeArgs(columnDef.dataType);
|
||||
typeArgs.length = args.size > 0 ? args.size : undefined;
|
||||
typeArgs.precision = args.precision;
|
||||
typeArgs.scale = args.scale;
|
||||
}
|
||||
|
||||
// Check if this column is part of the primary key
|
||||
const isPrimaryKey = !!columnDef.primary_key;
|
||||
if (isPrimaryKey) {
|
||||
primaryKeyColumns.push(columnName);
|
||||
}
|
||||
|
||||
// Process default value if present
|
||||
let defaultValue = '';
|
||||
if (columnDef.default_val) {
|
||||
defaultValue = String(columnDef.default_val.value);
|
||||
}
|
||||
|
||||
// Add the column to our collection
|
||||
columns.push({
|
||||
name: columnName,
|
||||
type: typeName,
|
||||
nullable: !columnDef.notNull,
|
||||
primaryKey: isPrimaryKey,
|
||||
unique: !!columnDef.unique,
|
||||
default: defaultValue,
|
||||
increment:
|
||||
isPrimaryKey && typeName.toLowerCase() === 'integer',
|
||||
typeArgs:
|
||||
typeArgs.length || typeArgs.precision || typeArgs.scale
|
||||
? typeArgs
|
||||
: undefined,
|
||||
});
|
||||
} else if ('constraint_type' in def) {
|
||||
// Process constraint definition
|
||||
const constraintDef = def as ConstraintDefinition;
|
||||
|
||||
// Process PRIMARY KEY constraint
|
||||
if (
|
||||
constraintDef.constraint_type === 'primary key' &&
|
||||
constraintDef.columns
|
||||
) {
|
||||
primaryKeyColumns = constraintDef.columns
|
||||
.map(extractColumnName)
|
||||
.filter(Boolean);
|
||||
}
|
||||
|
||||
// Process UNIQUE constraint
|
||||
if (
|
||||
constraintDef.constraint_type === 'unique' &&
|
||||
constraintDef.columns
|
||||
) {
|
||||
const uniqueColumns = constraintDef.columns
|
||||
.map(extractColumnName)
|
||||
.filter(Boolean);
|
||||
|
||||
// Create a unique index for this constraint
|
||||
if (uniqueColumns.length > 0) {
|
||||
const uniqueIndexName =
|
||||
constraintDef.constraint_name ||
|
||||
`uk_${tableName}_${uniqueColumns.join('_')}`;
|
||||
indexes.push({
|
||||
name: uniqueIndexName,
|
||||
columns: uniqueColumns,
|
||||
unique: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Update primary key flags in columns
|
||||
if (primaryKeyColumns.length > 0) {
|
||||
columns.forEach((column) => {
|
||||
if (primaryKeyColumns.includes(column.name)) {
|
||||
column.primaryKey = true;
|
||||
|
||||
// In SQLite, INTEGER PRIMARY KEY is automatically an alias for ROWID (auto-incrementing)
|
||||
if (column.type.toLowerCase() === 'integer') {
|
||||
column.increment = true;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Create the table object
|
||||
tables.push({
|
||||
id: tableId,
|
||||
name: tableName,
|
||||
schema: schemaName || undefined,
|
||||
columns,
|
||||
indexes,
|
||||
order: tables.length,
|
||||
});
|
||||
}
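// Hedged illustration (not part of the original file): for a statement such as
//   CREATE TABLE users (id INTEGER PRIMARY KEY, email TEXT NOT NULL UNIQUE);
// the function above is expected to produce roughly the following columns, with `increment`
// inferred from SQLite's rule that INTEGER PRIMARY KEY aliases the auto-incrementing rowid.
const exampleExpectedColumns: Pick<
    SQLColumn,
    'name' | 'type' | 'primaryKey' | 'increment'
>[] = [
    { name: 'id', type: 'INTEGER', primaryKey: true, increment: true },
    { name: 'email', type: 'TEXT', primaryKey: false, increment: false },
];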
|
||||
|
||||
/**
|
||||
* Process a CREATE INDEX statement to extract index information
|
||||
*/
|
||||
function processCreateIndexStatement(
|
||||
createIndexStmt: CreateIndexStatement,
|
||||
tables: SQLTable[]
|
||||
): void {
|
||||
if (!createIndexStmt.index || !createIndexStmt.table) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Extract table and index information
|
||||
const indexName = createIndexStmt.index.name;
|
||||
let tableName = '';
|
||||
let schemaName = '';
|
||||
|
||||
if (typeof createIndexStmt.table === 'object') {
|
||||
if (
|
||||
Array.isArray(createIndexStmt.table) &&
|
||||
createIndexStmt.table.length > 0
|
||||
) {
|
||||
tableName = createIndexStmt.table[0].table || '';
|
||||
schemaName = createIndexStmt.table[0].schema || '';
|
||||
} else {
|
||||
const tableRef = createIndexStmt.table as TableReference;
|
||||
tableName = tableRef.table || '';
|
||||
schemaName = tableRef.schema || '';
|
||||
}
|
||||
}
|
||||
|
||||
// Find the table in our collection
|
||||
const table = tables.find(
|
||||
(t) => t.name === tableName && (!schemaName || t.schema === schemaName)
|
||||
);
|
||||
|
||||
if (table) {
|
||||
// Extract column names from index columns
|
||||
let columns: string[] = [];
|
||||
|
||||
if (createIndexStmt.columns && Array.isArray(createIndexStmt.columns)) {
|
||||
columns = createIndexStmt.columns
|
||||
.map((col: ColumnReference) => extractColumnName(col))
|
||||
.filter((col: string) => col !== '');
|
||||
}
|
||||
|
||||
if (columns.length > 0) {
|
||||
// Create the index
|
||||
table.indexes.push({
|
||||
name: indexName,
|
||||
columns: columns,
|
||||
unique: !!createIndexStmt.unique,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Process an ALTER TABLE statement to extract changes to table structure
|
||||
*/
|
||||
function processAlterTableStatement(
|
||||
alterTableStmt: AlterTableStatement,
|
||||
tables: SQLTable[]
|
||||
): void {
|
||||
if (!alterTableStmt.table || !alterTableStmt.expr) {
|
||||
return;
|
||||
}
|
||||
|
||||
const tableName = alterTableStmt.table.table;
|
||||
const schemaName = alterTableStmt.table.schema || '';
|
||||
|
||||
// Find the target table
|
||||
const table = tables.find(
|
||||
(t) => t.name === tableName && (!schemaName || t.schema === schemaName)
|
||||
);
|
||||
|
||||
if (!table) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Note: We're relying on the regex approach to find foreign keys from ALTER TABLE statements
|
||||
}
|
||||
|
||||
/**
|
||||
* Uses regular expressions to find foreign key relationships in the SQL content
|
||||
*/
|
||||
function findForeignKeysUsingRegex(
|
||||
sqlContent: string,
|
||||
tableMap: Record<string, string>,
|
||||
relationships: SQLForeignKey[]
|
||||
): void {
|
||||
// Define patterns to find foreign keys
|
||||
const foreignKeyPatterns = [
|
||||
// Pattern for inline column references - REFERENCES table_name(column_name)
|
||||
/\b(\w+)\b\s+\w+(?:\([^)]*\))?\s+(?:NOT\s+NULL\s+)?(?:REFERENCES)\s+["'`]?(\w+)["'`]?\s*\(\s*["'`]?(\w+)["'`]?\s*\)/gi,
|
||||
|
||||
// Pattern: FOREIGN KEY (column_name) REFERENCES table_name(column_name)
|
||||
/FOREIGN\s+KEY\s*\(\s*["'`]?(\w+)["'`]?\s*\)\s+REFERENCES\s+["'`]?(\w+)["'`]?\s*\(\s*["'`]?(\w+)["'`]?\s*\)/gi,
|
||||
|
||||
// Pattern for quoted column names with optional ON DELETE/UPDATE clauses
|
||||
/["'`](\w+)["'`]\s+\w+(?:\([^)]*\))?\s+(?:NOT\s+NULL\s+)?REFERENCES\s+["'`]?(\w+)["'`]?\s*\(\s*["'`]?(\w+)["'`]?\s*\)(?:\s+ON\s+(?:DELETE|UPDATE)\s+[^,)]+)?/gi,
|
||||
];
|
||||
|
||||
// First pass: identify all tables
|
||||
const tableNamePattern =
|
||||
/CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?["'`]?(\w+)["'`]?/gi;
|
||||
let match;
|
||||
|
||||
tableNamePattern.lastIndex = 0;
|
||||
while ((match = tableNamePattern.exec(sqlContent)) !== null) {
|
||||
const tableName = match[1];
|
||||
|
||||
// Skip invalid table names
|
||||
if (!tableName || tableName === 'CREATE') continue;
|
||||
|
||||
// Ensure the table is in our tableMap
|
||||
if (!tableMap[tableName]) {
|
||||
const tableId = getTableIdWithSchemaSupport(tableName);
|
||||
tableMap[tableName] = tableId;
|
||||
}
|
||||
}
|
||||
|
||||
// Track already added relationships to avoid duplicates
|
||||
const addedRelationships = new Set<string>();
|
||||
|
||||
// Second pass: find foreign keys using regex
|
||||
for (const pattern of foreignKeyPatterns) {
|
||||
pattern.lastIndex = 0;
|
||||
while ((match = pattern.exec(sqlContent)) !== null) {
|
||||
const sourceColumn = match[1];
|
||||
const targetTable = match[2];
|
||||
const targetColumn = match[3];
|
||||
|
||||
// Skip if any required component is missing
|
||||
if (!sourceColumn || !targetTable || !targetColumn) continue;
|
||||
|
||||
// Skip invalid column names that might be SQL keywords
|
||||
if (
|
||||
sourceColumn.toUpperCase() === 'CREATE' ||
|
||||
sourceColumn.toUpperCase() === 'FOREIGN' ||
|
||||
sourceColumn.toUpperCase() === 'KEY'
|
||||
)
|
||||
continue;
|
||||
|
||||
// Find the source table by examining the CREATE TABLE statement
|
||||
const tableSection = sqlContent.substring(0, match.index);
|
||||
const lastCreateTablePos = tableSection.lastIndexOf('CREATE TABLE');
|
||||
|
||||
if (lastCreateTablePos === -1) continue; // Skip if not in a CREATE TABLE
|
||||
|
||||
const tableSubstring = tableSection.substring(lastCreateTablePos);
|
||||
const tableMatch =
|
||||
/CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?["'`]?(\w+)["'`]?/i.exec(
|
||||
tableSubstring
|
||||
);
|
||||
|
||||
if (!tableMatch || !tableMatch[1]) continue; // Skip if we can't determine the table
|
||||
|
||||
const sourceTable = tableMatch[1];
|
||||
|
||||
// Create a unique key to track this relationship
|
||||
const relationshipKey = `${sourceTable}.${sourceColumn}-${targetTable}.${targetColumn}`;
|
||||
|
||||
// Skip if we've already added this relationship
|
||||
if (addedRelationships.has(relationshipKey)) continue;
|
||||
addedRelationships.add(relationshipKey);
|
||||
|
||||
// Get table IDs
|
||||
const sourceTableId =
|
||||
tableMap[sourceTable] ||
|
||||
getTableIdWithSchemaSupport(sourceTable);
|
||||
const targetTableId =
|
||||
tableMap[targetTable] ||
|
||||
getTableIdWithSchemaSupport(targetTable);
|
||||
|
||||
// Add the relationship
|
||||
relationships.push({
|
||||
name: `FK_${sourceTable}_${sourceColumn}_${targetTable}`,
|
||||
sourceTable,
|
||||
sourceSchema: '',
|
||||
sourceColumn,
|
||||
targetTable,
|
||||
targetSchema: '',
|
||||
targetColumn,
|
||||
sourceTableId,
|
||||
targetTableId,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Look for additional foreign keys using a more specific pattern for multi-line declarations
|
||||
const multiLineFkPattern =
|
||||
/CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?["'`]?(\w+)["'`]?[^;]+?FOREIGN\s+KEY\s*\(\s*["'`]?(\w+)["'`]?\s*\)\s+REFERENCES\s+["'`]?(\w+)["'`]?\s*\(\s*["'`]?(\w+)["'`]?\s*\)/gi;
|
||||
|
||||
multiLineFkPattern.lastIndex = 0;
|
||||
while ((match = multiLineFkPattern.exec(sqlContent)) !== null) {
|
||||
const sourceTable = match[1];
|
||||
const sourceColumn = match[2];
|
||||
const targetTable = match[3];
|
||||
const targetColumn = match[4];
|
||||
|
||||
// Skip if any part is invalid
|
||||
if (!sourceTable || !sourceColumn || !targetTable || !targetColumn)
|
||||
continue;
|
||||
|
||||
// Create a unique key to track this relationship
|
||||
const relationshipKey = `${sourceTable}.${sourceColumn}-${targetTable}.${targetColumn}`;
|
||||
|
||||
// Skip if we've already added this relationship
|
||||
if (addedRelationships.has(relationshipKey)) continue;
|
||||
addedRelationships.add(relationshipKey);
|
||||
|
||||
// Get table IDs
|
||||
const sourceTableId =
|
||||
tableMap[sourceTable] || getTableIdWithSchemaSupport(sourceTable);
|
||||
const targetTableId =
|
||||
tableMap[targetTable] || getTableIdWithSchemaSupport(targetTable);
|
||||
|
||||
// Add the relationship
|
||||
relationships.push({
|
||||
name: `FK_${sourceTable}_${sourceColumn}_${targetTable}`,
|
||||
sourceTable,
|
||||
sourceSchema: '',
|
||||
sourceColumn,
|
||||
targetTable,
|
||||
targetSchema: '',
|
||||
targetColumn,
|
||||
sourceTableId,
|
||||
targetTableId,
|
||||
});
|
||||
}
|
||||
|
||||
    // Filter out relationships to non-existent tables
    const validRelationships = relationships.filter((rel) => {
        // Ensure the source table exists
        if (!tableMap[rel.sourceTable]) {
            return false;
        }

        // Filter out relationships whose source column is actually a captured SQL keyword
        if (
            rel.sourceColumn.toUpperCase() === 'CREATE' ||
            rel.sourceColumn.toUpperCase() === 'FOREIGN' ||
            rel.sourceColumn.toUpperCase() === 'KEY'
        ) {
            return false;
        }

        return true;
    });

    // Replace the relationships array with the filtered list
    relationships.length = 0;
    validRelationships.forEach((rel) => relationships.push(rel));
}
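// A small, self-contained check (illustration, not part of the original file) of the kind of
// declaration the FOREIGN KEY pattern above is meant to catch. The sample DDL is hypothetical.
const exampleDdl = `
CREATE TABLE orders (
    id INTEGER PRIMARY KEY,
    user_id INTEGER,
    FOREIGN KEY (user_id) REFERENCES users(id)
);`;
const exampleFkPattern =
    /FOREIGN\s+KEY\s*\(\s*["'`]?(\w+)["'`]?\s*\)\s+REFERENCES\s+["'`]?(\w+)["'`]?\s*\(\s*["'`]?(\w+)["'`]?\s*\)/gi;
const exampleMatch = exampleFkPattern.exec(exampleDdl);
console.log(exampleMatch?.[1], exampleMatch?.[2], exampleMatch?.[3]); // -> 'user_id' 'users' 'id'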
|
||||
|
||||
/**
|
||||
* Adds placeholder tables for tables referenced in foreign keys that don't exist in the schema
|
||||
*/
|
||||
function addPlaceholderTablesForFKReferences(
|
||||
tables: SQLTable[],
|
||||
relationships: SQLForeignKey[],
|
||||
tableMap: Record<string, string>
|
||||
): void {
|
||||
// Get all existing table names
|
||||
const existingTableNames = new Set(tables.map((t) => t.name));
|
||||
|
||||
// Find all target tables mentioned in relationships that don't exist
|
||||
const missingTableNames = new Set<string>();
|
||||
|
||||
relationships.forEach((rel) => {
|
||||
if (rel.targetTable && !existingTableNames.has(rel.targetTable)) {
|
||||
missingTableNames.add(rel.targetTable);
|
||||
}
|
||||
});
|
||||
|
||||
// Add placeholder tables for missing tables
|
||||
missingTableNames.forEach((tableName) => {
|
||||
// Generate a table ID
|
||||
const tableId = getTableIdWithSchemaSupport(tableName);
|
||||
|
||||
// Add to table map
|
||||
tableMap[tableName] = tableId;
|
||||
|
||||
// Create minimal placeholder table with the target column as PK
|
||||
const targetColumns = new Set<string>();
|
||||
|
||||
// Collect all referenced columns for this table
|
||||
relationships.forEach((rel) => {
|
||||
if (rel.targetTable === tableName) {
|
||||
targetColumns.add(rel.targetColumn);
|
||||
}
|
||||
});
|
||||
|
||||
// Create columns for the placeholder table
|
||||
const columns: SQLColumn[] = Array.from(targetColumns).map(
|
||||
(colName) => ({
|
||||
name: colName,
|
||||
type: 'unknown',
|
||||
primaryKey: true, // Assume it's a primary key since it's referenced
|
||||
unique: true,
|
||||
nullable: false,
|
||||
})
|
||||
);
|
||||
|
||||
// Add a generic ID column if no columns were found
|
||||
if (columns.length === 0) {
|
||||
columns.push({
|
||||
name: 'id',
|
||||
type: 'unknown',
|
||||
primaryKey: true,
|
||||
unique: true,
|
||||
nullable: false,
|
||||
});
|
||||
}
|
||||
|
||||
// Add the placeholder table
|
||||
tables.push({
|
||||
id: getTableIdWithSchemaSupport(tableName),
|
||||
name: tableName,
|
||||
columns,
|
||||
indexes: [],
|
||||
order: tables.length,
|
||||
// This is a placeholder table for a missing referenced table
|
||||
});
|
||||
});
|
||||
}
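// Hedged illustration (not part of the original file): when the only relationship points at a
// table that was never created, a minimal placeholder table is appended so the FK still has a
// target. All literal values below are hypothetical.
const examplePlaceholderTables: SQLTable[] = [];
const examplePlaceholderRels: SQLForeignKey[] = [
    {
        name: 'FK_posts_user_id_users',
        sourceTable: 'posts',
        sourceSchema: '',
        sourceColumn: 'user_id',
        targetTable: 'users',
        targetSchema: '',
        targetColumn: 'id',
        sourceTableId: 'posts',
        targetTableId: 'users',
    },
];
addPlaceholderTablesForFKReferences(examplePlaceholderTables, examplePlaceholderRels, {});
// examplePlaceholderTables now holds a 'users' placeholder whose 'id' column is a primary key.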
|
||||
@@ -0,0 +1,188 @@
|
||||
import { generateId } from '@/lib/utils';
|
||||
import type { SQLASTNode } from '../../common';
|
||||
|
||||
// Set up the SQL parser with SQL Server dialect
|
||||
export const parserOpts = {
|
||||
database: 'transactsql',
|
||||
};
|
||||
|
||||
// Type definitions for SQL Server AST
|
||||
export interface TableReference {
|
||||
db?: string;
|
||||
schema?: string;
|
||||
table: string;
|
||||
as?: string;
|
||||
}
|
||||
|
||||
export interface ColumnReference {
|
||||
type: 'column_ref';
|
||||
table?: string;
|
||||
column: string;
|
||||
}
|
||||
|
||||
export interface CreateTableStatement extends SQLASTNode {
|
||||
type: 'create';
|
||||
keyword: 'table';
|
||||
table: TableReference | TableReference[];
|
||||
create_definitions: (ColumnDefinition | ConstraintDefinition)[];
|
||||
table_options?: Record<string, unknown>[];
|
||||
if_not_exists?: boolean;
|
||||
}
|
||||
|
||||
export interface CreateIndexStatement extends SQLASTNode {
|
||||
type: 'create';
|
||||
keyword: 'index';
|
||||
index: string;
|
||||
table: TableReference | TableReference[];
|
||||
columns: ColumnReference[];
|
||||
constraint?: string;
|
||||
index_using?: string;
|
||||
index_options?: Record<string, unknown>[];
|
||||
}
|
||||
|
||||
export interface AlterTableStatement extends SQLASTNode {
|
||||
type: 'alter';
|
||||
keyword: 'table';
|
||||
table: TableReference | TableReference[];
|
||||
expr: AlterTableExprItem[];
|
||||
}
|
||||
|
||||
export interface AlterTableExprItem {
|
||||
action: string;
|
||||
column?: ColumnReference | string;
|
||||
definition?: Record<string, unknown>;
|
||||
resource?: string;
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
export interface ColumnDefinition {
|
||||
column: ColumnReference | string;
|
||||
definition?: {
|
||||
dataType: string;
|
||||
length?: number | string;
|
||||
width?: number | string;
|
||||
scale?: number;
|
||||
precision?: number;
|
||||
parentheses?: boolean;
|
||||
suffix?: string[];
|
||||
constraint?: string;
|
||||
[key: string]: unknown;
|
||||
};
|
||||
nullable?: { type: string };
|
||||
primary_key?: string;
|
||||
unique?: string;
|
||||
default_val?: unknown;
|
||||
auto_increment?: string;
|
||||
comment?: string;
|
||||
reference?: Record<string, unknown>;
|
||||
resource: string;
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
export interface ConstraintDefinition {
|
||||
constraint_type: string;
|
||||
constraint?: string;
|
||||
definition?: Array<unknown> | Record<string, unknown>;
|
||||
resource: string;
|
||||
reference?: {
|
||||
table: TableReference;
|
||||
columns: ColumnReference[];
|
||||
on_delete?: string;
|
||||
on_update?: string;
|
||||
};
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract column name from a column reference
|
||||
*/
|
||||
export function extractColumnName(columnRef: ColumnReference | string): string {
|
||||
if (typeof columnRef === 'string') {
|
||||
return columnRef;
|
||||
}
|
||||
|
||||
if (columnRef.type === 'column_ref') {
|
||||
return columnRef.column;
|
||||
}
|
||||
|
||||
return '';
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract type arguments such as length, precision, scale
|
||||
*/
|
||||
export function getTypeArgs(
|
||||
definition?: ColumnDefinition['definition']
|
||||
): { length?: number; precision?: number; scale?: number } | undefined {
|
||||
if (!definition) return undefined;
|
||||
|
||||
const result: { length?: number; precision?: number; scale?: number } = {};
|
||||
|
||||
// Check if length/width is present
|
||||
if (definition.length !== undefined) {
|
||||
result.length = Number(definition.length);
|
||||
} else if (definition.width !== undefined) {
|
||||
result.length = Number(definition.width);
|
||||
}
|
||||
|
||||
// Check if precision is present
|
||||
if (definition.precision !== undefined) {
|
||||
result.precision = Number(definition.precision);
|
||||
}
|
||||
|
||||
// Check if scale is present
|
||||
if (definition.scale !== undefined) {
|
||||
result.scale = Number(definition.scale);
|
||||
}
|
||||
|
||||
return Object.keys(result).length > 0 ? result : undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Find a table in the tables array with schema support
|
||||
*/
|
||||
export function findTableWithSchemaSupport(
|
||||
tables: Array<{ id: string; name: string; schema?: string }>,
|
||||
tableName: string,
|
||||
schemaName?: string
|
||||
): { id: string; name: string; schema?: string } | undefined {
|
||||
// If schema is provided, search for exact match
|
||||
if (schemaName) {
|
||||
return tables.find(
|
||||
(t) => t.name === tableName && t.schema === schemaName
|
||||
);
|
||||
}
|
||||
|
||||
    // No schema provided: prefer a table with no schema or the default 'dbo' schema
|
||||
const exactMatch = tables.find(
|
||||
(t) => t.name === tableName && (!t.schema || t.schema === 'dbo')
|
||||
);
|
||||
if (exactMatch) return exactMatch;
|
||||
|
||||
// Finally, look for any table with matching name regardless of schema
|
||||
return tables.find((t) => t.name === tableName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the ID of a table with schema support, or generate a new ID if not found
|
||||
*/
|
||||
export function getTableIdWithSchemaSupport(
|
||||
tables: Array<{ id: string; name: string; schema?: string }>,
|
||||
tableMap: Record<string, string>,
|
||||
tableName: string,
|
||||
schemaName?: string
|
||||
): string {
|
||||
const table = findTableWithSchemaSupport(tables, tableName, schemaName);
|
||||
if (table) return table.id;
|
||||
|
||||
// If not found, check if we have an entry in tableMap
|
||||
const tableKey = `${schemaName ? schemaName + '.' : ''}${tableName}`;
|
||||
if (tableMap[tableKey]) {
|
||||
return tableMap[tableKey];
|
||||
}
|
||||
|
||||
// Generate a new ID
|
||||
const newId = generateId();
|
||||
tableMap[tableKey] = newId;
|
||||
return newId;
|
||||
}
|
||||
src/lib/data/sql-import/dialect-importers/sqlserver/sqlserver.ts (new file, 863 lines)
@@ -0,0 +1,863 @@
|
||||
import { generateId } from '@/lib/utils';
|
||||
import type {
|
||||
SQLParserResult,
|
||||
SQLTable,
|
||||
SQLColumn,
|
||||
SQLIndex,
|
||||
SQLForeignKey,
|
||||
SQLASTNode,
|
||||
} from '../../common';
|
||||
import { buildSQLFromAST } from '../../common';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import type {
|
||||
TableReference,
|
||||
ColumnReference,
|
||||
ColumnDefinition,
|
||||
ConstraintDefinition,
|
||||
CreateTableStatement,
|
||||
    CreateIndexStatement,
    AlterTableStatement,
} from './sqlserver-common';
import {
    parserOpts,
    extractColumnName,
    getTypeArgs,
    findTableWithSchemaSupport,
} from './sqlserver-common';

/**
 * Helper function to safely build SQL from AST nodes, handling null/undefined/invalid cases
 */
function safelyBuildSQLFromAST(ast: unknown): string | undefined {
    if (!ast) return undefined;

    // Make sure it's a valid AST node with a 'type' property
    if (typeof ast === 'object' && ast !== null && 'type' in ast) {
        return buildSQLFromAST(ast as SQLASTNode, DatabaseType.SQL_SERVER);
    }

    // Return string representation for non-AST objects
    if (ast !== null && (typeof ast === 'string' || typeof ast === 'number')) {
        return String(ast);
    }

    return undefined;
}

/**
 * Preprocess SQL Server script to remove or modify parts that the parser can't handle
 */
function preprocessSQLServerScript(sqlContent: string): string {
    // 1. Remove IF NOT EXISTS ... BEGIN ... END blocks (typically used for schema creation)
    sqlContent = sqlContent.replace(
        /IF\s+NOT\s+EXISTS\s*\([^)]+\)\s*BEGIN\s+[^;]+;\s*END;?/gi,
        ''
    );

    // 2. Remove any GO statements (batch separators)
    sqlContent = sqlContent.replace(/\bGO\b/gi, ';');

    // 3. Remove any EXEC statements
    sqlContent = sqlContent.replace(/EXEC\s*\([^)]+\)\s*;?/gi, '');
    sqlContent = sqlContent.replace(/EXEC\s+[^;]+;/gi, '');

    // 4. Replace any remaining procedural code blocks that might cause issues
    sqlContent = sqlContent.replace(
        /BEGIN\s+TRANSACTION|COMMIT\s+TRANSACTION|ROLLBACK\s+TRANSACTION/gi,
        '-- $&'
    );

    // 5. Special handling for CREATE TABLE with reserved keywords as column names
    // Find CREATE TABLE statements
    const createTablePattern =
        /CREATE\s+TABLE\s+\[?([^\]]*)\]?\.?\[?([^\]]*)\]?\s*\(([^;]*)\)/gi;

    sqlContent = sqlContent.replace(
        createTablePattern,
        (_, schema, tableName, columnDefs) => {
            // Process column definitions to rename problematic columns
            let processedColumnDefs = columnDefs;

            // Replace any column named "column" with "column_name"
            processedColumnDefs = processedColumnDefs.replace(
                /\[column\]/gi,
                '[column_name]'
            );

            // Replace any column named "int" with "int_col"
            processedColumnDefs = processedColumnDefs.replace(
                /\[int\]/gi,
                '[int_col]'
            );

            // Replace any column named "time" with "time_col"
            processedColumnDefs = processedColumnDefs.replace(
                /\[time\]/gi,
                '[time_col]'
            );

            // Replace any column named "order" with "order_column"
            processedColumnDefs = processedColumnDefs.replace(
                /\[order\]/gi,
                '[order_column]'
            );

            // Rebuild the CREATE TABLE statement
            return `CREATE TABLE [${schema || 'dbo'}].[${tableName}] (${processedColumnDefs})`;
        }
    );

    // 6. Handle default value expressions with functions - replace with simpler defaults
    sqlContent = sqlContent.replace(/DEFAULT\s+'\([^)]+\)'/gi, "DEFAULT '0'");
    sqlContent = sqlContent.replace(/DEFAULT\s+\([^)]+\)/gi, 'DEFAULT 0');

    // 7. Split into individual statements to handle them separately
    const statements = sqlContent
        .split(';')
        .filter((stmt) => stmt.trim().length > 0);

    // Filter to keep only CREATE TABLE, CREATE INDEX, and ALTER TABLE statements
    const filteredStatements = statements.filter((stmt) => {
        const trimmedStmt = stmt.trim().toUpperCase();
        return (
            trimmedStmt.startsWith('CREATE TABLE') ||
            trimmedStmt.startsWith('CREATE UNIQUE INDEX') ||
            trimmedStmt.startsWith('CREATE INDEX') ||
            trimmedStmt.startsWith('ALTER TABLE')
        );
    });

    return filteredStatements.join(';') + ';';
}
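
// Illustrative example of the preprocessing above (the sample DDL and the
// resulting string are assumptions chosen to exercise steps 2, 3, 5 and 7,
// not captured output):
//
//   preprocessSQLServerScript(
//       "CREATE TABLE [dbo].[orders] ([id] int, [order] int);\nGO\nEXEC sys.sp_addextendedproperty @name = N'MS_Description';"
//   )
//
// roughly yields a single filtered statement, with the GO separator and the
// EXEC call dropped and the reserved column name [order] renamed:
//
//   "CREATE TABLE [dbo].[orders] ([id] int, [order_column] int);"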

/**
 * Manual parsing of ALTER TABLE ADD CONSTRAINT statements
 * This is a fallback for when the node-sql-parser fails to properly parse the constraints
 */
function parseAlterTableAddConstraint(statements: string[]): {
    fkData: SQLForeignKey[];
    tableMap: Record<string, string>;
} {
    const fkData: SQLForeignKey[] = [];
    const tableMap: Record<string, string> = {};

    // Regular expressions to extract information from ALTER TABLE statements
    const alterTableRegex =
        /ALTER\s+TABLE\s+\[?([^\]]*)\]?\.?\[?([^\]]*)\]?\s+ADD\s+CONSTRAINT\s+\[?([^\]]*)\]?\s+FOREIGN\s+KEY\s*\(\[?([^\]]*)\]?\)\s+REFERENCES\s+\[?([^\]]*)\]?\.?\[?([^\]]*)\]?\s*\(\[?([^\]]*)\]?\)/i;

    for (const stmt of statements) {
        const match = stmt.match(alterTableRegex);
        if (match) {
            const [
                ,
                sourceSchema = 'dbo',
                sourceTable,
                constraintName,
                sourceColumn,
                targetSchema = 'dbo',
                targetTable,
                targetColumn,
            ] = match;

            // Generate IDs for tables if they don't already exist
            const sourceTableKey = `${sourceSchema}.${sourceTable}`;
            const targetTableKey = `${targetSchema}.${targetTable}`;

            if (!tableMap[sourceTableKey]) {
                tableMap[sourceTableKey] = generateId();
            }

            if (!tableMap[targetTableKey]) {
                tableMap[targetTableKey] = generateId();
            }

            fkData.push({
                name: constraintName,
                sourceTable: sourceTable,
                sourceSchema: sourceSchema,
                sourceColumn: sourceColumn,
                targetTable: targetTable,
                targetSchema: targetSchema,
                targetColumn: targetColumn,
                sourceTableId: tableMap[sourceTableKey],
                targetTableId: tableMap[targetTableKey],
            });
        }
    }

    return { fkData, tableMap };
}

/**
 * Map SQL Server data type strings to normalized types
 * This ensures consistent type handling across the application
 */
function normalizeSQLServerDataType(dataType: string): string {
    // Convert to lowercase for consistent comparison
    const lowerType = dataType.toLowerCase().trim();

    // Handle SQL Server specific types
    switch (lowerType) {
        // Exact numeric types
        case 'tinyint':
            return 'tinyint';
        case 'smallint':
            return 'smallint';
        case 'int':
            return 'int';
        case 'bigint':
            return 'bigint';
        case 'decimal':
        case 'numeric':
            return lowerType;
        case 'money':
        case 'smallmoney':
            return lowerType;

        // Approximate numeric types
        case 'float':
        case 'real':
            return lowerType;

        // Date and time types
        case 'date':
            return 'date';
        case 'datetime':
            return 'datetime';
        case 'datetime2':
            return 'datetime2';
        case 'datetimeoffset':
            return 'datetimeoffset';
        case 'smalldatetime':
            return 'smalldatetime';
        case 'time':
            return 'time';

        // Character strings
        case 'char':
        case 'varchar':
        case 'text':
            return lowerType;

        // Unicode character strings
        case 'nchar':
        case 'nvarchar':
        case 'ntext':
            return lowerType;

        // Binary strings
        case 'binary':
        case 'varbinary':
        case 'image':
            return lowerType;

        // Other data types
        case 'bit':
            return 'bit';
        case 'uniqueidentifier':
            return 'uniqueidentifier';
        case 'xml':
            return 'xml';
        case 'json':
            return 'json';

        // Default fallback
        default:
            return dataType;
    }
}

/**
 * Parse SQL Server DDL scripts and extract database structure
 * @param sqlContent SQL Server DDL content as string
 * @returns Parsed structure including tables, columns, and relationships
 */
export async function fromSQLServer(
    sqlContent: string
): Promise<SQLParserResult> {
    const tables: SQLTable[] = [];
    const relationships: SQLForeignKey[] = [];
    const tableMap: Record<string, string> = {}; // Maps table name to its ID

    try {
        // Preprocess the SQL content to handle T-SQL specific syntax
        const preprocessedSQL = preprocessSQLServerScript(sqlContent);

        const statements = sqlContent
            .split(';')
            .filter((stmt) => stmt.trim().length > 0);
        const alterTableStatements = statements.filter(
            (stmt) =>
                stmt.trim().toUpperCase().startsWith('ALTER TABLE') &&
                stmt.includes('FOREIGN KEY')
        );

        if (alterTableStatements.length > 0) {
            const { fkData, tableMap: fkTableMap } =
                parseAlterTableAddConstraint(alterTableStatements);

            // Store table IDs from alter statements
            Object.assign(tableMap, fkTableMap);

            // Store foreign key relationships for later processing
            relationships.push(...fkData);
        }

        const { Parser } = await import('node-sql-parser');
        const parser = new Parser();
        let ast;
        try {
            ast = parser.astify(preprocessedSQL, parserOpts);
        } catch {
            // Fallback: Try to parse each statement individually
            const statements = preprocessedSQL
                .split(';')
                .filter((stmt) => stmt.trim().length > 0);
            ast = [];

            for (const stmt of statements) {
                try {
                    const stmtAst = parser.astify(stmt + ';', parserOpts);
                    if (Array.isArray(stmtAst)) {
                        ast.push(...stmtAst);
                    } else if (stmtAst) {
                        ast.push(stmtAst);
                    }
                } catch {
                    // Skip statements that can't be parsed
                }
            }
        }

        if (!Array.isArray(ast) || ast.length === 0) {
            throw new Error('Failed to parse SQL DDL - Empty or invalid AST');
        }

        // Process each statement
        (ast as unknown as SQLASTNode[]).forEach((stmt) => {
            // Process CREATE TABLE statements
            if (stmt.type === 'create' && stmt.keyword === 'table') {
                processCreateTable(
                    stmt as CreateTableStatement,
                    tables,
                    tableMap,
                    relationships
                );
            }
            // Process CREATE INDEX statements
            else if (stmt.type === 'create' && stmt.keyword === 'index') {
                processCreateIndex(stmt as CreateIndexStatement, tables);
            }
            // Process ALTER TABLE statements
            else if (stmt.type === 'alter' && stmt.keyword === 'table') {
                processAlterTable(
                    stmt as AlterTableStatement,
                    tables,
                    relationships
                );
            }
        });

        // Link relationships to ensure all targetTableId and sourceTableId fields are filled
        const validRelationships = linkRelationships(
            tables,
            relationships,
            tableMap
        );

        // Sort tables by dependency (for better visualization)
        const sortedTables = [...tables];
        sortedTables.sort((a, b) => a.order - b.order);

        return {
            tables: sortedTables,
            relationships: validRelationships,
        };
    } catch (error) {
        console.error('Error parsing SQL Server DDL:', error);
        throw new Error(`Error parsing SQL Server DDL: ${error}`);
    }
}

/**
 * Process a CREATE TABLE statement
 */
function processCreateTable(
    stmt: CreateTableStatement,
    tables: SQLTable[],
    tableMap: Record<string, string>,
    relationships: SQLForeignKey[]
): void {
    let tableName = '';
    let schemaName = '';

    // Extract table name and schema
    if (stmt.table && typeof stmt.table === 'object') {
        // Handle array of tables if needed
        if (Array.isArray(stmt.table) && stmt.table.length > 0) {
            const tableObj = stmt.table[0];
            tableName = tableObj.table || '';
            // SQL Server uses 'schema' or 'db' field
            schemaName = tableObj.schema || tableObj.db || '';
        } else {
            // Direct object reference
            const tableObj = stmt.table as TableReference;
            tableName = tableObj.table || '';
            schemaName = tableObj.schema || tableObj.db || '';
        }
    }

    if (!tableName) {
        return;
    }

    // If no schema specified, use default 'dbo' schema for SQL Server
    if (!schemaName) {
        schemaName = 'dbo';
    }

    // Generate a unique ID for the table
    const tableId = generateId();
    const tableKey = `${schemaName ? schemaName + '.' : ''}${tableName}`;
    tableMap[tableKey] = tableId;

    // Process table columns
    const columns: SQLColumn[] = [];
    const indexes: SQLIndex[] = [];

    if (stmt.create_definitions && Array.isArray(stmt.create_definitions)) {
        stmt.create_definitions.forEach(
            (def: ColumnDefinition | ConstraintDefinition) => {
                if (def.resource === 'column') {
                    // Process column definition
                    const columnDef = def as ColumnDefinition;
                    const columnName = extractColumnName(columnDef.column);
                    const rawDataType = columnDef.definition?.dataType || '';
                    const normalizedDataType =
                        normalizeSQLServerDataType(rawDataType);

                    if (columnName) {
                        // Check for SQL Server specific column properties
                        const isPrimaryKey =
                            columnDef.primary_key === 'primary key';

                        // For SQL Server, check for IDENTITY property in suffixes
                        const hasIdentity = columnDef.definition?.suffix?.some(
                            (suffix) =>
                                suffix.toLowerCase().includes('identity')
                        );

                        columns.push({
                            name: columnName,
                            type: normalizedDataType,
                            nullable: columnDef.nullable?.type !== 'not null',
                            primaryKey: isPrimaryKey,
                            unique: columnDef.unique === 'unique',
                            typeArgs: getTypeArgs(columnDef.definition),
                            default: columnDef.default_val
                                ? safelyBuildSQLFromAST(columnDef.default_val)
                                : undefined,
                            increment: hasIdentity,
                        });
                    }
                } else if (def.resource === 'constraint') {
                    // Handle constraint definitions
                    const constraintDef = def as ConstraintDefinition;

                    // Handle PRIMARY KEY constraints
                    if (constraintDef.constraint_type === 'primary key') {
                        if (Array.isArray(constraintDef.definition)) {
                            // Extract column names from primary key constraint
                            for (const colDef of constraintDef.definition) {
                                if (
                                    colDef &&
                                    typeof colDef === 'object' &&
                                    'type' in colDef &&
                                    colDef.type === 'column_ref' &&
                                    'column' in colDef
                                ) {
                                    const pkColumnName = extractColumnName(
                                        colDef as ColumnReference
                                    );
                                    // Find and mark the column as primary key
                                    const column = columns.find(
                                        (col) => col.name === pkColumnName
                                    );
                                    if (column) {
                                        column.primaryKey = true;
                                    }
                                }
                            }
                        }
                    }
                    // Handle UNIQUE constraints
                    else if (constraintDef.constraint_type === 'unique') {
                        if (Array.isArray(constraintDef.definition)) {
                            const uniqueColumns: string[] = [];
                            // Extract column names from unique constraint
                            for (const colDef of constraintDef.definition) {
                                if (
                                    colDef &&
                                    typeof colDef === 'object' &&
                                    'type' in colDef &&
                                    colDef.type === 'column_ref' &&
                                    'column' in colDef
                                ) {
                                    const uniqueColumnName = extractColumnName(
                                        colDef as ColumnReference
                                    );
                                    uniqueColumns.push(uniqueColumnName);
                                }
                            }

                            // Add as an index
                            if (uniqueColumns.length > 0) {
                                indexes.push({
                                    name:
                                        constraintDef.constraint ||
                                        `unique_${tableName}_${uniqueColumns.join('_')}`,
                                    columns: uniqueColumns,
                                    unique: true,
                                });
                            }
                        }
                    }
                    // Handle FOREIGN KEY constraints
                    else if (
                        constraintDef.constraint_type === 'foreign key' &&
                        constraintDef.reference
                    ) {
                        const reference = constraintDef.reference;
                        if (
                            reference &&
                            reference.table &&
                            reference.columns &&
                            reference.columns.length > 0
                        ) {
                            // Extract target table info
                            const targetTable =
                                reference.table as TableReference;
                            const targetTableName = targetTable.table;
                            const targetSchemaName =
                                targetTable.schema || targetTable.db || 'dbo';

                            // Extract source column
                            let sourceColumnName = '';
                            if (
                                Array.isArray(constraintDef.definition) &&
                                constraintDef.definition.length > 0
                            ) {
                                const sourceColDef =
                                    constraintDef.definition[0];
                                if (
                                    sourceColDef &&
                                    typeof sourceColDef === 'object' &&
                                    'type' in sourceColDef &&
                                    sourceColDef.type === 'column_ref'
                                ) {
                                    sourceColumnName = extractColumnName(
                                        sourceColDef as ColumnReference
                                    );
                                }
                            }

                            // Extract target column
                            const targetColumnName = extractColumnName(
                                reference.columns[0]
                            );

                            if (
                                sourceColumnName &&
                                targetTableName &&
                                targetColumnName
                            ) {
                                // Create a foreign key relationship
                                relationships.push({
                                    name:
                                        constraintDef.constraint ||
                                        `fk_${tableName}_${sourceColumnName}`,
                                    sourceTable: tableName,
                                    sourceSchema: schemaName,
                                    sourceColumn: sourceColumnName,
                                    targetTable: targetTableName,
                                    targetSchema: targetSchemaName,
                                    targetColumn: targetColumnName,
                                    sourceTableId: tableId,
                                    targetTableId: '', // Will be filled later
                                    updateAction: reference.on_update,
                                    deleteAction: reference.on_delete,
                                });
                            }
                        }
                    }
                }
            }
        );
    }

    // Create the table object
    tables.push({
        id: tableId,
        name: tableName,
        schema: schemaName,
        columns,
        indexes,
        order: tables.length,
    });
}

/**
 * Process a CREATE INDEX statement
 */
function processCreateIndex(
    stmt: CreateIndexStatement,
    tables: SQLTable[]
): void {
    if (!stmt.table || !stmt.columns || stmt.columns.length === 0) {
        return;
    }

    // Extract table name and schema
    let tableName = '';
    let schemaName = '';

    if (typeof stmt.table === 'object') {
        // Handle array of tables if needed
        if (Array.isArray(stmt.table) && stmt.table.length > 0) {
            const tableObj = stmt.table[0];
            tableName = tableObj.table || '';
            schemaName = tableObj.schema || tableObj.db || '';
        } else {
            // Direct object reference
            const tableObj = stmt.table as TableReference;
            tableName = tableObj.table || '';
            schemaName = tableObj.schema || tableObj.db || '';
        }
    }

    if (!tableName) {
        return;
    }

    // If no schema specified, use default 'dbo' schema for SQL Server
    if (!schemaName) {
        schemaName = 'dbo';
    }

    // Find the table
    const table = findTableWithSchemaSupport(tables, tableName, schemaName);
    if (!table) {
        return;
    }

    // Extract column names from the index definition
    const indexColumns = stmt.columns.map((col) => extractColumnName(col));
    if (indexColumns.length === 0) {
        return;
    }

    // Create the index
    const indexName =
        stmt.index || `idx_${tableName}_${indexColumns.join('_')}`;
    const isUnique = stmt.constraint === 'unique';

    // Add index to the table
    const tableObj = tables.find((t) => t.id === table.id);
    if (tableObj) {
        tableObj.indexes.push({
            name: indexName,
            columns: indexColumns,
            unique: isUnique,
        });
    }
}

/**
 * Process an ALTER TABLE statement
 */
function processAlterTable(
    stmt: AlterTableStatement,
    tables: SQLTable[],
    relationships: SQLForeignKey[]
): void {
    if (!stmt.table || !stmt.expr || !Array.isArray(stmt.expr)) {
        return;
    }

    // Extract table name and schema
    let tableName = '';
    let schemaName = '';

    if (typeof stmt.table === 'object') {
        // Handle array of tables if needed
        if (Array.isArray(stmt.table) && stmt.table.length > 0) {
            const tableObj = stmt.table[0];
            tableName = tableObj.table || '';
            schemaName = tableObj.schema || tableObj.db || '';
        } else {
            // Direct object reference
            const tableObj = stmt.table as TableReference;
            tableName = tableObj.table || '';
            schemaName = tableObj.schema || tableObj.db || '';
        }
    }

    if (!tableName) {
        return;
    }

    // If no schema specified, use default 'dbo' schema for SQL Server
    if (!schemaName) {
        schemaName = 'dbo';
    }

    // Find the table
    const table = findTableWithSchemaSupport(tables, tableName, schemaName);
    if (!table) {
        return;
    }

    // Process each expression in the ALTER TABLE statement
    for (const expr of stmt.expr) {
        const action = expr.action;

        // Handle ADD CONSTRAINT for foreign keys
        if (action === 'add' && expr.resource === 'constraint') {
            const constraintDef = expr as unknown as ConstraintDefinition;

            if (
                constraintDef.constraint_type === 'foreign key' &&
                constraintDef.reference
            ) {
                const reference = constraintDef.reference;
                if (
                    reference &&
                    reference.table &&
                    reference.columns &&
                    reference.columns.length > 0
                ) {
                    // Extract target table info
                    const targetTable = reference.table as TableReference;
                    const targetTableName = targetTable.table;
                    const targetSchemaName =
                        targetTable.schema || targetTable.db || 'dbo';

                    // Extract source column
                    let sourceColumnName = '';
                    if (
                        Array.isArray(constraintDef.definition) &&
                        constraintDef.definition.length > 0
                    ) {
                        const sourceColDef = constraintDef.definition[0];
                        if (
                            sourceColDef &&
                            typeof sourceColDef === 'object' &&
                            'type' in sourceColDef &&
                            sourceColDef.type === 'column_ref'
                        ) {
                            sourceColumnName = extractColumnName(
                                sourceColDef as ColumnReference
                            );
                        }
                    }

                    // Extract target column
                    const targetColumnName = extractColumnName(
                        reference.columns[0]
                    );

                    if (
                        sourceColumnName &&
                        targetTableName &&
                        targetColumnName
                    ) {
                        // Create a foreign key relationship
                        relationships.push({
                            name:
                                constraintDef.constraint ||
                                `fk_${tableName}_${sourceColumnName}`,
                            sourceTable: tableName,
                            sourceSchema: schemaName,
                            sourceColumn: sourceColumnName,
                            targetTable: targetTableName,
                            targetSchema: targetSchemaName,
                            targetColumn: targetColumnName,
                            sourceTableId: table.id,
                            targetTableId: '', // Will be filled later
                            updateAction: reference.on_update,
                            deleteAction: reference.on_delete,
                        });
                    }
                }
            }
        }
    }
}

/**
 * Post-process the tables and relationships to ensure all targetTableId and sourceTableId fields are filled
 */
function linkRelationships(
    tables: SQLTable[],
    relationships: SQLForeignKey[],
    tableMap: Record<string, string>
): SQLForeignKey[] {
    // First, ensure all table keys are normalized
    const normalizedTableMap: Record<string, string> = {};
    for (const [key, id] of Object.entries(tableMap)) {
        // Normalize key format to ensure consistent lookups
        let normalizedKey = key;
        if (!key.includes('.')) {
            normalizedKey = `dbo.${key}`;
        }
        normalizedTableMap[normalizedKey.toLowerCase()] = id;

        // Also add without schema for fallback
        const tableName = key.includes('.') ? key.split('.')[1] : key;
        normalizedTableMap[tableName.toLowerCase()] = id;
    }

    // Add all tables to the normalized map
    for (const table of tables) {
        const tableKey = `${table.schema || 'dbo'}.${table.name}`;
        normalizedTableMap[tableKey.toLowerCase()] = table.id;
        normalizedTableMap[table.name.toLowerCase()] = table.id;
    }

    // Process all relationships
    const validRelationships = relationships.filter((rel) => {
        // Normalize keys for lookup
        const sourceTableKey = `${rel.sourceSchema || 'dbo'}.${rel.sourceTable}`;
        const targetTableKey = `${rel.targetSchema || 'dbo'}.${rel.targetTable}`;

        // Get the source table ID if it's not already set
        if (!rel.sourceTableId || rel.sourceTableId === '') {
            const sourceId =
                normalizedTableMap[sourceTableKey.toLowerCase()] ||
                normalizedTableMap[rel.sourceTable.toLowerCase()];

            if (sourceId) {
                rel.sourceTableId = sourceId;
            } else {
                return false;
            }
        }

        // Get the target table ID
        if (!rel.targetTableId || rel.targetTableId === '') {
            const targetId =
                normalizedTableMap[targetTableKey.toLowerCase()] ||
                normalizedTableMap[rel.targetTable.toLowerCase()];

            if (targetId) {
                rel.targetTableId = targetId;
            } else {
                return false;
            }
        }

        return true;
    });

    return validRelationships;
}
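
For orientation, a minimal sketch of driving the parser above on its own; the DDL snippet, the relative import path, and the top-level-await module context are illustrative assumptions rather than code from the repository:

import { fromSQLServer } from './sqlserver';

const ddl = `
CREATE TABLE [dbo].[users] ([id] int PRIMARY KEY, [email] nvarchar(255) NOT NULL);
CREATE TABLE [dbo].[posts] ([id] int PRIMARY KEY, [user_id] int);
ALTER TABLE [dbo].[posts] ADD CONSTRAINT [fk_posts_users]
    FOREIGN KEY ([user_id]) REFERENCES [dbo].[users] ([id]);
`;

// Should yield two tables in the dbo schema and one posts.user_id -> users.id relationship.
const { tables, relationships } = await fromSQLServer(ddl);
console.log(
    tables.map((t) => `${t.schema}.${t.name}`),
    relationships.map((r) => r.name)
);
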
333 src/lib/data/sql-import/index.ts Normal file
@@ -0,0 +1,333 @@
import { DatabaseType } from '@/lib/domain/database-type';
import type { Diagram } from '@/lib/domain/diagram';
import { fromPostgres } from './dialect-importers/postgresql/postgresql';
import { fromPostgresDump } from './dialect-importers/postgresql/postgresql-dump';

import { fromSQLServer } from './dialect-importers/sqlserver/sqlserver';
import { fromSQLite } from './dialect-importers/sqlite/sqlite';
import type { SQLParserResult } from './common';
import { convertToChartDBDiagram } from './common';
import { adjustTablePositions } from '@/lib/domain/db-table';
import { fromMySQL, isMySQLFormat } from './dialect-importers/mysql/mysql';

/**
 * Detect if SQL content is from pg_dump format
 * @param sqlContent SQL content as string
 * @returns boolean indicating if the SQL is likely from pg_dump
 */
function isPgDumpFormat(sqlContent: string): boolean {
    // pg_dump output often contains specific markers
    const pgDumpMarkers = [
        'SET statement_timeout',
        'SET lock_timeout',
        'SET client_encoding',
        'SET standard_conforming_strings',
        'SELECT pg_catalog.set_config',
        'ALTER TABLE ONLY',
        'COMMENT ON EXTENSION',
    ];

    // Check for specific pg_dump patterns
    for (const marker of pgDumpMarkers) {
        if (sqlContent.includes(marker)) {
            return true;
        }
    }

    // Check for other pg_dump patterns like COPY statements or specific comments
    if (
        (sqlContent.includes('COPY') && sqlContent.includes('FROM stdin')) ||
        sqlContent.match(/--\s+Name:.*Type:/i)
    ) {
        return true;
    }

    return false;
}

/**
 * Detect if SQL content is from SQL Server DDL format
 * @param sqlContent SQL content as string
 * @returns boolean indicating if the SQL is likely from SQL Server
 */
function isSQLServerFormat(sqlContent: string): boolean {
    // SQL Server output often contains specific markers
    const sqlServerMarkers = [
        'SET ANSI_NULLS ON',
        'SET QUOTED_IDENTIFIER ON',
        'SET ANSI_PADDING ON',
        'CREATE PROCEDURE',
        'EXEC sys.sp_',
        'EXECUTE sys.sp_',
        '[dbo].',
        'IDENTITY(',
        'NVARCHAR',
        'UNIQUEIDENTIFIER',
        'ALTER TABLE [',
        'CREATE TABLE [dbo]',
        'CREATE INDEX [dbo_',
        'datetime2',
    ];

    // Check for specific SQL Server patterns
    for (const marker of sqlServerMarkers) {
        if (sqlContent.includes(marker)) {
            return true;
        }
    }

    // Also check for brackets used in SQL Server syntax - [dbo].[TableName]
    if (sqlContent.match(/\[[^\]]+\]\.\[[^\]]+\]/)) {
        return true;
    }

    return false;
}

/**
 * Detect if SQL content is from SQLite format
 * @param sqlContent SQL content as string
 * @returns boolean indicating if the SQL is likely from SQLite
 */
function isSQLiteFormat(sqlContent: string): boolean {
    // SQLite output often contains specific markers
    const sqliteMarkers = [
        'PRAGMA',
        'INTEGER PRIMARY KEY AUTOINCREMENT',
        'DEFAULT (datetime(',
        'sqlite_sequence',
        'CREATE TRIGGER',
        'BEGIN',
        'END;',
    ];

    // Check for specific SQLite patterns
    for (const marker of sqliteMarkers) {
        if (sqlContent.includes(marker)) {
            return true;
        }
    }

    return false;
}

/**
 * Auto-detect database type from SQL content
 * @param sqlContent SQL content as string
 * @returns Detected database type or null if can't determine
 */
export function detectDatabaseType(sqlContent: string): DatabaseType | null {
    // First check for PostgreSQL dump format
    if (isPgDumpFormat(sqlContent)) {
        return DatabaseType.POSTGRESQL;
    }

    // Check for SQL Server format
    if (isSQLServerFormat(sqlContent)) {
        return DatabaseType.SQL_SERVER;
    }

    // Check for MySQL dump format
    if (isMySQLFormat(sqlContent)) {
        return DatabaseType.MYSQL;
    }

    // Check for SQLite format
    if (isSQLiteFormat(sqlContent)) {
        return DatabaseType.SQLITE;
    }

    // Look for database-specific keywords
    if (
        sqlContent.includes('SERIAL PRIMARY KEY') ||
        sqlContent.includes('CREATE EXTENSION') ||
        sqlContent.includes('WITH (OIDS') ||
        sqlContent.includes('RETURNS SETOF')
    ) {
        return DatabaseType.POSTGRESQL;
    }

    if (
        sqlContent.includes('AUTO_INCREMENT') ||
        sqlContent.includes('ENGINE=InnoDB') ||
        sqlContent.includes('DEFINER=')
    ) {
        return DatabaseType.MYSQL;
    }

    // Could not determine the database type
    return null;
}
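
// Illustrative expectations for the detection order above (the sample snippets
// are assumptions about typical dumps, not captured output; MySQL detection is
// delegated to isMySQLFormat and is not exercised here):
//
//   detectDatabaseType("SET ANSI_NULLS ON;\nCREATE TABLE [dbo].[users] ...")  -> DatabaseType.SQL_SERVER
//   detectDatabaseType("PRAGMA foreign_keys = ON; CREATE TABLE users (...);") -> DatabaseType.SQLITE
//   detectDatabaseType("CREATE TABLE users (id SERIAL PRIMARY KEY);")         -> DatabaseType.POSTGRESQL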

/**
 * Parse SQL statements and convert to a Diagram object
 * @param sqlContent SQL content as string
 * @param sourceDatabaseType Source database type
 * @param targetDatabaseType Target database type for the diagram
 * @returns Diagram object
 */
export async function sqlImportToDiagram({
    sqlContent,
    sourceDatabaseType,
    targetDatabaseType = DatabaseType.GENERIC,
}: {
    sqlContent: string;
    sourceDatabaseType: DatabaseType;
    targetDatabaseType: DatabaseType;
}): Promise<Diagram> {
    // If source database type is GENERIC, try to auto-detect the type
    if (sourceDatabaseType === DatabaseType.GENERIC) {
        const detectedType = detectDatabaseType(sqlContent);
        if (detectedType) {
            sourceDatabaseType = detectedType;
        } else {
            sourceDatabaseType = DatabaseType.POSTGRESQL;
        }
    }

    let parserResult: SQLParserResult;

    // Select the appropriate parser based on database type
    switch (sourceDatabaseType) {
        case DatabaseType.POSTGRESQL:
            // Check if the SQL is from pg_dump and use the appropriate parser
            if (isPgDumpFormat(sqlContent)) {
                parserResult = await fromPostgresDump(sqlContent);
            } else {
                parserResult = await fromPostgres(sqlContent);
            }
            break;
        case DatabaseType.MYSQL:
            // Check if the SQL is from MySQL dump and use the appropriate parser
            parserResult = await fromMySQL(sqlContent);
            break;
        case DatabaseType.SQL_SERVER:
            parserResult = await fromSQLServer(sqlContent);
            break;
        case DatabaseType.SQLITE:
            parserResult = await fromSQLite(sqlContent);
            break;
        default:
            throw new Error(`Unsupported database type: ${sourceDatabaseType}`);
    }

    // Convert the parsed SQL to a diagram
    const diagram = convertToChartDBDiagram(
        parserResult,
        sourceDatabaseType,
        targetDatabaseType
    );

    const adjustedTables = adjustTablePositions({
        tables: diagram.tables ?? [],
        relationships: diagram.relationships ?? [],
        mode: 'perSchema',
    });

    const sortedTables = adjustedTables.sort((a, b) => {
        if (a.isView === b.isView) {
            // Both are either tables or views, so sort alphabetically by name
            return a.name.localeCompare(b.name);
        }
        // If one is a view and the other is not, put tables first
        return a.isView ? 1 : -1;
    });

    return {
        ...diagram,
        tables: sortedTables,
    };
}

/**
 * Parse SQL and identify any errors
 * @param sqlContent SQL content as string
 * @param sourceDatabaseType Source database type
 * @returns Object with success status and error information
 */
export async function parseSQLError({
    sqlContent,
    sourceDatabaseType,
}: {
    sqlContent: string;
    sourceDatabaseType: DatabaseType;
}): Promise<{
    success: boolean;
    error?: string;
    line?: number;
    column?: number;
}> {
    try {
        // Validate SQL based on the database type
        switch (sourceDatabaseType) {
            case DatabaseType.POSTGRESQL:
                // PostgreSQL validation - check format and use appropriate parser
                if (isPgDumpFormat(sqlContent)) {
                    await fromPostgresDump(sqlContent);
                } else {
                    await fromPostgres(sqlContent);
                }
                break;
            case DatabaseType.MYSQL:
                await fromMySQL(sqlContent);
                break;
            case DatabaseType.SQL_SERVER:
                // SQL Server validation
                await fromSQLServer(sqlContent);
                break;
            case DatabaseType.SQLITE:
                // SQLite validation
                await fromSQLite(sqlContent);
                break;
            // Add more database types here
            default:
                throw new Error(
                    `Unsupported database type: ${sourceDatabaseType}`
                );
        }

        return { success: true };
    } catch (error: unknown) {
        // Extract line and column information from the error message
        let line: number | undefined;
        let column: number | undefined;
        let errorMessage: string;

        // Type guard to check if error is an object with a message property
        if (error instanceof Error) {
            errorMessage = error.message;

            // Parse error location if available
            const lineMatch = error.message.match(/line\s*(\d+)/i);
            if (lineMatch && lineMatch[1]) {
                line = parseInt(lineMatch[1], 10);
            }

            const columnMatch = error.message.match(/column\s*(\d+)/i);
            if (columnMatch && columnMatch[1]) {
                column = parseInt(columnMatch[1], 10);
            }

            // Clean up error message if needed
            if (error.message.includes('Error parsing')) {
                // Extract everything after the colon using regex
                const match = error.message.match(/Error parsing[^:]*:(.*)/);
                if (match && match[1]) {
                    errorMessage = match[1].trim();
                }
            }
        } else {
            // Fallback for non-Error objects
            errorMessage = String(error);
        }

        return {
            success: false,
            error: errorMessage,
            line,
            column,
        };
    }
}
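
A short sketch of how these two exports are typically combined from calling code, validating the script before building a diagram; the wrapper function, the `@/lib/data/sql-import` import path, and the choice of SQL Server as the source dialect are illustrative assumptions:

import { DatabaseType } from '@/lib/domain/database-type';
import type { Diagram } from '@/lib/domain/diagram';
import { parseSQLError, sqlImportToDiagram } from '@/lib/data/sql-import';

async function importSqlServerScript(sqlContent: string): Promise<Diagram> {
    // Surface parser errors (with line/column when available) before importing.
    const check = await parseSQLError({
        sqlContent,
        sourceDatabaseType: DatabaseType.SQL_SERVER,
    });
    if (!check.success) {
        throw new Error(
            `Invalid SQL${check.line ? ` at line ${check.line}` : ''}: ${check.error}`
        );
    }
    return sqlImportToDiagram({
        sqlContent,
        sourceDatabaseType: DatabaseType.SQL_SERVER,
        targetDatabaseType: DatabaseType.GENERIC,
    });
}
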
@@ -1,13 +1,20 @@
import { DatabaseType } from './database-type';
import { DatabaseEdition } from './database-edition';

export enum DatabaseClient {
    // PostgreSQL
    POSTGRESQL_PSQL = 'psql',

    // SQLite
    SQLITE_WRANGLER = 'wrangler',
}

export const databaseClientToLabelMap: Record<DatabaseClient, string> = {
    // PostgreSQL
    [DatabaseClient.POSTGRESQL_PSQL]: 'PSQL',

    // SQLite
    [DatabaseClient.SQLITE_WRANGLER]: 'Wrangler',
};

export const databaseTypeToClientsMap: Record<DatabaseType, DatabaseClient[]> =
@@ -21,3 +28,21 @@ export const databaseTypeToClientsMap: Record<DatabaseType, DatabaseClient[]> =
    [DatabaseType.CLICKHOUSE]: [],
    [DatabaseType.COCKROACHDB]: [],
};

export const databaseEditionToClientsMap: Record<
    DatabaseEdition,
    DatabaseClient[]
> = {
    // PostgreSQL
    [DatabaseEdition.POSTGRESQL_SUPABASE]: [],
    [DatabaseEdition.POSTGRESQL_TIMESCALE]: [],

    // MySQL
    [DatabaseEdition.MYSQL_5_7]: [],

    // SQL Server
    [DatabaseEdition.SQL_SERVER_2016_AND_BELOW]: [],

    // SQLite
    [DatabaseEdition.SQLITE_CLOUDFLARE_D1]: [DatabaseClient.SQLITE_WRANGLER],
};

@@ -3,6 +3,7 @@ import SupabaseImage from '@/assets/supabase.png';
import TimescaleImage from '@/assets/timescale.png';
import MySql5_7Image from '@/assets/mysql_5_7.png';
import SqlServerImage from '@/assets/sql_server_logo_2.png';
import CloudflareD1Image from '@/assets/cloudflare_d1.png';

export enum DatabaseEdition {
    // PostgreSQL
@@ -14,6 +15,9 @@ export enum DatabaseEdition {

    // SQL Server
    SQL_SERVER_2016_AND_BELOW = 'sql_server_2016_and_below',

    // SQLite
    SQLITE_CLOUDFLARE_D1 = 'cloudflare_d1',
}

export const databaseEditionToLabelMap: Record<DatabaseEdition, string> = {
@@ -26,6 +30,9 @@ export const databaseEditionToLabelMap: Record<DatabaseEdition, string> = {

    // SQL Server
    [DatabaseEdition.SQL_SERVER_2016_AND_BELOW]: '2016 and below',

    // SQLite
    [DatabaseEdition.SQLITE_CLOUDFLARE_D1]: 'Cloudflare D1',
};

export const databaseEditionToImageMap: Record<DatabaseEdition, string> = {
@@ -38,6 +45,9 @@ export const databaseEditionToImageMap: Record<DatabaseEdition, string> = {

    // SQL Server
    [DatabaseEdition.SQL_SERVER_2016_AND_BELOW]: SqlServerImage,

    // SQLite
    [DatabaseEdition.SQLITE_CLOUDFLARE_D1]: CloudflareD1Image,
};

export const databaseTypeToEditionMap: Record<DatabaseType, DatabaseEdition[]> =
@@ -48,7 +58,7 @@ export const databaseTypeToEditionMap: Record<DatabaseType, DatabaseEdition[]> =
    ],
    [DatabaseType.MYSQL]: [DatabaseEdition.MYSQL_5_7],
    [DatabaseType.SQL_SERVER]: [DatabaseEdition.SQL_SERVER_2016_AND_BELOW],
    [DatabaseType.SQLITE]: [],
    [DatabaseType.SQLITE]: [DatabaseEdition.SQLITE_CLOUDFLARE_D1],
    [DatabaseType.GENERIC]: [],
    [DatabaseType.MARIADB]: [],
    [DatabaseType.CLICKHOUSE]: [],

@@ -92,10 +92,10 @@ export const createFieldsFromMetadata = ({
    idx.columns.length === 1 &&
    idx.columns[0].name === col.name
),
nullable: col.nullable,
nullable: Boolean(col.nullable),
...(col.character_maximum_length &&
col.character_maximum_length !== 'null'
    ? { character_maximum_length: col.character_maximum_length }
    ? { characterMaximumLength: col.character_maximum_length }
    : {}),
...(col.precision?.precision
    ? { precision: col.precision.precision }