diff --git a/README.md b/README.md
index 36a4eddd..1b2621b8 100644
--- a/README.md
+++ b/README.md
@@ -49,13 +49,13 @@ Instantly visualize your database schema with a single **"Smart Query."** Custom
**What it does**:
-- **Instant Schema Import**
- Run a single query to instantly retrieve your database schema as JSON. This makes it incredibly fast to visualize your database schema, whether for documentation, team discussions, or simply understanding your data better.
+- **Instant Schema Import**
+ Run a single query to instantly retrieve your database schema as JSON. This makes it incredibly fast to visualize your database schema, whether for documentation, team discussions, or simply understanding your data better.
-- **AI-Powered Export for Easy Migration**
- Our AI-driven export feature allows you to generate the DDL script in the dialect of your choice. Whether you’re migrating from MySQL to PostgreSQL or from SQLite to MariaDB, ChartDB simplifies the process by providing the necessary scripts tailored to your target database.
-- **Interactive Editing**
- Fine-tune your database schema using our intuitive editor. Easily make adjustments or annotations to better visualize complex structures.
+- **AI-Powered Export for Easy Migration**
+ Our AI-driven export feature allows you to generate the DDL script in the dialect of your choice. Whether you're migrating from MySQL to PostgreSQL or from SQLite to MariaDB, ChartDB simplifies the process by providing the necessary scripts tailored to your target database.
+- **Interactive Editing**
+ Fine-tune your database schema using our intuitive editor. Easily make adjustments or annotations to better visualize complex structures.
### Status
@@ -63,13 +63,13 @@ ChartDB is currently in Public Beta. Star and watch this repository to get notif
### Supported Databases
-- ✅ PostgreSQL (
+
+
)
-- ✅ MySQL
-- ✅ SQL Server
-- ✅ MariaDB
-- ✅ SQLite
-- ✅ CockroachDB
-- ✅ ClickHouse
+- ✅ PostgreSQL (
+
+
)
+- ✅ MySQL
+- ✅ SQL Server
+- ✅ MariaDB
+- ✅ SQLite (
+
Cloudflare D1)
+- ✅ CockroachDB
+- ✅ ClickHouse
## Getting Started
@@ -91,17 +91,19 @@ npm run build
Or like this if you want to have AI capabilities:
-```
+```bash
npm install
VITE_OPENAI_API_KEY= npm run build
```
### Run the Docker Container
+
```bash
docker run -e OPENAI_API_KEY= -p 8080:80 ghcr.io/chartdb/chartdb:latest
```
#### Build and Run locally
+
```bash
docker build -t chartdb .
docker run -e OPENAI_API_KEY= -p 8080:80 chartdb
@@ -145,9 +147,9 @@ VITE_LLM_MODEL_NAME=Qwen/Qwen2.5-32B-Instruct-AWQ
## 💚 Community & Support
-- [Discord](https://discord.gg/QeFwyWSKwC) (For live discussion with the community and the ChartDB team)
-- [GitHub Issues](https://github.com/chartdb/chartdb/issues) (For any bugs and errors you encounter using ChartDB)
-- [Twitter](https://x.com/chartdb_io) (Get news fast)
+- [Discord](https://discord.gg/QeFwyWSKwC) (For live discussion with the community and the ChartDB team)
+- [GitHub Issues](https://github.com/chartdb/chartdb/issues) (For any bugs and errors you encounter using ChartDB)
+- [Twitter](https://x.com/chartdb_io) (Get news fast)
## Contributing
diff --git a/src/assets/cloudflare_d1.png b/src/assets/cloudflare_d1.png
new file mode 100644
index 00000000..8bd0c9b4
Binary files /dev/null and b/src/assets/cloudflare_d1.png differ
diff --git a/src/dialogs/common/import-database/import-database.tsx b/src/dialogs/common/import-database/import-database.tsx
index a15ab9e6..fdb0ee0e 100644
--- a/src/dialogs/common/import-database/import-database.tsx
+++ b/src/dialogs/common/import-database/import-database.tsx
@@ -1,4 +1,4 @@
-import React, { useCallback, useEffect, useState } from 'react';
+import React, { useCallback, useEffect, useMemo, useState } from 'react';
import { Button } from '@/components/button/button';
import {
DialogClose,
@@ -31,6 +31,7 @@ import type { DatabaseClient } from '@/lib/domain/database-clients';
import {
databaseClientToLabelMap,
databaseTypeToClientsMap,
+ databaseEditionToClientsMap,
} from '@/lib/domain/database-clients';
import type { ImportMetadataScripts } from '@/lib/data/import-metadata/scripts/scripts';
import { ZoomableImage } from '@/components/zoomable-image/zoomable-image';
@@ -71,7 +72,15 @@ export const ImportDatabase: React.FC = ({
keepDialogAfterImport,
title,
}) => {
- const databaseClients = databaseTypeToClientsMap[databaseType];
+ const databaseClients = useMemo(
+ () => [
+ ...databaseTypeToClientsMap[databaseType],
+ ...(databaseEdition
+ ? databaseEditionToClientsMap[databaseEdition]
+ : []),
+ ],
+ [databaseType, databaseEdition]
+ );
const [errorMessage, setErrorMessage] = useState('');
const [databaseClient, setDatabaseClient] = useState<
DatabaseClient | undefined
@@ -258,7 +267,7 @@ export const ImportDatabase: React.FC = ({
/>
)}
- {databaseTypeToClientsMap[databaseType].length > 0 ? (
+ {databaseClients.length > 0 ? (
= ({
const [diagramNumber, setDiagramNumber] = useState(1);
const navigate = useNavigate();
+ useEffect(() => {
+ setDatabaseEdition(undefined);
+ }, [databaseType]);
+
useEffect(() => {
const fetchDiagrams = async () => {
const diagrams = await listDiagrams();
diff --git a/src/dialogs/import-database-dialog/import-database-dialog.tsx b/src/dialogs/import-database-dialog/import-database-dialog.tsx
index a2b1f4c2..d0ecbf3c 100644
--- a/src/dialogs/import-database-dialog/import-database-dialog.tsx
+++ b/src/dialogs/import-database-dialog/import-database-dialog.tsx
@@ -43,6 +43,10 @@ export const ImportDatabaseDialog: React.FC = ({
DatabaseEdition | undefined
>();
+ useEffect(() => {
+ setDatabaseEdition(undefined);
+ }, [databaseType]);
+
useEffect(() => {
if (!dialog.open) return;
setDatabaseEdition(undefined);
diff --git a/src/lib/data/import-metadata/scripts/scripts.ts b/src/lib/data/import-metadata/scripts/scripts.ts
index 1ad8af9c..cfc2c4c2 100644
--- a/src/lib/data/import-metadata/scripts/scripts.ts
+++ b/src/lib/data/import-metadata/scripts/scripts.ts
@@ -1,7 +1,7 @@
import { DatabaseType } from '@/lib/domain/database-type';
import { getPostgresQuery } from './postgres-script';
import { getMySQLQuery } from './mysql-script';
-import { sqliteQuery } from './sqlite-script';
+import { getSQLiteQuery } from './sqlite-script';
import { getSqlServerQuery } from './sqlserver-script';
import { mariaDBQuery } from './maria-script';
import type { DatabaseEdition } from '@/lib/domain/database-edition';
@@ -21,7 +21,7 @@ export const importMetadataScripts: ImportMetadataScripts = {
[DatabaseType.GENERIC]: () => '',
[DatabaseType.POSTGRESQL]: getPostgresQuery,
[DatabaseType.MYSQL]: getMySQLQuery,
- [DatabaseType.SQLITE]: () => sqliteQuery,
+ [DatabaseType.SQLITE]: getSQLiteQuery,
[DatabaseType.SQL_SERVER]: getSqlServerQuery,
[DatabaseType.MARIADB]: () => mariaDBQuery,
[DatabaseType.CLICKHOUSE]: () => clickhouseQuery,
diff --git a/src/lib/data/import-metadata/scripts/sqlite-script.ts b/src/lib/data/import-metadata/scripts/sqlite-script.ts
index 23081727..dcba3d55 100644
--- a/src/lib/data/import-metadata/scripts/sqlite-script.ts
+++ b/src/lib/data/import-metadata/scripts/sqlite-script.ts
@@ -1,4 +1,8 @@
-export const sqliteQuery = `WITH fk_info AS (
+import { DatabaseEdition } from '@/lib/domain/database-edition';
+import { DatabaseClient } from '@/lib/domain/database-clients';
+
+const sqliteQuery = `${`/* Standard SQLite */`}
+WITH fk_info AS (
SELECT
json_group_array(
json_object(
@@ -163,3 +167,225 @@ replace(replace(replace(
'\\"', '"'),'"[', '['), ']"', ']'
) AS metadata_json_to_import;
`;
+
+const cloudflareD1Query = `${`/* Cloudflare D1 SQLite */`}
+WITH fk_info AS (
+ SELECT
+ json_group_array(
+ json_object(
+ 'schema', '',
+ 'table', m.name,
+ 'column', fk.[from],
+ 'foreign_key_name',
+ 'fk_' || m.name || '_' || fk.[from] || '_' || fk.[table] || '_' || fk.[to],
+ 'reference_schema', '',
+ 'reference_table', fk.[table],
+ 'reference_column', fk.[to],
+ 'fk_def',
+ 'FOREIGN KEY (' || fk.[from] || ') REFERENCES ' || fk.[table] || '(' || fk.[to] || ')' ||
+ ' ON UPDATE ' || fk.on_update || ' ON DELETE ' || fk.on_delete
+ )
+ ) AS fk_metadata
+ FROM
+ sqlite_master m
+ JOIN
+ pragma_foreign_key_list(m.name) fk
+ ON
+ m.type = 'table'
+ WHERE
+ m.name NOT LIKE '\\_cf\\_%' ESCAPE '\\'
+), pk_info AS (
+ SELECT
+ json_group_array(
+ json_object(
+ 'schema', '',
+ 'table', pk.table_name,
+ 'field_count', pk.field_count,
+ 'column', pk.pk_column,
+ 'pk_def', 'PRIMARY KEY (' || pk.pk_column || ')'
+ )
+ ) AS pk_metadata
+ FROM
+ (
+ SELECT
+ m.name AS table_name,
+ COUNT(p.name) AS field_count,
+ GROUP_CONCAT(p.name) AS pk_column
+ FROM
+ sqlite_master m
+ JOIN
+ pragma_table_info(m.name) p
+ ON
+ m.type = 'table' AND p.pk > 0
+ WHERE
+ m.name NOT LIKE '\\_cf\\_%' ESCAPE '\\'
+ GROUP BY
+ m.name
+ ) pk
+), indexes_metadata AS (
+ SELECT
+ json_group_array(
+ json_object(
+ 'schema', '',
+ 'table', m.name,
+ 'name', idx.name,
+ 'column', ic.name,
+ 'index_type', 'B-TREE',
+ 'cardinality', '',
+ 'size', '',
+ 'unique', (CASE WHEN idx.[unique] = 1 THEN 'true' ELSE 'false' END),
+ 'direction', '',
+ 'column_position', ic.seqno + 1
+ )
+ ) AS indexes_metadata
+ FROM
+ sqlite_master m
+ JOIN
+ pragma_index_list(m.name) idx
+ ON
+ m.type = 'table'
+ JOIN
+ pragma_index_info(idx.name) ic
+ WHERE
+ m.name NOT LIKE '\\_cf\\_%' ESCAPE '\\'
+), cols AS (
+ SELECT
+ json_group_array(
+ json_object(
+ 'schema', '',
+ 'table', m.name,
+ 'name', p.name,
+ 'type',
+ CASE
+ WHEN INSTR(LOWER(p.type), '(') > 0 THEN
+ SUBSTR(LOWER(p.type), 1, INSTR(LOWER(p.type), '(') - 1)
+ ELSE LOWER(p.type)
+ END,
+ 'ordinal_position', p.cid,
+ 'nullable', (CASE WHEN p.[notnull] = 0 THEN true ELSE false END),
+ 'collation', '',
+ 'character_maximum_length',
+ CASE
+ WHEN LOWER(p.type) LIKE 'char%' OR LOWER(p.type) LIKE 'varchar%' THEN
+ CASE
+ WHEN INSTR(p.type, '(') > 0 THEN
+ REPLACE(SUBSTR(p.type, INSTR(p.type, '(') + 1, LENGTH(p.type) - INSTR(p.type, '(') - 1), ')', '')
+ ELSE 'null'
+ END
+ ELSE 'null'
+ END,
+ 'precision',
+ CASE
+ WHEN LOWER(p.type) LIKE 'decimal%' OR LOWER(p.type) LIKE 'numeric%' THEN
+ CASE
+ WHEN instr(p.type, '(') > 0 THEN
+ json_object(
+ 'precision', substr(p.type, instr(p.type, '(') + 1, instr(p.type, ',') - instr(p.type, '(') - 1),
+ 'scale', substr(p.type, instr(p.type, ',') + 1, instr(p.type, ')') - instr(p.type, ',') - 1)
+ )
+ ELSE 'null'
+ END
+ ELSE 'null'
+ END,
+ 'default', COALESCE(REPLACE(p.dflt_value, '"', '\\"'), '')
+ )
+ ) AS cols_metadata
+ FROM
+ sqlite_master m
+ JOIN
+ pragma_table_info(m.name) p
+ ON
+ m.type in ('table', 'view')
+ WHERE
+ m.name NOT LIKE '\\_cf\\_%' ESCAPE '\\'
+), tbls AS (
+ SELECT
+ json_group_array(
+ json_object(
+ 'schema', '',
+ 'table', m.name,
+ 'rows', -1,
+ 'type', 'table',
+ 'engine', '',
+ 'collation', ''
+ )
+ ) AS tbls_metadata
+ FROM
+ sqlite_master m
+ WHERE
+ m.type in ('table', 'view') AND m.name NOT LIKE '\\_cf\\_%' ESCAPE '\\'
+), views AS (
+ SELECT
+ json_group_array(
+ json_object(
+ 'schema', '',
+ 'view_name', m.name
+ )
+ ) AS views_metadata
+ FROM
+ sqlite_master m
+ WHERE
+ m.type = 'view' AND m.name NOT LIKE '\\_cf\\_%' ESCAPE '\\'
+)
+SELECT
+replace(replace(replace(
+ json_object(
+ 'fk_info', (SELECT fk_metadata FROM fk_info),
+ 'pk_info', (SELECT pk_metadata FROM pk_info),
+ 'columns', (SELECT cols_metadata FROM cols),
+ 'indexes', (SELECT indexes_metadata FROM indexes_metadata),
+ 'tables', (SELECT tbls_metadata FROM tbls),
+ 'views', (SELECT views_metadata FROM views),
+ 'database_name', 'sqlite',
+ 'version', ''
+ ),
+ '\\"', '"'),'"[', '['), ']"', ']'
+) AS metadata_json_to_import;
+`;
+
+// Generate Wrangler CLI command wrapper around the D1 query
+const generateWranglerCommand = (): string => {
+ return `# Cloudflare D1 (via Wrangler CLI) Import Script
+# ------------------------------------------------------
+# This query will extract your D1 database schema using Cloudflare's Wrangler CLI
+#
+# Prerequisites:
+# 1. Install Wrangler CLI if you haven't already: npm install -g wrangler
+# 2. Login to your Cloudflare account: wrangler login
+# 3. Make sure that your wrangler.jsonc or wrangler.toml file has the following:
+# [[d1_databases]]
+# binding = "DB"
+# database_name = "YOUR_DB_NAME"
+# database_id = "YOUR_DB_ID"
+# 4. Replace YOUR_DB_NAME with your actual D1 database name
+# 5. Replace YOUR_DB_ID with your actual D1 database ID
+
+# Step 1: Write the query to a file
+wrangler d1 execute YOUR_DB_NAME --command $'WITH fk_info AS ( SELECT json_group_array( json_object( \\'schema\\', \\'\\', \\'table\\', m.name, \\'column\\', fk.[from], \\'foreign_key_name\\', \\'fk_\\' || m.name || \\'_\\' || fk.[from] || \\'_\\' || fk.[table] || \\'_\\' || fk.[to], \\'reference_schema\\', \\'\\', \\'reference_table\\', fk.[table], \\'reference_column\\', fk.[to], \\'fk_def\\', \\'FOREIGN KEY (\\' || fk.[from] || \\') REFERENCES \\' || fk.[table] || \\'(\\' || fk.[to] || \\')\\' || \\' ON UPDATE \\' || fk.on_update || \\' ON DELETE \\' || fk.on_delete ) ) AS fk_metadata FROM sqlite_master m JOIN pragma_foreign_key_list(m.name) fk ON m.type = \\'table\\' WHERE m.name NOT LIKE \\'\\\\_cf\\\\_%\\' ESCAPE \\'\\\\\\' ), pk_info AS ( SELECT json_group_array( json_object( \\'schema\\', \\'\\', \\'table\\', pk.table_name, \\'field_count\\', pk.field_count, \\'column\\', pk.pk_column, \\'pk_def\\', \\'PRIMARY KEY (\\' || pk.pk_column || \\')\\' ) ) AS pk_metadata FROM ( SELECT m.name AS table_name, COUNT(p.name) AS field_count, GROUP_CONCAT(p.name) AS pk_column FROM sqlite_master m JOIN pragma_table_info(m.name) p ON m.type = \\'table\\' AND p.pk > 0 WHERE m.name NOT LIKE \\'\\\\_cf\\\\_%\\' ESCAPE \\'\\\\\\' GROUP BY m.name ) pk ), indexes_metadata AS ( SELECT json_group_array( json_object( \\'schema\\', \\'\\', \\'table\\', m.name, \\'name\\', idx.name, \\'column\\', ic.name, \\'index_type\\', \\'B-TREE\\', \\'cardinality\\', \\'\\', \\'size\\', \\'\\', \\'unique\\', CASE WHEN idx.[unique] = 1 THEN \\'true\\' ELSE \\'false\\' END, \\'direction\\', \\'\\', \\'column_position\\', ic.seqno + 1 ) ) AS indexes_metadata FROM sqlite_master m JOIN pragma_index_list(m.name) idx ON m.type = \\'table\\' JOIN pragma_index_info(idx.name) ic WHERE m.name NOT LIKE \\'\\\\_cf\\\\_%\\' ESCAPE \\'\\\\\\' ), cols AS ( SELECT json_group_array( json_object( \\'schema\\', \\'\\', \\'table\\', m.name, \\'name\\', p.name, \\'type\\', CASE WHEN INSTR(LOWER(p.type), \\'(\\') > 0 
THEN SUBSTR(LOWER(p.type), 1, INSTR(LOWER(p.type), \\'(\\') - 1) ELSE LOWER(p.type) END, \\'ordinal_position\\', p.cid, \\'nullable\\', CASE WHEN p.[notnull] = 0 THEN true ELSE false END, \\'collation\\', \\'\\', \\'character_maximum_length\\', CASE WHEN LOWER(p.type) LIKE \\'char%\\' OR LOWER(p.type) LIKE \\'varchar%\\' THEN CASE WHEN INSTR(p.type, \\'(\\') > 0 THEN REPLACE( SUBSTR(p.type, INSTR(p.type, \\'(\\') + 1, LENGTH(p.type) - INSTR(p.type, \\'(\\') - 1), \\')\\', \\'\\' ) ELSE \\'null\\' END ELSE \\'null\\' END, \\'precision\\', CASE WHEN LOWER(p.type) LIKE \\'decimal%\\' OR LOWER(p.type) LIKE \\'numeric%\\' THEN CASE WHEN instr(p.type, \\'(\\') > 0 THEN json_object( \\'precision\\', substr(p.type, instr(p.type, \\'(\\') + 1, instr(p.type, \\',\\') - instr(p.type, \\'(\\') - 1), \\'scale\\', substr(p.type, instr(p.type, \\',\\') + 1, instr(p.type, \\')\\') - instr(p.type, \\',\\') - 1) ) ELSE \\'null\\' END ELSE \\'null\\' END, \\'default\\', COALESCE(REPLACE(p.dflt_value, \\'"\\', \\'\\\\\\"\\'), \\'\\') ) ) AS cols_metadata FROM sqlite_master m JOIN pragma_table_info(m.name) p ON m.type in (\\'table\\', \\'view\\') WHERE m.name NOT LIKE \\'\\\\_cf\\\\_%\\' ESCAPE \\'\\\\\\' ), tbls AS ( SELECT json_group_array( json_object( \\'schema\\', \\'\\', \\'table\\', m.name, \\'rows\\', -1, \\'type\\', \\'table\\', \\'engine\\', \\'\\', \\'collation\\', \\'\\' ) ) AS tbls_metadata FROM sqlite_master m WHERE m.type in (\\'table\\', \\'view\\') AND m.name NOT LIKE \\'\\\\_cf\\\\_%\\' ESCAPE \\'\\\\\\' ), views AS ( SELECT json_group_array( json_object( \\'schema\\', \\'\\', \\'view_name\\', m.name ) ) AS views_metadata FROM sqlite_master m WHERE m.type = \\'view\\' AND m.name NOT LIKE \\'\\\\_cf\\\\_%\\' ESCAPE \\'\\\\\\' ) SELECT json_object( \\'fk_info\\', json((SELECT fk_metadata FROM fk_info)), \\'pk_info\\', json((SELECT pk_metadata FROM pk_info)), \\'columns\\', json((SELECT cols_metadata FROM cols)), \\'indexes\\', json((SELECT indexes_metadata FROM 
indexes_metadata)), \\'tables\\', json((SELECT tbls_metadata FROM tbls)), \\'views\\', json((SELECT views_metadata FROM views)), \\'database_name\\', \\'sqlite\\', \\'version\\', \\'\\' ) AS metadata_json_to_import;' --remote
+
+# Step 2: Copy the output of the command above and paste it into app.chartdb.io
+`;
+};
+
+export const getSQLiteQuery = (
+ options: {
+ databaseEdition?: DatabaseEdition;
+ databaseClient?: DatabaseClient;
+ } = {}
+): string => {
+ // For Cloudflare D1 edition, return the D1 script
+ if (options.databaseEdition === DatabaseEdition.SQLITE_CLOUDFLARE_D1) {
+ // Generate the Wrangler CLI command based on client
+ const isWranglerClient =
+ options?.databaseClient === DatabaseClient.SQLITE_WRANGLER;
+
+ if (isWranglerClient) {
+ return generateWranglerCommand();
+ }
+
+ return cloudflareD1Query;
+ }
+
+ // Default SQLite script
+ return sqliteQuery;
+};
diff --git a/src/lib/domain/database-clients.ts b/src/lib/domain/database-clients.ts
index 22e857d1..848f8efa 100644
--- a/src/lib/domain/database-clients.ts
+++ b/src/lib/domain/database-clients.ts
@@ -1,13 +1,20 @@
import { DatabaseType } from './database-type';
+import { DatabaseEdition } from './database-edition';
export enum DatabaseClient {
// PostgreSQL
POSTGRESQL_PSQL = 'psql',
+
+ // SQLite
+ SQLITE_WRANGLER = 'wrangler',
}
export const databaseClientToLabelMap: Record<DatabaseClient, string> = {
// PostgreSQL
[DatabaseClient.POSTGRESQL_PSQL]: 'PSQL',
+
+ // SQLite
+ [DatabaseClient.SQLITE_WRANGLER]: 'Wrangler',
};
export const databaseTypeToClientsMap: Record<DatabaseType, DatabaseClient[]> =
@@ -21,3 +28,21 @@ export const databaseTypeToClientsMap: Record =
[DatabaseType.CLICKHOUSE]: [],
[DatabaseType.COCKROACHDB]: [],
};
+
+export const databaseEditionToClientsMap: Record<
+ DatabaseEdition,
+ DatabaseClient[]
+> = {
+ // PostgreSQL
+ [DatabaseEdition.POSTGRESQL_SUPABASE]: [],
+ [DatabaseEdition.POSTGRESQL_TIMESCALE]: [],
+
+ // MySQL
+ [DatabaseEdition.MYSQL_5_7]: [],
+
+ // SQL Server
+ [DatabaseEdition.SQL_SERVER_2016_AND_BELOW]: [],
+
+ // SQLite
+ [DatabaseEdition.SQLITE_CLOUDFLARE_D1]: [DatabaseClient.SQLITE_WRANGLER],
+};
diff --git a/src/lib/domain/database-edition.ts b/src/lib/domain/database-edition.ts
index 6e338cd4..0bcb69d7 100644
--- a/src/lib/domain/database-edition.ts
+++ b/src/lib/domain/database-edition.ts
@@ -3,6 +3,7 @@ import SupabaseImage from '@/assets/supabase.png';
import TimescaleImage from '@/assets/timescale.png';
import MySql5_7Image from '@/assets/mysql_5_7.png';
import SqlServerImage from '@/assets/sql_server_logo_2.png';
+import CloudflareD1Image from '@/assets/cloudflare_d1.png';
export enum DatabaseEdition {
// PostgreSQL
@@ -14,6 +15,9 @@ export enum DatabaseEdition {
// SQL Server
SQL_SERVER_2016_AND_BELOW = 'sql_server_2016_and_below',
+
+ // SQLite
+ SQLITE_CLOUDFLARE_D1 = 'cloudflare_d1',
}
export const databaseEditionToLabelMap: Record<DatabaseEdition, string> = {
@@ -26,6 +30,9 @@ export const databaseEditionToLabelMap: Record = {
// SQL Server
[DatabaseEdition.SQL_SERVER_2016_AND_BELOW]: '2016 and below',
+
+ // SQLite
+ [DatabaseEdition.SQLITE_CLOUDFLARE_D1]: 'Cloudflare D1',
};
export const databaseEditionToImageMap: Record<DatabaseEdition, string> = {
@@ -38,6 +45,9 @@ export const databaseEditionToImageMap: Record = {
// SQL Server
[DatabaseEdition.SQL_SERVER_2016_AND_BELOW]: SqlServerImage,
+
+ // SQLite
+ [DatabaseEdition.SQLITE_CLOUDFLARE_D1]: CloudflareD1Image,
};
export const databaseTypeToEditionMap: Record<DatabaseType, DatabaseEdition[]> =
@@ -48,7 +58,7 @@ export const databaseTypeToEditionMap: Record =
],
[DatabaseType.MYSQL]: [DatabaseEdition.MYSQL_5_7],
[DatabaseType.SQL_SERVER]: [DatabaseEdition.SQL_SERVER_2016_AND_BELOW],
- [DatabaseType.SQLITE]: [],
+ [DatabaseType.SQLITE]: [DatabaseEdition.SQLITE_CLOUDFLARE_D1],
[DatabaseType.GENERIC]: [],
[DatabaseType.MARIADB]: [],
[DatabaseType.CLICKHOUSE]: [],