fix(sql-server): improvement for SQL Server import via SQL script (#789)

* feat: improvement for SQL Server import via SQL script

* fix for test

* some fixes

* some fixes

---------

Co-authored-by: Guy Ben-Aharon <baguy3@gmail.com>
Jonathan Fishner authored on 2025-07-25 19:16:35 +03:00, committed by GitHub
parent 745bdee86d
commit 79b885502e
12 changed files with 2678 additions and 442 deletions
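The changes below teach the SQL Server parser to handle GO batches, bracketed identifiers, varchar(max)/nvarchar(max), and decimal(p, s) arguments, and the diagram converter to preserve SQL Server-specific types when the target is also SQL Server. A minimal sketch of the parser output, assuming the relative import path used by the new tests and a hypothetical table definition that is not taken from the diff:

import { fromSQLServer } from '../sqlserver'; // path as used by the new tests

async function demo(): Promise<void> {
    // Hypothetical script; the shapes logged below mirror the new test expectations.
    const sql = `
        CREATE TABLE [dbo].[scrolls](
            [Id]    [uniqueidentifier] NOT NULL PRIMARY KEY,
            [Body]  [nvarchar](max)    NULL,
            [Price] [decimal](10, 2)   NOT NULL
        );
    `;

    const { tables } = await fromSQLServer(sql);
    const columns = tables[0].columns;

    // nvarchar(max) is parsed with string typeArgs ('max') rather than a numeric length.
    console.log(columns.find((c) => c.name === 'Body')?.typeArgs); // 'max'

    // decimal(p, s) is parsed with array typeArgs ([precision, scale]).
    console.log(columns.find((c) => c.name === 'Price')?.typeArgs); // [10, 2]
}

void demo();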


@@ -22,8 +22,10 @@ export interface FieldAttributeRange {
interface FieldAttributes {
hasCharMaxLength?: boolean;
hasCharMaxLengthOption?: boolean;
precision?: FieldAttributeRange;
scale?: FieldAttributeRange;
maxLength?: number;
}
export interface DataTypeData extends DataType {


@@ -7,13 +7,21 @@ export const sqlServerDataTypes: readonly DataTypeData[] = [
{
name: 'varchar',
id: 'varchar',
fieldAttributes: { hasCharMaxLength: true },
fieldAttributes: {
hasCharMaxLength: true,
hasCharMaxLengthOption: true,
maxLength: 8000,
},
usageLevel: 1,
},
{
name: 'nvarchar',
id: 'nvarchar',
fieldAttributes: { hasCharMaxLength: true },
fieldAttributes: {
hasCharMaxLength: true,
hasCharMaxLengthOption: true,
maxLength: 4000,
},
usageLevel: 1,
},
{ name: 'text', id: 'text', usageLevel: 1 },
@@ -77,7 +85,11 @@ export const sqlServerDataTypes: readonly DataTypeData[] = [
{
name: 'varbinary',
id: 'varbinary',
fieldAttributes: { hasCharMaxLength: true },
fieldAttributes: {
hasCharMaxLength: true,
hasCharMaxLengthOption: true,
maxLength: 8000,
},
},
{ name: 'image', id: 'image' },
{ name: 'datetimeoffset', id: 'datetimeoffset' },
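The new maxLength and hasCharMaxLengthOption attributes record the documented per-type limits (8000 characters for varchar/varbinary, 4000 for nvarchar) and whether the MAX keyword is accepted. A hypothetical consumer of these attributes — not part of the diff, with an assumed import path — might clamp user input like this:

import { sqlServerDataTypes } from './sql-server-data-types'; // assumed module path

function resolveCharLength(typeId: string, requested: number | 'max'): string {
    const attrs = sqlServerDataTypes.find((t) => t.id === typeId)?.fieldAttributes;
    if (!attrs?.hasCharMaxLength) {
        throw new Error(`${typeId} does not take a character length`);
    }
    if (requested === 'max') {
        // MAX is only legal for types that advertise the option (varchar, nvarchar, varbinary).
        return attrs.hasCharMaxLengthOption ? 'max' : String(attrs.maxLength ?? 1);
    }
    // Cap the requested length at the per-type limit declared above.
    return String(Math.min(requested, attrs.maxLength ?? requested));
}

// resolveCharLength('nvarchar', 9000) -> '4000'
// resolveCharLength('varchar', 'max') -> 'max'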


@@ -18,11 +18,14 @@ export interface SQLColumn {
nullable: boolean;
primaryKey: boolean;
unique: boolean;
typeArgs?: {
length?: number;
precision?: number;
scale?: number;
};
typeArgs?:
| {
length?: number;
precision?: number;
scale?: number;
}
| number[]
| string;
comment?: string;
default?: string;
increment?: boolean;
@@ -559,6 +562,38 @@ export function convertToChartDBDiagram(
id: column.type.toLowerCase(),
name: column.type,
};
}
// Handle SQL Server types specifically
else if (
sourceDatabaseType === DatabaseType.SQL_SERVER &&
targetDatabaseType === DatabaseType.SQL_SERVER
) {
const normalizedType = column.type.toLowerCase();
// Preserve SQL Server specific types when target is also SQL Server
if (
normalizedType === 'nvarchar' ||
normalizedType === 'nchar' ||
normalizedType === 'ntext' ||
normalizedType === 'uniqueidentifier' ||
normalizedType === 'datetime2' ||
normalizedType === 'datetimeoffset' ||
normalizedType === 'money' ||
normalizedType === 'smallmoney' ||
normalizedType === 'bit' ||
normalizedType === 'xml' ||
normalizedType === 'hierarchyid' ||
normalizedType === 'geography' ||
normalizedType === 'geometry'
) {
mappedType = { id: normalizedType, name: normalizedType };
} else {
// Use the standard mapping for other types
mappedType = mapSQLTypeToGenericType(
column.type,
sourceDatabaseType
);
}
} else {
// Use the standard mapping for other types
mappedType = mapSQLTypeToGenericType(
@@ -581,22 +616,68 @@ export function convertToChartDBDiagram(
// Add type arguments if present
if (column.typeArgs) {
// Transfer length for varchar/char types
if (
column.typeArgs.length !== undefined &&
(field.type.id === 'varchar' || field.type.id === 'char')
) {
field.characterMaximumLength =
column.typeArgs.length.toString();
// Handle string typeArgs (e.g., 'max' for varchar(max))
if (typeof column.typeArgs === 'string') {
if (
(field.type.id === 'varchar' ||
field.type.id === 'nvarchar') &&
column.typeArgs === 'max'
) {
field.characterMaximumLength = 'max';
}
}
// Transfer precision/scale for numeric types
if (
column.typeArgs.precision !== undefined &&
(field.type.id === 'numeric' || field.type.id === 'decimal')
// Handle array typeArgs (SQL Server format)
else if (
Array.isArray(column.typeArgs) &&
column.typeArgs.length > 0
) {
field.precision = column.typeArgs.precision;
field.scale = column.typeArgs.scale;
if (
field.type.id === 'varchar' ||
field.type.id === 'nvarchar' ||
field.type.id === 'char' ||
field.type.id === 'nchar'
) {
field.characterMaximumLength =
column.typeArgs[0].toString();
} else if (
(field.type.id === 'numeric' ||
field.type.id === 'decimal') &&
column.typeArgs.length >= 2
) {
field.precision = column.typeArgs[0];
field.scale = column.typeArgs[1];
}
}
// Handle object typeArgs (standard format)
else if (
typeof column.typeArgs === 'object' &&
!Array.isArray(column.typeArgs)
) {
const typeArgsObj = column.typeArgs as {
length?: number;
precision?: number;
scale?: number;
};
// Transfer length for varchar/char types
if (
typeArgsObj.length !== undefined &&
(field.type.id === 'varchar' ||
field.type.id === 'char')
) {
field.characterMaximumLength =
typeArgsObj.length.toString();
}
// Transfer precision/scale for numeric types
if (
typeArgsObj.precision !== undefined &&
(field.type.id === 'numeric' ||
field.type.id === 'decimal')
) {
field.precision = typeArgsObj.precision;
field.scale = typeArgsObj.scale;
}
}
}
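The converter now accepts three typeArgs shapes: the literal string 'max', the SQL Server array form ([length] or [precision, scale]), and the original object form. A condensed, illustrative helper — not code from the diff, and omitting the per-type-id checks the real branches apply — that folds the three shapes into one:

type TypeArgs =
    | { length?: number; precision?: number; scale?: number }
    | number[]
    | string;

function normalizeTypeArgs(typeArgs: TypeArgs): {
    characterMaximumLength?: string;
    precision?: number;
    scale?: number;
} {
    // String form: 'max' for varchar(max)/nvarchar(max).
    if (typeof typeArgs === 'string') {
        return typeArgs === 'max' ? { characterMaximumLength: 'max' } : {};
    }
    // Array form emitted by the SQL Server parser: [length] or [precision, scale].
    if (Array.isArray(typeArgs)) {
        if (typeArgs.length >= 2) {
            return { precision: typeArgs[0], scale: typeArgs[1] };
        }
        return typeArgs.length === 1
            ? { characterMaximumLength: typeArgs[0].toString() }
            : {};
    }
    // Object form used by the other dialect parsers.
    return {
        characterMaximumLength: typeArgs.length?.toString(),
        precision: typeArgs.precision,
        scale: typeArgs.scale,
    };
}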


@@ -0,0 +1,350 @@
import { describe, it, expect } from 'vitest';
import { fromSQLServer } from '../sqlserver';
describe('SQL Server Core Parser Tests', () => {
it('should parse basic tables', async () => {
const sql = `
CREATE TABLE wizards (
id INT PRIMARY KEY,
name NVARCHAR(255) NOT NULL
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(1);
expect(result.tables[0].name).toBe('wizards');
expect(result.tables[0].columns).toHaveLength(2);
});
it('should parse tables with schemas', async () => {
const sql = `
CREATE TABLE [magic].[spells] (
id UNIQUEIDENTIFIER PRIMARY KEY DEFAULT NEWID(),
name NVARCHAR(100) NOT NULL,
level INT NOT NULL
);
CREATE TABLE [dbo].[wizards] (
id INT IDENTITY(1,1) PRIMARY KEY,
name NVARCHAR(255) NOT NULL
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(2);
expect(result.tables.find((t) => t.name === 'spells')).toBeDefined();
expect(result.tables.find((t) => t.name === 'spells')?.schema).toBe(
'magic'
);
expect(result.tables.find((t) => t.name === 'wizards')?.schema).toBe(
'dbo'
);
});
it('should parse foreign key relationships', async () => {
const sql = `
CREATE TABLE guilds (id INT PRIMARY KEY);
CREATE TABLE mages (
id INT PRIMARY KEY,
guild_id INT FOREIGN KEY REFERENCES guilds(id)
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(2);
expect(result.relationships).toHaveLength(1);
expect(result.relationships[0].sourceTable).toBe('mages');
expect(result.relationships[0].targetTable).toBe('guilds');
expect(result.relationships[0].sourceColumn).toBe('guild_id');
expect(result.relationships[0].targetColumn).toBe('id');
});
it('should parse foreign keys with schema references', async () => {
const sql = `
CREATE TABLE [magic].[schools] (
id UNIQUEIDENTIFIER PRIMARY KEY DEFAULT NEWID(),
name NVARCHAR(100) NOT NULL
);
CREATE TABLE [magic].[towers] (
id UNIQUEIDENTIFIER PRIMARY KEY DEFAULT NEWID(),
school_id UNIQUEIDENTIFIER NOT NULL,
name NVARCHAR(100) NOT NULL,
CONSTRAINT FK_towers_schools FOREIGN KEY (school_id) REFERENCES [magic].[schools](id)
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(2);
expect(result.relationships).toHaveLength(1);
expect(result.relationships[0].sourceTable).toBe('towers');
expect(result.relationships[0].targetTable).toBe('schools');
expect(result.relationships[0].sourceSchema).toBe('magic');
expect(result.relationships[0].targetSchema).toBe('magic');
});
it('should handle GO statements and SQL Server specific syntax', async () => {
const sql = `
USE [MagicalRealm]
GO
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE TABLE [dbo].[enchantments] (
[Id] [uniqueidentifier] NOT NULL,
[Name] [nvarchar](max) NOT NULL,
[Power] [decimal](18, 2) NOT NULL,
[CreatedAt] [datetime2](7) NOT NULL,
CONSTRAINT [PK_enchantments] PRIMARY KEY CLUSTERED
(
[Id] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF) ON [PRIMARY]
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
GO
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(1);
expect(result.tables[0].name).toBe('enchantments');
expect(result.tables[0].columns).toHaveLength(4);
expect(
result.tables[0].columns.find((c) => c.name === 'Power')?.type
).toBe('decimal');
});
it('should parse ALTER TABLE ADD CONSTRAINT for foreign keys', async () => {
const sql = `
CREATE TABLE [calibration].[Calibration] (
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
[Average] [decimal](18, 2) NOT NULL
);
CREATE TABLE [calibration].[CalibrationProcess] (
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
[CalibrationId] [uniqueidentifier] NOT NULL
);
ALTER TABLE [calibration].[CalibrationProcess]
ADD CONSTRAINT [FK_CalibrationProcess_Calibration]
FOREIGN KEY ([CalibrationId])
REFERENCES [calibration].[Calibration]([Id]);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(2);
expect(result.relationships).toHaveLength(1);
expect(result.relationships[0].sourceTable).toBe('CalibrationProcess');
expect(result.relationships[0].targetTable).toBe('Calibration');
expect(result.relationships[0].name).toBe(
'FK_CalibrationProcess_Calibration'
);
});
it('should handle multiple schemas from the test file', async () => {
const sql = `
CREATE SCHEMA [magic]
GO
CREATE SCHEMA [artifacts]
GO
CREATE TABLE [magic].[wizards] (
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
[Name] [nvarchar](255) NOT NULL
);
CREATE TABLE [artifacts].[wands] (
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
[WizardId] [uniqueidentifier] NOT NULL,
[WoodType] [nvarchar](50) NOT NULL,
CONSTRAINT [FK_wands_wizards] FOREIGN KEY ([WizardId]) REFERENCES [magic].[wizards]([Id])
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(2);
expect(result.tables.find((t) => t.schema === 'magic')).toBeDefined();
expect(
result.tables.find((t) => t.schema === 'artifacts')
).toBeDefined();
expect(result.relationships).toHaveLength(1);
expect(result.relationships[0].sourceSchema).toBe('artifacts');
expect(result.relationships[0].targetSchema).toBe('magic');
});
it('should handle SQL Server data types correctly', async () => {
const sql = `
CREATE TABLE [magic].[spell_components] (
[Id] [uniqueidentifier] NOT NULL,
[Name] [nvarchar](255) NOT NULL,
[Quantity] [int] NOT NULL,
[Weight] [decimal](10, 2) NOT NULL,
[IsPowerful] [bit] NOT NULL,
[DiscoveredAt] [datetime2](7) NOT NULL,
[Description] [nvarchar](max) NULL,
[RarityLevel] [tinyint] NOT NULL,
[MarketValue] [money] NOT NULL,
[AlchemicalFormula] [xml] NULL,
PRIMARY KEY ([Id])
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(1);
const columns = result.tables[0].columns;
expect(columns.find((c) => c.name === 'Id')?.type).toBe(
'uniqueidentifier'
);
expect(columns.find((c) => c.name === 'Name')?.type).toBe('nvarchar');
expect(columns.find((c) => c.name === 'Quantity')?.type).toBe('int');
expect(columns.find((c) => c.name === 'Weight')?.type).toBe('decimal');
expect(columns.find((c) => c.name === 'IsPowerful')?.type).toBe('bit');
expect(columns.find((c) => c.name === 'DiscoveredAt')?.type).toBe(
'datetime2'
);
expect(columns.find((c) => c.name === 'Description')?.type).toBe(
'nvarchar'
);
expect(columns.find((c) => c.name === 'RarityLevel')?.type).toBe(
'tinyint'
);
expect(columns.find((c) => c.name === 'MarketValue')?.type).toBe(
'money'
);
expect(columns.find((c) => c.name === 'AlchemicalFormula')?.type).toBe(
'xml'
);
});
it('should handle IDENTITY columns', async () => {
const sql = `
CREATE TABLE [dbo].[magical_creatures] (
[Id] [int] IDENTITY(1,1) NOT NULL PRIMARY KEY,
[Name] [nvarchar](100) NOT NULL,
[PowerLevel] [int] NOT NULL
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(1);
const idColumn = result.tables[0].columns.find((c) => c.name === 'Id');
expect(idColumn?.increment).toBe(true);
});
it('should parse composite primary keys', async () => {
const sql = `
CREATE TABLE [magic].[spell_ingredients] (
[SpellId] [uniqueidentifier] NOT NULL,
[IngredientId] [uniqueidentifier] NOT NULL,
[Quantity] [int] NOT NULL,
CONSTRAINT [PK_spell_ingredients] PRIMARY KEY CLUSTERED
(
[SpellId] ASC,
[IngredientId] ASC
)
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(1);
const table = result.tables[0];
expect(table.columns.filter((c) => c.primaryKey)).toHaveLength(2);
expect(
table.columns.find((c) => c.name === 'SpellId')?.primaryKey
).toBe(true);
expect(
table.columns.find((c) => c.name === 'IngredientId')?.primaryKey
).toBe(true);
});
it('should handle unique constraints', async () => {
const sql = `
CREATE TABLE [dbo].[arcane_libraries] (
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
[Code] [nvarchar](50) NOT NULL,
[Name] [nvarchar](255) NOT NULL,
CONSTRAINT [UQ_arcane_libraries_code] UNIQUE ([Code])
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(1);
expect(result.tables[0].indexes).toHaveLength(1);
expect(result.tables[0].indexes[0].name).toBe(
'UQ_arcane_libraries_code'
);
expect(result.tables[0].indexes[0].unique).toBe(true);
expect(result.tables[0].indexes[0].columns).toContain('Code');
});
it('should handle default values', async () => {
const sql = `
CREATE TABLE [dbo].[potion_recipes] (
[Id] [uniqueidentifier] NOT NULL DEFAULT NEWID(),
[Name] [nvarchar](255) NOT NULL,
[IsActive] [bit] NOT NULL DEFAULT 1,
[CreatedAt] [datetime2](7) NOT NULL DEFAULT GETDATE(),
[Difficulty] [int] NOT NULL DEFAULT 5,
PRIMARY KEY ([Id])
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(1);
const columns = result.tables[0].columns;
expect(columns.find((c) => c.name === 'Id')?.default).toBeDefined();
expect(columns.find((c) => c.name === 'IsActive')?.default).toBe('1');
expect(
columns.find((c) => c.name === 'CreatedAt')?.default
).toBeDefined();
expect(columns.find((c) => c.name === 'Difficulty')?.default).toBe('5');
});
it('should parse indexes created separately', async () => {
const sql = `
CREATE TABLE [dbo].[spell_books] (
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
[Title] [nvarchar](255) NOT NULL,
[Author] [nvarchar](255) NOT NULL,
[PublishedYear] [int] NOT NULL
);
CREATE INDEX [IX_spell_books_author] ON [dbo].[spell_books] ([Author]);
CREATE UNIQUE INDEX [UIX_spell_books_title] ON [dbo].[spell_books] ([Title]);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(1);
expect(result.tables[0].indexes).toHaveLength(2);
const authorIndex = result.tables[0].indexes.find(
(i) => i.name === 'IX_spell_books_author'
);
expect(authorIndex?.unique).toBe(false);
expect(authorIndex?.columns).toContain('Author');
const titleIndex = result.tables[0].indexes.find(
(i) => i.name === 'UIX_spell_books_title'
);
expect(titleIndex?.unique).toBe(true);
expect(titleIndex?.columns).toContain('Title');
});
});


@@ -0,0 +1,478 @@
import { describe, it, expect } from 'vitest';
import { fromSQLServer } from '../sqlserver';
describe('SQL Server Real-World Examples', () => {
describe('Magical Academy Example', () => {
it('should parse the magical academy example with all 16 tables', async () => {
const sql = `
CREATE TABLE [dbo].[schools](
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
[name] [nvarchar](255) NOT NULL,
[created_at] [datetime2](7) NOT NULL DEFAULT GETDATE()
);
CREATE TABLE [dbo].[towers](
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
[school_id] [uniqueidentifier] NOT NULL,
[name] [nvarchar](255) NOT NULL,
CONSTRAINT [FK_towers_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE
);
CREATE TABLE [dbo].[ranks](
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
[school_id] [uniqueidentifier] NOT NULL,
[name] [nvarchar](255) NOT NULL,
CONSTRAINT [FK_ranks_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE
);
CREATE TABLE [dbo].[spell_permissions](
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
[spell_type] [nvarchar](255) NOT NULL,
[casting_level] [nvarchar](255) NOT NULL
);
CREATE TABLE [dbo].[rank_spell_permissions](
[rank_id] [uniqueidentifier] NOT NULL,
[spell_permission_id] [uniqueidentifier] NOT NULL,
PRIMARY KEY ([rank_id], [spell_permission_id]),
CONSTRAINT [FK_rsp_ranks] FOREIGN KEY ([rank_id]) REFERENCES [dbo].[ranks]([id]) ON DELETE CASCADE,
CONSTRAINT [FK_rsp_permissions] FOREIGN KEY ([spell_permission_id]) REFERENCES [dbo].[spell_permissions]([id]) ON DELETE CASCADE
);
CREATE TABLE [dbo].[grimoire_types](
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
[school_id] [uniqueidentifier] NOT NULL,
[name] [nvarchar](255) NOT NULL,
CONSTRAINT [FK_grimoire_types_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE
);
CREATE TABLE [dbo].[wizards](
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
[school_id] [uniqueidentifier] NOT NULL,
[tower_id] [uniqueidentifier] NOT NULL,
[wizard_name] [nvarchar](255) NOT NULL,
[email] [nvarchar](255) NOT NULL,
CONSTRAINT [FK_wizards_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE,
CONSTRAINT [FK_wizards_towers] FOREIGN KEY ([tower_id]) REFERENCES [dbo].[towers]([id]) ON DELETE CASCADE,
CONSTRAINT [UQ_wizards_school_name] UNIQUE ([school_id], [wizard_name])
);
CREATE TABLE [dbo].[wizard_ranks](
[wizard_id] [uniqueidentifier] NOT NULL,
[rank_id] [uniqueidentifier] NOT NULL,
[tower_id] [uniqueidentifier] NOT NULL,
[assigned_at] [datetime2](7) NOT NULL DEFAULT GETDATE(),
PRIMARY KEY ([wizard_id], [rank_id], [tower_id]),
CONSTRAINT [FK_wr_wizards] FOREIGN KEY ([wizard_id]) REFERENCES [dbo].[wizards]([id]) ON DELETE CASCADE,
CONSTRAINT [FK_wr_ranks] FOREIGN KEY ([rank_id]) REFERENCES [dbo].[ranks]([id]) ON DELETE CASCADE,
CONSTRAINT [FK_wr_towers] FOREIGN KEY ([tower_id]) REFERENCES [dbo].[towers]([id]) ON DELETE CASCADE
);
CREATE TABLE [dbo].[apprentices](
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
[school_id] [uniqueidentifier] NOT NULL,
[tower_id] [uniqueidentifier] NOT NULL,
[first_name] [nvarchar](255) NOT NULL,
[last_name] [nvarchar](255) NOT NULL,
[enrollment_date] [date] NOT NULL,
[primary_mentor] [uniqueidentifier] NULL,
[sponsoring_wizard] [uniqueidentifier] NULL,
CONSTRAINT [FK_apprentices_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE,
CONSTRAINT [FK_apprentices_towers] FOREIGN KEY ([tower_id]) REFERENCES [dbo].[towers]([id]) ON DELETE CASCADE,
CONSTRAINT [FK_apprentices_mentor] FOREIGN KEY ([primary_mentor]) REFERENCES [dbo].[wizards]([id]),
CONSTRAINT [FK_apprentices_sponsor] FOREIGN KEY ([sponsoring_wizard]) REFERENCES [dbo].[wizards]([id])
);
CREATE TABLE [dbo].[spell_lessons](
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
[school_id] [uniqueidentifier] NOT NULL,
[tower_id] [uniqueidentifier] NOT NULL,
[apprentice_id] [uniqueidentifier] NOT NULL,
[instructor_id] [uniqueidentifier] NOT NULL,
[lesson_date] [datetime2](7) NOT NULL,
CONSTRAINT [FK_sl_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE,
CONSTRAINT [FK_sl_towers] FOREIGN KEY ([tower_id]) REFERENCES [dbo].[towers]([id]) ON DELETE CASCADE,
CONSTRAINT [FK_sl_apprentices] FOREIGN KEY ([apprentice_id]) REFERENCES [dbo].[apprentices]([id]) ON DELETE CASCADE,
CONSTRAINT [FK_sl_instructors] FOREIGN KEY ([instructor_id]) REFERENCES [dbo].[wizards]([id])
);
CREATE TABLE [dbo].[grimoires](
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
[school_id] [uniqueidentifier] NOT NULL,
[tower_id] [uniqueidentifier] NOT NULL,
[apprentice_id] [uniqueidentifier] NOT NULL,
[grimoire_type_id] [uniqueidentifier] NOT NULL,
[author_wizard_id] [uniqueidentifier] NOT NULL,
[content] [nvarchar](max) NOT NULL,
CONSTRAINT [FK_g_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE,
CONSTRAINT [FK_g_towers] FOREIGN KEY ([tower_id]) REFERENCES [dbo].[towers]([id]) ON DELETE CASCADE,
CONSTRAINT [FK_g_apprentices] FOREIGN KEY ([apprentice_id]) REFERENCES [dbo].[apprentices]([id]) ON DELETE CASCADE,
CONSTRAINT [FK_g_types] FOREIGN KEY ([grimoire_type_id]) REFERENCES [dbo].[grimoire_types]([id]),
CONSTRAINT [FK_g_authors] FOREIGN KEY ([author_wizard_id]) REFERENCES [dbo].[wizards]([id])
);
CREATE TABLE [dbo].[tuition_scrolls](
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
[school_id] [uniqueidentifier] NOT NULL,
[tower_id] [uniqueidentifier] NOT NULL,
[apprentice_id] [uniqueidentifier] NOT NULL,
[total_amount] [decimal](10,2) NOT NULL,
[status] [nvarchar](50) NOT NULL,
CONSTRAINT [FK_ts_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE CASCADE,
CONSTRAINT [FK_ts_towers] FOREIGN KEY ([tower_id]) REFERENCES [dbo].[towers]([id]) ON DELETE CASCADE,
CONSTRAINT [FK_ts_apprentices] FOREIGN KEY ([apprentice_id]) REFERENCES [dbo].[apprentices]([id]) ON DELETE CASCADE
);
CREATE TABLE [dbo].[tuition_items](
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
[tuition_scroll_id] [uniqueidentifier] NOT NULL,
[description] [nvarchar](max) NOT NULL,
[amount] [decimal](10,2) NOT NULL,
CONSTRAINT [FK_ti_scrolls] FOREIGN KEY ([tuition_scroll_id]) REFERENCES [dbo].[tuition_scrolls]([id]) ON DELETE CASCADE
);
CREATE TABLE [dbo].[patron_sponsorships](
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
[tuition_scroll_id] [uniqueidentifier] NOT NULL,
[patron_house] [nvarchar](255) NOT NULL,
[sponsorship_code] [nvarchar](50) NOT NULL,
[status] [nvarchar](50) NOT NULL,
CONSTRAINT [FK_ps_scrolls] FOREIGN KEY ([tuition_scroll_id]) REFERENCES [dbo].[tuition_scrolls]([id]) ON DELETE CASCADE
);
CREATE TABLE [dbo].[gold_payments](
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
[tuition_scroll_id] [uniqueidentifier] NOT NULL,
[amount] [decimal](10,2) NOT NULL,
[payment_date] [datetime2](7) NOT NULL DEFAULT GETDATE(),
CONSTRAINT [FK_gp_scrolls] FOREIGN KEY ([tuition_scroll_id]) REFERENCES [dbo].[tuition_scrolls]([id]) ON DELETE CASCADE
);
CREATE TABLE [dbo].[arcane_logs](
[id] [bigint] IDENTITY(1,1) PRIMARY KEY,
[school_id] [uniqueidentifier] NULL,
[wizard_id] [uniqueidentifier] NULL,
[tower_id] [uniqueidentifier] NULL,
[table_name] [nvarchar](255) NOT NULL,
[operation] [nvarchar](50) NOT NULL,
[record_id] [uniqueidentifier] NULL,
[changes] [nvarchar](max) NULL,
[created_at] [datetime2](7) NOT NULL DEFAULT GETDATE(),
CONSTRAINT [FK_al_schools] FOREIGN KEY ([school_id]) REFERENCES [dbo].[schools]([id]) ON DELETE SET NULL,
CONSTRAINT [FK_al_wizards] FOREIGN KEY ([wizard_id]) REFERENCES [dbo].[wizards]([id]) ON DELETE SET NULL,
CONSTRAINT [FK_al_towers] FOREIGN KEY ([tower_id]) REFERENCES [dbo].[towers]([id]) ON DELETE SET NULL
);
`;
const result = await fromSQLServer(sql);
// Should find all 16 tables
const expectedTables = [
'apprentices',
'arcane_logs',
'gold_payments',
'grimoire_types',
'grimoires',
'patron_sponsorships',
'rank_spell_permissions',
'ranks',
'schools',
'spell_lessons',
'spell_permissions',
'towers',
'tuition_items',
'tuition_scrolls',
'wizard_ranks',
'wizards',
];
expect(result.tables).toHaveLength(16);
expect(result.tables.map((t) => t.name).sort()).toEqual(
expectedTables
);
// Verify key relationships exist
const relationships = result.relationships;
// Check some critical relationships
expect(
relationships.some(
(r) =>
r.sourceTable === 'wizards' &&
r.targetTable === 'schools' &&
r.sourceColumn === 'school_id'
)
).toBe(true);
expect(
relationships.some(
(r) =>
r.sourceTable === 'wizard_ranks' &&
r.targetTable === 'wizards' &&
r.sourceColumn === 'wizard_id'
)
).toBe(true);
expect(
relationships.some(
(r) =>
r.sourceTable === 'apprentices' &&
r.targetTable === 'wizards' &&
r.sourceColumn === 'primary_mentor'
)
).toBe(true);
});
});
describe('Enchanted Bazaar Example', () => {
it('should parse the enchanted bazaar example with complex features', async () => {
const sql = `
-- Enchanted Bazaar tables with complex features
CREATE TABLE [dbo].[merchants](
[id] [int] IDENTITY(1,1) PRIMARY KEY,
[name] [nvarchar](255) NOT NULL,
[email] [nvarchar](255) NOT NULL,
[created_at] [datetime] DEFAULT GETDATE(),
CONSTRAINT [UQ_merchants_email] UNIQUE ([email])
);
CREATE TABLE [dbo].[artifacts](
[id] [int] IDENTITY(1,1) PRIMARY KEY,
[merchant_id] [int] NOT NULL,
[name] [nvarchar](255) NOT NULL,
[price] [decimal](10, 2) NOT NULL CHECK ([price] >= 0),
[enchantment_charges] [int] DEFAULT 0 CHECK ([enchantment_charges] >= 0),
CONSTRAINT [FK_artifacts_merchants] FOREIGN KEY ([merchant_id]) REFERENCES [dbo].[merchants]([id]) ON DELETE CASCADE
);
CREATE TABLE [dbo].[trades](
[id] [int] IDENTITY(1,1) PRIMARY KEY,
[created_at] [datetime] DEFAULT GETDATE(),
[status] [varchar](50) DEFAULT 'negotiating'
);
CREATE TABLE [dbo].[trade_items](
[trade_id] [int] NOT NULL,
[artifact_id] [int] NOT NULL,
[quantity] [int] NOT NULL CHECK ([quantity] > 0),
[agreed_price] [decimal](10, 2) NOT NULL,
PRIMARY KEY ([trade_id], [artifact_id]),
CONSTRAINT [FK_ti_trades] FOREIGN KEY ([trade_id]) REFERENCES [dbo].[trades]([id]) ON DELETE CASCADE,
CONSTRAINT [FK_ti_artifacts] FOREIGN KEY ([artifact_id]) REFERENCES [dbo].[artifacts]([id])
);
-- Create indexes
CREATE INDEX [IX_artifacts_merchant_id] ON [dbo].[artifacts] ([merchant_id]);
CREATE INDEX [IX_artifacts_price] ON [dbo].[artifacts] ([price] DESC);
CREATE UNIQUE INDEX [UIX_artifacts_name_merchant] ON [dbo].[artifacts] ([name], [merchant_id]);
`;
const result = await fromSQLServer(sql);
// Should parse all tables
expect(result.tables.length).toBeGreaterThanOrEqual(4);
// Check for specific tables
const tableNames = result.tables.map((t) => t.name);
expect(tableNames).toContain('merchants');
expect(tableNames).toContain('artifacts');
expect(tableNames).toContain('trades');
expect(tableNames).toContain('trade_items');
// Check relationships
expect(
result.relationships.some(
(r) =>
r.sourceTable === 'artifacts' &&
r.targetTable === 'merchants'
)
).toBe(true);
expect(
result.relationships.some(
(r) =>
r.sourceTable === 'trade_items' &&
r.targetTable === 'trades'
)
).toBe(true);
// Check indexes were created
const artifactsTable = result.tables.find(
(t) => t.name === 'artifacts'
);
expect(artifactsTable?.indexes.length).toBeGreaterThanOrEqual(2);
expect(
artifactsTable?.indexes.some(
(i) => i.name === 'IX_artifacts_merchant_id'
)
).toBe(true);
expect(
artifactsTable?.indexes.some(
(i) => i.unique && i.name === 'UIX_artifacts_name_merchant'
)
).toBe(true);
});
});
describe('Complex SQL Server Schema Example', () => {
it('should parse complex multi-schema database with various SQL Server features', async () => {
const sql = `
CREATE SCHEMA [magic];
GO
CREATE SCHEMA [inventory];
GO
CREATE SCHEMA [academy];
GO
-- Magic schema tables
CREATE TABLE [magic].[spell_categories](
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWSEQUENTIALID(),
[name] [nvarchar](100) NOT NULL,
[description] [nvarchar](max) NULL,
[is_forbidden] [bit] NOT NULL DEFAULT 0,
[created_at] [datetime2](7) NOT NULL DEFAULT SYSDATETIME()
);
CREATE TABLE [magic].[spells](
[id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWSEQUENTIALID(),
[category_id] [uniqueidentifier] NOT NULL,
[name] [nvarchar](200) NOT NULL,
[mana_cost] [smallint] NOT NULL CHECK ([mana_cost] > 0),
[damage_output] [decimal](10,2) NULL,
[cast_time_ms] [int] NOT NULL DEFAULT 1000,
[is_active] [bit] NOT NULL DEFAULT 1,
[metadata] [xml] NULL,
CONSTRAINT [FK_spells_categories] FOREIGN KEY ([category_id])
REFERENCES [magic].[spell_categories]([id]) ON DELETE CASCADE,
CONSTRAINT [UQ_spells_name] UNIQUE ([name])
);
-- Inventory schema tables
CREATE TABLE [inventory].[item_types](
[id] [int] IDENTITY(1,1) PRIMARY KEY,
[type_code] [char](3) NOT NULL UNIQUE,
[type_name] [varchar](50) NOT NULL,
[max_stack_size] [tinyint] NOT NULL DEFAULT 99
);
CREATE TABLE [inventory].[magical_items](
[id] [bigint] IDENTITY(1,1) PRIMARY KEY,
[item_type_id] [int] NOT NULL,
[item_name] [nvarchar](255) NOT NULL,
[rarity] [varchar](20) NOT NULL,
[weight_kg] [float] NOT NULL,
[base_value] [money] NOT NULL,
[enchantment_level] [tinyint] NULL CHECK ([enchantment_level] BETWEEN 0 AND 10),
[discovered_date] [date] NULL,
[discovered_time] [time](7) NULL,
[full_discovered_at] [datetimeoffset](7) NULL,
CONSTRAINT [FK_items_types] FOREIGN KEY ([item_type_id])
REFERENCES [inventory].[item_types]([id])
);
-- Academy schema tables
CREATE TABLE [academy].[courses](
[course_id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
[course_code] [nvarchar](10) NOT NULL UNIQUE,
[course_name] [nvarchar](200) NOT NULL,
[credits] [decimal](3,1) NOT NULL,
[prerequisite_spell_id] [uniqueidentifier] NULL,
CONSTRAINT [FK_courses_spells] FOREIGN KEY ([prerequisite_spell_id])
REFERENCES [magic].[spells]([id])
);
CREATE TABLE [academy].[enrollments](
[enrollment_id] [bigint] IDENTITY(1,1) PRIMARY KEY,
[student_id] [uniqueidentifier] NOT NULL,
[course_id] [uniqueidentifier] NOT NULL,
[enrollment_date] [datetime2](0) NOT NULL DEFAULT GETDATE(),
[grade] [decimal](4,2) NULL CHECK ([grade] >= 0 AND [grade] <= 100),
[completion_status] [nvarchar](20) NOT NULL DEFAULT 'enrolled',
CONSTRAINT [FK_enrollments_courses] FOREIGN KEY ([course_id])
REFERENCES [academy].[courses]([course_id]) ON DELETE CASCADE,
CONSTRAINT [UQ_enrollment] UNIQUE ([student_id], [course_id])
);
-- Cross-schema relationships
CREATE TABLE [inventory].[spell_reagents](
[spell_id] [uniqueidentifier] NOT NULL,
[item_id] [bigint] NOT NULL,
[quantity_required] [smallint] NOT NULL DEFAULT 1,
PRIMARY KEY ([spell_id], [item_id]),
CONSTRAINT [FK_reagents_spells] FOREIGN KEY ([spell_id])
REFERENCES [magic].[spells]([id]) ON DELETE CASCADE,
CONSTRAINT [FK_reagents_items] FOREIGN KEY ([item_id])
REFERENCES [inventory].[magical_items]([id]) ON DELETE CASCADE
);
-- Additional indexes
CREATE INDEX [IX_spells_category] ON [magic].[spells] ([category_id]);
CREATE INDEX [IX_items_type_rarity] ON [inventory].[magical_items] ([item_type_id], [rarity]);
CREATE UNIQUE INDEX [UIX_items_name_type] ON [inventory].[magical_items] ([item_name], [item_type_id]);
`;
const result = await fromSQLServer(sql);
// Verify all tables are parsed
expect(result.tables).toHaveLength(7);
// Check schema assignment
expect(
result.tables.filter((t) => t.schema === 'magic')
).toHaveLength(2);
expect(
result.tables.filter((t) => t.schema === 'inventory')
).toHaveLength(3);
expect(
result.tables.filter((t) => t.schema === 'academy')
).toHaveLength(2);
// Verify cross-schema relationships
const crossSchemaRel = result.relationships.find(
(r) => r.sourceTable === 'courses' && r.targetTable === 'spells'
);
expect(crossSchemaRel).toBeDefined();
expect(crossSchemaRel?.sourceSchema).toBe('academy');
expect(crossSchemaRel?.targetSchema).toBe('magic');
// Check various SQL Server data types
const spellsTable = result.tables.find((t) => t.name === 'spells');
expect(
spellsTable?.columns.find((c) => c.name === 'mana_cost')?.type
).toBe('smallint');
expect(
spellsTable?.columns.find((c) => c.name === 'metadata')?.type
).toBe('xml');
const itemsTable = result.tables.find(
(t) => t.name === 'magical_items'
);
expect(
itemsTable?.columns.find((c) => c.name === 'weight_kg')?.type
).toBe('float');
expect(
itemsTable?.columns.find((c) => c.name === 'base_value')?.type
).toBe('money');
expect(
itemsTable?.columns.find((c) => c.name === 'discovered_date')
?.type
).toBe('date');
expect(
itemsTable?.columns.find((c) => c.name === 'discovered_time')
?.type
).toBe('time');
expect(
itemsTable?.columns.find((c) => c.name === 'full_discovered_at')
?.type
).toBe('datetimeoffset');
// Verify IDENTITY columns
const itemTypesTable = result.tables.find(
(t) => t.name === 'item_types'
);
expect(
itemTypesTable?.columns.find((c) => c.name === 'id')?.increment
).toBe(true);
});
});
});


@@ -0,0 +1,675 @@
import { describe, it, expect } from 'vitest';
import { fromSQLServer } from '../sqlserver';
describe('SQL Server Fantasy Database Import Tests', () => {
it('should parse the magical realm database correctly', async () => {
// Fantasy-themed SQL Server database with multiple schemas
const sql = `
USE [MagicalRealmDB]
GO
/****** Object: Schema [spellcasting] Script Date: 25.7.2025. 9:42:07 ******/
CREATE SCHEMA [spellcasting]
GO
/****** Object: Schema [enchantments] Script Date: 25.7.2025. 9:42:07 ******/
CREATE SCHEMA [enchantments]
GO
/****** Object: Schema [artifacts] Script Date: 25.7.2025. 9:42:07 ******/
CREATE SCHEMA [artifacts]
GO
/****** Object: Schema [wizards] Script Date: 25.7.2025. 9:42:07 ******/
CREATE SCHEMA [wizards]
GO
/****** Object: Table [spellcasting].[Spell] Script Date: 25.7.2025. 9:42:07 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE TABLE [spellcasting].[Spell](
[Id] [uniqueidentifier] NOT NULL,
[HelpId] [uniqueidentifier] NOT NULL,
[RealmId] [uniqueidentifier] NULL,
[IsDeleted] [bit] NOT NULL,
[CreatedAt] [datetime2](7) NOT NULL,
[DeletedAt] [datetime2](7) NULL,
[DeletedById] [uniqueidentifier] NULL,
[DeletedByFullName] [nvarchar](max) NULL,
[DeletedByEmail] [nvarchar](max) NULL,
[CreatedById] [uniqueidentifier] NULL,
[CreatedByUsername] [nvarchar](max) NOT NULL,
[UpdatedBy] [uniqueidentifier] NULL,
[UpdatedAt] [datetime2](7) NULL,
[PowerLevel] [decimal](18, 2) NOT NULL,
[Incantation] [nvarchar](max) NULL,
[ParentId] [uniqueidentifier] NULL,
[Name] [nvarchar](255) NOT NULL,
[Description] [nvarchar](max) NOT NULL,
[RunicInscription] [varchar](max) NULL,
CONSTRAINT [PK_Spell] PRIMARY KEY CLUSTERED
(
[Id] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
GO
/****** Object: Table [spellcasting].[SpellCasting] Script Date: 25.7.2025. 9:42:07 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE TABLE [spellcasting].[SpellCasting](
[Id] [uniqueidentifier] NOT NULL,
[HelpId] [uniqueidentifier] NOT NULL,
[RealmId] [uniqueidentifier] NULL,
[IsDeleted] [bit] NOT NULL,
[CreatedAt] [datetime2](7) NOT NULL,
[DeletedAt] [datetime2](7) NULL,
[DeletedById] [uniqueidentifier] NULL,
[DeletedByFullName] [nvarchar](max) NULL,
[DeletedByEmail] [nvarchar](max) NULL,
[CreatedById] [uniqueidentifier] NULL,
[CreatedByUsername] [nvarchar](max) NOT NULL,
[UpdatedBy] [uniqueidentifier] NULL,
[UpdatedAt] [datetime2](7) NULL,
[WizardLevel] [int] NOT NULL,
[ManaCost] [decimal](18, 2) NOT NULL,
[CastingTime] [decimal](18, 2) NULL,
[Components] [nvarchar](max) NULL,
[CastingNumber] [int] NULL,
[SuccessRate] [decimal](18, 2) NULL,
[CriticalChance] [decimal](18, 2) NULL,
[ExtendedDuration] [decimal](18, 2) NULL,
[Status] [int] NULL,
[SpellId] [uniqueidentifier] NOT NULL,
[CastingNotes] [nvarchar](max) NULL,
[ParentId] [uniqueidentifier] NULL,
CONSTRAINT [PK_SpellCasting] PRIMARY KEY CLUSTERED
(
[Id] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
GO
/****** Object: Table [enchantments].[MagicalItem] Script Date: 25.7.2025. 9:42:07 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE TABLE [enchantments].[MagicalItem](
[Id] [uniqueidentifier] NOT NULL,
[HelpId] [uniqueidentifier] NOT NULL,
[IsDeleted] [bit] NOT NULL,
[CreatedAt] [datetime2](7) NOT NULL,
[CreatedById] [uniqueidentifier] NULL,
[CreatedByUsername] [nvarchar](max) NOT NULL,
[WandId] [uniqueidentifier] NOT NULL,
[EnchanterId] [uniqueidentifier] NOT NULL,
[OrderNumber] [nvarchar](max) NOT NULL,
[EnchantmentDate] [datetime2](7) NOT NULL,
[IsCertified] [bit] NOT NULL,
[CertificationCode] [nvarchar](max) NOT NULL,
[DeletedAt] [datetime2](7) NULL,
[DeletedById] [uniqueidentifier] NULL,
[DeletedByFullName] [nvarchar](max) NULL,
[DeletedByEmail] [nvarchar](max) NULL,
[ParentId] [uniqueidentifier] NULL,
[ReasonForAction] [nvarchar](max) NULL,
[EnchantmentLevel] [int] NOT NULL,
CONSTRAINT [PK_MagicalItem] PRIMARY KEY CLUSTERED
(
[Id] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
GO
/****** Object: Table [enchantments].[EnchantmentFormula] Script Date: 25.7.2025. 9:42:07 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE TABLE [enchantments].[EnchantmentFormula](
[Id] [uniqueidentifier] NOT NULL,
[RealmId] [uniqueidentifier] NOT NULL,
[ParentId] [uniqueidentifier] NULL,
[DeletedAt] [datetime2](7) NULL,
[DeletedById] [uniqueidentifier] NULL,
[DeletedByFullName] [nvarchar](max) NULL,
[DeletedByEmail] [nvarchar](max) NULL,
[ReasonForAction] [nvarchar](max) NULL,
[HelpId] [uniqueidentifier] NOT NULL,
[IsDeleted] [bit] NOT NULL,
[CreatedAt] [datetime2](7) NOT NULL,
[CreatedById] [uniqueidentifier] NULL,
[CreatedByUsername] [nvarchar](max) NOT NULL,
[FormulaTypeId] [int] NOT NULL,
[Definition] [nvarchar](max) NOT NULL,
[Name] [nvarchar](max) NOT NULL,
[HasMultipleApplications] [bit] NOT NULL,
[StepNumber] [int] NOT NULL,
[Identifier] [int] NOT NULL,
CONSTRAINT [PK_EnchantmentFormula] PRIMARY KEY CLUSTERED
(
[Id] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
GO
/****** Object: Table [wizards].[Wizard] Script Date: 25.7.2025. 9:42:07 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE TABLE [wizards].[Wizard](
[Id] [uniqueidentifier] NOT NULL,
[HelpId] [uniqueidentifier] NOT NULL,
[IsDeleted] [bit] NOT NULL,
[CreatedAt] [datetime2](7) NOT NULL,
[CreatedById] [uniqueidentifier] NULL,
[DeletedAt] [datetime2](7) NULL,
[DeletedByEmail] [nvarchar](max) NULL,
[DeletedByFullName] [nvarchar](max) NULL,
[DeletedById] [uniqueidentifier] NULL,
[ParentId] [uniqueidentifier] NULL,
[CreatedByUsername] [nvarchar](max) NOT NULL,
[Name] [nvarchar](255) NOT NULL,
[Title] [nvarchar](255) NULL,
[Biography] [nvarchar](max) NULL,
[SpecialtySchool] [nvarchar](100) NULL,
[PowerLevel] [int] NOT NULL,
[JoinedGuildDate] [datetime2](7) NOT NULL,
[IsActive] [bit] NOT NULL,
[MagicalSignature] [nvarchar](max) NOT NULL,
[TowerId] [uniqueidentifier] NOT NULL,
[MentorId] [uniqueidentifier] NULL,
[SpellbookNotes] [varchar](max) NULL,
CONSTRAINT [PK_Wizard] PRIMARY KEY CLUSTERED
(
[Id] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY],
CONSTRAINT [AK_Wizard_HelpId] UNIQUE NONCLUSTERED
(
[HelpId] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
GO
/****** Object: Table [wizards].[WizardSpellbook] Script Date: 25.7.2025. 9:42:07 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE TABLE [wizards].[WizardSpellbook](
[Id] [uniqueidentifier] NOT NULL,
[HelpId] [uniqueidentifier] NOT NULL,
[IsDeleted] [bit] NOT NULL,
[CreatedAt] [datetime2](7) NOT NULL,
[CreatedById] [uniqueidentifier] NULL,
[CreatedByUsername] [nvarchar](max) NOT NULL,
[DeletedAt] [datetime2](7) NULL,
[DeletedByEmail] [nvarchar](max) NULL,
[DeletedByFullName] [nvarchar](max) NULL,
[DeletedById] [uniqueidentifier] NULL,
[SuccessRate] [decimal](18, 2) NOT NULL,
[ManaCostReduction] [decimal](18, 2) NOT NULL,
[CriticalBonus] [decimal](18, 2) NOT NULL,
[PageNumber] [int] NOT NULL,
[WizardId] [uniqueidentifier] NOT NULL,
[TowerId] [uniqueidentifier] NOT NULL,
[ParentId] [uniqueidentifier] NULL,
[ReasonForAction] [nvarchar](max) NULL,
[SpellId] [uniqueidentifier] NOT NULL,
[EnchanterId] [uniqueidentifier] NOT NULL,
[OrderNumber] [nvarchar](max) NOT NULL,
[LearnedDate] [datetime2](7) NOT NULL,
[IsMastered] [bit] NOT NULL,
[MasteryCertificate] [nvarchar](max) NOT NULL,
CONSTRAINT [PK_WizardSpellbook] PRIMARY KEY CLUSTERED
(
[Id] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
GO
/****** Object: Table [artifacts].[MagicSchool] Script Date: 25.7.2025. 9:42:07 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE TABLE [artifacts].[MagicSchool](
[Id] [int] IDENTITY(1,1) NOT NULL,
[IsDeleted] [bit] NOT NULL,
[DeletedAt] [datetime2](7) NULL,
[Value] [nvarchar](max) NOT NULL,
CONSTRAINT [PK_MagicSchool] PRIMARY KEY CLUSTERED
(
[Id] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
GO
/****** Object: Table [artifacts].[ArtifactType] Script Date: 25.7.2025. 9:42:07 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE TABLE [artifacts].[ArtifactType](
[Id] [int] IDENTITY(1,1) NOT NULL,
[IsDeleted] [bit] NOT NULL,
[DeletedAt] [datetime2](7) NULL,
[Name] [nvarchar](max) NOT NULL,
[Key] [nvarchar](max) NOT NULL,
[ItemCategoryId] [int] NOT NULL,
CONSTRAINT [PK_ArtifactType] PRIMARY KEY CLUSTERED
(
[Id] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
GO
/****** Object: Table [artifacts].[AncientRelic] Script Date: 25.7.2025. 9:42:07 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE TABLE [artifacts].[AncientRelic](
[Id] [uniqueidentifier] NOT NULL,
[HelpId] [uniqueidentifier] NOT NULL,
[IsDeleted] [bit] NOT NULL,
[CreatedAt] [datetime2](7) NOT NULL,
[CreatedById] [uniqueidentifier] NULL,
[CreatedByUsername] [nvarchar](max) NOT NULL,
[DiscoveryDate] [datetime2](7) NULL,
[VaultId] [uniqueidentifier] NULL,
[AppraiserId] [uniqueidentifier] NULL,
[NumberOfRunes] [int] NULL,
[MagicalAura] [decimal](18, 2) NULL,
[AuraReadingDeviceId] [uniqueidentifier] NULL,
[PowerOutput] [decimal](18, 2) NULL,
[PowerGaugeTypeId] [int] NULL,
[AgeInCenturies] [decimal](18, 2) NULL,
[CarbonDatingDeviceId] [uniqueidentifier] NULL,
[HistoricalEra] [nvarchar](max) NULL,
[EraVerificationMethod] [int] NULL,
[Curse] [nvarchar](max) NULL,
[CurseDetectorId] [uniqueidentifier] NULL,
[CurseStrength] [decimal](18, 2) NULL,
[ProtectionLevel] [int] NULL,
[MagicalResonance] [decimal](18, 2) NULL,
[ResonanceWithAdjustment] [decimal](18, 2) NULL,
[AuthenticityVerified] [bit] NOT NULL,
[VerificationWizardId] [uniqueidentifier] NULL,
[RestorationNeeded] [bit] NOT NULL,
[RestorationCost] [decimal](18, 2) NULL,
[EstimatedValue] [decimal](18, 2) NULL,
[MarketDemand] [decimal](18, 2) NULL,
[ArtifactCatalogId] [uniqueidentifier] NULL,
[OriginRealm] [nvarchar](max) NULL,
[CreatorWizard] [nvarchar](max) NULL,
[LegendaryStatus] [bit] NOT NULL,
[ParentId] [uniqueidentifier] NULL,
[IsSealed] [bit] NOT NULL,
CONSTRAINT [PK_AncientRelic] PRIMARY KEY CLUSTERED
(
[Id] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY],
CONSTRAINT [AK_AncientRelic_HelpId] UNIQUE NONCLUSTERED
(
[HelpId] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
GO
/****** Object: Table [artifacts].[RelicPowerMeasurements] Script Date: 25.7.2025. 9:42:07 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE TABLE [artifacts].[RelicPowerMeasurements](
[Id] [uniqueidentifier] NOT NULL,
[HelpId] [uniqueidentifier] NOT NULL,
[IsDeleted] [bit] NOT NULL,
[CreatedAt] [datetime2](7) NOT NULL,
[CreatedById] [uniqueidentifier] NULL,
[ParentId] [uniqueidentifier] NULL,
[CreatedByUsername] [nvarchar](max) NOT NULL,
[MagicalEnergyMeasured] [decimal](31, 15) NOT NULL,
[AuraIntensityMeasured] [decimal](31, 15) NOT NULL,
[ResonanceFrequencyMeasured] [decimal](31, 15) NOT NULL,
[DimensionalFluxMeasured] [decimal](31, 15) NOT NULL,
[MagicalEnergyCorrection] [decimal](31, 15) NULL,
[AuraIntensityCorrection] [decimal](31, 15) NULL,
[ResonanceFrequencyCorrection] [decimal](31, 15) NULL,
[DimensionalFluxCorrection] [decimal](31, 15) NULL,
[MagicalEnergyCalculated] [decimal](31, 15) NULL,
[AuraIntensityCalculated] [decimal](31, 15) NULL,
[ResonanceFrequencyCalculated] [decimal](31, 15) NULL,
[DimensionalFluxCalculated] [decimal](31, 15) NULL,
[MagicalEnergyUncertainty] [decimal](31, 15) NULL,
[AuraIntensityUncertainty] [decimal](31, 15) NULL,
[ResonanceFrequencyUncertainty] [decimal](31, 15) NULL,
[DimensionalFluxUncertainty] [decimal](31, 15) NULL,
[MagicalEnergyDrift] [decimal](31, 15) NULL,
[AuraIntensityDrift] [decimal](31, 15) NULL,
[ResonanceFrequencyDrift] [decimal](31, 15) NULL,
[DimensionalFluxDrift] [decimal](31, 15) NULL,
[AncientRelicId] [uniqueidentifier] NULL,
CONSTRAINT [PK_RelicPowerMeasurements] PRIMARY KEY CLUSTERED
(
[Id] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
GO
`;
const result = await fromSQLServer(sql);
// Get unique schemas from parsed tables
const foundSchemas = [
...new Set(result.tables.map((t) => t.schema || 'dbo')),
];
// Verify we found tables in multiple schemas
expect(foundSchemas.length).toBeGreaterThan(1);
expect(foundSchemas).toContain('spellcasting');
expect(foundSchemas).toContain('enchantments');
expect(foundSchemas).toContain('wizards');
expect(foundSchemas).toContain('artifacts');
// Check for some specific tables we know should exist
expect(
result.tables.some(
(t) => t.name === 'Spell' && t.schema === 'spellcasting'
)
).toBe(true);
expect(
result.tables.some(
(t) => t.name === 'SpellCasting' && t.schema === 'spellcasting'
)
).toBe(true);
expect(
result.tables.some(
(t) => t.name === 'Wizard' && t.schema === 'wizards'
)
).toBe(true);
// Check data types are handled correctly
const spellTable = result.tables.find(
(t) => t.name === 'Spell' && t.schema === 'spellcasting'
);
expect(spellTable).toBeDefined();
if (spellTable) {
expect(spellTable.columns.find((c) => c.name === 'Id')?.type).toBe(
'uniqueidentifier'
);
expect(
spellTable.columns.find((c) => c.name === 'PowerLevel')?.type
).toBe('decimal');
expect(
spellTable.columns.find((c) => c.name === 'IsDeleted')?.type
).toBe('bit');
expect(
spellTable.columns.find((c) => c.name === 'CreatedAt')?.type
).toBe('datetime2');
// Check nvarchar(max) fields
const incantationField = spellTable.columns.find(
(c) => c.name === 'Incantation'
);
expect(incantationField?.type).toBe('nvarchar');
expect(incantationField?.typeArgs).toBe('max');
// Check varchar(max) fields
const runicField = spellTable.columns.find(
(c) => c.name === 'RunicInscription'
);
expect(runicField?.type).toBe('varchar');
expect(runicField?.typeArgs).toBe('max');
}
// Check IDENTITY columns
const magicSchoolTable = result.tables.find(
(t) => t.name === 'MagicSchool' && t.schema === 'artifacts'
);
expect(magicSchoolTable).toBeDefined();
if (magicSchoolTable) {
const idColumn = magicSchoolTable.columns.find(
(c) => c.name === 'Id'
);
expect(idColumn?.increment).toBe(true);
expect(idColumn?.type).toBe('int');
}
// Check unique constraints converted to indexes
const wizardTable = result.tables.find(
(t) => t.name === 'Wizard' && t.schema === 'wizards'
);
expect(wizardTable).toBeDefined();
if (wizardTable) {
expect(wizardTable.indexes).toHaveLength(1);
expect(wizardTable.indexes[0].unique).toBe(true);
expect(wizardTable.indexes[0].columns).toContain('HelpId');
expect(wizardTable.indexes[0].name).toBe('AK_Wizard_HelpId');
}
});
it('should handle ALTER TABLE ADD CONSTRAINT statements for magical artifacts', async () => {
const sql = `
CREATE TABLE [artifacts].[MagicalArtifact] (
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
[Name] [nvarchar](255) NOT NULL,
[PowerLevel] [int] NOT NULL
);
CREATE TABLE [enchantments].[ArtifactEnchantment] (
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
[PrimaryArtifactId] [uniqueidentifier] NOT NULL,
[SecondaryArtifactId] [uniqueidentifier] NOT NULL,
[EnchantmentStrength] [decimal](18, 2) NOT NULL
);
ALTER TABLE [enchantments].[ArtifactEnchantment]
ADD CONSTRAINT [FK_ArtifactEnchantment_Primary]
FOREIGN KEY ([PrimaryArtifactId])
REFERENCES [artifacts].[MagicalArtifact]([Id]);
ALTER TABLE [enchantments].[ArtifactEnchantment]
ADD CONSTRAINT [FK_ArtifactEnchantment_Secondary]
FOREIGN KEY ([SecondaryArtifactId])
REFERENCES [artifacts].[MagicalArtifact]([Id]);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(2);
expect(result.relationships).toHaveLength(2);
// Check both foreign keys were parsed
const primaryRel = result.relationships.find(
(r) =>
r.sourceColumn === 'PrimaryArtifactId' &&
r.name === 'FK_ArtifactEnchantment_Primary'
);
expect(primaryRel).toBeDefined();
expect(primaryRel?.sourceTable).toBe('ArtifactEnchantment');
expect(primaryRel?.targetTable).toBe('MagicalArtifact');
const secondaryRel = result.relationships.find(
(r) =>
r.sourceColumn === 'SecondaryArtifactId' &&
r.name === 'FK_ArtifactEnchantment_Secondary'
);
expect(secondaryRel).toBeDefined();
expect(secondaryRel?.sourceTable).toBe('ArtifactEnchantment');
expect(secondaryRel?.targetTable).toBe('MagicalArtifact');
});
it('should handle tables with many columns including nvarchar(max)', async () => {
const sql = `
CREATE TABLE [wizards].[SpellResearchEnvironment](
[Id] [uniqueidentifier] NOT NULL,
[HelpId] [uniqueidentifier] NOT NULL,
[IsDeleted] [bit] NOT NULL,
[CreatedAt] [datetime2](7) NOT NULL,
[CreatedById] [uniqueidentifier] NULL,
[CreatedByUsername] [nvarchar](max) NOT NULL,
[ResearchDate] [datetime2](7) NULL,
[LaboratoryId] [uniqueidentifier] NULL,
[EvaluationCriteriaId] [uniqueidentifier] NULL,
[NumberOfExperiments] [int] NULL,
[ManaLevelStart] [decimal](18, 2) NULL,
[ManaGaugeId] [uniqueidentifier] NULL,
[ManaLevelEnd] [decimal](18, 2) NULL,
[ManaGaugeTypeId] [int] NULL,
[AetherDensityStart] [decimal](18, 2) NULL,
[AetherGaugeId] [uniqueidentifier] NULL,
[AetherDensityEnd] [decimal](18, 2) NULL,
[AetherGaugeTypeId] [int] NULL,
[MagicalFieldStart] [decimal](18, 2) NULL,
[MagicalFieldGaugeId] [uniqueidentifier] NULL,
[MagicalFieldEnd] [decimal](18, 2) NULL,
[MagicalFieldGaugeTypeId] [int] NULL,
[MagicalFieldWithCorrection] [decimal](18, 2) NULL,
[AetherDensityWithCorrection] [decimal](18, 2) NULL,
[ElementalBalanceStart] [decimal](18, 2) NULL,
[ElementalBalanceGaugeId] [uniqueidentifier] NULL,
[ElementalBalanceEnd] [decimal](18, 2) NULL,
[ElementalBalanceGaugeTypeId] [int] NULL,
[ManaLevelWithCorrection] [decimal](18, 2) NULL,
[ElementalBalanceWithCorrection] [decimal](18, 2) NULL,
[SpellResearchId] [uniqueidentifier] NULL,
[AetherDensityValue] [decimal](18, 2) NULL,
[MagicalFieldValue] [decimal](18, 2) NULL,
[ManaLevelValue] [decimal](18, 2) NULL,
[ElementalBalanceValue] [decimal](18, 2) NULL,
[ParentId] [uniqueidentifier] NULL,
[IsLocked] [bit] NOT NULL,
CONSTRAINT [PK_SpellResearchEnvironment] PRIMARY KEY CLUSTERED ([Id] ASC),
CONSTRAINT [AK_SpellResearchEnvironment_HelpId] UNIQUE NONCLUSTERED ([HelpId] ASC)
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(1);
const table = result.tables[0];
// Should have all columns
expect(table.columns.length).toBeGreaterThan(30);
// Check nvarchar(max) handling
expect(
table.columns.find((c) => c.name === 'CreatedByUsername')?.type
).toBe('nvarchar');
// Check decimal precision handling
const decimalColumn = table.columns.find(
(c) => c.name === 'ManaLevelStart'
);
expect(decimalColumn?.type).toBe('decimal');
expect(decimalColumn?.typeArgs).toEqual([18, 2]);
// Check unique constraint was converted to index
expect(table.indexes).toHaveLength(1);
expect(table.indexes[0].name).toBe(
'AK_SpellResearchEnvironment_HelpId'
);
expect(table.indexes[0].unique).toBe(true);
expect(table.indexes[0].columns).toContain('HelpId');
});
it('should handle complex decimal types like decimal(31, 15)', async () => {
const sql = `
CREATE TABLE [artifacts].[RelicPowerCalculatedValues](
[Id] [uniqueidentifier] NOT NULL,
[MagicalEnergyMeasured] [decimal](31, 15) NOT NULL,
[AuraIntensityMeasured] [decimal](31, 15) NOT NULL,
[ResonanceFrequencyMeasured] [decimal](31, 15) NOT NULL,
[DimensionalFluxMeasured] [decimal](31, 15) NOT NULL,
[MagicalEnergyCorrection] [decimal](31, 15) NULL,
[AuraIntensityCorrection] [decimal](31, 15) NULL,
[ResonanceFrequencyCorrection] [decimal](31, 15) NULL,
[DimensionalFluxCorrection] [decimal](31, 15) NULL,
CONSTRAINT [PK_RelicPowerCalculatedValues] PRIMARY KEY CLUSTERED ([Id] ASC)
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(1);
const table = result.tables[0];
// Check high precision decimal handling
const magicalEnergyColumn = table.columns.find(
(c) => c.name === 'MagicalEnergyMeasured'
);
expect(magicalEnergyColumn?.type).toBe('decimal');
expect(magicalEnergyColumn?.typeArgs).toEqual([31, 15]);
});
it('should handle IDENTITY columns in artifact lookup tables', async () => {
const sql = `
CREATE TABLE [artifacts].[SpellComponent](
[Id] [int] IDENTITY(1,1) NOT NULL,
[IsDeleted] [bit] NOT NULL,
[DeletedAt] [datetime2](7) NULL,
[ComponentName] [nvarchar](max) NOT NULL,
CONSTRAINT [PK_SpellComponent] PRIMARY KEY CLUSTERED ([Id] ASC)
);
CREATE TABLE [artifacts].[RuneType](
[Id] [int] IDENTITY(1,1) NOT NULL,
[IsDeleted] [bit] NOT NULL,
[DeletedAt] [datetime2](7) NULL,
[Name] [nvarchar](max) NOT NULL,
[Symbol] [nvarchar](max) NOT NULL,
[MagicSchoolId] [int] NOT NULL,
CONSTRAINT [PK_RuneType] PRIMARY KEY CLUSTERED ([Id] ASC)
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(2);
// Both tables should have IDENTITY columns
result.tables.forEach((table) => {
const idColumn = table.columns.find((c) => c.name === 'Id');
expect(idColumn?.increment).toBe(true);
expect(idColumn?.type).toBe('int');
});
});
it('should parse all table constraints with complex WITH options', async () => {
const sql = `
CREATE TABLE [dbo].[MagicalRegistry](
[Id] [uniqueidentifier] NOT NULL,
[RegistrationCode] [nvarchar](50) NOT NULL,
[PowerLevel] [int] NOT NULL,
CONSTRAINT [PK_MagicalRegistry] PRIMARY KEY CLUSTERED
(
[Id] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY],
CONSTRAINT [UQ_MagicalRegistry_Code] UNIQUE NONCLUSTERED
(
[RegistrationCode] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(1);
const table = result.tables[0];
// Primary key should be set
expect(table.columns.find((c) => c.name === 'Id')?.primaryKey).toBe(
true
);
// Unique constraint should be converted to index
expect(table.indexes).toHaveLength(1);
expect(table.indexes[0].unique).toBe(true);
expect(table.indexes[0].columns).toContain('RegistrationCode');
});
});


@@ -0,0 +1,253 @@
import { describe, it, expect } from 'vitest';
import { fromSQLServer } from '../sqlserver';
describe('SQL Server Foreign Key Relationship Tests', () => {
it('should properly link foreign key relationships with correct table IDs', async () => {
const sql = `
CREATE TABLE [magic].[schools] (
[id] [uniqueidentifier] PRIMARY KEY,
[name] [nvarchar](100) NOT NULL
);
CREATE TABLE [magic].[wizards] (
[id] [uniqueidentifier] PRIMARY KEY,
[school_id] [uniqueidentifier] NOT NULL,
[name] [nvarchar](100) NOT NULL
);
ALTER TABLE [magic].[wizards] WITH CHECK ADD CONSTRAINT [FK_wizards_schools]
FOREIGN KEY ([school_id]) REFERENCES [magic].[schools]([id]);
`;
const result = await fromSQLServer(sql);
// Check tables are parsed
expect(result.tables).toHaveLength(2);
const schoolsTable = result.tables.find((t) => t.name === 'schools');
const wizardsTable = result.tables.find((t) => t.name === 'wizards');
expect(schoolsTable).toBeDefined();
expect(wizardsTable).toBeDefined();
// Check relationship is parsed
expect(result.relationships).toHaveLength(1);
const rel = result.relationships[0];
// Verify the relationship has proper table IDs
expect(rel.sourceTableId).toBe(wizardsTable!.id);
expect(rel.targetTableId).toBe(schoolsTable!.id);
// Verify other relationship properties
expect(rel.sourceTable).toBe('wizards');
expect(rel.targetTable).toBe('schools');
expect(rel.sourceColumn).toBe('school_id');
expect(rel.targetColumn).toBe('id');
expect(rel.sourceSchema).toBe('magic');
expect(rel.targetSchema).toBe('magic');
});
it('should handle cross-schema foreign key relationships', async () => {
const sql = `
CREATE TABLE [users].[accounts] (
[id] [int] PRIMARY KEY,
[username] [nvarchar](50) NOT NULL
);
CREATE TABLE [orders].[purchases] (
[id] [int] PRIMARY KEY,
[account_id] [int] NOT NULL
);
ALTER TABLE [orders].[purchases] ADD CONSTRAINT [FK_purchases_accounts]
FOREIGN KEY ([account_id]) REFERENCES [users].[accounts]([id]);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(2);
expect(result.relationships).toHaveLength(1);
const rel = result.relationships[0];
const accountsTable = result.tables.find(
(t) => t.name === 'accounts' && t.schema === 'users'
);
const purchasesTable = result.tables.find(
(t) => t.name === 'purchases' && t.schema === 'orders'
);
// Verify cross-schema relationship IDs are properly linked
expect(rel.sourceTableId).toBe(purchasesTable!.id);
expect(rel.targetTableId).toBe(accountsTable!.id);
});
it('should parse complex foreign keys from magical realm database with proper table IDs', async () => {
// Fantasy-themed SQL with multiple schemas and relationships
const sql = `
-- Spell casting schema
CREATE SCHEMA [spellcasting];
GO
-- Create spell table
CREATE TABLE [spellcasting].[Spell] (
[Id] [uniqueidentifier] NOT NULL,
[Name] [nvarchar](255) NOT NULL,
[School] [nvarchar](100) NOT NULL,
[Level] [int] NOT NULL,
[Description] [nvarchar](max) NOT NULL,
CONSTRAINT [PK_Spell] PRIMARY KEY CLUSTERED ([Id] ASC)
);
GO
-- Create spell casting process table
CREATE TABLE [spellcasting].[SpellCastingProcess] (
[Id] [uniqueidentifier] NOT NULL,
[SpellId] [uniqueidentifier] NOT NULL,
[WizardId] [uniqueidentifier] NOT NULL,
[CastingDate] [datetime2](7) NOT NULL,
[SuccessRate] [decimal](18, 2) NOT NULL,
[ManaCost] [int] NOT NULL,
[Notes] [nvarchar](max) NULL,
CONSTRAINT [PK_SpellCastingProcess] PRIMARY KEY CLUSTERED ([Id] ASC)
);
GO
-- Wizards schema
CREATE SCHEMA [wizards];
GO
-- Create wizard table
CREATE TABLE [wizards].[Wizard] (
[Id] [uniqueidentifier] NOT NULL,
[Name] [nvarchar](255) NOT NULL,
[Title] [nvarchar](100) NULL,
[Level] [int] NOT NULL,
[Specialization] [nvarchar](100) NULL,
CONSTRAINT [PK_Wizard] PRIMARY KEY CLUSTERED ([Id] ASC)
);
GO
-- Create wizard apprentice table
CREATE TABLE [wizards].[Apprentice] (
[Id] [uniqueidentifier] NOT NULL,
[WizardId] [uniqueidentifier] NOT NULL,
[MentorId] [uniqueidentifier] NOT NULL,
[StartDate] [datetime2](7) NOT NULL,
[EndDate] [datetime2](7) NULL,
CONSTRAINT [PK_Apprentice] PRIMARY KEY CLUSTERED ([Id] ASC)
);
GO
-- Add foreign key constraints
ALTER TABLE [spellcasting].[SpellCastingProcess]
ADD CONSTRAINT [FK_SpellCastingProcess_Spell]
FOREIGN KEY ([SpellId])
REFERENCES [spellcasting].[Spell]([Id]);
GO
ALTER TABLE [spellcasting].[SpellCastingProcess]
ADD CONSTRAINT [FK_SpellCastingProcess_Wizard]
FOREIGN KEY ([WizardId])
REFERENCES [wizards].[Wizard]([Id]);
GO
ALTER TABLE [wizards].[Apprentice]
ADD CONSTRAINT [FK_Apprentice_Wizard]
FOREIGN KEY ([WizardId])
REFERENCES [wizards].[Wizard]([Id]);
GO
ALTER TABLE [wizards].[Apprentice]
ADD CONSTRAINT [FK_Apprentice_Mentor]
FOREIGN KEY ([MentorId])
REFERENCES [wizards].[Wizard]([Id]);
GO
`;
const result = await fromSQLServer(sql);
// Debug output
console.log('Total tables:', result.tables.length);
console.log('Total relationships:', result.relationships.length);
// Check if we have the expected number of tables and relationships
expect(result.tables).toHaveLength(4);
expect(result.relationships).toHaveLength(4);
// Check a specific relationship we know should exist
const spellCastingRel = result.relationships.find(
(r) =>
r.sourceTable === 'SpellCastingProcess' &&
r.targetTable === 'Spell' &&
r.sourceColumn === 'SpellId'
);
expect(spellCastingRel).toBeDefined();
if (spellCastingRel) {
// Find the corresponding tables
const spellTable = result.tables.find(
(t) => t.name === 'Spell' && t.schema === 'spellcasting'
);
const spellCastingProcessTable = result.tables.find(
(t) =>
t.name === 'SpellCastingProcess' &&
t.schema === 'spellcasting'
);
console.log('SpellCastingProcess relationship:', {
sourceTableId: spellCastingRel.sourceTableId,
targetTableId: spellCastingRel.targetTableId,
spellCastingProcessTableId: spellCastingProcessTable?.id,
spellTableId: spellTable?.id,
isSourceIdValid:
spellCastingRel.sourceTableId ===
spellCastingProcessTable?.id,
isTargetIdValid:
spellCastingRel.targetTableId === spellTable?.id,
});
// Verify the IDs are properly linked
expect(spellCastingRel.sourceTableId).toBeTruthy();
expect(spellCastingRel.targetTableId).toBeTruthy();
expect(spellCastingRel.sourceTableId).toBe(
spellCastingProcessTable!.id
);
expect(spellCastingRel.targetTableId).toBe(spellTable!.id);
}
// Check the apprentice self-referencing relationships
const apprenticeWizardRel = result.relationships.find(
(r) =>
r.sourceTable === 'Apprentice' &&
r.targetTable === 'Wizard' &&
r.sourceColumn === 'WizardId'
);
const apprenticeMentorRel = result.relationships.find(
(r) =>
r.sourceTable === 'Apprentice' &&
r.targetTable === 'Wizard' &&
r.sourceColumn === 'MentorId'
);
expect(apprenticeWizardRel).toBeDefined();
expect(apprenticeMentorRel).toBeDefined();
// Check that all relationships have valid table IDs
const relationshipsWithMissingIds = result.relationships.filter(
(r) =>
!r.sourceTableId ||
!r.targetTableId ||
r.sourceTableId === '' ||
r.targetTableId === ''
);
if (relationshipsWithMissingIds.length > 0) {
console.log(
'Relationships with missing IDs:',
relationshipsWithMissingIds.slice(0, 5)
);
}
expect(relationshipsWithMissingIds).toHaveLength(0);
});
});

View File

@@ -0,0 +1,198 @@
import { describe, it, expect } from 'vitest';
import { fromSQLServer } from '../sqlserver';
import { convertToChartDBDiagram } from '../../../common';
import { DatabaseType } from '@/lib/domain/database-type';
describe('SQL Server varchar(max) and nvarchar(max) preservation', () => {
it('should preserve varchar(max) and nvarchar(max) in column definitions', async () => {
const sql = `
CREATE TABLE [dbo].[magical_texts] (
[Id] [uniqueidentifier] NOT NULL PRIMARY KEY,
[Title] [nvarchar](255) NOT NULL,
[Description] [nvarchar](max) NULL,
[Content] [varchar](max) NOT NULL,
[ShortNote] [varchar](100) NULL,
[Metadata] [nvarchar](4000) NULL
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(1);
const table = result.tables[0];
expect(table.columns).toHaveLength(6);
// Check that max is preserved in typeArgs
const descriptionCol = table.columns.find(
(c) => c.name === 'Description'
);
expect(descriptionCol).toBeDefined();
expect(descriptionCol?.type).toBe('nvarchar');
expect(descriptionCol?.typeArgs).toBe('max');
const contentCol = table.columns.find((c) => c.name === 'Content');
expect(contentCol).toBeDefined();
expect(contentCol?.type).toBe('varchar');
expect(contentCol?.typeArgs).toBe('max');
// Check that numeric lengths are preserved as arrays
const titleCol = table.columns.find((c) => c.name === 'Title');
expect(titleCol).toBeDefined();
expect(titleCol?.type).toBe('nvarchar');
expect(titleCol?.typeArgs).toEqual([255]);
const shortNoteCol = table.columns.find((c) => c.name === 'ShortNote');
expect(shortNoteCol).toBeDefined();
expect(shortNoteCol?.type).toBe('varchar');
expect(shortNoteCol?.typeArgs).toEqual([100]);
});
it('should convert varchar(max) to characterMaximumLength field in diagram', async () => {
const sql = `
CREATE TABLE [dbo].[spell_scrolls] (
[Id] [int] IDENTITY(1,1) PRIMARY KEY,
[SpellName] [nvarchar](50) NOT NULL,
[Incantation] [nvarchar](max) NOT NULL,
[Instructions] [varchar](max) NULL,
[PowerLevel] [decimal](10, 2) NOT NULL
);
`;
const result = await fromSQLServer(sql);
const diagram = convertToChartDBDiagram(
result,
DatabaseType.SQL_SERVER,
DatabaseType.SQL_SERVER
);
expect(diagram.tables).toBeDefined();
expect(diagram.tables).toHaveLength(1);
const table = diagram.tables![0];
// Check that 'max' is preserved in characterMaximumLength
const incantationField = table.fields.find(
(f) => f.name === 'Incantation'
);
expect(incantationField).toBeDefined();
expect(incantationField?.characterMaximumLength).toBe('max');
const instructionsField = table.fields.find(
(f) => f.name === 'Instructions'
);
expect(instructionsField).toBeDefined();
expect(instructionsField?.characterMaximumLength).toBe('max');
// Check that numeric lengths are preserved
const spellNameField = table.fields.find((f) => f.name === 'SpellName');
expect(spellNameField).toBeDefined();
expect(spellNameField?.characterMaximumLength).toBe('50');
// Check decimal precision/scale
const powerLevelField = table.fields.find(
(f) => f.name === 'PowerLevel'
);
expect(powerLevelField).toBeDefined();
expect(powerLevelField?.precision).toBe(10);
expect(powerLevelField?.scale).toBe(2);
});
it('should handle mixed varchar types with schema and relationships', async () => {
const sql = `
CREATE TABLE [content].[authors] (
[Id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
[Name] [nvarchar](100) NOT NULL,
[Bio] [nvarchar](max) NULL
);
CREATE TABLE [content].[books] (
[Id] [uniqueidentifier] PRIMARY KEY DEFAULT NEWID(),
[AuthorId] [uniqueidentifier] NOT NULL,
[Title] [nvarchar](500) NOT NULL,
[Summary] [nvarchar](max) NULL,
[FullText] [varchar](max) NOT NULL,
[ISBN] [varchar](13) NULL,
CONSTRAINT [FK_books_authors] FOREIGN KEY ([AuthorId]) REFERENCES [content].[authors]([Id])
);
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(2);
// Check authors table
const authorsTable = result.tables.find((t) => t.name === 'authors');
expect(authorsTable).toBeDefined();
const bioCol = authorsTable?.columns.find((c) => c.name === 'Bio');
expect(bioCol?.typeArgs).toBe('max');
// Check books table
const booksTable = result.tables.find((t) => t.name === 'books');
expect(booksTable).toBeDefined();
const summaryCol = booksTable?.columns.find(
(c) => c.name === 'Summary'
);
expect(summaryCol?.typeArgs).toBe('max');
const fullTextCol = booksTable?.columns.find(
(c) => c.name === 'FullText'
);
expect(fullTextCol?.typeArgs).toBe('max');
const isbnCol = booksTable?.columns.find((c) => c.name === 'ISBN');
expect(isbnCol?.typeArgs).toEqual([13]);
// Verify relationship is preserved
expect(result.relationships).toHaveLength(1);
expect(result.relationships[0].sourceTable).toBe('books');
expect(result.relationships[0].targetTable).toBe('authors');
});
it('should handle complex table with various SQL Server features including varchar(max)', async () => {
const sql = `
            CREATE TABLE [reporting].[wizard_performance](
[Id] [bigint] IDENTITY(1,1) NOT NULL,
[WizardId] [uniqueidentifier] NOT NULL,
[EvaluationDate] [datetime2](7) NOT NULL,
[PerformanceScore] [decimal](5, 2) NOT NULL,
[Comments] [nvarchar](max) NULL,
[DetailedReport] [varchar](max) NULL,
[Signature] [varbinary](max) NULL,
[ReviewerNotes] [text] NULL,
[IsActive] [bit] NOT NULL DEFAULT 1,
CONSTRAINT [PK_wizard_performance] PRIMARY KEY CLUSTERED ([Id] ASC)
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY];
`;
const result = await fromSQLServer(sql);
expect(result.tables).toHaveLength(1);
const table = result.tables[0];
// Check varchar(max) columns
const commentsCol = table.columns.find((c) => c.name === 'Comments');
expect(commentsCol?.type).toBe('nvarchar');
expect(commentsCol?.typeArgs).toBe('max');
const reportCol = table.columns.find(
(c) => c.name === 'DetailedReport'
);
expect(reportCol?.type).toBe('varchar');
expect(reportCol?.typeArgs).toBe('max');
// Note: varbinary(max) should also be preserved but might need special handling
const signatureCol = table.columns.find((c) => c.name === 'Signature');
expect(signatureCol?.type).toBe('varbinary');
// varbinary(max) handling might differ
// Check other column types
const scoreCol = table.columns.find(
(c) => c.name === 'PerformanceScore'
);
expect(scoreCol?.typeArgs).toEqual([5, 2]);
const idCol = table.columns.find((c) => c.name === 'Id');
expect(idCol?.increment).toBe(true);
});
});

View File

@@ -7,111 +7,126 @@ import type {
SQLForeignKey,
SQLASTNode,
} from '../../common';
import { buildSQLFromAST } from '../../common';
import { DatabaseType } from '@/lib/domain/database-type';
import type {
TableReference,
ColumnReference,
ColumnDefinition,
ConstraintDefinition,
CreateTableStatement,
CreateIndexStatement,
AlterTableStatement,
} from './sqlserver-common';
import {
parserOpts,
extractColumnName,
getTypeArgs,
findTableWithSchemaSupport,
} from './sqlserver-common';
/**
* Helper function to safely build SQL from AST nodes, handling null/undefined/invalid cases
*/
function safelyBuildSQLFromAST(ast: unknown): string | undefined {
if (!ast) return undefined;
// Make sure it's a valid AST node with a 'type' property
if (typeof ast === 'object' && ast !== null && 'type' in ast) {
return buildSQLFromAST(ast as SQLASTNode, DatabaseType.SQL_SERVER);
}
// Return string representation for non-AST objects
if (ast !== null && (typeof ast === 'string' || typeof ast === 'number')) {
return String(ast);
}
return undefined;
}
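// A quick behaviour sketch (illustration only, derived from the branches above;
// the object literal is just a node-sql-parser-style node, not taken from the source):
//   safelyBuildSQLFromAST(null)                      -> undefined
//   safelyBuildSQLFromAST(42)                        -> '42'
//   safelyBuildSQLFromAST('GETDATE()')               -> 'GETDATE()'
//   safelyBuildSQLFromAST({ type: 'number', value: 1 })
//       -> delegates to buildSQLFromAST(node, DatabaseType.SQL_SERVER)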
/**
* Preprocess SQL Server script to remove or modify parts that the parser can't handle
*/
function preprocessSQLServerScript(sqlContent: string): string {
// 1. Remove IF NOT EXISTS ... BEGIN ... END blocks (typically used for schema creation)
// 1. Remove USE statements
sqlContent = sqlContent.replace(/USE\s+\[[^\]]+\]\s*;?/gi, '');
// 2. Remove SET statements
sqlContent = sqlContent.replace(/SET\s+\w+\s+\w+\s*;?/gi, '');
// 3. Remove GO statements (batch separators)
sqlContent = sqlContent.replace(/\bGO\b/gi, ';');
// 4. Remove CREATE SCHEMA statements
sqlContent = sqlContent.replace(/CREATE\s+SCHEMA\s+\[[^\]]+\]\s*;?/gi, '');
// 5. Remove IF NOT EXISTS ... BEGIN ... END blocks
sqlContent = sqlContent.replace(
/IF\s+NOT\s+EXISTS\s*\([^)]+\)\s*BEGIN\s+[^;]+;\s*END;?/gi,
''
);
// 2. Remove any GO statements (batch separators)
sqlContent = sqlContent.replace(/\bGO\b/gi, ';');
// 3. Remove any EXEC statements
// 6. Remove any EXEC statements
sqlContent = sqlContent.replace(/EXEC\s*\([^)]+\)\s*;?/gi, '');
sqlContent = sqlContent.replace(/EXEC\s+[^;]+;/gi, '');
// 4. Replace any remaining procedural code blocks that might cause issues
// 7. Replace any remaining procedural code blocks
sqlContent = sqlContent.replace(
/BEGIN\s+TRANSACTION|COMMIT\s+TRANSACTION|ROLLBACK\s+TRANSACTION/gi,
'-- $&'
);
// 5. Special handling for CREATE TABLE with reserved keywords as column names
// Find CREATE TABLE statements
const createTablePattern =
/CREATE\s+TABLE\s+\[?([^\]]*)\]?\.?\[?([^\]]*)\]?\s*\(([^;]*)\)/gi;
// 8. Remove square brackets (SQL Server specific)
sqlContent = sqlContent.replace(/\[/g, '');
sqlContent = sqlContent.replace(/\]/g, '');
// 9. Remove ON PRIMARY and TEXTIMAGE_ON PRIMARY clauses
sqlContent = sqlContent.replace(
createTablePattern,
(_, schema, tableName, columnDefs) => {
// Process column definitions to rename problematic columns
let processedColumnDefs = columnDefs;
// Replace any column named "column" with "column_name"
processedColumnDefs = processedColumnDefs.replace(
/\[column\]/gi,
'[column_name]'
);
// Replace any column named "int" with "int_col"
processedColumnDefs = processedColumnDefs.replace(
/\[int\]/gi,
'[int_col]'
);
// Replace any column named "time" with "time_col"
processedColumnDefs = processedColumnDefs.replace(
/\[time\]/gi,
'[time_col]'
);
// Replace any column named "order" with "order_column"
processedColumnDefs = processedColumnDefs.replace(
/\[order\]/gi,
'[order_column]'
);
// Rebuild the CREATE TABLE statement
return `CREATE TABLE [${schema || 'dbo'}].[${tableName}] (${processedColumnDefs})`;
}
/ON\s+PRIMARY(\s+TEXTIMAGE_ON\s+PRIMARY)?/gi,
''
);
// 6. Handle default value expressions with functions - replace with simpler defaults
sqlContent = sqlContent.replace(/DEFAULT\s+'\([^)]+\)'/gi, "DEFAULT '0'");
sqlContent = sqlContent.replace(/DEFAULT\s+\([^)]+\)/gi, 'DEFAULT 0');
// 10. Remove WITH options from constraints
sqlContent = sqlContent.replace(/WITH\s*\([^)]+\)/gi, '');
// 7. Split into individual statements to handle them separately
// 11. Handle default value expressions with functions
sqlContent = sqlContent.replace(/DEFAULT\s+NEWID\(\)/gi, "DEFAULT 'newid'");
sqlContent = sqlContent.replace(
/DEFAULT\s+NEWSEQUENTIALID\(\)/gi,
"DEFAULT 'newsequentialid'"
);
sqlContent = sqlContent.replace(
/DEFAULT\s+GETDATE\(\)/gi,
"DEFAULT 'getdate'"
);
sqlContent = sqlContent.replace(
/DEFAULT\s+SYSDATETIME\(\)/gi,
"DEFAULT 'sysdatetime'"
);
// Don't replace numeric defaults or simple values
sqlContent = sqlContent.replace(/DEFAULT\s+'\([^)]+\)'/gi, "DEFAULT '0'");
// Only replace function calls in DEFAULT, not numeric literals
sqlContent = sqlContent.replace(
/DEFAULT\s+(\w+)\s*\([^)]*\)/gi,
"DEFAULT '0'"
);
// 12. Replace SQL Server specific data types with standard types
// Note: We preserve varchar(max) and nvarchar(max) for accurate export
sqlContent = sqlContent.replace(/\buniqueid\b/gi, 'uniqueidentifier'); // Fix common typo
sqlContent = sqlContent.replace(
/\bdatetime2\s*\(\s*\d+\s*\)/gi,
'datetime2'
);
sqlContent = sqlContent.replace(/\btime\s*\(\s*\d+\s*\)/gi, 'time');
sqlContent = sqlContent.replace(
/\bdatetimeoffset\s*\(\s*\d+\s*\)/gi,
'datetimeoffset'
);
// 13. Handle IDENTITY columns - convert to a simpler format
sqlContent = sqlContent.replace(
/IDENTITY\s*\(\s*\d+\s*,\s*\d+\s*\)/gi,
'AUTO_INCREMENT'
);
sqlContent = sqlContent.replace(/IDENTITY/gi, 'AUTO_INCREMENT');
// 14. Replace CHECK constraints with comments (parser doesn't handle well)
sqlContent = sqlContent.replace(
/CHECK\s*\([^)]+\)/gi,
'/* CHECK CONSTRAINT */'
);
// 15. Handle FOREIGN KEY constraints within CREATE TABLE
// Convert inline foreign key syntax to be more parser-friendly
sqlContent = sqlContent.replace(
/(\w+)\s+(\w+(?:\s*\(\s*\d+(?:\s*,\s*\d+)?\s*\))?)\s+(?:NOT\s+NULL\s+)?FOREIGN\s+KEY\s+REFERENCES\s+(\w+)\.?(\w+)\s*\((\w+)\)/gi,
'$1 $2 /* FK TO $3.$4($5) */'
);
// Handle standalone FOREIGN KEY constraints
sqlContent = sqlContent.replace(
/CONSTRAINT\s+(\w+)\s+FOREIGN\s+KEY\s*\((\w+)\)\s+REFERENCES\s+(\w+)\.?(\w+)?\s*\((\w+)\)(?:\s+ON\s+DELETE\s+(\w+))?(?:\s+ON\s+UPDATE\s+(\w+))?/gi,
'/* CONSTRAINT $1 FK($2) REF $3.$4($5) */'
);
// 16. Split into individual statements to handle them separately
const statements = sqlContent
.split(';')
.filter((stmt) => stmt.trim().length > 0);
@@ -120,30 +135,27 @@ function preprocessSQLServerScript(sqlContent: string): string {
const filteredStatements = statements.filter((stmt) => {
const trimmedStmt = stmt.trim().toUpperCase();
return (
trimmedStmt.startsWith('CREATE TABLE') ||
trimmedStmt.startsWith('CREATE UNIQUE INDEX') ||
trimmedStmt.startsWith('CREATE INDEX') ||
trimmedStmt.startsWith('ALTER TABLE')
trimmedStmt.includes('CREATE TABLE') ||
trimmedStmt.includes('CREATE UNIQUE INDEX') ||
trimmedStmt.includes('CREATE INDEX') ||
trimmedStmt.includes('ALTER TABLE')
);
});
return filteredStatements.join(';') + ';';
return filteredStatements.join(';\n') + ';';
}
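// A rough before/after sketch (assumed input; the output is approximate) showing the
// combined effect of the replacement steps above on a small T-SQL fragment:
//
//   CREATE TABLE [dbo].[Wizard] (
//       [Id] [int] IDENTITY(1,1) NOT NULL,
//       [CreatedAt] [datetime2](7) NOT NULL DEFAULT GETDATE(),
//       CONSTRAINT [PK_Wizard] PRIMARY KEY CLUSTERED ([Id] ASC)
//           WITH (PAD_INDEX = OFF) ON [PRIMARY]
//   ) ON [PRIMARY]
//   GO
//
// is reduced to roughly:
//
//   CREATE TABLE dbo.Wizard (
//       Id int AUTO_INCREMENT NOT NULL,
//       CreatedAt datetime2 NOT NULL DEFAULT 'getdate',
//       CONSTRAINT PK_Wizard PRIMARY KEY CLUSTERED (Id ASC)
//   )
//   ;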
/**
* Manual parsing of ALTER TABLE ADD CONSTRAINT statements
 * This is a fallback for cases where node-sql-parser fails to parse the constraints properly
*/
function parseAlterTableAddConstraint(statements: string[]): {
fkData: SQLForeignKey[];
tableMap: Record<string, string>;
} {
function parseAlterTableAddConstraint(statements: string[]): SQLForeignKey[] {
const fkData: SQLForeignKey[] = [];
const tableMap: Record<string, string> = {};
// Regular expressions to extract information from ALTER TABLE statements
// Handle multi-line ALTER TABLE statements
const alterTableRegex =
/ALTER\s+TABLE\s+\[?([^\]]*)\]?\.?\[?([^\]]*)\]?\s+ADD\s+CONSTRAINT\s+\[?([^\]]*)\]?\s+FOREIGN\s+KEY\s*\(\[?([^\]]*)\]?\)\s+REFERENCES\s+\[?([^\]]*)\]?\.?\[?([^\]]*)\]?\s*\(\[?([^\]]*)\]?\)/i;
/ALTER\s+TABLE\s+\[?([^\]]*)\]?\.?\[?([^\]]*)\]?\s+(?:WITH\s+CHECK\s+)?ADD\s+CONSTRAINT\s+\[?([^\]]*)\]?\s+FOREIGN\s+KEY\s*\(\[?([^\]]*)\]?\)\s*REFERENCES\s+\[?([^\]]*)\]?\.?\[?([^\]]*)\]?\s*\(\[?([^\]]*)\]?\)/is;
for (const stmt of statements) {
const match = stmt.match(alterTableRegex);
@@ -159,18 +171,6 @@ function parseAlterTableAddConstraint(statements: string[]): {
targetColumn,
] = match;
// Generate IDs for tables if they don't already exist
const sourceTableKey = `${sourceSchema}.${sourceTable}`;
const targetTableKey = `${targetSchema}.${targetTable}`;
if (!tableMap[sourceTableKey]) {
tableMap[sourceTableKey] = generateId();
}
if (!tableMap[targetTableKey]) {
tableMap[targetTableKey] = generateId();
}
fkData.push({
name: constraintName,
sourceTable: sourceTable,
@@ -179,13 +179,13 @@ function parseAlterTableAddConstraint(statements: string[]): {
targetTable: targetTable,
targetSchema: targetSchema,
targetColumn: targetColumn,
sourceTableId: tableMap[sourceTableKey],
targetTableId: tableMap[targetTableKey],
sourceTableId: '', // Will be filled by linkRelationships
targetTableId: '', // Will be filled by linkRelationships
});
}
}
return { fkData, tableMap };
return fkData;
}
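// For illustration (hypothetical input), a statement such as
//   ALTER TABLE [magic].[wizards] WITH CHECK ADD CONSTRAINT [FK_wizards_schools]
//       FOREIGN KEY ([school_id]) REFERENCES [magic].[schools]([id])
// produces roughly the following entry in fkData:
//   {
//     name: 'FK_wizards_schools',
//     sourceTable: 'wizards',  sourceSchema: 'magic', sourceColumn: 'school_id',
//     targetTable: 'schools',  targetSchema: 'magic', targetColumn: 'id',
//     sourceTableId: '',       targetTableId: '',   // filled later by linkRelationships
//   }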
/**
@@ -267,6 +267,239 @@ function normalizeSQLServerDataType(dataType: string): string {
}
}
/**
* Manual parsing of CREATE TABLE statements when node-sql-parser fails
*/
function parseCreateTableManually(
statement: string,
tables: SQLTable[],
tableMap: Record<string, string>,
relationships: SQLForeignKey[]
): void {
// Extract table name and schema (handling square brackets)
const tableMatch = statement.match(
/CREATE\s+TABLE\s+(?:\[?(\w+)\]?\.)??\[?(\w+)\]?\s*\(/i
);
if (!tableMatch) return;
const [, schema = 'dbo', tableName] = tableMatch;
// Generate table ID
const tableId = generateId();
const tableKey = `${schema}.${tableName}`;
tableMap[tableKey] = tableId;
// Extract column definitions
const columns: SQLColumn[] = [];
const indexes: SQLIndex[] = [];
// Find the content between the parentheses
const tableContentMatch = statement.match(
/CREATE\s+TABLE\s+[^(]+\(([\s\S]*)\)\s*(?:ON\s+|$)/i
);
if (!tableContentMatch) return;
const tableContent = tableContentMatch[1];
// Split table content by commas but not within parentheses
const parts = [];
let current = '';
let parenDepth = 0;
for (let i = 0; i < tableContent.length; i++) {
const char = tableContent[i];
if (char === '(') parenDepth++;
else if (char === ')') parenDepth--;
else if (char === ',' && parenDepth === 0) {
parts.push(current.trim());
current = '';
continue;
}
current += char;
}
if (current.trim()) parts.push(current.trim());
// Process each part (column or constraint)
for (const part of parts) {
// Handle constraint definitions
if (part.match(/^\s*CONSTRAINT/i)) {
// Parse constraints
const constraintMatch = part.match(
/CONSTRAINT\s+\[?(\w+)\]?\s+(PRIMARY\s+KEY|UNIQUE|FOREIGN\s+KEY)/i
);
if (constraintMatch) {
const [, constraintName, constraintType] = constraintMatch;
if (constraintType.match(/PRIMARY\s+KEY/i)) {
// Extract columns from PRIMARY KEY constraint - handle multi-line format
const pkColumnsMatch = part.match(
/PRIMARY\s+KEY(?:\s+CLUSTERED)?\s*\(([\s\S]+?)\)/i
);
if (pkColumnsMatch) {
const pkColumns = pkColumnsMatch[1]
.split(',')
.map((c) =>
c
.trim()
.replace(/\[|\]|\s+(ASC|DESC)/gi, '')
.trim()
);
pkColumns.forEach((col) => {
const column = columns.find((c) => c.name === col);
if (column) column.primaryKey = true;
});
}
} else if (constraintType === 'UNIQUE') {
// Extract columns from UNIQUE constraint
const uniqueColumnsMatch = part.match(
/UNIQUE(?:\s+NONCLUSTERED)?\s*\(([\s\S]+?)\)/i
);
if (uniqueColumnsMatch) {
const uniqueColumns = uniqueColumnsMatch[1]
.split(',')
.map((c) =>
c
.trim()
.replace(/\[|\]|\s+(ASC|DESC)/gi, '')
.trim()
);
indexes.push({
name: constraintName,
columns: uniqueColumns,
unique: true,
});
}
} else if (constraintType.match(/FOREIGN\s+KEY/i)) {
// Parse foreign key constraint
const fkMatch = part.match(
/FOREIGN\s+KEY\s*\(([^)]+)\)\s+REFERENCES\s+(?:\[?(\w+)\]?\.)??\[?(\w+)\]?\s*\(([^)]+)\)/i
);
if (fkMatch) {
const [
,
sourceCol,
targetSchema = 'dbo',
targetTable,
targetCol,
] = fkMatch;
relationships.push({
name: constraintName,
sourceTable: tableName,
sourceSchema: schema,
sourceColumn: sourceCol
.trim()
.replace(/\[|\]/g, ''),
targetTable: targetTable,
targetSchema: targetSchema,
targetColumn: targetCol
.trim()
.replace(/\[|\]/g, ''),
sourceTableId: tableId,
targetTableId: '', // Will be filled later
});
}
}
}
continue;
}
// Parse column definition - handle both numeric args and 'max'
// Handle brackets around column names and types
let columnMatch = part.match(
/^\s*\[?(\w+)\]?\s+\[?(\w+)\]?(?:\s*\(\s*([\d,\s]+|max)\s*\))?(.*)$/i
);
// If no match, try pattern for preprocessed types without parentheses
if (!columnMatch) {
columnMatch = part.match(/^\s*(\w+)\s+(\w+)\s+([\d,\s]+)\s+(.*)$/i);
}
if (columnMatch) {
const [, colName, baseType, typeArgs, rest] = columnMatch;
if (
colName &&
!colName.match(/^(PRIMARY|FOREIGN|UNIQUE|CHECK)$/i)
) {
// Check for inline foreign key
const inlineFkMatch = rest.match(
/FOREIGN\s+KEY\s+REFERENCES\s+(?:\[?(\w+)\]?\.)??\[?(\w+)\]?\s*\(([^)]+)\)/i
);
if (inlineFkMatch) {
const [, targetSchema = 'dbo', targetTable, targetCol] =
inlineFkMatch;
relationships.push({
name: `FK_${tableName}_${colName}`,
sourceTable: tableName,
sourceSchema: schema,
sourceColumn: colName,
targetTable: targetTable,
targetSchema: targetSchema,
targetColumn: targetCol.trim().replace(/\[|\]/g, ''),
sourceTableId: tableId,
targetTableId: '', // Will be filled later
});
}
const isPrimaryKey = !!rest.match(/PRIMARY\s+KEY/i);
const isNotNull = !!rest.match(/NOT\s+NULL/i);
const isIdentity = !!rest.match(
/IDENTITY(?:\s*\(\s*\d+\s*,\s*\d+\s*\))?/i
);
const isUnique = !!rest.match(/UNIQUE/i);
const defaultMatch = rest.match(/DEFAULT\s+([^,]+)/i);
// Parse type arguments
let parsedTypeArgs: number[] | string | undefined;
if (typeArgs) {
if (typeArgs.toLowerCase() === 'max') {
// Preserve 'max' keyword for varchar/nvarchar types
parsedTypeArgs = 'max';
} else {
// Parse numeric args
parsedTypeArgs = typeArgs
.split(',')
.map((arg) => parseInt(arg.trim()));
}
}
const column: SQLColumn = {
name: colName,
type: normalizeSQLServerDataType(baseType.trim()),
nullable: !isNotNull && !isPrimaryKey,
primaryKey: isPrimaryKey,
unique: isUnique,
increment: isIdentity,
default: defaultMatch ? defaultMatch[1].trim() : undefined,
};
// Add type arguments if present
if (parsedTypeArgs) {
if (typeof parsedTypeArgs === 'string') {
// For 'max' keyword
column.typeArgs = parsedTypeArgs;
} else if (parsedTypeArgs.length > 0) {
// For numeric arguments
column.typeArgs = parsedTypeArgs;
}
}
columns.push(column);
}
}
}
// Add the table
tables.push({
id: tableId,
name: tableName,
schema: schema,
columns,
indexes,
order: tables.length,
});
}
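// Shape-only illustration (hypothetical input) of what gets pushed onto `tables`:
//   CREATE TABLE [magic].[schools] (
//       [id] [uniqueidentifier] NOT NULL,
//       [name] [nvarchar](100) NULL,
//       CONSTRAINT [PK_schools] PRIMARY KEY CLUSTERED ([id] ASC)
//   );
// becomes approximately:
//   {
//     id: '<generated>', name: 'schools', schema: 'magic', order: 0,
//     columns: [
//       { name: 'id', type: 'uniqueidentifier', nullable: false, primaryKey: true, unique: false, increment: false },
//       { name: 'name', type: 'nvarchar', typeArgs: [100], nullable: true, primaryKey: false, unique: false, increment: false },
//     ],
//     indexes: [],
//   }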
/**
* Parse SQL Server DDL scripts and extract database structure
* @param sqlContent SQL Server DDL content as string
@@ -280,84 +513,131 @@ export async function fromSQLServer(
const tableMap: Record<string, string> = {}; // Maps table name to its ID
try {
// Preprocess the SQL content to handle T-SQL specific syntax
const preprocessedSQL = preprocessSQLServerScript(sqlContent);
// First, handle ALTER TABLE statements for foreign keys
// Split by GO or semicolon for SQL Server
const statements = sqlContent
.split(';')
.split(/(?:GO\s*$|;\s*$)/im)
.filter((stmt) => stmt.trim().length > 0);
const alterTableStatements = statements.filter(
(stmt) =>
stmt.trim().toUpperCase().startsWith('ALTER TABLE') &&
stmt.trim().toUpperCase().includes('ALTER TABLE') &&
stmt.includes('FOREIGN KEY')
);
if (alterTableStatements.length > 0) {
const { fkData, tableMap: fkTableMap } =
parseAlterTableAddConstraint(alterTableStatements);
// Store table IDs from alter statements
Object.assign(tableMap, fkTableMap);
const fkData = parseAlterTableAddConstraint(alterTableStatements);
// Store foreign key relationships for later processing
relationships.push(...fkData);
}
const { Parser } = await import('node-sql-parser');
const parser = new Parser();
let ast;
try {
ast = parser.astify(preprocessedSQL, parserOpts);
} catch {
// Fallback: Try to parse each statement individually
const statements = preprocessedSQL
.split(';')
.filter((stmt) => stmt.trim().length > 0);
ast = [];
// Parse CREATE TABLE statements manually first
const createTableStatements = statements.filter((stmt) =>
stmt.trim().toUpperCase().includes('CREATE TABLE')
);
for (const stmt of statements) {
try {
const stmtAst = parser.astify(stmt + ';', parserOpts);
if (Array.isArray(stmtAst)) {
ast.push(...stmtAst);
} else if (stmtAst) {
ast.push(stmtAst);
for (const stmt of createTableStatements) {
parseCreateTableManually(stmt, tables, tableMap, relationships);
}
// Preprocess the SQL content for node-sql-parser
const preprocessedSQL = preprocessSQLServerScript(sqlContent);
// Try to use node-sql-parser for additional parsing
try {
const { Parser } = await import('node-sql-parser');
const parser = new Parser();
let ast;
try {
ast = parser.astify(preprocessedSQL, parserOpts);
} catch {
// Fallback: Try to parse each statement individually
const statements = preprocessedSQL
.split(';')
.filter((stmt) => stmt.trim().length > 0);
ast = [];
for (const stmt of statements) {
try {
const stmtAst = parser.astify(stmt + ';', parserOpts);
if (Array.isArray(stmtAst)) {
ast.push(...stmtAst);
} else if (stmtAst) {
ast.push(stmtAst);
}
} catch {
// Skip statements that can't be parsed
}
} catch {
// Skip statements that can't be parsed
}
}
if (Array.isArray(ast) && ast.length > 0) {
// Process each statement
(ast as unknown as SQLASTNode[]).forEach((stmt) => {
// Process CREATE INDEX statements
if (stmt.type === 'create' && stmt.keyword === 'index') {
processCreateIndex(
stmt as CreateIndexStatement,
tables
);
}
// Process ALTER TABLE statements for non-FK constraints
else if (
stmt.type === 'alter' &&
stmt.keyword === 'table'
) {
processAlterTable(
stmt as AlterTableStatement,
tables,
relationships
);
}
});
}
} catch (parserError) {
// If parser fails completely, continue with manual parsing results
console.warn(
'node-sql-parser failed, using manual parsing only:',
parserError
);
}
// Parse CREATE INDEX statements manually
const createIndexStatements = statements.filter(
(stmt) =>
stmt.trim().toUpperCase().includes('CREATE') &&
stmt.trim().toUpperCase().includes('INDEX')
);
for (const stmt of createIndexStatements) {
const indexMatch = stmt.match(
/CREATE\s+(UNIQUE\s+)?INDEX\s+\[?(\w+)\]?\s+ON\s+(?:\[?(\w+)\]?\.)??\[?(\w+)\]?\s*\(([^)]+)\)/i
);
if (indexMatch) {
const [
,
unique,
indexName,
schema = 'dbo',
tableName,
columnsStr,
] = indexMatch;
const table = tables.find(
(t) => t.name === tableName && t.schema === schema
);
if (table) {
const columns = columnsStr
.split(',')
.map((c) => c.trim().replace(/\[|\]/g, ''));
table.indexes.push({
name: indexName,
columns,
unique: !!unique,
});
}
}
}
if (!Array.isArray(ast) || ast.length === 0) {
throw new Error('Failed to parse SQL DDL - Empty or invalid AST');
}
// Process each statement
(ast as unknown as SQLASTNode[]).forEach((stmt) => {
// Process CREATE TABLE statements
if (stmt.type === 'create' && stmt.keyword === 'table') {
processCreateTable(
stmt as CreateTableStatement,
tables,
tableMap,
relationships
);
}
// Process CREATE INDEX statements
else if (stmt.type === 'create' && stmt.keyword === 'index') {
processCreateIndex(stmt as CreateIndexStatement, tables);
}
// Process ALTER TABLE statements
else if (stmt.type === 'alter' && stmt.keyword === 'table') {
processAlterTable(
stmt as AlterTableStatement,
tables,
relationships
);
}
});
// Link relationships to ensure all targetTableId and sourceTableId fields are filled
const validRelationships = linkRelationships(
tables,
@@ -379,233 +659,6 @@ export async function fromSQLServer(
}
}
/**
* Process a CREATE TABLE statement
*/
function processCreateTable(
stmt: CreateTableStatement,
tables: SQLTable[],
tableMap: Record<string, string>,
relationships: SQLForeignKey[]
): void {
let tableName = '';
let schemaName = '';
// Extract table name and schema
if (stmt.table && typeof stmt.table === 'object') {
// Handle array of tables if needed
if (Array.isArray(stmt.table) && stmt.table.length > 0) {
const tableObj = stmt.table[0];
tableName = tableObj.table || '';
// SQL Server uses 'schema' or 'db' field
schemaName = tableObj.schema || tableObj.db || '';
} else {
// Direct object reference
const tableObj = stmt.table as TableReference;
tableName = tableObj.table || '';
schemaName = tableObj.schema || tableObj.db || '';
}
}
if (!tableName) {
return;
}
// If no schema specified, use default 'dbo' schema for SQL Server
if (!schemaName) {
schemaName = 'dbo';
}
// Generate a unique ID for the table
const tableId = generateId();
const tableKey = `${schemaName ? schemaName + '.' : ''}${tableName}`;
tableMap[tableKey] = tableId;
// Process table columns
const columns: SQLColumn[] = [];
const indexes: SQLIndex[] = [];
if (stmt.create_definitions && Array.isArray(stmt.create_definitions)) {
stmt.create_definitions.forEach(
(def: ColumnDefinition | ConstraintDefinition) => {
if (def.resource === 'column') {
// Process column definition
const columnDef = def as ColumnDefinition;
const columnName = extractColumnName(columnDef.column);
const rawDataType = columnDef.definition?.dataType || '';
const normalizedDataType =
normalizeSQLServerDataType(rawDataType);
if (columnName) {
// Check for SQL Server specific column properties
const isPrimaryKey =
columnDef.primary_key === 'primary key';
// For SQL Server, check for IDENTITY property in suffixes
const hasIdentity = columnDef.definition?.suffix?.some(
(suffix) =>
suffix.toLowerCase().includes('identity')
);
columns.push({
name: columnName,
type: normalizedDataType,
nullable: columnDef.nullable?.type !== 'not null',
primaryKey: isPrimaryKey,
unique: columnDef.unique === 'unique',
typeArgs: getTypeArgs(columnDef.definition),
default: columnDef.default_val
? safelyBuildSQLFromAST(columnDef.default_val)
: undefined,
increment: hasIdentity,
});
}
} else if (def.resource === 'constraint') {
// Handle constraint definitions
const constraintDef = def as ConstraintDefinition;
// Handle PRIMARY KEY constraints
if (constraintDef.constraint_type === 'primary key') {
if (Array.isArray(constraintDef.definition)) {
// Extract column names from primary key constraint
for (const colDef of constraintDef.definition) {
if (
colDef &&
typeof colDef === 'object' &&
'type' in colDef &&
colDef.type === 'column_ref' &&
'column' in colDef
) {
const pkColumnName = extractColumnName(
colDef as ColumnReference
);
// Find and mark the column as primary key
const column = columns.find(
(col) => col.name === pkColumnName
);
if (column) {
column.primaryKey = true;
}
}
}
}
}
// Handle UNIQUE constraints
else if (constraintDef.constraint_type === 'unique') {
if (Array.isArray(constraintDef.definition)) {
const uniqueColumns: string[] = [];
// Extract column names from unique constraint
for (const colDef of constraintDef.definition) {
if (
colDef &&
typeof colDef === 'object' &&
'type' in colDef &&
colDef.type === 'column_ref' &&
'column' in colDef
) {
const uniqueColumnName = extractColumnName(
colDef as ColumnReference
);
uniqueColumns.push(uniqueColumnName);
}
}
// Add as an index
if (uniqueColumns.length > 0) {
indexes.push({
name:
constraintDef.constraint ||
`unique_${tableName}_${uniqueColumns.join('_')}`,
columns: uniqueColumns,
unique: true,
});
}
}
}
// Handle FOREIGN KEY constraints
else if (
constraintDef.constraint_type === 'foreign key' &&
constraintDef.reference
) {
const reference = constraintDef.reference;
if (
reference &&
reference.table &&
reference.columns &&
reference.columns.length > 0
) {
// Extract target table info
const targetTable =
reference.table as TableReference;
const targetTableName = targetTable.table;
const targetSchemaName =
targetTable.schema || targetTable.db || 'dbo';
// Extract source column
let sourceColumnName = '';
if (
Array.isArray(constraintDef.definition) &&
constraintDef.definition.length > 0
) {
const sourceColDef =
constraintDef.definition[0];
if (
sourceColDef &&
typeof sourceColDef === 'object' &&
'type' in sourceColDef &&
sourceColDef.type === 'column_ref'
) {
sourceColumnName = extractColumnName(
sourceColDef as ColumnReference
);
}
}
// Extract target column
const targetColumnName = extractColumnName(
reference.columns[0]
);
if (
sourceColumnName &&
targetTableName &&
targetColumnName
) {
// Create a foreign key relationship
relationships.push({
name:
constraintDef.constraint ||
`fk_${tableName}_${sourceColumnName}`,
sourceTable: tableName,
sourceSchema: schemaName,
sourceColumn: sourceColumnName,
targetTable: targetTableName,
targetSchema: targetSchemaName,
targetColumn: targetColumnName,
sourceTableId: tableId,
targetTableId: '', // Will be filled later
updateAction: reference.on_update,
deleteAction: reference.on_delete,
});
}
}
}
}
}
);
}
// Create the table object
tables.push({
id: tableId,
name: tableName,
schema: schemaName,
columns,
indexes,
order: tables.length,
});
}
/**
* Process a CREATE INDEX statement
*/

View File

@@ -133,7 +133,12 @@ const mapDBMLTypeToGenericType = (dbmlType: string): DataType => {
const foundType = genericDataTypes.find((t) => t.id === mappedType);
if (foundType) return foundType;
}
return genericDataTypes.find((t) => t.id === 'varchar')!;
const type = genericDataTypes.find((t) => t.id === 'varchar')!;
return {
id: type.id,
name: type.name,
};
};
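// Presumably, returning a fresh object here (rather than the shared genericDataTypes
// entry) keeps callers that mutate the result from polluting every later fallback
// lookup. Minimal sketch of the observable difference:
//   const a = mapDBMLTypeToGenericType('some_unknown_dbml_type');
//   const b = mapDBMLTypeToGenericType('another_unknown_dbml_type');
//   // a and b are distinct objects with the same { id: 'varchar', ... } content.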
const determineCardinality = (

View File

@@ -19,6 +19,14 @@ import { useDebounce } from '@/hooks/use-debounce';
import equal from 'fast-deep-equal';
import type { DatabaseType } from '@/lib/domain';
import {
Select,
SelectContent,
SelectItem,
SelectTrigger,
SelectValue,
} from '@/components/select/select';
export interface TableFieldPopoverProps {
field: DBField;
databaseType: DatabaseType;
@@ -144,20 +152,105 @@ export const TableFieldPopover: React.FC<TableFieldPopoverProps> = ({
'side_panel.tables_section.table.field_actions.character_length'
)}
</Label>
<Input
value={
localField.characterMaximumLength ?? ''
}
type="number"
onChange={(e) =>
setLocalField((current) => ({
...current,
characterMaximumLength:
e.target.value,
}))
}
className="w-full rounded-md bg-muted text-sm"
/>
{dataFieldType?.fieldAttributes
?.hasCharMaxLengthOption ? (
<div className="flex gap-2">
<Select
value={
localField.characterMaximumLength ===
'max'
? 'max'
: localField.characterMaximumLength
? 'custom'
: 'none'
}
onValueChange={(value) => {
if (value === 'max') {
setLocalField(
(current) => ({
...current,
characterMaximumLength:
'max',
})
);
} else if (value === 'custom') {
setLocalField(
(current) => ({
...current,
characterMaximumLength:
'255',
})
);
} else {
setLocalField(
(current) => ({
...current,
characterMaximumLength:
null,
})
);
}
}}
>
<SelectTrigger className="w-full bg-muted">
<SelectValue placeholder="Select length" />
</SelectTrigger>
<SelectContent>
<SelectItem value="none">
No length
</SelectItem>
<SelectItem value="max">
MAX
</SelectItem>
<SelectItem value="custom">
Custom
</SelectItem>
</SelectContent>
</Select>
{localField.characterMaximumLength &&
localField.characterMaximumLength !==
'max' ? (
<Input
value={
localField.characterMaximumLength
}
type="number"
min="1"
max={
dataFieldType
?.fieldAttributes
?.maxLength || undefined
}
onChange={(e) =>
setLocalField(
(current) => ({
...current,
characterMaximumLength:
e.target.value,
})
)
}
className="w-24 rounded-md bg-muted text-sm"
/>
) : null}
</div>
) : (
<Input
value={
localField.characterMaximumLength ??
''
}
type="number"
onChange={(e) =>
setLocalField((current) => ({
...current,
characterMaximumLength:
e.target.value,
}))
}
className="w-full rounded-md bg-muted text-sm"
/>
)}
</div>
) : null}
{dataFieldType?.fieldAttributes?.precision ||

View File

@@ -3,6 +3,7 @@ import { GripVertical, KeyRound } from 'lucide-react';
import { Input } from '@/components/input/input';
import { generateDBFieldSuffix, type DBField } from '@/lib/domain/db-field';
import { useChartDB } from '@/hooks/use-chartdb';
import type { DataTypeData } from '@/lib/data/data-types/data-types';
import {
dataTypeDataToDataType,
sortedDataTypeMap,
@@ -29,6 +30,51 @@ export interface TableFieldProps {
removeField: () => void;
}
const generateFieldRegexPatterns = (
dataType: DataTypeData
): {
regex?: string;
extractRegex?: RegExp;
} => {
if (!dataType.fieldAttributes) {
return { regex: undefined, extractRegex: undefined };
}
const typeName = dataType.name;
const fieldAttributes = dataType.fieldAttributes;
if (fieldAttributes.hasCharMaxLength) {
if (fieldAttributes.hasCharMaxLengthOption) {
return {
regex: `^${typeName}\\((\\d+|[mM][aA][xX])\\)$`,
extractRegex: /\((\d+|max)\)/i,
};
}
return {
regex: `^${typeName}\\(\\d+\\)$`,
extractRegex: /\((\d+)\)/,
};
}
if (fieldAttributes.precision && fieldAttributes.scale) {
return {
regex: `^${typeName}\\s*\\(\\s*\\d+\\s*(?:,\\s*\\d+\\s*)?\\)$`,
extractRegex: new RegExp(
`${typeName}\\s*\\(\\s*(\\d+)\\s*(?:,\\s*(\\d+)\\s*)?\\)`
),
};
}
if (fieldAttributes.precision) {
return {
regex: `^${typeName}\\s*\\(\\s*\\d+\\s*\\)$`,
extractRegex: /\((\d+)\)/,
};
}
return { regex: undefined, extractRegex: undefined };
};
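// Illustrative outputs (assuming data-type entries with these attribute combinations):
//   nvarchar (hasCharMaxLength + hasCharMaxLengthOption):
//     regex:        '^nvarchar\((\d+|[mM][aA][xX])\)$'   // matches "nvarchar(50)" and "nvarchar(max)"
//     extractRegex: /\((\d+|max)\)/i
//   a type with hasCharMaxLength but no max option:
//     regex:        '^<type>\(\d+\)$'                    // numeric length only
//   a type with precision and scale ranges (e.g. decimal):
//     regex:        '^<type>\s*\(\s*\d+\s*(?:,\s*\d+\s*)?\)$'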
export const TableField: React.FC<TableFieldProps> = ({
field,
updateField,
@@ -43,27 +89,17 @@ export const TableField: React.FC<TableFieldProps> = ({
const dataFieldOptions = useMemo(() => {
const standardTypes: SelectBoxOption[] = sortedDataTypeMap[
databaseType
].map((type) => ({
label: type.name,
value: type.id,
regex: type.fieldAttributes?.hasCharMaxLength
? `^${type.name}\\(\\d+\\)$`
: type.fieldAttributes?.precision && type.fieldAttributes?.scale
? `^${type.name}\\s*\\(\\s*\\d+\\s*(?:,\\s*\\d+\\s*)?\\)$`
: type.fieldAttributes?.precision
? `^${type.name}\\s*\\(\\s*\\d+\\s*\\)$`
: undefined,
extractRegex: type.fieldAttributes?.hasCharMaxLength
? /\((\d+)\)/
: type.fieldAttributes?.precision && type.fieldAttributes?.scale
? new RegExp(
`${type.name}\\s*\\(\\s*(\\d+)\\s*(?:,\\s*(\\d+)\\s*)?\\)`
)
: type.fieldAttributes?.precision
? /\((\d+)\)/
: undefined,
group: customTypes?.length ? 'Standard Types' : undefined,
}));
].map((type) => {
const regexPatterns = generateFieldRegexPatterns(type);
return {
label: type.name,
value: type.id,
regex: regexPatterns.regex,
extractRegex: regexPatterns.extractRegex,
group: customTypes?.length ? 'Standard Types' : undefined,
};
});
if (!customTypes?.length) {
return standardTypes;
@@ -100,7 +136,7 @@ export const TableField: React.FC<TableFieldProps> = ({
if (regexMatches?.length) {
if (dataType?.fieldAttributes?.hasCharMaxLength) {
characterMaximumLength = regexMatches[1];
characterMaximumLength = regexMatches[1]?.toLowerCase();
} else if (
dataType?.fieldAttributes?.precision &&
dataType?.fieldAttributes?.scale