mirror of
https://github.com/chartdb/chartdb.git
synced 2025-10-23 07:11:56 +00:00
Compare commits
3 Commits
b9750ddb9a
...
jf/fix_sql
Author | SHA1 | Date | |
---|---|---|---|
|
2f5533c071 | ||
|
5b9a88a8f3 | ||
|
2a8714a564 |
97
.husky/README.md
Normal file
97
.husky/README.md
Normal file
@@ -0,0 +1,97 @@
|
||||
# Smart Pre-commit Hooks
|
||||
|
||||
This directory contains intelligent pre-commit hooks that run relevant tests based on the files being committed.
|
||||
|
||||
## Features
|
||||
|
||||
- **Smart Test Detection**: Automatically detects which tests to run based on changed files
|
||||
- **Configurable Mappings**: Easy to configure via `test-mapping.json` (optional)
|
||||
- **Performance Optimized**: Only runs tests for affected code
|
||||
- **Skip Option**: Temporarily skip tests when needed
|
||||
- **Progressive Enhancement**: Works without dependencies, enhanced with `jq` if available
|
||||
|
||||
## How It Works
|
||||
|
||||
1. **Linting**: Always runs linting first
|
||||
2. **File Analysis**: Examines staged files to determine which are SQL import related
|
||||
3. **Test Selection**: Maps changed files to relevant test suites
|
||||
4. **Test Execution**: Runs only the necessary tests
|
||||
|
||||
## Configuration
|
||||
|
||||
The test runner works in two modes:
|
||||
|
||||
### Basic Mode (No Dependencies)
|
||||
- Uses built-in patterns for common SQL import files
|
||||
- Works out of the box without any additional tools
|
||||
|
||||
### Enhanced Mode (With `jq`)
|
||||
- Reads configuration from `test-mapping.json`
|
||||
- Allows custom patterns and mappings
|
||||
- More flexible and maintainable
|
||||
|
||||
### Automatic Behaviors
|
||||
- **Documentation Changes**: Tests are automatically skipped for .md, .txt, and .rst files
|
||||
- **Verbose Output**: Always shows matched files and test paths for better visibility
|
||||
|
||||
## File Mappings
|
||||
|
||||
Built-in mappings:
|
||||
- PostgreSQL import files → PostgreSQL tests
|
||||
- MySQL import files → MySQL tests
|
||||
- SQLite import files → SQLite tests
|
||||
- SQL Server import files → SQL Server tests
|
||||
- Common SQL files → All dialect tests
|
||||
- SQL validator → PostgreSQL tests
|
||||
|
||||
## Usage
|
||||
|
||||
### Normal Operation
|
||||
Just commit as usual. The hooks will automatically run relevant tests.
|
||||
|
||||
### Skip Tests Temporarily
|
||||
```bash
|
||||
# Create skip file
|
||||
touch .husky/.skip-tests
|
||||
|
||||
# Commit without tests
|
||||
git commit -m "WIP: debugging"
|
||||
|
||||
# Remove skip file to re-enable
|
||||
rm .husky/.skip-tests
|
||||
```
|
||||
|
||||
### Customize Mappings
|
||||
1. Install `jq`: `brew install jq` (macOS) or `apt-get install jq` (Linux)
|
||||
2. Edit `test-mapping.json` to add new patterns or modify existing ones
|
||||
|
||||
## Requirements
|
||||
|
||||
- **Required**: None (works with bash only)
|
||||
- **Optional**: `jq` for JSON configuration support
|
||||
|
||||
## Examples
|
||||
|
||||
### Example 1: PostgreSQL Parser Change
|
||||
```bash
|
||||
# Changed: src/lib/data/sql-import/dialect-importers/postgresql/postgresql-improved.ts
|
||||
# Runs: src/lib/data/sql-import/dialect-importers/postgresql/__tests__
|
||||
```
|
||||
|
||||
### Example 2: Common SQL Import Change
|
||||
```bash
|
||||
# Changed: src/lib/data/sql-import/common.ts
|
||||
# Runs: All dialect tests (PostgreSQL, MySQL, SQLite, SQL Server)
|
||||
```
|
||||
|
||||
### Example 3: Test File Change
|
||||
```bash
|
||||
# Changed: src/lib/data/sql-import/dialect-importers/postgresql/__tests__/test-types.test.ts
|
||||
# Runs: That specific test file
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
1. **Tests not running**: Check if `.husky/.skip-tests` exists
|
||||
2. **Wrong tests running**: Check `test-mapping.json` patterns
|
||||
3. **All tests running**: You may have exceeded the change threshold
|
@@ -1,2 +1,13 @@
|
||||
#!/bin/sh
|
||||
|
||||
# Run linting first
|
||||
npm run lint || { echo "lint failed, please run \"npm run lint:fix\" to fix the errors." ; exit 1; }
|
||||
|
||||
# Check if tests should be skipped
|
||||
if [ -f .husky/.skip-tests ]; then
|
||||
echo "⚠️ Tests skipped (remove .husky/.skip-tests to enable)"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Run smart test runner for SQL import related changes
|
||||
.husky/smart-test-runner.sh || exit 1
|
||||
|
214
.husky/smart-test-runner.sh
Executable file
214
.husky/smart-test-runner.sh
Executable file
@@ -0,0 +1,214 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Colors for output
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
BLUE='\033[0;34m'
|
||||
YELLOW='\033[1;33m'
|
||||
NC='\033[0m' # No Color
|
||||
|
||||
# Get the directory of this script
|
||||
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
CONFIG_FILE="$SCRIPT_DIR/test-mapping.json"
|
||||
|
||||
# Get list of staged files
|
||||
STAGED_FILES=$(git diff --cached --name-only)
|
||||
|
||||
# Check if only documentation files are staged
|
||||
DOC_ONLY=true
|
||||
NON_DOC_COUNT=0
|
||||
while IFS= read -r file; do
|
||||
[ -z "$file" ] && continue
|
||||
if [[ ! "$file" =~ \.(md|txt|rst)$ ]]; then
|
||||
DOC_ONLY=false
|
||||
((NON_DOC_COUNT++))
|
||||
fi
|
||||
done <<< "$STAGED_FILES"
|
||||
|
||||
# Skip tests if only docs are changed
|
||||
if [ "$DOC_ONLY" = "true" ]; then
|
||||
echo -e "${YELLOW}ℹ️ Only documentation files changed, skipping tests.${NC}"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Initialize test tracking
|
||||
TESTS_TO_RUN=""
|
||||
MATCHED_FILES=()
|
||||
|
||||
# Function to add test path
|
||||
add_test() {
|
||||
local test_path=$1
|
||||
if [ -d "$test_path" ] || [ -f "$test_path" ]; then
|
||||
# Add to list if not already present
|
||||
if [[ ! "$TESTS_TO_RUN" =~ "$test_path" ]]; then
|
||||
if [ -z "$TESTS_TO_RUN" ]; then
|
||||
TESTS_TO_RUN="$test_path"
|
||||
else
|
||||
TESTS_TO_RUN="$TESTS_TO_RUN $test_path"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
# Function to check if file matches pattern (simple glob matching)
|
||||
matches_pattern() {
|
||||
local file=$1
|
||||
local pattern=$2
|
||||
|
||||
# Use bash pattern matching
|
||||
case "$file" in
|
||||
$pattern) return 0 ;;
|
||||
*) return 1 ;;
|
||||
esac
|
||||
}
|
||||
|
||||
# Always verbose by default
|
||||
VERBOSE=true
|
||||
|
||||
# Process files based on available tools
|
||||
if command -v jq &> /dev/null && [ -f "$CONFIG_FILE" ]; then
|
||||
echo -e "${YELLOW}Using configuration from test-mapping.json${NC}"
|
||||
|
||||
# Process each staged file
|
||||
while IFS= read -r file; do
|
||||
[ -z "$file" ] && continue
|
||||
|
||||
# Check against each mapping rule
|
||||
jq -c '.mappings[]' "$CONFIG_FILE" 2>/dev/null | while read -r mapping; do
|
||||
name=$(echo "$mapping" | jq -r '.name')
|
||||
|
||||
# Check patterns
|
||||
echo "$mapping" | jq -r '.patterns[]' | while read -r pattern; do
|
||||
if matches_pattern "$file" "$pattern"; then
|
||||
# Check exclusions
|
||||
excluded=false
|
||||
echo "$mapping" | jq -r '.excludePatterns[]?' 2>/dev/null | while read -r exclude; do
|
||||
if matches_pattern "$file" "$exclude"; then
|
||||
excluded=true
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
if [ "$excluded" = "false" ]; then
|
||||
[ "$VERBOSE" = "true" ] && echo -e "${GREEN}✓ Matched rule '$name' for file: $file${NC}"
|
||||
MATCHED_FILES+=("$file")
|
||||
|
||||
# Add tests for this mapping
|
||||
echo "$mapping" | jq -r '.tests[]' | while read -r test_path; do
|
||||
[ -n "$test_path" ] && echo "$test_path" >> /tmp/test_paths_$$
|
||||
done
|
||||
fi
|
||||
break
|
||||
fi
|
||||
done
|
||||
done
|
||||
done <<< "$STAGED_FILES"
|
||||
|
||||
# Read test paths from temp file
|
||||
if [ -f /tmp/test_paths_$$ ]; then
|
||||
while read -r test_path; do
|
||||
add_test "$test_path"
|
||||
done < /tmp/test_paths_$$
|
||||
rm -f /tmp/test_paths_$$
|
||||
fi
|
||||
else
|
||||
echo -e "${YELLOW}Using built-in patterns (install jq for config file support)${NC}"
|
||||
|
||||
# Fallback to hardcoded patterns
|
||||
while IFS= read -r file; do
|
||||
[ -z "$file" ] && continue
|
||||
|
||||
case "$file" in
|
||||
# PostgreSQL import files
|
||||
src/lib/data/sql-import/dialect-importers/postgresql/*.ts)
|
||||
if [[ ! "$file" =~ \.test\.ts$ ]] && [[ ! "$file" =~ \.spec\.ts$ ]]; then
|
||||
[ "$VERBOSE" = "true" ] && echo "📝 Changed PostgreSQL import file: $file"
|
||||
MATCHED_FILES+=("$file")
|
||||
add_test "src/lib/data/sql-import/dialect-importers/postgresql/__tests__"
|
||||
fi
|
||||
;;
|
||||
|
||||
# MySQL import files
|
||||
src/lib/data/sql-import/dialect-importers/mysql/*.ts)
|
||||
if [[ ! "$file" =~ \.test\.ts$ ]] && [[ ! "$file" =~ \.spec\.ts$ ]]; then
|
||||
[ "$VERBOSE" = "true" ] && echo "📝 Changed MySQL import file: $file"
|
||||
MATCHED_FILES+=("$file")
|
||||
add_test "src/lib/data/sql-import/dialect-importers/mysql/__tests__"
|
||||
fi
|
||||
;;
|
||||
|
||||
# SQLite import files
|
||||
src/lib/data/sql-import/dialect-importers/sqlite/*.ts)
|
||||
if [[ ! "$file" =~ \.test\.ts$ ]] && [[ ! "$file" =~ \.spec\.ts$ ]]; then
|
||||
[ "$VERBOSE" = "true" ] && echo "📝 Changed SQLite import file: $file"
|
||||
MATCHED_FILES+=("$file")
|
||||
add_test "src/lib/data/sql-import/dialect-importers/sqlite/__tests__"
|
||||
fi
|
||||
;;
|
||||
|
||||
# SQL Server import files
|
||||
src/lib/data/sql-import/dialect-importers/sql-server/*.ts)
|
||||
if [[ ! "$file" =~ \.test\.ts$ ]] && [[ ! "$file" =~ \.spec\.ts$ ]]; then
|
||||
[ "$VERBOSE" = "true" ] && echo "📝 Changed SQL Server import file: $file"
|
||||
MATCHED_FILES+=("$file")
|
||||
add_test "src/lib/data/sql-import/dialect-importers/sql-server/__tests__"
|
||||
fi
|
||||
;;
|
||||
|
||||
# Common SQL import files
|
||||
src/lib/data/sql-import/*.ts)
|
||||
if [[ ! "$file" =~ \.test\.ts$ ]] && [[ ! "$file" =~ \.spec\.ts$ ]] && [[ ! "$file" =~ /dialect-importers/ ]]; then
|
||||
[ "$VERBOSE" = "true" ] && echo "📝 Changed common SQL import file: $file"
|
||||
MATCHED_FILES+=("$file")
|
||||
# Run all dialect tests if common files change
|
||||
add_test "src/lib/data/sql-import/dialect-importers/postgresql/__tests__"
|
||||
add_test "src/lib/data/sql-import/dialect-importers/mysql/__tests__"
|
||||
add_test "src/lib/data/sql-import/dialect-importers/sqlite/__tests__"
|
||||
add_test "src/lib/data/sql-import/dialect-importers/sql-server/__tests__"
|
||||
fi
|
||||
;;
|
||||
|
||||
# SQL validator
|
||||
src/lib/data/sql-import/sql-validator.ts)
|
||||
[ "$VERBOSE" = "true" ] && echo "📝 Changed SQL validator"
|
||||
MATCHED_FILES+=("$file")
|
||||
add_test "src/lib/data/sql-import/dialect-importers/postgresql/__tests__"
|
||||
;;
|
||||
|
||||
# Test files themselves
|
||||
src/lib/data/sql-import/**/*.test.ts|src/lib/data/sql-import/**/*.spec.ts)
|
||||
[ "$VERBOSE" = "true" ] && echo "📝 Changed test file: $file"
|
||||
MATCHED_FILES+=("$file")
|
||||
add_test "$file"
|
||||
;;
|
||||
esac
|
||||
done <<< "$STAGED_FILES"
|
||||
fi
|
||||
|
||||
# Run tests if any were found
|
||||
if [ -n "$TESTS_TO_RUN" ]; then
|
||||
echo ""
|
||||
echo -e "${YELLOW}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
|
||||
echo -e "${YELLOW}🧪 Running SQL import tests...${NC}"
|
||||
[ "$VERBOSE" = "true" ] && echo -e "Matched files: ${#MATCHED_FILES[@]}"
|
||||
[ "$VERBOSE" = "true" ] && echo -e "Test paths: $TESTS_TO_RUN"
|
||||
echo -e "${YELLOW}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
|
||||
echo ""
|
||||
|
||||
# Run the tests
|
||||
npm test -- $TESTS_TO_RUN --run
|
||||
TEST_RESULT=$?
|
||||
|
||||
if [ $TEST_RESULT -ne 0 ]; then
|
||||
echo ""
|
||||
echo -e "${RED}❌ SQL import tests failed! Please fix the tests before committing.${NC}"
|
||||
exit 1
|
||||
else
|
||||
echo ""
|
||||
echo -e "${GREEN}✅ SQL import tests passed!${NC}"
|
||||
fi
|
||||
else
|
||||
echo -e "${YELLOW}ℹ️ No SQL import related changes detected, skipping SQL import tests.${NC}"
|
||||
fi
|
||||
|
||||
exit 0
|
95
.husky/test-mapping.json
Normal file
95
.husky/test-mapping.json
Normal file
@@ -0,0 +1,95 @@
|
||||
{
|
||||
"mappings": [
|
||||
{
|
||||
"name": "PostgreSQL Import",
|
||||
"patterns": [
|
||||
"src/lib/data/sql-import/dialect-importers/postgresql/*.ts"
|
||||
],
|
||||
"excludePatterns": [
|
||||
"*.test.ts",
|
||||
"*.spec.ts"
|
||||
],
|
||||
"tests": [
|
||||
"src/lib/data/sql-import/dialect-importers/postgresql/__tests__"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "MySQL Import",
|
||||
"patterns": [
|
||||
"src/lib/data/sql-import/dialect-importers/mysql/*.ts"
|
||||
],
|
||||
"excludePatterns": [
|
||||
"*.test.ts",
|
||||
"*.spec.ts"
|
||||
],
|
||||
"tests": [
|
||||
"src/lib/data/sql-import/dialect-importers/mysql/__tests__"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "SQLite Import",
|
||||
"patterns": [
|
||||
"src/lib/data/sql-import/dialect-importers/sqlite/*.ts"
|
||||
],
|
||||
"excludePatterns": [
|
||||
"*.test.ts",
|
||||
"*.spec.ts"
|
||||
],
|
||||
"tests": [
|
||||
"src/lib/data/sql-import/dialect-importers/sqlite/__tests__"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "SQL Server Import",
|
||||
"patterns": [
|
||||
"src/lib/data/sql-import/dialect-importers/sql-server/*.ts"
|
||||
],
|
||||
"excludePatterns": [
|
||||
"*.test.ts",
|
||||
"*.spec.ts"
|
||||
],
|
||||
"tests": [
|
||||
"src/lib/data/sql-import/dialect-importers/sql-server/__tests__"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Common SQL Import",
|
||||
"patterns": [
|
||||
"src/lib/data/sql-import/*.ts",
|
||||
"src/lib/data/sql-import/common/*.ts"
|
||||
],
|
||||
"excludePatterns": [
|
||||
"*.test.ts",
|
||||
"*.spec.ts",
|
||||
"*/dialect-importers/*"
|
||||
],
|
||||
"tests": [
|
||||
"src/lib/data/sql-import/dialect-importers/postgresql/__tests__",
|
||||
"src/lib/data/sql-import/dialect-importers/mysql/__tests__",
|
||||
"src/lib/data/sql-import/dialect-importers/sqlite/__tests__",
|
||||
"src/lib/data/sql-import/dialect-importers/sql-server/__tests__"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "SQL Validator",
|
||||
"patterns": [
|
||||
"src/lib/data/sql-import/sql-validator.ts"
|
||||
],
|
||||
"tests": [
|
||||
"src/lib/data/sql-import/dialect-importers/postgresql/__tests__"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Import Dialog",
|
||||
"patterns": [
|
||||
"src/dialogs/common/import-database/*.tsx",
|
||||
"src/dialogs/common/import-database/*.ts"
|
||||
],
|
||||
"excludePatterns": [
|
||||
"*.test.tsx",
|
||||
"*.spec.tsx"
|
||||
],
|
||||
"tests": []
|
||||
}
|
||||
]
|
||||
}
|
890
package-lock.json
generated
890
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
13
package.json
13
package.json
@@ -9,7 +9,10 @@
|
||||
"lint": "eslint . --report-unused-disable-directives --max-warnings 0",
|
||||
"lint:fix": "npm run lint -- --fix",
|
||||
"preview": "vite preview",
|
||||
"prepare": "husky"
|
||||
"prepare": "husky",
|
||||
"test": "vitest",
|
||||
"test:ui": "vitest --ui",
|
||||
"test:coverage": "vitest --coverage"
|
||||
},
|
||||
"dependencies": {
|
||||
"@ai-sdk/openai": "^0.0.51",
|
||||
@@ -73,12 +76,16 @@
|
||||
"@eslint/compat": "^1.2.4",
|
||||
"@eslint/eslintrc": "^3.2.0",
|
||||
"@eslint/js": "^9.16.0",
|
||||
"@testing-library/jest-dom": "^6.6.3",
|
||||
"@testing-library/react": "^16.3.0",
|
||||
"@testing-library/user-event": "^14.6.1",
|
||||
"@types/node": "^22.1.0",
|
||||
"@types/react": "^18.3.3",
|
||||
"@types/react-dom": "^18.3.0",
|
||||
"@typescript-eslint/eslint-plugin": "^8.18.0",
|
||||
"@typescript-eslint/parser": "^8.18.0",
|
||||
"@vitejs/plugin-react": "^4.3.1",
|
||||
"@vitest/ui": "^3.2.4",
|
||||
"autoprefixer": "^10.4.20",
|
||||
"eslint": "^9.16.0",
|
||||
"eslint-config-prettier": "^9.1.0",
|
||||
@@ -90,6 +97,7 @@
|
||||
"eslint-plugin-react-refresh": "^0.4.7",
|
||||
"eslint-plugin-tailwindcss": "^3.17.4",
|
||||
"globals": "^15.13.0",
|
||||
"happy-dom": "^18.0.1",
|
||||
"husky": "^9.1.5",
|
||||
"postcss": "^8.4.40",
|
||||
"prettier": "^3.3.3",
|
||||
@@ -97,6 +105,7 @@
|
||||
"tailwindcss": "^3.4.7",
|
||||
"typescript": "^5.2.2",
|
||||
"unplugin-inject-preload": "^3.0.0",
|
||||
"vite": "^5.3.4"
|
||||
"vite": "^5.3.4",
|
||||
"vitest": "^3.2.4"
|
||||
}
|
||||
}
|
||||
|
166
parser-comparison-analysis.md
Normal file
166
parser-comparison-analysis.md
Normal file
@@ -0,0 +1,166 @@
|
||||
# PostgreSQL vs MySQL Parser Comparison Analysis
|
||||
|
||||
## Overview
|
||||
This document compares how the PostgreSQL and MySQL parsers in ChartDB handle SQL parsing, focusing on the differences that could cause the same SQL file to produce different results.
|
||||
|
||||
## 1. SQL Sanitization and Comment Handling
|
||||
|
||||
### PostgreSQL Parser (`postgresql-improved.ts`)
|
||||
|
||||
#### Comment Removal Strategy:
|
||||
1. **Order**: Comments are removed FIRST, before any other processing
|
||||
2. **Multi-line comments**: Removed using regex: `/\/\*[\s\S]*?\*\//g`
|
||||
3. **Single-line comments**: Removed line-by-line, checking for `--` while respecting string boundaries
|
||||
4. **String-aware**: Preserves `--` inside quoted strings
|
||||
|
||||
```typescript
|
||||
// PostgreSQL approach (lines 60-100)
|
||||
// 1. First removes ALL multi-line comments
|
||||
cleanedSQL = cleanedSQL.replace(/\/\*[\s\S]*?\*\//g, '');
|
||||
|
||||
// 2. Then processes single-line comments while respecting strings
|
||||
for (let i = 0; i < line.length; i++) {
|
||||
// Tracks if we're inside a string to avoid removing -- inside quotes
|
||||
}
|
||||
```
|
||||
|
||||
### MySQL Parser (`mysql-improved.ts`)
|
||||
|
||||
#### Comment Removal Strategy:
|
||||
1. **Order**: Comments are sanitized but with special handling for problematic patterns
|
||||
2. **Special handling**: Specifically fixes multi-line comments that contain quotes or JSON
|
||||
3. **Line-by-line**: Processes comments line by line, removing lines that start with `--` or `#`
|
||||
|
||||
```typescript
|
||||
// MySQL approach (lines 35-67)
|
||||
// 1. First fixes specific problematic patterns
|
||||
result = result.replace(/--\s*"[^"]*",?\s*\n\s*"[^"]*".*$/gm, function(match) {
|
||||
return match.replace(/\n/g, ' ');
|
||||
});
|
||||
|
||||
// 2. Then removes comment lines entirely
|
||||
.map((line) => {
|
||||
if (trimmed.startsWith('--') || trimmed.startsWith('#')) {
|
||||
return '';
|
||||
}
|
||||
return line;
|
||||
})
|
||||
```
|
||||
|
||||
**Key Difference**: PostgreSQL removes ALL comments upfront, while MySQL tries to fix problematic comment patterns first, then removes comment lines.
|
||||
|
||||
## 2. Order of Operations
|
||||
|
||||
### PostgreSQL Parser
|
||||
1. **Preprocess SQL** (removes all comments first)
|
||||
2. **Split statements** by semicolons (handles dollar quotes)
|
||||
3. **Categorize statements** (table, index, alter, etc.)
|
||||
4. **Parse with node-sql-parser**
|
||||
5. **Fallback to regex** if parser fails
|
||||
6. **Extract relationships**
|
||||
|
||||
### MySQL Parser
|
||||
1. **Validate syntax** (checks for known issues)
|
||||
2. **Sanitize SQL** (fixes problematic patterns)
|
||||
3. **Extract statements** by semicolons
|
||||
4. **Parse with node-sql-parser**
|
||||
5. **Fallback to regex** if parser fails
|
||||
6. **Process relationships**
|
||||
|
||||
**Key Difference**: MySQL validates BEFORE sanitizing, while PostgreSQL sanitizes first. This means MySQL can detect and report issues that PostgreSQL might silently fix.
|
||||
|
||||
## 3. Multi-line Comment Handling
|
||||
|
||||
### PostgreSQL
|
||||
- Removes ALL multi-line comments using `[\s\S]*?` pattern
|
||||
- No special handling for comments containing quotes or JSON
|
||||
- Clean removal before any parsing
|
||||
|
||||
### MySQL
|
||||
- Specifically detects and fixes multi-line comments with quotes:
|
||||
```sql
|
||||
-- "Beliebt",
|
||||
"Empfohlen" -- This breaks MySQL parser
|
||||
```
|
||||
- Detects JSON arrays in comments spanning lines:
|
||||
```sql
|
||||
-- [
|
||||
"Ubuntu 22.04",
|
||||
"CentOS 8"
|
||||
] -- This also breaks MySQL parser
|
||||
```
|
||||
- Converts these to single-line comments before parsing
|
||||
|
||||
**Key Difference**: MySQL has specific handling for problematic comment patterns that PostgreSQL simply removes entirely.
|
||||
|
||||
## 4. Statement Splitting
|
||||
|
||||
### PostgreSQL
|
||||
- Handles PostgreSQL-specific dollar quotes (`$$ ... $$`)
|
||||
- Tracks quote depth for proper splitting
|
||||
- Supports function bodies with dollar quotes
|
||||
|
||||
### MySQL
|
||||
- Simple quote tracking (single, double, backtick)
|
||||
- Handles escape sequences (`\`)
|
||||
- No special quote constructs
|
||||
|
||||
## 5. Validation Approach
|
||||
|
||||
### PostgreSQL
|
||||
- No pre-validation
|
||||
- Relies on parser and fallback regex
|
||||
- Reports warnings for unsupported features
|
||||
|
||||
### MySQL
|
||||
- Pre-validates SQL before parsing
|
||||
- Detects known problematic patterns:
|
||||
- Multi-line comments with quotes
|
||||
- JSON arrays in comments
|
||||
- Inline REFERENCES (PostgreSQL syntax)
|
||||
- Missing semicolons
|
||||
- Can reject SQL before attempting to parse
|
||||
|
||||
## 6. Why Same SQL Gives Different Results
|
||||
|
||||
### Example Problematic SQL:
|
||||
```sql
|
||||
CREATE TABLE products (
|
||||
id INT PRIMARY KEY,
|
||||
status VARCHAR(50), -- "active",
|
||||
"inactive", "pending"
|
||||
data JSON -- [
|
||||
{"key": "value"},
|
||||
{"key": "value2"}
|
||||
]
|
||||
);
|
||||
```
|
||||
|
||||
### PostgreSQL Result:
|
||||
- Successfully parses (comments are removed entirely)
|
||||
- Table created with proper columns
|
||||
|
||||
### MySQL Result:
|
||||
- Validation fails with errors:
|
||||
- MULTILINE_COMMENT_QUOTE at line 3
|
||||
- MULTILINE_JSON_COMMENT at line 5
|
||||
- Import blocked unless validation is skipped
|
||||
|
||||
## 7. Recommendations
|
||||
|
||||
1. **For Cross-Database Compatibility**:
|
||||
- Avoid multi-line comments with quotes or JSON
|
||||
- Keep comments on single lines
|
||||
- Use proper FOREIGN KEY syntax instead of inline REFERENCES
|
||||
|
||||
2. **For MySQL Import**:
|
||||
- Fix validation errors before import
|
||||
- Or use `skipValidation: true` option if SQL is known to work
|
||||
|
||||
3. **For PostgreSQL Import**:
|
||||
- Be aware that comments are stripped entirely
|
||||
- Complex comments might hide syntax issues
|
||||
|
||||
## Conclusion
|
||||
|
||||
The main difference is that PostgreSQL takes a "remove all comments first" approach, while MySQL tries to detect and handle problematic comment patterns. This makes PostgreSQL more forgiving but MySQL more explicit about potential issues. The same SQL file can succeed in PostgreSQL but fail in MySQL if it contains multi-line comments with special characters.
|
@@ -35,8 +35,11 @@ import type { OnChange } from '@monaco-editor/react';
|
||||
import { useDebounce } from '@/hooks/use-debounce-v2';
|
||||
import { InstructionsSection } from './instructions-section/instructions-section';
|
||||
import { parseSQLError } from '@/lib/data/sql-import';
|
||||
import type { editor } from 'monaco-editor';
|
||||
import type * as monaco from 'monaco-editor';
|
||||
import { waitFor } from '@/lib/utils';
|
||||
import { type ValidationResult } from '@/lib/data/sql-import/sql-validator';
|
||||
import { validateSQL } from '@/lib/data/sql-import/unified-sql-validator';
|
||||
import { SQLValidationStatus } from './sql-validation-status';
|
||||
|
||||
const errorScriptOutputMessage =
|
||||
'Invalid JSON. Please correct it or contact us at support@chartdb.io for help.';
|
||||
@@ -117,7 +120,8 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
}) => {
|
||||
const { effectiveTheme } = useTheme();
|
||||
const [errorMessage, setErrorMessage] = useState('');
|
||||
const editorRef = useRef<editor.IStandaloneCodeEditor | null>(null);
|
||||
const editorRef = useRef<monaco.editor.IStandaloneCodeEditor | null>(null);
|
||||
const pasteDisposableRef = useRef<monaco.IDisposable | null>(null);
|
||||
|
||||
const { t } = useTranslation();
|
||||
const { isSm: isDesktop } = useBreakpoint('sm');
|
||||
@@ -125,6 +129,11 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
const [showCheckJsonButton, setShowCheckJsonButton] = useState(false);
|
||||
const [isCheckingJson, setIsCheckingJson] = useState(false);
|
||||
const [showSSMSInfoDialog, setShowSSMSInfoDialog] = useState(false);
|
||||
const [sqlValidation, setSqlValidation] = useState<ValidationResult | null>(
|
||||
null
|
||||
);
|
||||
const [isAutoFixing, setIsAutoFixing] = useState(false);
|
||||
const [showAutoFixButton, setShowAutoFixButton] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
setScriptResult('');
|
||||
@@ -135,11 +144,33 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
// Check if the ddl is valid
|
||||
useEffect(() => {
|
||||
if (importMethod !== 'ddl') {
|
||||
setSqlValidation(null);
|
||||
setShowAutoFixButton(false);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!scriptResult.trim()) return;
|
||||
if (!scriptResult.trim()) {
|
||||
setSqlValidation(null);
|
||||
setShowAutoFixButton(false);
|
||||
return;
|
||||
}
|
||||
|
||||
// First run our validation based on database type
|
||||
const validation = validateSQL(scriptResult, databaseType);
|
||||
setSqlValidation(validation);
|
||||
|
||||
// If we have auto-fixable errors, show the auto-fix button
|
||||
if (validation.fixedSQL && validation.errors.length > 0) {
|
||||
setShowAutoFixButton(true);
|
||||
// Don't try to parse invalid SQL
|
||||
setErrorMessage('SQL contains syntax errors');
|
||||
return;
|
||||
}
|
||||
|
||||
// Hide auto-fix button if no fixes available
|
||||
setShowAutoFixButton(false);
|
||||
|
||||
// Validate the SQL (either original or already fixed)
|
||||
parseSQLError({
|
||||
sqlContent: scriptResult,
|
||||
sourceDatabaseType: databaseType,
|
||||
@@ -185,6 +216,28 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
}
|
||||
}, [errorMessage.length, onImport, scriptResult]);
|
||||
|
||||
const handleAutoFix = useCallback(() => {
|
||||
if (sqlValidation?.fixedSQL) {
|
||||
setIsAutoFixing(true);
|
||||
setShowAutoFixButton(false);
|
||||
|
||||
// Apply the fix with a delay so user sees the fixing message
|
||||
setTimeout(() => {
|
||||
setScriptResult(sqlValidation.fixedSQL!);
|
||||
setIsAutoFixing(false);
|
||||
}, 1000);
|
||||
}
|
||||
}, [sqlValidation, setScriptResult]);
|
||||
|
||||
const handleErrorClick = useCallback((line: number) => {
|
||||
if (editorRef.current) {
|
||||
// Set cursor to the error line
|
||||
editorRef.current.setPosition({ lineNumber: line, column: 1 });
|
||||
editorRef.current.revealLineInCenter(line);
|
||||
editorRef.current.focus();
|
||||
}
|
||||
}, []);
|
||||
|
||||
const formatEditor = useCallback(() => {
|
||||
if (editorRef.current) {
|
||||
setTimeout(() => {
|
||||
@@ -229,37 +282,66 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
setIsCheckingJson(false);
|
||||
}, [scriptResult, setScriptResult, formatEditor]);
|
||||
|
||||
const detectAndSetImportMethod = useCallback(() => {
|
||||
const content = editorRef.current?.getValue();
|
||||
if (content && content.trim()) {
|
||||
const detectedType = detectContentType(content);
|
||||
if (detectedType && detectedType !== importMethod) {
|
||||
setImportMethod(detectedType);
|
||||
}
|
||||
}
|
||||
}, [setImportMethod, importMethod]);
|
||||
|
||||
const [editorDidMount, setEditorDidMount] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
if (editorRef.current && editorDidMount) {
|
||||
editorRef.current.onDidPaste(() => {
|
||||
setTimeout(() => {
|
||||
editorRef.current
|
||||
?.getAction('editor.action.formatDocument')
|
||||
?.run();
|
||||
}, 0);
|
||||
setTimeout(detectAndSetImportMethod, 0);
|
||||
});
|
||||
}
|
||||
}, [detectAndSetImportMethod, editorDidMount]);
|
||||
// Cleanup paste handler on unmount
|
||||
return () => {
|
||||
if (pasteDisposableRef.current) {
|
||||
pasteDisposableRef.current.dispose();
|
||||
pasteDisposableRef.current = null;
|
||||
}
|
||||
};
|
||||
}, []);
|
||||
|
||||
const handleEditorDidMount = useCallback(
|
||||
(editor: editor.IStandaloneCodeEditor) => {
|
||||
(editor: monaco.editor.IStandaloneCodeEditor) => {
|
||||
editorRef.current = editor;
|
||||
setEditorDidMount(true);
|
||||
|
||||
// Cleanup previous disposable if it exists
|
||||
if (pasteDisposableRef.current) {
|
||||
pasteDisposableRef.current.dispose();
|
||||
pasteDisposableRef.current = null;
|
||||
}
|
||||
|
||||
// Add paste handler for all modes
|
||||
const disposable = editor.onDidPaste(() => {
|
||||
const model = editor.getModel();
|
||||
if (!model) return;
|
||||
|
||||
const content = model.getValue();
|
||||
|
||||
// First, detect content type to determine if we should switch modes
|
||||
const detectedType = detectContentType(content);
|
||||
if (detectedType && detectedType !== importMethod) {
|
||||
// Switch to the detected mode immediately
|
||||
setImportMethod(detectedType);
|
||||
|
||||
// Only format if it's JSON (query mode)
|
||||
if (detectedType === 'query') {
|
||||
// For JSON mode, format after a short delay
|
||||
setTimeout(() => {
|
||||
editor
|
||||
.getAction('editor.action.formatDocument')
|
||||
?.run();
|
||||
}, 100);
|
||||
}
|
||||
// For DDL mode, do NOT format as it can break the SQL
|
||||
} else {
|
||||
// Content type didn't change, apply formatting based on current mode
|
||||
if (importMethod === 'query') {
|
||||
// Only format JSON content
|
||||
setTimeout(() => {
|
||||
editor
|
||||
.getAction('editor.action.formatDocument')
|
||||
?.run();
|
||||
}, 100);
|
||||
}
|
||||
// For DDL mode, do NOT format
|
||||
}
|
||||
});
|
||||
|
||||
pasteDisposableRef.current = disposable;
|
||||
},
|
||||
[]
|
||||
[importMethod, setImportMethod]
|
||||
);
|
||||
|
||||
const renderHeader = useCallback(() => {
|
||||
@@ -316,7 +398,7 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
: 'dbml-light'
|
||||
}
|
||||
options={{
|
||||
formatOnPaste: true,
|
||||
formatOnPaste: false, // Never format on paste - we handle it manually
|
||||
minimap: { enabled: false },
|
||||
scrollBeyondLastLine: false,
|
||||
automaticLayout: true,
|
||||
@@ -345,10 +427,21 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
</Suspense>
|
||||
</div>
|
||||
|
||||
{errorMessage ? (
|
||||
<div className="mt-2 flex shrink-0 items-center gap-2">
|
||||
<p className="text-xs text-red-700">{errorMessage}</p>
|
||||
</div>
|
||||
{errorMessage || (importMethod === 'ddl' && sqlValidation) ? (
|
||||
importMethod === 'ddl' ? (
|
||||
<SQLValidationStatus
|
||||
validation={sqlValidation}
|
||||
errorMessage={errorMessage}
|
||||
isAutoFixing={isAutoFixing}
|
||||
onErrorClick={handleErrorClick}
|
||||
/>
|
||||
) : (
|
||||
<div className="mt-2 flex shrink-0 items-center gap-2">
|
||||
<p className="text-xs text-red-700">
|
||||
{errorMessage}
|
||||
</p>
|
||||
</div>
|
||||
)
|
||||
) : null}
|
||||
</div>
|
||||
),
|
||||
@@ -359,6 +452,9 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
effectiveTheme,
|
||||
debouncedHandleInputChange,
|
||||
handleEditorDidMount,
|
||||
sqlValidation,
|
||||
isAutoFixing,
|
||||
handleErrorClick,
|
||||
]
|
||||
);
|
||||
|
||||
@@ -444,13 +540,28 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
)
|
||||
)}
|
||||
</Button>
|
||||
) : showAutoFixButton && importMethod === 'ddl' ? (
|
||||
<Button
|
||||
type="button"
|
||||
variant="secondary"
|
||||
onClick={handleAutoFix}
|
||||
disabled={isAutoFixing}
|
||||
className="bg-blue-600 text-white hover:bg-blue-700"
|
||||
>
|
||||
{isAutoFixing ? (
|
||||
<Spinner size="small" />
|
||||
) : (
|
||||
'Try auto-fix'
|
||||
)}
|
||||
</Button>
|
||||
) : keepDialogAfterImport ? (
|
||||
<Button
|
||||
type="button"
|
||||
variant="default"
|
||||
disabled={
|
||||
scriptResult.trim().length === 0 ||
|
||||
errorMessage.length > 0
|
||||
errorMessage.length > 0 ||
|
||||
isAutoFixing
|
||||
}
|
||||
onClick={handleImport}
|
||||
>
|
||||
@@ -463,7 +574,8 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
variant="default"
|
||||
disabled={
|
||||
scriptResult.trim().length === 0 ||
|
||||
errorMessage.length > 0
|
||||
errorMessage.length > 0 ||
|
||||
isAutoFixing
|
||||
}
|
||||
onClick={handleImport}
|
||||
>
|
||||
@@ -496,6 +608,10 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
|
||||
handleCheckJson,
|
||||
goBack,
|
||||
t,
|
||||
importMethod,
|
||||
isAutoFixing,
|
||||
showAutoFixButton,
|
||||
handleAutoFix,
|
||||
]);
|
||||
|
||||
return (
|
||||
|
122
src/dialogs/common/import-database/sql-validation-status.tsx
Normal file
122
src/dialogs/common/import-database/sql-validation-status.tsx
Normal file
@@ -0,0 +1,122 @@
|
||||
import React from 'react';
|
||||
import {
|
||||
AlertCircle,
|
||||
CheckCircle,
|
||||
AlertTriangle,
|
||||
Lightbulb,
|
||||
} from 'lucide-react';
|
||||
import { Alert, AlertDescription } from '@/components/alert/alert';
|
||||
import type { ValidationResult } from '@/lib/data/sql-import/sql-validator';
|
||||
|
||||
interface SQLValidationStatusProps {
|
||||
validation: ValidationResult | null;
|
||||
errorMessage: string;
|
||||
isAutoFixing?: boolean;
|
||||
onErrorClick?: (line: number) => void;
|
||||
}
|
||||
|
||||
export const SQLValidationStatus: React.FC<SQLValidationStatusProps> = ({
|
||||
validation,
|
||||
errorMessage,
|
||||
isAutoFixing = false,
|
||||
onErrorClick,
|
||||
}) => {
|
||||
if (!validation && !errorMessage && !isAutoFixing) return null;
|
||||
|
||||
const hasErrors = validation?.errors && validation.errors.length > 0;
|
||||
const hasWarnings = validation?.warnings && validation.warnings.length > 0;
|
||||
const wasAutoFixed =
|
||||
validation?.warnings?.some((w) => w.message.includes('Auto-fixed')) ||
|
||||
false;
|
||||
|
||||
// If we have parser errors (errorMessage) after validation
|
||||
if (errorMessage && !hasErrors) {
|
||||
return (
|
||||
<Alert variant="destructive" className="mt-2">
|
||||
<AlertCircle className="size-4" />
|
||||
<AlertDescription className="text-sm">
|
||||
{errorMessage}
|
||||
</AlertDescription>
|
||||
</Alert>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="mt-2 space-y-2">
|
||||
{isAutoFixing && (
|
||||
<Alert className="border-blue-200 bg-blue-50 dark:border-blue-800 dark:bg-blue-950">
|
||||
<Lightbulb className="size-4 animate-pulse text-blue-600 dark:text-blue-400" />
|
||||
<AlertDescription className="text-sm text-blue-700 dark:text-blue-300">
|
||||
Auto-fixing SQL syntax errors...
|
||||
</AlertDescription>
|
||||
</Alert>
|
||||
)}
|
||||
|
||||
{hasErrors && !isAutoFixing && (
|
||||
<Alert variant="destructive">
|
||||
<AlertCircle className="size-4" />
|
||||
<AlertDescription className="space-y-1 text-sm">
|
||||
<div className="font-medium">SQL Syntax Errors:</div>
|
||||
{validation.errors.slice(0, 3).map((error, idx) => (
|
||||
<div key={idx} className="ml-2">
|
||||
•{' '}
|
||||
<button
|
||||
onClick={() => onErrorClick?.(error.line)}
|
||||
className="rounded underline hover:text-red-600 focus:outline-none focus:ring-1 focus:ring-red-500"
|
||||
type="button"
|
||||
>
|
||||
Line {error.line}
|
||||
</button>
|
||||
: {error.message}
|
||||
{error.suggestion && (
|
||||
<div className="ml-4 text-xs opacity-80">
|
||||
→ {error.suggestion}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
{validation.errors.length > 3 && (
|
||||
<div className="ml-2 text-xs opacity-70">
|
||||
... and {validation.errors.length - 3} more
|
||||
errors
|
||||
</div>
|
||||
)}
|
||||
</AlertDescription>
|
||||
</Alert>
|
||||
)}
|
||||
|
||||
{wasAutoFixed && !hasErrors && (
|
||||
<Alert className="border-green-200 bg-green-50 dark:border-green-800 dark:bg-green-950">
|
||||
<CheckCircle className="size-4 text-green-600 dark:text-green-400" />
|
||||
<AlertDescription className="text-sm text-green-700 dark:text-green-300">
|
||||
SQL syntax errors were automatically fixed. Your SQL is
|
||||
now ready to import.
|
||||
</AlertDescription>
|
||||
</Alert>
|
||||
)}
|
||||
|
||||
{hasWarnings && !hasErrors && (
|
||||
<Alert>
|
||||
<AlertTriangle className="size-4" />
|
||||
<AlertDescription className="space-y-1 text-sm">
|
||||
<div className="font-medium">Import Info:</div>
|
||||
{validation.warnings.map((warning, idx) => (
|
||||
<div key={idx} className="ml-2">
|
||||
• {warning.message}
|
||||
</div>
|
||||
))}
|
||||
</AlertDescription>
|
||||
</Alert>
|
||||
)}
|
||||
|
||||
{!hasErrors && !hasWarnings && !errorMessage && validation && (
|
||||
<Alert className="flex border-green-200 bg-green-50 dark:border-green-800 dark:bg-green-950 [&>svg]:static [&>svg]:mr-2 [&>svg~*]:pl-0">
|
||||
<CheckCircle className="size-4 text-green-600 dark:text-green-400" />
|
||||
<AlertDescription className="text-sm text-green-700 dark:text-green-300">
|
||||
SQL syntax validated successfully
|
||||
</AlertDescription>
|
||||
</Alert>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
};
|
@@ -58,13 +58,16 @@ export const ImportDatabaseDialog: React.FC<ImportDatabaseDialogProps> = ({
|
||||
|
||||
const importDatabase = useCallback(async () => {
|
||||
let diagram: Diagram | undefined;
|
||||
let warnings: string[] | undefined;
|
||||
|
||||
if (importMethod === 'ddl') {
|
||||
diagram = await sqlImportToDiagram({
|
||||
const result = await sqlImportToDiagram({
|
||||
sqlContent: scriptResult,
|
||||
sourceDatabaseType: databaseType,
|
||||
targetDatabaseType: databaseType,
|
||||
});
|
||||
diagram = result;
|
||||
warnings = result.warnings;
|
||||
} else {
|
||||
const databaseMetadata: DatabaseMetadata =
|
||||
loadDatabaseMetadata(scriptResult);
|
||||
@@ -319,7 +322,38 @@ export const ImportDatabaseDialog: React.FC<ImportDatabaseDialogProps> = ({
|
||||
resetRedoStack();
|
||||
resetUndoStack();
|
||||
|
||||
closeImportDatabaseDialog();
|
||||
// Show warnings if any
|
||||
if (warnings && warnings.length > 0) {
|
||||
const warningContent = (
|
||||
<div className="space-y-2">
|
||||
<div className="font-semibold">
|
||||
The following SQL statements were skipped:
|
||||
</div>
|
||||
<ul className="list-inside list-disc space-y-1">
|
||||
{warnings.map((warning, index) => (
|
||||
<li key={index} className="text-sm">
|
||||
{warning}
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
<div className="mt-3 text-sm text-muted-foreground">
|
||||
Only table definitions, indexes, and foreign key
|
||||
constraints are currently supported.
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
showAlert({
|
||||
title: 'Import completed with warnings',
|
||||
content: warningContent,
|
||||
actionLabel: 'OK',
|
||||
onAction: () => {
|
||||
closeImportDatabaseDialog();
|
||||
},
|
||||
});
|
||||
} else {
|
||||
closeImportDatabaseDialog();
|
||||
}
|
||||
}, [
|
||||
importMethod,
|
||||
databaseEdition,
|
||||
|
131
src/lib/data/sql-import/__tests__/sql-validator-autofix.test.ts
Normal file
131
src/lib/data/sql-import/__tests__/sql-validator-autofix.test.ts
Normal file
@@ -0,0 +1,131 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { validatePostgreSQLSyntax } from '../sql-validator';
|
||||
|
||||
describe('SQL Validator Auto-fix', () => {
|
||||
it('should provide auto-fix for cast operator errors', () => {
|
||||
const sql = `
|
||||
CREATE TABLE dragons (
|
||||
id UUID PRIMARY KEY,
|
||||
lair_location GEOGRAPHY(POINT, 4326)
|
||||
);
|
||||
|
||||
-- Problematic queries with cast operator errors
|
||||
SELECT id: :text FROM dragons;
|
||||
SELECT ST_X(lair_location: :geometry) AS longitude FROM dragons;
|
||||
`;
|
||||
|
||||
const result = validatePostgreSQLSyntax(sql);
|
||||
|
||||
// Should detect errors
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors.length).toBeGreaterThan(0);
|
||||
|
||||
// Should provide fixed SQL
|
||||
expect(result.fixedSQL).toBeDefined();
|
||||
|
||||
// Fixed SQL should have correct cast operators
|
||||
expect(result.fixedSQL).toContain('::text');
|
||||
expect(result.fixedSQL).toContain('::geometry');
|
||||
expect(result.fixedSQL).not.toContain(': :');
|
||||
|
||||
// The CREATE TABLE should remain intact
|
||||
expect(result.fixedSQL).toContain('GEOGRAPHY(POINT, 4326)');
|
||||
});
|
||||
|
||||
it('should handle multi-line cast operator errors', () => {
|
||||
const sql = `
|
||||
SELECT AVG(power_level): :DECIMAL(3,
|
||||
2) FROM enchantments;
|
||||
`;
|
||||
|
||||
const result = validatePostgreSQLSyntax(sql);
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.fixedSQL).toBeDefined();
|
||||
expect(result.fixedSQL).toContain('::DECIMAL(3,');
|
||||
expect(result.fixedSQL).not.toContain(': :');
|
||||
});
|
||||
|
||||
it('should auto-fix split DECIMAL declarations', () => {
|
||||
const sql = `
|
||||
CREATE TABLE potions (
|
||||
id INTEGER PRIMARY KEY,
|
||||
strength DECIMAL(10,
|
||||
2) NOT NULL,
|
||||
effectiveness NUMERIC(5,
|
||||
3) DEFAULT 0.000
|
||||
);`;
|
||||
|
||||
const result = validatePostgreSQLSyntax(sql);
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors.length).toBeGreaterThan(0);
|
||||
|
||||
// Should provide fixed SQL
|
||||
expect(result.fixedSQL).toBeDefined();
|
||||
|
||||
// Fixed SQL should have DECIMAL on one line
|
||||
expect(result.fixedSQL).toContain('DECIMAL(10,2)');
|
||||
expect(result.fixedSQL).toContain('NUMERIC(5,3)');
|
||||
expect(result.fixedSQL).not.toMatch(
|
||||
/DECIMAL\s*\(\s*\d+\s*,\s*\n\s*\d+\s*\)/
|
||||
);
|
||||
|
||||
// Should have warning about auto-fix
|
||||
expect(
|
||||
result.warnings.some((w) =>
|
||||
w.message.includes('Auto-fixed split DECIMAL/NUMERIC')
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle multiple auto-fixes together', () => {
|
||||
const sql = `
|
||||
CREATE TABLE enchantments (
|
||||
id INTEGER PRIMARY KEY,
|
||||
power_level DECIMAL(10,
|
||||
2) NOT NULL,
|
||||
magic_type VARCHAR(50)
|
||||
);
|
||||
|
||||
SELECT AVG(power_level): :DECIMAL(3,
|
||||
2) FROM enchantments;
|
||||
`;
|
||||
|
||||
const result = validatePostgreSQLSyntax(sql);
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.fixedSQL).toBeDefined();
|
||||
|
||||
// Should fix both issues
|
||||
expect(result.fixedSQL).toContain('DECIMAL(10,2)');
|
||||
expect(result.fixedSQL).toContain('::DECIMAL(3,');
|
||||
expect(result.fixedSQL).not.toContain(': :');
|
||||
|
||||
// Should have warnings for both fixes
|
||||
expect(
|
||||
result.warnings.some((w) =>
|
||||
w.message.includes('Auto-fixed cast operator')
|
||||
)
|
||||
).toBe(true);
|
||||
expect(
|
||||
result.warnings.some((w) =>
|
||||
w.message.includes('Auto-fixed split DECIMAL/NUMERIC')
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should preserve original SQL when no errors', () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (
|
||||
id UUID PRIMARY KEY,
|
||||
name VARCHAR(100)
|
||||
);`;
|
||||
|
||||
const result = validatePostgreSQLSyntax(sql);
|
||||
|
||||
expect(result.isValid).toBe(true);
|
||||
expect(result.errors).toHaveLength(0);
|
||||
expect(result.fixedSQL).toBeUndefined();
|
||||
});
|
||||
});
|
144
src/lib/data/sql-import/__tests__/sql-validator.test.ts
Normal file
144
src/lib/data/sql-import/__tests__/sql-validator.test.ts
Normal file
@@ -0,0 +1,144 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { validatePostgreSQLSyntax } from '../sql-validator';
|
||||
|
||||
describe('SQL Validator', () => {
|
||||
it('should detect cast operator errors (: :)', () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (
|
||||
id UUID PRIMARY KEY,
|
||||
spellbook JSONB,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
SELECT id: :text FROM wizards;
|
||||
SELECT COUNT(*): :integer FROM wizards;
|
||||
`;
|
||||
|
||||
const result = validatePostgreSQLSyntax(sql);
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors).toHaveLength(2);
|
||||
expect(result.errors[0].message).toContain('Invalid cast operator');
|
||||
expect(result.errors[0].suggestion).toBe('Replace ": :" with "::"');
|
||||
expect(result.fixedSQL).toBeDefined();
|
||||
expect(result.fixedSQL).toContain('::text');
|
||||
expect(result.fixedSQL).toContain('::integer');
|
||||
});
|
||||
|
||||
it('should detect split DECIMAL declarations', () => {
|
||||
const sql = `
|
||||
CREATE TABLE potions (
|
||||
id INTEGER PRIMARY KEY,
|
||||
power_level DECIMAL(10,
|
||||
2) NOT NULL
|
||||
);`;
|
||||
|
||||
const result = validatePostgreSQLSyntax(sql);
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(
|
||||
result.errors.some((e) =>
|
||||
e.message.includes('DECIMAL type declaration is split')
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should warn about extensions', () => {
|
||||
const sql = `
|
||||
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
|
||||
CREATE EXTENSION postgis;
|
||||
CREATE TABLE dragons (id UUID PRIMARY KEY);
|
||||
`;
|
||||
|
||||
const result = validatePostgreSQLSyntax(sql);
|
||||
|
||||
expect(
|
||||
result.warnings.some((w) => w.message.includes('CREATE EXTENSION'))
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should warn about functions and triggers', () => {
|
||||
const sql = `
|
||||
CREATE OR REPLACE FUNCTION update_timestamp()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
NEW.updated_at = CURRENT_TIMESTAMP;
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE TRIGGER update_wizards_timestamp
|
||||
BEFORE UPDATE ON wizards
|
||||
FOR EACH ROW EXECUTE FUNCTION update_timestamp();
|
||||
`;
|
||||
|
||||
const result = validatePostgreSQLSyntax(sql);
|
||||
|
||||
expect(
|
||||
result.warnings.some((w) =>
|
||||
w.message.includes('Function definitions')
|
||||
)
|
||||
).toBe(true);
|
||||
expect(
|
||||
result.warnings.some((w) =>
|
||||
w.message.includes('Trigger definitions')
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should validate clean SQL as valid', () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
magic_email VARCHAR(255) UNIQUE NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE spells (
|
||||
id SERIAL PRIMARY KEY,
|
||||
wizard_id UUID REFERENCES wizards(id),
|
||||
name VARCHAR(200) NOT NULL,
|
||||
incantation TEXT
|
||||
);
|
||||
`;
|
||||
|
||||
const result = validatePostgreSQLSyntax(sql);
|
||||
|
||||
expect(result.isValid).toBe(true);
|
||||
expect(result.errors).toHaveLength(0);
|
||||
expect(result.fixedSQL).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should handle the fifth example file issues', () => {
|
||||
const sql = `
|
||||
-- Sample from the problematic file
|
||||
UPDATE magic_towers
|
||||
SET
|
||||
power_average = (
|
||||
SELECT AVG(power): :DECIMAL(3,
|
||||
2)
|
||||
FROM enchantments
|
||||
WHERE tower_id = NEW.tower_id
|
||||
);
|
||||
|
||||
SELECT
|
||||
ST_X(t.location: :geometry) AS longitude,
|
||||
ST_Y(t.location: :geometry) AS latitude
|
||||
FROM towers t;
|
||||
`;
|
||||
|
||||
const result = validatePostgreSQLSyntax(sql);
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
// Should find multiple cast operator errors
|
||||
expect(
|
||||
result.errors.filter((e) =>
|
||||
e.message.includes('Invalid cast operator')
|
||||
).length
|
||||
).toBeGreaterThan(0);
|
||||
expect(result.fixedSQL).toBeDefined();
|
||||
expect(result.fixedSQL).not.toContain(': :');
|
||||
expect(result.fixedSQL).toContain('::DECIMAL');
|
||||
expect(result.fixedSQL).toContain('::geometry');
|
||||
});
|
||||
});
|
@@ -3,10 +3,13 @@ import { generateDiagramId, generateId } from '@/lib/utils';
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import type { Cardinality, DBRelationship } from '@/lib/domain/db-relationship';
|
||||
import type { DBField } from '@/lib/domain/db-field';
|
||||
import type { DBIndex } from '@/lib/domain/db-index';
|
||||
import type { DataType } from '@/lib/data/data-types/data-types';
|
||||
import { genericDataTypes } from '@/lib/data/data-types/generic-data-types';
|
||||
import { randomColor } from '@/lib/colors';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import type { DBCustomType } from '@/lib/domain/db-custom-type';
|
||||
import { DBCustomTypeKind } from '@/lib/domain/db-custom-type';
|
||||
|
||||
// Common interfaces for SQL entities
|
||||
export interface SQLColumn {
|
||||
@@ -62,6 +65,7 @@ export interface SQLParserResult {
|
||||
relationships: SQLForeignKey[];
|
||||
types?: SQLCustomType[];
|
||||
enums?: SQLEnumType[];
|
||||
warnings?: string[];
|
||||
}
|
||||
|
||||
// Define more specific types for SQL AST nodes
|
||||
@@ -543,6 +547,18 @@ export function convertToChartDBDiagram(
|
||||
) {
|
||||
// Ensure integer types are preserved
|
||||
mappedType = { id: 'integer', name: 'integer' };
|
||||
} else if (
|
||||
sourceDatabaseType === DatabaseType.POSTGRESQL &&
|
||||
parserResult.enums &&
|
||||
parserResult.enums.some(
|
||||
(e) => e.name.toLowerCase() === column.type.toLowerCase()
|
||||
)
|
||||
) {
|
||||
// If the column type matches a custom enum type, preserve it
|
||||
mappedType = {
|
||||
id: column.type.toLowerCase(),
|
||||
name: column.type,
|
||||
};
|
||||
} else {
|
||||
// Use the standard mapping for other types
|
||||
mappedType = mapSQLTypeToGenericType(
|
||||
@@ -588,25 +604,38 @@ export function convertToChartDBDiagram(
|
||||
});
|
||||
|
||||
// Create indexes
|
||||
const indexes = table.indexes.map((sqlIndex) => {
|
||||
const fieldIds = sqlIndex.columns.map((columnName) => {
|
||||
const field = fields.find((f) => f.name === columnName);
|
||||
if (!field) {
|
||||
throw new Error(
|
||||
`Index references non-existent column: ${columnName}`
|
||||
);
|
||||
}
|
||||
return field.id;
|
||||
});
|
||||
const indexes = table.indexes
|
||||
.map((sqlIndex) => {
|
||||
const fieldIds = sqlIndex.columns
|
||||
.map((columnName) => {
|
||||
const field = fields.find((f) => f.name === columnName);
|
||||
if (!field) {
|
||||
console.warn(
|
||||
`Index ${sqlIndex.name} references non-existent column: ${columnName} in table ${table.name}. Skipping this column.`
|
||||
);
|
||||
return null;
|
||||
}
|
||||
return field.id;
|
||||
})
|
||||
.filter((id): id is string => id !== null);
|
||||
|
||||
return {
|
||||
id: generateId(),
|
||||
name: sqlIndex.name,
|
||||
fieldIds,
|
||||
unique: sqlIndex.unique,
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
});
|
||||
// Only create index if at least one column was found
|
||||
if (fieldIds.length === 0) {
|
||||
console.warn(
|
||||
`Index ${sqlIndex.name} has no valid columns. Skipping index.`
|
||||
);
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
id: generateId(),
|
||||
name: sqlIndex.name,
|
||||
fieldIds,
|
||||
unique: sqlIndex.unique,
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
})
|
||||
.filter((idx): idx is DBIndex => idx !== null);
|
||||
|
||||
return {
|
||||
id: newId,
|
||||
@@ -708,12 +737,29 @@ export function convertToChartDBDiagram(
|
||||
});
|
||||
});
|
||||
|
||||
// Convert SQL enum types to ChartDB custom types
|
||||
const customTypes: DBCustomType[] = [];
|
||||
|
||||
if (parserResult.enums) {
|
||||
parserResult.enums.forEach((enumType, index) => {
|
||||
customTypes.push({
|
||||
id: generateId(),
|
||||
name: enumType.name,
|
||||
schema: 'public', // Default to public schema for now
|
||||
kind: DBCustomTypeKind.enum,
|
||||
values: enumType.values,
|
||||
order: index,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
const diagram = {
|
||||
id: generateDiagramId(),
|
||||
name: `SQL Import (${sourceDatabaseType})`,
|
||||
databaseType: targetDatabaseType,
|
||||
tables,
|
||||
relationships,
|
||||
customTypes: customTypes.length > 0 ? customTypes : undefined,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
@@ -0,0 +1,150 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromMySQL } from '../mysql';
|
||||
import { sqlImportToDiagram, detectDatabaseType } from '../../../index';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
|
||||
describe('MariaDB Integration', () => {
|
||||
it('should detect MariaDB from SQL dump', () => {
|
||||
const mariaDbSql = `
|
||||
-- MariaDB dump 10.19 Distrib 10.11.2-MariaDB, for Linux (x86_64)
|
||||
--
|
||||
-- Host: localhost Database: fantasy_db
|
||||
-- ------------------------------------------------------
|
||||
-- Server version 10.11.2-MariaDB
|
||||
|
||||
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
|
||||
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
|
||||
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
|
||||
/*!40101 SET NAMES utf8mb4 */;
|
||||
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
|
||||
/*!40103 SET TIME_ZONE='+00:00' */;
|
||||
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
|
||||
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
|
||||
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
|
||||
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
|
||||
|
||||
CREATE TABLE magic_realms (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;`;
|
||||
|
||||
const detectedType = detectDatabaseType(mariaDbSql);
|
||||
expect(detectedType).toBe(DatabaseType.MARIADB);
|
||||
});
|
||||
|
||||
it('should parse MariaDB SQL using MySQL parser', async () => {
|
||||
const mariaDbSql = `
|
||||
CREATE TABLE wizards (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(200) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
power_level INT DEFAULT 1,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE spells (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(200) NOT NULL,
|
||||
wizard_id INT NOT NULL,
|
||||
mana_cost INT DEFAULT 10,
|
||||
FOREIGN KEY (wizard_id) REFERENCES wizards(id) ON DELETE CASCADE
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;`;
|
||||
|
||||
const result = await fromMySQL(mariaDbSql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const wizards = result.tables.find((t) => t.name === 'wizards');
|
||||
expect(wizards?.columns).toHaveLength(5);
|
||||
|
||||
const fk = result.relationships[0];
|
||||
expect(fk.sourceTable).toBe('spells');
|
||||
expect(fk.targetTable).toBe('wizards');
|
||||
expect(fk.deleteAction).toBe('CASCADE');
|
||||
});
|
||||
|
||||
it('should handle MariaDB-specific storage engines', async () => {
|
||||
const mariaDbSql = `
|
||||
-- Using Aria storage engine (MariaDB specific)
|
||||
CREATE TABLE magical_logs (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
event_type VARCHAR(50) NOT NULL,
|
||||
description TEXT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
) ENGINE=Aria DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
-- Using ColumnStore engine (MariaDB specific)
|
||||
CREATE TABLE spell_analytics (
|
||||
id BIGINT AUTO_INCREMENT PRIMARY KEY,
|
||||
spell_name VARCHAR(200),
|
||||
cast_count BIGINT DEFAULT 0,
|
||||
avg_mana_cost DECIMAL(10,2)
|
||||
) ENGINE=COLUMNSTORE DEFAULT CHARSET=utf8mb4;`;
|
||||
|
||||
const result = await fromMySQL(mariaDbSql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(
|
||||
result.tables.find((t) => t.name === 'magical_logs')
|
||||
).toBeDefined();
|
||||
expect(
|
||||
result.tables.find((t) => t.name === 'spell_analytics')
|
||||
).toBeDefined();
|
||||
});
|
||||
|
||||
it('should handle MariaDB-specific data types', async () => {
|
||||
const mariaDbSql = `
|
||||
CREATE TABLE advanced_spells (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
spell_id UUID, -- MariaDB has native UUID type
|
||||
spell_data JSON, -- JSON support
|
||||
cast_location POINT, -- Geometry type
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB;`;
|
||||
|
||||
const result = await fromMySQL(mariaDbSql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const table = result.tables[0];
|
||||
expect(table.columns).toHaveLength(5);
|
||||
|
||||
const uuidCol = table.columns.find((c) => c.name === 'spell_id');
|
||||
expect(uuidCol?.type).toBe('UUID');
|
||||
|
||||
const jsonCol = table.columns.find((c) => c.name === 'spell_data');
|
||||
expect(jsonCol?.type).toBe('JSON');
|
||||
});
|
||||
|
||||
it('should work with sqlImportToDiagram for MariaDB', async () => {
|
||||
const mariaDbSql = `
|
||||
/*!100100 SET @@SQL_MODE='STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */;
|
||||
|
||||
CREATE TABLE dragon_riders (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(200) NOT NULL,
|
||||
dragon_count INT DEFAULT 0
|
||||
) ENGINE=InnoDB;
|
||||
|
||||
CREATE TABLE dragons (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(200) NOT NULL,
|
||||
rider_id INT,
|
||||
FOREIGN KEY (rider_id) REFERENCES dragon_riders(id)
|
||||
) ENGINE=InnoDB;`;
|
||||
|
||||
const diagram = await sqlImportToDiagram({
|
||||
sqlContent: mariaDbSql,
|
||||
sourceDatabaseType: DatabaseType.MARIADB,
|
||||
targetDatabaseType: DatabaseType.GENERIC,
|
||||
});
|
||||
|
||||
expect(diagram.tables).toHaveLength(2);
|
||||
expect(diagram.relationships).toHaveLength(1);
|
||||
|
||||
// Check that tables are properly sorted
|
||||
expect(diagram.tables[0].name).toBe('dragon_riders');
|
||||
expect(diagram.tables[1].name).toBe('dragons');
|
||||
});
|
||||
});
|
@@ -0,0 +1,487 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromMySQLImproved } from '../mysql-improved';
|
||||
import { fromMySQL } from '../mysql';
|
||||
|
||||
describe('MySQL Core Functionality', () => {
|
||||
describe('Basic Table Parsing', () => {
|
||||
it('should parse a simple table', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE users (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
username VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('users');
|
||||
expect(result.tables[0].columns).toHaveLength(4);
|
||||
|
||||
const idColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'id'
|
||||
);
|
||||
expect(idColumn?.primaryKey).toBe(true);
|
||||
expect(idColumn?.increment).toBe(true);
|
||||
|
||||
const emailColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'email'
|
||||
);
|
||||
expect(emailColumn?.unique).toBe(true);
|
||||
expect(emailColumn?.nullable).toBe(false);
|
||||
});
|
||||
|
||||
it('should parse tables with backticks', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE \`user-profiles\` (
|
||||
\`user-id\` INT PRIMARY KEY AUTO_INCREMENT,
|
||||
\`full-name\` VARCHAR(255) NOT NULL,
|
||||
\`bio-text\` TEXT
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('user-profiles');
|
||||
expect(
|
||||
result.tables[0].columns.some((c) => c.name === 'user-id')
|
||||
).toBe(true);
|
||||
expect(
|
||||
result.tables[0].columns.some((c) => c.name === 'full-name')
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle IF NOT EXISTS clause', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE IF NOT EXISTS products (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
name VARCHAR(255) NOT NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('products');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Data Types', () => {
|
||||
it('should parse various MySQL data types', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE data_types_test (
|
||||
col_tinyint TINYINT,
|
||||
col_smallint SMALLINT,
|
||||
col_mediumint MEDIUMINT,
|
||||
col_int INT(11),
|
||||
col_bigint BIGINT,
|
||||
col_decimal DECIMAL(10,2),
|
||||
col_float FLOAT,
|
||||
col_double DOUBLE,
|
||||
col_bit BIT(8),
|
||||
col_char CHAR(10),
|
||||
col_varchar VARCHAR(255),
|
||||
col_binary BINARY(16),
|
||||
col_varbinary VARBINARY(255),
|
||||
col_tinytext TINYTEXT,
|
||||
col_text TEXT,
|
||||
col_mediumtext MEDIUMTEXT,
|
||||
col_longtext LONGTEXT,
|
||||
col_tinyblob TINYBLOB,
|
||||
col_blob BLOB,
|
||||
col_mediumblob MEDIUMBLOB,
|
||||
col_longblob LONGBLOB,
|
||||
col_date DATE,
|
||||
col_datetime DATETIME,
|
||||
col_timestamp TIMESTAMP,
|
||||
col_time TIME,
|
||||
col_year YEAR,
|
||||
col_enum ENUM('small', 'medium', 'large'),
|
||||
col_set SET('read', 'write', 'execute'),
|
||||
col_json JSON,
|
||||
col_geometry GEOMETRY,
|
||||
col_point POINT
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const table = result.tables[0];
|
||||
|
||||
expect(table.columns.find((c) => c.name === 'col_int')?.type).toBe(
|
||||
'INT'
|
||||
);
|
||||
expect(
|
||||
table.columns.find((c) => c.name === 'col_varchar')?.type
|
||||
).toBe('VARCHAR');
|
||||
expect(
|
||||
table.columns.find((c) => c.name === 'col_decimal')?.type
|
||||
).toBe('DECIMAL');
|
||||
expect(table.columns.find((c) => c.name === 'col_enum')?.type).toBe(
|
||||
'ENUM'
|
||||
);
|
||||
expect(table.columns.find((c) => c.name === 'col_json')?.type).toBe(
|
||||
'JSON'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Constraints', () => {
|
||||
it('should parse PRIMARY KEY constraints', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE pk_test (
|
||||
id INT,
|
||||
code VARCHAR(10),
|
||||
PRIMARY KEY (id, code)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
const table = result.tables[0];
|
||||
expect(table.columns.find((c) => c.name === 'id')?.primaryKey).toBe(
|
||||
true
|
||||
);
|
||||
expect(
|
||||
table.columns.find((c) => c.name === 'code')?.primaryKey
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
table.indexes.some(
|
||||
(idx) =>
|
||||
idx.name === 'pk_pk_test' &&
|
||||
idx.columns.includes('id') &&
|
||||
idx.columns.includes('code')
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should parse UNIQUE constraints', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE unique_test (
|
||||
id INT PRIMARY KEY,
|
||||
email VARCHAR(255),
|
||||
username VARCHAR(100),
|
||||
UNIQUE KEY uk_email (email),
|
||||
UNIQUE KEY uk_username_email (username, email)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
const table = result.tables[0];
|
||||
expect(
|
||||
table.indexes.some(
|
||||
(idx) => idx.name === 'uk_email' && idx.unique === true
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
table.indexes.some(
|
||||
(idx) =>
|
||||
idx.name === 'uk_username_email' &&
|
||||
idx.unique === true &&
|
||||
idx.columns.length === 2
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should parse CHECK constraints', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE check_test (
|
||||
id INT PRIMARY KEY,
|
||||
age INT CHECK (age >= 18),
|
||||
price DECIMAL(10,2) CHECK (price > 0)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].columns).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Foreign Keys', () => {
|
||||
it('should parse inline FOREIGN KEY constraints', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE departments (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
name VARCHAR(255) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE employees (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
department_id INT,
|
||||
FOREIGN KEY (department_id) REFERENCES departments(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const fk = result.relationships[0];
|
||||
expect(fk.sourceTable).toBe('employees');
|
||||
expect(fk.sourceColumn).toBe('department_id');
|
||||
expect(fk.targetTable).toBe('departments');
|
||||
expect(fk.targetColumn).toBe('id');
|
||||
});
|
||||
|
||||
it('should parse named FOREIGN KEY constraints', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE orders (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
customer_id INT NOT NULL,
|
||||
CONSTRAINT fk_order_customer FOREIGN KEY (customer_id) REFERENCES customers(id) ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE customers (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
name VARCHAR(255) NOT NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const fk = result.relationships[0];
|
||||
expect(fk.name).toBe('fk_order_customer');
|
||||
expect(fk.deleteAction).toBe('CASCADE');
|
||||
expect(fk.updateAction).toBe('CASCADE');
|
||||
});
|
||||
|
||||
it('should handle ALTER TABLE ADD FOREIGN KEY', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE products (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
name VARCHAR(255) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE reviews (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
product_id INT NOT NULL,
|
||||
rating INT NOT NULL
|
||||
);
|
||||
|
||||
ALTER TABLE reviews
|
||||
ADD CONSTRAINT fk_review_product
|
||||
FOREIGN KEY (product_id) REFERENCES products(id) ON DELETE CASCADE;
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const fk = result.relationships[0];
|
||||
expect(fk.sourceTable).toBe('reviews');
|
||||
expect(fk.targetTable).toBe('products');
|
||||
expect(fk.deleteAction).toBe('CASCADE');
|
||||
});
|
||||
|
||||
it('should handle composite foreign keys', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE tenants (
|
||||
id INT NOT NULL,
|
||||
region VARCHAR(10) NOT NULL,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
PRIMARY KEY (id, region)
|
||||
);
|
||||
|
||||
CREATE TABLE tenant_settings (
|
||||
tenant_id INT NOT NULL,
|
||||
tenant_region VARCHAR(10) NOT NULL,
|
||||
setting_key VARCHAR(100) NOT NULL,
|
||||
setting_value TEXT,
|
||||
PRIMARY KEY (tenant_id, tenant_region, setting_key),
|
||||
FOREIGN KEY (tenant_id, tenant_region) REFERENCES tenants(id, region)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
expect(result.relationships).toHaveLength(2);
|
||||
|
||||
const fk1 = result.relationships.find(
|
||||
(r) => r.sourceColumn === 'tenant_id'
|
||||
);
|
||||
const fk2 = result.relationships.find(
|
||||
(r) => r.sourceColumn === 'tenant_region'
|
||||
);
|
||||
|
||||
expect(fk1?.targetColumn).toBe('id');
|
||||
expect(fk2?.targetColumn).toBe('region');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Indexes', () => {
|
||||
it('should parse CREATE INDEX statements', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE products (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
category VARCHAR(100),
|
||||
price DECIMAL(10,2)
|
||||
);
|
||||
|
||||
CREATE INDEX idx_category ON products(category);
|
||||
CREATE UNIQUE INDEX idx_name ON products(name);
|
||||
CREATE INDEX idx_category_price ON products(category, price);
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
const table = result.tables[0];
|
||||
expect(
|
||||
table.indexes.some(
|
||||
(idx) => idx.name === 'idx_category' && !idx.unique
|
||||
)
|
||||
).toBe(true);
|
||||
expect(
|
||||
table.indexes.some(
|
||||
(idx) => idx.name === 'idx_name' && idx.unique
|
||||
)
|
||||
).toBe(true);
|
||||
expect(
|
||||
table.indexes.some(
|
||||
(idx) =>
|
||||
idx.name === 'idx_category_price' &&
|
||||
idx.columns.length === 2
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should parse inline INDEX definitions', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE users (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
email VARCHAR(255) NOT NULL,
|
||||
username VARCHAR(100) NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
INDEX idx_email (email),
|
||||
UNIQUE INDEX uk_username (username),
|
||||
INDEX idx_created (created_at DESC)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
const table = result.tables[0];
|
||||
expect(table.indexes.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Table Options', () => {
|
||||
it('should handle ENGINE and CHARSET options', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE products (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
name VARCHAR(255) NOT NULL
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
|
||||
|
||||
CREATE TABLE logs (
|
||||
id BIGINT PRIMARY KEY AUTO_INCREMENT,
|
||||
message TEXT
|
||||
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.tables[0].name).toBe('products');
|
||||
expect(result.tables[1].name).toBe('logs');
|
||||
});
|
||||
|
||||
it('should handle AUTO_INCREMENT initial value', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE orders (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
order_number VARCHAR(50) NOT NULL
|
||||
) AUTO_INCREMENT=1000;
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const idColumn = result.tables[0].columns.find(
|
||||
(c) => c.name === 'id'
|
||||
);
|
||||
expect(idColumn?.increment).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Error Handling', () => {
|
||||
it('should reject inline REFERENCES (PostgreSQL style)', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE users (
|
||||
id INT PRIMARY KEY,
|
||||
department_id INT REFERENCES departments(id)
|
||||
);
|
||||
`;
|
||||
|
||||
// Using the original parser which checks for inline REFERENCES
|
||||
await expect(fromMySQL(sql)).rejects.toThrow(
|
||||
/MySQL\/MariaDB does not support inline REFERENCES/
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle malformed SQL gracefully', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE test (
|
||||
id INT PRIMARY KEY
|
||||
name VARCHAR(255) -- missing comma
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
// Should still create a table with fallback parsing
|
||||
expect(result.tables.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Comments and Special Cases', () => {
|
||||
it('should handle SQL comments', async () => {
|
||||
const sql = `
|
||||
-- This is a comment
|
||||
CREATE TABLE users (
|
||||
id INT PRIMARY KEY, -- user identifier
|
||||
/* Multi-line comment
|
||||
spanning multiple lines */
|
||||
name VARCHAR(255) NOT NULL
|
||||
);
|
||||
|
||||
# MySQL-style comment
|
||||
CREATE TABLE posts (
|
||||
id INT PRIMARY KEY
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.tables.map((t) => t.name).sort()).toEqual([
|
||||
'posts',
|
||||
'users',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should handle empty or whitespace-only input', async () => {
|
||||
const result1 = await fromMySQLImproved('');
|
||||
expect(result1.tables).toHaveLength(0);
|
||||
expect(result1.relationships).toHaveLength(0);
|
||||
|
||||
const result2 = await fromMySQLImproved(' \n\n ');
|
||||
expect(result2.tables).toHaveLength(0);
|
||||
expect(result2.relationships).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
});
|
@@ -0,0 +1,498 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromMySQLImproved } from '../mysql-improved';
|
||||
|
||||
describe('MySQL Real-World Examples', () => {
|
||||
describe('Magical Academy Example', () => {
|
||||
it('should parse the magical academy example with all 16 tables', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE schools(
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE towers(
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
school_id INT NOT NULL,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
FOREIGN KEY (school_id) REFERENCES schools(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE ranks(
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
school_id INT NOT NULL,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
FOREIGN KEY (school_id) REFERENCES schools(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE spell_permissions(
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
spell_type VARCHAR(100) NOT NULL,
|
||||
casting_level VARCHAR(50) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE rank_spell_permissions(
|
||||
rank_id INT NOT NULL,
|
||||
spell_permission_id INT NOT NULL,
|
||||
PRIMARY KEY (rank_id, spell_permission_id),
|
||||
FOREIGN KEY (rank_id) REFERENCES ranks(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (spell_permission_id) REFERENCES spell_permissions(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE grimoire_types(
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
school_id INT NOT NULL,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
FOREIGN KEY (school_id) REFERENCES schools(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE wizards(
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
school_id INT NOT NULL,
|
||||
tower_id INT NOT NULL,
|
||||
wizard_name VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) NOT NULL,
|
||||
UNIQUE KEY school_wizard_unique (school_id, wizard_name),
|
||||
FOREIGN KEY (school_id) REFERENCES schools(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (tower_id) REFERENCES towers(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE wizard_ranks(
|
||||
wizard_id INT NOT NULL,
|
||||
rank_id INT NOT NULL,
|
||||
tower_id INT NOT NULL,
|
||||
assigned_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
PRIMARY KEY (wizard_id, rank_id, tower_id),
|
||||
FOREIGN KEY (wizard_id) REFERENCES wizards(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (rank_id) REFERENCES ranks(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (tower_id) REFERENCES towers(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE apprentices(
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
school_id INT NOT NULL,
|
||||
tower_id INT NOT NULL,
|
||||
first_name VARCHAR(100) NOT NULL,
|
||||
last_name VARCHAR(100) NOT NULL,
|
||||
enrollment_date DATE NOT NULL,
|
||||
primary_mentor INT,
|
||||
sponsoring_wizard INT,
|
||||
FOREIGN KEY (school_id) REFERENCES schools(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (tower_id) REFERENCES towers(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (primary_mentor) REFERENCES wizards(id),
|
||||
FOREIGN KEY (sponsoring_wizard) REFERENCES wizards(id)
|
||||
);
|
||||
|
||||
CREATE TABLE spell_lessons(
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
school_id INT NOT NULL,
|
||||
tower_id INT NOT NULL,
|
||||
apprentice_id INT NOT NULL,
|
||||
instructor_id INT NOT NULL,
|
||||
lesson_date DATETIME NOT NULL,
|
||||
FOREIGN KEY (school_id) REFERENCES schools(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (tower_id) REFERENCES towers(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (apprentice_id) REFERENCES apprentices(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (instructor_id) REFERENCES wizards(id)
|
||||
);
|
||||
|
||||
CREATE TABLE grimoires(
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
school_id INT NOT NULL,
|
||||
tower_id INT NOT NULL,
|
||||
apprentice_id INT NOT NULL,
|
||||
grimoire_type_id INT NOT NULL,
|
||||
author_wizard_id INT NOT NULL,
|
||||
content JSON NOT NULL,
|
||||
FOREIGN KEY (school_id) REFERENCES schools(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (tower_id) REFERENCES towers(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (apprentice_id) REFERENCES apprentices(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (grimoire_type_id) REFERENCES grimoire_types(id),
|
||||
FOREIGN KEY (author_wizard_id) REFERENCES wizards(id)
|
||||
);
|
||||
|
||||
CREATE TABLE tuition_scrolls(
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
school_id INT NOT NULL,
|
||||
tower_id INT NOT NULL,
|
||||
apprentice_id INT NOT NULL,
|
||||
total_amount DECIMAL(10,2) NOT NULL,
|
||||
status VARCHAR(50) NOT NULL,
|
||||
FOREIGN KEY (school_id) REFERENCES schools(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (tower_id) REFERENCES towers(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (apprentice_id) REFERENCES apprentices(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE tuition_items(
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
tuition_scroll_id INT NOT NULL,
|
||||
description TEXT NOT NULL,
|
||||
amount DECIMAL(10,2) NOT NULL,
|
||||
FOREIGN KEY (tuition_scroll_id) REFERENCES tuition_scrolls(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE patron_sponsorships(
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
tuition_scroll_id INT NOT NULL,
|
||||
patron_house VARCHAR(255) NOT NULL,
|
||||
sponsorship_code VARCHAR(100) NOT NULL,
|
||||
status VARCHAR(50) NOT NULL,
|
||||
FOREIGN KEY (tuition_scroll_id) REFERENCES tuition_scrolls(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE gold_payments(
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
tuition_scroll_id INT NOT NULL,
|
||||
amount DECIMAL(10,2) NOT NULL,
|
||||
payment_date TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (tuition_scroll_id) REFERENCES tuition_scrolls(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE arcane_logs(
|
||||
id BIGINT PRIMARY KEY AUTO_INCREMENT,
|
||||
school_id INT,
|
||||
wizard_id INT,
|
||||
tower_id INT,
|
||||
table_name VARCHAR(100) NOT NULL,
|
||||
operation VARCHAR(50) NOT NULL,
|
||||
record_id INT,
|
||||
changes JSON,
|
||||
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (school_id) REFERENCES schools(id) ON DELETE SET NULL,
|
||||
FOREIGN KEY (wizard_id) REFERENCES wizards(id) ON DELETE SET NULL,
|
||||
FOREIGN KEY (tower_id) REFERENCES towers(id) ON DELETE SET NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
// Should find all 16 tables
|
||||
const expectedTables = [
|
||||
'apprentices',
|
||||
'arcane_logs',
|
||||
'gold_payments',
|
||||
'grimoire_types',
|
||||
'grimoires',
|
||||
'patron_sponsorships',
|
||||
'rank_spell_permissions',
|
||||
'ranks',
|
||||
'schools',
|
||||
'spell_lessons',
|
||||
'spell_permissions',
|
||||
'towers',
|
||||
'tuition_items',
|
||||
'tuition_scrolls',
|
||||
'wizard_ranks',
|
||||
'wizards',
|
||||
];
|
||||
|
||||
expect(result.tables).toHaveLength(16);
|
||||
expect(result.tables.map((t) => t.name).sort()).toEqual(
|
||||
expectedTables
|
||||
);
|
||||
|
||||
// Verify key relationships exist
|
||||
const relationships = result.relationships;
|
||||
|
||||
// Check some critical relationships
|
||||
expect(
|
||||
relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'wizards' &&
|
||||
r.targetTable === 'schools' &&
|
||||
r.sourceColumn === 'school_id'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'wizard_ranks' &&
|
||||
r.targetTable === 'wizards' &&
|
||||
r.sourceColumn === 'wizard_id'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'apprentices' &&
|
||||
r.targetTable === 'wizards' &&
|
||||
r.sourceColumn === 'primary_mentor'
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Enchanted Bazaar Example', () => {
|
||||
it('should parse the enchanted bazaar example with triggers and procedures', async () => {
|
||||
const sql = `
|
||||
-- Enchanted Bazaar tables with complex features
|
||||
CREATE TABLE merchants(
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE artifacts(
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
merchant_id INT,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
price DECIMAL(10, 2) NOT NULL CHECK (price >= 0),
|
||||
enchantment_charges INT DEFAULT 0 CHECK (enchantment_charges >= 0),
|
||||
FOREIGN KEY (merchant_id) REFERENCES merchants(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
-- Stored procedure that should be skipped
|
||||
DELIMITER $$
|
||||
CREATE PROCEDURE consume_charges(IN artifact_id INT, IN charges_used INT)
|
||||
BEGIN
|
||||
UPDATE artifacts SET enchantment_charges = enchantment_charges - charges_used WHERE id = artifact_id;
|
||||
END$$
|
||||
DELIMITER ;
|
||||
|
||||
CREATE TABLE trades(
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
status VARCHAR(50) DEFAULT 'negotiating'
|
||||
);
|
||||
|
||||
CREATE TABLE trade_items(
|
||||
trade_id INT,
|
||||
artifact_id INT,
|
||||
quantity INT NOT NULL CHECK (quantity > 0),
|
||||
agreed_price DECIMAL(10, 2) NOT NULL,
|
||||
PRIMARY KEY (trade_id, artifact_id),
|
||||
FOREIGN KEY (trade_id) REFERENCES trades(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (artifact_id) REFERENCES artifacts(id)
|
||||
);
|
||||
|
||||
-- Create trigger
|
||||
CREATE TRIGGER charge_consumption_trigger
|
||||
AFTER INSERT ON trade_items
|
||||
FOR EACH ROW
|
||||
CALL consume_charges(NEW.artifact_id, NEW.quantity);
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql, {
|
||||
includeWarnings: true,
|
||||
});
|
||||
|
||||
// Should parse all tables despite procedures and triggers
|
||||
expect(result.tables.length).toBeGreaterThanOrEqual(4);
|
||||
|
||||
// Check for specific tables
|
||||
const tableNames = result.tables.map((t) => t.name);
|
||||
expect(tableNames).toContain('merchants');
|
||||
expect(tableNames).toContain('artifacts');
|
||||
expect(tableNames).toContain('trades');
|
||||
expect(tableNames).toContain('trade_items');
|
||||
|
||||
// Check relationships
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'artifacts' &&
|
||||
r.targetTable === 'merchants'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'trade_items' &&
|
||||
r.targetTable === 'trades'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
// Should have warnings about unsupported features
|
||||
if (result.warnings) {
|
||||
expect(
|
||||
result.warnings.some(
|
||||
(w) =>
|
||||
w.includes('procedure') ||
|
||||
w.includes('function') ||
|
||||
w.includes('Trigger') ||
|
||||
w.includes('trigger')
|
||||
)
|
||||
).toBe(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Dragon Registry Example', () => {
|
||||
it('should parse dragon registry with mixed constraint styles', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE dragon_species (
|
||||
id INT NOT NULL AUTO_INCREMENT,
|
||||
species_name VARCHAR(100) NOT NULL UNIQUE,
|
||||
breath_type ENUM('fire', 'ice', 'lightning', 'acid', 'poison') NOT NULL,
|
||||
max_wingspan DECIMAL(5,2),
|
||||
PRIMARY KEY (id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE dragon_habitats (
|
||||
id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
|
||||
habitat_name VARCHAR(200) NOT NULL,
|
||||
location_type VARCHAR(50) NOT NULL,
|
||||
climate VARCHAR(50),
|
||||
INDEX idx_location (location_type)
|
||||
) ENGINE=InnoDB;
|
||||
|
||||
CREATE TABLE dragons (
|
||||
dragon_id INT NOT NULL AUTO_INCREMENT,
|
||||
dragon_name VARCHAR(255) NOT NULL,
|
||||
species_id INT NOT NULL,
|
||||
habitat_id INT,
|
||||
birth_year INT,
|
||||
treasure_value DECIMAL(15,2) DEFAULT 0.00,
|
||||
is_active BOOLEAN DEFAULT TRUE,
|
||||
PRIMARY KEY (dragon_id),
|
||||
CONSTRAINT fk_dragon_species FOREIGN KEY (species_id) REFERENCES dragon_species(id),
|
||||
CONSTRAINT fk_dragon_habitat FOREIGN KEY (habitat_id) REFERENCES dragon_habitats(id) ON DELETE SET NULL,
|
||||
INDEX idx_species (species_id),
|
||||
INDEX idx_active_dragons (is_active, species_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
|
||||
|
||||
CREATE TABLE dragon_riders (
|
||||
rider_id INT NOT NULL AUTO_INCREMENT,
|
||||
rider_name VARCHAR(255) NOT NULL,
|
||||
guild_membership VARCHAR(100),
|
||||
years_experience INT DEFAULT 0,
|
||||
PRIMARY KEY (rider_id),
|
||||
UNIQUE KEY uk_rider_name (rider_name)
|
||||
) ENGINE=InnoDB;
|
||||
|
||||
CREATE TABLE dragon_bonds (
|
||||
bond_id INT NOT NULL AUTO_INCREMENT,
|
||||
dragon_id INT NOT NULL,
|
||||
rider_id INT NOT NULL,
|
||||
bond_date DATE NOT NULL,
|
||||
bond_strength ENUM('weak', 'moderate', 'strong', 'unbreakable') DEFAULT 'weak',
|
||||
PRIMARY KEY (bond_id),
|
||||
UNIQUE KEY unique_dragon_rider (dragon_id, rider_id),
|
||||
FOREIGN KEY (dragon_id) REFERENCES dragons(dragon_id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (rider_id) REFERENCES dragon_riders(rider_id) ON DELETE CASCADE
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(5);
|
||||
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
expect(tableNames).toEqual([
|
||||
'dragon_bonds',
|
||||
'dragon_habitats',
|
||||
'dragon_riders',
|
||||
'dragon_species',
|
||||
'dragons',
|
||||
]);
|
||||
|
||||
// Check that ENUMs were parsed correctly
|
||||
const dragonSpecies = result.tables.find(
|
||||
(t) => t.name === 'dragon_species'
|
||||
);
|
||||
const breathTypeColumn = dragonSpecies?.columns.find(
|
||||
(c) => c.name === 'breath_type'
|
||||
);
|
||||
expect(breathTypeColumn?.type).toBe('ENUM');
|
||||
|
||||
// Check indexes
|
||||
const dragonsTable = result.tables.find(
|
||||
(t) => t.name === 'dragons'
|
||||
);
|
||||
expect(dragonsTable?.indexes.length).toBeGreaterThan(0);
|
||||
expect(
|
||||
dragonsTable?.indexes.some((idx) => idx.name === 'idx_species')
|
||||
).toBe(true);
|
||||
|
||||
// Check relationships
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'dragons' &&
|
||||
r.targetTable === 'dragon_species' &&
|
||||
r.sourceColumn === 'species_id'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'dragon_bonds' &&
|
||||
r.targetTable === 'dragons' &&
|
||||
r.deleteAction === 'CASCADE'
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Mystic Marketplace Example with Backticks', () => {
|
||||
it('should handle tables with backticks and special characters', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE \`marketplace-vendors\` (
|
||||
\`vendor-id\` INT NOT NULL AUTO_INCREMENT,
|
||||
\`vendor name\` VARCHAR(255) NOT NULL,
|
||||
\`shop.location\` VARCHAR(500),
|
||||
\`rating%\` DECIMAL(3,2),
|
||||
PRIMARY KEY (\`vendor-id\`)
|
||||
) ENGINE=InnoDB;
|
||||
|
||||
CREATE TABLE \`item_categories\` (
|
||||
\`category-id\` INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
|
||||
\`category@name\` VARCHAR(100) NOT NULL UNIQUE,
|
||||
\`parent_category\` INT,
|
||||
FOREIGN KEY (\`parent_category\`) REFERENCES \`item_categories\`(\`category-id\`)
|
||||
);
|
||||
|
||||
CREATE TABLE \`magical.items\` (
|
||||
\`item#id\` INT NOT NULL AUTO_INCREMENT,
|
||||
\`item-name\` VARCHAR(255) NOT NULL,
|
||||
\`vendor-id\` INT NOT NULL,
|
||||
\`category-id\` INT NOT NULL,
|
||||
\`price$gold\` DECIMAL(10,2) NOT NULL,
|
||||
PRIMARY KEY (\`item#id\`),
|
||||
CONSTRAINT \`fk_item_vendor\` FOREIGN KEY (\`vendor-id\`) REFERENCES \`marketplace-vendors\`(\`vendor-id\`),
|
||||
CONSTRAINT \`fk_item_category\` FOREIGN KEY (\`category-id\`) REFERENCES \`item_categories\`(\`category-id\`)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(3);
|
||||
|
||||
// Check that backtick-wrapped names are preserved
|
||||
const vendor = result.tables.find(
|
||||
(t) => t.name === 'marketplace-vendors'
|
||||
);
|
||||
expect(vendor).toBeDefined();
|
||||
expect(vendor?.columns.some((c) => c.name === 'vendor-id')).toBe(
|
||||
true
|
||||
);
|
||||
expect(vendor?.columns.some((c) => c.name === 'vendor name')).toBe(
|
||||
true
|
||||
);
|
||||
|
||||
// Check self-referencing foreign key
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'item_categories' &&
|
||||
r.targetTable === 'item_categories' &&
|
||||
r.sourceColumn === 'parent_category'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
// Check cross-table relationships
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'magical.items' &&
|
||||
r.targetTable === 'marketplace-vendors'
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
@@ -0,0 +1,401 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromMySQL } from '../mysql';
|
||||
import { fromMySQLImproved } from '../mysql-improved';
|
||||
import { validateMySQLSyntax } from '../mysql-validator';
|
||||
|
||||
describe('MySQL Fantasy World Integration', () => {
|
||||
const fantasyWorldSQL = `
|
||||
-- Fantasy World Database Schema
|
||||
-- A magical realm management system
|
||||
|
||||
-- Realm Management
|
||||
CREATE TABLE realms (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL,
|
||||
description TEXT,
|
||||
magic_level ENUM('low', 'medium', 'high', 'legendary') DEFAULT 'medium',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
INDEX idx_magic_level (magic_level)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
-- Magical Creatures Registry
|
||||
CREATE TABLE creature_types (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(100) UNIQUE NOT NULL,
|
||||
classification ENUM('beast', 'dragon', 'elemental', 'undead', 'fey', 'construct') NOT NULL,
|
||||
danger_level INT CHECK (danger_level BETWEEN 1 AND 10),
|
||||
is_sentient BOOLEAN DEFAULT FALSE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE creatures (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(200) NOT NULL,
|
||||
creature_type_id INT NOT NULL,
|
||||
realm_id INT NOT NULL,
|
||||
health_points INT DEFAULT 100,
|
||||
magic_points INT DEFAULT 50,
|
||||
special_abilities JSON, -- ["fire_breath", "invisibility", "teleportation"]
|
||||
last_sighted DATETIME,
|
||||
status ENUM('active', 'dormant', 'banished', 'deceased') DEFAULT 'active',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (creature_type_id) REFERENCES creature_types(id),
|
||||
FOREIGN KEY (realm_id) REFERENCES realms(id) ON DELETE CASCADE,
|
||||
INDEX idx_realm_status (realm_id, status),
|
||||
INDEX idx_type (creature_type_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
-- Wizard Registry
|
||||
CREATE TABLE wizard_ranks (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
rank_name VARCHAR(50) UNIQUE NOT NULL,
|
||||
min_power_level INT NOT NULL,
|
||||
permissions JSON, -- ["cast_forbidden_spells", "access_restricted_library", "mentor_apprentices"]
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE wizards (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(200) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
wizard_rank_id INT NOT NULL,
|
||||
realm_id INT NOT NULL,
|
||||
power_level INT DEFAULT 1,
|
||||
specialization ENUM('elemental', 'necromancy', 'illusion', 'healing', 'divination') NOT NULL,
|
||||
familiar_creature_id INT,
|
||||
is_active BOOLEAN DEFAULT TRUE,
|
||||
joined_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (wizard_rank_id) REFERENCES wizard_ranks(id),
|
||||
FOREIGN KEY (realm_id) REFERENCES realms(id),
|
||||
FOREIGN KEY (familiar_creature_id) REFERENCES creatures(id) ON DELETE SET NULL,
|
||||
INDEX idx_rank (wizard_rank_id),
|
||||
INDEX idx_realm (realm_id),
|
||||
INDEX idx_email (email)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
-- Spell Library
|
||||
CREATE TABLE spell_schools (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(100) UNIQUE NOT NULL,
|
||||
description TEXT,
|
||||
forbidden BOOLEAN DEFAULT FALSE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE spells (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(200) NOT NULL,
|
||||
incantation TEXT,
|
||||
spell_school_id INT NOT NULL,
|
||||
mana_cost INT DEFAULT 10,
|
||||
cast_time_seconds INT DEFAULT 3,
|
||||
range_meters INT DEFAULT 10,
|
||||
components JSON, -- ["verbal", "somatic", "material:dragon_scale"]
|
||||
effects JSON, -- {"damage": 50, "duration": 300, "area": "cone"}
|
||||
min_wizard_rank_id INT NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (spell_school_id) REFERENCES spell_schools(id),
|
||||
FOREIGN KEY (min_wizard_rank_id) REFERENCES wizard_ranks(id),
|
||||
INDEX idx_school (spell_school_id),
|
||||
FULLTEXT idx_search (name, incantation)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
-- Wizard Spellbooks (many-to-many)
|
||||
CREATE TABLE wizard_spellbooks (
|
||||
wizard_id INT NOT NULL,
|
||||
spell_id INT NOT NULL,
|
||||
learned_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
mastery_level INT DEFAULT 1 CHECK (mastery_level BETWEEN 1 AND 5),
|
||||
times_cast INT DEFAULT 0,
|
||||
PRIMARY KEY (wizard_id, spell_id),
|
||||
FOREIGN KEY (wizard_id) REFERENCES wizards(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (spell_id) REFERENCES spells(id) ON DELETE CASCADE,
|
||||
INDEX idx_spell (spell_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
-- Magical Items
|
||||
CREATE TABLE item_categories (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(100) UNIQUE NOT NULL,
|
||||
description TEXT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE magical_items (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(200) NOT NULL,
|
||||
item_category_id INT NOT NULL,
|
||||
rarity ENUM('common', 'uncommon', 'rare', 'epic', 'legendary', 'artifact') NOT NULL,
|
||||
power_level INT DEFAULT 1,
|
||||
enchantments JSON, -- ["strength+5", "fire_resistance", "invisibility_on_use"]
|
||||
curse_effects JSON, -- ["bound_to_owner", "drains_life", "attracts_monsters"]
|
||||
created_by_wizard_id INT,
|
||||
found_in_realm_id INT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (item_category_id) REFERENCES item_categories(id),
|
||||
FOREIGN KEY (created_by_wizard_id) REFERENCES wizards(id) ON DELETE SET NULL,
|
||||
FOREIGN KEY (found_in_realm_id) REFERENCES realms(id) ON DELETE SET NULL,
|
||||
INDEX idx_category (item_category_id),
|
||||
INDEX idx_rarity (rarity)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
-- Wizard Inventory
|
||||
CREATE TABLE wizard_inventory (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
wizard_id INT NOT NULL,
|
||||
item_id INT NOT NULL,
|
||||
quantity INT DEFAULT 1,
|
||||
equipped BOOLEAN DEFAULT FALSE,
|
||||
acquired_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (wizard_id) REFERENCES wizards(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (item_id) REFERENCES magical_items(id) ON DELETE CASCADE,
|
||||
UNIQUE KEY uk_wizard_item (wizard_id, item_id),
|
||||
INDEX idx_wizard (wizard_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
-- Quests and Adventures
|
||||
CREATE TABLE quests (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
title VARCHAR(200) NOT NULL,
|
||||
description TEXT,
|
||||
realm_id INT NOT NULL,
|
||||
difficulty ENUM('novice', 'adept', 'expert', 'master', 'legendary') NOT NULL,
|
||||
reward_gold INT DEFAULT 0,
|
||||
reward_experience INT DEFAULT 0,
|
||||
reward_items JSON, -- [{"item_id": 1, "quantity": 1}]
|
||||
status ENUM('available', 'in_progress', 'completed', 'failed') DEFAULT 'available',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (realm_id) REFERENCES realms(id),
|
||||
INDEX idx_realm_status (realm_id, status)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
-- Quest Participants
|
||||
CREATE TABLE quest_participants (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
quest_id INT NOT NULL,
|
||||
wizard_id INT NOT NULL,
|
||||
role ENUM('leader', 'member', 'guide', 'support') DEFAULT 'member',
|
||||
joined_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
completed_at TIMESTAMP NULL,
|
||||
FOREIGN KEY (quest_id) REFERENCES quests(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (wizard_id) REFERENCES wizards(id) ON DELETE CASCADE,
|
||||
UNIQUE KEY uk_quest_wizard (quest_id, wizard_id),
|
||||
INDEX idx_wizard (wizard_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
-- Magical Events Log
|
||||
CREATE TABLE magical_events (
|
||||
id BIGINT AUTO_INCREMENT PRIMARY KEY,
|
||||
event_type ENUM('spell_cast', 'item_created', 'creature_summoned', 'realm_shift', 'quest_completed') NOT NULL,
|
||||
realm_id INT NOT NULL,
|
||||
wizard_id INT,
|
||||
creature_id INT,
|
||||
description TEXT,
|
||||
magic_fluctuation INT DEFAULT 0, -- Positive or negative impact on realm magic
|
||||
event_data JSON, -- Additional event-specific data
|
||||
occurred_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (realm_id) REFERENCES realms(id),
|
||||
FOREIGN KEY (wizard_id) REFERENCES wizards(id) ON DELETE SET NULL,
|
||||
FOREIGN KEY (creature_id) REFERENCES creatures(id) ON DELETE SET NULL,
|
||||
INDEX idx_realm_time (realm_id, occurred_at),
|
||||
INDEX idx_event_type (event_type)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
-- Wizard Guilds
|
||||
CREATE TABLE guilds (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(200) UNIQUE NOT NULL,
|
||||
motto TEXT,
|
||||
realm_id INT NOT NULL,
|
||||
founded_by_wizard_id INT,
|
||||
member_count INT DEFAULT 0,
|
||||
guild_hall_location VARCHAR(500),
|
||||
treasury_gold INT DEFAULT 0,
|
||||
founded_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (realm_id) REFERENCES realms(id),
|
||||
FOREIGN KEY (founded_by_wizard_id) REFERENCES wizards(id) ON DELETE SET NULL,
|
||||
INDEX idx_realm (realm_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
-- Guild Memberships
|
||||
CREATE TABLE guild_memberships (
|
||||
wizard_id INT NOT NULL,
|
||||
guild_id INT NOT NULL,
|
||||
rank ENUM('apprentice', 'member', 'officer', 'leader') DEFAULT 'member',
|
||||
joined_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
contribution_points INT DEFAULT 0,
|
||||
PRIMARY KEY (wizard_id, guild_id),
|
||||
FOREIGN KEY (wizard_id) REFERENCES wizards(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (guild_id) REFERENCES guilds(id) ON DELETE CASCADE,
|
||||
INDEX idx_guild (guild_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
-- Enchantment Recipes
|
||||
CREATE TABLE enchantment_recipes (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(200) NOT NULL,
|
||||
description TEXT,
|
||||
required_spell_ids JSON NOT NULL, -- [1, 5, 12]
|
||||
required_items JSON NOT NULL, -- [{"item_id": 3, "quantity": 2}]
|
||||
result_enchantment VARCHAR(200) NOT NULL,
|
||||
success_rate DECIMAL(5,2) DEFAULT 75.00,
|
||||
created_by_wizard_id INT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (created_by_wizard_id) REFERENCES wizards(id) ON DELETE SET NULL,
|
||||
INDEX idx_creator (created_by_wizard_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
-- ALTER TABLE for additional constraints
|
||||
ALTER TABLE creatures ADD CONSTRAINT fk_creature_realm
|
||||
FOREIGN KEY (realm_id) REFERENCES realms(id) ON UPDATE CASCADE;
|
||||
|
||||
ALTER TABLE magical_items ADD CONSTRAINT unique_artifact_name
|
||||
UNIQUE KEY (name, rarity);
|
||||
`;
|
||||
|
||||
describe('Full Fantasy World Schema', () => {
|
||||
it('should parse the complete fantasy world database', async () => {
|
||||
const result = await fromMySQL(fantasyWorldSQL);
|
||||
|
||||
// Verify all tables are parsed
|
||||
expect(result.tables).toHaveLength(17);
|
||||
|
||||
const expectedTables = [
|
||||
'creature_types',
|
||||
'creatures',
|
||||
'enchantment_recipes',
|
||||
'guild_memberships',
|
||||
'guilds',
|
||||
'item_categories',
|
||||
'magical_events',
|
||||
'magical_items',
|
||||
'quest_participants',
|
||||
'quests',
|
||||
'realms',
|
||||
'spell_schools',
|
||||
'spells',
|
||||
'wizard_inventory',
|
||||
'wizard_ranks',
|
||||
'wizard_spellbooks',
|
||||
'wizards',
|
||||
];
|
||||
|
||||
expect(result.tables.map((t) => t.name).sort()).toEqual(
|
||||
expectedTables
|
||||
);
|
||||
|
||||
// Verify key relationships
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'wizards' &&
|
||||
r.targetTable === 'wizard_ranks' &&
|
||||
r.sourceColumn === 'wizard_rank_id'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'creatures' &&
|
||||
r.targetTable === 'realms' &&
|
||||
r.deleteAction === 'CASCADE'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
// Verify JSON columns
|
||||
const creatures = result.tables.find((t) => t.name === 'creatures');
|
||||
const abilitiesCol = creatures?.columns.find(
|
||||
(c) => c.name === 'special_abilities'
|
||||
);
|
||||
expect(abilitiesCol?.type).toBe('JSON');
|
||||
|
||||
// Verify ENUM columns
|
||||
const magicLevel = result.tables
|
||||
.find((t) => t.name === 'realms')
|
||||
?.columns.find((c) => c.name === 'magic_level');
|
||||
expect(magicLevel?.type).toBe('ENUM');
|
||||
|
||||
// Verify indexes
|
||||
const wizards = result.tables.find((t) => t.name === 'wizards');
|
||||
expect(
|
||||
wizards?.indexes.some((idx) => idx.name === 'idx_email')
|
||||
).toBe(true);
|
||||
|
||||
// Verify many-to-many relationship table
|
||||
const spellbooks = result.tables.find(
|
||||
(t) => t.name === 'wizard_spellbooks'
|
||||
);
|
||||
expect(
|
||||
spellbooks?.columns.filter((c) => c.primaryKey)
|
||||
).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should handle the schema with skipValidation', async () => {
|
||||
const result = await fromMySQLImproved(fantasyWorldSQL, {
|
||||
skipValidation: true,
|
||||
includeWarnings: true,
|
||||
});
|
||||
|
||||
expect(result.tables).toHaveLength(17);
|
||||
expect(result.relationships.length).toBeGreaterThan(20);
|
||||
|
||||
// Check for CASCADE actions
|
||||
const cascadeRelations = result.relationships.filter(
|
||||
(r) =>
|
||||
r.deleteAction === 'CASCADE' || r.updateAction === 'CASCADE'
|
||||
);
|
||||
expect(cascadeRelations.length).toBeGreaterThan(5);
|
||||
});
|
||||
|
||||
it('should validate the fantasy schema', () => {
|
||||
const validation = validateMySQLSyntax(fantasyWorldSQL);
|
||||
|
||||
// Should be valid (no multi-line comment issues)
|
||||
expect(validation.isValid).toBe(true);
|
||||
expect(validation.errors).toHaveLength(0);
|
||||
|
||||
// May have some warnings but should be minimal
|
||||
expect(validation.warnings.length).toBeLessThan(5);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Fantasy Schema with Validation Issues', () => {
|
||||
it('should handle SQL that becomes invalid after comment removal', async () => {
|
||||
const problematicSQL = `
|
||||
CREATE TABLE spell_components (
|
||||
id INT PRIMARY KEY,
|
||||
name VARCHAR(100),
|
||||
rarity VARCHAR(50), -- "Common",
|
||||
"Rare", "Legendary" -- This will cause issues
|
||||
properties JSON -- [
|
||||
"magical_essence",
|
||||
"dragon_scale"
|
||||
] -- This JSON example will also cause issues
|
||||
);`;
|
||||
|
||||
// After comment removal, this SQL becomes malformed
|
||||
// The parser should handle this gracefully
|
||||
try {
|
||||
await fromMySQL(problematicSQL);
|
||||
// If it parses, that's OK - the sanitizer may have cleaned it up
|
||||
} catch (error) {
|
||||
// If it fails, that's also OK - the SQL was problematic
|
||||
expect(error.message).toBeDefined();
|
||||
}
|
||||
});
|
||||
|
||||
it('should detect inline REFERENCES in fantasy schema', async () => {
|
||||
const invalidSQL = `
|
||||
CREATE TABLE wizard_familiars (
|
||||
id INT PRIMARY KEY,
|
||||
wizard_id INT REFERENCES wizards(id), -- PostgreSQL style, not MySQL
|
||||
familiar_name VARCHAR(100)
|
||||
);`;
|
||||
|
||||
await expect(fromMySQL(invalidSQL)).rejects.toThrow(
|
||||
'inline REFERENCES'
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
@@ -0,0 +1,257 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromMySQL } from '../mysql';
|
||||
|
||||
describe('MySQL Fantasy Schema Test', () => {
|
||||
it('should parse a complete fantasy realm management schema', async () => {
|
||||
const fantasySQL = `
|
||||
-- Enchanted Realms Database
|
||||
CREATE TABLE magical_realms (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
realm_name VARCHAR(100) UNIQUE NOT NULL,
|
||||
magic_density DECIMAL(5,2) DEFAULT 100.00,
|
||||
portal_coordinates VARCHAR(255),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
INDEX idx_realm_name (realm_name)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE creature_classes (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
class_name VARCHAR(50) UNIQUE NOT NULL,
|
||||
base_health INT DEFAULT 100,
|
||||
base_mana INT DEFAULT 50,
|
||||
special_traits JSON,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE magical_creatures (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
creature_name VARCHAR(200) NOT NULL,
|
||||
class_id INT NOT NULL,
|
||||
realm_id INT NOT NULL,
|
||||
level INT DEFAULT 1,
|
||||
experience_points INT DEFAULT 0,
|
||||
is_legendary BOOLEAN DEFAULT FALSE,
|
||||
abilities TEXT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (class_id) REFERENCES creature_classes(id),
|
||||
FOREIGN KEY (realm_id) REFERENCES magical_realms(id) ON DELETE CASCADE,
|
||||
INDEX idx_realm_creatures (realm_id),
|
||||
INDEX idx_legendary (is_legendary)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE wizard_towers (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
tower_name VARCHAR(100) NOT NULL,
|
||||
realm_id INT NOT NULL,
|
||||
height_meters INT DEFAULT 50,
|
||||
defensive_wards JSON,
|
||||
library_size ENUM('small', 'medium', 'large', 'grand') DEFAULT 'medium',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (realm_id) REFERENCES magical_realms(id),
|
||||
INDEX idx_realm_towers (realm_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE arcane_wizards (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
wizard_name VARCHAR(200) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
tower_id INT,
|
||||
specialization VARCHAR(50) NOT NULL,
|
||||
mana_capacity INT DEFAULT 1000,
|
||||
spell_slots INT DEFAULT 10,
|
||||
familiar_creature_id INT,
|
||||
joined_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (tower_id) REFERENCES wizard_towers(id) ON DELETE SET NULL,
|
||||
FOREIGN KEY (familiar_creature_id) REFERENCES magical_creatures(id) ON DELETE SET NULL,
|
||||
INDEX idx_email (email),
|
||||
INDEX idx_tower (tower_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE spell_tomes (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
spell_name VARCHAR(200) NOT NULL,
|
||||
mana_cost INT DEFAULT 50,
|
||||
cast_time_seconds DECIMAL(4,2) DEFAULT 1.5,
|
||||
damage_type ENUM('fire', 'ice', 'lightning', 'arcane', 'nature', 'shadow') NOT NULL,
|
||||
spell_description TEXT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FULLTEXT idx_spell_search (spell_name, spell_description)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE wizard_spellbooks (
|
||||
wizard_id INT NOT NULL,
|
||||
spell_id INT NOT NULL,
|
||||
mastery_level INT DEFAULT 1,
|
||||
times_cast INT DEFAULT 0,
|
||||
learned_date DATE,
|
||||
PRIMARY KEY (wizard_id, spell_id),
|
||||
FOREIGN KEY (wizard_id) REFERENCES arcane_wizards(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (spell_id) REFERENCES spell_tomes(id) ON DELETE CASCADE
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE enchanted_artifacts (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
artifact_name VARCHAR(200) UNIQUE NOT NULL,
|
||||
power_level INT CHECK (power_level BETWEEN 1 AND 100),
|
||||
curse_type VARCHAR(100),
|
||||
owner_wizard_id INT,
|
||||
found_in_realm_id INT,
|
||||
enchantments JSON,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (owner_wizard_id) REFERENCES arcane_wizards(id) ON DELETE SET NULL,
|
||||
FOREIGN KEY (found_in_realm_id) REFERENCES magical_realms(id),
|
||||
INDEX idx_power (power_level),
|
||||
INDEX idx_owner (owner_wizard_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE portal_network (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
portal_name VARCHAR(100) NOT NULL,
|
||||
source_realm_id INT NOT NULL,
|
||||
destination_realm_id INT NOT NULL,
|
||||
stability_percentage DECIMAL(5,2) DEFAULT 95.00,
|
||||
mana_cost_per_use INT DEFAULT 100,
|
||||
is_bidirectional BOOLEAN DEFAULT TRUE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (source_realm_id) REFERENCES magical_realms(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (destination_realm_id) REFERENCES magical_realms(id) ON DELETE CASCADE,
|
||||
UNIQUE KEY uk_portal_connection (source_realm_id, destination_realm_id),
|
||||
INDEX idx_source (source_realm_id),
|
||||
INDEX idx_destination (destination_realm_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE magical_guilds (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
guild_name VARCHAR(200) UNIQUE NOT NULL,
|
||||
founding_wizard_id INT,
|
||||
headquarters_tower_id INT,
|
||||
guild_treasury INT DEFAULT 0,
|
||||
member_limit INT DEFAULT 50,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (founding_wizard_id) REFERENCES arcane_wizards(id) ON DELETE SET NULL,
|
||||
FOREIGN KEY (headquarters_tower_id) REFERENCES wizard_towers(id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE guild_memberships (
|
||||
wizard_id INT NOT NULL,
|
||||
guild_id INT NOT NULL,
|
||||
joined_date DATE NOT NULL,
|
||||
guild_rank ENUM('apprentice', 'member', 'elder', 'master') DEFAULT 'apprentice',
|
||||
contribution_points INT DEFAULT 0,
|
||||
PRIMARY KEY (wizard_id, guild_id),
|
||||
FOREIGN KEY (wizard_id) REFERENCES arcane_wizards(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (guild_id) REFERENCES magical_guilds(id) ON DELETE CASCADE
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE realm_events (
|
||||
id BIGINT AUTO_INCREMENT PRIMARY KEY,
|
||||
event_type VARCHAR(50) NOT NULL,
|
||||
realm_id INT NOT NULL,
|
||||
description TEXT,
|
||||
magic_fluctuation INT DEFAULT 0,
|
||||
participants JSON,
|
||||
event_timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (realm_id) REFERENCES magical_realms(id) ON DELETE CASCADE,
|
||||
INDEX idx_realm_time (realm_id, event_timestamp),
|
||||
INDEX idx_event_type (event_type)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
-- Additional constraints via ALTER TABLE
|
||||
ALTER TABLE magical_creatures
|
||||
ADD CONSTRAINT chk_level CHECK (level BETWEEN 1 AND 100);
|
||||
|
||||
ALTER TABLE wizard_spellbooks
|
||||
ADD CONSTRAINT chk_mastery CHECK (mastery_level BETWEEN 1 AND 10);
|
||||
`;
|
||||
|
||||
console.log('Parsing fantasy realm schema...');
|
||||
const result = await fromMySQL(fantasySQL);
|
||||
|
||||
// Expected structure
|
||||
const expectedTables = [
|
||||
'arcane_wizards',
|
||||
'creature_classes',
|
||||
'enchanted_artifacts',
|
||||
'guild_memberships',
|
||||
'magical_creatures',
|
||||
'magical_guilds',
|
||||
'magical_realms',
|
||||
'portal_network',
|
||||
'realm_events',
|
||||
'spell_tomes',
|
||||
'wizard_spellbooks',
|
||||
'wizard_towers',
|
||||
];
|
||||
|
||||
console.log('Found tables:', result.tables.map((t) => t.name).sort());
|
||||
|
||||
expect(result.tables).toHaveLength(12);
|
||||
expect(result.tables.map((t) => t.name).sort()).toEqual(expectedTables);
|
||||
|
||||
// Verify relationships
|
||||
console.log(
|
||||
`\nTotal relationships found: ${result.relationships.length}`
|
||||
);
|
||||
|
||||
// Check some key relationships
|
||||
const creatureRelations = result.relationships.filter(
|
||||
(r) => r.sourceTable === 'magical_creatures'
|
||||
);
|
||||
expect(creatureRelations).toHaveLength(2); // class_id and realm_id
|
||||
|
||||
const wizardRelations = result.relationships.filter(
|
||||
(r) => r.sourceTable === 'arcane_wizards'
|
||||
);
|
||||
expect(wizardRelations).toHaveLength(2); // tower_id and familiar_creature_id
|
||||
|
||||
// Check CASCADE relationships
|
||||
const cascadeRelations = result.relationships.filter(
|
||||
(r) => r.deleteAction === 'CASCADE'
|
||||
);
|
||||
console.log(
|
||||
`\nRelationships with CASCADE delete: ${cascadeRelations.length}`
|
||||
);
|
||||
expect(cascadeRelations.length).toBeGreaterThan(5);
|
||||
|
||||
// Verify special columns
|
||||
const realms = result.tables.find((t) => t.name === 'magical_realms');
|
||||
const magicDensity = realms?.columns.find(
|
||||
(c) => c.name === 'magic_density'
|
||||
);
|
||||
expect(magicDensity?.type).toBe('DECIMAL');
|
||||
|
||||
const spells = result.tables.find((t) => t.name === 'spell_tomes');
|
||||
const damageType = spells?.columns.find(
|
||||
(c) => c.name === 'damage_type'
|
||||
);
|
||||
expect(damageType?.type).toBe('ENUM');
|
||||
|
||||
// Check indexes
|
||||
const wizards = result.tables.find((t) => t.name === 'arcane_wizards');
|
||||
expect(wizards?.indexes.some((idx) => idx.name === 'idx_email')).toBe(
|
||||
true
|
||||
);
|
||||
|
||||
// Check unique constraints
|
||||
const portals = result.tables.find((t) => t.name === 'portal_network');
|
||||
expect(
|
||||
portals?.indexes.some(
|
||||
(idx) =>
|
||||
idx.name === 'uk_portal_connection' && idx.unique === true
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
console.log('\n=== Parsing Summary ===');
|
||||
console.log(`Tables parsed: ${result.tables.length}`);
|
||||
console.log(`Relationships found: ${result.relationships.length}`);
|
||||
console.log(
|
||||
`Tables with indexes: ${result.tables.filter((t) => t.indexes.length > 0).length}`
|
||||
);
|
||||
console.log(
|
||||
`Tables with primary keys: ${
|
||||
result.tables.filter((t) => t.columns.some((c) => c.primaryKey))
|
||||
.length
|
||||
}`
|
||||
);
|
||||
});
|
||||
});
|
@@ -0,0 +1,195 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromMySQL } from '../mysql';
|
||||
import { fromMySQLImproved } from '../mysql-improved';
|
||||
|
||||
describe('MySQL Final Integration', () => {
|
||||
it('should use the improved parser from fromMySQL', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE users (
|
||||
id INT PRIMARY KEY,
|
||||
name VARCHAR(100)
|
||||
);
|
||||
|
||||
CREATE TABLE posts (
|
||||
id INT PRIMARY KEY,
|
||||
user_id INT,
|
||||
FOREIGN KEY (user_id) REFERENCES users(id)
|
||||
);`;
|
||||
|
||||
const result = await fromMySQL(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should reject inline REFERENCES', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE posts (
|
||||
id INT PRIMARY KEY,
|
||||
user_id INT REFERENCES users(id)
|
||||
);`;
|
||||
|
||||
await expect(fromMySQL(sql)).rejects.toThrow(
|
||||
'MySQL/MariaDB does not support inline REFERENCES'
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle a large fantasy schema with skipValidation', async () => {
|
||||
const fantasySQL = `
|
||||
-- Dragon Registry System
|
||||
CREATE TABLE dragon_species (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
species_name VARCHAR(100) UNIQUE NOT NULL,
|
||||
element_affinity ENUM('fire', 'ice', 'lightning', 'earth', 'shadow', 'light') NOT NULL,
|
||||
average_wingspan_meters DECIMAL(6,2),
|
||||
is_ancient BOOLEAN DEFAULT FALSE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB;
|
||||
|
||||
CREATE TABLE dragon_lairs (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
location_name VARCHAR(200) NOT NULL,
|
||||
coordinates JSON, -- {"x": 1000, "y": 2000, "z": 500}
|
||||
treasure_value INT DEFAULT 0,
|
||||
trap_level INT DEFAULT 1 CHECK (trap_level BETWEEN 1 AND 10),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
INDEX idx_treasure (treasure_value)
|
||||
) ENGINE=InnoDB;
|
||||
|
||||
CREATE TABLE dragons (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
dragon_name VARCHAR(200) NOT NULL,
|
||||
species_id INT NOT NULL,
|
||||
lair_id INT,
|
||||
age_years INT DEFAULT 0,
|
||||
hoard_size INT DEFAULT 0,
|
||||
breath_weapon_power INT DEFAULT 100,
|
||||
is_sleeping BOOLEAN DEFAULT FALSE,
|
||||
last_seen_at DATETIME,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (species_id) REFERENCES dragon_species(id),
|
||||
FOREIGN KEY (lair_id) REFERENCES dragon_lairs(id) ON DELETE SET NULL,
|
||||
INDEX idx_species (species_id),
|
||||
INDEX idx_lair (lair_id)
|
||||
) ENGINE=InnoDB;
|
||||
|
||||
-- Adventurer's Guild
|
||||
CREATE TABLE adventurer_classes (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
class_name VARCHAR(50) UNIQUE NOT NULL,
|
||||
primary_stat ENUM('strength', 'dexterity', 'intelligence', 'wisdom', 'charisma') NOT NULL,
|
||||
hit_dice INT DEFAULT 6,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB;
|
||||
|
||||
CREATE TABLE adventurers (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
adventurer_name VARCHAR(200) NOT NULL,
|
||||
class_id INT NOT NULL,
|
||||
level INT DEFAULT 1,
|
||||
experience_points INT DEFAULT 0,
|
||||
gold_pieces INT DEFAULT 100,
|
||||
is_alive BOOLEAN DEFAULT TRUE,
|
||||
last_quest_date DATE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (class_id) REFERENCES adventurer_classes(id),
|
||||
INDEX idx_class (class_id),
|
||||
INDEX idx_level (level)
|
||||
) ENGINE=InnoDB;
|
||||
|
||||
CREATE TABLE dragon_encounters (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
dragon_id INT NOT NULL,
|
||||
adventurer_id INT NOT NULL,
|
||||
encounter_date DATETIME NOT NULL,
|
||||
outcome ENUM('fled', 'negotiated', 'fought', 'befriended') NOT NULL,
|
||||
gold_stolen INT DEFAULT 0,
|
||||
survived BOOLEAN DEFAULT TRUE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (dragon_id) REFERENCES dragons(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (adventurer_id) REFERENCES adventurers(id) ON DELETE CASCADE,
|
||||
INDEX idx_dragon (dragon_id),
|
||||
INDEX idx_adventurer (adventurer_id),
|
||||
INDEX idx_date (encounter_date)
|
||||
) ENGINE=InnoDB;
|
||||
`;
|
||||
|
||||
// First, let's try with skipValidation
|
||||
const result = await fromMySQLImproved(fantasySQL, {
|
||||
skipValidation: true,
|
||||
includeWarnings: true,
|
||||
});
|
||||
|
||||
console.log('\n=== Results with skipValidation ===');
|
||||
console.log('Tables:', result.tables.length);
|
||||
console.log('Relationships:', result.relationships.length);
|
||||
console.log('Warnings:', result.warnings?.length || 0);
|
||||
|
||||
expect(result.tables.length).toBe(6);
|
||||
expect(result.relationships.length).toBeGreaterThanOrEqual(5);
|
||||
|
||||
// Verify key tables
|
||||
const dragons = result.tables.find((t) => t.name === 'dragons');
|
||||
expect(dragons).toBeDefined();
|
||||
expect(
|
||||
dragons?.columns.find((c) => c.name === 'breath_weapon_power')
|
||||
).toBeDefined();
|
||||
|
||||
// Check relationships
|
||||
const dragonRelations = result.relationships.filter(
|
||||
(r) => r.sourceTable === 'dragons'
|
||||
);
|
||||
expect(dragonRelations).toHaveLength(2); // species_id and lair_id
|
||||
});
|
||||
|
||||
it('should handle SQL with comments like PostgreSQL', async () => {
|
||||
// Use properly formatted SQL that won't break when comments are removed
|
||||
const sqlWithComments = `
|
||||
CREATE TABLE test (
|
||||
id INT PRIMARY KEY,
|
||||
data JSON, -- Example: ["value1", "value2"]
|
||||
status VARCHAR(50), -- Can be "active" or "inactive"
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP -- Auto-set timestamp
|
||||
);`;
|
||||
|
||||
// This should work because comments are removed first
|
||||
const result = await fromMySQL(sqlWithComments);
|
||||
|
||||
console.log('\n=== Result ===');
|
||||
console.log('Tables:', result.tables.length);
|
||||
console.log('Columns:', result.tables[0]?.columns.length);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('test');
|
||||
expect(result.tables[0].columns).toHaveLength(4);
|
||||
|
||||
// Verify columns were parsed correctly
|
||||
const columns = result.tables[0].columns.map((c) => c.name);
|
||||
expect(columns).toContain('id');
|
||||
expect(columns).toContain('data');
|
||||
expect(columns).toContain('status');
|
||||
expect(columns).toContain('created_at');
|
||||
});
|
||||
|
||||
it('should handle SQL that may become problematic after comment removal', async () => {
|
||||
// This SQL is problematic because removing comments leaves invalid syntax
|
||||
const problematicSql = `
|
||||
CREATE TABLE test (
|
||||
id INT PRIMARY KEY,
|
||||
data JSON, -- [
|
||||
"value1",
|
||||
"value2"
|
||||
] -- This leaves broken syntax
|
||||
);`;
|
||||
|
||||
// The parser might handle this in different ways
|
||||
try {
|
||||
const result = await fromMySQL(problematicSql);
|
||||
// If it succeeds, it might have parsed partially
|
||||
expect(result.tables.length).toBeGreaterThanOrEqual(0);
|
||||
} catch (error) {
|
||||
// If it fails, that's also acceptable
|
||||
expect(error).toBeDefined();
|
||||
}
|
||||
});
|
||||
});
|
@@ -0,0 +1,101 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromMySQLImproved } from '../mysql-improved';
|
||||
|
||||
describe('MySQL Fix Test', () => {
|
||||
it('should parse foreign keys with comments containing commas', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE product_categories (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE packages (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
category_id INT NOT NULL,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
badge_text VARCHAR(50), -- "Beliebt", "Empfohlen", etc.
|
||||
color_code VARCHAR(7), -- Hex-Farbe für UI
|
||||
FOREIGN KEY (category_id) REFERENCES product_categories(id)
|
||||
);`;
|
||||
|
||||
const result = await fromMySQLImproved(sql, { skipValidation: true });
|
||||
|
||||
console.log(
|
||||
'Tables:',
|
||||
result.tables.map((t) => t.name)
|
||||
);
|
||||
console.log('Relationships:', result.relationships.length);
|
||||
result.relationships.forEach((r) => {
|
||||
console.log(
|
||||
` ${r.sourceTable}.${r.sourceColumn} -> ${r.targetTable}.${r.targetColumn}`
|
||||
);
|
||||
});
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
expect(result.relationships[0]).toMatchObject({
|
||||
sourceTable: 'packages',
|
||||
sourceColumn: 'category_id',
|
||||
targetTable: 'product_categories',
|
||||
targetColumn: 'id',
|
||||
});
|
||||
});
|
||||
|
||||
it('should parse the actual packages table from the file', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE product_categories (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL,
|
||||
slug VARCHAR(100) UNIQUE NOT NULL,
|
||||
description TEXT,
|
||||
icon VARCHAR(255),
|
||||
sort_order INT DEFAULT 0,
|
||||
is_active BOOLEAN DEFAULT TRUE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
-- Pakete (für VServer, Game-Server, Web-Hosting)
|
||||
CREATE TABLE packages (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
category_id INT NOT NULL,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
slug VARCHAR(255) UNIQUE NOT NULL,
|
||||
description TEXT,
|
||||
|
||||
-- Paket-Eigenschaften
|
||||
is_popular BOOLEAN DEFAULT FALSE,
|
||||
badge_text VARCHAR(50), -- Examples: "Beliebt", "Empfohlen", etc.
|
||||
color_code VARCHAR(7), -- Hex color for UI
|
||||
|
||||
sort_order INT DEFAULT 0,
|
||||
is_active BOOLEAN DEFAULT TRUE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
|
||||
FOREIGN KEY (category_id) REFERENCES product_categories(id),
|
||||
|
||||
-- Only categories with packages: VServer(2), Game-Server(1), Web-Hosting(4)
|
||||
CHECK (category_id IN (1, 2, 4))
|
||||
);`;
|
||||
|
||||
const result = await fromMySQLImproved(sql, { skipValidation: true });
|
||||
|
||||
console.log('\nActual packages table test:');
|
||||
console.log(
|
||||
'Tables:',
|
||||
result.tables.map((t) => t.name)
|
||||
);
|
||||
console.log('Relationships:', result.relationships.length);
|
||||
result.relationships.forEach((r) => {
|
||||
console.log(
|
||||
` ${r.sourceTable}.${r.sourceColumn} -> ${r.targetTable}.${r.targetColumn}`
|
||||
);
|
||||
});
|
||||
|
||||
const packagesRelationships = result.relationships.filter(
|
||||
(r) => r.sourceTable === 'packages'
|
||||
);
|
||||
expect(packagesRelationships).toHaveLength(1);
|
||||
});
|
||||
});
|
@@ -0,0 +1,601 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromMySQLImproved } from '../mysql-improved';
|
||||
|
||||
describe('MySQL Integration Tests', () => {
|
||||
describe('E-Commerce Database Schema', () => {
|
||||
it('should parse a complete e-commerce database', async () => {
|
||||
const sql = `
|
||||
-- E-commerce database schema
|
||||
CREATE TABLE categories (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
name VARCHAR(100) NOT NULL,
|
||||
parent_id INT,
|
||||
slug VARCHAR(100) UNIQUE NOT NULL,
|
||||
description TEXT,
|
||||
is_active BOOLEAN DEFAULT TRUE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (parent_id) REFERENCES categories(id) ON DELETE CASCADE,
|
||||
INDEX idx_parent (parent_id),
|
||||
INDEX idx_active (is_active)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE brands (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
name VARCHAR(100) NOT NULL,
|
||||
slug VARCHAR(100) UNIQUE NOT NULL,
|
||||
logo_url VARCHAR(500),
|
||||
website VARCHAR(255),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE products (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
sku VARCHAR(50) UNIQUE NOT NULL,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
brand_id INT,
|
||||
category_id INT NOT NULL,
|
||||
price DECIMAL(10,2) NOT NULL,
|
||||
compare_at_price DECIMAL(10,2),
|
||||
cost DECIMAL(10,2),
|
||||
quantity INT DEFAULT 0,
|
||||
weight DECIMAL(8,3),
|
||||
status ENUM('active', 'draft', 'archived') DEFAULT 'draft',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (brand_id) REFERENCES brands(id) ON DELETE SET NULL,
|
||||
FOREIGN KEY (category_id) REFERENCES categories(id),
|
||||
INDEX idx_sku (sku),
|
||||
INDEX idx_category (category_id),
|
||||
INDEX idx_brand (brand_id),
|
||||
INDEX idx_status (status),
|
||||
FULLTEXT idx_search (name, description)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE product_images (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
product_id INT NOT NULL,
|
||||
image_url VARCHAR(500) NOT NULL,
|
||||
alt_text VARCHAR(255),
|
||||
position INT DEFAULT 0,
|
||||
is_primary BOOLEAN DEFAULT FALSE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (product_id) REFERENCES products(id) ON DELETE CASCADE,
|
||||
INDEX idx_product (product_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE customers (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
password_hash VARCHAR(255) NOT NULL,
|
||||
first_name VARCHAR(100),
|
||||
last_name VARCHAR(100),
|
||||
phone VARCHAR(20),
|
||||
email_verified BOOLEAN DEFAULT FALSE,
|
||||
is_active BOOLEAN DEFAULT TRUE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
INDEX idx_email (email)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE addresses (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
customer_id INT NOT NULL,
|
||||
type ENUM('billing', 'shipping', 'both') DEFAULT 'both',
|
||||
first_name VARCHAR(100) NOT NULL,
|
||||
last_name VARCHAR(100) NOT NULL,
|
||||
company VARCHAR(100),
|
||||
address_line1 VARCHAR(255) NOT NULL,
|
||||
address_line2 VARCHAR(255),
|
||||
city VARCHAR(100) NOT NULL,
|
||||
state_province VARCHAR(100),
|
||||
postal_code VARCHAR(20),
|
||||
country_code CHAR(2) NOT NULL,
|
||||
phone VARCHAR(20),
|
||||
is_default BOOLEAN DEFAULT FALSE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (customer_id) REFERENCES customers(id) ON DELETE CASCADE,
|
||||
INDEX idx_customer (customer_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE carts (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
customer_id INT,
|
||||
session_id VARCHAR(128),
|
||||
status ENUM('active', 'abandoned', 'converted') DEFAULT 'active',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
expires_at TIMESTAMP NULL,
|
||||
FOREIGN KEY (customer_id) REFERENCES customers(id) ON DELETE SET NULL,
|
||||
INDEX idx_customer (customer_id),
|
||||
INDEX idx_session (session_id),
|
||||
INDEX idx_status (status)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE cart_items (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
cart_id INT NOT NULL,
|
||||
product_id INT NOT NULL,
|
||||
quantity INT NOT NULL DEFAULT 1,
|
||||
price DECIMAL(10,2) NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (cart_id) REFERENCES carts(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (product_id) REFERENCES products(id),
|
||||
UNIQUE KEY uk_cart_product (cart_id, product_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE orders (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
order_number VARCHAR(50) UNIQUE NOT NULL,
|
||||
customer_id INT NOT NULL,
|
||||
billing_address_id INT NOT NULL,
|
||||
shipping_address_id INT NOT NULL,
|
||||
status ENUM('pending', 'processing', 'shipped', 'delivered', 'cancelled', 'refunded') DEFAULT 'pending',
|
||||
subtotal DECIMAL(10,2) NOT NULL,
|
||||
tax_amount DECIMAL(10,2) DEFAULT 0.00,
|
||||
shipping_amount DECIMAL(10,2) DEFAULT 0.00,
|
||||
discount_amount DECIMAL(10,2) DEFAULT 0.00,
|
||||
total_amount DECIMAL(10,2) NOT NULL,
|
||||
currency_code CHAR(3) DEFAULT 'USD',
|
||||
payment_status ENUM('pending', 'paid', 'partially_paid', 'refunded', 'failed') DEFAULT 'pending',
|
||||
notes TEXT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (customer_id) REFERENCES customers(id),
|
||||
FOREIGN KEY (billing_address_id) REFERENCES addresses(id),
|
||||
FOREIGN KEY (shipping_address_id) REFERENCES addresses(id),
|
||||
INDEX idx_order_number (order_number),
|
||||
INDEX idx_customer (customer_id),
|
||||
INDEX idx_status (status),
|
||||
INDEX idx_created (created_at)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE order_items (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
order_id INT NOT NULL,
|
||||
product_id INT NOT NULL,
|
||||
product_name VARCHAR(255) NOT NULL,
|
||||
product_sku VARCHAR(50) NOT NULL,
|
||||
quantity INT NOT NULL,
|
||||
price DECIMAL(10,2) NOT NULL,
|
||||
total DECIMAL(10,2) NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (order_id) REFERENCES orders(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (product_id) REFERENCES products(id),
|
||||
INDEX idx_order (order_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE payments (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
order_id INT NOT NULL,
|
||||
payment_method ENUM('credit_card', 'debit_card', 'paypal', 'stripe', 'bank_transfer') NOT NULL,
|
||||
transaction_id VARCHAR(255) UNIQUE,
|
||||
amount DECIMAL(10,2) NOT NULL,
|
||||
currency_code CHAR(3) DEFAULT 'USD',
|
||||
status ENUM('pending', 'processing', 'completed', 'failed', 'refunded') DEFAULT 'pending',
|
||||
gateway_response JSON,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (order_id) REFERENCES orders(id),
|
||||
INDEX idx_order (order_id),
|
||||
INDEX idx_transaction (transaction_id),
|
||||
INDEX idx_status (status)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE reviews (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
product_id INT NOT NULL,
|
||||
customer_id INT NOT NULL,
|
||||
order_id INT,
|
||||
rating INT NOT NULL CHECK (rating >= 1 AND rating <= 5),
|
||||
title VARCHAR(255),
|
||||
comment TEXT,
|
||||
is_verified_purchase BOOLEAN DEFAULT FALSE,
|
||||
is_featured BOOLEAN DEFAULT FALSE,
|
||||
helpful_count INT DEFAULT 0,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (product_id) REFERENCES products(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (customer_id) REFERENCES customers(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (order_id) REFERENCES orders(id) ON DELETE SET NULL,
|
||||
UNIQUE KEY uk_product_customer (product_id, customer_id),
|
||||
INDEX idx_product (product_id),
|
||||
INDEX idx_rating (rating),
|
||||
INDEX idx_created (created_at)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE coupons (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
code VARCHAR(50) UNIQUE NOT NULL,
|
||||
description TEXT,
|
||||
discount_type ENUM('fixed', 'percentage') NOT NULL,
|
||||
discount_amount DECIMAL(10,2) NOT NULL,
|
||||
minimum_amount DECIMAL(10,2),
|
||||
usage_limit INT,
|
||||
usage_count INT DEFAULT 0,
|
||||
is_active BOOLEAN DEFAULT TRUE,
|
||||
valid_from DATETIME NOT NULL,
|
||||
valid_until DATETIME,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
INDEX idx_code (code),
|
||||
INDEX idx_active (is_active),
|
||||
INDEX idx_valid_dates (valid_from, valid_until)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE order_coupons (
|
||||
order_id INT NOT NULL,
|
||||
coupon_id INT NOT NULL,
|
||||
discount_amount DECIMAL(10,2) NOT NULL,
|
||||
PRIMARY KEY (order_id, coupon_id),
|
||||
FOREIGN KEY (order_id) REFERENCES orders(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (coupon_id) REFERENCES coupons(id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
// Verify all tables are parsed
|
||||
expect(result.tables).toHaveLength(14);
|
||||
|
||||
const expectedTables = [
|
||||
'addresses',
|
||||
'brands',
|
||||
'cart_items',
|
||||
'carts',
|
||||
'categories',
|
||||
'coupons',
|
||||
'customers',
|
||||
'order_coupons',
|
||||
'order_items',
|
||||
'orders',
|
||||
'payments',
|
||||
'product_images',
|
||||
'products',
|
||||
'reviews',
|
||||
];
|
||||
|
||||
expect(result.tables.map((t) => t.name).sort()).toEqual(
|
||||
expectedTables
|
||||
);
|
||||
|
||||
// Verify key relationships
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'products' &&
|
||||
r.targetTable === 'categories'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'cart_items' &&
|
||||
r.targetTable === 'products'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'orders' &&
|
||||
r.targetTable === 'customers'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
// Check self-referencing relationship
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'categories' &&
|
||||
r.targetTable === 'categories' &&
|
||||
r.sourceColumn === 'parent_id'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
// Verify ENUMs are parsed
|
||||
const products = result.tables.find((t) => t.name === 'products');
|
||||
const statusColumn = products?.columns.find(
|
||||
(c) => c.name === 'status'
|
||||
);
|
||||
expect(statusColumn?.type).toBe('ENUM');
|
||||
|
||||
// Verify indexes
|
||||
expect(
|
||||
products?.indexes.some((idx) => idx.name === 'idx_sku')
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Social Media Platform Schema', () => {
|
||||
it('should parse a social media database with complex relationships', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE users (
|
||||
id BIGINT PRIMARY KEY AUTO_INCREMENT,
|
||||
username VARCHAR(50) UNIQUE NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
display_name VARCHAR(100),
|
||||
bio TEXT,
|
||||
avatar_url VARCHAR(500),
|
||||
cover_image_url VARCHAR(500),
|
||||
is_verified BOOLEAN DEFAULT FALSE,
|
||||
is_private BOOLEAN DEFAULT FALSE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
INDEX idx_username (username),
|
||||
INDEX idx_email (email)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
|
||||
|
||||
CREATE TABLE posts (
|
||||
id BIGINT PRIMARY KEY AUTO_INCREMENT,
|
||||
user_id BIGINT NOT NULL,
|
||||
content TEXT NOT NULL,
|
||||
visibility ENUM('public', 'followers', 'private') DEFAULT 'public',
|
||||
reply_to_id BIGINT,
|
||||
repost_of_id BIGINT,
|
||||
like_count INT DEFAULT 0,
|
||||
reply_count INT DEFAULT 0,
|
||||
repost_count INT DEFAULT 0,
|
||||
view_count INT DEFAULT 0,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (reply_to_id) REFERENCES posts(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (repost_of_id) REFERENCES posts(id) ON DELETE CASCADE,
|
||||
INDEX idx_user (user_id),
|
||||
INDEX idx_created (created_at),
|
||||
FULLTEXT idx_content (content)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
|
||||
|
||||
CREATE TABLE follows (
|
||||
follower_id BIGINT NOT NULL,
|
||||
following_id BIGINT NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
PRIMARY KEY (follower_id, following_id),
|
||||
FOREIGN KEY (follower_id) REFERENCES users(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (following_id) REFERENCES users(id) ON DELETE CASCADE,
|
||||
INDEX idx_following (following_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE likes (
|
||||
user_id BIGINT NOT NULL,
|
||||
post_id BIGINT NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
PRIMARY KEY (user_id, post_id),
|
||||
FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (post_id) REFERENCES posts(id) ON DELETE CASCADE,
|
||||
INDEX idx_post (post_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE hashtags (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
tag VARCHAR(100) UNIQUE NOT NULL,
|
||||
post_count INT DEFAULT 0,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
INDEX idx_tag (tag)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE post_hashtags (
|
||||
post_id BIGINT NOT NULL,
|
||||
hashtag_id INT NOT NULL,
|
||||
PRIMARY KEY (post_id, hashtag_id),
|
||||
FOREIGN KEY (post_id) REFERENCES posts(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (hashtag_id) REFERENCES hashtags(id) ON DELETE CASCADE,
|
||||
INDEX idx_hashtag (hashtag_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
|
||||
CREATE TABLE messages (
|
||||
id BIGINT PRIMARY KEY AUTO_INCREMENT,
|
||||
sender_id BIGINT NOT NULL,
|
||||
recipient_id BIGINT NOT NULL,
|
||||
content TEXT NOT NULL,
|
||||
is_read BOOLEAN DEFAULT FALSE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (sender_id) REFERENCES users(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (recipient_id) REFERENCES users(id) ON DELETE CASCADE,
|
||||
INDEX idx_recipient (recipient_id, is_read),
|
||||
INDEX idx_created (created_at)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
|
||||
|
||||
CREATE TABLE notifications (
|
||||
id BIGINT PRIMARY KEY AUTO_INCREMENT,
|
||||
user_id BIGINT NOT NULL,
|
||||
type ENUM('like', 'follow', 'reply', 'repost', 'mention') NOT NULL,
|
||||
actor_id BIGINT NOT NULL,
|
||||
post_id BIGINT,
|
||||
is_read BOOLEAN DEFAULT FALSE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (actor_id) REFERENCES users(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (post_id) REFERENCES posts(id) ON DELETE CASCADE,
|
||||
INDEX idx_user_unread (user_id, is_read),
|
||||
INDEX idx_created (created_at)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(8);
|
||||
|
||||
// Check self-referencing relationships in posts
|
||||
const postRelationships = result.relationships.filter(
|
||||
(r) => r.sourceTable === 'posts'
|
||||
);
|
||||
expect(
|
||||
postRelationships.some(
|
||||
(r) =>
|
||||
r.targetTable === 'posts' &&
|
||||
r.sourceColumn === 'reply_to_id'
|
||||
)
|
||||
).toBe(true);
|
||||
expect(
|
||||
postRelationships.some(
|
||||
(r) =>
|
||||
r.targetTable === 'posts' &&
|
||||
r.sourceColumn === 'repost_of_id'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
// Check many-to-many relationships
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'follows' &&
|
||||
r.sourceColumn === 'follower_id' &&
|
||||
r.targetTable === 'users'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'follows' &&
|
||||
r.sourceColumn === 'following_id' &&
|
||||
r.targetTable === 'users'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
// Verify composite primary keys
|
||||
const follows = result.tables.find((t) => t.name === 'follows');
|
||||
const followerCol = follows?.columns.find(
|
||||
(c) => c.name === 'follower_id'
|
||||
);
|
||||
const followingCol = follows?.columns.find(
|
||||
(c) => c.name === 'following_id'
|
||||
);
|
||||
expect(followerCol?.primaryKey).toBe(true);
|
||||
expect(followingCol?.primaryKey).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Financial System Schema', () => {
|
||||
it('should parse a financial system with decimal precision and constraints', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE currencies (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
code CHAR(3) UNIQUE NOT NULL,
|
||||
name VARCHAR(50) NOT NULL,
|
||||
symbol VARCHAR(5),
|
||||
decimal_places TINYINT DEFAULT 2,
|
||||
is_active BOOLEAN DEFAULT TRUE
|
||||
) ENGINE=InnoDB;
|
||||
|
||||
CREATE TABLE accounts (
|
||||
id BIGINT PRIMARY KEY AUTO_INCREMENT,
|
||||
account_number VARCHAR(20) UNIQUE NOT NULL,
|
||||
account_type ENUM('checking', 'savings', 'investment', 'credit') NOT NULL,
|
||||
currency_id INT NOT NULL,
|
||||
balance DECIMAL(19,4) DEFAULT 0.0000,
|
||||
available_balance DECIMAL(19,4) DEFAULT 0.0000,
|
||||
credit_limit DECIMAL(19,4),
|
||||
interest_rate DECIMAL(5,4),
|
||||
status ENUM('active', 'frozen', 'closed') DEFAULT 'active',
|
||||
opened_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
closed_at TIMESTAMP NULL,
|
||||
FOREIGN KEY (currency_id) REFERENCES currencies(id),
|
||||
INDEX idx_account_number (account_number),
|
||||
INDEX idx_status (status),
|
||||
CHECK (balance >= 0 OR account_type = 'credit'),
|
||||
CHECK (available_balance <= balance OR account_type = 'credit')
|
||||
) ENGINE=InnoDB;
|
||||
|
||||
CREATE TABLE transactions (
|
||||
id BIGINT PRIMARY KEY AUTO_INCREMENT,
|
||||
transaction_ref VARCHAR(50) UNIQUE NOT NULL,
|
||||
from_account_id BIGINT,
|
||||
to_account_id BIGINT,
|
||||
amount DECIMAL(19,4) NOT NULL,
|
||||
currency_id INT NOT NULL,
|
||||
type ENUM('deposit', 'withdrawal', 'transfer', 'fee', 'interest') NOT NULL,
|
||||
status ENUM('pending', 'processing', 'completed', 'failed', 'reversed') DEFAULT 'pending',
|
||||
description TEXT,
|
||||
metadata JSON,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
processed_at TIMESTAMP NULL,
|
||||
FOREIGN KEY (from_account_id) REFERENCES accounts(id),
|
||||
FOREIGN KEY (to_account_id) REFERENCES accounts(id),
|
||||
FOREIGN KEY (currency_id) REFERENCES currencies(id),
|
||||
INDEX idx_ref (transaction_ref),
|
||||
INDEX idx_from_account (from_account_id),
|
||||
INDEX idx_to_account (to_account_id),
|
||||
INDEX idx_created (created_at),
|
||||
INDEX idx_status (status),
|
||||
CHECK (from_account_id IS NOT NULL OR to_account_id IS NOT NULL)
|
||||
) ENGINE=InnoDB;
|
||||
|
||||
CREATE TABLE exchange_rates (
|
||||
id INT PRIMARY KEY AUTO_INCREMENT,
|
||||
from_currency_id INT NOT NULL,
|
||||
to_currency_id INT NOT NULL,
|
||||
rate DECIMAL(19,10) NOT NULL,
|
||||
effective_date DATE NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (from_currency_id) REFERENCES currencies(id),
|
||||
FOREIGN KEY (to_currency_id) REFERENCES currencies(id),
|
||||
UNIQUE KEY uk_currency_pair_date (from_currency_id, to_currency_id, effective_date),
|
||||
INDEX idx_effective_date (effective_date)
|
||||
) ENGINE=InnoDB;
|
||||
|
||||
CREATE TABLE audit_logs (
|
||||
id BIGINT PRIMARY KEY AUTO_INCREMENT,
|
||||
entity_type VARCHAR(50) NOT NULL,
|
||||
entity_id BIGINT NOT NULL,
|
||||
action VARCHAR(50) NOT NULL,
|
||||
user_id BIGINT,
|
||||
ip_address VARCHAR(45),
|
||||
user_agent TEXT,
|
||||
old_values JSON,
|
||||
new_values JSON,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
INDEX idx_entity (entity_type, entity_id),
|
||||
INDEX idx_created (created_at),
|
||||
INDEX idx_action (action)
|
||||
) ENGINE=InnoDB;
|
||||
`;
|
||||
|
||||
const result = await fromMySQLImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(5);
|
||||
|
||||
// Check decimal precision is preserved
|
||||
const accounts = result.tables.find((t) => t.name === 'accounts');
|
||||
const balanceCol = accounts?.columns.find(
|
||||
(c) => c.name === 'balance'
|
||||
);
|
||||
expect(balanceCol?.type).toBe('DECIMAL');
|
||||
|
||||
const transactionFKs = result.relationships.filter(
|
||||
(r) => r.sourceTable === 'transactions'
|
||||
);
|
||||
|
||||
expect(
|
||||
transactionFKs.some(
|
||||
(r) =>
|
||||
r.sourceColumn === 'from_account_id' &&
|
||||
r.targetTable === 'accounts'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
transactionFKs.some(
|
||||
(r) =>
|
||||
r.sourceColumn === 'to_account_id' &&
|
||||
r.targetTable === 'accounts'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
// Check composite unique constraint
|
||||
const exchangeRates = result.tables.find(
|
||||
(t) => t.name === 'exchange_rates'
|
||||
);
|
||||
expect(
|
||||
exchangeRates?.indexes.some(
|
||||
(idx) =>
|
||||
idx.name === 'uk_currency_pair_date' &&
|
||||
idx.unique === true
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
@@ -0,0 +1,117 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import {
|
||||
validateMySQLSyntax,
|
||||
formatValidationMessage,
|
||||
} from '../mysql-validator';
|
||||
|
||||
describe('MySQL Validator', () => {
|
||||
it('should pass valid MySQL after comments are removed', () => {
|
||||
// In the new flow, comments are removed before validation
|
||||
// So this SQL would have comments stripped and be valid
|
||||
const sql = `CREATE TABLE packages (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
badge_text VARCHAR(50),
|
||||
color_code VARCHAR(7)
|
||||
);`;
|
||||
|
||||
const result = validateMySQLSyntax(sql);
|
||||
|
||||
expect(result.isValid).toBe(true);
|
||||
expect(result.errors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should validate clean SQL without comments', () => {
|
||||
// Comments would be removed before validation
|
||||
const sql = `CREATE TABLE product_vserver (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
available_os JSON
|
||||
);`;
|
||||
|
||||
const result = validateMySQLSyntax(sql);
|
||||
|
||||
expect(result.isValid).toBe(true);
|
||||
expect(result.errors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should detect inline REFERENCES', () => {
|
||||
const sql = `CREATE TABLE users (
|
||||
id INT PRIMARY KEY,
|
||||
profile_id INT REFERENCES profiles(id)
|
||||
);`;
|
||||
|
||||
const result = validateMySQLSyntax(sql);
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors).toHaveLength(1);
|
||||
expect(result.errors[0].code).toBe('INLINE_REFERENCES');
|
||||
expect(result.errors[0].line).toBe(3);
|
||||
});
|
||||
|
||||
it('should pass valid MySQL', () => {
|
||||
const sql = `CREATE TABLE users (
|
||||
id INT PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE
|
||||
);
|
||||
|
||||
CREATE TABLE posts (
|
||||
id INT PRIMARY KEY,
|
||||
user_id INT NOT NULL,
|
||||
title VARCHAR(200),
|
||||
FOREIGN KEY (user_id) REFERENCES users(id)
|
||||
);`;
|
||||
|
||||
const result = validateMySQLSyntax(sql);
|
||||
|
||||
expect(result.isValid).toBe(true);
|
||||
expect(result.errors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should validate a fantasy-themed MySQL schema', () => {
|
||||
// Test with already sanitized SQL (comments removed)
|
||||
const sql = `
|
||||
CREATE TABLE magic_schools (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL,
|
||||
element_type VARCHAR(50),
|
||||
forbidden_spells JSON,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE wizards (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(200) NOT NULL,
|
||||
magic_school_id INT REFERENCES magic_schools(id), -- Inline REFERENCES (PostgreSQL style)
|
||||
power_level INT DEFAULT 1
|
||||
);`;
|
||||
|
||||
const result = validateMySQLSyntax(sql);
|
||||
|
||||
console.log('\n=== Fantasy Schema Validation ===');
|
||||
console.log(`Valid: ${result.isValid}`);
|
||||
console.log(`Errors: ${result.errors.length}`);
|
||||
console.log(`Warnings: ${result.warnings.length}`);
|
||||
|
||||
// Should only have inline REFERENCES error now
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors.length).toBe(1);
|
||||
expect(result.errors[0].code).toBe('INLINE_REFERENCES');
|
||||
});
|
||||
|
||||
it('should format validation messages nicely', () => {
|
||||
const sql = `CREATE TABLE test (
|
||||
id INT PRIMARY KEY,
|
||||
ref_id INT REFERENCES other(id)
|
||||
);`;
|
||||
|
||||
const result = validateMySQLSyntax(sql);
|
||||
const message = formatValidationMessage(result);
|
||||
|
||||
console.log('\nFormatted validation message:');
|
||||
console.log(message);
|
||||
|
||||
expect(message).toContain('❌ MySQL/MariaDB syntax validation failed');
|
||||
expect(message).toContain('Error at line 3');
|
||||
expect(message).toContain('💡 Suggestion');
|
||||
});
|
||||
});
|
1165
src/lib/data/sql-import/dialect-importers/mysql/mysql-improved.ts
Normal file
1165
src/lib/data/sql-import/dialect-importers/mysql/mysql-improved.ts
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,252 @@
|
||||
export interface MySQLValidationResult {
|
||||
isValid: boolean;
|
||||
errors: MySQLValidationError[];
|
||||
warnings: MySQLValidationWarning[];
|
||||
canAutoFix: boolean;
|
||||
}
|
||||
|
||||
export interface MySQLValidationError {
|
||||
line?: number;
|
||||
column?: number;
|
||||
message: string;
|
||||
code: string;
|
||||
suggestion?: string;
|
||||
}
|
||||
|
||||
export interface MySQLValidationWarning {
|
||||
line?: number;
|
||||
message: string;
|
||||
code: string;
|
||||
}
|
||||
|
||||
export function validateMySQLSyntax(sql: string): MySQLValidationResult {
|
||||
const errors: MySQLValidationError[] = [];
|
||||
const warnings: MySQLValidationWarning[] = [];
|
||||
const canAutoFix = false;
|
||||
|
||||
const lines = sql.split('\n');
|
||||
|
||||
// Check for common MySQL syntax issues
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
const lineNum = i + 1;
|
||||
|
||||
// Skip comment checks if comments are already removed
|
||||
// This check is now less relevant since sanitizeSql removes comments first
|
||||
|
||||
// 2. Check for inline REFERENCES (PostgreSQL style)
|
||||
if (/\w+\s+\w+\s+(?:PRIMARY\s+KEY\s+)?REFERENCES\s+/i.test(line)) {
|
||||
errors.push({
|
||||
line: lineNum,
|
||||
message:
|
||||
'MySQL/MariaDB does not support inline REFERENCES in column definitions.',
|
||||
code: 'INLINE_REFERENCES',
|
||||
suggestion:
|
||||
'Use FOREIGN KEY constraint instead:\nFOREIGN KEY (column_name) REFERENCES table_name(column_name)',
|
||||
});
|
||||
}
|
||||
|
||||
// 3. Check for missing semicolons - be more selective
|
||||
const trimmedLine = line.trim();
|
||||
// Only check if this looks like the end of a CREATE TABLE statement
|
||||
if (
|
||||
trimmedLine &&
|
||||
trimmedLine.endsWith(')') &&
|
||||
!trimmedLine.endsWith(';') &&
|
||||
!trimmedLine.endsWith(',') &&
|
||||
i + 1 < lines.length
|
||||
) {
|
||||
// Look backwards to see if this is part of a CREATE TABLE
|
||||
let isCreateTable = false;
|
||||
for (let j = i; j >= Math.max(0, i - 20); j--) {
|
||||
if (/CREATE\s+TABLE/i.test(lines[j])) {
|
||||
isCreateTable = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (isCreateTable) {
|
||||
const nextLine = lines[i + 1].trim();
|
||||
// Only warn if next line starts a new statement
|
||||
if (
|
||||
nextLine &&
|
||||
nextLine.match(
|
||||
/^(CREATE|DROP|ALTER|INSERT|UPDATE|DELETE)\s+/i
|
||||
)
|
||||
) {
|
||||
warnings.push({
|
||||
line: lineNum,
|
||||
message: 'Statement may be missing a semicolon',
|
||||
code: 'MISSING_SEMICOLON',
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Skip JSON comment checks if comments are already removed
|
||||
|
||||
// 5. Check for common typos
|
||||
if (line.match(/FOREIGN\s+KEY\s*\(/i) && !line.includes('REFERENCES')) {
|
||||
// Check if REFERENCES is on the next line
|
||||
if (i + 1 >= lines.length || !lines[i + 1].includes('REFERENCES')) {
|
||||
errors.push({
|
||||
line: lineNum,
|
||||
message:
|
||||
'FOREIGN KEY constraint is missing REFERENCES clause',
|
||||
code: 'MISSING_REFERENCES',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// 6. Check for mismatched quotes - but be smart about it
|
||||
// Skip lines that are comments or contain escaped quotes
|
||||
if (!line.trim().startsWith('--') && !line.trim().startsWith('#')) {
|
||||
// Remove escaped quotes before counting
|
||||
const cleanLine = line
|
||||
.replace(/\\'/g, '')
|
||||
.replace(/\\"/g, '')
|
||||
.replace(/\\`/g, '');
|
||||
|
||||
// Also remove quoted strings to avoid false positives
|
||||
const withoutStrings = cleanLine
|
||||
.replace(/'[^']*'/g, '')
|
||||
.replace(/"[^"]*"/g, '')
|
||||
.replace(/`[^`]*`/g, '');
|
||||
|
||||
// Now count unmatched quotes
|
||||
const singleQuotes = (withoutStrings.match(/'/g) || []).length;
|
||||
const doubleQuotes = (withoutStrings.match(/"/g) || []).length;
|
||||
const backticks = (withoutStrings.match(/`/g) || []).length;
|
||||
|
||||
if (singleQuotes > 0 || doubleQuotes > 0 || backticks > 0) {
|
||||
warnings.push({
|
||||
line: lineNum,
|
||||
message: 'Possible mismatched quotes detected',
|
||||
code: 'MISMATCHED_QUOTES',
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check for unsupported MySQL features
|
||||
const unsupportedFeatures = [
|
||||
{ pattern: /CREATE\s+TRIGGER/i, feature: 'Triggers' },
|
||||
{ pattern: /CREATE\s+PROCEDURE/i, feature: 'Stored Procedures' },
|
||||
{ pattern: /CREATE\s+FUNCTION/i, feature: 'Functions' },
|
||||
{ pattern: /CREATE\s+EVENT/i, feature: 'Events' },
|
||||
{ pattern: /CREATE\s+VIEW/i, feature: 'Views' },
|
||||
];
|
||||
|
||||
for (const { pattern, feature } of unsupportedFeatures) {
|
||||
if (pattern.test(sql)) {
|
||||
warnings.push({
|
||||
message: `${feature} are not supported and will be ignored during import`,
|
||||
code: `UNSUPPORTED_${feature.toUpperCase().replace(' ', '_')}`,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
isValid: errors.length === 0,
|
||||
errors,
|
||||
warnings,
|
||||
canAutoFix,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Consolidate duplicate warnings and format them nicely
|
||||
*/
|
||||
function consolidateWarnings(warnings: MySQLValidationWarning[]): {
|
||||
message: string;
|
||||
count: number;
|
||||
lines?: number[];
|
||||
}[] {
|
||||
const warningMap = new Map<string, { count: number; lines: number[] }>();
|
||||
|
||||
for (const warning of warnings) {
|
||||
const key = warning.code || warning.message;
|
||||
if (!warningMap.has(key)) {
|
||||
warningMap.set(key, { count: 0, lines: [] });
|
||||
}
|
||||
const entry = warningMap.get(key)!;
|
||||
entry.count++;
|
||||
if (warning.line) {
|
||||
entry.lines.push(warning.line);
|
||||
}
|
||||
}
|
||||
|
||||
const consolidated: { message: string; count: number; lines?: number[] }[] =
|
||||
[];
|
||||
|
||||
for (const [key, value] of warningMap) {
|
||||
// Find the original warning to get the message
|
||||
const originalWarning = warnings.find(
|
||||
(w) => (w.code || w.message) === key
|
||||
)!;
|
||||
consolidated.push({
|
||||
message: originalWarning.message,
|
||||
count: value.count,
|
||||
lines: value.lines.length > 0 ? value.lines : undefined,
|
||||
});
|
||||
}
|
||||
|
||||
// Sort by count (most frequent first)
|
||||
return consolidated.sort((a, b) => b.count - a.count);
|
||||
}
|
||||
|
||||
export function formatValidationMessage(result: MySQLValidationResult): string {
|
||||
const messages: string[] = [];
|
||||
|
||||
if (!result.isValid) {
|
||||
messages.push('❌ MySQL/MariaDB syntax validation failed:\n');
|
||||
|
||||
for (const error of result.errors) {
|
||||
messages.push(
|
||||
` Error${error.line ? ` at line ${error.line}` : ''}: ${error.message}`
|
||||
);
|
||||
if (error.suggestion) {
|
||||
messages.push(` 💡 Suggestion: ${error.suggestion}`);
|
||||
}
|
||||
messages.push('');
|
||||
}
|
||||
}
|
||||
|
||||
if (result.warnings.length > 0) {
|
||||
const consolidated = consolidateWarnings(result.warnings);
|
||||
|
||||
// Only show if there are a reasonable number of warnings
|
||||
if (consolidated.length <= 5) {
|
||||
messages.push('⚠️ Import Notes:\n');
|
||||
for (const warning of consolidated) {
|
||||
if (warning.count > 1) {
|
||||
messages.push(
|
||||
` • ${warning.message} (${warning.count} occurrences)`
|
||||
);
|
||||
} else {
|
||||
messages.push(` • ${warning.message}`);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// For many warnings, just show a summary
|
||||
const totalWarnings = result.warnings.length;
|
||||
messages.push(
|
||||
`⚠️ Import completed with ${totalWarnings} warnings:\n`
|
||||
);
|
||||
|
||||
// Show top 3 most common warnings
|
||||
const topWarnings = consolidated.slice(0, 3);
|
||||
for (const warning of topWarnings) {
|
||||
messages.push(` • ${warning.message} (${warning.count}x)`);
|
||||
}
|
||||
|
||||
if (consolidated.length > 3) {
|
||||
messages.push(
|
||||
` • ...and ${consolidated.length - 3} other warning types`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return messages.join('\n');
|
||||
}
|
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,51 @@
|
||||
# PostgreSQL Parser Tests
|
||||
|
||||
This directory contains comprehensive tests for the PostgreSQL SQL import parser.
|
||||
|
||||
## Test Files
|
||||
|
||||
- `postgresql-core.test.ts` - Core functionality tests that should always pass
|
||||
- `postgresql-parser.test.ts` - Comprehensive edge case tests (some may need adjustment based on parser limitations)
|
||||
- `postgresql-regression.test.ts` - Regression tests for specific bugs that were fixed
|
||||
- `postgresql-examples.test.ts` - Tests using real-world SQL examples
|
||||
|
||||
## Test Data
|
||||
|
||||
All test data is now embedded directly within the test files as hardcoded SQL strings. This ensures tests are self-contained and don't depend on external files.
|
||||
|
||||
## Running Tests
|
||||
|
||||
```bash
|
||||
# Run all PostgreSQL parser tests
|
||||
npm test src/lib/data/sql-import/dialect-importers/postgresql/__tests__
|
||||
|
||||
# Run specific test file
|
||||
npm test postgresql-core.test.ts
|
||||
|
||||
# Run tests in watch mode
|
||||
npm test -- --watch
|
||||
|
||||
# Run tests with coverage
|
||||
npm run test:coverage
|
||||
```
|
||||
|
||||
## Test Coverage
|
||||
|
||||
The tests cover:
|
||||
|
||||
1. **Basic table parsing** - Simple CREATE TABLE statements
|
||||
2. **Foreign key relationships** - Both inline and table-level constraints
|
||||
3. **Complex data types** - UUID, JSONB, arrays, numeric precision
|
||||
4. **Generated columns** - IDENTITY and computed columns
|
||||
5. **Unsupported features** - Functions, triggers, policies, RLS
|
||||
6. **Edge cases** - Multi-line definitions, dollar quotes, malformed SQL
|
||||
7. **Fallback parsing** - Tables that fail AST parsing but can be extracted
|
||||
|
||||
## Adding New Tests
|
||||
|
||||
When adding new tests:
|
||||
|
||||
1. Add simple unit tests to `postgresql-core.test.ts`
|
||||
2. Add edge cases to `postgresql-parser.test.ts`
|
||||
3. Add regression tests for bugs to `postgresql-regression.test.ts`
|
||||
4. Use real SQL examples in `postgresql-examples.test.ts`
|
@@ -0,0 +1,458 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('PostgreSQL Core Parser Tests', () => {
|
||||
it('should parse basic tables', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('wizards');
|
||||
expect(result.tables[0].columns).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should parse foreign key relationships', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE guilds (id INTEGER PRIMARY KEY);
|
||||
CREATE TABLE mages (
|
||||
id INTEGER PRIMARY KEY,
|
||||
guild_id INTEGER REFERENCES guilds(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
expect(result.relationships[0].sourceTable).toBe('mages');
|
||||
expect(result.relationships[0].targetTable).toBe('guilds');
|
||||
});
|
||||
|
||||
it('should skip functions with warnings', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE test_table (id INTEGER PRIMARY KEY);
|
||||
|
||||
CREATE FUNCTION test_func() RETURNS VOID AS $$
|
||||
BEGIN
|
||||
NULL;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(result.warnings!.some((w) => w.includes('Function'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle tables that fail to parse', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE valid_table (id INTEGER PRIMARY KEY);
|
||||
|
||||
-- This table has syntax that might fail parsing
|
||||
CREATE TABLE complex_table (
|
||||
id INTEGER PRIMARY KEY,
|
||||
value NUMERIC(10,
|
||||
2) GENERATED ALWAYS AS (1 + 1) STORED
|
||||
);
|
||||
|
||||
CREATE TABLE another_valid (
|
||||
id INTEGER PRIMARY KEY,
|
||||
complex_ref INTEGER REFERENCES complex_table(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
// Should find all 3 tables even if complex_table fails to parse
|
||||
expect(result.tables).toHaveLength(3);
|
||||
expect(result.tables.map((t) => t.name).sort()).toEqual([
|
||||
'another_valid',
|
||||
'complex_table',
|
||||
'valid_table',
|
||||
]);
|
||||
|
||||
// Should still find the foreign key relationship
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'another_valid' &&
|
||||
r.targetTable === 'complex_table'
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should parse the magical academy system fixture', async () => {
|
||||
const sql = `-- Magical Academy System Database Schema
|
||||
-- This is a test fixture representing a typical magical academy system
|
||||
|
||||
CREATE TABLE magic_schools(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name text NOT NULL,
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
updated_at timestamptz NOT NULL DEFAULT now()
|
||||
);
|
||||
|
||||
CREATE TABLE towers(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES magic_schools(id) ON DELETE CASCADE,
|
||||
name text NOT NULL,
|
||||
location text,
|
||||
crystal_frequency varchar(20),
|
||||
created_at timestamptz NOT NULL DEFAULT now()
|
||||
);
|
||||
|
||||
CREATE TABLE magical_ranks(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES magic_schools(id) ON DELETE CASCADE,
|
||||
name text NOT NULL,
|
||||
description text,
|
||||
is_system boolean NOT NULL DEFAULT false,
|
||||
created_at timestamptz NOT NULL DEFAULT now()
|
||||
);
|
||||
|
||||
CREATE TABLE spell_permissions(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
spell_school text NOT NULL,
|
||||
spell_action text NOT NULL,
|
||||
description text,
|
||||
UNIQUE (spell_school, spell_action)
|
||||
);
|
||||
|
||||
CREATE TABLE rank_permissions(
|
||||
rank_id uuid NOT NULL REFERENCES magical_ranks(id) ON DELETE CASCADE,
|
||||
permission_id uuid NOT NULL REFERENCES spell_permissions(id) ON DELETE CASCADE,
|
||||
granted_at timestamptz NOT NULL DEFAULT now(),
|
||||
PRIMARY KEY (rank_id, permission_id)
|
||||
);
|
||||
|
||||
CREATE TABLE grimoire_types(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES magic_schools(id) ON DELETE CASCADE,
|
||||
name text NOT NULL,
|
||||
description text,
|
||||
is_active boolean NOT NULL DEFAULT true
|
||||
);
|
||||
|
||||
CREATE TABLE wizards(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES magic_schools(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
username text NOT NULL,
|
||||
email text NOT NULL,
|
||||
password_hash text NOT NULL,
|
||||
first_name text NOT NULL,
|
||||
last_name text NOT NULL,
|
||||
is_active boolean NOT NULL DEFAULT true,
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
UNIQUE (school_id, username),
|
||||
UNIQUE (email)
|
||||
);
|
||||
|
||||
-- This function should not prevent the next table from being parsed
|
||||
CREATE FUNCTION enforce_wizard_tower_school()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (
|
||||
SELECT 1 FROM towers
|
||||
WHERE id = NEW.tower_id AND school_id = NEW.school_id
|
||||
) THEN
|
||||
RAISE EXCEPTION 'Tower does not belong to magic school';
|
||||
END IF;
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE TABLE wizard_ranks(
|
||||
wizard_id uuid NOT NULL REFERENCES wizards(id) ON DELETE CASCADE,
|
||||
rank_id uuid NOT NULL REFERENCES magical_ranks(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
assigned_at timestamptz NOT NULL DEFAULT now(),
|
||||
assigned_by uuid REFERENCES wizards(id),
|
||||
PRIMARY KEY (wizard_id, rank_id, tower_id)
|
||||
);
|
||||
|
||||
CREATE TABLE apprentices(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES magic_schools(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
apprentice_id text NOT NULL, -- Magical Apprentice Identifier
|
||||
first_name text NOT NULL,
|
||||
last_name text NOT NULL,
|
||||
date_of_birth date NOT NULL,
|
||||
magical_affinity varchar(10),
|
||||
email text,
|
||||
crystal_phone varchar(20),
|
||||
dormitory text,
|
||||
emergency_contact jsonb,
|
||||
patron_info jsonb,
|
||||
primary_mentor uuid REFERENCES wizards(id),
|
||||
referring_wizard uuid REFERENCES wizards(id),
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
UNIQUE (school_id, apprentice_id)
|
||||
);
|
||||
|
||||
CREATE TABLE spell_lessons(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES magic_schools(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
apprentice_id uuid NOT NULL REFERENCES apprentices(id) ON DELETE CASCADE,
|
||||
instructor_id uuid NOT NULL REFERENCES wizards(id),
|
||||
lesson_date timestamptz NOT NULL,
|
||||
duration_minutes integer NOT NULL DEFAULT 30,
|
||||
status text NOT NULL DEFAULT 'scheduled',
|
||||
notes text,
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
created_by uuid NOT NULL REFERENCES wizards(id),
|
||||
CONSTRAINT valid_status CHECK (status IN ('scheduled', 'confirmed', 'in_progress', 'completed', 'cancelled', 'no_show'))
|
||||
);
|
||||
|
||||
CREATE TABLE grimoires(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES magic_schools(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
apprentice_id uuid NOT NULL REFERENCES apprentices(id) ON DELETE CASCADE,
|
||||
lesson_id uuid REFERENCES spell_lessons(id),
|
||||
grimoire_type_id uuid NOT NULL REFERENCES grimoire_types(id),
|
||||
instructor_id uuid NOT NULL REFERENCES wizards(id),
|
||||
content jsonb NOT NULL,
|
||||
enchantments jsonb,
|
||||
is_sealed boolean NOT NULL DEFAULT false,
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
updated_at timestamptz NOT NULL DEFAULT now()
|
||||
);
|
||||
|
||||
CREATE TABLE tuition_scrolls(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES magic_schools(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
apprentice_id uuid NOT NULL REFERENCES apprentices(id) ON DELETE CASCADE,
|
||||
scroll_number text NOT NULL,
|
||||
scroll_date date NOT NULL DEFAULT CURRENT_DATE,
|
||||
due_date date NOT NULL,
|
||||
subtotal numeric(10,2) NOT NULL,
|
||||
magical_tax numeric(10,2) NOT NULL DEFAULT 0,
|
||||
scholarship_amount numeric(10,2) NOT NULL DEFAULT 0,
|
||||
total_gold numeric(10,2) NOT NULL,
|
||||
status text NOT NULL DEFAULT 'draft',
|
||||
notes text,
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
created_by uuid NOT NULL REFERENCES wizards(id),
|
||||
UNIQUE (school_id, scroll_number),
|
||||
CONSTRAINT valid_scroll_status CHECK (status IN ('draft', 'sent', 'paid', 'overdue', 'cancelled'))
|
||||
);
|
||||
|
||||
CREATE TABLE scroll_line_items(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
scroll_id uuid NOT NULL REFERENCES tuition_scrolls(id) ON DELETE CASCADE,
|
||||
description text NOT NULL,
|
||||
quantity numeric(10,2) NOT NULL DEFAULT 1,
|
||||
gold_per_unit numeric(10,2) NOT NULL,
|
||||
total_gold numeric(10,2) NOT NULL,
|
||||
lesson_id uuid REFERENCES spell_lessons(id),
|
||||
created_at timestamptz NOT NULL DEFAULT now()
|
||||
);
|
||||
|
||||
CREATE TABLE patron_sponsorships(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
scroll_id uuid NOT NULL REFERENCES tuition_scrolls(id) ON DELETE CASCADE,
|
||||
patron_house text NOT NULL,
|
||||
sponsorship_code text NOT NULL,
|
||||
claim_number text NOT NULL,
|
||||
claim_date date NOT NULL DEFAULT CURRENT_DATE,
|
||||
gold_requested numeric(10,2) NOT NULL,
|
||||
gold_approved numeric(10,2),
|
||||
status text NOT NULL DEFAULT 'submitted',
|
||||
denial_reason text,
|
||||
notes text,
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
updated_at timestamptz NOT NULL DEFAULT now(),
|
||||
UNIQUE (claim_number),
|
||||
CONSTRAINT valid_sponsorship_status CHECK (status IN ('draft', 'submitted', 'in_review', 'approved', 'partial', 'denied', 'appealed'))
|
||||
);
|
||||
|
||||
CREATE TABLE gold_payments(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
scroll_id uuid NOT NULL REFERENCES tuition_scrolls(id) ON DELETE CASCADE,
|
||||
payment_date timestamptz NOT NULL DEFAULT now(),
|
||||
gold_amount numeric(10,2) NOT NULL,
|
||||
payment_method text NOT NULL,
|
||||
reference_rune text,
|
||||
notes text,
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
created_by uuid NOT NULL REFERENCES wizards(id),
|
||||
CONSTRAINT valid_payment_method CHECK (payment_method IN ('gold_coins', 'crystal_transfer', 'mithril_card', 'dragon_scale', 'patron_sponsorship', 'other'))
|
||||
);
|
||||
|
||||
CREATE TABLE arcane_logs(
|
||||
id bigserial PRIMARY KEY,
|
||||
school_id uuid,
|
||||
wizard_id uuid,
|
||||
tower_id uuid,
|
||||
table_name text NOT NULL,
|
||||
record_id uuid,
|
||||
spell_operation text NOT NULL,
|
||||
old_values jsonb,
|
||||
new_values jsonb,
|
||||
casting_source inet,
|
||||
magical_signature text,
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
FOREIGN KEY (school_id) REFERENCES magic_schools(id) ON DELETE SET NULL,
|
||||
FOREIGN KEY (wizard_id) REFERENCES wizards(id) ON DELETE SET NULL,
|
||||
FOREIGN KEY (tower_id) REFERENCES towers(id) ON DELETE SET NULL,
|
||||
CONSTRAINT valid_spell_operation CHECK (spell_operation IN ('INSERT', 'UPDATE', 'DELETE'))
|
||||
);
|
||||
|
||||
-- Enable Row Level Security
|
||||
ALTER TABLE wizards ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE apprentices ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE grimoires ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE spell_lessons ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE tuition_scrolls ENABLE ROW LEVEL SECURITY;
|
||||
|
||||
-- Create RLS Policies
|
||||
CREATE POLICY school_isolation_wizards ON wizards
|
||||
FOR ALL TO authenticated
|
||||
USING (school_id = current_setting('app.current_school')::uuid);
|
||||
|
||||
CREATE POLICY school_isolation_apprentices ON apprentices
|
||||
FOR ALL TO authenticated
|
||||
USING (school_id = current_setting('app.current_school')::uuid);
|
||||
|
||||
-- Create arcane audit trigger function
|
||||
CREATE FUNCTION arcane_audit_trigger()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
INSERT INTO arcane_logs (
|
||||
school_id,
|
||||
wizard_id,
|
||||
tower_id,
|
||||
table_name,
|
||||
record_id,
|
||||
spell_operation,
|
||||
old_values,
|
||||
new_values
|
||||
) VALUES (
|
||||
current_setting('app.current_school', true)::uuid,
|
||||
current_setting('app.current_wizard', true)::uuid,
|
||||
current_setting('app.current_tower', true)::uuid,
|
||||
TG_TABLE_NAME,
|
||||
COALESCE(NEW.id, OLD.id),
|
||||
TG_OP,
|
||||
CASE WHEN TG_OP IN ('UPDATE', 'DELETE') THEN to_jsonb(OLD) ELSE NULL END,
|
||||
CASE WHEN TG_OP IN ('INSERT', 'UPDATE') THEN to_jsonb(NEW) ELSE NULL END
|
||||
);
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Create triggers
|
||||
CREATE TRIGGER arcane_audit_wizards AFTER INSERT OR UPDATE OR DELETE ON wizards
|
||||
FOR EACH ROW EXECUTE FUNCTION arcane_audit_trigger();
|
||||
|
||||
CREATE TRIGGER arcane_audit_apprentices AFTER INSERT OR UPDATE OR DELETE ON apprentices
|
||||
FOR EACH ROW EXECUTE FUNCTION arcane_audit_trigger();`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
// Should find all 16 tables
|
||||
expect(result.tables).toHaveLength(16);
|
||||
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
const expectedTables = [
|
||||
'apprentices',
|
||||
'arcane_logs',
|
||||
'gold_payments',
|
||||
'grimoire_types',
|
||||
'grimoires',
|
||||
'magic_schools',
|
||||
'magical_ranks',
|
||||
'patron_sponsorships',
|
||||
'rank_permissions',
|
||||
'scroll_line_items',
|
||||
'spell_lessons',
|
||||
'spell_permissions',
|
||||
'towers',
|
||||
'tuition_scrolls',
|
||||
'wizard_ranks',
|
||||
'wizards',
|
||||
];
|
||||
|
||||
expect(tableNames).toEqual(expectedTables);
|
||||
|
||||
// Should have many relationships
|
||||
expect(result.relationships.length).toBeGreaterThan(30);
|
||||
|
||||
// Should have warnings about unsupported features
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(result.warnings!.length).toBeGreaterThan(0);
|
||||
|
||||
// Verify specific critical relationships exist
|
||||
const hasWizardSchoolFK = result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'wizards' &&
|
||||
r.targetTable === 'magic_schools' &&
|
||||
r.sourceColumn === 'school_id'
|
||||
);
|
||||
expect(hasWizardSchoolFK).toBe(true);
|
||||
|
||||
const hasApprenticeMentorFK = result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'apprentices' &&
|
||||
r.targetTable === 'wizards' &&
|
||||
r.sourceColumn === 'primary_mentor'
|
||||
);
|
||||
expect(hasApprenticeMentorFK).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle ALTER TABLE ENABLE ROW LEVEL SECURITY', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE secure_table (id INTEGER PRIMARY KEY);
|
||||
ALTER TABLE secure_table ENABLE ROW LEVEL SECURITY;
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.warnings).toBeDefined();
|
||||
// The warning should mention row level security
|
||||
expect(
|
||||
result.warnings!.some((w) =>
|
||||
w.toLowerCase().includes('row level security')
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should extract foreign keys even from unparsed tables', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE base (id UUID PRIMARY KEY);
|
||||
|
||||
-- Intentionally malformed to fail parsing
|
||||
CREATE TABLE malformed (
|
||||
id UUID PRIMARY KEY,
|
||||
base_id UUID REFERENCES base(id),
|
||||
FOREIGN KEY (base_id) REFERENCES base(id) ON DELETE CASCADE,
|
||||
value NUMERIC(10,
|
||||
2) -- Missing closing paren will cause parse failure
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
// Should still create the table entry
|
||||
expect(result.tables.map((t) => t.name)).toContain('malformed');
|
||||
|
||||
// Should extract the foreign key
|
||||
const fks = result.relationships.filter(
|
||||
(r) => r.sourceTable === 'malformed'
|
||||
);
|
||||
expect(fks.length).toBeGreaterThan(0);
|
||||
expect(fks[0].targetTable).toBe('base');
|
||||
});
|
||||
});
|
@@ -0,0 +1,330 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('PostgreSQL Real-World Examples', () => {
|
||||
describe('Magical Academy Example', () => {
|
||||
it('should parse the magical academy example with all 16 tables', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE schools(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name text NOT NULL,
|
||||
created_at timestamptz NOT NULL DEFAULT now()
|
||||
);
|
||||
|
||||
CREATE TABLE towers(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES schools(id) ON DELETE CASCADE,
|
||||
name text NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE ranks(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES schools(id) ON DELETE CASCADE,
|
||||
name text NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE spell_permissions(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
spell_type text NOT NULL,
|
||||
casting_level text NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE rank_spell_permissions(
|
||||
rank_id uuid NOT NULL REFERENCES ranks(id) ON DELETE CASCADE,
|
||||
spell_permission_id uuid NOT NULL REFERENCES spell_permissions(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (rank_id, spell_permission_id)
|
||||
);
|
||||
|
||||
CREATE TABLE grimoire_types(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES schools(id) ON DELETE CASCADE,
|
||||
name text NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE wizards(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES schools(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
wizard_name text NOT NULL,
|
||||
email text NOT NULL,
|
||||
UNIQUE (school_id, wizard_name)
|
||||
);
|
||||
|
||||
CREATE FUNCTION enforce_wizard_tower_school()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
-- Function body
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE TABLE wizard_ranks(
|
||||
wizard_id uuid NOT NULL REFERENCES wizards(id) ON DELETE CASCADE,
|
||||
rank_id uuid NOT NULL REFERENCES ranks(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
assigned_at timestamptz NOT NULL DEFAULT now(),
|
||||
PRIMARY KEY (wizard_id, rank_id, tower_id)
|
||||
);
|
||||
|
||||
CREATE TABLE apprentices(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES schools(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
first_name text NOT NULL,
|
||||
last_name text NOT NULL,
|
||||
enrollment_date date NOT NULL,
|
||||
primary_mentor uuid REFERENCES wizards(id),
|
||||
sponsoring_wizard uuid REFERENCES wizards(id)
|
||||
);
|
||||
|
||||
CREATE TABLE spell_lessons(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES schools(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
apprentice_id uuid NOT NULL REFERENCES apprentices(id) ON DELETE CASCADE,
|
||||
instructor_id uuid NOT NULL REFERENCES wizards(id),
|
||||
lesson_date timestamptz NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE grimoires(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES schools(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
apprentice_id uuid NOT NULL REFERENCES apprentices(id) ON DELETE CASCADE,
|
||||
grimoire_type_id uuid NOT NULL REFERENCES grimoire_types(id),
|
||||
author_wizard_id uuid NOT NULL REFERENCES wizards(id),
|
||||
content jsonb NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE tuition_scrolls(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES schools(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
apprentice_id uuid NOT NULL REFERENCES apprentices(id) ON DELETE CASCADE,
|
||||
total_amount numeric(10,2) NOT NULL,
|
||||
status text NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE tuition_items(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
tuition_scroll_id uuid NOT NULL REFERENCES tuition_scrolls(id) ON DELETE CASCADE,
|
||||
description text NOT NULL,
|
||||
amount numeric(10,2) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE patron_sponsorships(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
tuition_scroll_id uuid NOT NULL REFERENCES tuition_scrolls(id) ON DELETE CASCADE,
|
||||
patron_house text NOT NULL,
|
||||
sponsorship_code text NOT NULL,
|
||||
status text NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE gold_payments(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
tuition_scroll_id uuid NOT NULL REFERENCES tuition_scrolls(id) ON DELETE CASCADE,
|
||||
amount numeric(10,2) NOT NULL,
|
||||
payment_date timestamptz NOT NULL DEFAULT now()
|
||||
);
|
||||
|
||||
CREATE TABLE arcane_logs(
|
||||
id bigserial PRIMARY KEY,
|
||||
school_id uuid,
|
||||
wizard_id uuid,
|
||||
tower_id uuid,
|
||||
table_name text NOT NULL,
|
||||
operation text NOT NULL,
|
||||
record_id uuid,
|
||||
changes jsonb,
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
FOREIGN KEY (school_id) REFERENCES schools(id) ON DELETE SET NULL,
|
||||
FOREIGN KEY (wizard_id) REFERENCES wizards(id) ON DELETE SET NULL,
|
||||
FOREIGN KEY (tower_id) REFERENCES towers(id) ON DELETE SET NULL
|
||||
);
|
||||
|
||||
-- Enable RLS
|
||||
ALTER TABLE wizards ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE apprentices ENABLE ROW LEVEL SECURITY;
|
||||
|
||||
-- Create policies
|
||||
CREATE POLICY school_isolation ON wizards
|
||||
FOR ALL TO public
|
||||
USING (school_id = current_setting('app.current_school')::uuid);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
// Should find all 16 tables
|
||||
const expectedTables = [
|
||||
'apprentices',
|
||||
'arcane_logs',
|
||||
'gold_payments',
|
||||
'grimoire_types',
|
||||
'grimoires',
|
||||
'patron_sponsorships',
|
||||
'rank_spell_permissions',
|
||||
'ranks',
|
||||
'schools',
|
||||
'spell_lessons',
|
||||
'spell_permissions',
|
||||
'towers',
|
||||
'tuition_items',
|
||||
'tuition_scrolls',
|
||||
'wizard_ranks',
|
||||
'wizards',
|
||||
];
|
||||
|
||||
expect(result.tables).toHaveLength(16);
|
||||
expect(result.tables.map((t) => t.name).sort()).toEqual(
|
||||
expectedTables
|
||||
);
|
||||
|
||||
// Verify key relationships exist
|
||||
const relationships = result.relationships;
|
||||
|
||||
// Check some critical relationships
|
||||
expect(
|
||||
relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'wizards' &&
|
||||
r.targetTable === 'schools' &&
|
||||
r.sourceColumn === 'school_id'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'wizard_ranks' &&
|
||||
r.targetTable === 'wizards' &&
|
||||
r.sourceColumn === 'wizard_id'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'apprentices' &&
|
||||
r.targetTable === 'wizards' &&
|
||||
r.sourceColumn === 'primary_mentor'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
// Should have warnings about functions, policies, and RLS
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(result.warnings!.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Enchanted Bazaar Example', () => {
|
||||
it('should parse the enchanted bazaar example with functions and policies', async () => {
|
||||
const sql = `
|
||||
-- Enchanted Bazaar tables with complex features
|
||||
CREATE TABLE merchants(
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE artifacts(
|
||||
id SERIAL PRIMARY KEY,
|
||||
merchant_id INTEGER REFERENCES merchants(id) ON DELETE CASCADE,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
price DECIMAL(10, 2) NOT NULL CHECK (price >= 0),
|
||||
enchantment_charges INTEGER DEFAULT 0 CHECK (enchantment_charges >= 0)
|
||||
);
|
||||
|
||||
-- Function that should be skipped
|
||||
CREATE FUNCTION consume_charges(artifact_id INTEGER, charges_used INTEGER)
|
||||
RETURNS VOID AS $$
|
||||
BEGIN
|
||||
UPDATE artifacts SET enchantment_charges = enchantment_charges - charges_used WHERE id = artifact_id;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE TABLE trades(
|
||||
id SERIAL PRIMARY KEY,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
status VARCHAR(50) DEFAULT 'negotiating'
|
||||
);
|
||||
|
||||
CREATE TABLE trade_items(
|
||||
trade_id INTEGER REFERENCES trades(id) ON DELETE CASCADE,
|
||||
artifact_id INTEGER REFERENCES artifacts(id),
|
||||
quantity INTEGER NOT NULL CHECK (quantity > 0),
|
||||
agreed_price DECIMAL(10, 2) NOT NULL,
|
||||
PRIMARY KEY (trade_id, artifact_id)
|
||||
);
|
||||
|
||||
-- Enable RLS
|
||||
ALTER TABLE artifacts ENABLE ROW LEVEL SECURITY;
|
||||
|
||||
-- Create policy
|
||||
CREATE POLICY merchant_artifacts ON artifacts
|
||||
FOR ALL TO merchants
|
||||
USING (merchant_id = current_user_id());
|
||||
|
||||
-- Create trigger
|
||||
CREATE TRIGGER charge_consumption_trigger
|
||||
AFTER INSERT ON trade_items
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION consume_charges();
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
// Should parse all tables despite functions, policies, and triggers
|
||||
expect(result.tables.length).toBeGreaterThanOrEqual(4);
|
||||
|
||||
// Check for specific tables
|
||||
const tableNames = result.tables.map((t) => t.name);
|
||||
expect(tableNames).toContain('merchants');
|
||||
expect(tableNames).toContain('artifacts');
|
||||
expect(tableNames).toContain('trades');
|
||||
expect(tableNames).toContain('trade_items');
|
||||
|
||||
// Check relationships
|
||||
if (tableNames.includes('marketplace_tokens')) {
|
||||
// Real file relationships
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'marketplace_listings' &&
|
||||
r.targetTable === 'inventory_items'
|
||||
)
|
||||
).toBe(true);
|
||||
} else {
|
||||
// Mock data relationships
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'artifacts' &&
|
||||
r.targetTable === 'merchants'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'trade_items' &&
|
||||
r.targetTable === 'trades'
|
||||
)
|
||||
).toBe(true);
|
||||
}
|
||||
|
||||
// Should have warnings about unsupported features
|
||||
if (result.warnings) {
|
||||
expect(
|
||||
result.warnings.some(
|
||||
(w) =>
|
||||
w.includes('Function') ||
|
||||
w.includes('Policy') ||
|
||||
w.includes('Trigger') ||
|
||||
w.includes('ROW LEVEL SECURITY')
|
||||
)
|
||||
).toBe(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
@@ -0,0 +1,145 @@
|
||||
import { describe, it, expect, vi, afterEach } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
import * as improvedModule from '../postgresql-improved';
|
||||
|
||||
// Spy on the improved parser
|
||||
const fromPostgresImprovedSpy = vi.spyOn(
|
||||
improvedModule,
|
||||
'fromPostgresImproved'
|
||||
);
|
||||
|
||||
describe('PostgreSQL Parser Integration', () => {
|
||||
afterEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should use standard parser for simple SQL', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name VARCHAR(255)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('wizards');
|
||||
|
||||
// Should NOT use improved parser for simple SQL
|
||||
expect(fromPostgresImprovedSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should fall back to improved parser when functions are present', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (id INTEGER PRIMARY KEY);
|
||||
|
||||
CREATE FUNCTION get_wizard() RETURNS INTEGER AS $$
|
||||
BEGIN
|
||||
RETURN 1;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('wizards');
|
||||
|
||||
// Should use improved parser when functions are detected
|
||||
expect(fromPostgresImprovedSpy).toHaveBeenCalledWith(sql);
|
||||
});
|
||||
|
||||
it('should fall back to improved parser when policies are present', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE ancient_scrolls (id INTEGER PRIMARY KEY);
|
||||
|
||||
CREATE POLICY wizard_policy ON ancient_scrolls
|
||||
FOR SELECT
|
||||
USING (true);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
// Should use improved parser when policies are detected
|
||||
expect(fromPostgresImprovedSpy).toHaveBeenCalledWith(sql);
|
||||
});
|
||||
|
||||
it('should fall back to improved parser when RLS is present', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE enchanted_vault (id INTEGER PRIMARY KEY);
|
||||
ALTER TABLE enchanted_vault ENABLE ROW LEVEL SECURITY;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
// Should use improved parser when RLS is detected
|
||||
expect(fromPostgresImprovedSpy).toHaveBeenCalledWith(sql);
|
||||
});
|
||||
|
||||
it('should fall back to improved parser when triggers are present', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE spell_log (id INTEGER PRIMARY KEY);
|
||||
|
||||
CREATE TRIGGER spell_trigger
|
||||
AFTER INSERT ON spell_log
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION spell_func();
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
// Should use improved parser when triggers are detected
|
||||
expect(fromPostgresImprovedSpy).toHaveBeenCalledWith(sql);
|
||||
});
|
||||
|
||||
it('should preserve all relationships when using improved parser', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE guilds (id INTEGER PRIMARY KEY);
|
||||
CREATE TABLE wizards (
|
||||
id INTEGER PRIMARY KEY,
|
||||
guild_id INTEGER REFERENCES guilds(id)
|
||||
);
|
||||
|
||||
-- This function should trigger improved parser
|
||||
CREATE FUNCTION dummy() RETURNS VOID AS $$ BEGIN END; $$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE TABLE quests (
|
||||
id INTEGER PRIMARY KEY,
|
||||
wizard_id INTEGER REFERENCES wizards(id),
|
||||
guild_id INTEGER REFERENCES guilds(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(3);
|
||||
expect(result.relationships).toHaveLength(3);
|
||||
|
||||
// Verify all relationships are preserved
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) => r.sourceTable === 'wizards' && r.targetTable === 'guilds'
|
||||
)
|
||||
).toBe(true);
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) => r.sourceTable === 'quests' && r.targetTable === 'wizards'
|
||||
)
|
||||
).toBe(true);
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) => r.sourceTable === 'quests' && r.targetTable === 'guilds'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
// Should have used improved parser
|
||||
expect(fromPostgresImprovedSpy).toHaveBeenCalled();
|
||||
});
|
||||
});
|
@@ -0,0 +1,491 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('PostgreSQL Parser', () => {
|
||||
describe('Basic Table Parsing', () => {
|
||||
it('should parse simple tables with basic data types', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
magic_email TEXT UNIQUE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('wizards');
|
||||
expect(result.tables[0].columns).toHaveLength(4);
|
||||
expect(result.tables[0].columns[0].name).toBe('id');
|
||||
expect(result.tables[0].columns[0].type).toBe('INTEGER');
|
||||
expect(result.tables[0].columns[0].primaryKey).toBe(true);
|
||||
});
|
||||
|
||||
it('should parse multiple tables', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE guilds (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE mages (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL,
|
||||
guild_id INTEGER REFERENCES guilds(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.tables.map((t) => t.name).sort()).toEqual([
|
||||
'guilds',
|
||||
'mages',
|
||||
]);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
expect(result.relationships[0].sourceTable).toBe('mages');
|
||||
expect(result.relationships[0].targetTable).toBe('guilds');
|
||||
});
|
||||
|
||||
it('should handle IF NOT EXISTS clause', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE IF NOT EXISTS potions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name TEXT NOT NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('potions');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Complex Data Types', () => {
|
||||
it('should handle UUID and special PostgreSQL types', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE special_types (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
data JSONB,
|
||||
tags TEXT[],
|
||||
location POINT,
|
||||
mana_cost MONEY,
|
||||
binary_data BYTEA
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const columns = result.tables[0].columns;
|
||||
expect(columns.find((c) => c.name === 'id')?.type).toBe('UUID');
|
||||
expect(columns.find((c) => c.name === 'data')?.type).toBe('JSONB');
|
||||
expect(columns.find((c) => c.name === 'tags')?.type).toBe('TEXT[]');
|
||||
});
|
||||
|
||||
it('should handle numeric with precision', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE treasury (
|
||||
id SERIAL PRIMARY KEY,
|
||||
amount NUMERIC(10, 2),
|
||||
percentage DECIMAL(5, 2),
|
||||
big_number BIGINT
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const columns = result.tables[0].columns;
|
||||
// Parser limitation: scale on separate line is not captured
|
||||
const amountType = columns.find((c) => c.name === 'amount')?.type;
|
||||
expect(amountType).toMatch(/^NUMERIC/);
|
||||
});
|
||||
|
||||
it('should handle multi-line numeric definitions', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE multi_line (
|
||||
id INTEGER PRIMARY KEY,
|
||||
value NUMERIC(10,
|
||||
2),
|
||||
another_col TEXT
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].columns).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Foreign Key Relationships', () => {
|
||||
it('should parse inline foreign keys', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE realms (id INTEGER PRIMARY KEY);
|
||||
CREATE TABLE sanctuaries (
|
||||
id INTEGER PRIMARY KEY,
|
||||
realm_id INTEGER REFERENCES realms(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
expect(result.relationships[0].sourceTable).toBe('sanctuaries');
|
||||
expect(result.relationships[0].targetTable).toBe('realms');
|
||||
expect(result.relationships[0].sourceColumn).toBe('realm_id');
|
||||
expect(result.relationships[0].targetColumn).toBe('id');
|
||||
});
|
||||
|
||||
it('should parse table-level foreign key constraints', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE enchantment_orders (id INTEGER PRIMARY KEY);
|
||||
CREATE TABLE enchantment_items (
|
||||
id INTEGER PRIMARY KEY,
|
||||
order_id INTEGER,
|
||||
CONSTRAINT fk_order FOREIGN KEY (order_id) REFERENCES enchantment_orders(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
expect(result.relationships[0].sourceTable).toBe(
|
||||
'enchantment_items'
|
||||
);
|
||||
expect(result.relationships[0].targetTable).toBe(
|
||||
'enchantment_orders'
|
||||
);
|
||||
});
|
||||
|
||||
it('should parse composite foreign keys', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE magic_schools (id UUID PRIMARY KEY);
|
||||
CREATE TABLE quests (
|
||||
school_id UUID,
|
||||
quest_id UUID,
|
||||
name TEXT,
|
||||
PRIMARY KEY (school_id, quest_id),
|
||||
FOREIGN KEY (school_id) REFERENCES magic_schools(id)
|
||||
);
|
||||
CREATE TABLE rituals (
|
||||
id UUID PRIMARY KEY,
|
||||
school_id UUID,
|
||||
quest_id UUID,
|
||||
FOREIGN KEY (school_id, quest_id) REFERENCES quests(school_id, quest_id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(3);
|
||||
// Composite foreign keys are not fully supported
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
expect(result.relationships[0].sourceTable).toBe('quests');
|
||||
expect(result.relationships[0].targetTable).toBe('magic_schools');
|
||||
});
|
||||
|
||||
it('should handle ON DELETE and ON UPDATE clauses', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (id INTEGER PRIMARY KEY);
|
||||
CREATE TABLE scrolls (
|
||||
id INTEGER PRIMARY KEY,
|
||||
wizard_id INTEGER REFERENCES wizards(id) ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
// ON DELETE/UPDATE clauses are not preserved in output
|
||||
});
|
||||
});
|
||||
|
||||
describe('Constraints', () => {
|
||||
it('should parse unique constraints', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (
|
||||
id INTEGER PRIMARY KEY,
|
||||
magic_email TEXT UNIQUE,
|
||||
wizard_name TEXT,
|
||||
UNIQUE (wizard_name)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const columns = result.tables[0].columns;
|
||||
expect(columns.find((c) => c.name === 'magic_email')?.unique).toBe(
|
||||
true
|
||||
);
|
||||
});
|
||||
|
||||
it('should parse check constraints', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE potions (
|
||||
id INTEGER PRIMARY KEY,
|
||||
mana_cost DECIMAL CHECK (mana_cost > 0),
|
||||
quantity INTEGER,
|
||||
CONSTRAINT positive_quantity CHECK (quantity >= 0)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].columns).toHaveLength(3);
|
||||
});
|
||||
|
||||
it('should parse composite primary keys', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE enchantment_items (
|
||||
order_id INTEGER,
|
||||
potion_id INTEGER,
|
||||
quantity INTEGER,
|
||||
PRIMARY KEY (order_id, potion_id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const columns = result.tables[0].columns;
|
||||
expect(columns.filter((c) => c.primaryKey)).toHaveLength(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Generated Columns', () => {
|
||||
it('should handle GENERATED ALWAYS AS IDENTITY', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE items (
|
||||
id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
|
||||
name TEXT
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].columns[0].increment).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle GENERATED BY DEFAULT AS IDENTITY', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE items (
|
||||
id INTEGER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY,
|
||||
name TEXT
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].columns[0].increment).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle computed columns', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE calculations (
|
||||
id INTEGER PRIMARY KEY,
|
||||
value1 NUMERIC,
|
||||
value2 NUMERIC,
|
||||
total NUMERIC GENERATED ALWAYS AS (value1 + value2) STORED
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].columns).toHaveLength(4);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Unsupported Statements', () => {
|
||||
it('should skip and warn about functions', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (id INTEGER PRIMARY KEY);
|
||||
|
||||
CREATE FUNCTION get_wizard_name(wizard_id INTEGER)
|
||||
RETURNS TEXT AS $$
|
||||
BEGIN
|
||||
RETURN 'test';
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE TABLE scrolls (
|
||||
id INTEGER PRIMARY KEY,
|
||||
wizard_id INTEGER REFERENCES wizards(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(result.warnings!.some((w) => w.includes('Function'))).toBe(
|
||||
true
|
||||
);
|
||||
});
|
||||
|
||||
it('should skip and warn about triggers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE spell_audit_log (id SERIAL PRIMARY KEY);
|
||||
|
||||
CREATE TRIGGER spell_audit_trigger
|
||||
AFTER INSERT ON spell_audit_log
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION spell_audit_function();
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(result.warnings!.some((w) => w.includes('Trigger'))).toBe(
|
||||
true
|
||||
);
|
||||
});
|
||||
|
||||
it('should skip and warn about policies', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE arcane_secrets (id INTEGER PRIMARY KEY);
|
||||
|
||||
CREATE POLICY wizard_policy ON arcane_secrets
|
||||
FOR SELECT
|
||||
TO public
|
||||
USING (true);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(result.warnings!.some((w) => w.includes('Policy'))).toBe(
|
||||
true
|
||||
);
|
||||
});
|
||||
|
||||
it('should skip and warn about RLS', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE enchanted_vault (id INTEGER PRIMARY KEY);
|
||||
ALTER TABLE enchanted_vault ENABLE ROW LEVEL SECURITY;
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(
|
||||
result.warnings!.some((w) =>
|
||||
w.toLowerCase().includes('row level security')
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge Cases', () => {
|
||||
it('should handle tables after failed function parsing', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE before_enchantment (id INTEGER PRIMARY KEY);
|
||||
|
||||
CREATE FUNCTION complex_spell()
|
||||
RETURNS TABLE(id INTEGER, name TEXT) AS $$
|
||||
BEGIN
|
||||
RETURN QUERY SELECT 1, 'test';
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE TABLE after_enchantment (
|
||||
id INTEGER PRIMARY KEY,
|
||||
ref_id INTEGER REFERENCES before_enchantment(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.tables.map((t) => t.name).sort()).toEqual([
|
||||
'after_enchantment',
|
||||
'before_enchantment',
|
||||
]);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should handle empty or null input', async () => {
|
||||
const result1 = await fromPostgresImproved('');
|
||||
expect(result1.tables).toHaveLength(0);
|
||||
expect(result1.relationships).toHaveLength(0);
|
||||
|
||||
const result2 = await fromPostgresImproved(' \n ');
|
||||
expect(result2.tables).toHaveLength(0);
|
||||
expect(result2.relationships).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should handle comments in various positions', async () => {
|
||||
const sql = `
|
||||
-- This is a comment
|
||||
CREATE TABLE /* inline comment */ wizards (
|
||||
id INTEGER PRIMARY KEY, -- end of line comment
|
||||
/* multi-line
|
||||
comment */
|
||||
name TEXT
|
||||
);
|
||||
-- Another comment
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('wizards');
|
||||
expect(result.tables[0].columns).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should handle dollar-quoted strings', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE spell_messages (
|
||||
id INTEGER PRIMARY KEY,
|
||||
template TEXT DEFAULT $tag$Hello, 'world'!$tag$,
|
||||
content TEXT
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].columns).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Foreign Key Extraction from Unparsed Tables', () => {
|
||||
it('should extract foreign keys from tables that fail to parse', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE ancient_artifact (id UUID PRIMARY KEY);
|
||||
|
||||
-- This table has syntax that might fail parsing
|
||||
CREATE TABLE mystical_formula (
|
||||
id UUID PRIMARY KEY,
|
||||
artifact_ref UUID REFERENCES ancient_artifact(id),
|
||||
value NUMERIC(10,
|
||||
2) GENERATED ALWAYS AS (1 + 1) STORED,
|
||||
FOREIGN KEY (artifact_ref) REFERENCES ancient_artifact(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE enchanted_relic (
|
||||
id UUID PRIMARY KEY,
|
||||
formula_ref UUID REFERENCES mystical_formula(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(3);
|
||||
// Should find foreign keys even if mystical_formula fails to parse
|
||||
expect(result.relationships.length).toBeGreaterThanOrEqual(2);
|
||||
});
|
||||
});
|
||||
});
|
@@ -0,0 +1,199 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('PostgreSQL Parser Regression Tests', () => {
|
||||
it('should parse all 16 tables from the magical academy example', async () => {
|
||||
// This is a regression test for the issue where 3 tables were missing
|
||||
const sql = `
|
||||
-- Core tables
|
||||
CREATE TABLE magic_schools(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name text NOT NULL,
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
updated_at timestamptz NOT NULL DEFAULT now()
|
||||
);
|
||||
|
||||
CREATE TABLE towers(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES magic_schools(id) ON DELETE CASCADE,
|
||||
name text NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE wizards(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES magic_schools(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
wizard_name text NOT NULL,
|
||||
magic_email text NOT NULL,
|
||||
UNIQUE (school_id, wizard_name)
|
||||
);
|
||||
|
||||
-- This function should not prevent the wizards table from being parsed
|
||||
CREATE FUNCTION enforce_wizard_tower_school()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE TABLE wizard_ranks(
|
||||
wizard_id uuid NOT NULL REFERENCES wizards(id) ON DELETE CASCADE,
|
||||
rank_id uuid NOT NULL REFERENCES magical_ranks(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (wizard_id, rank_id, tower_id)
|
||||
);
|
||||
|
||||
-- Another function that should be skipped
|
||||
CREATE FUNCTION another_function() RETURNS void AS $$
|
||||
BEGIN
|
||||
-- Do nothing
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE TABLE magical_ranks(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES magic_schools(id) ON DELETE CASCADE,
|
||||
name text NOT NULL
|
||||
);
|
||||
|
||||
-- Row level security should not break parsing
|
||||
ALTER TABLE wizards ENABLE ROW LEVEL SECURITY;
|
||||
|
||||
CREATE TABLE spell_logs(
|
||||
id bigserial PRIMARY KEY,
|
||||
school_id uuid,
|
||||
wizard_id uuid,
|
||||
action text NOT NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
// Should find all 6 tables
|
||||
expect(result.tables).toHaveLength(6);
|
||||
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
expect(tableNames).toEqual([
|
||||
'magic_schools',
|
||||
'magical_ranks',
|
||||
'spell_logs',
|
||||
'towers',
|
||||
'wizard_ranks',
|
||||
'wizards',
|
||||
]);
|
||||
|
||||
if (result.warnings) {
|
||||
expect(result.warnings.length).toBeGreaterThan(0);
|
||||
expect(
|
||||
result.warnings.some(
|
||||
(w) => w.includes('Function') || w.includes('security')
|
||||
)
|
||||
).toBe(true);
|
||||
} else {
|
||||
expect(result.tables).toHaveLength(6);
|
||||
}
|
||||
});
|
||||
|
||||
it('should handle tables with complex syntax that fail parsing', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE simple_table (
|
||||
id uuid PRIMARY KEY,
|
||||
name text NOT NULL
|
||||
);
|
||||
|
||||
-- This table has complex syntax that might fail parsing
|
||||
CREATE TABLE complex_table (
|
||||
id uuid PRIMARY KEY,
|
||||
value numeric(10,
|
||||
2), -- Multi-line numeric
|
||||
computed numeric(5,2) GENERATED ALWAYS AS (value * 2) STORED,
|
||||
UNIQUE (id, value)
|
||||
);
|
||||
|
||||
CREATE TABLE another_table (
|
||||
id uuid PRIMARY KEY,
|
||||
complex_id uuid REFERENCES complex_table(id),
|
||||
simple_id uuid REFERENCES simple_table(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
// Should find all 3 tables even if complex_table fails to parse
|
||||
expect(result.tables).toHaveLength(3);
|
||||
expect(result.tables.map((t) => t.name).sort()).toEqual([
|
||||
'another_table',
|
||||
'complex_table',
|
||||
'simple_table',
|
||||
]);
|
||||
|
||||
// Should extract foreign keys even from unparsed tables
|
||||
const fksFromAnother = result.relationships.filter(
|
||||
(r) => r.sourceTable === 'another_table'
|
||||
);
|
||||
expect(fksFromAnother).toHaveLength(2);
|
||||
expect(
|
||||
fksFromAnother.some((fk) => fk.targetTable === 'complex_table')
|
||||
).toBe(true);
|
||||
expect(
|
||||
fksFromAnother.some((fk) => fk.targetTable === 'simple_table')
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should count relationships correctly for multi-tenant system', async () => {
|
||||
// Simplified version focusing on relationship counting
|
||||
const sql = `
|
||||
CREATE TABLE tenants(id uuid PRIMARY KEY);
|
||||
CREATE TABLE branches(
|
||||
id uuid PRIMARY KEY,
|
||||
tenant_id uuid NOT NULL REFERENCES tenants(id)
|
||||
);
|
||||
CREATE TABLE roles(
|
||||
id uuid PRIMARY KEY,
|
||||
tenant_id uuid NOT NULL REFERENCES tenants(id)
|
||||
);
|
||||
CREATE TABLE permissions(id uuid PRIMARY KEY);
|
||||
CREATE TABLE role_permissions(
|
||||
role_id uuid NOT NULL REFERENCES roles(id),
|
||||
permission_id uuid NOT NULL REFERENCES permissions(id),
|
||||
PRIMARY KEY (role_id, permission_id)
|
||||
);
|
||||
CREATE TABLE record_types(
|
||||
id uuid PRIMARY KEY,
|
||||
tenant_id uuid NOT NULL REFERENCES tenants(id)
|
||||
);
|
||||
CREATE TABLE users(
|
||||
id uuid PRIMARY KEY,
|
||||
tenant_id uuid NOT NULL REFERENCES tenants(id),
|
||||
branch_id uuid NOT NULL REFERENCES branches(id)
|
||||
);
|
||||
CREATE TABLE user_roles(
|
||||
user_id uuid NOT NULL REFERENCES users(id),
|
||||
role_id uuid NOT NULL REFERENCES roles(id),
|
||||
branch_id uuid NOT NULL REFERENCES branches(id),
|
||||
PRIMARY KEY (user_id, role_id, branch_id)
|
||||
);
|
||||
CREATE TABLE patients(
|
||||
id uuid PRIMARY KEY,
|
||||
tenant_id uuid NOT NULL REFERENCES tenants(id),
|
||||
branch_id uuid NOT NULL REFERENCES branches(id),
|
||||
primary_physician uuid REFERENCES users(id),
|
||||
referring_physician uuid REFERENCES users(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
// Count expected relationships:
|
||||
// branches: 1 (tenant_id -> tenants)
|
||||
// roles: 1 (tenant_id -> tenants)
|
||||
// role_permissions: 2 (role_id -> roles, permission_id -> permissions)
|
||||
// record_types: 1 (tenant_id -> tenants)
|
||||
// users: 2 (tenant_id -> tenants, branch_id -> branches)
|
||||
// user_roles: 3 (user_id -> users, role_id -> roles, branch_id -> branches)
|
||||
// patients: 4 (tenant_id -> tenants, branch_id -> branches, primary_physician -> users, referring_physician -> users)
|
||||
// Total: 14
|
||||
|
||||
expect(result.relationships).toHaveLength(14);
|
||||
});
|
||||
});
|
@@ -0,0 +1,149 @@
|
||||
import { describe, it, expect } from 'vitest';
import { fromPostgresImproved } from '../postgresql-improved';

// Verifies that the importer normalizes PostgreSQL-specific type aliases
// (serial4, int4, bool, timestamptz, ...) to their canonical display types,
// and that nullability / defaults / auto-increment flags survive the import.
describe('Activities table import - PostgreSQL specific types', () => {
    it('should correctly parse the activities table with PostgreSQL-specific types', async () => {
        const sql = `
CREATE TABLE public.activities (
    id serial4 NOT NULL,
    user_id int4 NOT NULL,
    workflow_id int4 NULL,
    task_id int4 NULL,
    "action" character varying(50) NOT NULL,
    description text NOT NULL,
    created_at timestamp DEFAULT now() NOT NULL,
    is_read bool DEFAULT false NOT NULL,
    CONSTRAINT activities_pkey PRIMARY KEY (id)
);`;

        const result = await fromPostgresImproved(sql);

        expect(result.tables).toHaveLength(1);

        const table = result.tables[0];
        expect(table.name).toBe('activities');
        expect(table.columns).toHaveLength(8);

        // Check each column
        const columns = table.columns;

        // id column - serial4 should become INTEGER with auto-increment
        const idCol = columns.find((c) => c.name === 'id');
        expect(idCol).toBeDefined();
        expect(idCol?.type).toBe('INTEGER');
        expect(idCol?.primaryKey).toBe(true);
        expect(idCol?.increment).toBe(true);
        expect(idCol?.nullable).toBe(false);

        // user_id column - int4 should become INTEGER
        const userIdCol = columns.find((c) => c.name === 'user_id');
        expect(userIdCol).toBeDefined();
        expect(userIdCol?.type).toBe('INTEGER');
        expect(userIdCol?.nullable).toBe(false);

        // workflow_id column - int4 NULL
        const workflowIdCol = columns.find((c) => c.name === 'workflow_id');
        expect(workflowIdCol).toBeDefined();
        expect(workflowIdCol?.type).toBe('INTEGER');
        expect(workflowIdCol?.nullable).toBe(true);

        // task_id column - int4 NULL
        const taskIdCol = columns.find((c) => c.name === 'task_id');
        expect(taskIdCol).toBeDefined();
        expect(taskIdCol?.type).toBe('INTEGER');
        expect(taskIdCol?.nullable).toBe(true);

        // action column - character varying(50); quoted because "action"
        // is a keyword-like identifier
        const actionCol = columns.find((c) => c.name === 'action');
        expect(actionCol).toBeDefined();
        expect(actionCol?.type).toBe('VARCHAR(50)');
        expect(actionCol?.nullable).toBe(false);

        // description column - text
        const descriptionCol = columns.find((c) => c.name === 'description');
        expect(descriptionCol).toBeDefined();
        expect(descriptionCol?.type).toBe('TEXT');
        expect(descriptionCol?.nullable).toBe(false);

        // created_at column - timestamp with default
        const createdAtCol = columns.find((c) => c.name === 'created_at');
        expect(createdAtCol).toBeDefined();
        expect(createdAtCol?.type).toBe('TIMESTAMP');
        expect(createdAtCol?.nullable).toBe(false);
        expect(createdAtCol?.default).toContain('NOW');

        // is_read column - bool with default
        const isReadCol = columns.find((c) => c.name === 'is_read');
        expect(isReadCol).toBeDefined();
        expect(isReadCol?.type).toBe('BOOLEAN');
        expect(isReadCol?.nullable).toBe(false);
        expect(isReadCol?.default).toBe('FALSE');
    });

    it('should handle PostgreSQL type aliases correctly', async () => {
        // One column per alias family: serial*, int*, bool/boolean,
        // character varying / character / text, timestamp(tz), date/time, json(b).
        const sql = `
CREATE TABLE type_test (
    id serial4,
    small_id serial2,
    big_id serial8,
    int_col int4,
    small_int smallint,
    big_int int8,
    bool_col bool,
    boolean_col boolean,
    varchar_col character varying(100),
    char_col character(10),
    text_col text,
    timestamp_col timestamp,
    timestamptz_col timestamptz,
    date_col date,
    time_col time,
    json_col json,
    jsonb_col jsonb
);`;

        const result = await fromPostgresImproved(sql);
        const table = result.tables[0];
        const cols = table.columns;

        // Check serial types
        expect(cols.find((c) => c.name === 'id')?.type).toBe('INTEGER');
        expect(cols.find((c) => c.name === 'id')?.increment).toBe(true);
        expect(cols.find((c) => c.name === 'small_id')?.type).toBe('SMALLINT');
        expect(cols.find((c) => c.name === 'small_id')?.increment).toBe(true);
        expect(cols.find((c) => c.name === 'big_id')?.type).toBe('BIGINT');
        expect(cols.find((c) => c.name === 'big_id')?.increment).toBe(true);

        // Check integer types
        expect(cols.find((c) => c.name === 'int_col')?.type).toBe('INTEGER');
        expect(cols.find((c) => c.name === 'small_int')?.type).toBe('SMALLINT');
        expect(cols.find((c) => c.name === 'big_int')?.type).toBe('BIGINT');

        // Check boolean types
        expect(cols.find((c) => c.name === 'bool_col')?.type).toBe('BOOLEAN');
        expect(cols.find((c) => c.name === 'boolean_col')?.type).toBe(
            'BOOLEAN'
        );

        // Check string types
        expect(cols.find((c) => c.name === 'varchar_col')?.type).toBe(
            'VARCHAR(100)'
        );
        expect(cols.find((c) => c.name === 'char_col')?.type).toBe('CHAR(10)');
        expect(cols.find((c) => c.name === 'text_col')?.type).toBe('TEXT');

        // Check timestamp types
        expect(cols.find((c) => c.name === 'timestamp_col')?.type).toBe(
            'TIMESTAMP'
        );
        expect(cols.find((c) => c.name === 'timestamptz_col')?.type).toBe(
            'TIMESTAMPTZ'
        );

        // Check other types
        expect(cols.find((c) => c.name === 'date_col')?.type).toBe('DATE');
        expect(cols.find((c) => c.name === 'time_col')?.type).toBe('TIME');
        expect(cols.find((c) => c.name === 'json_col')?.type).toBe('JSON');
        expect(cols.find((c) => c.name === 'jsonb_col')?.type).toBe('JSONB');
    });
});
|
@@ -0,0 +1,307 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('ALTER TABLE FOREIGN KEY parsing with fallback', () => {
|
||||
it('should parse foreign keys from ALTER TABLE ONLY statements with DEFERRABLE', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "public"."wizard" (
|
||||
"id" bigint NOT NULL,
|
||||
"name" character varying(255) NOT NULL,
|
||||
CONSTRAINT "wizard_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
CREATE TABLE "public"."spellbook" (
|
||||
"id" integer NOT NULL,
|
||||
"wizard_id" bigint NOT NULL,
|
||||
"title" character varying(254) NOT NULL,
|
||||
CONSTRAINT "spellbook_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
ALTER TABLE ONLY "public"."spellbook" ADD CONSTRAINT "spellbook_wizard_id_fk" FOREIGN KEY (wizard_id) REFERENCES wizard(id) DEFERRABLE INITIALLY DEFERRED DEFERRABLE;
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const fk = result.relationships[0];
|
||||
expect(fk.sourceTable).toBe('spellbook');
|
||||
expect(fk.targetTable).toBe('wizard');
|
||||
expect(fk.sourceColumn).toBe('wizard_id');
|
||||
expect(fk.targetColumn).toBe('id');
|
||||
expect(fk.name).toBe('spellbook_wizard_id_fk');
|
||||
});
|
||||
|
||||
it('should parse foreign keys without schema qualification', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE dragon (
|
||||
id UUID PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE dragon_rider (
|
||||
id UUID PRIMARY KEY,
|
||||
rider_name VARCHAR(100) NOT NULL,
|
||||
dragon_id UUID NOT NULL
|
||||
);
|
||||
|
||||
-- Without ONLY keyword and without schema
|
||||
ALTER TABLE dragon_rider ADD CONSTRAINT dragon_rider_dragon_fk FOREIGN KEY (dragon_id) REFERENCES dragon(id);
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const fk = result.relationships[0];
|
||||
expect(fk.sourceTable).toBe('dragon_rider');
|
||||
expect(fk.targetTable).toBe('dragon');
|
||||
expect(fk.sourceColumn).toBe('dragon_id');
|
||||
expect(fk.targetColumn).toBe('id');
|
||||
expect(fk.sourceSchema).toBe('public');
|
||||
expect(fk.targetSchema).toBe('public');
|
||||
});
|
||||
|
||||
it('should parse foreign keys with mixed schema specifications', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "magic_school"."instructor" (
|
||||
"id" bigint NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
CONSTRAINT "instructor_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
CREATE TABLE "public"."apprentice" (
|
||||
"id" integer NOT NULL,
|
||||
"name" varchar(255) NOT NULL,
|
||||
"instructor_id" bigint NOT NULL,
|
||||
CONSTRAINT "apprentice_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- Source table with public schema, target table with magic_school schema
|
||||
ALTER TABLE ONLY "public"."apprentice" ADD CONSTRAINT "apprentice_instructor_fk" FOREIGN KEY (instructor_id) REFERENCES "magic_school"."instructor"(id) ON DELETE CASCADE;
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const fk = result.relationships[0];
|
||||
expect(fk.sourceTable).toBe('apprentice');
|
||||
expect(fk.targetTable).toBe('instructor');
|
||||
expect(fk.sourceSchema).toBe('public');
|
||||
expect(fk.targetSchema).toBe('magic_school');
|
||||
expect(fk.sourceColumn).toBe('instructor_id');
|
||||
expect(fk.targetColumn).toBe('id');
|
||||
});
|
||||
|
||||
it('should parse foreign keys with various constraint options', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE potion (
|
||||
id UUID PRIMARY KEY,
|
||||
name VARCHAR(100)
|
||||
);
|
||||
|
||||
CREATE TABLE ingredient (
|
||||
id UUID PRIMARY KEY,
|
||||
name VARCHAR(100)
|
||||
);
|
||||
|
||||
CREATE TABLE potion_ingredient (
|
||||
id SERIAL PRIMARY KEY,
|
||||
potion_id UUID NOT NULL,
|
||||
ingredient_id UUID NOT NULL,
|
||||
quantity INTEGER DEFAULT 1
|
||||
);
|
||||
|
||||
-- Different variations of ALTER TABLE foreign key syntax
|
||||
ALTER TABLE potion_ingredient ADD CONSTRAINT potion_ingredient_potion_fk FOREIGN KEY (potion_id) REFERENCES potion(id) ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
ALTER TABLE ONLY potion_ingredient ADD CONSTRAINT potion_ingredient_ingredient_fk FOREIGN KEY (ingredient_id) REFERENCES ingredient(id) DEFERRABLE;
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(3);
|
||||
expect(result.relationships).toHaveLength(2);
|
||||
|
||||
// Check first FK (with ON DELETE CASCADE ON UPDATE CASCADE)
|
||||
const potionFK = result.relationships.find(
|
||||
(r) => r.sourceColumn === 'potion_id'
|
||||
);
|
||||
expect(potionFK).toBeDefined();
|
||||
expect(potionFK?.targetTable).toBe('potion');
|
||||
|
||||
// Check second FK (with DEFERRABLE)
|
||||
const ingredientFK = result.relationships.find(
|
||||
(r) => r.sourceColumn === 'ingredient_id'
|
||||
);
|
||||
expect(ingredientFK).toBeDefined();
|
||||
expect(ingredientFK?.targetTable).toBe('ingredient');
|
||||
});
|
||||
|
||||
it('should handle quoted and unquoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "wizard_tower" (
|
||||
id BIGINT PRIMARY KEY,
|
||||
"tower_name" VARCHAR(255)
|
||||
);
|
||||
|
||||
CREATE TABLE wizard_resident (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100),
|
||||
tower_id BIGINT
|
||||
);
|
||||
|
||||
-- First ALTER TABLE statement
|
||||
ALTER TABLE wizard_resident ADD CONSTRAINT wizard_tower_fk FOREIGN KEY (tower_id) REFERENCES "wizard_tower"(id) DEFERRABLE INITIALLY DEFERRED DEFERRABLE;
|
||||
|
||||
-- Second ALTER TABLE statement
|
||||
ALTER TABLE ONLY "wizard_resident" ADD CONSTRAINT "wizard_tower_fk2" FOREIGN KEY ("tower_id") REFERENCES "wizard_tower"("id") ON DELETE SET NULL DEFERRABLE INITIALLY DEFERRED DEFERRABLE;
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log('Relationships found:', result.relationships.length);
|
||||
result.relationships.forEach((rel, i) => {
|
||||
console.log(
|
||||
`FK ${i + 1}: ${rel.sourceTable}.${rel.sourceColumn} -> ${rel.targetTable}.${rel.targetColumn}`
|
||||
);
|
||||
});
|
||||
console.log('Warnings:', result.warnings);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
|
||||
// At least one relationship should be found (the regex fallback should catch at least one)
|
||||
expect(result.relationships.length).toBeGreaterThanOrEqual(1);
|
||||
|
||||
// Check the first relationship
|
||||
const fk = result.relationships[0];
|
||||
expect(fk.sourceTable).toBe('wizard_resident');
|
||||
expect(fk.targetTable).toBe('wizard_tower');
|
||||
expect(fk.sourceColumn).toBe('tower_id');
|
||||
expect(fk.targetColumn).toBe('id');
|
||||
});
|
||||
|
||||
it('should handle the exact problematic syntax from postgres_seven', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "public"."users_user" (
|
||||
"id" bigint NOT NULL,
|
||||
"email" character varying(254) NOT NULL,
|
||||
CONSTRAINT "users_user_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
CREATE TABLE "public"."account_emailaddress" (
|
||||
"id" integer DEFAULT GENERATED BY DEFAULT AS IDENTITY NOT NULL,
|
||||
"email" character varying(254) NOT NULL,
|
||||
"user_id" bigint NOT NULL,
|
||||
CONSTRAINT "account_emailaddress_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- Exact syntax from the problematic file with double DEFERRABLE
|
||||
ALTER TABLE ONLY "public"."account_emailaddress" ADD CONSTRAINT "account_emailaddress_user_id_2c513194_fk_users_user_id" FOREIGN KEY (user_id) REFERENCES users_user(id) DEFERRABLE INITIALLY DEFERRED DEFERRABLE;
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log('Warnings:', result.warnings);
|
||||
console.log('Relationships:', result.relationships);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const fk = result.relationships[0];
|
||||
expect(fk.name).toBe(
|
||||
'account_emailaddress_user_id_2c513194_fk_users_user_id'
|
||||
);
|
||||
expect(fk.sourceTable).toBe('account_emailaddress');
|
||||
expect(fk.targetTable).toBe('users_user');
|
||||
});
|
||||
|
||||
it('should handle multiple foreign keys in different formats', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE realm (
|
||||
id UUID PRIMARY KEY,
|
||||
name VARCHAR(100)
|
||||
);
|
||||
|
||||
CREATE TABLE region (
|
||||
id UUID PRIMARY KEY,
|
||||
name VARCHAR(100),
|
||||
realm_id UUID
|
||||
);
|
||||
|
||||
CREATE TABLE city (
|
||||
id UUID PRIMARY KEY,
|
||||
name VARCHAR(100),
|
||||
region_id UUID,
|
||||
realm_id UUID
|
||||
);
|
||||
|
||||
-- Mix of syntaxes that might fail parsing
|
||||
ALTER TABLE ONLY region ADD CONSTRAINT region_realm_fk FOREIGN KEY (realm_id) REFERENCES realm(id) DEFERRABLE INITIALLY DEFERRED DEFERRABLE;
|
||||
ALTER TABLE city ADD CONSTRAINT city_region_fk FOREIGN KEY (region_id) REFERENCES region(id) ON DELETE CASCADE;
|
||||
ALTER TABLE ONLY "public"."city" ADD CONSTRAINT "city_realm_fk" FOREIGN KEY ("realm_id") REFERENCES "public"."realm"("id");
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(3);
|
||||
expect(result.relationships).toHaveLength(3);
|
||||
|
||||
// Verify all three relationships were captured
|
||||
const regionRealmFK = result.relationships.find(
|
||||
(r) => r.sourceTable === 'region' && r.targetTable === 'realm'
|
||||
);
|
||||
const cityRegionFK = result.relationships.find(
|
||||
(r) => r.sourceTable === 'city' && r.targetTable === 'region'
|
||||
);
|
||||
const cityRealmFK = result.relationships.find(
|
||||
(r) => r.sourceTable === 'city' && r.targetTable === 'realm'
|
||||
);
|
||||
|
||||
expect(regionRealmFK).toBeDefined();
|
||||
expect(cityRegionFK).toBeDefined();
|
||||
expect(cityRealmFK).toBeDefined();
|
||||
});
|
||||
|
||||
it('should use regex fallback for unparseable ALTER TABLE statements', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE magical_item (
|
||||
id UUID PRIMARY KEY,
|
||||
name VARCHAR(255)
|
||||
);
|
||||
|
||||
CREATE TABLE enchantment (
|
||||
id UUID PRIMARY KEY,
|
||||
name VARCHAR(255),
|
||||
item_id UUID NOT NULL
|
||||
);
|
||||
|
||||
-- This should fail to parse due to syntax variations and trigger regex fallback
|
||||
ALTER TABLE ONLY enchantment ADD CONSTRAINT enchantment_item_fk FOREIGN KEY (item_id) REFERENCES magical_item(id) ON DELETE CASCADE ON UPDATE CASCADE DEFERRABLE INITIALLY DEFERRED DEFERRABLE;
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
// Should find the foreign key even if parser fails
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const fk = result.relationships[0];
|
||||
expect(fk.name).toBe('enchantment_item_fk');
|
||||
expect(fk.sourceTable).toBe('enchantment');
|
||||
expect(fk.targetTable).toBe('magical_item');
|
||||
expect(fk.sourceColumn).toBe('item_id');
|
||||
expect(fk.targetColumn).toBe('id');
|
||||
|
||||
// Should have a warning about the failed parse
|
||||
expect(result.warnings).toBeDefined();
|
||||
const hasAlterWarning = result.warnings!.some(
|
||||
(w) =>
|
||||
w.includes('Failed to parse statement') &&
|
||||
w.includes('ALTER TABLE')
|
||||
);
|
||||
expect(hasAlterWarning).toBe(true);
|
||||
});
|
||||
});
|
@@ -0,0 +1,84 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('Table with Comment Before CREATE TABLE', () => {
|
||||
it('should parse table with single-line comment before CREATE TABLE', async () => {
|
||||
const sql = `
|
||||
-- Junction table for tracking which crystals power which enchantments.
|
||||
CREATE TABLE crystal_enchantments (
|
||||
crystal_id UUID NOT NULL REFERENCES crystals(id) ON DELETE CASCADE,
|
||||
enchantment_id UUID NOT NULL REFERENCES enchantments(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (crystal_id, enchantment_id)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log('\nDebug info:');
|
||||
console.log('Tables found:', result.tables.length);
|
||||
console.log(
|
||||
'Table names:',
|
||||
result.tables.map((t) => t.name)
|
||||
);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('crystal_enchantments');
|
||||
expect(result.tables[0].columns).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should handle various comment formats before CREATE TABLE', async () => {
|
||||
const sql = `
|
||||
-- This is a wizards table
|
||||
CREATE TABLE wizards (
|
||||
id UUID PRIMARY KEY
|
||||
);
|
||||
|
||||
-- This table stores
|
||||
-- multiple artifacts
|
||||
CREATE TABLE artifacts (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100)
|
||||
);
|
||||
|
||||
/* This is a multi-line
|
||||
comment before table */
|
||||
CREATE TABLE quests (
|
||||
id BIGSERIAL PRIMARY KEY
|
||||
);
|
||||
|
||||
-- Comment 1
|
||||
-- Comment 2
|
||||
-- Comment 3
|
||||
CREATE TABLE spell_schools (
|
||||
id INTEGER PRIMARY KEY
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(4);
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
expect(tableNames).toEqual([
|
||||
'artifacts',
|
||||
'quests',
|
||||
'spell_schools',
|
||||
'wizards',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should not confuse comment-only statements with tables', async () => {
|
||||
const sql = `
|
||||
-- This is just a comment, not a table
|
||||
-- Even though it mentions CREATE TABLE in the comment
|
||||
-- It should not be parsed as a table
|
||||
|
||||
CREATE TABLE ancient_tome (
|
||||
id INTEGER PRIMARY KEY
|
||||
);
|
||||
|
||||
-- Another standalone comment`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('ancient_tome');
|
||||
});
|
||||
});
|
@@ -0,0 +1,113 @@
|
||||
import { describe, it, expect } from 'vitest';
import { fromPostgresImproved } from '../postgresql-improved';

// Verifies the comment-stripping pass that runs before SQL formatting:
// '--' and '/* */' comments must be removed, while string literals that
// merely look like comments must be preserved.
describe('Comment removal before formatting', () => {
    it('should remove single-line comments', async () => {
        const sql = `
-- This is a comment that will be removed
CREATE TABLE magic_items (
    item_id INTEGER PRIMARY KEY, -- unique identifier
    spell_power VARCHAR(100) -- mystical energy level
);`;

        const result = await fromPostgresImproved(sql);

        expect(result.tables).toHaveLength(1);
        expect(result.tables[0].name).toBe('magic_items');
        expect(result.tables[0].columns).toHaveLength(2);
    });

    it('should remove multi-line comments', async () => {
        const sql = `
/* This is a multi-line comment
   that spans multiple lines
   and will be removed */
CREATE TABLE wizard_inventory (
    wizard_id INTEGER PRIMARY KEY,
    /* Stores the magical
       artifacts collected */
    artifact_name VARCHAR(100)
);`;

        const result = await fromPostgresImproved(sql);

        expect(result.tables).toHaveLength(1);
        expect(result.tables[0].name).toBe('wizard_inventory');
    });

    it('should preserve strings that contain comment-like patterns', async () => {
        // '--' and '/* */' inside string literals are data, not comments.
        const sql = `
CREATE TABLE potion_recipes (
    recipe_id INTEGER PRIMARY KEY,
    brewing_note VARCHAR(100) DEFAULT '--shake before use',
    ingredient_source VARCHAR(200) DEFAULT 'https://alchemy.store',
    instructions TEXT DEFAULT '/* mix carefully */'
);`;

        const result = await fromPostgresImproved(sql);

        expect(result.tables).toHaveLength(1);
        expect(result.tables[0].columns).toHaveLength(4);

        // Check that defaults are preserved
        const brewingNoteCol = result.tables[0].columns.find(
            (c) => c.name === 'brewing_note'
        );
        expect(brewingNoteCol?.default).toBeDefined();
    });

    it('should handle complex scenarios with comments before tables', async () => {
        const sql = `
-- Dragon types catalog
CREATE TABLE dragons (dragon_id INTEGER PRIMARY KEY);

/* Knights registry
   for the kingdom */
CREATE TABLE knights (knight_id INTEGER PRIMARY KEY);

-- Battle records junction
-- Tracks dragon-knight encounters
CREATE TABLE dragon_battles (
    dragon_id INTEGER REFERENCES dragons(dragon_id),
    knight_id INTEGER REFERENCES knights(knight_id),
    PRIMARY KEY (dragon_id, knight_id)
);`;

        const result = await fromPostgresImproved(sql);

        expect(result.tables).toHaveLength(3);
        const tableNames = result.tables.map((t) => t.name).sort();
        expect(tableNames).toEqual(['dragon_battles', 'dragons', 'knights']);
    });

    it('should handle the exact forth example scenario', async () => {
        const sql = `
CREATE TABLE spell_books (
    book_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    title VARCHAR(100) NOT NULL
);

CREATE TABLE spells (
    spell_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    incantation VARCHAR(255) NOT NULL,
    effect TEXT, -- Magical effect description
    element VARCHAR(50) NOT NULL -- fire, water, earth, air
);

-- Junction table linking spells to their books.
CREATE TABLE book_spells (
    book_id UUID NOT NULL REFERENCES spell_books(book_id) ON DELETE CASCADE,
    spell_id UUID NOT NULL REFERENCES spells(spell_id) ON DELETE CASCADE,
    PRIMARY KEY (book_id, spell_id)
);`;

        const result = await fromPostgresImproved(sql);

        expect(result.tables).toHaveLength(3);
        expect(result.tables.map((t) => t.name).sort()).toEqual([
            'book_spells',
            'spell_books',
            'spells',
        ]);
    });
});
|
@@ -0,0 +1,247 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('Full Database Import - Quest Management System', () => {
|
||||
it('should parse all 20 tables including quest_sample_rewards', async () => {
|
||||
const sql = `-- Quest Management System Database
|
||||
-- Enums for quest system
|
||||
CREATE TYPE quest_status AS ENUM ('draft', 'active', 'on_hold', 'completed', 'abandoned');
|
||||
CREATE TYPE difficulty_level AS ENUM ('novice', 'apprentice', 'journeyman', 'expert', 'master');
|
||||
CREATE TYPE reward_type AS ENUM ('gold', 'item', 'experience', 'reputation', 'special');
|
||||
CREATE TYPE adventurer_rank AS ENUM ('bronze', 'silver', 'gold', 'platinum', 'legendary');
|
||||
CREATE TYPE region_climate AS ENUM ('temperate', 'arctic', 'desert', 'tropical', 'magical');
|
||||
|
||||
CREATE TABLE adventurers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
rank adventurer_rank DEFAULT 'bronze',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE guild_masters (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
specialization VARCHAR(100),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE regions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(100) NOT NULL,
|
||||
climate region_climate NOT NULL,
|
||||
danger_level INTEGER CHECK (danger_level BETWEEN 1 AND 10),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE outposts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
region_id UUID REFERENCES regions(id),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
location_coordinates POINT,
|
||||
is_active BOOLEAN DEFAULT true,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE scouts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
outpost_id UUID REFERENCES outposts(id),
|
||||
scouting_range INTEGER DEFAULT 50,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE scout_region_assignments (
|
||||
scout_id UUID REFERENCES scouts(id),
|
||||
region_id UUID REFERENCES regions(id),
|
||||
assigned_date DATE NOT NULL,
|
||||
PRIMARY KEY (scout_id, region_id)
|
||||
);
|
||||
|
||||
CREATE TABLE quest_givers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
title VARCHAR(100),
|
||||
location VARCHAR(255),
|
||||
reputation_required INTEGER DEFAULT 0,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE quest_templates (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
title VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
difficulty difficulty_level NOT NULL,
|
||||
base_reward_gold INTEGER DEFAULT 0,
|
||||
quest_giver_id UUID REFERENCES quest_givers(id),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE quests (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
quest_template_id UUID REFERENCES quest_templates(id),
|
||||
title VARCHAR(255) NOT NULL,
|
||||
status quest_status DEFAULT 'draft',
|
||||
reward_multiplier DECIMAL(3,2) DEFAULT 1.0,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE quest_sample_rewards (
|
||||
quest_template_id UUID REFERENCES quest_templates(id),
|
||||
reward_id UUID REFERENCES rewards(id),
|
||||
PRIMARY KEY (quest_template_id, reward_id)
|
||||
);
|
||||
|
||||
CREATE TABLE quest_rotations (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
rotation_name VARCHAR(100) NOT NULL,
|
||||
start_date DATE NOT NULL,
|
||||
end_date DATE NOT NULL,
|
||||
is_active BOOLEAN DEFAULT false,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE rotation_quests (
|
||||
rotation_id UUID REFERENCES quest_rotations(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
day_of_week INTEGER CHECK (day_of_week BETWEEN 1 AND 7),
|
||||
PRIMARY KEY (rotation_id, quest_id, day_of_week)
|
||||
);
|
||||
|
||||
CREATE TABLE contracts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
adventurer_id UUID REFERENCES adventurers(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
status quest_status DEFAULT 'active',
|
||||
started_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
completed_at TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE completion_events (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
scout_id UUID REFERENCES scouts(id),
|
||||
verification_notes TEXT,
|
||||
event_timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE bounties (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
amount_gold INTEGER NOT NULL,
|
||||
payment_status VARCHAR(50) DEFAULT 'pending',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE guild_ledgers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
entry_type VARCHAR(50) NOT NULL,
|
||||
amount INTEGER NOT NULL,
|
||||
balance_after INTEGER NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE reputation_logs (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
adventurer_id UUID REFERENCES adventurers(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
reputation_change INTEGER NOT NULL,
|
||||
reason VARCHAR(255),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE quest_suspensions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
suspension_date DATE NOT NULL,
|
||||
reason VARCHAR(255),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE guild_master_actions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
guild_master_id UUID REFERENCES guild_masters(id),
|
||||
action_type VARCHAR(100) NOT NULL,
|
||||
target_table VARCHAR(100),
|
||||
target_id UUID,
|
||||
details JSONB,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE rewards (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
adventurer_id UUID REFERENCES adventurers(id),
|
||||
reward_type reward_type NOT NULL,
|
||||
value INTEGER NOT NULL,
|
||||
claimed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);`;
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log('\nParsing results:');
|
||||
console.log(`- Tables found: ${result.tables.length}`);
|
||||
console.log(`- Enums found: ${result.enums?.length || 0}`);
|
||||
console.log(`- Warnings: ${result.warnings?.length || 0}`);
|
||||
|
||||
// List all table names
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
console.log('\nTable names:');
|
||||
tableNames.forEach((name, i) => {
|
||||
console.log(` ${i + 1}. ${name}`);
|
||||
});
|
||||
|
||||
// Should have all 20 tables
|
||||
expect(result.tables).toHaveLength(20);
|
||||
|
||||
// Check for quest_sample_rewards specifically
|
||||
const questSampleRewards = result.tables.find(
|
||||
(t) => t.name === 'quest_sample_rewards'
|
||||
);
|
||||
expect(questSampleRewards).toBeDefined();
|
||||
|
||||
if (questSampleRewards) {
|
||||
console.log('\nquest_sample_rewards table details:');
|
||||
console.log(`- Columns: ${questSampleRewards.columns.length}`);
|
||||
questSampleRewards.columns.forEach((col) => {
|
||||
console.log(
|
||||
` - ${col.name}: ${col.type} (nullable: ${col.nullable})`
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
// Expected tables
|
||||
const expectedTables = [
|
||||
'adventurers',
|
||||
'guild_masters',
|
||||
'regions',
|
||||
'outposts',
|
||||
'scouts',
|
||||
'scout_region_assignments',
|
||||
'quest_givers',
|
||||
'quest_templates',
|
||||
'quests',
|
||||
'quest_sample_rewards',
|
||||
'quest_rotations',
|
||||
'rotation_quests',
|
||||
'contracts',
|
||||
'completion_events',
|
||||
'bounties',
|
||||
'guild_ledgers',
|
||||
'reputation_logs',
|
||||
'quest_suspensions',
|
||||
'guild_master_actions',
|
||||
'rewards',
|
||||
];
|
||||
|
||||
expect(tableNames).toEqual(expectedTables.sort());
|
||||
|
||||
// Check that quest_sample_rewards has the expected columns
|
||||
expect(questSampleRewards!.columns).toHaveLength(2);
|
||||
const columnNames = questSampleRewards!.columns
|
||||
.map((c) => c.name)
|
||||
.sort();
|
||||
expect(columnNames).toEqual(['quest_template_id', 'reward_id']);
|
||||
});
|
||||
});
|
@@ -0,0 +1,157 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
// Regression suite for SQL dumps that combine schema-qualified enum types
// with malformed column definitions. The parser is expected to recover the
// enums and tables anyway and to surface the problems as warnings.
describe('Complex enum scenarios from real files', () => {
    it('should handle multiple schema-qualified enums with various syntax issues', async () => {
        // This test mimics the issues found in postgres_six_example_sql_script.sql
        // NOTE(review): the SQL below is intentionally malformed in places —
        // `"status""wizard_status"` (and similar) lacks a separating space
        // between column name and type, and `'{}': :jsonb` is a broken cast.
        // Do not "fix" these literals; the test exercises parser recovery.
        const sql = `
CREATE TYPE "public"."wizard_status" AS ENUM('active', 'suspended', 'banned', 'inactive');
CREATE TYPE "public"."magic_school" AS ENUM('fire', 'water', 'earth', 'air', 'spirit');
CREATE TYPE "public"."spell_tier" AS ENUM('cantrip', 'novice', 'adept', 'expert', 'master', 'legendary');
CREATE TYPE "public"."potion_type" AS ENUM('healing', 'mana', 'strength', 'speed', 'invisibility', 'flying', 'resistance');
CREATE TYPE "public"."creature_type" AS ENUM('beast', 'dragon', 'elemental', 'undead', 'demon', 'fey', 'construct', 'aberration');
CREATE TYPE "public"."quest_status" AS ENUM('available', 'accepted', 'in_progress', 'completed', 'failed', 'abandoned');
CREATE TYPE "public"."item_rarity" AS ENUM('common', 'uncommon', 'rare', 'epic', 'legendary', 'mythic');

CREATE TABLE "wizard_account" (
    "id" text PRIMARY KEY NOT NULL,
    "wizardId" text NOT NULL,
    "account_id" text NOT NULL,
    "provider_id" text NOT NULL,
    "created_at" timestamp with time zone NOT NULL
);

CREATE TABLE "wizard" (
    "id" text PRIMARY KEY NOT NULL,
    "name" text NOT NULL,
    "username" text,
    "email" text NOT NULL,
    "email_verified" boolean DEFAULT false NOT NULL,
    "status""wizard_status" DEFAULT 'active' NOT NULL,
    "primary_school""magic_school" DEFAULT 'fire' NOT NULL,
    "created_at" timestamp with time zone NOT NULL,
    CONSTRAINT "wizard_username_unique" UNIQUE("username"),
    CONSTRAINT "wizard_email_unique" UNIQUE("email")
);

CREATE TABLE "spells" (
    "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
    "wizard_id" text NOT NULL,
    "name" varchar(255) NOT NULL,
    "tier""spell_tier" DEFAULT 'cantrip' NOT NULL,
    "school""magic_school" DEFAULT 'fire' NOT NULL,
    "mana_cost" integer DEFAULT 10 NOT NULL,
    "metadata" jsonb DEFAULT '{}',
    "created_at" timestamp with time zone DEFAULT now()
);

CREATE TABLE "items" (
    "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
    "name" text NOT NULL,
    "description" text,
    "rarity""item_rarity" DEFAULT 'common' NOT NULL,
    "metadata" jsonb DEFAULT '{}': :jsonb,
    "created_at" timestamp DEFAULT now() NOT NULL
);

ALTER TABLE "wizard_account" ADD CONSTRAINT "wizard_account_wizardId_wizard_id_fk"
    FOREIGN KEY ("wizardId") REFERENCES "public"."wizard"("id") ON DELETE cascade ON UPDATE no action;
ALTER TABLE "spells" ADD CONSTRAINT "spells_wizard_id_wizard_id_fk"
    FOREIGN KEY ("wizard_id") REFERENCES "public"."wizard"("id") ON DELETE cascade ON UPDATE no action;
`;

        const result = await fromPostgresImproved(sql);

        // Check enum parsing — diagnostic dump of what the parser found.
        console.log('\n=== ENUMS FOUND ===');
        console.log('Count:', result.enums?.length || 0);
        if (result.enums) {
            result.enums.forEach((e) => {
                console.log(` - ${e.name}: ${e.values.length} values`);
            });
        }

        // Should find all 7 enums despite the surrounding syntax problems.
        expect(result.enums).toHaveLength(7);

        // Check specific enums — values must come back in declaration order.
        const wizardStatus = result.enums?.find(
            (e) => e.name === 'wizard_status'
        );
        expect(wizardStatus).toBeDefined();
        expect(wizardStatus?.values).toEqual([
            'active',
            'suspended',
            'banned',
            'inactive',
        ]);

        const itemRarity = result.enums?.find((e) => e.name === 'item_rarity');
        expect(itemRarity).toBeDefined();
        expect(itemRarity?.values).toEqual([
            'common',
            'uncommon',
            'rare',
            'epic',
            'legendary',
            'mythic',
        ]);

        // Check table parsing.
        console.log('\n=== TABLES FOUND ===');
        console.log('Count:', result.tables.length);
        console.log('Names:', result.tables.map((t) => t.name).join(', '));

        // Should find all 4 tables, including the ones with malformed columns.
        expect(result.tables).toHaveLength(4);
        expect(result.tables.map((t) => t.name).sort()).toEqual([
            'items',
            'spells',
            'wizard',
            'wizard_account',
        ]);

        // Check warnings for syntax issues.
        console.log('\n=== WARNINGS ===');
        console.log('Count:', result.warnings?.length || 0);
        if (result.warnings) {
            result.warnings.forEach((w) => {
                console.log(` - ${w}`);
            });
        }

        // Should have warnings about custom types and parsing failures.
        // (Only the presence of warnings is asserted, not their wording.)
        expect(result.warnings).toBeDefined();
        expect(result.warnings!.length).toBeGreaterThan(0);

        // Check that the tables with missing spaces in column definitions still got parsed.
        const wizardTable = result.tables.find((t) => t.name === 'wizard');
        expect(wizardTable).toBeDefined();

        const spellsTable = result.tables.find((t) => t.name === 'spells');
        expect(spellsTable).toBeDefined();
    });

    it('should parse enums used in column definitions even with syntax errors', async () => {
        // `"element""dragon_element"` again omits the space on purpose; the
        // enum declaration itself is well-formed and must still be parsed.
        const sql = `
CREATE TYPE "public"."dragon_element" AS ENUM('fire', 'ice', 'lightning', 'poison', 'shadow');

CREATE TABLE "dragons" (
    "id" uuid PRIMARY KEY DEFAULT gen_random_uuid(),
    "name" varchar(255) NOT NULL,
    "element""dragon_element" NOT NULL,
    "power_level" integer DEFAULT 100,
    "metadata" jsonb DEFAULT '{}'::jsonb
);`;

        const result = await fromPostgresImproved(sql);

        // Enum should be parsed regardless of the table's column glitch.
        expect(result.enums).toHaveLength(1);
        expect(result.enums?.[0].name).toBe('dragon_element');

        // Table might have issues due to missing space — logged, not asserted.
        console.log('Tables:', result.tables.length);
        console.log('Warnings:', result.warnings);
    });
});
|
@@ -0,0 +1,74 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('Minimal junction table test', () => {
|
||||
it('should parse junction table with exact SQL structure', async () => {
|
||||
// Junction table for tracking which dragons have been tamed by which dragon masters
|
||||
const sql = `-- Junction table for tracking dragon-master bonds.
|
||||
CREATE TABLE dragon_bonds (
|
||||
dragon_master_id UUID NOT NULL REFERENCES dragon_masters(id) ON DELETE CASCADE,
|
||||
dragon_id UUID NOT NULL REFERENCES dragons(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (dragon_master_id, dragon_id)
|
||||
);`;
|
||||
|
||||
console.log('Testing with SQL:', sql);
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log('Result:', {
|
||||
tableCount: result.tables.length,
|
||||
tables: result.tables.map((t) => ({
|
||||
name: t.name,
|
||||
columns: t.columns.length,
|
||||
})),
|
||||
warnings: result.warnings,
|
||||
});
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('dragon_bonds');
|
||||
});
|
||||
|
||||
it('should parse without the comment', async () => {
|
||||
const sql = `CREATE TABLE dragon_bonds (
|
||||
dragon_master_id UUID NOT NULL REFERENCES dragon_masters(id) ON DELETE CASCADE,
|
||||
dragon_id UUID NOT NULL REFERENCES dragons(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (dragon_master_id, dragon_id)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('dragon_bonds');
|
||||
});
|
||||
|
||||
it('should parse with dependencies', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE dragon_masters (
|
||||
id UUID PRIMARY KEY
|
||||
);
|
||||
|
||||
CREATE TABLE dragons (
|
||||
id UUID PRIMARY KEY
|
||||
);
|
||||
|
||||
-- Junction table for tracking dragon-master bonds.
|
||||
CREATE TABLE dragon_bonds (
|
||||
dragon_master_id UUID NOT NULL REFERENCES dragon_masters(id) ON DELETE CASCADE,
|
||||
dragon_id UUID NOT NULL REFERENCES dragons(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (dragon_master_id, dragon_id)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log('With dependencies:', {
|
||||
tableCount: result.tables.length,
|
||||
tableNames: result.tables.map((t) => t.name),
|
||||
});
|
||||
|
||||
expect(result.tables).toHaveLength(3);
|
||||
const dragonBonds = result.tables.find(
|
||||
(t) => t.name === 'dragon_bonds'
|
||||
);
|
||||
expect(dragonBonds).toBeDefined();
|
||||
});
|
||||
});
|
@@ -0,0 +1,66 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('Dragon Status Enum Test', () => {
|
||||
it('should parse dragon_status enum specifically', async () => {
|
||||
const sql = `
|
||||
CREATE TYPE dragon_status AS ENUM ('sleeping', 'hunting', 'guarding', 'hibernating', 'enraged');
|
||||
|
||||
CREATE TABLE dragons (
|
||||
id UUID PRIMARY KEY,
|
||||
status dragon_status DEFAULT 'sleeping'
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
// Check that the enum was parsed
|
||||
expect(result.enums).toBeDefined();
|
||||
expect(result.enums).toHaveLength(1);
|
||||
expect(result.enums![0].name).toBe('dragon_status');
|
||||
expect(result.enums![0].values).toEqual([
|
||||
'sleeping',
|
||||
'hunting',
|
||||
'guarding',
|
||||
'hibernating',
|
||||
'enraged',
|
||||
]);
|
||||
|
||||
// Check that the table uses the enum
|
||||
const table = result.tables.find((t) => t.name === 'dragons');
|
||||
expect(table).toBeDefined();
|
||||
|
||||
const statusColumn = table!.columns.find((c) => c.name === 'status');
|
||||
expect(statusColumn).toBeDefined();
|
||||
expect(statusColumn!.type).toBe('dragon_status');
|
||||
});
|
||||
|
||||
it('should handle multiple enums including dragon_status', async () => {
|
||||
const sql = `
|
||||
CREATE TYPE dragon_status AS ENUM ('sleeping', 'hunting', 'guarding', 'hibernating', 'enraged');
|
||||
CREATE TYPE spell_power AS ENUM ('weak', 'strong');
|
||||
CREATE TYPE magic_element AS ENUM ('fire', 'ice', 'both');
|
||||
|
||||
CREATE TABLE dragons (
|
||||
id UUID PRIMARY KEY,
|
||||
status dragon_status DEFAULT 'sleeping',
|
||||
breath_power spell_power NOT NULL,
|
||||
breath_element magic_element NOT NULL
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log(
|
||||
'Parsed enums:',
|
||||
result.enums?.map((e) => e.name)
|
||||
);
|
||||
|
||||
expect(result.enums).toHaveLength(3);
|
||||
|
||||
// Specifically check for dragon_status
|
||||
const dragonStatus = result.enums!.find(
|
||||
(e) => e.name === 'dragon_status'
|
||||
);
|
||||
expect(dragonStatus).toBeDefined();
|
||||
expect(dragonStatus!.name).toBe('dragon_status');
|
||||
});
|
||||
});
|
@@ -0,0 +1,37 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('Empty table parsing', () => {
|
||||
it('should parse empty tables', async () => {
|
||||
const sql = `CREATE TABLE empty_table ();`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('empty_table');
|
||||
expect(result.tables[0].columns).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should parse mix of empty and non-empty tables', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE normal_table (
|
||||
id INTEGER PRIMARY KEY
|
||||
);
|
||||
|
||||
CREATE TABLE empty_table ();
|
||||
|
||||
CREATE TABLE another_table (
|
||||
name VARCHAR(100)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(3);
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
expect(tableNames).toEqual([
|
||||
'another_table',
|
||||
'empty_table',
|
||||
'normal_table',
|
||||
]);
|
||||
});
|
||||
});
|
@@ -0,0 +1,160 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
import { convertToChartDBDiagram } from '../../../common';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
|
||||
// End-to-end check: parse a schema with five enum declarations, then convert
// the parser output into a ChartDB diagram and verify that enum-typed columns
// keep their enum type names. The console diagnostics help pinpoint which
// enum was dropped when the count assertion fails.
describe('Complete Enum Test with Fantasy Example', () => {
    it('should parse all enums and use them in tables', async () => {
        const sql = `
-- Fantasy realm database with multiple enum types
CREATE TYPE wizard_rank AS ENUM ('apprentice', 'journeyman', 'master', 'archmage', 'legendary');
CREATE TYPE spell_frequency AS ENUM ('hourly', 'daily');
CREATE TYPE magic_school AS ENUM ('fire', 'water', 'earth');
CREATE TYPE quest_status AS ENUM ('pending', 'active', 'completed');
CREATE TYPE dragon_mood AS ENUM ('happy', 'grumpy', 'sleepy');

CREATE TABLE wizards (
    id UUID PRIMARY KEY,
    name VARCHAR(100),
    rank wizard_rank DEFAULT 'apprentice'
);

CREATE TABLE spellbooks (
    id UUID PRIMARY KEY,
    wizard_id UUID REFERENCES wizards(id),
    cast_frequency spell_frequency NOT NULL,
    primary_school magic_school NOT NULL
);

CREATE TABLE dragon_quests (
    id UUID PRIMARY KEY,
    status quest_status DEFAULT 'pending',
    dragon_mood dragon_mood
);
`;

        // Parse the SQL
        const result = await fromPostgresImproved(sql);

        // Check enums
        console.log('\nEnum parsing results:');
        console.log(`Found ${result.enums?.length || 0} enum types`);

        if (result.enums) {
            result.enums.forEach((e) => {
                console.log(` - ${e.name}: ${e.values.length} values`);
            });
        }

        // Expected enums (all five declared above).
        const expectedEnums = [
            'wizard_rank',
            'spell_frequency',
            'magic_school',
            'quest_status',
            'dragon_mood',
        ];

        // Check which are missing
        const foundEnumNames = result.enums?.map((e) => e.name) || [];
        const missingEnums = expectedEnums.filter(
            (e) => !foundEnumNames.includes(e)
        );

        // Diagnostic-only branch: when an enum is missing, confirm whether it
        // was present in the input SQL and report the offending line.
        if (missingEnums.length > 0) {
            console.log('\nMissing enums:', missingEnums);

            // Let's check if they're in the SQL at all
            missingEnums.forEach((enumName) => {
                const regex = new RegExp(`CREATE\\s+TYPE\\s+${enumName}`, 'i');
                if (regex.test(sql)) {
                    console.log(
                        ` ${enumName} exists in SQL but wasn't parsed`
                    );

                    // Find the line
                    const lines = sql.split('\n');
                    const lineIndex = lines.findIndex((line) =>
                        regex.test(line)
                    );
                    if (lineIndex !== -1) {
                        console.log(
                            ` Line ${lineIndex + 1}: ${lines[lineIndex].trim()}`
                        );
                    }
                }
            });
        }

        // Convert to diagram (source and target DB are both PostgreSQL).
        const diagram = convertToChartDBDiagram(
            result,
            DatabaseType.POSTGRESQL,
            DatabaseType.POSTGRESQL
        );

        // Check custom types in diagram
        console.log(
            '\nCustom types in diagram:',
            diagram.customTypes?.length || 0
        );

        // Check wizards table (logging only; asserted further below).
        const wizardsTable = diagram.tables?.find((t) => t.name === 'wizards');
        if (wizardsTable) {
            console.log('\nWizards table:');
            const rankField = wizardsTable.fields.find(
                (f) => f.name === 'rank'
            );
            if (rankField) {
                console.log(
                    ` rank field type: ${rankField.type.name} (id: ${rankField.type.id})`
                );
            }
        }

        // Check spellbooks table (logging only).
        const spellbooksTable = diagram.tables?.find(
            (t) => t.name === 'spellbooks'
        );
        if (spellbooksTable) {
            console.log('\nSpellbooks table:');
            const frequencyField = spellbooksTable.fields.find(
                (f) => f.name === 'cast_frequency'
            );
            if (frequencyField) {
                console.log(
                    ` cast_frequency field type: ${frequencyField.type.name}`
                );
            }

            const schoolField = spellbooksTable.fields.find(
                (f) => f.name === 'primary_school'
            );
            if (schoolField) {
                console.log(
                    ` primary_school field type: ${schoolField.type.name}`
                );
            }
        }

        // Assertions: all five enums survive parsing AND conversion.
        expect(result.enums).toBeDefined();
        expect(result.enums).toHaveLength(5);
        expect(diagram.customTypes).toHaveLength(5);

        // Check that wizard_rank is present
        const wizardRankEnum = result.enums!.find(
            (e) => e.name === 'wizard_rank'
        );
        expect(wizardRankEnum).toBeDefined();

        // Check that the rank field uses wizard_rank type (case-insensitive).
        if (wizardsTable) {
            const rankField = wizardsTable.fields.find(
                (f) => f.name === 'rank'
            );
            expect(rankField?.type.name.toLowerCase()).toBe('wizard_rank');
        }
    });
});
|
@@ -0,0 +1,64 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
import { convertToChartDBDiagram } from '../../../common';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
|
||||
describe('Enum to Diagram Conversion', () => {
|
||||
it('should convert all enums and use them in table columns', async () => {
|
||||
const sql = `
|
||||
CREATE TYPE wizard_rank AS ENUM ('apprentice', 'journeyman', 'master', 'archmage', 'legendary');
|
||||
CREATE TYPE spell_frequency AS ENUM ('daily', 'weekly');
|
||||
CREATE TYPE magic_school AS ENUM ('fire', 'water', 'both');
|
||||
|
||||
CREATE TABLE spellbooks (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
wizard_id UUID NOT NULL,
|
||||
cast_frequency spell_frequency NOT NULL,
|
||||
primary_school magic_school NOT NULL,
|
||||
rank wizard_rank DEFAULT 'apprentice',
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
);`;
|
||||
|
||||
// Parse SQL
|
||||
const parserResult = await fromPostgres(sql);
|
||||
|
||||
// Should find all 3 enums
|
||||
expect(parserResult.enums).toHaveLength(3);
|
||||
|
||||
// Convert to diagram
|
||||
const diagram = convertToChartDBDiagram(
|
||||
parserResult,
|
||||
DatabaseType.POSTGRESQL,
|
||||
DatabaseType.POSTGRESQL
|
||||
);
|
||||
|
||||
// Should have 3 custom types
|
||||
expect(diagram.customTypes).toHaveLength(3);
|
||||
|
||||
// Check spellbooks table
|
||||
const spellbooksTable = diagram.tables?.find(
|
||||
(t) => t.name === 'spellbooks'
|
||||
);
|
||||
expect(spellbooksTable).toBeDefined();
|
||||
|
||||
// Check that enum columns use the correct types
|
||||
const rankField = spellbooksTable!.fields.find(
|
||||
(f) => f.name === 'rank'
|
||||
);
|
||||
expect(rankField).toBeDefined();
|
||||
expect(rankField!.type.name).toBe('wizard_rank');
|
||||
expect(rankField!.type.id).toBe('wizard_rank');
|
||||
|
||||
const frequencyField = spellbooksTable!.fields.find(
|
||||
(f) => f.name === 'cast_frequency'
|
||||
);
|
||||
expect(frequencyField).toBeDefined();
|
||||
expect(frequencyField!.type.name).toBe('spell_frequency');
|
||||
|
||||
const schoolField = spellbooksTable!.fields.find(
|
||||
(f) => f.name === 'primary_school'
|
||||
);
|
||||
expect(schoolField).toBeDefined();
|
||||
expect(schoolField!.type.name).toBe('magic_school');
|
||||
});
|
||||
});
|
@@ -0,0 +1,133 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('PostgreSQL Enum Type Parsing', () => {
|
||||
it('should parse CREATE TYPE ENUM statements', async () => {
|
||||
const sql = `
|
||||
CREATE TYPE quest_status AS ENUM ('pending', 'in_progress', 'completed');
|
||||
CREATE TYPE difficulty_level AS ENUM ('easy', 'medium', 'hard');
|
||||
|
||||
CREATE TABLE adventurers (
|
||||
id UUID PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE quests (
|
||||
id UUID PRIMARY KEY,
|
||||
adventurer_id UUID REFERENCES adventurers(id),
|
||||
status quest_status DEFAULT 'pending',
|
||||
difficulty difficulty_level NOT NULL
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
// Check that enum types were parsed
|
||||
expect(result.enums).toBeDefined();
|
||||
expect(result.enums).toHaveLength(2);
|
||||
|
||||
// Check first enum
|
||||
const questStatus = result.enums!.find(
|
||||
(e) => e.name === 'quest_status'
|
||||
);
|
||||
expect(questStatus).toBeDefined();
|
||||
expect(questStatus!.values).toEqual([
|
||||
'pending',
|
||||
'in_progress',
|
||||
'completed',
|
||||
]);
|
||||
|
||||
// Check second enum
|
||||
const difficultyLevel = result.enums!.find(
|
||||
(e) => e.name === 'difficulty_level'
|
||||
);
|
||||
expect(difficultyLevel).toBeDefined();
|
||||
expect(difficultyLevel!.values).toEqual(['easy', 'medium', 'hard']);
|
||||
|
||||
// Check that tables were parsed
|
||||
expect(result.tables).toHaveLength(2);
|
||||
|
||||
// Check that columns have the correct enum types
|
||||
const questsTable = result.tables.find((t) => t.name === 'quests');
|
||||
expect(questsTable).toBeDefined();
|
||||
|
||||
const statusColumn = questsTable!.columns.find(
|
||||
(c) => c.name === 'status'
|
||||
);
|
||||
expect(statusColumn).toBeDefined();
|
||||
expect(statusColumn!.type.toLowerCase()).toBe('quest_status');
|
||||
|
||||
const difficultyColumn = questsTable!.columns.find(
|
||||
(c) => c.name === 'difficulty'
|
||||
);
|
||||
expect(difficultyColumn).toBeDefined();
|
||||
expect(difficultyColumn!.type.toLowerCase()).toBe('difficulty_level');
|
||||
});
|
||||
|
||||
it('should handle enum types with various quote styles', async () => {
|
||||
const sql = `
|
||||
CREATE TYPE quote_test AS ENUM ('single', "double", 'mixed"quotes');
|
||||
CREATE TYPE number_status AS ENUM ('1', '2', '3-inactive');
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.enums).toBeDefined();
|
||||
expect(result.enums).toHaveLength(2);
|
||||
|
||||
const quoteTest = result.enums!.find((e) => e.name === 'quote_test');
|
||||
expect(quoteTest).toBeDefined();
|
||||
expect(quoteTest!.values).toEqual(['single', 'double', 'mixed"quotes']);
|
||||
|
||||
const numberStatus = result.enums!.find(
|
||||
(e) => e.name === 'number_status'
|
||||
);
|
||||
expect(numberStatus).toBeDefined();
|
||||
expect(numberStatus!.values).toEqual(['1', '2', '3-inactive']);
|
||||
});
|
||||
|
||||
it('should handle enums with special characters and longer values', async () => {
|
||||
const sql = `
|
||||
CREATE TYPE spell_status AS ENUM ('learning', 'mastered', 'forgotten', 'partially_learned', 'fully_mastered', 'forbidden', 'failed');
|
||||
CREATE TYPE portal_status AS ENUM ('inactive', 'charging', 'active', 'unstable', 'collapsed');
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.enums).toBeDefined();
|
||||
expect(result.enums).toHaveLength(2);
|
||||
|
||||
const spellStatus = result.enums!.find(
|
||||
(e) => e.name === 'spell_status'
|
||||
);
|
||||
expect(spellStatus).toBeDefined();
|
||||
expect(spellStatus!.values).toHaveLength(7);
|
||||
expect(spellStatus!.values).toContain('partially_learned');
|
||||
|
||||
const portalStatus = result.enums!.find(
|
||||
(e) => e.name === 'portal_status'
|
||||
);
|
||||
expect(portalStatus).toBeDefined();
|
||||
expect(portalStatus!.values).toHaveLength(5);
|
||||
expect(portalStatus!.values).toContain('collapsed');
|
||||
});
|
||||
|
||||
it('should include warning for unsupported CREATE TYPE statements', async () => {
|
||||
const sql = `
|
||||
CREATE TYPE creature_status AS ENUM ('dormant', 'awakened');
|
||||
|
||||
CREATE TABLE creatures (
|
||||
id INTEGER PRIMARY KEY,
|
||||
status creature_status
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
// With the updated parser, enum types don't generate warnings
|
||||
// Only non-enum custom types generate warnings
|
||||
|
||||
// But still parse the enum
|
||||
expect(result.enums).toBeDefined();
|
||||
expect(result.enums).toHaveLength(1);
|
||||
expect(result.enums![0].name).toBe('creature_status');
|
||||
});
|
||||
});
|
@@ -0,0 +1,54 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('Diagnostic tests for magical spell parsing cases', () => {
|
||||
it('should correctly parse spells table with Ancient Fire Blast descriptions', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE spells (
|
||||
id UUID PRIMARY KEY,
|
||||
description TEXT, -- Overall description of the spell, e.g., "Ancient Fire Blast"
|
||||
category VARCHAR(50) NOT NULL
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log('Spells table result:', {
|
||||
tableCount: result.tables.length,
|
||||
columns: result.tables[0]?.columns.map((c) => ({
|
||||
name: c.name,
|
||||
type: c.type,
|
||||
})),
|
||||
});
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const spellsTable = result.tables[0];
|
||||
expect(spellsTable.name).toBe('spells');
|
||||
|
||||
// Debug: list all columns found
|
||||
console.log('Columns found:', spellsTable.columns.length);
|
||||
spellsTable.columns.forEach((col, idx) => {
|
||||
console.log(` ${idx + 1}. ${col.name}: ${col.type}`);
|
||||
});
|
||||
|
||||
expect(spellsTable.columns).toHaveLength(3);
|
||||
});
|
||||
|
||||
it('should handle magical enum types with mixed quotes', async () => {
|
||||
const sql = `CREATE TYPE quote_test AS ENUM ('single', "double", 'mixed"quotes');`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log('Enum result:', {
|
||||
enumCount: result.enums?.length || 0,
|
||||
values: result.enums?.[0]?.values,
|
||||
});
|
||||
|
||||
expect(result.enums).toBeDefined();
|
||||
expect(result.enums).toHaveLength(1);
|
||||
expect(result.enums![0].values).toEqual([
|
||||
'single',
|
||||
'double',
|
||||
'mixed"quotes',
|
||||
]);
|
||||
});
|
||||
});
|
@@ -0,0 +1,59 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
// Regression for a case where only a subset of several CREATE TYPE
// statements was parsed: all five enums must be found, even though only
// three of them are referenced by the table below.
describe('Test All 5 Enums', () => {
    it('should parse all 5 enum types', async () => {
        // Test with exact SQL from the file
        const sql = `
-- Using ENUM types for fixed sets of values improves data integrity.
CREATE TYPE quest_status AS ENUM ('active', 'paused', 'grace_period', 'expired', 'completed');
CREATE TYPE spell_frequency AS ENUM ('daily', 'weekly');
CREATE TYPE magic_time AS ENUM ('dawn', 'dusk', 'both');
CREATE TYPE ritual_status AS ENUM ('pending', 'channeling', 'completed', 'failed', 'skipped');
CREATE TYPE mana_status AS ENUM ('pending', 'charged', 'depleted');

CREATE TABLE spellbooks (
    id UUID PRIMARY KEY,
    status quest_status DEFAULT 'active',
    cast_frequency spell_frequency NOT NULL,
    cast_time magic_time NOT NULL
);
`;

        const result = await fromPostgresImproved(sql);

        // Debug output — lists each enum name the parser recovered.
        console.log('Enums found:', result.enums?.length || 0);
        if (result.enums) {
            result.enums.forEach((e) => {
                console.log(` - ${e.name}`);
            });
        }

        expect(result.enums).toBeDefined();
        expect(result.enums).toHaveLength(5);

        // Check all enum names (sorted, so parser ordering is irrelevant).
        const enumNames = result.enums!.map((e) => e.name).sort();
        expect(enumNames).toEqual([
            'magic_time',
            'mana_status',
            'quest_status',
            'ritual_status',
            'spell_frequency',
        ]);

        // Check quest_status specifically — values in declaration order.
        const questStatus = result.enums!.find(
            (e) => e.name === 'quest_status'
        );
        expect(questStatus).toBeDefined();
        expect(questStatus!.values).toEqual([
            'active',
            'paused',
            'grace_period',
            'expired',
            'completed',
        ]);
    });
});
|
@@ -0,0 +1,79 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
// Smoke test mixing CREATE EXTENSION, two CREATE TYPE enums, and two tables
// joined by a foreign key. The try/catch exists only to print a readable
// message and stack before re-throwing, so a parser crash fails loudly.
describe('PostgreSQL parser - CREATE EXTENSION and CREATE TYPE', () => {
    it('should handle CREATE EXTENSION and CREATE TYPE statements', async () => {
        const testSQL = `
-- Enable UUID extension
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";

-- Create custom type for creature alignment
CREATE TYPE creature_alignment AS ENUM ('lawful', 'neutral', 'chaotic');

-- Create a table that uses the custom type
CREATE TABLE mystical_creatures (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    name VARCHAR(100) NOT NULL,
    species VARCHAR(255) UNIQUE NOT NULL,
    alignment creature_alignment DEFAULT 'neutral',
    discovered_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

-- Create another custom type
CREATE TYPE magic_school AS ENUM ('illusion', 'evocation', 'necromancy', 'divination');

-- Create a table with foreign key
CREATE TABLE creature_abilities (
    id SERIAL PRIMARY KEY,
    creature_id UUID REFERENCES mystical_creatures(id),
    ability_name VARCHAR(255) NOT NULL,
    school magic_school DEFAULT 'evocation',
    is_innate BOOLEAN DEFAULT FALSE
);
`;

        console.log(
            'Testing PostgreSQL parser with CREATE EXTENSION and CREATE TYPE...\n'
        );

        try {
            const result = await fromPostgresImproved(testSQL);

            // Diagnostic dump: tables with their columns and flags.
            console.log('Parse successful!');
            console.log('\nTables found:', result.tables.length);
            result.tables.forEach((table) => {
                console.log(`\n- Table: ${table.name}`);
                console.log(' Columns:');
                table.columns.forEach((col) => {
                    console.log(
                        ` - ${col.name}: ${col.type}${col.nullable ? '' : ' NOT NULL'}${col.primaryKey ? ' PRIMARY KEY' : ''}`
                    );
                });
            });

            // Diagnostic dump: FK relationships as source -> target pairs.
            console.log('\nRelationships found:', result.relationships.length);
            result.relationships.forEach((rel) => {
                console.log(
                    `- ${rel.sourceTable}.${rel.sourceColumn} -> ${rel.targetTable}.${rel.targetColumn}`
                );
            });

            if (result.warnings && result.warnings.length > 0) {
                console.log('\nWarnings:');
                result.warnings.forEach((warning) => {
                    console.log(`- ${warning}`);
                });
            }

            // Basic assertions — note tables are expected in declaration
            // order (index 0 and 1), and exactly one FK relationship.
            expect(result.tables.length).toBe(2);
            expect(result.tables[0].name).toBe('mystical_creatures');
            expect(result.tables[1].name).toBe('creature_abilities');
            expect(result.relationships.length).toBe(1);
        } catch (error) {
            // Log context for debugging, then re-throw so the test fails.
            console.error('Error parsing SQL:', (error as Error).message);
            console.error('\nStack trace:', (error as Error).stack);
            throw error;
        }
    });
});
|
@@ -0,0 +1,203 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('Debug Missing Junction Table', () => {
|
||||
it('should find quest_sample_rewards junction table in the quest management system', async () => {
|
||||
const sql = `-- Quest Management System Database with Junction Tables
|
||||
CREATE TYPE quest_status AS ENUM ('draft', 'active', 'on_hold', 'completed', 'abandoned');
|
||||
CREATE TYPE difficulty_level AS ENUM ('novice', 'apprentice', 'journeyman', 'expert', 'master');
|
||||
CREATE TYPE reward_type AS ENUM ('gold', 'item', 'experience', 'reputation', 'special');
|
||||
CREATE TYPE adventurer_rank AS ENUM ('bronze', 'silver', 'gold', 'platinum', 'legendary');
|
||||
CREATE TYPE region_climate AS ENUM ('temperate', 'arctic', 'desert', 'tropical', 'magical');
|
||||
|
||||
CREATE TABLE adventurers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
rank adventurer_rank DEFAULT 'bronze'
|
||||
);
|
||||
|
||||
CREATE TABLE guild_masters (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE regions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(100) NOT NULL,
|
||||
climate region_climate NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE outposts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
region_id UUID REFERENCES regions(id),
|
||||
name VARCHAR(255) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE scouts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
outpost_id UUID REFERENCES outposts(id)
|
||||
);
|
||||
|
||||
CREATE TABLE scout_region_assignments (
|
||||
scout_id UUID REFERENCES scouts(id),
|
||||
region_id UUID REFERENCES regions(id),
|
||||
assigned_date DATE NOT NULL,
|
||||
PRIMARY KEY (scout_id, region_id)
|
||||
);
|
||||
|
||||
CREATE TABLE quest_givers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
title VARCHAR(100)
|
||||
);
|
||||
|
||||
CREATE TABLE quest_templates (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
title VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
difficulty difficulty_level NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE quests (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
quest_template_id UUID REFERENCES quest_templates(id),
|
||||
title VARCHAR(255) NOT NULL,
|
||||
status quest_status DEFAULT 'draft'
|
||||
);
|
||||
|
||||
CREATE TABLE rewards (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
reward_type reward_type NOT NULL,
|
||||
value INTEGER NOT NULL
|
||||
);
|
||||
|
||||
-- Junction table for quest template sample rewards
|
||||
CREATE TABLE quest_sample_rewards (
|
||||
quest_template_id UUID REFERENCES quest_templates(id),
|
||||
reward_id UUID REFERENCES rewards(id),
|
||||
PRIMARY KEY (quest_template_id, reward_id)
|
||||
);
|
||||
|
||||
CREATE TABLE quest_rotations (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
rotation_name VARCHAR(100) NOT NULL,
|
||||
start_date DATE NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE rotation_quests (
|
||||
rotation_id UUID REFERENCES quest_rotations(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
day_of_week INTEGER CHECK (day_of_week BETWEEN 1 AND 7),
|
||||
PRIMARY KEY (rotation_id, quest_id, day_of_week)
|
||||
);
|
||||
|
||||
CREATE TABLE contracts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
adventurer_id UUID REFERENCES adventurers(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
status quest_status DEFAULT 'active'
|
||||
);
|
||||
|
||||
CREATE TABLE completion_events (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
scout_id UUID REFERENCES scouts(id)
|
||||
);
|
||||
|
||||
CREATE TABLE bounties (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
amount_gold INTEGER NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE guild_ledgers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
entry_type VARCHAR(50) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE reputation_logs (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
adventurer_id UUID REFERENCES adventurers(id),
|
||||
quest_id UUID REFERENCES quests(id)
|
||||
);
|
||||
|
||||
CREATE TABLE quest_suspensions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
suspension_date DATE NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE guild_master_actions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
guild_master_id UUID REFERENCES guild_masters(id),
|
||||
action_type VARCHAR(100) NOT NULL
|
||||
);`;
|
||||
|
||||
// First, verify the table exists in the SQL
|
||||
const tableExists = sql.includes('CREATE TABLE quest_sample_rewards');
|
||||
console.log('\nDebugging quest_sample_rewards:');
|
||||
console.log('- Table exists in SQL:', tableExists);
|
||||
|
||||
// Extract the specific table definition
|
||||
const tableMatch = sql.match(
|
||||
/-- Junction table[\s\S]*?CREATE TABLE quest_sample_rewards[\s\S]*?;/
|
||||
);
|
||||
if (tableMatch) {
|
||||
console.log('- Table definition found, first 200 chars:');
|
||||
console.log(tableMatch[0].substring(0, 200) + '...');
|
||||
}
|
||||
|
||||
// Now parse
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log('\nParsing results:');
|
||||
console.log('- Total tables:', result.tables.length);
|
||||
console.log(
|
||||
'- Table names:',
|
||||
result.tables.map((t) => t.name).join(', ')
|
||||
);
|
||||
|
||||
// Look for quest_sample_rewards
|
||||
const questSampleRewards = result.tables.find(
|
||||
(t) => t.name === 'quest_sample_rewards'
|
||||
);
|
||||
console.log('- quest_sample_rewards found:', !!questSampleRewards);
|
||||
|
||||
if (!questSampleRewards) {
|
||||
// Check warnings for clues
|
||||
console.log('\nWarnings that might be relevant:');
|
||||
result.warnings?.forEach((w, i) => {
|
||||
if (
|
||||
w.includes('quest_sample_rewards') ||
|
||||
w.includes('Failed to parse')
|
||||
) {
|
||||
console.log(` ${i}: ${w}`);
|
||||
}
|
||||
});
|
||||
|
||||
// List all tables to see what's missing
|
||||
console.log('\nAll parsed tables:');
|
||||
result.tables.forEach((t, i) => {
|
||||
console.log(
|
||||
` ${i + 1}. ${t.name} (${t.columns.length} columns)`
|
||||
);
|
||||
});
|
||||
} else {
|
||||
console.log('\nquest_sample_rewards details:');
|
||||
console.log('- Columns:', questSampleRewards.columns.length);
|
||||
questSampleRewards.columns.forEach((c) => {
|
||||
console.log(` - ${c.name}: ${c.type}`);
|
||||
});
|
||||
}
|
||||
|
||||
// The test expectation
|
||||
expect(tableExists).toBe(true);
|
||||
expect(result.tables.length).toBeGreaterThanOrEqual(19); // At least 19 tables
|
||||
expect(questSampleRewards).toBeDefined();
|
||||
});
|
||||
});
|
@@ -0,0 +1,56 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('PostgreSQL Relationships Debug', () => {
|
||||
it('should parse simple foreign key', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4()
|
||||
);
|
||||
|
||||
CREATE TABLE towers (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
wizard_id UUID NOT NULL REFERENCES wizards(id) ON DELETE CASCADE
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log(
|
||||
'Tables:',
|
||||
result.tables.map((t) => t.name)
|
||||
);
|
||||
console.log('Relationships:', result.relationships);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
expect(result.relationships[0].sourceTable).toBe('towers');
|
||||
expect(result.relationships[0].targetTable).toBe('wizards');
|
||||
});
|
||||
|
||||
it('should handle custom types and foreign keys', async () => {
|
||||
const sql = `
|
||||
CREATE TYPE quest_status AS ENUM ('active', 'paused', 'completed');
|
||||
|
||||
CREATE TABLE wizards (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4()
|
||||
);
|
||||
|
||||
CREATE TABLE quests (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
wizard_id UUID NOT NULL REFERENCES wizards(id) ON DELETE CASCADE,
|
||||
status quest_status DEFAULT 'active'
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log(
|
||||
'Tables:',
|
||||
result.tables.map((t) => t.name)
|
||||
);
|
||||
console.log('Relationships:', result.relationships);
|
||||
console.log('Warnings:', result.warnings);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
});
|
||||
});
|
@@ -0,0 +1,93 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('Junction Table Parsing - Spell Plans Database', () => {
|
||||
it('should parse all 3 tables (spell_plans, spells, plan_sample_spells) and 2 relationships', async () => {
|
||||
const sql = `-- Spell Plans Database with Enums and Junction Table
|
||||
CREATE TYPE casting_difficulty AS ENUM ('simple', 'moderate', 'complex', 'arcane', 'forbidden');
|
||||
CREATE TYPE magic_school AS ENUM ('elemental', 'healing', 'illusion', 'necromancy', 'transmutation');
|
||||
CREATE TYPE spell_range AS ENUM ('touch', 'short', 'medium', 'long', 'sight');
|
||||
CREATE TYPE component_type AS ENUM ('verbal', 'somatic', 'material', 'focus', 'divine');
|
||||
CREATE TYPE power_source AS ENUM ('arcane', 'divine', 'nature', 'psionic', 'primal');
|
||||
|
||||
CREATE TABLE spell_plans (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
difficulty casting_difficulty NOT NULL,
|
||||
school magic_school NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE spells (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
mana_cost INTEGER NOT NULL,
|
||||
cast_time VARCHAR(100),
|
||||
range spell_range NOT NULL,
|
||||
components component_type[] NOT NULL,
|
||||
power_source power_source NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
-- Junction table for showing sample spells in a spell plan
|
||||
CREATE TABLE plan_sample_spells (
|
||||
spell_plan_id UUID NOT NULL REFERENCES spell_plans(id) ON DELETE CASCADE,
|
||||
spell_id UUID NOT NULL REFERENCES spells(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (spell_plan_id, spell_id)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log('Parsing results:');
|
||||
console.log(
|
||||
'- Tables:',
|
||||
result.tables.map((t) => t.name)
|
||||
);
|
||||
console.log('- Table count:', result.tables.length);
|
||||
console.log('- Relationships:', result.relationships.length);
|
||||
console.log('- Enums:', result.enums?.length || 0);
|
||||
|
||||
// Should have 3 tables
|
||||
expect(result.tables).toHaveLength(3);
|
||||
|
||||
// Check table names
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
expect(tableNames).toEqual([
|
||||
'plan_sample_spells',
|
||||
'spell_plans',
|
||||
'spells',
|
||||
]);
|
||||
|
||||
// Should have 2 relationships (both from plan_sample_spells)
|
||||
expect(result.relationships).toHaveLength(2);
|
||||
|
||||
// Check plan_sample_spells specifically
|
||||
const planSampleSpells = result.tables.find(
|
||||
(t) => t.name === 'plan_sample_spells'
|
||||
);
|
||||
expect(planSampleSpells).toBeDefined();
|
||||
expect(planSampleSpells!.columns).toHaveLength(2);
|
||||
|
||||
// Should have 5 enum types
|
||||
expect(result.enums).toBeDefined();
|
||||
expect(result.enums).toHaveLength(5);
|
||||
});
|
||||
|
||||
it('should parse the exact junction table definition', async () => {
|
||||
const sql = `
|
||||
-- Junction table for showing sample spells on a grimoire's page.
|
||||
CREATE TABLE grimoire_sample_spells (
|
||||
grimoire_plan_id UUID NOT NULL REFERENCES grimoire_plans(id) ON DELETE CASCADE,
|
||||
spell_id UUID NOT NULL REFERENCES spells(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (grimoire_plan_id, spell_id)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('grimoire_sample_spells');
|
||||
expect(result.tables[0].columns).toHaveLength(2);
|
||||
});
|
||||
});
|
@@ -0,0 +1,59 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('Invalid multi-line string in SQL', () => {
|
||||
it('should handle SQL with orphaned string literal', async () => {
|
||||
// This SQL has a syntax error - string literal on its own line
|
||||
const sql = `
|
||||
CREATE TABLE test_table (
|
||||
id UUID PRIMARY KEY,
|
||||
description TEXT, -- Example description
|
||||
"This is an orphaned string"
|
||||
name VARCHAR(100)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
// Even with syntax error, it should try to parse what it can
|
||||
console.log('Result:', {
|
||||
tables: result.tables.length,
|
||||
warnings: result.warnings,
|
||||
});
|
||||
|
||||
// Should attempt to parse the table even if parser fails
|
||||
expect(result.tables.length).toBeGreaterThanOrEqual(0);
|
||||
});
|
||||
|
||||
it('should parse all tables even if one has syntax errors', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE table1 (
|
||||
id UUID PRIMARY KEY
|
||||
);
|
||||
|
||||
CREATE TABLE table2 (
|
||||
id UUID PRIMARY KEY,
|
||||
description TEXT, -- Example
|
||||
"Orphaned string"
|
||||
name VARCHAR(100)
|
||||
);
|
||||
|
||||
CREATE TABLE table3 (
|
||||
id UUID PRIMARY KEY
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log('Multi-table result:', {
|
||||
tableCount: result.tables.length,
|
||||
tableNames: result.tables.map((t) => t.name),
|
||||
warnings: result.warnings?.length || 0,
|
||||
});
|
||||
|
||||
// Should parse at least table1 and table3
|
||||
expect(result.tables.length).toBeGreaterThanOrEqual(2);
|
||||
|
||||
const tableNames = result.tables.map((t) => t.name);
|
||||
expect(tableNames).toContain('table1');
|
||||
expect(tableNames).toContain('table3');
|
||||
});
|
||||
});
|
@@ -0,0 +1,246 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('Magical junction table parsing for wizard spell associations', () => {
|
||||
it('should parse the wizard-spell junction table for tracking spell knowledge', async () => {
|
||||
// Test with a junction table for spells and wizards
|
||||
const sql = `
|
||||
-- Junction table for tracking which wizards know which spells.
|
||||
CREATE TABLE wizard_spells (
|
||||
wizard_id UUID NOT NULL REFERENCES wizards(id) ON DELETE CASCADE,
|
||||
spell_id UUID NOT NULL REFERENCES spells(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (wizard_id, spell_id)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log('Test results:', {
|
||||
tableCount: result.tables.length,
|
||||
tableNames: result.tables.map((t) => t.name),
|
||||
warnings: result.warnings,
|
||||
});
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('wizard_spells');
|
||||
});
|
||||
|
||||
it('should count all CREATE TABLE statements for magical entities in quest system', async () => {
|
||||
const sql = `-- Quest Management System Database
|
||||
CREATE TYPE quest_status AS ENUM ('draft', 'active', 'on_hold', 'completed', 'abandoned');
|
||||
CREATE TYPE difficulty_level AS ENUM ('novice', 'apprentice', 'journeyman', 'expert', 'master');
|
||||
CREATE TYPE reward_type AS ENUM ('gold', 'item', 'experience', 'reputation', 'special');
|
||||
CREATE TYPE adventurer_rank AS ENUM ('bronze', 'silver', 'gold', 'platinum', 'legendary');
|
||||
CREATE TYPE region_climate AS ENUM ('temperate', 'arctic', 'desert', 'tropical', 'magical');
|
||||
|
||||
CREATE TABLE adventurers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
rank adventurer_rank DEFAULT 'bronze',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE guild_masters (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
specialization VARCHAR(100),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE regions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(100) NOT NULL,
|
||||
climate region_climate NOT NULL,
|
||||
danger_level INTEGER CHECK (danger_level BETWEEN 1 AND 10),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE outposts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
region_id UUID REFERENCES regions(id),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
location_coordinates POINT,
|
||||
is_active BOOLEAN DEFAULT true,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE scouts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
outpost_id UUID REFERENCES outposts(id),
|
||||
scouting_range INTEGER DEFAULT 50,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE scout_region_assignments (
|
||||
scout_id UUID REFERENCES scouts(id),
|
||||
region_id UUID REFERENCES regions(id),
|
||||
assigned_date DATE NOT NULL,
|
||||
PRIMARY KEY (scout_id, region_id)
|
||||
);
|
||||
|
||||
CREATE TABLE quest_givers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
title VARCHAR(100),
|
||||
location VARCHAR(255),
|
||||
reputation_required INTEGER DEFAULT 0,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE quest_templates (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
title VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
difficulty difficulty_level NOT NULL,
|
||||
base_reward_gold INTEGER DEFAULT 0,
|
||||
quest_giver_id UUID REFERENCES quest_givers(id),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE quests (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
quest_template_id UUID REFERENCES quest_templates(id),
|
||||
title VARCHAR(255) NOT NULL,
|
||||
status quest_status DEFAULT 'draft',
|
||||
reward_multiplier DECIMAL(3,2) DEFAULT 1.0,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE rewards (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
reward_type reward_type NOT NULL,
|
||||
value INTEGER NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE quest_sample_rewards (
|
||||
quest_template_id UUID REFERENCES quest_templates(id),
|
||||
reward_id UUID REFERENCES rewards(id),
|
||||
PRIMARY KEY (quest_template_id, reward_id)
|
||||
);
|
||||
|
||||
CREATE TABLE quest_rotations (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
rotation_name VARCHAR(100) NOT NULL,
|
||||
start_date DATE NOT NULL,
|
||||
end_date DATE NOT NULL,
|
||||
is_active BOOLEAN DEFAULT false,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE rotation_quests (
|
||||
rotation_id UUID REFERENCES quest_rotations(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
day_of_week INTEGER CHECK (day_of_week BETWEEN 1 AND 7),
|
||||
PRIMARY KEY (rotation_id, quest_id, day_of_week)
|
||||
);
|
||||
|
||||
CREATE TABLE contracts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
adventurer_id UUID REFERENCES adventurers(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
status quest_status DEFAULT 'active',
|
||||
started_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
completed_at TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE completion_events (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
scout_id UUID REFERENCES scouts(id),
|
||||
verification_notes TEXT,
|
||||
event_timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE bounties (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
amount_gold INTEGER NOT NULL,
|
||||
payment_status VARCHAR(50) DEFAULT 'pending',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE guild_ledgers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
entry_type VARCHAR(50) NOT NULL,
|
||||
amount INTEGER NOT NULL,
|
||||
balance_after INTEGER NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE reputation_logs (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
adventurer_id UUID REFERENCES adventurers(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
reputation_change INTEGER NOT NULL,
|
||||
reason VARCHAR(255),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE quest_suspensions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
suspension_date DATE NOT NULL,
|
||||
reason VARCHAR(255),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE guild_master_actions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
guild_master_id UUID REFERENCES guild_masters(id),
|
||||
action_type VARCHAR(100) NOT NULL,
|
||||
target_table VARCHAR(100),
|
||||
target_id UUID,
|
||||
details JSONB,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);`;
|
||||
|
||||
// Count CREATE TABLE statements
|
||||
const createTableMatches = sql.match(/CREATE TABLE/gi) || [];
|
||||
console.log(
|
||||
`\nFound ${createTableMatches.length} CREATE TABLE statements in file`
|
||||
);
|
||||
|
||||
// Find all table names
|
||||
const tableNameMatches =
|
||||
sql.match(
|
||||
/CREATE TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?["']?(\w+)["']?/gi
|
||||
) || [];
|
||||
const tableNames = tableNameMatches
|
||||
.map((match) => {
|
||||
const nameMatch = match.match(
|
||||
/CREATE TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?["']?(\w+)["']?/i
|
||||
);
|
||||
return nameMatch ? nameMatch[1] : null;
|
||||
})
|
||||
.filter(Boolean);
|
||||
|
||||
console.log('Table names found in SQL:', tableNames);
|
||||
console.log(
|
||||
'quest_sample_rewards in list?',
|
||||
tableNames.includes('quest_sample_rewards')
|
||||
);
|
||||
|
||||
// Parse the file
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log(`\nParsed ${result.tables.length} tables`);
|
||||
console.log(
|
||||
'Parsed table names:',
|
||||
result.tables.map((t) => t.name).sort()
|
||||
);
|
||||
|
||||
const junctionTable = result.tables.find(
|
||||
(t) => t.name.includes('_') && t.columns.length >= 2
|
||||
);
|
||||
console.log('junction table found?', !!junctionTable);
|
||||
|
||||
// All CREATE TABLE statements should be parsed
|
||||
expect(result.tables.length).toBe(createTableMatches.length);
|
||||
expect(junctionTable).toBeDefined();
|
||||
});
|
||||
});
|
@@ -0,0 +1,134 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('junction table parsing fix', () => {
|
||||
it('should parse table with single-line comment before CREATE TABLE', async () => {
|
||||
const sql = `
|
||||
-- Junction table for tracking which wizards have learned which spells.
|
||||
CREATE TABLE wizard_spellbook (
|
||||
wizard_id UUID NOT NULL REFERENCES wizards(id) ON DELETE CASCADE,
|
||||
spell_id UUID NOT NULL REFERENCES spells(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (wizard_id, spell_id)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('wizard_spellbook');
|
||||
expect(result.tables[0].columns).toHaveLength(2);
|
||||
expect(result.tables[0].columns[0].name).toBe('wizard_id');
|
||||
expect(result.tables[0].columns[1].name).toBe('spell_id');
|
||||
});
|
||||
|
||||
it('should handle multiple tables with comments', async () => {
|
||||
const sql = `
|
||||
-- First table
|
||||
CREATE TABLE mages (
|
||||
id UUID PRIMARY KEY
|
||||
);
|
||||
|
||||
-- Junction table for tracking spellbook contents.
|
||||
CREATE TABLE mage_grimoires (
|
||||
mage_id UUID NOT NULL REFERENCES mages(id) ON DELETE CASCADE,
|
||||
grimoire_id UUID NOT NULL REFERENCES grimoires(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (mage_id, grimoire_id)
|
||||
);
|
||||
|
||||
-- Another table
|
||||
CREATE TABLE grimoires (
|
||||
id UUID PRIMARY KEY
|
||||
);
|
||||
|
||||
CREATE TABLE enchantments (
|
||||
id UUID PRIMARY KEY
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(4);
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
expect(tableNames).toEqual([
|
||||
'enchantments',
|
||||
'grimoires',
|
||||
'mage_grimoires',
|
||||
'mages',
|
||||
]);
|
||||
|
||||
// Verify mage_grimoires specifically
|
||||
const mageGrimoires = result.tables.find(
|
||||
(t) => t.name === 'mage_grimoires'
|
||||
);
|
||||
expect(mageGrimoires).toBeDefined();
|
||||
expect(mageGrimoires?.columns).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should handle statements that start with comment but include CREATE TABLE', async () => {
|
||||
const sql = `
|
||||
-- This comment mentions CREATE TABLE artifacts in the comment
|
||||
-- but it's just a comment
|
||||
;
|
||||
-- This is the actual table
|
||||
CREATE TABLE mystical_artifacts (
|
||||
id INTEGER PRIMARY KEY
|
||||
);
|
||||
|
||||
-- Junction table for artifact_enchantments
|
||||
CREATE TABLE artifact_enchantments (
|
||||
artifact_id INTEGER,
|
||||
enchantment_id INTEGER
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
expect(tableNames).toEqual([
|
||||
'artifact_enchantments',
|
||||
'mystical_artifacts',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should parse all three tables including junction table', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE spell_categories (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
name VARCHAR(100) NOT NULL,
|
||||
description TEXT
|
||||
);
|
||||
|
||||
CREATE TABLE arcane_spells (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
incantation VARCHAR(255) NOT NULL,
|
||||
power_level INTEGER DEFAULT 1,
|
||||
mana_cost INTEGER NOT NULL
|
||||
);
|
||||
|
||||
-- Junction table for categorizing spells
|
||||
CREATE TABLE spell_categorization (
|
||||
category_id UUID NOT NULL REFERENCES spell_categories(id) ON DELETE CASCADE,
|
||||
spell_id UUID NOT NULL REFERENCES arcane_spells(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (category_id, spell_id)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(3);
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
expect(tableNames).toEqual([
|
||||
'arcane_spells',
|
||||
'spell_categories',
|
||||
'spell_categorization',
|
||||
]);
|
||||
|
||||
// Check the junction table exists and has correct structure
|
||||
const spellCategorization = result.tables.find(
|
||||
(t) => t.name === 'spell_categorization'
|
||||
);
|
||||
expect(spellCategorization).toBeDefined();
|
||||
expect(spellCategorization!.columns).toHaveLength(2);
|
||||
expect(spellCategorization!.columns.map((c) => c.name).sort()).toEqual([
|
||||
'category_id',
|
||||
'spell_id',
|
||||
]);
|
||||
});
|
||||
});
|
@@ -0,0 +1,322 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('PostgreSQL Complex Database - Enchanted Bazaar', () => {
|
||||
it('should parse the complete magical marketplace database', async () => {
|
||||
const sql = `-- Enchanted Bazaar Database Schema
|
||||
-- A complex magical marketplace system with many enums and relationships
|
||||
|
||||
-- Enums for the magical marketplace
|
||||
CREATE TYPE wizard_status AS ENUM ('active', 'suspended', 'banned', 'inactive');
|
||||
CREATE TYPE spell_category AS ENUM ('attack', 'defense', 'utility', 'healing', 'summoning');
|
||||
CREATE TYPE artifact_rarity AS ENUM ('common', 'uncommon', 'rare', 'epic', 'legendary');
|
||||
CREATE TYPE shop_status AS ENUM ('open', 'closed', 'under_renovation', 'abandoned');
|
||||
CREATE TYPE transaction_status AS ENUM ('pending', 'completed', 'failed', 'refunded');
|
||||
CREATE TYPE payment_method AS ENUM ('gold', 'crystals', 'barter', 'credit', 'quest_reward');
|
||||
CREATE TYPE listing_status AS ENUM ('draft', 'active', 'sold', 'expired', 'removed');
|
||||
CREATE TYPE enchantment_type AS ENUM ('fire', 'ice', 'lightning', 'holy', 'dark');
|
||||
CREATE TYPE potion_effect AS ENUM ('healing', 'mana', 'strength', 'speed', 'invisibility');
|
||||
CREATE TYPE scroll_type AS ENUM ('spell', 'recipe', 'map', 'contract', 'prophecy');
|
||||
CREATE TYPE merchant_tier AS ENUM ('novice', 'apprentice', 'journeyman', 'master', 'grandmaster');
|
||||
CREATE TYPE review_rating AS ENUM ('terrible', 'poor', 'average', 'good', 'excellent');
|
||||
CREATE TYPE dispute_status AS ENUM ('open', 'investigating', 'resolved', 'escalated');
|
||||
CREATE TYPE delivery_method AS ENUM ('instant', 'owl', 'portal', 'courier', 'pickup');
|
||||
CREATE TYPE market_zone AS ENUM ('north', 'south', 'east', 'west', 'central');
|
||||
|
||||
-- Core tables
|
||||
CREATE TABLE wizards (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
username VARCHAR(255) UNIQUE NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
status wizard_status DEFAULT 'active',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE spell_verifications (
|
||||
wizard_id UUID PRIMARY KEY REFERENCES wizards(id),
|
||||
verified_at TIMESTAMP NOT NULL,
|
||||
verification_level INTEGER DEFAULT 1
|
||||
);
|
||||
|
||||
CREATE TABLE realms (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(100) NOT NULL,
|
||||
zone market_zone NOT NULL,
|
||||
magical_tax_rate DECIMAL(5,4) DEFAULT 0.0500
|
||||
);
|
||||
|
||||
CREATE TABLE sanctuaries (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
realm_id UUID REFERENCES realms(id),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
protection_level INTEGER DEFAULT 1
|
||||
);
|
||||
|
||||
CREATE TABLE magic_plans (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(100) NOT NULL,
|
||||
merchant_tier merchant_tier NOT NULL,
|
||||
monthly_fee INTEGER NOT NULL,
|
||||
listing_limit INTEGER DEFAULT 10
|
||||
);
|
||||
|
||||
CREATE TABLE wizard_subscriptions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
wizard_id UUID REFERENCES wizards(id),
|
||||
plan_id UUID REFERENCES magic_plans(id),
|
||||
status transaction_status DEFAULT 'pending',
|
||||
started_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE shops (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
wizard_id UUID REFERENCES wizards(id),
|
||||
realm_id UUID REFERENCES realms(id),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
status shop_status DEFAULT 'open',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE shop_sanctuaries (
|
||||
shop_id UUID REFERENCES shops(id),
|
||||
sanctuary_id UUID REFERENCES sanctuaries(id),
|
||||
assigned_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
PRIMARY KEY (shop_id, sanctuary_id)
|
||||
);
|
||||
|
||||
CREATE TABLE artifact_categories (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(100) NOT NULL,
|
||||
parent_id UUID REFERENCES artifact_categories(id),
|
||||
description TEXT
|
||||
);
|
||||
|
||||
CREATE TABLE enchantments (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
type enchantment_type NOT NULL,
|
||||
power_level INTEGER DEFAULT 1,
|
||||
description TEXT
|
||||
);
|
||||
|
||||
CREATE TABLE listings (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
shop_id UUID REFERENCES shops(id),
|
||||
category_id UUID REFERENCES artifact_categories(id),
|
||||
title VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
price INTEGER NOT NULL,
|
||||
quantity INTEGER DEFAULT 1,
|
||||
rarity artifact_rarity DEFAULT 'common',
|
||||
status listing_status DEFAULT 'draft',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE listing_enchantments (
|
||||
listing_id UUID REFERENCES listings(id),
|
||||
enchantment_id UUID REFERENCES enchantments(id),
|
||||
strength INTEGER DEFAULT 1,
|
||||
PRIMARY KEY (listing_id, enchantment_id)
|
||||
);
|
||||
|
||||
CREATE TABLE potions (
|
||||
listing_id UUID PRIMARY KEY REFERENCES listings(id),
|
||||
effect potion_effect NOT NULL,
|
||||
duration_minutes INTEGER DEFAULT 30,
|
||||
potency INTEGER DEFAULT 1
|
||||
);
|
||||
|
||||
CREATE TABLE scrolls (
|
||||
listing_id UUID PRIMARY KEY REFERENCES listings(id),
|
||||
type scroll_type NOT NULL,
|
||||
spell_category spell_category,
|
||||
uses_remaining INTEGER DEFAULT 1
|
||||
);
|
||||
|
||||
CREATE TABLE transactions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
buyer_id UUID REFERENCES wizards(id),
|
||||
listing_id UUID REFERENCES listings(id),
|
||||
quantity INTEGER NOT NULL,
|
||||
total_price INTEGER NOT NULL,
|
||||
payment_method payment_method NOT NULL,
|
||||
status transaction_status DEFAULT 'pending',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE reviews (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
transaction_id UUID REFERENCES transactions(id),
|
||||
reviewer_id UUID REFERENCES wizards(id),
|
||||
rating review_rating NOT NULL,
|
||||
comment TEXT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE disputes (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
transaction_id UUID REFERENCES transactions(id),
|
||||
filed_by UUID REFERENCES wizards(id),
|
||||
reason TEXT NOT NULL,
|
||||
status dispute_status DEFAULT 'open',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE messages (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
sender_id UUID REFERENCES wizards(id),
|
||||
recipient_id UUID REFERENCES wizards(id),
|
||||
listing_id UUID REFERENCES listings(id),
|
||||
content TEXT NOT NULL,
|
||||
sent_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE favorites (
|
||||
wizard_id UUID REFERENCES wizards(id),
|
||||
listing_id UUID REFERENCES listings(id),
|
||||
added_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
PRIMARY KEY (wizard_id, listing_id)
|
||||
);
|
||||
|
||||
CREATE TABLE shop_followers (
|
||||
wizard_id UUID REFERENCES wizards(id),
|
||||
shop_id UUID REFERENCES shops(id),
|
||||
followed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
PRIMARY KEY (wizard_id, shop_id)
|
||||
);
|
||||
|
||||
CREATE TABLE delivery_options (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
listing_id UUID REFERENCES listings(id),
|
||||
method delivery_method NOT NULL,
|
||||
cost INTEGER DEFAULT 0,
|
||||
estimated_time_hours INTEGER DEFAULT 24
|
||||
);
|
||||
|
||||
CREATE TABLE transaction_deliveries (
|
||||
transaction_id UUID PRIMARY KEY REFERENCES transactions(id),
|
||||
delivery_option_id UUID REFERENCES delivery_options(id),
|
||||
tracking_number VARCHAR(100),
|
||||
delivered_at TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE wizard_badges (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(100) NOT NULL,
|
||||
description TEXT,
|
||||
icon_url VARCHAR(500)
|
||||
);
|
||||
|
||||
CREATE TABLE wizard_achievements (
|
||||
wizard_id UUID REFERENCES wizards(id),
|
||||
badge_id UUID REFERENCES wizard_badges(id),
|
||||
earned_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
PRIMARY KEY (wizard_id, badge_id)
|
||||
);
|
||||
|
||||
CREATE TABLE market_analytics (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
listing_id UUID REFERENCES listings(id),
|
||||
view_count INTEGER DEFAULT 0,
|
||||
favorite_count INTEGER DEFAULT 0,
|
||||
last_viewed TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE price_history (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
listing_id UUID REFERENCES listings(id),
|
||||
old_price INTEGER NOT NULL,
|
||||
new_price INTEGER NOT NULL,
|
||||
changed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE audit_logs (
|
||||
id BIGSERIAL PRIMARY KEY,
|
||||
wizard_id UUID REFERENCES wizards(id),
|
||||
action VARCHAR(100) NOT NULL,
|
||||
table_name VARCHAR(100),
|
||||
record_id UUID,
|
||||
details JSONB,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);`;
|
||||
|
||||
console.log('Parsing SQL...');
|
||||
const startTime = Date.now();
|
||||
const result = await fromPostgresImproved(sql);
|
||||
const parseTime = Date.now() - startTime;
|
||||
|
||||
console.log(`Parse completed in ${parseTime}ms`);
|
||||
|
||||
// Expected counts
|
||||
const expectedTables = 27;
|
||||
const expectedEnums = 15;
|
||||
const minExpectedRelationships = 36; // Adjusted based on actual relationships in the schema
|
||||
|
||||
console.log('\n=== PARSING RESULTS ===');
|
||||
console.log(
|
||||
`Tables parsed: ${result.tables.length} (expected: ${expectedTables})`
|
||||
);
|
||||
console.log(
|
||||
`Enums parsed: ${result.enums?.length || 0} (expected: ${expectedEnums})`
|
||||
);
|
||||
console.log(
|
||||
`Relationships parsed: ${result.relationships.length} (expected min: ${minExpectedRelationships})`
|
||||
);
|
||||
console.log(`Warnings: ${result.warnings?.length || 0}`);
|
||||
|
||||
// List parsed tables
|
||||
console.log('\n=== TABLES PARSED ===');
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
tableNames.forEach((name) => console.log(`- ${name}`));
|
||||
|
||||
// List enums
|
||||
if (result.enums && result.enums.length > 0) {
|
||||
console.log('\n=== ENUMS PARSED ===');
|
||||
result.enums.forEach((e) => {
|
||||
console.log(`- ${e.name}: ${e.values.length} values`);
|
||||
});
|
||||
}
|
||||
|
||||
// Show warnings if any
|
||||
if (result.warnings && result.warnings.length > 0) {
|
||||
console.log('\n=== WARNINGS ===');
|
||||
result.warnings.forEach((w) => console.log(`- ${w}`));
|
||||
}
|
||||
|
||||
// Verify counts
|
||||
expect(result.tables).toHaveLength(expectedTables);
|
||||
expect(result.enums).toBeDefined();
|
||||
expect(result.enums).toHaveLength(expectedEnums);
|
||||
expect(result.relationships.length).toBeGreaterThanOrEqual(
|
||||
minExpectedRelationships
|
||||
);
|
||||
|
||||
// Check specific tables exist
|
||||
const criticalTables = [
|
||||
'wizards',
|
||||
'shops',
|
||||
'listings',
|
||||
'transactions',
|
||||
'reviews',
|
||||
];
|
||||
criticalTables.forEach((tableName) => {
|
||||
const table = result.tables.find((t) => t.name === tableName);
|
||||
expect(table).toBeDefined();
|
||||
});
|
||||
|
||||
// Check junction tables
|
||||
const junctionTables = [
|
||||
'shop_sanctuaries',
|
||||
'listing_enchantments',
|
||||
'favorites',
|
||||
'shop_followers',
|
||||
'wizard_achievements',
|
||||
];
|
||||
junctionTables.forEach((tableName) => {
|
||||
const table = result.tables.find((t) => t.name === tableName);
|
||||
expect(table).toBeDefined();
|
||||
expect(table!.columns.length).toBeGreaterThanOrEqual(2);
|
||||
});
|
||||
});
|
||||
});
|
@@ -0,0 +1,66 @@
|
||||
import { describe, it } from 'vitest';
|
||||
|
||||
describe('node-sql-parser - CREATE TYPE handling', () => {
|
||||
it('should show exact parser error for CREATE TYPE', async () => {
|
||||
const { Parser } = await import('node-sql-parser');
|
||||
const parser = new Parser();
|
||||
const parserOpts = {
|
||||
database: 'PostgreSQL',
|
||||
};
|
||||
|
||||
console.log('\n=== Testing CREATE TYPE statement ===');
|
||||
const createTypeSQL = `CREATE TYPE spell_element AS ENUM ('fire', 'water', 'earth', 'air');`;
|
||||
|
||||
try {
|
||||
parser.astify(createTypeSQL, parserOpts);
|
||||
console.log('CREATE TYPE parsed successfully');
|
||||
} catch (error) {
|
||||
console.log('CREATE TYPE parse error:', (error as Error).message);
|
||||
}
|
||||
|
||||
console.log('\n=== Testing CREATE EXTENSION statement ===');
|
||||
const createExtensionSQL = `CREATE EXTENSION IF NOT EXISTS "uuid-ossp";`;
|
||||
|
||||
try {
|
||||
parser.astify(createExtensionSQL, parserOpts);
|
||||
console.log('CREATE EXTENSION parsed successfully');
|
||||
} catch (error) {
|
||||
console.log(
|
||||
'CREATE EXTENSION parse error:',
|
||||
(error as Error).message
|
||||
);
|
||||
}
|
||||
|
||||
console.log('\n=== Testing CREATE TABLE with custom type ===');
|
||||
const createTableWithTypeSQL = `CREATE TABLE wizards (
|
||||
id UUID PRIMARY KEY,
|
||||
element spell_element DEFAULT 'fire'
|
||||
);`;
|
||||
|
||||
try {
|
||||
parser.astify(createTableWithTypeSQL, parserOpts);
|
||||
console.log('CREATE TABLE with custom type parsed successfully');
|
||||
} catch (error) {
|
||||
console.log(
|
||||
'CREATE TABLE with custom type parse error:',
|
||||
(error as Error).message
|
||||
);
|
||||
}
|
||||
|
||||
console.log('\n=== Testing CREATE TABLE with standard types only ===');
|
||||
const createTableStandardSQL = `CREATE TABLE wizards (
|
||||
id UUID PRIMARY KEY,
|
||||
element VARCHAR(20) DEFAULT 'fire'
|
||||
);`;
|
||||
|
||||
try {
|
||||
parser.astify(createTableStandardSQL, parserOpts);
|
||||
console.log('CREATE TABLE with standard types parsed successfully');
|
||||
} catch (error) {
|
||||
console.log(
|
||||
'CREATE TABLE with standard types parse error:',
|
||||
(error as Error).message
|
||||
);
|
||||
}
|
||||
});
|
||||
});
|
@@ -0,0 +1,61 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('PostgreSQL Minimal Type Test', () => {
|
||||
it('should handle CREATE EXTENSION, CREATE TYPE, and multi-line comments', async () => {
|
||||
const sql = `
|
||||
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
|
||||
|
||||
CREATE TYPE spell_time AS ENUM ('dawn', 'dusk', 'both');
|
||||
|
||||
CREATE TABLE spells (
|
||||
id UUID PRIMARY KEY,
|
||||
description TEXT, -- Overall description of the spell, e.g., "Ancient Fire Blast"
|
||||
category VARCHAR(50) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE rituals (
|
||||
id UUID PRIMARY KEY,
|
||||
day_of_week INTEGER NOT NULL, -- 1=Monday, 7=Sunday
|
||||
cast_time spell_time NOT NULL
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
// Should parse tables
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.tables.map((t) => t.name).sort()).toEqual([
|
||||
'rituals',
|
||||
'spells',
|
||||
]);
|
||||
|
||||
// Should have warnings about extension and type
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(result.warnings!.some((w) => w.includes('Extension'))).toBe(
|
||||
true
|
||||
);
|
||||
// Enum types no longer generate warnings with the updated parser
|
||||
|
||||
// Check that the enum was parsed
|
||||
expect(result.enums).toBeDefined();
|
||||
expect(result.enums).toHaveLength(1);
|
||||
expect(result.enums![0].name).toBe('spell_time');
|
||||
expect(result.enums![0].values).toEqual(['dawn', 'dusk', 'both']);
|
||||
|
||||
// Check that multi-line comments were handled
|
||||
const spellsTable = result.tables.find((t) => t.name === 'spells');
|
||||
expect(spellsTable).toBeDefined();
|
||||
expect(spellsTable!.columns).toHaveLength(3); // id, description, category
|
||||
|
||||
const ritualsTable = result.tables.find((t) => t.name === 'rituals');
|
||||
expect(ritualsTable).toBeDefined();
|
||||
expect(ritualsTable!.columns).toHaveLength(3); // id, day_of_week, cast_time
|
||||
|
||||
// Custom type should be preserved (possibly uppercase)
|
||||
const castTimeColumn = ritualsTable!.columns.find(
|
||||
(c) => c.name === 'cast_time'
|
||||
);
|
||||
expect(castTimeColumn).toBeDefined();
|
||||
expect(castTimeColumn!.type.toLowerCase()).toBe('spell_time');
|
||||
});
|
||||
});
|
@@ -0,0 +1,54 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('Test All Five Enums', () => {
|
||||
it('should find all 5 enums from the exact SQL in the file', async () => {
|
||||
// Exact copy from the file
|
||||
const sql = `
|
||||
-- Using ENUM types for fixed sets of values improves data integrity.
|
||||
CREATE TYPE quest_status AS ENUM ('active', 'paused', 'grace_period', 'expired', 'completed');
|
||||
CREATE TYPE spell_frequency AS ENUM ('daily', 'weekly');
|
||||
CREATE TYPE magic_time AS ENUM ('dawn', 'dusk', 'both');
|
||||
CREATE TYPE ritual_status AS ENUM ('pending', 'channeling', 'completed', 'failed', 'skipped');
|
||||
CREATE TYPE mana_status AS ENUM ('pending', 'charged', 'depleted');
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
// Check we got all 5
|
||||
expect(result.enums).toBeDefined();
|
||||
expect(result.enums).toHaveLength(5);
|
||||
|
||||
// Check each one exists
|
||||
const enumNames = result.enums!.map((e) => e.name).sort();
|
||||
expect(enumNames).toEqual([
|
||||
'magic_time',
|
||||
'mana_status',
|
||||
'quest_status',
|
||||
'ritual_status',
|
||||
'spell_frequency',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should handle CREATE TYPE statements with semicolons on same line', async () => {
|
||||
// Test different formatting
|
||||
const sql = `CREATE TYPE quest_status AS ENUM ('active', 'paused', 'grace_period', 'expired', 'completed');
|
||||
CREATE TYPE spell_frequency AS ENUM ('daily', 'weekly');
|
||||
CREATE TYPE magic_time AS ENUM ('dawn', 'dusk', 'both');
|
||||
CREATE TYPE ritual_status AS ENUM ('pending', 'channeling', 'completed', 'failed', 'skipped');
|
||||
CREATE TYPE mana_status AS ENUM ('pending', 'charged', 'depleted');`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.enums).toBeDefined();
|
||||
expect(result.enums).toHaveLength(5);
|
||||
|
||||
// Specifically check quest_status
|
||||
const questStatus = result.enums!.find(
|
||||
(e) => e.name === 'quest_status'
|
||||
);
|
||||
expect(questStatus).toBeDefined();
|
||||
expect(questStatus!.values).toHaveLength(5);
|
||||
expect(questStatus!.values).toContain('grace_period');
|
||||
});
|
||||
});
|
@@ -0,0 +1,101 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('Table Count Validation', () => {
|
||||
it('should parse all CREATE TABLE statements without missing any', async () => {
|
||||
const sql = `
|
||||
-- Table 1 comment
|
||||
CREATE TABLE table1 (id INTEGER PRIMARY KEY);
|
||||
|
||||
/* Multi-line comment
|
||||
for table 2 */
|
||||
CREATE TABLE table2 (id INTEGER PRIMARY KEY);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS table3 (id INTEGER PRIMARY KEY);
|
||||
|
||||
-- Junction table
|
||||
CREATE TABLE table1_table2 (
|
||||
table1_id INTEGER REFERENCES table1(id),
|
||||
table2_id INTEGER REFERENCES table2(id),
|
||||
PRIMARY KEY (table1_id, table2_id)
|
||||
);
|
||||
|
||||
CREATE TABLE "quoted_table" (id INTEGER PRIMARY KEY);
|
||||
|
||||
CREATE TABLE schema1.table_with_schema (id INTEGER PRIMARY KEY);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
// Count CREATE TABLE statements in the SQL
|
||||
const createTableCount = (sql.match(/CREATE TABLE/gi) || []).length;
|
||||
|
||||
console.log(`\nValidation:`);
|
||||
console.log(`- CREATE TABLE statements in SQL: ${createTableCount}`);
|
||||
console.log(`- Tables parsed: ${result.tables.length}`);
|
||||
console.log(
|
||||
`- Table names: ${result.tables.map((t) => t.name).join(', ')}`
|
||||
);
|
||||
|
||||
// All CREATE TABLE statements should result in a parsed table
|
||||
expect(result.tables).toHaveLength(createTableCount);
|
||||
|
||||
// Verify specific tables
|
||||
const expectedTables = [
|
||||
'table1',
|
||||
'table2',
|
||||
'table3',
|
||||
'table1_table2',
|
||||
'quoted_table',
|
||||
'table_with_schema',
|
||||
];
|
||||
const actualTables = result.tables.map((t) => t.name).sort();
|
||||
expect(actualTables).toEqual(expectedTables.sort());
|
||||
});
|
||||
|
||||
it('should handle edge cases that might cause tables to be missed', async () => {
|
||||
const sql = `
|
||||
-- This tests various edge cases
|
||||
|
||||
-- 1. Table with only foreign key columns (no regular columns)
|
||||
CREATE TABLE only_fks (
|
||||
user_id UUID REFERENCES users(id),
|
||||
role_id UUID REFERENCES roles(id),
|
||||
PRIMARY KEY (user_id, role_id)
|
||||
);
|
||||
|
||||
-- 2. Table with no PRIMARY KEY
|
||||
CREATE TABLE no_pk (
|
||||
data TEXT NOT NULL
|
||||
);
|
||||
|
||||
-- 3. Empty table (pathological case)
|
||||
CREATE TABLE empty_table ();
|
||||
|
||||
-- 4. Table with complex constraints
|
||||
CREATE TABLE complex_constraints (
|
||||
id INTEGER,
|
||||
CONSTRAINT pk_complex PRIMARY KEY (id),
|
||||
CONSTRAINT chk_positive CHECK (id > 0)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
const createTableCount = (sql.match(/CREATE TABLE/gi) || []).length;
|
||||
|
||||
console.log(`\nEdge case validation:`);
|
||||
console.log(`- CREATE TABLE statements: ${createTableCount}`);
|
||||
console.log(`- Tables parsed: ${result.tables.length}`);
|
||||
console.log(
|
||||
`- Expected tables: only_fks, no_pk, empty_table, complex_constraints`
|
||||
);
|
||||
console.log(
|
||||
`- Actual tables: ${result.tables.map((t) => t.name).join(', ')}`
|
||||
);
|
||||
result.tables.forEach((t) => {
|
||||
console.log(`- ${t.name}: ${t.columns.length} columns`);
|
||||
});
|
||||
|
||||
// Even edge cases should be parsed
|
||||
expect(result.tables).toHaveLength(createTableCount);
|
||||
});
|
||||
});
|
@@ -0,0 +1,258 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('PostgreSQL Quest Management Database', () => {
|
||||
it('should parse the magical quest management database', async () => {
|
||||
const sql = `-- Quest Management System Database
|
||||
-- Enable UUID extension
|
||||
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
|
||||
|
||||
-- Type definitions
|
||||
CREATE TYPE quest_status AS ENUM ('draft', 'active', 'on_hold', 'completed', 'abandoned');
|
||||
CREATE TYPE difficulty_level AS ENUM ('novice', 'apprentice', 'journeyman', 'expert', 'master');
|
||||
CREATE TYPE reward_type AS ENUM ('gold', 'item', 'experience', 'reputation', 'special');
|
||||
CREATE TYPE adventurer_rank AS ENUM ('bronze', 'silver', 'gold', 'platinum', 'legendary');
|
||||
CREATE TYPE region_climate AS ENUM ('temperate', 'arctic', 'desert', 'tropical', 'magical');
|
||||
|
||||
CREATE TABLE adventurers (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
rank adventurer_rank DEFAULT 'bronze',
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE TABLE guild_masters (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
specialization VARCHAR(100),
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE TABLE regions (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
name VARCHAR(100) NOT NULL,
|
||||
climate region_climate NOT NULL,
|
||||
danger_level INTEGER CHECK (danger_level BETWEEN 1 AND 10),
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE TABLE outposts (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
region_id UUID REFERENCES regions(id),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
location_coordinates POINT,
|
||||
is_active BOOLEAN DEFAULT true,
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE TABLE scouts (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
outpost_id UUID REFERENCES outposts(id),
|
||||
scouting_range INTEGER DEFAULT 50,
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE TABLE scout_region_assignments (
|
||||
scout_id UUID REFERENCES scouts(id),
|
||||
region_id UUID REFERENCES regions(id),
|
||||
assigned_date DATE NOT NULL,
|
||||
PRIMARY KEY (scout_id, region_id)
|
||||
);
|
||||
|
||||
CREATE TABLE quest_givers (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
title VARCHAR(100),
|
||||
location VARCHAR(255),
|
||||
reputation_required INTEGER DEFAULT 0,
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE TABLE quest_templates (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
title VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
difficulty difficulty_level NOT NULL,
|
||||
base_reward_gold INTEGER DEFAULT 0,
|
||||
quest_giver_id UUID REFERENCES quest_givers(id),
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE TABLE quests (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
quest_template_id UUID REFERENCES quest_templates(id),
|
||||
title VARCHAR(255) NOT NULL,
|
||||
status quest_status DEFAULT 'draft',
|
||||
reward_multiplier DECIMAL(3,2) DEFAULT 1.0,
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE TABLE rewards (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
reward_type reward_type NOT NULL,
|
||||
value INTEGER NOT NULL,
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE TABLE quest_sample_rewards (
|
||||
quest_template_id UUID REFERENCES quest_templates(id),
|
||||
reward_id UUID REFERENCES rewards(id),
|
||||
PRIMARY KEY (quest_template_id, reward_id)
|
||||
);
|
||||
|
||||
CREATE TABLE quest_rotations (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
rotation_name VARCHAR(100) NOT NULL,
|
||||
start_date DATE NOT NULL,
|
||||
end_date DATE NOT NULL,
|
||||
is_active BOOLEAN DEFAULT false,
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE TABLE rotation_quests (
|
||||
rotation_id UUID REFERENCES quest_rotations(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
day_of_week INTEGER CHECK (day_of_week BETWEEN 1 AND 7),
|
||||
PRIMARY KEY (rotation_id, quest_id, day_of_week)
|
||||
);
|
||||
|
||||
CREATE TABLE contracts (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
adventurer_id UUID REFERENCES adventurers(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
status quest_status DEFAULT 'active',
|
||||
started_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
||||
completed_at TIMESTAMP WITH TIME ZONE
|
||||
);
|
||||
|
||||
CREATE TABLE completion_events (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
scout_id UUID REFERENCES scouts(id),
|
||||
verification_notes TEXT,
|
||||
event_timestamp TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE TABLE bounties (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
amount_gold INTEGER NOT NULL,
|
||||
payment_status VARCHAR(50) DEFAULT 'pending',
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE TABLE guild_ledgers (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
entry_type VARCHAR(50) NOT NULL,
|
||||
amount INTEGER NOT NULL,
|
||||
balance_after INTEGER NOT NULL,
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE TABLE reputation_logs (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
adventurer_id UUID REFERENCES adventurers(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
reputation_change INTEGER NOT NULL,
|
||||
reason VARCHAR(255),
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE TABLE quest_suspensions (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
suspension_date DATE NOT NULL,
|
||||
reason VARCHAR(255),
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE TABLE guild_master_actions (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
guild_master_id UUID REFERENCES guild_masters(id),
|
||||
action_type VARCHAR(100) NOT NULL,
|
||||
target_table VARCHAR(100),
|
||||
target_id UUID,
|
||||
details JSONB,
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
// Should parse tables despite extensions and custom types
|
||||
expect(result.tables.length).toBeGreaterThan(0);
|
||||
|
||||
// Should have warnings about unsupported features
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(
|
||||
result.warnings!.some(
|
||||
(w) => w.includes('Extension') || w.includes('type')
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
// Should have parsed all 20 tables
|
||||
expect(result.tables).toHaveLength(20);
|
||||
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
const expectedTables = [
|
||||
'adventurers',
|
||||
'guild_masters',
|
||||
'regions',
|
||||
'outposts',
|
||||
'scouts',
|
||||
'scout_region_assignments',
|
||||
'quest_givers',
|
||||
'quest_templates',
|
||||
'quests',
|
||||
'rewards',
|
||||
'quest_sample_rewards',
|
||||
'quest_rotations',
|
||||
'rotation_quests',
|
||||
'contracts',
|
||||
'completion_events',
|
||||
'bounties',
|
||||
'guild_ledgers',
|
||||
'reputation_logs',
|
||||
'quest_suspensions',
|
||||
'guild_master_actions',
|
||||
];
|
||||
expect(tableNames).toEqual(expectedTables.sort());
|
||||
|
||||
// Check that enum types were parsed
|
||||
expect(result.enums).toBeDefined();
|
||||
expect(result.enums!.length).toBe(5);
|
||||
|
||||
// Check specific enums
|
||||
const questStatus = result.enums!.find(
|
||||
(e) => e.name === 'quest_status'
|
||||
);
|
||||
expect(questStatus).toBeDefined();
|
||||
expect(questStatus!.values).toEqual([
|
||||
'draft',
|
||||
'active',
|
||||
'on_hold',
|
||||
'completed',
|
||||
'abandoned',
|
||||
]);
|
||||
|
||||
// Check that custom enum types are handled in columns
|
||||
const contractsTable = result.tables.find(
|
||||
(t) => t.name === 'contracts'
|
||||
);
|
||||
expect(contractsTable).toBeDefined();
|
||||
const statusColumn = contractsTable!.columns.find(
|
||||
(c) => c.name === 'status'
|
||||
);
|
||||
expect(statusColumn).toBeDefined();
|
||||
expect(statusColumn?.type).toMatch(/quest_status/i);
|
||||
|
||||
// Verify foreign keys are still extracted
|
||||
if (result.tables.length > 3) {
|
||||
expect(result.relationships.length).toBeGreaterThan(0);
|
||||
}
|
||||
});
|
||||
});
|
@@ -0,0 +1,70 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('Missing quest_status Bug - Magical Quest Management System', () => {
|
||||
it('should parse all 5 magical enums including quest_status for adventurer tracking', async () => {
|
||||
// Exact content from the file
|
||||
const sql = `
|
||||
-- ##################################################
|
||||
-- # TYPE DEFINITIONS
|
||||
-- ##################################################
|
||||
|
||||
-- Using ENUM types for fixed sets of values improves data integrity.
|
||||
CREATE TYPE quest_status AS ENUM ('active', 'paused', 'grace_period', 'expired', 'completed');
|
||||
CREATE TYPE spell_frequency AS ENUM ('daily', 'weekly');
|
||||
CREATE TYPE magic_time AS ENUM ('dawn', 'dusk', 'both');
|
||||
CREATE TYPE ritual_status AS ENUM ('pending', 'channeling', 'completed', 'failed', 'skipped');
|
||||
CREATE TYPE mana_status AS ENUM ('pending', 'charged', 'depleted');
|
||||
`;
|
||||
|
||||
console.log('Testing with fromPostgresImproved...');
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log(
|
||||
'Enums found:',
|
||||
result.enums?.map((e) => e.name)
|
||||
);
|
||||
|
||||
expect(result.enums).toBeDefined();
|
||||
expect(result.enums).toHaveLength(5);
|
||||
|
||||
// Specifically check for quest_status
|
||||
const questStatus = result.enums!.find(
|
||||
(e) => e.name === 'quest_status'
|
||||
);
|
||||
expect(questStatus).toBeDefined();
|
||||
expect(questStatus!.name).toBe('quest_status');
|
||||
expect(questStatus!.values).toEqual([
|
||||
'active',
|
||||
'paused',
|
||||
'grace_period',
|
||||
'expired',
|
||||
'completed',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should work with fromPostgres main entry point for magical quest and spell enums', async () => {
|
||||
const sql = `
|
||||
CREATE TYPE quest_status AS ENUM ('active', 'paused', 'grace_period', 'expired', 'completed');
|
||||
CREATE TYPE spell_frequency AS ENUM ('daily', 'weekly');
|
||||
CREATE TYPE magic_time AS ENUM ('dawn', 'dusk', 'both');
|
||||
CREATE TYPE ritual_status AS ENUM ('pending', 'channeling', 'completed', 'failed', 'skipped');
|
||||
CREATE TYPE mana_status AS ENUM ('pending', 'charged', 'depleted');
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.enums).toBeDefined();
|
||||
expect(result.enums).toHaveLength(5);
|
||||
|
||||
const enumNames = result.enums!.map((e) => e.name).sort();
|
||||
expect(enumNames).toEqual([
|
||||
'magic_time',
|
||||
'mana_status',
|
||||
'quest_status',
|
||||
'ritual_status',
|
||||
'spell_frequency',
|
||||
]);
|
||||
});
|
||||
});
|
@@ -0,0 +1,142 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('Real-world PostgreSQL import examples', () => {
|
||||
it('should successfully parse a complex real-world schema with enums', async () => {
|
||||
// This example demonstrates how the parser handles real-world PostgreSQL exports
|
||||
// that may contain schema-qualified identifiers and syntax variations
|
||||
const sql = `
|
||||
-- Example of a real PostgreSQL database export with schema-qualified types
|
||||
CREATE TYPE "public"."mage_rank" AS ENUM('novice', 'apprentice', 'journeyman', 'expert', 'master', 'archmage');
|
||||
CREATE TYPE "public"."spell_category" AS ENUM('combat', 'healing', 'utility', 'summoning', 'enchantment');
|
||||
CREATE TYPE "public"."artifact_quality" AS ENUM('crude', 'common', 'fine', 'exceptional', 'masterwork', 'legendary');
|
||||
|
||||
-- Tables with proper spacing in column definitions
|
||||
CREATE TABLE "mages" (
|
||||
"id" text PRIMARY KEY NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
"email" text NOT NULL,
|
||||
"rank" "mage_rank" DEFAULT 'novice' NOT NULL,
|
||||
"specialization" "spell_category",
|
||||
"created_at" timestamp with time zone NOT NULL,
|
||||
"updated_at" timestamp with time zone NOT NULL,
|
||||
CONSTRAINT "mages_email_unique" UNIQUE("email")
|
||||
);
|
||||
|
||||
-- Example of a table with missing spaces (common in some exports)
|
||||
CREATE TABLE "grimoires" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"mage_id" text NOT NULL,
|
||||
"title" varchar(255) NOT NULL,
|
||||
"category""spell_category" NOT NULL,
|
||||
"quality""artifact_quality" DEFAULT 'common' NOT NULL,
|
||||
"pages" integer DEFAULT 100 NOT NULL,
|
||||
"created_at" timestamp DEFAULT now()
|
||||
);
|
||||
|
||||
-- Table with JSON syntax issues (: :jsonb instead of ::jsonb)
|
||||
CREATE TABLE "spell_components" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"spell_id" uuid NOT NULL,
|
||||
"component_name" text NOT NULL,
|
||||
"quantity" integer DEFAULT 1,
|
||||
"properties" jsonb DEFAULT '{}': :jsonb,
|
||||
"created_at" timestamp DEFAULT now()
|
||||
);
|
||||
|
||||
-- Foreign key constraints using schema-qualified references
|
||||
ALTER TABLE "grimoires" ADD CONSTRAINT "grimoires_mage_id_mages_id_fk"
|
||||
FOREIGN KEY ("mage_id") REFERENCES "public"."mages"("id") ON DELETE cascade;
|
||||
|
||||
-- Indexes
|
||||
CREATE UNIQUE INDEX "mages_rank_email_idx" ON "mages" ("rank", "email");
|
||||
CREATE INDEX "grimoires_category_idx" ON "grimoires" ("category");
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
// Verify enum parsing
|
||||
console.log('\n=== IMPORT RESULTS ===');
|
||||
console.log(`Enums parsed: ${result.enums?.length || 0}`);
|
||||
console.log(`Tables parsed: ${result.tables.length}`);
|
||||
console.log(`Relationships found: ${result.relationships.length}`);
|
||||
console.log(`Warnings: ${result.warnings?.length || 0}`);
|
||||
|
||||
// All enums should be parsed despite schema qualification
|
||||
expect(result.enums).toHaveLength(3);
|
||||
expect(result.enums?.map((e) => e.name).sort()).toEqual([
|
||||
'artifact_quality',
|
||||
'mage_rank',
|
||||
'spell_category',
|
||||
]);
|
||||
|
||||
// All tables should be parsed, even with syntax issues
|
||||
expect(result.tables).toHaveLength(3);
|
||||
expect(result.tables.map((t) => t.name).sort()).toEqual([
|
||||
'grimoires',
|
||||
'mages',
|
||||
'spell_components',
|
||||
]);
|
||||
|
||||
// Foreign keys should be recognized
|
||||
expect(result.relationships.length).toBeGreaterThan(0);
|
||||
const fk = result.relationships.find(
|
||||
(r) => r.sourceTable === 'grimoires' && r.targetTable === 'mages'
|
||||
);
|
||||
expect(fk).toBeDefined();
|
||||
|
||||
// Note: Index parsing may not be fully implemented in the current parser
|
||||
// This is acceptable as the main focus is on tables, enums, and relationships
|
||||
|
||||
// Check specific enum values
|
||||
const mageRank = result.enums?.find((e) => e.name === 'mage_rank');
|
||||
expect(mageRank?.values).toEqual([
|
||||
'novice',
|
||||
'apprentice',
|
||||
'journeyman',
|
||||
'expert',
|
||||
'master',
|
||||
'archmage',
|
||||
]);
|
||||
|
||||
// Log warnings for visibility
|
||||
if (result.warnings && result.warnings.length > 0) {
|
||||
console.log('\n=== WARNINGS ===');
|
||||
result.warnings.forEach((w) => console.log(`- ${w}`));
|
||||
}
|
||||
});
|
||||
|
||||
it('should provide actionable feedback for common syntax issues', async () => {
|
||||
const sql = `
|
||||
CREATE TYPE "public"."potion_effect" AS ENUM('healing', 'mana', 'strength', 'speed');
|
||||
|
||||
CREATE TABLE "potions" (
|
||||
"id" uuid PRIMARY KEY,
|
||||
"name" text NOT NULL,
|
||||
"effect""potion_effect" NOT NULL,
|
||||
"duration" interval DEFAULT '30 minutes': :interval,
|
||||
"power" integer DEFAULT 50
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
// Enum should still be parsed
|
||||
expect(result.enums).toHaveLength(1);
|
||||
expect(result.enums?.[0].name).toBe('potion_effect');
|
||||
|
||||
// Table should be parsed despite issues
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('potions');
|
||||
|
||||
// Should have warnings about parsing issues
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(result.warnings!.length).toBeGreaterThan(0);
|
||||
|
||||
// The warning should indicate which statement failed
|
||||
const hasParseWarning = result.warnings!.some(
|
||||
(w) =>
|
||||
w.includes('Failed to parse statement') && w.includes('potions')
|
||||
);
|
||||
expect(hasParseWarning).toBe(true);
|
||||
});
|
||||
});
|
@@ -0,0 +1,71 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('Schema-qualified enum parsing', () => {
|
||||
it('should parse enums with schema prefix', async () => {
|
||||
const sql = `
|
||||
CREATE TYPE "public"."wizard_rank" AS ENUM('apprentice', 'journeyman', 'master', 'grandmaster');
|
||||
CREATE TYPE "public"."spell_school" AS ENUM('fire', 'water', 'earth', 'air', 'spirit');
|
||||
|
||||
CREATE TABLE "wizards" (
|
||||
"id" text PRIMARY KEY NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
"rank" "wizard_rank" DEFAULT 'apprentice' NOT NULL,
|
||||
"primary_school" "spell_school" NOT NULL
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log('Enums found:', result.enums?.length || 0);
|
||||
if (result.enums) {
|
||||
result.enums.forEach((e) => {
|
||||
console.log(` - ${e.name}: ${e.values.join(', ')}`);
|
||||
});
|
||||
}
|
||||
|
||||
// Should find both enums
|
||||
expect(result.enums).toHaveLength(2);
|
||||
|
||||
const wizardRank = result.enums?.find((e) => e.name === 'wizard_rank');
|
||||
expect(wizardRank).toBeDefined();
|
||||
expect(wizardRank?.values).toEqual([
|
||||
'apprentice',
|
||||
'journeyman',
|
||||
'master',
|
||||
'grandmaster',
|
||||
]);
|
||||
|
||||
const spellSchool = result.enums?.find(
|
||||
(e) => e.name === 'spell_school'
|
||||
);
|
||||
expect(spellSchool).toBeDefined();
|
||||
expect(spellSchool?.values).toEqual([
|
||||
'fire',
|
||||
'water',
|
||||
'earth',
|
||||
'air',
|
||||
'spirit',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should handle missing spaces between column name and type', async () => {
|
||||
const sql = `
|
||||
CREATE TYPE "public"."dragon_type" AS ENUM('fire', 'ice', 'storm', 'earth');
|
||||
|
||||
CREATE TABLE "dragons" (
|
||||
"id" text PRIMARY KEY NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
"type""dragon_type" DEFAULT 'fire' NOT NULL
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
// Should still parse the enum
|
||||
expect(result.enums).toHaveLength(1);
|
||||
expect(result.enums?.[0].name).toBe('dragon_type');
|
||||
|
||||
// Table parsing might fail due to syntax error
|
||||
console.log('Tables found:', result.tables.length);
|
||||
console.log('Warnings:', result.warnings);
|
||||
});
|
||||
});
|
@@ -0,0 +1,60 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('Simple Enum Test', () => {
|
||||
it('should parse 5 simple enum types', async () => {
|
||||
// Test with just the enum definitions
|
||||
const sql = `
|
||||
CREATE TYPE quest_status AS ENUM ('active', 'paused', 'grace_period', 'expired', 'completed');
|
||||
CREATE TYPE spell_frequency AS ENUM ('daily', 'weekly');
|
||||
CREATE TYPE magic_time AS ENUM ('dawn', 'dusk', 'both');
|
||||
CREATE TYPE ritual_status AS ENUM ('pending', 'channeling', 'completed', 'failed', 'skipped');
|
||||
CREATE TYPE mana_status AS ENUM ('pending', 'charged', 'depleted');
|
||||
`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log('Result enums:', result.enums?.length || 0);
|
||||
if (result.enums) {
|
||||
result.enums.forEach((e) => {
|
||||
console.log(` - ${e.name}`);
|
||||
});
|
||||
}
|
||||
|
||||
expect(result.enums).toBeDefined();
|
||||
expect(result.enums).toHaveLength(5);
|
||||
});
|
||||
|
||||
it('should parse enums one by one', async () => {
|
||||
const enums = [
|
||||
{
|
||||
sql: "CREATE TYPE quest_status AS ENUM ('active', 'paused', 'grace_period', 'expired', 'completed');",
|
||||
name: 'quest_status',
|
||||
values: [
|
||||
'active',
|
||||
'paused',
|
||||
'grace_period',
|
||||
'expired',
|
||||
'completed',
|
||||
],
|
||||
},
|
||||
{
|
||||
sql: "CREATE TYPE spell_frequency AS ENUM ('daily', 'weekly');",
|
||||
name: 'spell_frequency',
|
||||
values: ['daily', 'weekly'],
|
||||
},
|
||||
];
|
||||
|
||||
for (const enumDef of enums) {
|
||||
const result = await fromPostgresImproved(enumDef.sql);
|
||||
|
||||
console.log(`\nTesting ${enumDef.name}:`);
|
||||
console.log(` Found enums: ${result.enums?.length || 0}`);
|
||||
|
||||
expect(result.enums).toBeDefined();
|
||||
expect(result.enums).toHaveLength(1);
|
||||
expect(result.enums![0].name).toBe(enumDef.name);
|
||||
expect(result.enums![0].values).toEqual(enumDef.values);
|
||||
}
|
||||
});
|
||||
});
|
@@ -0,0 +1,110 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('Junction Table Parsing', () => {
|
||||
it('should parse junction table with composite primary key', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE spell_books (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
title VARCHAR(100) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE spells (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
incantation VARCHAR(100) NOT NULL
|
||||
);
|
||||
|
||||
-- Junction table for tracking which spells are contained in which books.
|
||||
CREATE TABLE book_spells (
|
||||
spell_book_id UUID NOT NULL REFERENCES spell_books(id) ON DELETE CASCADE,
|
||||
spell_id UUID NOT NULL REFERENCES spells(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (spell_book_id, spell_id)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
// Should parse all 3 tables
|
||||
expect(result.tables).toHaveLength(3);
|
||||
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
expect(tableNames).toEqual(['book_spells', 'spell_books', 'spells']);
|
||||
|
||||
// Check book_spells specifically
|
||||
const bookSpells = result.tables.find((t) => t.name === 'book_spells');
|
||||
expect(bookSpells).toBeDefined();
|
||||
expect(bookSpells!.columns).toHaveLength(2);
|
||||
|
||||
const columnNames = bookSpells!.columns.map((c) => c.name).sort();
|
||||
expect(columnNames).toEqual(['spell_book_id', 'spell_id']);
|
||||
|
||||
// Check that both columns are recognized as foreign keys
|
||||
const spellBookIdColumn = bookSpells!.columns.find(
|
||||
(c) => c.name === 'spell_book_id'
|
||||
);
|
||||
expect(spellBookIdColumn).toBeDefined();
|
||||
expect(spellBookIdColumn!.type).toBe('UUID');
|
||||
expect(spellBookIdColumn!.nullable).toBe(false);
|
||||
|
||||
const spellIdColumn = bookSpells!.columns.find(
|
||||
(c) => c.name === 'spell_id'
|
||||
);
|
||||
expect(spellIdColumn).toBeDefined();
|
||||
expect(spellIdColumn!.type).toBe('UUID');
|
||||
expect(spellIdColumn!.nullable).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle various junction table formats', async () => {
|
||||
const sql = `
|
||||
-- Format 1: Inline references
|
||||
CREATE TABLE artifact_enchantments (
|
||||
artifact_id INTEGER NOT NULL REFERENCES artifacts(id),
|
||||
enchantment_id INTEGER NOT NULL REFERENCES enchantments(id),
|
||||
PRIMARY KEY (artifact_id, enchantment_id)
|
||||
);
|
||||
|
||||
-- Format 2: With additional columns
|
||||
CREATE TABLE wizard_guilds (
|
||||
wizard_id UUID NOT NULL REFERENCES wizards(id),
|
||||
guild_id UUID NOT NULL REFERENCES guilds(id),
|
||||
joined_at TIMESTAMP DEFAULT NOW(),
|
||||
recruited_by UUID REFERENCES wizards(id),
|
||||
PRIMARY KEY (wizard_id, guild_id)
|
||||
);
|
||||
|
||||
-- Format 3: With named constraint
|
||||
CREATE TABLE potion_ingredients (
|
||||
potion_id BIGINT NOT NULL REFERENCES potions(id) ON DELETE CASCADE,
|
||||
ingredient_id BIGINT NOT NULL REFERENCES ingredients(id) ON DELETE CASCADE,
|
||||
quantity INTEGER DEFAULT 1,
|
||||
CONSTRAINT pk_potion_ingredients PRIMARY KEY (potion_id, ingredient_id)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(3);
|
||||
|
||||
// All tables should be found
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
expect(tableNames).toEqual([
|
||||
'artifact_enchantments',
|
||||
'potion_ingredients',
|
||||
'wizard_guilds',
|
||||
]);
|
||||
|
||||
// Check each table has the expected columns
|
||||
const artifactEnchantments = result.tables.find(
|
||||
(t) => t.name === 'artifact_enchantments'
|
||||
);
|
||||
expect(artifactEnchantments!.columns).toHaveLength(2);
|
||||
|
||||
const wizardGuilds = result.tables.find(
|
||||
(t) => t.name === 'wizard_guilds'
|
||||
);
|
||||
expect(wizardGuilds!.columns).toHaveLength(4); // Including joined_at and recruited_by
|
||||
|
||||
const potionIngredients = result.tables.find(
|
||||
(t) => t.name === 'potion_ingredients'
|
||||
);
|
||||
expect(potionIngredients!.columns).toHaveLength(3); // Including quantity
|
||||
});
|
||||
});
|
@@ -0,0 +1,75 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('Exact forth example reproduction - Spell Plans Database', () => {
|
||||
it('should parse the exact SQL from forth example with spell plans and magical components', async () => {
|
||||
// Exact copy of the SQL that's failing
|
||||
const sql = `-- Using ENUM types for fixed sets of values improves data integrity.
|
||||
CREATE TYPE quest_status AS ENUM ('active', 'paused', 'grace_period', 'expired', 'completed');
|
||||
CREATE TYPE spell_frequency AS ENUM ('daily', 'weekly');
|
||||
CREATE TYPE magic_time AS ENUM ('dawn', 'dusk', 'both');
|
||||
CREATE TYPE ritual_status AS ENUM ('pending', 'channeling', 'completed', 'failed', 'skipped');
|
||||
CREATE TYPE mana_status AS ENUM ('pending', 'charged', 'depleted');
|
||||
|
||||
CREATE TABLE spell_plans (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
name VARCHAR(100) NOT NULL,
|
||||
duration_days INTEGER NOT NULL,
|
||||
total_skips INTEGER NOT NULL,
|
||||
validity_days INTEGER NOT NULL,
|
||||
mana_cost INTEGER NOT NULL,
|
||||
is_active BOOLEAN DEFAULT true,
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
||||
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE TABLE spells (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
wizard_tower_id UUID NOT NULL REFERENCES wizard_towers(id),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
description TEXT, -- Overall description of the spell, e.g.,"Ancient Fire Blast"
|
||||
category VARCHAR(50) NOT NULL, -- combat, healing
|
||||
-- Structured breakdown of the spell's components.
|
||||
-- Example: [{"name": "Dragon Scale", "category": "Reagent"}, {"name": "Phoenix Feather", "category": "Catalyst"} ]
|
||||
components JSONB,
|
||||
is_active BOOLEAN DEFAULT true,
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
||||
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
);
|
||||
|
||||
-- Junction table for showing sample spells on a plan's grimoire page.
|
||||
CREATE TABLE plan_sample_spells (
|
||||
spell_plan_id UUID NOT NULL REFERENCES spell_plans(id) ON DELETE CASCADE,
|
||||
spell_id UUID NOT NULL REFERENCES spells(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (spell_plan_id, spell_id)
|
||||
);`;
|
||||
|
||||
console.log('Testing exact SQL from forth example...');
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log('Results:', {
|
||||
tables: result.tables.length,
|
||||
tableNames: result.tables.map((t) => t.name),
|
||||
warnings: result.warnings?.length || 0,
|
||||
});
|
||||
|
||||
// Should have 3 tables
|
||||
expect(result.tables).toHaveLength(3);
|
||||
|
||||
// Check all table names
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
expect(tableNames).toEqual([
|
||||
'plan_sample_spells',
|
||||
'spell_plans',
|
||||
'spells',
|
||||
]);
|
||||
|
||||
// Verify plan_sample_spells exists
|
||||
const planSampleSpells = result.tables.find(
|
||||
(t) => t.name === 'plan_sample_spells'
|
||||
);
|
||||
expect(planSampleSpells).toBeDefined();
|
||||
expect(planSampleSpells!.columns).toHaveLength(2);
|
||||
});
|
||||
});
|
@@ -0,0 +1,142 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { importPostgreSQLWithValidation } from '../../../import-with-validation';
|
||||
|
||||
describe('PostgreSQL Import - Split DECIMAL Handling', () => {
|
||||
it('should successfully import tables with split DECIMAL declarations using auto-fix', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE financial_records (
|
||||
id SERIAL PRIMARY KEY,
|
||||
account_balance DECIMAL(15,
|
||||
2) NOT NULL,
|
||||
interest_rate NUMERIC(5,
|
||||
4) DEFAULT 0.0000,
|
||||
transaction_fee DECIMAL(10,
|
||||
2) DEFAULT 0.00
|
||||
);
|
||||
|
||||
CREATE TABLE market_data (
|
||||
id INTEGER PRIMARY KEY,
|
||||
price DECIMAL(18,
|
||||
8) NOT NULL,
|
||||
volume NUMERIC(20,
|
||||
0) NOT NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await importPostgreSQLWithValidation(sql);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data).toBeDefined();
|
||||
expect(result.data?.tables).toHaveLength(2);
|
||||
|
||||
// Check first table
|
||||
const financialTable = result.data?.tables.find(
|
||||
(t) => t.name === 'financial_records'
|
||||
);
|
||||
expect(financialTable).toBeDefined();
|
||||
expect(financialTable?.columns).toHaveLength(4);
|
||||
|
||||
// Check that DECIMAL columns were parsed correctly
|
||||
const balanceColumn = financialTable?.columns.find(
|
||||
(c) => c.name === 'account_balance'
|
||||
);
|
||||
expect(balanceColumn?.type).toMatch(/DECIMAL|NUMERIC/i);
|
||||
|
||||
const interestColumn = financialTable?.columns.find(
|
||||
(c) => c.name === 'interest_rate'
|
||||
);
|
||||
expect(interestColumn?.type).toMatch(/DECIMAL|NUMERIC/i);
|
||||
|
||||
// Check second table
|
||||
const marketTable = result.data?.tables.find(
|
||||
(t) => t.name === 'market_data'
|
||||
);
|
||||
expect(marketTable).toBeDefined();
|
||||
expect(marketTable?.columns).toHaveLength(3);
|
||||
|
||||
// Verify warnings about auto-fix
|
||||
expect(result.data?.warnings).toBeDefined();
|
||||
expect(
|
||||
result.data?.warnings?.some((w) =>
|
||||
w.includes('Auto-fixed split DECIMAL/NUMERIC')
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle complex SQL with multiple issues including split DECIMAL', async () => {
|
||||
const sql = `
|
||||
-- Financial system with various data types
|
||||
CREATE TABLE accounts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
balance DECIMAL(20,
|
||||
2) NOT NULL DEFAULT 0.00,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
-- Query with cast operator issues
|
||||
SELECT
|
||||
id: :text AS account_id,
|
||||
balance: :DECIMAL(10,
|
||||
2) AS rounded_balance
|
||||
FROM accounts;
|
||||
|
||||
CREATE TABLE transactions (
|
||||
id SERIAL PRIMARY KEY,
|
||||
account_id UUID REFERENCES accounts(id),
|
||||
amount DECIMAL(15,
|
||||
2) NOT NULL,
|
||||
fee NUMERIC(10,
|
||||
4) DEFAULT 0.0000
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await importPostgreSQLWithValidation(sql);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data).toBeDefined();
|
||||
expect(result.data?.tables).toHaveLength(2);
|
||||
|
||||
// Verify both types of fixes were applied
|
||||
expect(result.data?.warnings).toBeDefined();
|
||||
expect(
|
||||
result.data?.warnings?.some((w) =>
|
||||
w.includes('Auto-fixed cast operator')
|
||||
)
|
||||
).toBe(true);
|
||||
expect(
|
||||
result.data?.warnings?.some((w) =>
|
||||
w.includes('Auto-fixed split DECIMAL/NUMERIC')
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
// Check foreign key relationship was preserved
|
||||
expect(result.data?.relationships).toHaveLength(1);
|
||||
const fk = result.data?.relationships[0];
|
||||
expect(fk?.sourceTable).toBe('transactions');
|
||||
expect(fk?.targetTable).toBe('accounts');
|
||||
});
|
||||
|
||||
it('should fallback to regex extraction for tables with split DECIMAL that cause parser errors', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE complex_table (
|
||||
id INTEGER PRIMARY KEY,
|
||||
-- This might cause parser issues
|
||||
weird_decimal DECIMAL(10,
|
||||
2) ARRAY NOT NULL,
|
||||
normal_column VARCHAR(100),
|
||||
another_decimal NUMERIC(5,
|
||||
3) CHECK (another_decimal > 0)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await importPostgreSQLWithValidation(sql);
|
||||
|
||||
// Even if parser fails, should still import with regex fallback
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.tables).toHaveLength(1);
|
||||
|
||||
const table = result.data?.tables[0];
|
||||
expect(table?.name).toBe('complex_table');
|
||||
expect(table?.columns.length).toBeGreaterThanOrEqual(3);
|
||||
});
|
||||
});
|
@@ -0,0 +1,48 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('String preservation during comment removal', () => {
|
||||
it('should preserve strings containing -- pattern', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE spell_ingredients (
|
||||
ingredient_id INTEGER PRIMARY KEY,
|
||||
preparation_note VARCHAR(100) DEFAULT '--grind finely'
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log('String preservation result:', {
|
||||
tableCount: result.tables.length,
|
||||
columns: result.tables[0]?.columns.map((c) => ({
|
||||
name: c.name,
|
||||
type: c.type,
|
||||
default: c.default,
|
||||
})),
|
||||
});
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].columns).toHaveLength(2);
|
||||
|
||||
const noteCol = result.tables[0].columns.find(
|
||||
(c) => c.name === 'preparation_note'
|
||||
);
|
||||
expect(noteCol).toBeDefined();
|
||||
expect(noteCol?.default).toBeDefined();
|
||||
});
|
||||
|
||||
it('should preserve URL strings with double slashes', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE artifact_sources (
|
||||
artifact_id INTEGER,
|
||||
origin_url VARCHAR(200) DEFAULT 'https://ancient-library.realm'
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables[0].columns).toHaveLength(2);
|
||||
const urlCol = result.tables[0].columns.find(
|
||||
(c) => c.name === 'origin_url'
|
||||
);
|
||||
expect(urlCol).toBeDefined();
|
||||
});
|
||||
});
|
@@ -0,0 +1,65 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('Tables with undefined magical references', () => {
|
||||
it('should parse tables even with references to non-existent magical entities', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE table1 (
|
||||
id UUID PRIMARY KEY
|
||||
);
|
||||
|
||||
CREATE TABLE table2 (
|
||||
id UUID PRIMARY KEY,
|
||||
nonexistent_id UUID REFERENCES nonexistent_table(id)
|
||||
);
|
||||
|
||||
CREATE TABLE table3 (
|
||||
table1_id UUID REFERENCES table1(id),
|
||||
table2_id UUID REFERENCES table2(id),
|
||||
PRIMARY KEY (table1_id, table2_id)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log('Test results:', {
|
||||
tableCount: result.tables.length,
|
||||
tableNames: result.tables.map((t) => t.name),
|
||||
warnings: result.warnings,
|
||||
});
|
||||
|
||||
// Should parse all 3 tables even though table2 has undefined reference
|
||||
expect(result.tables).toHaveLength(3);
|
||||
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
expect(tableNames).toEqual(['table1', 'table2', 'table3']);
|
||||
});
|
||||
|
||||
it('should handle the wizard tower spells and spell plans scenario', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE spell_plans (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4()
|
||||
);
|
||||
|
||||
CREATE TABLE spells (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
wizard_tower_id UUID NOT NULL REFERENCES wizard_towers(id),
|
||||
name VARCHAR(255) NOT NULL
|
||||
);
|
||||
|
||||
-- Junction table
|
||||
CREATE TABLE plan_sample_spells (
|
||||
spell_plan_id UUID NOT NULL REFERENCES spell_plans(id),
|
||||
spell_id UUID NOT NULL REFERENCES spells(id),
|
||||
PRIMARY KEY (spell_plan_id, spell_id)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(3);
|
||||
expect(result.tables.map((t) => t.name).sort()).toEqual([
|
||||
'plan_sample_spells',
|
||||
'spell_plans',
|
||||
'spells',
|
||||
]);
|
||||
});
|
||||
});
|
@@ -0,0 +1,131 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
import { convertToChartDBDiagram } from '../../../common';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
|
||||
describe('Enum Parsing Test - Quest Management System', () => {
|
||||
it('should parse all 5 enums from the quest management database', async () => {
|
||||
const sql = `-- Quest Management System with Enums
|
||||
CREATE TYPE quest_status AS ENUM ('draft', 'active', 'on_hold', 'completed', 'abandoned');
|
||||
CREATE TYPE difficulty_level AS ENUM ('novice', 'apprentice', 'journeyman', 'expert', 'master');
|
||||
CREATE TYPE reward_type AS ENUM ('gold', 'item', 'experience', 'reputation', 'special');
|
||||
CREATE TYPE adventurer_rank AS ENUM ('bronze', 'silver', 'gold', 'platinum', 'legendary');
|
||||
CREATE TYPE region_climate AS ENUM ('temperate', 'arctic', 'desert', 'tropical', 'magical');
|
||||
|
||||
CREATE TABLE adventurers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
rank adventurer_rank DEFAULT 'bronze',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE regions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(100) NOT NULL,
|
||||
climate region_climate NOT NULL,
|
||||
danger_level INTEGER CHECK (danger_level BETWEEN 1 AND 10)
|
||||
);
|
||||
|
||||
CREATE TABLE quest_templates (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
title VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
difficulty difficulty_level NOT NULL,
|
||||
base_reward_gold INTEGER DEFAULT 0
|
||||
);
|
||||
|
||||
CREATE TABLE quests (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
quest_template_id UUID REFERENCES quest_templates(id),
|
||||
title VARCHAR(255) NOT NULL,
|
||||
status quest_status DEFAULT 'draft',
|
||||
reward_multiplier DECIMAL(3,2) DEFAULT 1.0
|
||||
);
|
||||
|
||||
CREATE TABLE contracts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
adventurer_id UUID REFERENCES adventurers(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
status quest_status DEFAULT 'active',
|
||||
started_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE rewards (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
adventurer_id UUID REFERENCES adventurers(id),
|
||||
reward_type reward_type NOT NULL,
|
||||
value INTEGER NOT NULL
|
||||
);`;
|
||||
|
||||
// Use the main entry point
|
||||
const parserResult = await fromPostgres(sql);
|
||||
|
||||
console.log('\nParser Result:');
|
||||
console.log('- Enums found:', parserResult.enums?.length || 0);
|
||||
if (parserResult.enums) {
|
||||
parserResult.enums.forEach((e) => {
|
||||
console.log(` - ${e.name}: ${e.values.length} values`);
|
||||
});
|
||||
}
|
||||
|
||||
// Convert to diagram
|
||||
const diagram = convertToChartDBDiagram(
|
||||
parserResult,
|
||||
DatabaseType.POSTGRESQL,
|
||||
DatabaseType.POSTGRESQL
|
||||
);
|
||||
|
||||
console.log('\nDiagram Result:');
|
||||
console.log('- Custom types:', diagram.customTypes?.length || 0);
|
||||
if (diagram.customTypes) {
|
||||
diagram.customTypes.forEach((t) => {
|
||||
console.log(` - ${t.name} (${t.kind})`);
|
||||
});
|
||||
}
|
||||
|
||||
// Check contracts table
|
||||
const contractsTable = diagram.tables?.find(
|
||||
(t) => t.name === 'contracts'
|
||||
);
|
||||
if (contractsTable) {
|
||||
console.log('\nContracts table enum fields:');
|
||||
const enumFields = ['status'];
|
||||
enumFields.forEach((fieldName) => {
|
||||
const field = contractsTable.fields.find(
|
||||
(f) => f.name === fieldName
|
||||
);
|
||||
if (field) {
|
||||
console.log(
|
||||
` - ${field.name}: ${field.type.name} (id: ${field.type.id})`
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Assertions
|
||||
expect(parserResult.enums).toHaveLength(5);
|
||||
expect(diagram.customTypes).toHaveLength(5);
|
||||
|
||||
// Check quest_status specifically
|
||||
const questStatusParser = parserResult.enums?.find(
|
||||
(e) => e.name === 'quest_status'
|
||||
);
|
||||
expect(questStatusParser).toBeDefined();
|
||||
|
||||
const questStatusDiagram = diagram.customTypes?.find(
|
||||
(t) => t.name === 'quest_status'
|
||||
);
|
||||
expect(questStatusDiagram).toBeDefined();
|
||||
|
||||
// Check that status field uses the enum
|
||||
const questsTable = diagram.tables?.find((t) => t.name === 'quests');
|
||||
if (questsTable) {
|
||||
const statusField = questsTable.fields.find(
|
||||
(f) => f.name === 'status'
|
||||
);
|
||||
expect(statusField?.type.name).toBe('quest_status');
|
||||
}
|
||||
});
|
||||
});
|
@@ -0,0 +1,259 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
|
||||
describe('Full database import - 20 tables verification', () => {
|
||||
it('should parse all 20 tables from quest management system', async () => {
|
||||
const sql = `-- Quest Management System Database
|
||||
CREATE TYPE quest_status AS ENUM ('draft', 'active', 'on_hold', 'completed', 'abandoned');
|
||||
CREATE TYPE difficulty_level AS ENUM ('novice', 'apprentice', 'journeyman', 'expert', 'master');
|
||||
CREATE TYPE reward_type AS ENUM ('gold', 'item', 'experience', 'reputation', 'special');
|
||||
CREATE TYPE adventurer_rank AS ENUM ('bronze', 'silver', 'gold', 'platinum', 'legendary');
|
||||
CREATE TYPE region_climate AS ENUM ('temperate', 'arctic', 'desert', 'tropical', 'magical');
|
||||
|
||||
CREATE TABLE adventurers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
rank adventurer_rank DEFAULT 'bronze',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE guild_masters (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
specialization VARCHAR(100),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE regions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(100) NOT NULL,
|
||||
climate region_climate NOT NULL,
|
||||
danger_level INTEGER CHECK (danger_level BETWEEN 1 AND 10),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE outposts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
region_id UUID REFERENCES regions(id),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
location_coordinates POINT,
|
||||
is_active BOOLEAN DEFAULT true,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE scouts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
outpost_id UUID REFERENCES outposts(id),
|
||||
scouting_range INTEGER DEFAULT 50,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE scout_region_assignments (
|
||||
scout_id UUID REFERENCES scouts(id),
|
||||
region_id UUID REFERENCES regions(id),
|
||||
assigned_date DATE NOT NULL,
|
||||
PRIMARY KEY (scout_id, region_id)
|
||||
);
|
||||
|
||||
CREATE TABLE quest_givers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
title VARCHAR(100),
|
||||
location VARCHAR(255),
|
||||
reputation_required INTEGER DEFAULT 0,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE quest_templates (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
title VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
difficulty difficulty_level NOT NULL,
|
||||
base_reward_gold INTEGER DEFAULT 0,
|
||||
quest_giver_id UUID REFERENCES quest_givers(id),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE quests (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
quest_template_id UUID REFERENCES quest_templates(id),
|
||||
title VARCHAR(255) NOT NULL,
|
||||
status quest_status DEFAULT 'draft',
|
||||
reward_multiplier DECIMAL(3,2) DEFAULT 1.0,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE rewards (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
reward_type reward_type NOT NULL,
|
||||
value INTEGER NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE quest_sample_rewards (
|
||||
quest_template_id UUID REFERENCES quest_templates(id),
|
||||
reward_id UUID REFERENCES rewards(id),
|
||||
PRIMARY KEY (quest_template_id, reward_id)
|
||||
);
|
||||
|
||||
CREATE TABLE quest_rotations (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
rotation_name VARCHAR(100) NOT NULL,
|
||||
start_date DATE NOT NULL,
|
||||
end_date DATE NOT NULL,
|
||||
is_active BOOLEAN DEFAULT false,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE rotation_quests (
|
||||
rotation_id UUID REFERENCES quest_rotations(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
day_of_week INTEGER CHECK (day_of_week BETWEEN 1 AND 7),
|
||||
PRIMARY KEY (rotation_id, quest_id, day_of_week)
|
||||
);
|
||||
|
||||
CREATE TABLE contracts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
adventurer_id UUID REFERENCES adventurers(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
status quest_status DEFAULT 'active',
|
||||
started_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
completed_at TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE completion_events (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
scout_id UUID REFERENCES scouts(id),
|
||||
verification_notes TEXT,
|
||||
event_timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE bounties (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
amount_gold INTEGER NOT NULL,
|
||||
payment_status VARCHAR(50) DEFAULT 'pending',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE guild_ledgers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
entry_type VARCHAR(50) NOT NULL,
|
||||
amount INTEGER NOT NULL,
|
||||
balance_after INTEGER NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE reputation_logs (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
adventurer_id UUID REFERENCES adventurers(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
reputation_change INTEGER NOT NULL,
|
||||
reason VARCHAR(255),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE quest_suspensions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
suspension_date DATE NOT NULL,
|
||||
reason VARCHAR(255),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE guild_master_actions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
guild_master_id UUID REFERENCES guild_masters(id),
|
||||
action_type VARCHAR(100) NOT NULL,
|
||||
target_table VARCHAR(100),
|
||||
target_id UUID,
|
||||
details JSONB,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);`;
|
||||
|
||||
// Expected tables for the quest management system
|
||||
const expectedTables = [
|
||||
'adventurers',
|
||||
'guild_masters',
|
||||
'regions',
|
||||
'outposts',
|
||||
'scouts',
|
||||
'scout_region_assignments',
|
||||
'quest_givers',
|
||||
'quest_templates',
|
||||
'quests',
|
||||
'rewards',
|
||||
'quest_sample_rewards', // Junction table that must be included!
|
||||
'quest_rotations',
|
||||
'rotation_quests',
|
||||
'contracts',
|
||||
'completion_events',
|
||||
'bounties',
|
||||
'guild_ledgers',
|
||||
'reputation_logs',
|
||||
'quest_suspensions',
|
||||
'guild_master_actions',
|
||||
];
|
||||
|
||||
const result = await fromPostgresImproved(sql);
|
||||
|
||||
console.log('\n=== PARSING RESULTS ===');
|
||||
console.log(`Tables parsed: ${result.tables.length}`);
|
||||
console.log(`Expected: ${expectedTables.length}`);
|
||||
|
||||
const parsedTableNames = result.tables.map((t) => t.name).sort();
|
||||
console.log('\nParsed tables:');
|
||||
parsedTableNames.forEach((name, i) => {
|
||||
console.log(` ${i + 1}. ${name}`);
|
||||
});
|
||||
|
||||
// Find missing tables
|
||||
const missingTables = expectedTables.filter(
|
||||
(expected) => !parsedTableNames.includes(expected)
|
||||
);
|
||||
if (missingTables.length > 0) {
|
||||
console.log('\nMissing tables:');
|
||||
missingTables.forEach((name) => {
|
||||
console.log(` - ${name}`);
|
||||
});
|
||||
}
|
||||
|
||||
// Check for quest_sample_rewards specifically
|
||||
const questSampleRewards = result.tables.find(
|
||||
(t) => t.name === 'quest_sample_rewards'
|
||||
);
|
||||
console.log(`\nquest_sample_rewards found: ${!!questSampleRewards}`);
|
||||
if (questSampleRewards) {
|
||||
console.log('quest_sample_rewards details:');
|
||||
console.log(` - Columns: ${questSampleRewards.columns.length}`);
|
||||
questSampleRewards.columns.forEach((col) => {
|
||||
console.log(` - ${col.name}: ${col.type}`);
|
||||
});
|
||||
}
|
||||
|
||||
// Verify all tables were parsed
|
||||
expect(result.tables).toHaveLength(expectedTables.length);
|
||||
expect(parsedTableNames).toEqual(expectedTables.sort());
|
||||
|
||||
// Specifically check quest_sample_rewards junction table
|
||||
expect(questSampleRewards).toBeDefined();
|
||||
expect(questSampleRewards!.columns).toHaveLength(2);
|
||||
|
||||
const columnNames = questSampleRewards!.columns
|
||||
.map((c) => c.name)
|
||||
.sort();
|
||||
expect(columnNames).toEqual(['quest_template_id', 'reward_id']);
|
||||
|
||||
// Check warnings if any
|
||||
if (result.warnings && result.warnings.length > 0) {
|
||||
console.log('\nWarnings:');
|
||||
result.warnings.forEach((w) => console.log(` - ${w}`));
|
||||
}
|
||||
});
|
||||
});
|
@@ -0,0 +1,164 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgresImproved } from '../postgresql-improved';
|
||||
import { convertToChartDBDiagram } from '../../../common';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import { DBCustomTypeKind } from '@/lib/domain/db-custom-type';
|
||||
|
||||
describe('PostgreSQL Enum Type Conversion to Diagram', () => {
|
||||
it('should convert enum types to custom types in diagram', async () => {
|
||||
const sql = `
|
||||
CREATE TYPE wizard_rank AS ENUM ('apprentice', 'master', 'archmage');
|
||||
CREATE TYPE spell_element AS ENUM ('fire', 'water', 'both');
|
||||
|
||||
CREATE TABLE wizards (
|
||||
id UUID PRIMARY KEY,
|
||||
email VARCHAR(255) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE spellbooks (
|
||||
id UUID PRIMARY KEY,
|
||||
wizard_id UUID REFERENCES wizards(id),
|
||||
rank wizard_rank DEFAULT 'apprentice',
|
||||
primary_element spell_element NOT NULL
|
||||
);`;
|
||||
|
||||
// Parse SQL
|
||||
const parserResult = await fromPostgresImproved(sql);
|
||||
|
||||
// Convert to diagram
|
||||
const diagram = convertToChartDBDiagram(
|
||||
parserResult,
|
||||
DatabaseType.POSTGRESQL,
|
||||
DatabaseType.POSTGRESQL
|
||||
);
|
||||
|
||||
// Check that custom types were created in the diagram
|
||||
expect(diagram.customTypes).toBeDefined();
|
||||
expect(diagram.customTypes).toHaveLength(2);
|
||||
|
||||
// Check first custom type
|
||||
const wizardRankType = diagram.customTypes!.find(
|
||||
(t) => t.name === 'wizard_rank'
|
||||
);
|
||||
expect(wizardRankType).toBeDefined();
|
||||
expect(wizardRankType!.kind).toBe(DBCustomTypeKind.enum);
|
||||
expect(wizardRankType!.values).toEqual([
|
||||
'apprentice',
|
||||
'master',
|
||||
'archmage',
|
||||
]);
|
||||
expect(wizardRankType!.schema).toBe('public');
|
||||
|
||||
// Check second custom type
|
||||
const spellElementType = diagram.customTypes!.find(
|
||||
(t) => t.name === 'spell_element'
|
||||
);
|
||||
expect(spellElementType).toBeDefined();
|
||||
expect(spellElementType!.kind).toBe(DBCustomTypeKind.enum);
|
||||
expect(spellElementType!.values).toEqual(['fire', 'water', 'both']);
|
||||
|
||||
// Check that tables use the enum types
|
||||
const spellbooksTable = diagram.tables!.find(
|
||||
(t) => t.name === 'spellbooks'
|
||||
);
|
||||
expect(spellbooksTable).toBeDefined();
|
||||
|
||||
// Find columns that use enum types
|
||||
const rankField = spellbooksTable!.fields.find(
|
||||
(f) => f.name === 'rank'
|
||||
);
|
||||
expect(rankField).toBeDefined();
|
||||
// The type should be preserved as the enum name
|
||||
expect(rankField!.type.name.toLowerCase()).toBe('wizard_rank');
|
||||
|
||||
const elementField = spellbooksTable!.fields.find(
|
||||
(f) => f.name === 'primary_element'
|
||||
);
|
||||
expect(elementField).toBeDefined();
|
||||
expect(elementField!.type.name.toLowerCase()).toBe('spell_element');
|
||||
});
|
||||
|
||||
it('should handle fantasy realm SQL with all enum types', async () => {
|
||||
// Fantasy realm example with all enum types
|
||||
const sql = `
|
||||
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
|
||||
|
||||
CREATE TYPE wizard_rank AS ENUM ('apprentice', 'journeyman', 'master', 'archmage', 'legendary');
|
||||
CREATE TYPE spell_frequency AS ENUM ('daily', 'weekly');
|
||||
CREATE TYPE magic_element AS ENUM ('fire', 'water', 'earth');
|
||||
CREATE TYPE quest_status AS ENUM ('pending', 'active', 'completed', 'failed', 'abandoned');
|
||||
CREATE TYPE dragon_mood AS ENUM ('happy', 'content', 'grumpy');
|
||||
|
||||
CREATE TABLE wizards (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
magic_id VARCHAR(15) UNIQUE NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE spellbooks (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
wizard_id UUID NOT NULL REFERENCES wizards(id),
|
||||
cast_frequency spell_frequency NOT NULL,
|
||||
primary_element magic_element NOT NULL,
|
||||
owner_rank wizard_rank DEFAULT 'apprentice'
|
||||
);
|
||||
|
||||
CREATE TABLE quests (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
spellbook_id UUID NOT NULL REFERENCES spellbooks(id),
|
||||
status quest_status DEFAULT 'pending'
|
||||
);
|
||||
|
||||
CREATE TABLE dragons (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
wizard_id UUID NOT NULL REFERENCES wizards(id),
|
||||
mood dragon_mood NOT NULL
|
||||
);`;
|
||||
|
||||
const parserResult = await fromPostgresImproved(sql);
|
||||
const diagram = convertToChartDBDiagram(
|
||||
parserResult,
|
||||
DatabaseType.POSTGRESQL,
|
||||
DatabaseType.POSTGRESQL
|
||||
);
|
||||
|
||||
// Should have all 5 enum types
|
||||
expect(diagram.customTypes).toBeDefined();
|
||||
expect(diagram.customTypes).toHaveLength(5);
|
||||
|
||||
// Check all enum types are present
|
||||
const enumNames = diagram.customTypes!.map((t) => t.name).sort();
|
||||
expect(enumNames).toEqual([
|
||||
'dragon_mood',
|
||||
'magic_element',
|
||||
'quest_status',
|
||||
'spell_frequency',
|
||||
'wizard_rank',
|
||||
]);
|
||||
|
||||
// Verify each enum has the correct values
|
||||
const spellFreq = diagram.customTypes!.find(
|
||||
(t) => t.name === 'spell_frequency'
|
||||
);
|
||||
expect(spellFreq!.values).toEqual(['daily', 'weekly']);
|
||||
|
||||
const questStatus = diagram.customTypes!.find(
|
||||
(t) => t.name === 'quest_status'
|
||||
);
|
||||
expect(questStatus!.values).toEqual([
|
||||
'pending',
|
||||
'active',
|
||||
'completed',
|
||||
'failed',
|
||||
'abandoned',
|
||||
]);
|
||||
|
||||
// Check that tables reference the enum types correctly
|
||||
const spellbooksTable = diagram.tables!.find(
|
||||
(t) => t.name === 'spellbooks'
|
||||
);
|
||||
const castFreqField = spellbooksTable!.fields.find(
|
||||
(f) => f.name === 'cast_frequency'
|
||||
);
|
||||
expect(castFreqField!.type.name.toLowerCase()).toBe('spell_frequency');
|
||||
});
|
||||
});
|
@@ -146,13 +146,42 @@ function processForeignKeyConstraint(
|
||||
|
||||
// Look up table IDs
|
||||
const sourceTableKey = `${sourceSchema ? sourceSchema + '.' : ''}${sourceTable}`;
|
||||
const sourceTableId = tableMap[sourceTableKey];
|
||||
let sourceTableId = tableMap[sourceTableKey];
|
||||
|
||||
const targetTableKey = `${targetSchema ? targetSchema + '.' : ''}${targetTable}`;
|
||||
const targetTableId = tableMap[targetTableKey];
|
||||
let targetTableId = tableMap[targetTableKey];
|
||||
|
||||
if (!sourceTableId || !targetTableId) {
|
||||
return;
|
||||
// Try without schema if not found
|
||||
if (!sourceTableId && sourceSchema) {
|
||||
sourceTableId = tableMap[sourceTable];
|
||||
}
|
||||
if (!targetTableId && targetSchema) {
|
||||
targetTableId = tableMap[targetTable];
|
||||
}
|
||||
|
||||
// If still not found, try with 'public' schema
|
||||
if (!sourceTableId && !sourceSchema) {
|
||||
sourceTableId = tableMap[`public.${sourceTable}`];
|
||||
}
|
||||
if (!targetTableId && !targetSchema) {
|
||||
targetTableId = tableMap[`public.${targetTable}`];
|
||||
}
|
||||
|
||||
// If we still can't find them, log and return
|
||||
if (!sourceTableId || !targetTableId) {
|
||||
if (!sourceTableId) {
|
||||
console.warn(
|
||||
`No table ID found for source table: ${sourceTable} (tried: ${sourceTableKey}, ${sourceTable}, public.${sourceTable})`
|
||||
);
|
||||
}
|
||||
if (!targetTableId) {
|
||||
console.warn(
|
||||
`No table ID found for target table: ${targetTable} (tried: ${targetTableKey}, ${targetTable}, public.${targetTable})`
|
||||
);
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Create relationships for each column pair
|
||||
|
File diff suppressed because it is too large
Load Diff
@@ -25,6 +25,7 @@ import {
|
||||
findTableWithSchemaSupport,
|
||||
getTableIdWithSchemaSupport,
|
||||
} from './postgresql-common';
|
||||
import { fromPostgresImproved } from './postgresql-improved';
|
||||
|
||||
/**
|
||||
* Uses regular expressions to find foreign key relationships in PostgreSQL SQL content.
|
||||
@@ -241,6 +242,36 @@ function getDefaultValueString(
|
||||
// PostgreSQL-specific parsing logic
|
||||
export async function fromPostgres(
|
||||
sqlContent: string
|
||||
): Promise<SQLParserResult> {
|
||||
// Check if the SQL contains unsupported statements
|
||||
const upperSQL = sqlContent.toUpperCase();
|
||||
const hasUnsupportedStatements =
|
||||
upperSQL.includes('CREATE FUNCTION') ||
|
||||
upperSQL.includes('CREATE OR REPLACE FUNCTION') ||
|
||||
upperSQL.includes('CREATE POLICY') ||
|
||||
upperSQL.includes('CREATE TRIGGER') ||
|
||||
upperSQL.includes('ENABLE ROW LEVEL SECURITY') ||
|
||||
upperSQL.includes('CREATE EXTENSION') ||
|
||||
upperSQL.includes('CREATE TYPE');
|
||||
|
||||
// If SQL contains unsupported statements, use the improved parser
|
||||
if (hasUnsupportedStatements) {
|
||||
const result = await fromPostgresImproved(sqlContent);
|
||||
// Return without warnings for backward compatibility
|
||||
return {
|
||||
tables: result.tables,
|
||||
relationships: result.relationships,
|
||||
enums: result.enums,
|
||||
};
|
||||
}
|
||||
|
||||
// Otherwise, use the original parser for backward compatibility
|
||||
return fromPostgresOriginal(sqlContent);
|
||||
}
|
||||
|
||||
// Original PostgreSQL parsing logic (renamed)
|
||||
async function fromPostgresOriginal(
|
||||
sqlContent: string
|
||||
): Promise<SQLParserResult> {
|
||||
const tables: SQLTable[] = [];
|
||||
const relationships: SQLForeignKey[] = [];
|
||||
|
@@ -0,0 +1,150 @@
|
||||
export interface SQLiteValidationResult {
|
||||
isValid: boolean;
|
||||
errors: SQLiteValidationError[];
|
||||
warnings: SQLiteValidationWarning[];
|
||||
}
|
||||
|
||||
export interface SQLiteValidationError {
|
||||
line?: number;
|
||||
column?: number;
|
||||
message: string;
|
||||
code: string;
|
||||
suggestion?: string;
|
||||
}
|
||||
|
||||
export interface SQLiteValidationWarning {
|
||||
line?: number;
|
||||
message: string;
|
||||
code: string;
|
||||
}
|
||||
|
||||
export function validateSQLiteSyntax(sql: string): SQLiteValidationResult {
|
||||
const errors: SQLiteValidationError[] = [];
|
||||
const warnings: SQLiteValidationWarning[] = [];
|
||||
|
||||
const lines = sql.split('\n');
|
||||
|
||||
// Check for common SQLite syntax issues
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
const lineNum = i + 1;
|
||||
|
||||
// 1. Check for square brackets (SQL Server style)
|
||||
if (/\[[^\]]+\]/.test(line) && !line.includes('--')) {
|
||||
warnings.push({
|
||||
line: lineNum,
|
||||
message:
|
||||
'SQLite supports square brackets but double quotes are preferred for identifiers',
|
||||
code: 'SQUARE_BRACKETS',
|
||||
});
|
||||
}
|
||||
|
||||
// 2. Check for unsupported data types
|
||||
const unsupportedTypes = [
|
||||
{ type: 'DATETIME2', suggestion: 'Use DATETIME or TEXT' },
|
||||
{ type: 'NVARCHAR', suggestion: 'Use TEXT' },
|
||||
{ type: 'MONEY', suggestion: 'Use REAL or NUMERIC' },
|
||||
{ type: 'UNIQUEIDENTIFIER', suggestion: 'Use TEXT' },
|
||||
{ type: 'XML', suggestion: 'Use TEXT' },
|
||||
{ type: 'GEOGRAPHY', suggestion: 'Use TEXT or BLOB' },
|
||||
{ type: 'GEOMETRY', suggestion: 'Use TEXT or BLOB' },
|
||||
];
|
||||
|
||||
for (const { type, suggestion } of unsupportedTypes) {
|
||||
const regex = new RegExp(`\\b${type}\\b`, 'i');
|
||||
if (regex.test(line)) {
|
||||
errors.push({
|
||||
line: lineNum,
|
||||
message: `SQLite does not support ${type} data type`,
|
||||
code: `UNSUPPORTED_TYPE_${type}`,
|
||||
suggestion: suggestion,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Check for CASCADE DELETE/UPDATE (limited support)
|
||||
if (/ON\s+(DELETE|UPDATE)\s+CASCADE/i.test(line)) {
|
||||
warnings.push({
|
||||
line: lineNum,
|
||||
message:
|
||||
'CASCADE actions require foreign keys to be enabled in SQLite (PRAGMA foreign_keys = ON)',
|
||||
code: 'CASCADE_REQUIRES_FK',
|
||||
});
|
||||
}
|
||||
|
||||
// 4. Check for multiple primary keys in CREATE TABLE
|
||||
if (/PRIMARY\s+KEY/i.test(line)) {
|
||||
// Check if this is a column-level primary key
|
||||
const beforePK = line.substring(0, line.search(/PRIMARY\s+KEY/i));
|
||||
if (beforePK.trim() && !beforePK.includes('CONSTRAINT')) {
|
||||
// This is likely a column-level PRIMARY KEY
|
||||
// Check if there's already been a PRIMARY KEY in this table
|
||||
let tableStartLine = i;
|
||||
for (let j = i - 1; j >= 0; j--) {
|
||||
if (/CREATE\s+TABLE/i.test(lines[j])) {
|
||||
tableStartLine = j;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Count PRIMARY KEY occurrences in this table
|
||||
let pkCount = 0;
|
||||
for (let j = tableStartLine; j <= i; j++) {
|
||||
if (/PRIMARY\s+KEY/i.test(lines[j])) {
|
||||
pkCount++;
|
||||
}
|
||||
}
|
||||
|
||||
if (pkCount > 1) {
|
||||
warnings.push({
|
||||
line: lineNum,
|
||||
message:
|
||||
'Multiple PRIMARY KEY definitions found. Consider using a composite primary key.',
|
||||
code: 'MULTIPLE_PRIMARY_KEYS',
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 5. Check for WITH clause (not fully supported)
|
||||
if (/\bWITH\s+\(/i.test(line) && /CREATE\s+TABLE/i.test(line)) {
|
||||
warnings.push({
|
||||
line: lineNum,
|
||||
message:
|
||||
'WITH clause in CREATE TABLE has limited support in SQLite',
|
||||
code: 'LIMITED_WITH_SUPPORT',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Check for unsupported SQLite features in DDL import
|
||||
const unsupportedFeatures = [
|
||||
{ pattern: /CREATE\s+PROCEDURE/i, feature: 'Stored Procedures' },
|
||||
{ pattern: /CREATE\s+FUNCTION/i, feature: 'User-defined Functions' },
|
||||
{ pattern: /DECLARE\s+@/i, feature: 'Variables' },
|
||||
{ pattern: /CREATE\s+VIEW/i, feature: 'Views' },
|
||||
];
|
||||
|
||||
for (const { pattern, feature } of unsupportedFeatures) {
|
||||
if (pattern.test(sql)) {
|
||||
warnings.push({
|
||||
message: `${feature} are not supported and will be ignored during import`,
|
||||
code: `UNSUPPORTED_${feature.toUpperCase().replace(' ', '_')}`,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// SQLite-specific warnings
|
||||
if (/ALTER\s+TABLE.*DROP\s+COLUMN/i.test(sql)) {
|
||||
warnings.push({
|
||||
message: 'ALTER TABLE DROP COLUMN requires SQLite 3.35.0 or later',
|
||||
code: 'DROP_COLUMN_VERSION',
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
isValid: errors.length === 0,
|
||||
errors,
|
||||
warnings,
|
||||
};
|
||||
}
|
@@ -0,0 +1,113 @@
|
||||
export interface SQLServerValidationResult {
|
||||
isValid: boolean;
|
||||
errors: SQLServerValidationError[];
|
||||
warnings: SQLServerValidationWarning[];
|
||||
}
|
||||
|
||||
export interface SQLServerValidationError {
|
||||
line?: number;
|
||||
column?: number;
|
||||
message: string;
|
||||
code: string;
|
||||
suggestion?: string;
|
||||
}
|
||||
|
||||
export interface SQLServerValidationWarning {
|
||||
line?: number;
|
||||
message: string;
|
||||
code: string;
|
||||
}
|
||||
|
||||
export function validateSQLServerSyntax(
|
||||
sql: string
|
||||
): SQLServerValidationResult {
|
||||
const errors: SQLServerValidationError[] = [];
|
||||
const warnings: SQLServerValidationWarning[] = [];
|
||||
|
||||
const lines = sql.split('\n');
|
||||
|
||||
// Check for common SQL Server syntax issues
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
const lineNum = i + 1;
|
||||
|
||||
// 1. Check for MySQL-style backticks (should use square brackets)
|
||||
if (line.includes('`')) {
|
||||
errors.push({
|
||||
line: lineNum,
|
||||
message:
|
||||
'SQL Server uses square brackets [name] instead of backticks `name` for identifiers',
|
||||
code: 'INVALID_IDENTIFIER_QUOTES',
|
||||
suggestion:
|
||||
'Replace backticks with square brackets: `name` → [name]',
|
||||
});
|
||||
}
|
||||
|
||||
// 2. Check for PostgreSQL-style :: cast operator
|
||||
if (line.includes('::')) {
|
||||
errors.push({
|
||||
line: lineNum,
|
||||
message:
|
||||
'SQL Server uses CAST() or CONVERT() instead of :: for type casting',
|
||||
code: 'INVALID_CAST_OPERATOR',
|
||||
suggestion:
|
||||
'Use CAST(expression AS type) or CONVERT(type, expression)',
|
||||
});
|
||||
}
|
||||
|
||||
// 3. Check for AUTO_INCREMENT (MySQL style)
|
||||
if (/AUTO_INCREMENT/i.test(line)) {
|
||||
errors.push({
|
||||
line: lineNum,
|
||||
message: 'SQL Server uses IDENTITY instead of AUTO_INCREMENT',
|
||||
code: 'INVALID_AUTO_INCREMENT',
|
||||
suggestion: 'Replace AUTO_INCREMENT with IDENTITY(1,1)',
|
||||
});
|
||||
}
|
||||
|
||||
// 4. Check for LIMIT clause (not supported in SQL Server)
|
||||
if (/\bLIMIT\s+\d+/i.test(line)) {
|
||||
errors.push({
|
||||
line: lineNum,
|
||||
message: 'SQL Server does not support LIMIT clause',
|
||||
code: 'UNSUPPORTED_LIMIT',
|
||||
suggestion:
|
||||
'Use TOP clause instead: SELECT TOP 10 * FROM table',
|
||||
});
|
||||
}
|
||||
|
||||
// 5. Check for BOOLEAN type (not native in SQL Server)
|
||||
if (/\bBOOLEAN\b/i.test(line)) {
|
||||
warnings.push({
|
||||
line: lineNum,
|
||||
message:
|
||||
'SQL Server does not have a native BOOLEAN type. Use BIT instead.',
|
||||
code: 'NO_BOOLEAN_TYPE',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Check for unsupported SQL Server features in DDL import
|
||||
const unsupportedFeatures = [
|
||||
{ pattern: /CREATE\s+PROCEDURE/i, feature: 'Stored Procedures' },
|
||||
{ pattern: /CREATE\s+FUNCTION/i, feature: 'Functions' },
|
||||
{ pattern: /CREATE\s+TRIGGER/i, feature: 'Triggers' },
|
||||
{ pattern: /CREATE\s+VIEW/i, feature: 'Views' },
|
||||
{ pattern: /CREATE\s+ASSEMBLY/i, feature: 'Assemblies' },
|
||||
];
|
||||
|
||||
for (const { pattern, feature } of unsupportedFeatures) {
|
||||
if (pattern.test(sql)) {
|
||||
warnings.push({
|
||||
message: `${feature} are not supported and will be ignored during import`,
|
||||
code: `UNSUPPORTED_${feature.toUpperCase().replace(' ', '_')}`,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
isValid: errors.length === 0,
|
||||
errors,
|
||||
warnings,
|
||||
};
|
||||
}
|
138
src/lib/data/sql-import/import-with-validation.ts
Normal file
138
src/lib/data/sql-import/import-with-validation.ts
Normal file
@@ -0,0 +1,138 @@
|
||||
import { validatePostgreSQLSyntax } from './sql-validator';
|
||||
import { fromPostgresImproved } from './dialect-importers/postgresql/postgresql-improved';
|
||||
import type { SQLParserResult } from './common';
|
||||
|
||||
export interface ImportResult {
|
||||
success: boolean;
|
||||
data?: SQLParserResult & { warnings?: string[] };
|
||||
error?: {
|
||||
message: string;
|
||||
details?: string;
|
||||
line?: number;
|
||||
suggestion?: string;
|
||||
};
|
||||
validationErrors?: Array<{
|
||||
line: number;
|
||||
message: string;
|
||||
suggestion?: string;
|
||||
}>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Import PostgreSQL with validation and error handling
|
||||
*/
|
||||
export async function importPostgreSQLWithValidation(
|
||||
sql: string
|
||||
): Promise<ImportResult> {
|
||||
try {
|
||||
// Step 1: Validate SQL syntax
|
||||
const validation = validatePostgreSQLSyntax(sql);
|
||||
|
||||
// If there are syntax errors, check if we can auto-fix
|
||||
let sqlToImport = sql;
|
||||
if (!validation.isValid) {
|
||||
if (validation.fixedSQL) {
|
||||
// Use auto-fixed SQL
|
||||
sqlToImport = validation.fixedSQL;
|
||||
console.log('Auto-fixing SQL syntax errors...');
|
||||
} else {
|
||||
// Return validation errors
|
||||
return {
|
||||
success: false,
|
||||
validationErrors: validation.errors.map((e) => ({
|
||||
line: e.line,
|
||||
message: e.message,
|
||||
suggestion: e.suggestion,
|
||||
})),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Step 2: Attempt to parse
|
||||
const result = await fromPostgresImproved(sqlToImport);
|
||||
|
||||
// Step 3: Check if we got meaningful results
|
||||
if (!result.tables || result.tables.length === 0) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
message: 'No tables found in SQL',
|
||||
details:
|
||||
'The SQL was parsed successfully but no tables were found. Please check your SQL contains CREATE TABLE statements.',
|
||||
suggestion:
|
||||
'Ensure your SQL contains valid CREATE TABLE statements',
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Step 4: Return successful result with any warnings
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
...result,
|
||||
warnings: [
|
||||
...(result.warnings || []),
|
||||
...(validation.warnings?.map((w) => w.message) || []),
|
||||
],
|
||||
},
|
||||
};
|
||||
} catch (error) {
|
||||
// Step 5: Handle parsing errors
|
||||
const errorMessage =
|
||||
error instanceof Error ? error.message : 'Unknown error';
|
||||
|
||||
// Try to extract line number from parser error
|
||||
const lineMatch = errorMessage.match(/line (\d+)/i);
|
||||
const line = lineMatch ? parseInt(lineMatch[1]) : undefined;
|
||||
|
||||
// Provide helpful error messages based on common issues
|
||||
let suggestion: string | undefined;
|
||||
if (errorMessage.includes('Unexpected token')) {
|
||||
suggestion =
|
||||
'Check for missing semicolons, unclosed quotes, or invalid syntax';
|
||||
} else if (errorMessage.includes('Expected')) {
|
||||
suggestion = 'Check for incomplete statements or missing keywords';
|
||||
} else if (errorMessage.includes('syntax error')) {
|
||||
suggestion =
|
||||
'Review the SQL syntax, especially around special PostgreSQL features';
|
||||
}
|
||||
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
message: 'Failed to parse SQL',
|
||||
details: errorMessage,
|
||||
line,
|
||||
suggestion,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Quick check if SQL is likely to import successfully
|
||||
*/
|
||||
export function canImportSQL(sql: string): {
|
||||
canImport: boolean;
|
||||
reason?: string;
|
||||
} {
|
||||
if (!sql || !sql.trim()) {
|
||||
return { canImport: false, reason: 'SQL is empty' };
|
||||
}
|
||||
|
||||
// Check for at least one CREATE TABLE statement
|
||||
if (!/CREATE\s+TABLE/i.test(sql)) {
|
||||
return { canImport: false, reason: 'No CREATE TABLE statements found' };
|
||||
}
|
||||
|
||||
// Quick syntax check
|
||||
const validation = validatePostgreSQLSyntax(sql);
|
||||
if (!validation.isValid && !validation.fixedSQL) {
|
||||
return {
|
||||
canImport: false,
|
||||
reason: 'SQL contains syntax errors that cannot be auto-fixed',
|
||||
};
|
||||
}
|
||||
|
||||
return { canImport: true };
|
||||
}
|
@@ -1,7 +1,7 @@
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import type { Diagram } from '@/lib/domain/diagram';
|
||||
import { fromPostgres } from './dialect-importers/postgresql/postgresql';
|
||||
import { fromPostgresDump } from './dialect-importers/postgresql/postgresql-dump';
|
||||
import { fromPostgresImproved } from './dialect-importers/postgresql/postgresql-improved';
|
||||
|
||||
import { fromSQLServer } from './dialect-importers/sqlserver/sqlserver';
|
||||
import { fromSQLite } from './dialect-importers/sqlite/sqlite';
|
||||
@@ -127,8 +127,17 @@ export function detectDatabaseType(sqlContent: string): DatabaseType | null {
|
||||
return DatabaseType.SQL_SERVER;
|
||||
}
|
||||
|
||||
// Check for MySQL dump format
|
||||
// Check for MySQL/MariaDB dump format
|
||||
if (isMySQLFormat(sqlContent)) {
|
||||
// Try to detect if it's specifically MariaDB
|
||||
if (
|
||||
sqlContent.includes('MariaDB dump') ||
|
||||
sqlContent.includes('/*!100100') ||
|
||||
sqlContent.includes('ENGINE=Aria') ||
|
||||
sqlContent.includes('ENGINE=COLUMNSTORE')
|
||||
) {
|
||||
return DatabaseType.MARIADB;
|
||||
}
|
||||
return DatabaseType.MYSQL;
|
||||
}
|
||||
|
||||
@@ -174,7 +183,7 @@ export async function sqlImportToDiagram({
|
||||
sqlContent: string;
|
||||
sourceDatabaseType: DatabaseType;
|
||||
targetDatabaseType: DatabaseType;
|
||||
}): Promise<Diagram> {
|
||||
}): Promise<Diagram & { warnings?: string[] }> {
|
||||
// If source database type is GENERIC, try to auto-detect the type
|
||||
if (sourceDatabaseType === DatabaseType.GENERIC) {
|
||||
const detectedType = detectDatabaseType(sqlContent);
|
||||
@@ -194,13 +203,15 @@ export async function sqlImportToDiagram({
|
||||
if (isPgDumpFormat(sqlContent)) {
|
||||
parserResult = await fromPostgresDump(sqlContent);
|
||||
} else {
|
||||
parserResult = await fromPostgres(sqlContent);
|
||||
// Use the improved parser that handles enums and better error recovery
|
||||
parserResult = await fromPostgresImproved(sqlContent);
|
||||
}
|
||||
break;
|
||||
case DatabaseType.MYSQL:
|
||||
// Check if the SQL is from MySQL dump and use the appropriate parser
|
||||
case DatabaseType.MARIADB:
|
||||
// Check if the SQL is from MySQL/MariaDB dump and use the appropriate parser
|
||||
// MariaDB uses the same parser as MySQL due to high compatibility
|
||||
parserResult = await fromMySQL(sqlContent);
|
||||
|
||||
break;
|
||||
case DatabaseType.SQL_SERVER:
|
||||
parserResult = await fromSQLServer(sqlContent);
|
||||
@@ -237,6 +248,7 @@ export async function sqlImportToDiagram({
|
||||
return {
|
||||
...diagram,
|
||||
tables: sortedTables,
|
||||
warnings: parserResult.warnings,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -266,12 +278,14 @@ export async function parseSQLError({
|
||||
if (isPgDumpFormat(sqlContent)) {
|
||||
await fromPostgresDump(sqlContent);
|
||||
} else {
|
||||
await fromPostgres(sqlContent);
|
||||
// Use the improved parser for validation too
|
||||
await fromPostgresImproved(sqlContent);
|
||||
}
|
||||
break;
|
||||
case DatabaseType.MYSQL:
|
||||
case DatabaseType.MARIADB:
|
||||
// MariaDB uses the same parser as MySQL
|
||||
await fromMySQL(sqlContent);
|
||||
|
||||
break;
|
||||
case DatabaseType.SQL_SERVER:
|
||||
// SQL Server validation
|
||||
|
229
src/lib/data/sql-import/sql-import-validator-ui.tsx
Normal file
229
src/lib/data/sql-import/sql-import-validator-ui.tsx
Normal file
@@ -0,0 +1,229 @@
|
||||
import React from 'react';
|
||||
import {
|
||||
AlertCircle,
|
||||
CheckCircle,
|
||||
AlertTriangle,
|
||||
Lightbulb,
|
||||
} from 'lucide-react';
|
||||
import {
|
||||
validatePostgreSQLSyntax,
|
||||
type ValidationResult,
|
||||
} from './sql-validator';
|
||||
import { Button } from '@/components/button/button';
|
||||
import { Alert, AlertDescription, AlertTitle } from '@/components/alert/alert';
|
||||
|
||||
/** Props for the SQL import validation panel. */
interface SQLImportValidatorProps {
    // Raw SQL text currently in the editor.
    sql: string;
    // Called with the SQL to import (original or auto-fixed text).
    onImport: (sql: string) => void;
    // Called when the user dismisses the panel.
    onCancel: () => void;
}
|
||||
|
||||
/**
 * Validation panel shown before importing SQL.
 *
 * Debounces validation (500 ms) of the `sql` prop via the synchronous
 * `validatePostgreSQLSyntax`, then renders error / warning / success alerts
 * and the matching action buttons (Cancel, Auto-fix & Import, Import).
 *
 * NOTE(review): when `sql` becomes empty, the effect does not reset
 * `validationResult`, so the previous result is retained — confirm intended.
 */
export function SQLImportValidator({
    sql,
    onImport,
    onCancel,
}: SQLImportValidatorProps) {
    const [validationResult, setValidationResult] =
        React.useState<ValidationResult | null>(null);
    const [isValidating, setIsValidating] = React.useState(false);

    React.useEffect(() => {
        if (sql && sql.trim()) {
            setIsValidating(true);
            // Debounce validation
            const timer = setTimeout(() => {
                const result = validatePostgreSQLSyntax(sql);
                setValidationResult(result);
                setIsValidating(false);
            }, 500);

            // Cancel the pending validation if sql changes before it fires.
            return () => clearTimeout(timer);
        }
    }, [sql]);

    // Import either the original SQL (when valid) or the auto-fixed version.
    const handleImport = () => {
        if (validationResult?.isValid) {
            onImport(sql);
        } else if (validationResult?.fixedSQL) {
            // Use the auto-fixed SQL
            onImport(validationResult.fixedSQL);
        }
    };

    // Import the auto-fixed SQL directly.
    const handleAutoFix = () => {
        if (validationResult?.fixedSQL) {
            // You might want to update the editor content here
            onImport(validationResult.fixedSQL);
        }
    };

    // Loading state: no result yet, or a validation pass is in flight.
    if (!validationResult || isValidating) {
        return (
            <div className="flex items-center justify-between border-t p-4">
                <span className="text-sm text-muted-foreground">
                    Validating SQL...
                </span>
                <Button variant="outline" onClick={onCancel}>
                    Cancel
                </Button>
            </div>
        );
    }

    const { errors, warnings, fixedSQL, tableCount = 0 } = validationResult;
    const hasErrors = errors.length > 0;
    const hasWarnings = warnings.length > 0;
    const hasTables = tableCount > 0;

    return (
        <div className="space-y-4 border-t p-4">
            {/* Validation Status */}
            <div className="space-y-2">
                {/* Errors take precedence; only the first three are itemized */}
                {hasErrors && (
                    <Alert variant="destructive">
                        <AlertCircle className="size-4" />
                        <AlertTitle>SQL Syntax Errors Found</AlertTitle>
                        <AlertDescription className="mt-2 space-y-1">
                            {errors.slice(0, 3).map((error, idx) => (
                                <div key={idx} className="text-sm">
                                    <strong>Line {error.line}:</strong>{' '}
                                    {error.message}
                                    {error.suggestion && (
                                        <div className="ml-4 text-xs opacity-80">
                                            → {error.suggestion}
                                        </div>
                                    )}
                                </div>
                            ))}
                            {errors.length > 3 && (
                                <div className="text-sm opacity-70">
                                    ... and {errors.length - 3} more errors
                                </div>
                            )}
                        </AlertDescription>
                    </Alert>
                )}

                {/* Warnings only shown when there are no errors */}
                {hasWarnings && !hasErrors && (
                    <Alert>
                        <AlertTriangle className="size-4" />
                        <AlertTitle>Import Info</AlertTitle>
                        <AlertDescription className="mt-2 space-y-1">
                            {warnings.map((warning, idx) => (
                                <div key={idx} className="text-sm">
                                    • {warning.message}
                                </div>
                            ))}
                        </AlertDescription>
                    </Alert>
                )}

                {!hasErrors && !hasWarnings && hasTables && (
                    <Alert className="border-green-200 bg-green-50">
                        <CheckCircle className="size-4 text-green-600" />
                        <AlertTitle className="text-green-800">
                            SQL Validated Successfully
                        </AlertTitle>
                        <AlertDescription className="text-green-700">
                            Found {tableCount} table{tableCount > 1 ? 's' : ''}{' '}
                            ready to import.
                        </AlertDescription>
                    </Alert>
                )}

                {!hasErrors && !hasWarnings && !hasTables && (
                    <Alert>
                        <AlertTriangle className="size-4" />
                        <AlertTitle>No Tables Found</AlertTitle>
                        <AlertDescription>
                            No CREATE TABLE statements were found in the SQL.
                        </AlertDescription>
                    </Alert>
                )}

                {fixedSQL && (
                    <Alert className="border-blue-200 bg-blue-50">
                        <Lightbulb className="size-4 text-blue-600" />
                        <AlertTitle className="text-blue-800">
                            Auto-fix Available
                        </AlertTitle>
                        <AlertDescription className="text-blue-700">
                            We can automatically fix the syntax errors in your
                            SQL.
                        </AlertDescription>
                    </Alert>
                )}
            </div>

            {/* Action Buttons */}
            <div className="flex items-center justify-end gap-2">
                <Button variant="outline" onClick={onCancel}>
                    Cancel
                </Button>

                {fixedSQL && hasTables && (
                    <Button
                        variant="default"
                        onClick={handleAutoFix}
                        className="bg-blue-600 hover:bg-blue-700"
                    >
                        Auto-fix & Import
                    </Button>
                )}

                {!hasErrors && hasTables && (
                    <Button
                        variant="default"
                        onClick={handleImport}
                        className="bg-green-600 hover:bg-green-700"
                    >
                        Import
                    </Button>
                )}
            </div>

            {/* Detailed Error Log (Collapsible) */}
            {hasErrors && errors.length > 3 && (
                <details className="mt-4">
                    <summary className="cursor-pointer text-sm text-muted-foreground hover:text-foreground">
                        Show all {errors.length} errors
                    </summary>
                    <div className="mt-2 space-y-1 rounded bg-muted p-2 font-mono text-xs">
                        {errors.map((error, idx) => (
                            <div key={idx}>
                                Line {error.line}: {error.message}
                            </div>
                        ))}
                    </div>
                </details>
            )}
        </div>
    );
}
|
||||
|
||||
/**
|
||||
* Inline validation indicator for the SQL editor
|
||||
*/
|
||||
export function SQLValidationIndicator({ sql }: { sql: string }) {
|
||||
const [hasErrors, setHasErrors] = React.useState(false);
|
||||
|
||||
React.useEffect(() => {
|
||||
if (sql) {
|
||||
const timer = setTimeout(() => {
|
||||
const result = validatePostgreSQLSyntax(sql);
|
||||
setHasErrors(result.errors.length > 0);
|
||||
}, 1000);
|
||||
|
||||
return () => clearTimeout(timer);
|
||||
}
|
||||
}, [sql]);
|
||||
|
||||
if (!sql || !hasErrors) return null;
|
||||
|
||||
return (
|
||||
<div className="absolute right-2 top-2 flex items-center gap-2 rounded bg-red-100 px-2 py-1 text-xs text-red-700">
|
||||
<AlertCircle className="size-3" />
|
||||
SQL syntax errors detected
|
||||
</div>
|
||||
);
|
||||
}
|
288
src/lib/data/sql-import/sql-validator.ts
Normal file
288
src/lib/data/sql-import/sql-validator.ts
Normal file
@@ -0,0 +1,288 @@
|
||||
/**
|
||||
* SQL Validator for pre-import validation
|
||||
* Provides user-friendly error messages for common SQL syntax issues
|
||||
*/
|
||||
|
||||
/** Aggregate outcome of validating a SQL script. */
export interface ValidationResult {
    // True when no errors were found (warnings do not affect validity).
    isValid: boolean;
    errors: ValidationError[];
    warnings: ValidationWarning[];
    // Auto-corrected SQL, present only when fixes were actually applied.
    fixedSQL?: string;
    // Number of CREATE TABLE statements detected in the script.
    tableCount?: number;
}

/** A blocking problem found in the SQL script. */
export interface ValidationError {
    // 1-based line number of the problem.
    line: number;
    // 0-based column offset within the line, when known.
    column?: number;
    message: string;
    type: 'syntax' | 'unsupported' | 'parser';
    // Optional human-readable hint on how to fix the problem.
    suggestion?: string;
}

/** A non-blocking note about the SQL script. */
export interface ValidationWarning {
    message: string;
    type: 'compatibility' | 'data_loss' | 'performance';
}
|
||||
|
||||
/**
|
||||
* Pre-validates SQL before attempting to parse
|
||||
* Detects common syntax errors and provides helpful feedback
|
||||
*/
|
||||
export function validatePostgreSQLSyntax(sql: string): ValidationResult {
|
||||
const errors: ValidationError[] = [];
|
||||
const warnings: ValidationWarning[] = [];
|
||||
let fixedSQL = sql;
|
||||
|
||||
// First check if the SQL is empty or just whitespace
|
||||
if (!sql || !sql.trim()) {
|
||||
errors.push({
|
||||
line: 1,
|
||||
message: 'SQL script is empty',
|
||||
type: 'syntax',
|
||||
suggestion: 'Add CREATE TABLE statements to import',
|
||||
});
|
||||
return {
|
||||
isValid: false,
|
||||
errors,
|
||||
warnings,
|
||||
tableCount: 0,
|
||||
};
|
||||
}
|
||||
|
||||
// Check if the SQL contains any valid SQL keywords
|
||||
const sqlKeywords =
|
||||
/\b(CREATE|ALTER|DROP|INSERT|UPDATE|DELETE|SELECT|TABLE|INDEX|VIEW|TRIGGER|FUNCTION|PROCEDURE|GRANT|REVOKE)\b/i;
|
||||
if (!sqlKeywords.test(sql)) {
|
||||
errors.push({
|
||||
line: 1,
|
||||
message: 'No valid SQL statements found',
|
||||
type: 'syntax',
|
||||
suggestion:
|
||||
'Ensure your SQL contains valid statements like CREATE TABLE',
|
||||
});
|
||||
return {
|
||||
isValid: false,
|
||||
errors,
|
||||
warnings,
|
||||
tableCount: 0,
|
||||
};
|
||||
}
|
||||
|
||||
// Check for common PostgreSQL syntax errors
|
||||
const lines = sql.split('\n');
|
||||
|
||||
// Check for statements without proper termination
|
||||
// Check if there are non-comment lines that don't end with semicolon
|
||||
const nonCommentLines = lines.filter((line) => {
|
||||
const trimmed = line.trim();
|
||||
return (
|
||||
trimmed && !trimmed.startsWith('--') && !trimmed.startsWith('/*')
|
||||
);
|
||||
});
|
||||
|
||||
if (nonCommentLines.length > 0) {
|
||||
// Check if SQL has any complete statements (ending with semicolon)
|
||||
const hasCompleteStatements =
|
||||
/;\s*($|\n|--)/m.test(sql) || sql.trim().endsWith(';');
|
||||
if (!hasCompleteStatements && !sql.match(/^\s*--/)) {
|
||||
warnings.push({
|
||||
message: 'SQL statements should end with semicolons (;)',
|
||||
type: 'compatibility',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// 1. Check for malformed cast operators (: : instead of ::)
|
||||
const castOperatorRegex = /:\s+:/g;
|
||||
lines.forEach((line, index) => {
|
||||
const matches = line.matchAll(castOperatorRegex);
|
||||
for (const match of matches) {
|
||||
errors.push({
|
||||
line: index + 1,
|
||||
column: match.index,
|
||||
message: `Invalid cast operator ": :" found. PostgreSQL uses "::" for type casting.`,
|
||||
type: 'syntax',
|
||||
suggestion: 'Replace ": :" with "::"',
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// 2. Check for split DECIMAL declarations
|
||||
const decimalSplitRegex = /DECIMAL\s*\(\s*\d+\s*,\s*$/i;
|
||||
lines.forEach((line, index) => {
|
||||
if (decimalSplitRegex.test(line) && index < lines.length - 1) {
|
||||
const nextLine = lines[index + 1].trim();
|
||||
if (/^\d+\s*\)/.test(nextLine)) {
|
||||
errors.push({
|
||||
line: index + 1,
|
||||
message: `DECIMAL type declaration is split across lines. This may cause parsing errors.`,
|
||||
type: 'syntax',
|
||||
suggestion:
|
||||
'Keep DECIMAL(precision, scale) on a single line',
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// 3. Check for unsupported PostgreSQL extensions
|
||||
const extensionRegex =
|
||||
/CREATE\s+EXTENSION\s+.*?(postgis|uuid-ossp|pgcrypto)/i;
|
||||
const extensionMatches = sql.match(extensionRegex);
|
||||
if (extensionMatches) {
|
||||
warnings.push({
|
||||
message: `CREATE EXTENSION statements found. These will be skipped during import.`,
|
||||
type: 'compatibility',
|
||||
});
|
||||
}
|
||||
|
||||
// 4. Check for functions and triggers
|
||||
if (/CREATE\s+(OR\s+REPLACE\s+)?FUNCTION/i.test(sql)) {
|
||||
warnings.push({
|
||||
message: `Function definitions found. These will not be imported.`,
|
||||
type: 'compatibility',
|
||||
});
|
||||
}
|
||||
|
||||
if (/CREATE\s+TRIGGER/i.test(sql)) {
|
||||
warnings.push({
|
||||
message: `Trigger definitions found. These will not be imported.`,
|
||||
type: 'compatibility',
|
||||
});
|
||||
}
|
||||
|
||||
// 5. Check for views
|
||||
if (/CREATE\s+(OR\s+REPLACE\s+)?VIEW/i.test(sql)) {
|
||||
warnings.push({
|
||||
message: `View definitions found. These will not be imported.`,
|
||||
type: 'compatibility',
|
||||
});
|
||||
}
|
||||
|
||||
// 6. Attempt to auto-fix common issues
|
||||
let hasAutoFixes = false;
|
||||
|
||||
// Fix cast operator errors
|
||||
if (errors.some((e) => e.message.includes('": :"'))) {
|
||||
fixedSQL = fixedSQL.replace(/:\s+:/g, '::');
|
||||
hasAutoFixes = true;
|
||||
warnings.push({
|
||||
message: 'Auto-fixed cast operator syntax errors (": :" → "::").',
|
||||
type: 'compatibility',
|
||||
});
|
||||
}
|
||||
|
||||
// Fix split DECIMAL declarations
|
||||
if (
|
||||
errors.some((e) =>
|
||||
e.message.includes('DECIMAL type declaration is split')
|
||||
)
|
||||
) {
|
||||
// Fix DECIMAL(precision,\nscale) pattern
|
||||
fixedSQL = fixedSQL.replace(
|
||||
/DECIMAL\s*\(\s*(\d+)\s*,\s*\n\s*(\d+)\s*\)/gi,
|
||||
'DECIMAL($1,$2)'
|
||||
);
|
||||
// Also fix other numeric types that might be split
|
||||
fixedSQL = fixedSQL.replace(
|
||||
/NUMERIC\s*\(\s*(\d+)\s*,\s*\n\s*(\d+)\s*\)/gi,
|
||||
'NUMERIC($1,$2)'
|
||||
);
|
||||
hasAutoFixes = true;
|
||||
warnings.push({
|
||||
message: 'Auto-fixed split DECIMAL/NUMERIC type declarations.',
|
||||
type: 'compatibility',
|
||||
});
|
||||
}
|
||||
|
||||
// 7. Check for very large files that might cause performance issues
|
||||
const statementCount = (sql.match(/;\s*$/gm) || []).length;
|
||||
if (statementCount > 100) {
|
||||
warnings.push({
|
||||
message: `Large SQL file detected (${statementCount} statements). Import may take some time.`,
|
||||
type: 'performance',
|
||||
});
|
||||
}
|
||||
|
||||
// 8. Check for PostGIS-specific types that might not render properly
|
||||
if (/GEOGRAPHY\s*\(/i.test(sql) || /GEOMETRY\s*\(/i.test(sql)) {
|
||||
warnings.push({
|
||||
message:
|
||||
'PostGIS geographic types detected. These will be imported but may not display geometric data.',
|
||||
type: 'data_loss',
|
||||
});
|
||||
}
|
||||
|
||||
// 9. Count CREATE TABLE statements
|
||||
let tableCount = 0;
|
||||
const createTableRegex =
|
||||
/CREATE\s+TABLE(?:\s+IF\s+NOT\s+EXISTS)?(?:\s+ONLY)?\s+(?:"?[^"\s.]+?"?\.)?["'`]?[^"'`\s.(]+["'`]?/gi;
|
||||
const matches = sql.match(createTableRegex);
|
||||
if (matches) {
|
||||
tableCount = matches.length;
|
||||
}
|
||||
|
||||
return {
|
||||
isValid: errors.length === 0,
|
||||
errors,
|
||||
warnings,
|
||||
fixedSQL: hasAutoFixes && fixedSQL !== sql ? fixedSQL : undefined,
|
||||
tableCount,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Format validation results for display to user
|
||||
*/
|
||||
export function formatValidationMessage(result: ValidationResult): string {
|
||||
let message = '';
|
||||
|
||||
if (result.errors.length > 0) {
|
||||
message += '❌ SQL Syntax Errors Found:\n\n';
|
||||
|
||||
// Group errors by type
|
||||
const syntaxErrors = result.errors.filter((e) => e.type === 'syntax');
|
||||
if (syntaxErrors.length > 0) {
|
||||
message += 'Syntax Issues:\n';
|
||||
syntaxErrors.slice(0, 5).forEach((error) => {
|
||||
message += `• Line ${error.line}: ${error.message}\n`;
|
||||
if (error.suggestion) {
|
||||
message += ` → ${error.suggestion}\n`;
|
||||
}
|
||||
});
|
||||
if (syntaxErrors.length > 5) {
|
||||
message += ` ... and ${syntaxErrors.length - 5} more syntax errors\n`;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (result.warnings.length > 0) {
|
||||
if (message) message += '\n';
|
||||
message += '⚠️ Warnings:\n';
|
||||
result.warnings.forEach((warning) => {
|
||||
message += `• ${warning.message}\n`;
|
||||
});
|
||||
}
|
||||
|
||||
if (result.fixedSQL) {
|
||||
message +=
|
||||
'\n💡 Auto-fix available: The syntax errors can be automatically corrected.';
|
||||
}
|
||||
|
||||
return message || '✅ SQL syntax appears valid.';
|
||||
}
|
||||
|
||||
/**
|
||||
* Quick validation that can be run as user types
|
||||
*/
|
||||
export function quickValidate(sql: string): {
|
||||
hasErrors: boolean;
|
||||
errorCount: number;
|
||||
} {
|
||||
// Just check for the most common error (cast operators)
|
||||
const castOperatorMatches = (sql.match(/:\s+:/g) || []).length;
|
||||
|
||||
return {
|
||||
hasErrors: castOperatorMatches > 0,
|
||||
errorCount: castOperatorMatches,
|
||||
};
|
||||
}
|
181
src/lib/data/sql-import/unified-sql-validator.ts
Normal file
181
src/lib/data/sql-import/unified-sql-validator.ts
Normal file
@@ -0,0 +1,181 @@
|
||||
/**
|
||||
* Unified SQL Validator that delegates to appropriate dialect validators
|
||||
* Ensures consistent error format with clickable line numbers for all dialects
|
||||
*/
|
||||
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import {
|
||||
validatePostgreSQLSyntax,
|
||||
type ValidationResult,
|
||||
} from './sql-validator';
|
||||
import { validateMySQLSyntax } from './dialect-importers/mysql/mysql-validator';
|
||||
import { validateSQLServerSyntax } from './dialect-importers/sqlserver/sqlserver-validator';
|
||||
import { validateSQLiteSyntax } from './dialect-importers/sqlite/sqlite-validator';
|
||||
|
||||
/**
|
||||
* Validate SQL based on the database type
|
||||
* Returns a unified ValidationResult format for consistent UI display
|
||||
*/
|
||||
export function validateSQL(
|
||||
sql: string,
|
||||
databaseType: DatabaseType
|
||||
): ValidationResult {
|
||||
switch (databaseType) {
|
||||
case DatabaseType.POSTGRESQL:
|
||||
// PostgreSQL already returns the correct format
|
||||
return validatePostgreSQLSyntax(sql);
|
||||
|
||||
case DatabaseType.MYSQL: {
|
||||
// Convert MySQL validation result to standard format
|
||||
const mysqlResult = validateMySQLSyntax(sql);
|
||||
|
||||
// If there are only warnings (no errors), consolidate them for cleaner display
|
||||
let warnings = mysqlResult.warnings.map((warn) => ({
|
||||
message: warn.message,
|
||||
type: 'compatibility' as const,
|
||||
}));
|
||||
|
||||
if (mysqlResult.isValid && mysqlResult.warnings.length > 10) {
|
||||
// Too many warnings, just show a summary
|
||||
const warningTypes = new Map<string, number>();
|
||||
for (const warn of mysqlResult.warnings) {
|
||||
const type = warn.code || 'other';
|
||||
warningTypes.set(type, (warningTypes.get(type) || 0) + 1);
|
||||
}
|
||||
|
||||
warnings = [
|
||||
{
|
||||
message: `Import successful. Found ${mysqlResult.warnings.length} minor syntax notes (mostly quote formatting).`,
|
||||
type: 'compatibility' as const,
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
return {
|
||||
isValid: mysqlResult.isValid,
|
||||
errors: mysqlResult.errors.map((err) => ({
|
||||
line: err.line || 1,
|
||||
column: err.column,
|
||||
message: err.message,
|
||||
type: 'syntax' as const,
|
||||
suggestion: err.suggestion,
|
||||
})),
|
||||
warnings,
|
||||
fixedSQL: undefined,
|
||||
tableCount: undefined,
|
||||
};
|
||||
}
|
||||
|
||||
case DatabaseType.SQL_SERVER: {
|
||||
// Convert SQL Server validation result to standard format
|
||||
const sqlServerResult = validateSQLServerSyntax(sql);
|
||||
return {
|
||||
isValid: sqlServerResult.isValid,
|
||||
errors: sqlServerResult.errors.map((err) => ({
|
||||
line: err.line || 1,
|
||||
column: err.column,
|
||||
message: err.message,
|
||||
type: 'syntax' as const,
|
||||
suggestion: err.suggestion,
|
||||
})),
|
||||
warnings: sqlServerResult.warnings.map((warn) => ({
|
||||
message: warn.message,
|
||||
type: 'compatibility' as const,
|
||||
})),
|
||||
fixedSQL: undefined,
|
||||
tableCount: undefined,
|
||||
};
|
||||
}
|
||||
|
||||
case DatabaseType.SQLITE: {
|
||||
// Convert SQLite validation result to standard format
|
||||
const sqliteResult = validateSQLiteSyntax(sql);
|
||||
return {
|
||||
isValid: sqliteResult.isValid,
|
||||
errors: sqliteResult.errors.map((err) => ({
|
||||
line: err.line || 1,
|
||||
column: err.column,
|
||||
message: err.message,
|
||||
type: 'syntax' as const,
|
||||
suggestion: err.suggestion,
|
||||
})),
|
||||
warnings: sqliteResult.warnings.map((warn) => ({
|
||||
message: warn.message,
|
||||
type: 'compatibility' as const,
|
||||
})),
|
||||
fixedSQL: undefined,
|
||||
tableCount: undefined,
|
||||
};
|
||||
}
|
||||
|
||||
case DatabaseType.MARIADB:
|
||||
// MariaDB uses MySQL validator
|
||||
return validateSQL(sql, DatabaseType.MYSQL);
|
||||
|
||||
case DatabaseType.GENERIC:
|
||||
// For generic, try to detect the type or use basic validation
|
||||
return {
|
||||
isValid: true, // Let the parser determine validity
|
||||
errors: [],
|
||||
warnings: [
|
||||
{
|
||||
message:
|
||||
'Using generic SQL validation. Some dialect-specific issues may not be detected.',
|
||||
type: 'compatibility',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
default:
|
||||
return {
|
||||
isValid: true,
|
||||
errors: [],
|
||||
warnings: [],
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract line number from parser error messages
|
||||
* Used as fallback when dialect validators don't catch errors
|
||||
*/
|
||||
export function extractLineFromError(errorMessage: string): number | undefined {
|
||||
// Common patterns for line numbers in error messages
|
||||
const patterns = [
|
||||
/line\s+(\d+)/i,
|
||||
/Line\s+(\d+)/,
|
||||
/at line (\d+)/i,
|
||||
/\((\d+):\d+\)/, // (line:column) format
|
||||
/row (\d+)/i,
|
||||
];
|
||||
|
||||
for (const pattern of patterns) {
|
||||
const match = errorMessage.match(pattern);
|
||||
if (match && match[1]) {
|
||||
return parseInt(match[1], 10);
|
||||
}
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Format parser errors into ValidationResult format
|
||||
* This ensures parser errors can be displayed with clickable line numbers
|
||||
*/
|
||||
export function formatParserError(errorMessage: string): ValidationResult {
|
||||
const line = extractLineFromError(errorMessage);
|
||||
|
||||
return {
|
||||
isValid: false,
|
||||
errors: [
|
||||
{
|
||||
line: line || 1,
|
||||
message: errorMessage,
|
||||
type: 'parser' as const,
|
||||
suggestion: 'Check your SQL syntax near the reported line',
|
||||
},
|
||||
],
|
||||
warnings: [],
|
||||
};
|
||||
}
|
10
src/test/setup.ts
Normal file
10
src/test/setup.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
// Global Vitest test setup (referenced by vitest.config.ts setupFiles).
import '@testing-library/jest-dom';
import { expect, afterEach } from 'vitest';
import { cleanup } from '@testing-library/react';
import * as matchers from '@testing-library/jest-dom/matchers';

// Register jest-dom matchers (e.g. toBeInTheDocument) on Vitest's expect.
expect.extend(matchers);

// Unmount rendered React trees after each test to avoid cross-test leakage.
afterEach(() => {
    cleanup();
});
|
@@ -29,5 +29,5 @@
|
||||
"@/*": ["./src/*"]
|
||||
}
|
||||
},
|
||||
"include": ["src"]
|
||||
"include": ["src", "vitest.config.ts"]
|
||||
}
|
||||
|
21
vitest.config.ts
Normal file
21
vitest.config.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
// Vitest configuration.
import { defineConfig } from 'vitest/config';
import react from '@vitejs/plugin-react';
import path from 'path';

export default defineConfig({
    plugins: [react()],
    test: {
        // Expose describe/it/expect as globals in test files.
        globals: true,
        // Lightweight DOM implementation for component tests.
        environment: 'happy-dom',
        setupFiles: './src/test/setup.ts',
        coverage: {
            reporter: ['text', 'json', 'html'],
            exclude: ['node_modules/', 'src/test/setup.ts'],
        },
    },
    resolve: {
        alias: {
            // Mirrors the "@/*" path alias from tsconfig.
            '@': path.resolve(__dirname, './src'),
        },
    },
});
Reference in New Issue
Block a user