Mirror of https://github.com/chartdb/chartdb.git (synced 2025-11-04 14:03:15 +00:00)

Compare commits: jf/fix_sql...jf/fix_fk_ (14 commits)

| SHA1 |
|---|
| 1377bd524b |
| 0d9f57a9c9 |
| b7dbe54c83 |
| 43d1dfff71 |
| 9949a46ee3 |
| dfbcf05b2f |
| f56fab9876 |
| c9ea7da092 |
| 22d46e1e90 |
| 6af94afc56 |
| f7f92903de |
| b35e17526b |
| bf32c08d37 |
| 5d337409d6 |
.github/workflows/ci.yaml (vendored, 5 lines changed)

@@ -24,4 +24,7 @@ jobs:
              run: npm run lint

          - name: Build
              run: npm run build

          - name: Run tests
              run: npm run test:ci
package-lock.json (generated, 1042 lines changed): diff suppressed because it is too large.
package.json (19 lines changed)

@@ -9,7 +9,11 @@
        "lint": "eslint . --report-unused-disable-directives --max-warnings 0",
        "lint:fix": "npm run lint -- --fix",
        "preview": "vite preview",
        "prepare": "husky"
        "prepare": "husky",
        "test": "vitest",
        "test:ci": "vitest run --reporter=verbose --bail=1",
        "test:ui": "vitest --ui",
        "test:coverage": "vitest --coverage"
    },
    "dependencies": {
        "@ai-sdk/openai": "^0.0.51",
@@ -32,7 +36,7 @@
        "@radix-ui/react-scroll-area": "1.2.0",
        "@radix-ui/react-select": "^2.1.1",
        "@radix-ui/react-separator": "^1.1.2",
        "@radix-ui/react-slot": "^1.1.2",
        "@radix-ui/react-slot": "^1.2.3",
        "@radix-ui/react-tabs": "^1.1.0",
        "@radix-ui/react-toast": "^1.2.1",
        "@radix-ui/react-toggle": "^1.1.0",
@@ -50,8 +54,9 @@
        "html-to-image": "^1.11.11",
        "i18next": "^23.14.0",
        "i18next-browser-languagedetector": "^8.0.0",
        "lucide-react": "^0.441.0",
        "lucide-react": "^0.525.0",
        "monaco-editor": "^0.52.0",
        "motion": "^12.23.6",
        "nanoid": "^5.0.7",
        "node-sql-parser": "^5.3.2",
        "react": "^18.3.1",
@@ -73,12 +78,16 @@
        "@eslint/compat": "^1.2.4",
        "@eslint/eslintrc": "^3.2.0",
        "@eslint/js": "^9.16.0",
        "@testing-library/jest-dom": "^6.6.3",
        "@testing-library/react": "^16.3.0",
        "@testing-library/user-event": "^14.6.1",
        "@types/node": "^22.1.0",
        "@types/react": "^18.3.3",
        "@types/react-dom": "^18.3.0",
        "@typescript-eslint/eslint-plugin": "^8.18.0",
        "@typescript-eslint/parser": "^8.18.0",
        "@vitejs/plugin-react": "^4.3.1",
        "@vitest/ui": "^3.2.4",
        "autoprefixer": "^10.4.20",
        "eslint": "^9.16.0",
        "eslint-config-prettier": "^9.1.0",
@@ -90,6 +99,7 @@
        "eslint-plugin-react-refresh": "^0.4.7",
        "eslint-plugin-tailwindcss": "^3.17.4",
        "globals": "^15.13.0",
        "happy-dom": "^18.0.1",
        "husky": "^9.1.5",
        "postcss": "^8.4.40",
        "prettier": "^3.3.3",
@@ -97,6 +107,7 @@
        "tailwindcss": "^3.4.7",
        "typescript": "^5.2.2",
        "unplugin-inject-preload": "^3.0.0",
        "vite": "^5.3.4"
        "vite": "^5.3.4",
        "vitest": "^3.2.4"
    }
}
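The new `test`/`test:ci` scripts and devDependencies (vitest, @testing-library/react, happy-dom) point at a component-testing setup. A minimal sketch of what such a test could look like; the file name, the DOM environment, and the rendered markup are illustrative assumptions, not part of the diff:

```ts
// e.g. src/components/example.test.ts (hypothetical file)
// Assumes vitest is configured with a DOM environment such as happy-dom.
import { describe, expect, it } from 'vitest';
import { render, screen } from '@testing-library/react';
import { createElement } from 'react';

describe('example component test', () => {
    it('renders a navigation landmark', () => {
        // createElement avoids JSX config concerns; any component could be rendered here.
        render(createElement('nav', { 'aria-label': 'pagination' }));
        expect(
            screen.getByRole('navigation', { name: 'pagination' })
        ).toBeTruthy();
    });
});
```

Running `npm run test:ci` would then execute this once with verbose reporting and stop on the first failure, matching the new CI step.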
@@ -52,7 +52,7 @@ export const EmptyState = forwardRef<
            </Label>
            <Label
                className={cn(
                    'text-sm font-normal text-muted-foreground',
                    'text-sm text-center font-normal text-muted-foreground',
                    descriptionClassName
                )}
            >
src/components/pagination/pagination.tsx (new file, 121 lines)

@@ -0,0 +1,121 @@
import React from 'react';
import { cn } from '@/lib/utils';
import type { ButtonProps } from '../button/button';
import { buttonVariants } from '../button/button-variants';
import {
    ChevronLeftIcon,
    ChevronRightIcon,
    DotsHorizontalIcon,
} from '@radix-ui/react-icons';

const Pagination = ({ className, ...props }: React.ComponentProps<'nav'>) => (
    <nav
        role="navigation"
        aria-label="pagination"
        className={cn('mx-auto flex w-full justify-center', className)}
        {...props}
    />
);
Pagination.displayName = 'Pagination';

const PaginationContent = React.forwardRef<
    HTMLUListElement,
    React.ComponentProps<'ul'>
>(({ className, ...props }, ref) => (
    <ul
        ref={ref}
        className={cn('flex flex-row items-center gap-1', className)}
        {...props}
    />
));
PaginationContent.displayName = 'PaginationContent';

const PaginationItem = React.forwardRef<
    HTMLLIElement,
    React.ComponentProps<'li'>
>(({ className, ...props }, ref) => (
    <li ref={ref} className={cn('', className)} {...props} />
));
PaginationItem.displayName = 'PaginationItem';

type PaginationLinkProps = {
    isActive?: boolean;
} & Pick<ButtonProps, 'size'> &
    React.ComponentProps<'a'>;

const PaginationLink = ({
    className,
    isActive,
    size = 'icon',
    ...props
}: PaginationLinkProps) => (
    <a
        aria-current={isActive ? 'page' : undefined}
        className={cn(
            buttonVariants({
                variant: isActive ? 'outline' : 'ghost',
                size,
            }),
            className
        )}
        {...props}
    />
);
PaginationLink.displayName = 'PaginationLink';

const PaginationPrevious = ({
    className,
    ...props
}: React.ComponentProps<typeof PaginationLink>) => (
    <PaginationLink
        aria-label="Go to previous page"
        size="default"
        className={cn('gap-1 pl-2.5', className)}
        {...props}
    >
        <ChevronLeftIcon className="size-4" />
        <span>Previous</span>
    </PaginationLink>
);
PaginationPrevious.displayName = 'PaginationPrevious';

const PaginationNext = ({
    className,
    ...props
}: React.ComponentProps<typeof PaginationLink>) => (
    <PaginationLink
        aria-label="Go to next page"
        size="default"
        className={cn('gap-1 pr-2.5', className)}
        {...props}
    >
        <span>Next</span>
        <ChevronRightIcon className="size-4" />
    </PaginationLink>
);
PaginationNext.displayName = 'PaginationNext';

const PaginationEllipsis = ({
    className,
    ...props
}: React.ComponentProps<'span'>) => (
    <span
        aria-hidden
        className={cn('flex h-9 w-9 items-center justify-center', className)}
        {...props}
    >
        <DotsHorizontalIcon className="size-4" />
        <span className="sr-only">More pages</span>
    </span>
);
PaginationEllipsis.displayName = 'PaginationEllipsis';

export {
    Pagination,
    PaginationContent,
    PaginationLink,
    PaginationItem,
    PaginationPrevious,
    PaginationNext,
    PaginationEllipsis,
};
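A possible composition of these primitives; the page numbers and `href` values are made up for illustration, the component API is exactly the one exported above:

```tsx
import {
    Pagination,
    PaginationContent,
    PaginationEllipsis,
    PaginationItem,
    PaginationLink,
    PaginationNext,
    PaginationPrevious,
} from '@/components/pagination/pagination';

// Hypothetical three-page pager; the caller supplies the current page.
export const ExamplePager = ({ page }: { page: number }) => (
    <Pagination>
        <PaginationContent>
            <PaginationItem>
                <PaginationPrevious href="#" />
            </PaginationItem>
            {[1, 2, 3].map((p) => (
                <PaginationItem key={p}>
                    <PaginationLink href="#" isActive={p === page}>
                        {p}
                    </PaginationLink>
                </PaginationItem>
            ))}
            <PaginationItem>
                <PaginationEllipsis />
            </PaginationItem>
            <PaginationItem>
                <PaginationNext href="#" />
            </PaginationItem>
        </PaginationContent>
    </Pagination>
);
```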
@@ -20,6 +20,7 @@ export function Toaster() {
                description,
                action,
                layout = 'row',
                hideCloseButton = false,
                ...props
            }) {
                return (

@@ -38,7 +39,7 @@ export function Toaster() {
                        ) : null}
                    </div>
                    {layout === 'row' ? action : null}
                    <ToastClose />
                    {!hideCloseButton ? <ToastClose /> : null}
                </Toast>
            );
        })}
@@ -12,6 +12,7 @@ type ToasterToast = ToastProps & {
    description?: React.ReactNode;
    action?: ToastActionElement;
    layout?: 'row' | 'column';
    hideCloseButton?: boolean;
};

// eslint-disable-next-line @typescript-eslint/no-unused-vars
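Together these two changes let a caller opt out of the close button per toast. A sketch of such a call; the `toast()` helper itself is assumed to be the usual shadcn-style export of this hook module and is declared here only for illustration:

```ts
// Sketch only: `toast` stands in for whatever helper this hook module exposes;
// the point is the two newer fields, `layout` and `hideCloseButton`.
declare const toast: (props: {
    title?: string;
    description?: string;
    layout?: 'row' | 'column';
    hideCloseButton?: boolean;
}) => void;

toast({
    title: 'Export started',
    description: 'You can keep working while the file is generated.',
    layout: 'column',
    hideCloseButton: true,
});
```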
src/components/tree-view/tree-item-skeleton.tsx (new file, 17 lines)

@@ -0,0 +1,17 @@
import React from 'react';
import { Skeleton } from '../skeleton/skeleton';
import { cn } from '@/lib/utils';

export interface TreeItemSkeletonProps
    extends React.HTMLAttributes<HTMLDivElement> {}

export const TreeItemSkeleton: React.FC<TreeItemSkeletonProps> = ({
    className,
    style,
}) => {
    return (
        <div className={cn('px-2 py-1', className)} style={style}>
            <Skeleton className="h-3.5 w-full rounded-sm" />
        </div>
    );
};
src/components/tree-view/tree-view.tsx (new file, 461 lines)

@@ -0,0 +1,461 @@
import {
    ChevronRight,
    File,
    Folder,
    Loader2,
    type LucideIcon,
} from 'lucide-react';
import { motion, AnimatePresence } from 'framer-motion';
import { cn } from '@/lib/utils';
import { Button } from '@/components/button/button';
import type {
    TreeNode,
    FetchChildrenFunction,
    SelectableTreeProps,
} from './tree';
import type { ExpandedState } from './use-tree';
import { useTree } from './use-tree';
import type { Dispatch, ReactNode, SetStateAction } from 'react';
import React, { useCallback, useEffect, useMemo, useState } from 'react';
import { TreeItemSkeleton } from './tree-item-skeleton';
import {
    Tooltip,
    TooltipContent,
    TooltipTrigger,
} from '@/components/tooltip/tooltip';

interface TreeViewProps<
    Type extends string,
    Context extends Record<Type, unknown>,
> {
    data: TreeNode<Type, Context>[];
    fetchChildren?: FetchChildrenFunction<Type, Context>;
    onNodeClick?: (node: TreeNode<Type, Context>) => void;
    className?: string;
    defaultIcon?: LucideIcon;
    defaultFolderIcon?: LucideIcon;
    defaultIconProps?: React.ComponentProps<LucideIcon>;
    defaultFolderIconProps?: React.ComponentProps<LucideIcon>;
    selectable?: SelectableTreeProps<Type, Context>;
    expanded?: ExpandedState;
    setExpanded?: Dispatch<SetStateAction<ExpandedState>>;
    renderHoverComponent?: (node: TreeNode<Type, Context>) => ReactNode;
    renderActionsComponent?: (node: TreeNode<Type, Context>) => ReactNode;
    loadingNodeIds?: string[];
}

export function TreeView<
    Type extends string,
    Context extends Record<Type, unknown>,
>({
    data,
    fetchChildren,
    onNodeClick,
    className,
    defaultIcon = File,
    defaultFolderIcon = Folder,
    defaultIconProps,
    defaultFolderIconProps,
    selectable,
    expanded: expandedProp,
    setExpanded: setExpandedProp,
    renderHoverComponent,
    renderActionsComponent,
    loadingNodeIds,
}: TreeViewProps<Type, Context>) {
    const { expanded, loading, loadedChildren, hasMoreChildren, toggleNode } =
        useTree({
            fetchChildren,
            expanded: expandedProp,
            setExpanded: setExpandedProp,
        });
    const [selectedIdInternal, setSelectedIdInternal] = React.useState<
        string | undefined
    >(selectable?.defaultSelectedId);

    const selectedId = useMemo(() => {
        return selectable?.selectedId ?? selectedIdInternal;
    }, [selectable?.selectedId, selectedIdInternal]);

    const setSelectedId = useCallback(
        (value: SetStateAction<string | undefined>) => {
            if (selectable?.setSelectedId) {
                selectable.setSelectedId(value);
            } else {
                setSelectedIdInternal(value);
            }
        },
        [selectable, setSelectedIdInternal]
    );

    useEffect(() => {
        if (selectable?.enabled && selectable.defaultSelectedId) {
            if (selectable.defaultSelectedId === selectedId) return;
            setSelectedId(selectable.defaultSelectedId);
            const { node, path } = findNodeById(
                data,
                selectable.defaultSelectedId
            );

            if (node) {
                selectable.onSelectedChange?.(node);

                // Expand all parent nodes
                for (const parent of path) {
                    if (expanded[parent.id]) continue;
                    toggleNode(
                        parent.id,
                        parent.type,
                        parent.context,
                        parent.children
                    );
                }
            }
        }
    }, [selectable, toggleNode, selectedId, data, expanded, setSelectedId]);

    const handleNodeSelect = (node: TreeNode<Type, Context>) => {
        if (selectable?.enabled) {
            setSelectedId(node.id);
            selectable.onSelectedChange?.(node);
        }
    };

    return (
        <div className={cn('w-full', className)}>
            {data.map((node, index) => (
                <TreeNode
                    key={node.id}
                    node={node}
                    level={0}
                    expanded={expanded}
                    loading={loading}
                    loadedChildren={loadedChildren}
                    hasMoreChildren={hasMoreChildren}
                    onToggle={toggleNode}
                    onNodeClick={onNodeClick}
                    defaultIcon={defaultIcon}
                    defaultFolderIcon={defaultFolderIcon}
                    defaultIconProps={defaultIconProps}
                    defaultFolderIconProps={defaultFolderIconProps}
                    selectable={selectable?.enabled}
                    selectedId={selectedId}
                    onSelect={handleNodeSelect}
                    className={index > 0 ? 'mt-0.5' : ''}
                    renderHoverComponent={renderHoverComponent}
                    renderActionsComponent={renderActionsComponent}
                    loadingNodeIds={loadingNodeIds}
                />
            ))}
        </div>
    );
}

interface TreeNodeProps<
    Type extends string,
    Context extends Record<Type, unknown>,
> {
    node: TreeNode<Type, Context>;
    level: number;
    expanded: Record<string, boolean>;
    loading: Record<string, boolean>;
    loadedChildren: Record<string, TreeNode<Type, Context>[]>;
    hasMoreChildren: Record<string, boolean>;
    onToggle: (
        nodeId: string,
        nodeType: Type,
        nodeContext: Context[Type],
        staticChildren?: TreeNode<Type, Context>[]
    ) => void;
    onNodeClick?: (node: TreeNode<Type, Context>) => void;
    defaultIcon: LucideIcon;
    defaultFolderIcon: LucideIcon;
    defaultIconProps?: React.ComponentProps<LucideIcon>;
    defaultFolderIconProps?: React.ComponentProps<LucideIcon>;
    selectable?: boolean;
    selectedId?: string;
    onSelect: (node: TreeNode<Type, Context>) => void;
    className?: string;
    renderHoverComponent?: (node: TreeNode<Type, Context>) => ReactNode;
    renderActionsComponent?: (node: TreeNode<Type, Context>) => ReactNode;
    loadingNodeIds?: string[];
}

function TreeNode<Type extends string, Context extends Record<Type, unknown>>({
    node,
    level,
    expanded,
    loading,
    loadedChildren,
    hasMoreChildren,
    onToggle,
    onNodeClick,
    defaultIcon: DefaultIcon,
    defaultFolderIcon: DefaultFolderIcon,
    defaultIconProps,
    defaultFolderIconProps,
    selectable,
    selectedId,
    onSelect,
    className,
    renderHoverComponent,
    renderActionsComponent,
    loadingNodeIds,
}: TreeNodeProps<Type, Context>) {
    const [isHovered, setIsHovered] = useState(false);
    const isExpanded = expanded[node.id];
    const isLoading = loading[node.id];
    const children = loadedChildren[node.id] || node.children;
    const isSelected = selectedId === node.id;

    const IconComponent =
        node.icon || (node.isFolder ? DefaultFolderIcon : DefaultIcon);
    const iconProps: React.ComponentProps<LucideIcon> = {
        strokeWidth: isSelected ? 2.5 : 2,
        ...(node.isFolder ? defaultFolderIconProps : defaultIconProps),
        ...node.iconProps,
        className: cn(
            'h-3.5 w-3.5 text-muted-foreground flex-none',
            isSelected && 'text-primary text-white',
            node.iconProps?.className
        ),
    };

    return (
        <div className={cn(className)}>
            <div
                className={cn(
                    'flex items-center gap-1.5 px-2 py-1 rounded-lg cursor-pointer group h-6',
                    'transition-colors duration-200',
                    isSelected
                        ? 'bg-sky-500 border border-sky-600 border dark:bg-sky-600 dark:border-sky-700'
                        : 'hover:bg-gray-200/50 border border-transparent dark:hover:bg-gray-700/50',
                    node.className
                )}
                {...(isSelected ? { 'data-selected': true } : {})}
                style={{ paddingLeft: `${level * 16 + 8}px` }}
                onMouseEnter={() => setIsHovered(true)}
                onMouseLeave={() => setIsHovered(false)}
                onClick={(e) => {
                    e.stopPropagation();
                    if (selectable && !node.unselectable) {
                        onSelect(node);
                    }
                    // if (node.isFolder) {
                    //     onToggle(node.id, node.children);
                    // }

                    // called only once in case of double click
                    if (e.detail !== 2) {
                        onNodeClick?.(node);
                    }
                }}
                onDoubleClick={(e) => {
                    e.stopPropagation();
                    if (node.isFolder) {
                        onToggle(
                            node.id,
                            node.type,
                            node.context,
                            node.children
                        );
                    }
                }}
            >
                <div className="flex flex-none items-center gap-1.5">
                    <Button
                        variant="ghost"
                        size="icon"
                        className={cn(
                            'h-3.5 w-3.5 p-0 hover:bg-transparent flex-none',
                            isExpanded && 'rotate-90',
                            'transition-transform duration-200'
                        )}
                        onClick={(e) => {
                            e.stopPropagation();
                            if (node.isFolder) {
                                onToggle(
                                    node.id,
                                    node.type,
                                    node.context,
                                    node.children
                                );
                            }
                        }}
                    >
                        {node.isFolder &&
                            (isLoading ? (
                                <Loader2
                                    className={cn('size-3.5 animate-spin', {
                                        'text-white': isSelected,
                                    })}
                                />
                            ) : (
                                <ChevronRight
                                    className={cn('size-3.5', {
                                        'text-white': isSelected,
                                    })}
                                    strokeWidth={2}
                                />
                            ))}
                    </Button>

                    {node.tooltip ? (
                        <Tooltip>
                            <TooltipTrigger asChild>
                                {loadingNodeIds?.includes(node.id) ? (
                                    <Loader2
                                        className={cn('size-3.5 animate-spin', {
                                            'text-white': isSelected,
                                        })}
                                    />
                                ) : (
                                    <IconComponent
                                        {...(isSelected
                                            ? { 'data-selected': true }
                                            : {})}
                                        {...iconProps}
                                    />
                                )}
                            </TooltipTrigger>
                            <TooltipContent
                                align="center"
                                className="max-w-[400px]"
                            >
                                {node.tooltip}
                            </TooltipContent>
                        </Tooltip>
                    ) : node.empty ? null : loadingNodeIds?.includes(
                          node.id
                      ) ? (
                        <Loader2
                            className={cn('size-3.5 animate-spin', {
                                // 'text-white': isSelected,
                            })}
                        />
                    ) : (
                        <IconComponent
                            {...(isSelected ? { 'data-selected': true } : {})}
                            {...iconProps}
                        />
                    )}
                </div>

                <span
                    {...node.labelProps}
                    className={cn(
                        'text-xs truncate min-w-0 flex-1 w-0',
                        isSelected && 'font-medium text-primary text-white',
                        node.labelProps?.className
                    )}
                    {...(isSelected ? { 'data-selected': true } : {})}
                >
                    {node.empty ? '' : node.name}
                </span>
                {renderActionsComponent && renderActionsComponent(node)}
                {isHovered && renderHoverComponent
                    ? renderHoverComponent(node)
                    : null}
            </div>

            <AnimatePresence initial={false}>
                {isExpanded && children && (
                    <motion.div
                        initial={{ height: 0, opacity: 0 }}
                        animate={{
                            height: 'auto',
                            opacity: 1,
                            transition: {
                                height: {
                                    duration: Math.min(
                                        0.3 + children.length * 0.018,
                                        0.7
                                    ),
                                    ease: 'easeInOut',
                                },
                                opacity: {
                                    duration: Math.min(
                                        0.2 + children.length * 0.012,
                                        0.4
                                    ),
                                    ease: 'easeInOut',
                                },
                            },
                        }}
                        exit={{
                            height: 0,
                            opacity: 0,
                            transition: {
                                height: {
                                    duration: Math.min(
                                        0.2 + children.length * 0.01,
                                        0.45
                                    ),
                                    ease: 'easeInOut',
                                },
                                opacity: {
                                    duration: 0.1,
                                    ease: 'easeOut',
                                },
                            },
                        }}
                        style={{ overflow: 'hidden' }}
                    >
                        {children.map((child) => (
                            <TreeNode
                                key={child.id}
                                node={child}
                                level={level + 1}
                                expanded={expanded}
                                loading={loading}
                                loadedChildren={loadedChildren}
                                hasMoreChildren={hasMoreChildren}
                                onToggle={onToggle}
                                onNodeClick={onNodeClick}
                                defaultIcon={DefaultIcon}
                                defaultFolderIcon={DefaultFolderIcon}
                                defaultIconProps={defaultIconProps}
                                defaultFolderIconProps={defaultFolderIconProps}
                                selectable={selectable}
                                selectedId={selectedId}
                                onSelect={onSelect}
                                className="mt-0.5"
                                renderHoverComponent={renderHoverComponent}
                                renderActionsComponent={renderActionsComponent}
                                loadingNodeIds={loadingNodeIds}
                            />
                        ))}
                        {isLoading ? (
                            <TreeItemSkeleton
                                style={{
                                    paddingLeft: `${level + 2 * 16 + 8}px`,
                                }}
                            />
                        ) : null}
                    </motion.div>
                )}
            </AnimatePresence>
        </div>
    );
}

function findNodeById<
    Type extends string,
    Context extends Record<Type, unknown>,
>(
    nodes: TreeNode<Type, Context>[],
    id: string,
    initialPath: TreeNode<Type, Context>[] = []
): { node: TreeNode<Type, Context> | null; path: TreeNode<Type, Context>[] } {
    const path: TreeNode<Type, Context>[] = [...initialPath];
    for (const node of nodes) {
        if (node.id === id) return { node, path };
        if (node.children) {
            const found = findNodeById(node.children, id, [...path, node]);
            if (found.node) {
                return found;
            }
        }
    }
    return { node: null, path };
}
src/components/tree-view/tree.ts (new file, 41 lines)

@@ -0,0 +1,41 @@
import type { LucideIcon } from 'lucide-react';
import type React from 'react';

export interface TreeNode<
    Type extends string,
    Context extends Record<Type, unknown>,
> {
    id: string;
    name: string;
    isFolder?: boolean;
    children?: TreeNode<Type, Context>[];
    icon?: LucideIcon;
    iconProps?: React.ComponentProps<LucideIcon>;
    labelProps?: React.ComponentProps<'span'>;
    type: Type;
    unselectable?: boolean;
    tooltip?: string;
    context: Context[Type];
    empty?: boolean;
    className?: string;
}

export type FetchChildrenFunction<
    Type extends string,
    Context extends Record<Type, unknown>,
> = (
    nodeId: string,
    nodeType: Type,
    nodeContext: Context[Type]
) => Promise<TreeNode<Type, Context>[]>;

export interface SelectableTreeProps<
    Type extends string,
    Context extends Record<Type, unknown>,
> {
    enabled: boolean;
    defaultSelectedId?: string;
    onSelectedChange?: (node: TreeNode<Type, Context>) => void;
    selectedId?: string;
    setSelectedId?: React.Dispatch<React.SetStateAction<string | undefined>>;
}
src/components/tree-view/use-tree.ts (new file, 153 lines)

@@ -0,0 +1,153 @@
import type { Dispatch, SetStateAction } from 'react';
import { useState, useCallback, useMemo } from 'react';
import type { TreeNode, FetchChildrenFunction } from './tree';

export interface ExpandedState {
    [key: string]: boolean;
}

interface LoadingState {
    [key: string]: boolean;
}

interface LoadedChildren<
    Type extends string,
    Context extends Record<Type, unknown>,
> {
    [key: string]: TreeNode<Type, Context>[];
}

interface HasMoreChildrenState {
    [key: string]: boolean;
}

export function useTree<
    Type extends string,
    Context extends Record<Type, unknown>,
>({
    fetchChildren,
    expanded: expandedProp,
    setExpanded: setExpandedProp,
}: {
    fetchChildren?: FetchChildrenFunction<Type, Context>;
    expanded?: ExpandedState;
    setExpanded?: Dispatch<SetStateAction<ExpandedState>>;
}) {
    const [expandedInternal, setExpandedInternal] = useState<ExpandedState>({});

    const expanded = useMemo(
        () => expandedProp ?? expandedInternal,
        [expandedProp, expandedInternal]
    );
    const setExpanded = useCallback(
        (value: SetStateAction<ExpandedState>) => {
            if (setExpandedProp) {
                setExpandedProp(value);
            } else {
                setExpandedInternal(value);
            }
        },
        [setExpandedProp, setExpandedInternal]
    );

    const [loading, setLoading] = useState<LoadingState>({});
    const [loadedChildren, setLoadedChildren] = useState<
        LoadedChildren<Type, Context>
    >({});
    const [hasMoreChildren, setHasMoreChildren] =
        useState<HasMoreChildrenState>({});

    const mergeChildren = useCallback(
        (
            staticChildren: TreeNode<Type, Context>[] = [],
            fetchedChildren: TreeNode<Type, Context>[] = []
        ) => {
            const fetchedChildrenIds = new Set(
                fetchedChildren.map((child) => child.id)
            );
            const uniqueStaticChildren = staticChildren.filter(
                (child) => !fetchedChildrenIds.has(child.id)
            );
            return [...uniqueStaticChildren, ...fetchedChildren];
        },
        []
    );

    const toggleNode = useCallback(
        async (
            nodeId: string,
            nodeType: Type,
            nodeContext: Context[Type],
            staticChildren?: TreeNode<Type, Context>[]
        ) => {
            if (expanded[nodeId]) {
                // If we're collapsing, just update expanded state
                setExpanded((prev) => ({ ...prev, [nodeId]: false }));
                return;
            }

            // Get any previously fetched children
            const previouslyFetchedChildren = loadedChildren[nodeId] || [];

            // If we have static children, merge them with any previously fetched children
            if (staticChildren?.length) {
                const mergedChildren = mergeChildren(
                    staticChildren,
                    previouslyFetchedChildren
                );
                setLoadedChildren((prev) => ({
                    ...prev,
                    [nodeId]: mergedChildren,
                }));

                // Only show "more loading" if we haven't fetched children before
                setHasMoreChildren((prev) => ({
                    ...prev,
                    [nodeId]: !previouslyFetchedChildren.length,
                }));
            }

            // Set expanded state immediately to show static/previously fetched children
            setExpanded((prev) => ({ ...prev, [nodeId]: true }));

            // If we haven't loaded dynamic children yet
            if (!previouslyFetchedChildren.length) {
                setLoading((prev) => ({ ...prev, [nodeId]: true }));
                try {
                    const fetchedChildren = await fetchChildren?.(
                        nodeId,
                        nodeType,
                        nodeContext
                    );
                    // Merge static and newly fetched children
                    const allChildren = mergeChildren(
                        staticChildren || [],
                        fetchedChildren
                    );

                    setLoadedChildren((prev) => ({
                        ...prev,
                        [nodeId]: allChildren,
                    }));
                    setHasMoreChildren((prev) => ({
                        ...prev,
                        [nodeId]: false,
                    }));
                } catch (error) {
                    console.error('Error loading children:', error);
                } finally {
                    setLoading((prev) => ({ ...prev, [nodeId]: false }));
                }
            }
        },
        [expanded, loadedChildren, fetchChildren, mergeChildren, setExpanded]
    );

    return {
        expanded,
        loading,
        loadedChildren,
        hasMoreChildren,
        toggleNode,
    };
}
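Taken together, `tree.ts`, `use-tree.ts`, and `tree-view.tsx` form a lazily loaded tree: children are fetched the first time a folder is expanded and cached by `useTree`. A minimal sketch of how a caller might wire it up; the node kinds, the context payloads, and the database-flavoured names are illustrative assumptions, not part of the diff:

```tsx
import { TreeView } from '@/components/tree-view/tree-view';
import type {
    FetchChildrenFunction,
    TreeNode,
} from '@/components/tree-view/tree';

// Two node kinds; the context carried by each kind is up to the caller.
type NodeType = 'schema' | 'table';
interface NodeContext extends Record<NodeType, unknown> {
    schema: { name: string };
    table: { schema: string };
}

const data: TreeNode<NodeType, NodeContext>[] = [
    {
        id: 'public',
        name: 'public',
        type: 'schema',
        isFolder: true,
        context: { name: 'public' },
    },
];

// Resolved children are merged with any static children and cached per node id.
const fetchChildren: FetchChildrenFunction<NodeType, NodeContext> = async (
    nodeId,
    nodeType
) => {
    if (nodeType !== 'schema') return [];
    return [
        {
            id: `${nodeId}.users`,
            name: 'users',
            type: 'table',
            context: { schema: nodeId },
        },
    ];
};

export const ExampleTree = () => (
    <TreeView<NodeType, NodeContext>
        data={data}
        fetchChildren={fetchChildren}
        selectable={{ enabled: true }}
    />
);
```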
@@ -12,6 +12,8 @@ export interface CanvasContext {
    }) => void;
    setOverlapGraph: (graph: Graph<string>) => void;
    overlapGraph: Graph<string>;
    setShowFilter: React.Dispatch<React.SetStateAction<boolean>>;
    showFilter: boolean;
}

export const canvasContext = createContext<CanvasContext>({
@@ -19,4 +21,6 @@ export const canvasContext = createContext<CanvasContext>({
    fitView: emptyFn,
    setOverlapGraph: emptyFn,
    overlapGraph: createGraph(),
    setShowFilter: emptyFn,
    showFilter: false,
});
@@ -21,6 +21,8 @@ export const CanvasProvider = ({ children }: CanvasProviderProps) => {
    const [overlapGraph, setOverlapGraph] =
        useState<Graph<string>>(createGraph());

    const [showFilter, setShowFilter] = useState(false);

    const reorderTables = useCallback(
        (
            options: { updateHistory?: boolean } = {
@@ -77,6 +79,8 @@ export const CanvasProvider = ({ children }: CanvasProviderProps) => {
                fitView,
                setOverlapGraph,
                overlapGraph,
                setShowFilter,
                showFilter,
            }}
        >
            {children}
@@ -277,6 +277,11 @@ export interface ChartDBContext {
        customType: Partial<DBCustomType>,
        options?: { updateHistory: boolean }
    ) => Promise<void>;

    // Filters
    hiddenTableIds?: string[];
    addHiddenTableId: (tableId: string) => Promise<void>;
    removeHiddenTableId: (tableId: string) => Promise<void>;
}

export const chartDBContext = createContext<ChartDBContext>({
@@ -372,4 +377,9 @@ export const chartDBContext = createContext<ChartDBContext>({
    removeCustomType: emptyFn,
    removeCustomTypes: emptyFn,
    updateCustomType: emptyFn,

    // Filters
    hiddenTableIds: [],
    addHiddenTableId: emptyFn,
    removeHiddenTableId: emptyFn,
});
@@ -1,4 +1,4 @@
import React, { useCallback, useMemo, useState } from 'react';
import React, { useCallback, useEffect, useMemo, useState } from 'react';
import type { DBTable } from '@/lib/domain/db-table';
import { deepCopy, generateId } from '@/lib/utils';
import { randomColor } from '@/lib/colors';
@@ -29,6 +29,7 @@ import {
    DBCustomTypeKind,
    type DBCustomType,
} from '@/lib/domain/db-custom-type';
import { useConfig } from '@/hooks/use-config';

export interface ChartDBProviderProps {
    diagram?: Diagram;
@@ -44,6 +45,11 @@ export const ChartDBProvider: React.FC<
    const { setSchemasFilter, schemasFilter } = useLocalConfig();
    const { addUndoAction, resetRedoStack, resetUndoStack } =
        useRedoUndoStack();
    const {
        getHiddenTablesForDiagram,
        hideTableForDiagram,
        unhideTableForDiagram,
    } = useConfig();
    const [diagramId, setDiagramId] = useState('');
    const [diagramName, setDiagramName] = useState('');
    const [diagramCreatedAt, setDiagramCreatedAt] = useState<Date>(new Date());
@@ -65,6 +71,7 @@ export const ChartDBProvider: React.FC<
    const [customTypes, setCustomTypes] = useState<DBCustomType[]>(
        diagram?.customTypes ?? []
    );
    const [hiddenTableIds, setHiddenTableIds] = useState<string[]>([]);
    const { events: diffEvents } = useDiff();

    const diffCalculatedHandler = useCallback((event: DiffCalculatedEvent) => {
@@ -85,6 +92,14 @@ export const ChartDBProvider: React.FC<

    diffEvents.useSubscription(diffCalculatedHandler);

    // Sync hiddenTableIds with config
    useEffect(() => {
        if (diagramId) {
            const hiddenTables = getHiddenTablesForDiagram(diagramId);
            setHiddenTableIds(hiddenTables);
        }
    }, [diagramId, getHiddenTablesForDiagram]);

    const defaultSchemaName = defaultSchemas[databaseType];

    const readonly = useMemo(
@@ -1712,6 +1727,29 @@ export const ChartDBProvider: React.FC<
        ]
    );

    const addHiddenTableId: ChartDBContext['addHiddenTableId'] = useCallback(
        async (tableId: string) => {
            if (!hiddenTableIds.includes(tableId)) {
                setHiddenTableIds((prev) => [...prev, tableId]);
                await hideTableForDiagram(diagramId, tableId);
            }
        },
        [hiddenTableIds, diagramId, hideTableForDiagram]
    );

    const removeHiddenTableId: ChartDBContext['removeHiddenTableId'] =
        useCallback(
            async (tableId: string) => {
                if (hiddenTableIds.includes(tableId)) {
                    setHiddenTableIds((prev) =>
                        prev.filter((id) => id !== tableId)
                    );
                    await unhideTableForDiagram(diagramId, tableId);
                }
            },
            [hiddenTableIds, diagramId, unhideTableForDiagram]
        );

    return (
        <chartDBContext.Provider
            value={{
@@ -1784,6 +1822,9 @@ export const ChartDBProvider: React.FC<
                removeCustomType,
                removeCustomTypes,
                updateCustomType,
                hiddenTableIds,
                addHiddenTableId,
                removeHiddenTableId,
            }}
        >
            {children}
@@ -8,9 +8,23 @@ export interface ConfigContext {
        config?: Partial<ChartDBConfig>;
        updateFn?: (config: ChartDBConfig) => ChartDBConfig;
    }) => Promise<void>;
    getHiddenTablesForDiagram: (diagramId: string) => string[];
    setHiddenTablesForDiagram: (
        diagramId: string,
        hiddenTableIds: string[]
    ) => Promise<void>;
    hideTableForDiagram: (diagramId: string, tableId: string) => Promise<void>;
    unhideTableForDiagram: (
        diagramId: string,
        tableId: string
    ) => Promise<void>;
}

export const ConfigContext = createContext<ConfigContext>({
    config: undefined,
    updateConfig: emptyFn,
    getHiddenTablesForDiagram: () => [],
    setHiddenTablesForDiagram: emptyFn,
    hideTableForDiagram: emptyFn,
    unhideTableForDiagram: emptyFn,
});
@@ -44,8 +44,86 @@ export const ConfigProvider: React.FC<React.PropsWithChildren> = ({
        return promise;
    };

    const getHiddenTablesForDiagram = (diagramId: string): string[] => {
        return config?.hiddenTablesByDiagram?.[diagramId] ?? [];
    };

    const setHiddenTablesForDiagram = async (
        diagramId: string,
        hiddenTableIds: string[]
    ): Promise<void> => {
        return updateConfig({
            updateFn: (currentConfig) => ({
                ...currentConfig,
                hiddenTablesByDiagram: {
                    ...currentConfig.hiddenTablesByDiagram,
                    [diagramId]: hiddenTableIds,
                },
            }),
        });
    };

    const hideTableForDiagram = async (
        diagramId: string,
        tableId: string
    ): Promise<void> => {
        return updateConfig({
            updateFn: (currentConfig) => {
                const currentHiddenTables =
                    currentConfig.hiddenTablesByDiagram?.[diagramId] ?? [];
                if (currentHiddenTables.includes(tableId)) {
                    return currentConfig; // Already hidden, no change needed
                }

                return {
                    ...currentConfig,
                    hiddenTablesByDiagram: {
                        ...currentConfig.hiddenTablesByDiagram,
                        [diagramId]: [...currentHiddenTables, tableId],
                    },
                };
            },
        });
    };

    const unhideTableForDiagram = async (
        diagramId: string,
        tableId: string
    ): Promise<void> => {
        return updateConfig({
            updateFn: (currentConfig) => {
                const currentHiddenTables =
                    currentConfig.hiddenTablesByDiagram?.[diagramId] ?? [];
                const filteredTables = currentHiddenTables.filter(
                    (id) => id !== tableId
                );

                if (filteredTables.length === currentHiddenTables.length) {
                    return currentConfig; // Not hidden, no change needed
                }

                return {
                    ...currentConfig,
                    hiddenTablesByDiagram: {
                        ...currentConfig.hiddenTablesByDiagram,
                        [diagramId]: filteredTables,
                    },
                };
            },
        });
    };

    return (
        <ConfigContext.Provider value={{ config, updateConfig }}>
        <ConfigContext.Provider
            value={{
                config,
                updateConfig,
                getHiddenTablesForDiagram,
                setHiddenTablesForDiagram,
                hideTableForDiagram,
                unhideTableForDiagram,
            }}
        >
            {children}
        </ConfigContext.Provider>
    );
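The config side persists hidden tables per diagram under `hiddenTablesByDiagram`, and `ChartDBProvider` mirrors that into its `hiddenTableIds` state and exposes `addHiddenTableId`/`removeHiddenTableId`. A rough sketch of a consumer toggling a table's visibility through the context; the import path and the helper hook are assumptions for illustration, only the context shape comes from the diff:

```tsx
import { useContext } from 'react';
// Assumed path for the context object exported as `chartDBContext` above.
import { chartDBContext } from '@/context/chartdb-context/chartdb-context';

// Hypothetical helper for a table row in a side panel or filter list.
export const useTableVisibilityToggle = (tableId: string) => {
    const { hiddenTableIds, addHiddenTableId, removeHiddenTableId } =
        useContext(chartDBContext);

    const hidden = hiddenTableIds?.includes(tableId) ?? false;

    // Hiding or unhiding also persists per diagram via the config provider.
    const toggle = () =>
        hidden ? removeHiddenTableId(tableId) : addHiddenTableId(tableId);

    return { hidden, toggle };
};
```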
@@ -8,6 +8,7 @@ export enum KeyboardShortcutAction {
    TOGGLE_SIDE_PANEL = 'toggle_side_panel',
    SHOW_ALL = 'show_all',
    TOGGLE_THEME = 'toggle_theme',
    TOGGLE_FILTER = 'toggle_filter',
}

export interface KeyboardShortcut {
@@ -71,6 +72,13 @@ export const keyboardShortcuts: Record<
        keyCombinationMac: 'meta+m',
        keyCombinationWin: 'ctrl+m',
    },
    [KeyboardShortcutAction.TOGGLE_FILTER]: {
        action: KeyboardShortcutAction.TOGGLE_FILTER,
        keyCombinationLabelMac: '⌘F',
        keyCombinationLabelWin: 'Ctrl+F',
        keyCombinationMac: 'meta+f',
        keyCombinationWin: 'ctrl+f',
    },
};

export interface KeyboardShortcutForOS {
@@ -48,6 +48,7 @@ export const ThemeProvider: React.FC<React.PropsWithChildren> = ({
        handleThemeToggle,
        {
            preventDefault: true,
            enableOnFormTags: true,
        },
        [handleThemeToggle]
    );
@@ -35,8 +35,22 @@ import type { OnChange } from '@monaco-editor/react';
import { useDebounce } from '@/hooks/use-debounce-v2';
import { InstructionsSection } from './instructions-section/instructions-section';
import { parseSQLError } from '@/lib/data/sql-import';
import type { editor } from 'monaco-editor';
import type { editor, IDisposable } from 'monaco-editor';
import { waitFor } from '@/lib/utils';
import {
    validateSQL,
    type ValidationResult,
} from '@/lib/data/sql-import/sql-validator';
import { SQLValidationStatus } from './sql-validation-status';

const calculateContentSizeMB = (content: string): number => {
    return content.length / (1024 * 1024); // Convert to MB
};

const calculateIsLargeFile = (content: string): boolean => {
    const contentSizeMB = calculateContentSizeMB(content);
    return contentSizeMB > 2; // Consider large if over 2MB
};

const errorScriptOutputMessage =
    'Invalid JSON. Please correct it or contact us at support@chartdb.io for help.';

@@ -118,6 +132,7 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
    const { effectiveTheme } = useTheme();
    const [errorMessage, setErrorMessage] = useState('');
    const editorRef = useRef<editor.IStandaloneCodeEditor | null>(null);
    const pasteDisposableRef = useRef<IDisposable | null>(null);

    const { t } = useTranslation();
    const { isSm: isDesktop } = useBreakpoint('sm');

@@ -125,6 +140,11 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
    const [showCheckJsonButton, setShowCheckJsonButton] = useState(false);
    const [isCheckingJson, setIsCheckingJson] = useState(false);
    const [showSSMSInfoDialog, setShowSSMSInfoDialog] = useState(false);
    const [sqlValidation, setSqlValidation] = useState<ValidationResult | null>(
        null
    );
    const [isAutoFixing, setIsAutoFixing] = useState(false);
    const [showAutoFixButton, setShowAutoFixButton] = useState(false);

    useEffect(() => {
        setScriptResult('');

@@ -135,11 +155,33 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
    // Check if the ddl is valid
    useEffect(() => {
        if (importMethod !== 'ddl') {
            setSqlValidation(null);
            setShowAutoFixButton(false);
            return;
        }

        if (!scriptResult.trim()) return;
        if (!scriptResult.trim()) {
            setSqlValidation(null);
            setShowAutoFixButton(false);
            return;
        }

        // First run our validation based on database type
        const validation = validateSQL(scriptResult, databaseType);
        setSqlValidation(validation);

        // If we have auto-fixable errors, show the auto-fix button
        if (validation.fixedSQL && validation.errors.length > 0) {
            setShowAutoFixButton(true);
            // Don't try to parse invalid SQL
            setErrorMessage('SQL contains syntax errors');
            return;
        }

        // Hide auto-fix button if no fixes available
        setShowAutoFixButton(false);

        // Validate the SQL (either original or already fixed)
        parseSQLError({
            sqlContent: scriptResult,
            sourceDatabaseType: databaseType,

@@ -185,8 +227,44 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
        }
    }, [errorMessage.length, onImport, scriptResult]);

    const handleAutoFix = useCallback(() => {
        if (sqlValidation?.fixedSQL) {
            setIsAutoFixing(true);
            setShowAutoFixButton(false);
            setErrorMessage('');

            // Apply the fix with a delay so user sees the fixing message
            setTimeout(() => {
                setScriptResult(sqlValidation.fixedSQL!);

                setTimeout(() => {
                    setIsAutoFixing(false);
                }, 100);
            }, 1000);
        }
    }, [sqlValidation, setScriptResult]);

    const handleErrorClick = useCallback((line: number) => {
        if (editorRef.current) {
            // Set cursor to the error line
            editorRef.current.setPosition({ lineNumber: line, column: 1 });
            editorRef.current.revealLineInCenter(line);
            editorRef.current.focus();
        }
    }, []);

    const formatEditor = useCallback(() => {
        if (editorRef.current) {
            const model = editorRef.current.getModel();
            if (model) {
                const content = model.getValue();

                // Skip formatting for large files (> 2MB)
                if (calculateIsLargeFile(content)) {
                    return;
                }
            }

            setTimeout(() => {
                editorRef.current
                    ?.getAction('editor.action.formatDocument')

@@ -229,37 +307,69 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
        setIsCheckingJson(false);
    }, [scriptResult, setScriptResult, formatEditor]);

    const detectAndSetImportMethod = useCallback(() => {
        const content = editorRef.current?.getValue();
        if (content && content.trim()) {
            const detectedType = detectContentType(content);
            if (detectedType && detectedType !== importMethod) {
                setImportMethod(detectedType);
            }
        }
    }, [setImportMethod, importMethod]);

    const [editorDidMount, setEditorDidMount] = useState(false);

    useEffect(() => {
        if (editorRef.current && editorDidMount) {
            editorRef.current.onDidPaste(() => {
                setTimeout(() => {
                    editorRef.current
                        ?.getAction('editor.action.formatDocument')
                        ?.run();
                }, 0);
                setTimeout(detectAndSetImportMethod, 0);
            });
        }
    }, [detectAndSetImportMethod, editorDidMount]);
        // Cleanup paste handler on unmount
        return () => {
            if (pasteDisposableRef.current) {
                pasteDisposableRef.current.dispose();
                pasteDisposableRef.current = null;
            }
        };
    }, []);

    const handleEditorDidMount = useCallback(
        (editor: editor.IStandaloneCodeEditor) => {
            editorRef.current = editor;
            setEditorDidMount(true);

            // Cleanup previous disposable if it exists
            if (pasteDisposableRef.current) {
                pasteDisposableRef.current.dispose();
                pasteDisposableRef.current = null;
            }

            // Add paste handler for all modes
            const disposable = editor.onDidPaste(() => {
                const model = editor.getModel();
                if (!model) return;

                const content = model.getValue();

                // Skip formatting for large files (> 2MB) to prevent browser freezing
                const isLargeFile = calculateIsLargeFile(content);

                // First, detect content type to determine if we should switch modes
                const detectedType = detectContentType(content);
                if (detectedType && detectedType !== importMethod) {
                    // Switch to the detected mode immediately
                    setImportMethod(detectedType);

                    // Only format if it's JSON (query mode) AND file is not too large
                    if (detectedType === 'query' && !isLargeFile) {
                        // For JSON mode, format after a short delay
                        setTimeout(() => {
                            editor
                                .getAction('editor.action.formatDocument')
                                ?.run();
                        }, 100);
                    }
                    // For DDL mode, do NOT format as it can break the SQL
                } else {
                    // Content type didn't change, apply formatting based on current mode
                    if (importMethod === 'query' && !isLargeFile) {
                        // Only format JSON content if not too large
                        setTimeout(() => {
                            editor
                                .getAction('editor.action.formatDocument')
                                ?.run();
                        }, 100);
                    }
                    // For DDL mode or large files, do NOT format
                }
            });

            pasteDisposableRef.current = disposable;
        },
        []
        [importMethod, setImportMethod]
    );

    const renderHeader = useCallback(() => {

@@ -316,7 +426,7 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
                            : 'dbml-light'
                    }
                    options={{
                        formatOnPaste: true,
                        formatOnPaste: false, // Never format on paste - we handle it manually
                        minimap: { enabled: false },
                        scrollBeyondLastLine: false,
                        automaticLayout: true,

@@ -345,10 +455,13 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
                </Suspense>
            </div>

            {errorMessage ? (
                <div className="mt-2 flex shrink-0 items-center gap-2">
                    <p className="text-xs text-red-700">{errorMessage}</p>
                </div>
            {errorMessage || (importMethod === 'ddl' && sqlValidation) ? (
                <SQLValidationStatus
                    validation={sqlValidation}
                    errorMessage={errorMessage}
                    isAutoFixing={isAutoFixing}
                    onErrorClick={handleErrorClick}
                />
            ) : null}
        </div>
    ),

@@ -359,6 +472,9 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
        effectiveTheme,
        debouncedHandleInputChange,
        handleEditorDidMount,
        sqlValidation,
        isAutoFixing,
        handleErrorClick,
    ]
);

@@ -444,13 +560,28 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
                        )
                    )}
                </Button>
            ) : showAutoFixButton && importMethod === 'ddl' ? (
                <Button
                    type="button"
                    variant="secondary"
                    onClick={handleAutoFix}
                    disabled={isAutoFixing}
                    className="bg-sky-600 text-white hover:bg-sky-700"
                >
                    {isAutoFixing ? (
                        <Spinner size="small" />
                    ) : (
                        'Try auto-fix'
                    )}
                </Button>
            ) : keepDialogAfterImport ? (
                <Button
                    type="button"
                    variant="default"
                    disabled={
                        scriptResult.trim().length === 0 ||
                        errorMessage.length > 0
                        errorMessage.length > 0 ||
                        isAutoFixing
                    }
                    onClick={handleImport}
                >

@@ -463,7 +594,8 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
                    variant="default"
                    disabled={
                        scriptResult.trim().length === 0 ||
                        errorMessage.length > 0
                        errorMessage.length > 0 ||
                        isAutoFixing
                    }
                    onClick={handleImport}
                >

@@ -496,6 +628,10 @@ export const ImportDatabase: React.FC<ImportDatabaseProps> = ({
        handleCheckJson,
        goBack,
        t,
        importMethod,
        isAutoFixing,
        showAutoFixButton,
        handleAutoFix,
    ]);

    return (
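In short, the DDL path now runs `validateSQL` before handing the script to `parseSQLError`, and only offers the auto-fix button when the validator could produce `fixedSQL`. A condensed sketch of that decision outside React state; the helper function is illustrative, and the shape of `ValidationResult` is inferred from how it is consumed in this dialog:

```ts
import { validateSQL } from '@/lib/data/sql-import/sql-validator';

type SourceDatabaseType = Parameters<typeof validateSQL>[1];

// Returns the SQL that should be parsed, or null when the user has to fix it manually.
function resolveImportableSQL(
    script: string,
    databaseType: SourceDatabaseType
): string | null {
    const validation = validateSQL(script, databaseType);

    if (validation.errors.length === 0) {
        return script; // clean input goes straight to parseSQLError
    }

    if (validation.fixedSQL) {
        // mirrors the "Try auto-fix" button: import the rewritten script instead
        return validation.fixedSQL;
    }

    return null; // unfixable errors: surface them via SQLValidationStatus instead
}
```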
src/dialogs/common/import-database/sql-validation-status.tsx (new file, 179 lines)

@@ -0,0 +1,179 @@
import React, { useMemo } from 'react';
import { CheckCircle, AlertTriangle, MessageCircleWarning } from 'lucide-react';
import { Alert, AlertDescription } from '@/components/alert/alert';
import type { ValidationResult } from '@/lib/data/sql-import/sql-validator';
import { Separator } from '@/components/separator/separator';
import { ScrollArea } from '@/components/scroll-area/scroll-area';
import { Spinner } from '@/components/spinner/spinner';

interface SQLValidationStatusProps {
    validation?: ValidationResult | null;
    errorMessage: string;
    isAutoFixing?: boolean;
    onErrorClick?: (line: number) => void;
}

export const SQLValidationStatus: React.FC<SQLValidationStatusProps> = ({
    validation,
    errorMessage,
    isAutoFixing = false,
    onErrorClick,
}) => {
    const hasErrors = useMemo(
        () => validation?.errors.length && validation.errors.length > 0,
        [validation?.errors]
    );
    const hasWarnings = useMemo(
        () => validation?.warnings && validation.warnings.length > 0,
        [validation?.warnings]
    );
    const wasAutoFixed = useMemo(
        () =>
            validation?.warnings?.some((w) =>
                w.message.includes('Auto-fixed')
            ) || false,
        [validation?.warnings]
    );

    if (!validation && !errorMessage && !isAutoFixing) return null;

    if (isAutoFixing) {
        return (
            <>
                <Separator className="mb-1 mt-2" />
                <div className="rounded-md border border-sky-200 bg-sky-50 dark:border-sky-800 dark:bg-sky-950">
                    <div className="space-y-3 p-3 pt-2 text-sky-700 dark:text-sky-300">
                        <div className="flex items-start gap-2">
                            <Spinner className="mt-0.5 size-4 shrink-0 text-sky-700 dark:text-sky-300" />
                            <div className="flex-1 text-sm text-sky-700 dark:text-sky-300">
                                Auto-fixing SQL syntax errors...
                            </div>
                        </div>
                    </div>
                </div>
            </>
        );
    }

    // If we have parser errors (errorMessage) after validation
    if (errorMessage && !hasErrors) {
        return (
            <>
                <Separator className="mb-1 mt-2" />
                <div className="mb-1 flex shrink-0 items-center gap-2">
                    <p className="text-xs text-red-700">{errorMessage}</p>
                </div>
            </>
        );
    }

    return (
        <>
            <Separator className="mb-1 mt-2" />

            {hasErrors ? (
                <div className="rounded-md border border-red-200 bg-red-50 dark:border-red-800 dark:bg-red-950">
                    <ScrollArea className="h-24">
                        <div className="space-y-3 p-3 pt-2 text-red-700 dark:text-red-300">
                            {validation?.errors
                                .slice(0, 3)
                                .map((error, idx) => (
                                    <div
                                        key={idx}
                                        className="flex items-start gap-2"
                                    >
                                        <MessageCircleWarning className="mt-0.5 size-4 shrink-0 text-red-700 dark:text-red-300" />
                                        <div className="flex-1 text-sm text-red-700 dark:text-red-300">
                                            <button
                                                onClick={() =>
                                                    onErrorClick?.(error.line)
                                                }
                                                className="rounded font-medium underline hover:text-red-600 focus:outline-none focus:ring-1 focus:ring-red-500 dark:hover:text-red-200"
                                                type="button"
                                            >
                                                Line {error.line}
                                            </button>
                                            <span className="mx-1">:</span>
                                            <span className="text-xs">
                                                {error.message}
                                            </span>
                                            {error.suggestion && (
                                                <div className="mt-1 flex items-start gap-2">
                                                    <span className="text-xs font-medium ">
                                                        {error.suggestion}
                                                    </span>
                                                </div>
                                            )}
                                        </div>
                                    </div>
                                ))}
                            {validation?.errors &&
                            validation?.errors.length > 3 ? (
                                <div className="flex items-center gap-2">
                                    <MessageCircleWarning className="mt-0.5 size-4 shrink-0 text-red-700 dark:text-red-300" />
                                    <span className="text-xs font-medium">
                                        {validation.errors.length - 3} more
                                        error
                                        {validation.errors.length - 3 > 1
                                            ? 's'
                                            : ''}
                                    </span>
                                </div>
                            ) : null}
                        </div>
                    </ScrollArea>
                </div>
            ) : null}

            {wasAutoFixed && !hasErrors ? (
                <Alert className="border-green-200 bg-green-50 dark:border-green-800 dark:bg-green-950">
                    <CheckCircle className="size-4 text-green-600 dark:text-green-400" />
                    <AlertDescription className="text-sm text-green-700 dark:text-green-300">
                        SQL syntax errors were automatically fixed. Your SQL is
                        now ready to import.
                    </AlertDescription>
                </Alert>
            ) : null}

            {hasWarnings && !hasErrors ? (
                <div className="rounded-md border border-sky-200 bg-sky-50 dark:border-sky-800 dark:bg-sky-950">
                    <ScrollArea className="h-24">
                        <div className="space-y-3 p-3 pt-2 text-sky-700 dark:text-sky-300">
                            <div className="flex items-start gap-2">
                                <AlertTriangle className="mt-0.5 size-4 shrink-0 text-sky-700 dark:text-sky-300" />
                                <div className="flex-1 text-sm text-sky-700 dark:text-sky-300">
                                    <div className="mb-1 font-medium">
                                        Import Info:
                                    </div>
                                    {validation?.warnings.map(
                                        (warning, idx) => (
                                            <div
                                                key={idx}
                                                className="ml-2 text-xs"
                                            >
                                                • {warning.message}
                                            </div>
                                        )
                                    )}
                                </div>
                            </div>
                        </div>
                    </ScrollArea>
                </div>
            ) : null}

            {!hasErrors && !hasWarnings && !errorMessage && validation ? (
                <div className="rounded-md border border-green-200 bg-green-50 dark:border-green-800 dark:bg-green-950">
                    <div className="space-y-3 p-3 pt-2 text-green-700 dark:text-green-300">
                        <div className="flex items-start gap-2">
                            <CheckCircle className="mt-0.5 size-4 shrink-0 text-green-700 dark:text-green-300" />
                            <div className="flex-1 text-sm text-green-700 dark:text-green-300">
                                SQL syntax validated successfully
                            </div>
                        </div>
                    </div>
                </div>
            ) : null}
        </>
    );
};
src/dialogs/common/select-tables/constants.ts (new file, 2 lines)

@@ -0,0 +1,2 @@
export const MAX_TABLES_IN_DIAGRAM = 500;
export const MAX_TABLES_WITHOUT_SHOWING_FILTER = 50;
665
src/dialogs/common/select-tables/select-tables.tsx
Normal file
665
src/dialogs/common/select-tables/select-tables.tsx
Normal file
@@ -0,0 +1,665 @@
import React, { useState, useMemo, useEffect, useCallback } from 'react';
import { Button } from '@/components/button/button';
import { Input } from '@/components/input/input';
import { Search, AlertCircle, Check, X, View, Table } from 'lucide-react';
import { Checkbox } from '@/components/checkbox/checkbox';
import type { DatabaseMetadata } from '@/lib/data/import-metadata/metadata-types/database-metadata';
import { schemaNameToDomainSchemaName } from '@/lib/domain/db-schema';
import { cn } from '@/lib/utils';
import {
    DialogDescription,
    DialogFooter,
    DialogHeader,
    DialogInternalContent,
    DialogTitle,
} from '@/components/dialog/dialog';
import type { SelectedTable } from '@/lib/data/import-metadata/filter-metadata';
import { generateTableKey } from '@/lib/domain';
import { Spinner } from '@/components/spinner/spinner';
import {
    Pagination,
    PaginationContent,
    PaginationItem,
    PaginationPrevious,
    PaginationNext,
} from '@/components/pagination/pagination';
import { MAX_TABLES_IN_DIAGRAM } from './constants';
import { useBreakpoint } from '@/hooks/use-breakpoint';
import { useTranslation } from 'react-i18next';

export interface SelectTablesProps {
    databaseMetadata?: DatabaseMetadata;
    onImport: ({
        selectedTables,
        databaseMetadata,
    }: {
        selectedTables?: SelectedTable[];
        databaseMetadata?: DatabaseMetadata;
    }) => Promise<void>;
    onBack: () => void;
    isLoading?: boolean;
}

const TABLES_PER_PAGE = 10;

interface TableInfo {
    key: string;
    schema?: string;
    tableName: string;
    fullName: string;
    type: 'table' | 'view';
}

export const SelectTables: React.FC<SelectTablesProps> = ({
|
||||
databaseMetadata,
|
||||
onImport,
|
||||
onBack,
|
||||
isLoading = false,
|
||||
}) => {
|
||||
const [searchTerm, setSearchTerm] = useState('');
|
||||
const [currentPage, setCurrentPage] = useState(1);
|
||||
const [showTables, setShowTables] = useState(true);
|
||||
const [showViews, setShowViews] = useState(false);
|
||||
const { t } = useTranslation();
|
||||
|
||||
// Prepare all tables and views with their metadata
|
||||
const allTables = useMemo(() => {
|
||||
const tables: TableInfo[] = [];
|
||||
|
||||
// Add regular tables
|
||||
databaseMetadata?.tables.forEach((table) => {
|
||||
const schema = schemaNameToDomainSchemaName(table.schema);
|
||||
const tableName = table.table;
|
||||
|
||||
const key = `table:${generateTableKey({ tableName, schemaName: schema })}`;
|
||||
|
||||
tables.push({
|
||||
key,
|
||||
schema,
|
||||
tableName,
|
||||
fullName: schema ? `${schema}.${tableName}` : tableName,
|
||||
type: 'table',
|
||||
});
|
||||
});
|
||||
|
||||
// Add views
|
||||
databaseMetadata?.views?.forEach((view) => {
|
||||
const schema = schemaNameToDomainSchemaName(view.schema);
|
||||
const viewName = view.view_name;
|
||||
|
||||
if (!viewName) {
|
||||
return;
|
||||
}
|
||||
|
||||
const key = `view:${generateTableKey({
|
||||
tableName: viewName,
|
||||
schemaName: schema,
|
||||
})}`;
|
||||
|
||||
tables.push({
|
||||
key,
|
||||
schema,
|
||||
tableName: viewName,
|
||||
fullName:
|
||||
schema === 'default' ? viewName : `${schema}.${viewName}`,
|
||||
type: 'view',
|
||||
});
|
||||
});
|
||||
|
||||
return tables.sort((a, b) => a.fullName.localeCompare(b.fullName));
|
||||
}, [databaseMetadata?.tables, databaseMetadata?.views]);
|
||||
|
||||
// Count tables and views separately
|
||||
const tableCount = useMemo(
|
||||
() => allTables.filter((t) => t.type === 'table').length,
|
||||
[allTables]
|
||||
);
|
||||
const viewCount = useMemo(
|
||||
() => allTables.filter((t) => t.type === 'view').length,
|
||||
[allTables]
|
||||
);
|
||||
|
||||
// Initialize selectedTables with all tables (not views) when the table count is under the MAX_TABLES_IN_DIAGRAM limit
|
||||
const [selectedTables, setSelectedTables] = useState<Set<string>>(() => {
|
||||
const tables = allTables.filter((t) => t.type === 'table');
|
||||
if (tables.length < MAX_TABLES_IN_DIAGRAM) {
|
||||
return new Set(tables.map((t) => t.key));
|
||||
}
|
||||
return new Set();
|
||||
});
|
||||
|
||||
// Filter tables based on search term and type filters
|
||||
const filteredTables = useMemo(() => {
|
||||
let filtered = allTables;
|
||||
|
||||
// Filter by type
|
||||
filtered = filtered.filter((table) => {
|
||||
if (table.type === 'table' && !showTables) return false;
|
||||
if (table.type === 'view' && !showViews) return false;
|
||||
return true;
|
||||
});
|
||||
|
||||
// Filter by search term
|
||||
if (searchTerm.trim()) {
|
||||
const searchLower = searchTerm.toLowerCase();
|
||||
filtered = filtered.filter(
|
||||
(table) =>
|
||||
table.tableName.toLowerCase().includes(searchLower) ||
|
||||
table.schema?.toLowerCase().includes(searchLower) ||
|
||||
table.fullName.toLowerCase().includes(searchLower)
|
||||
);
|
||||
}
|
||||
|
||||
return filtered;
|
||||
}, [allTables, searchTerm, showTables, showViews]);
|
||||
|
||||
// Calculate pagination
|
||||
const totalPages = useMemo(
|
||||
() => Math.max(1, Math.ceil(filteredTables.length / TABLES_PER_PAGE)),
|
||||
[filteredTables.length]
|
||||
);
|
||||
|
||||
const paginatedTables = useMemo(() => {
|
||||
const startIndex = (currentPage - 1) * TABLES_PER_PAGE;
|
||||
const endIndex = startIndex + TABLES_PER_PAGE;
|
||||
return filteredTables.slice(startIndex, endIndex);
|
||||
}, [filteredTables, currentPage]);
|
||||
|
||||
// Get currently visible selected tables
|
||||
const visibleSelectedTables = useMemo(() => {
|
||||
return paginatedTables.filter((table) => selectedTables.has(table.key));
|
||||
}, [paginatedTables, selectedTables]);
|
||||
|
||||
const canAddMore = useMemo(
|
||||
() => selectedTables.size < MAX_TABLES_IN_DIAGRAM,
|
||||
[selectedTables.size]
|
||||
);
|
||||
const hasSearchResults = useMemo(
|
||||
() => filteredTables.length > 0,
|
||||
[filteredTables.length]
|
||||
);
|
||||
const allVisibleSelected = useMemo(
|
||||
() =>
|
||||
visibleSelectedTables.length === paginatedTables.length &&
|
||||
paginatedTables.length > 0,
|
||||
[visibleSelectedTables.length, paginatedTables.length]
|
||||
);
|
||||
const canSelectAllFiltered = useMemo(
|
||||
() =>
|
||||
filteredTables.length > 0 &&
|
||||
filteredTables.some((table) => !selectedTables.has(table.key)) &&
|
||||
canAddMore,
|
||||
[filteredTables, selectedTables, canAddMore]
|
||||
);
|
||||
|
||||
// Reset to first page when search changes
|
||||
useEffect(() => {
|
||||
setCurrentPage(1);
|
||||
}, [searchTerm]);
|
||||
|
||||
const handleTableToggle = useCallback(
|
||||
(tableKey: string) => {
|
||||
const newSelected = new Set(selectedTables);
|
||||
|
||||
if (newSelected.has(tableKey)) {
|
||||
newSelected.delete(tableKey);
|
||||
} else if (selectedTables.size < MAX_TABLES_IN_DIAGRAM) {
|
||||
newSelected.add(tableKey);
|
||||
}
|
||||
|
||||
setSelectedTables(newSelected);
|
||||
},
|
||||
[selectedTables]
|
||||
);
|
||||
|
||||
const handleTogglePageSelection = useCallback(() => {
|
||||
const newSelected = new Set(selectedTables);
|
||||
|
||||
if (allVisibleSelected) {
|
||||
// Deselect all on current page
|
||||
for (const table of paginatedTables) {
|
||||
newSelected.delete(table.key);
|
||||
}
|
||||
} else {
|
||||
// Select all on current page
|
||||
for (const table of paginatedTables) {
|
||||
if (newSelected.size >= MAX_TABLES_IN_DIAGRAM) break;
|
||||
newSelected.add(table.key);
|
||||
}
|
||||
}
|
||||
|
||||
setSelectedTables(newSelected);
|
||||
}, [allVisibleSelected, paginatedTables, selectedTables]);
|
||||
|
||||
const handleSelectAllFiltered = useCallback(() => {
|
||||
const newSelected = new Set(selectedTables);
|
||||
|
||||
for (const table of filteredTables) {
|
||||
if (newSelected.size >= MAX_TABLES_IN_DIAGRAM) break;
|
||||
newSelected.add(table.key);
|
||||
}
|
||||
|
||||
setSelectedTables(newSelected);
|
||||
}, [filteredTables, selectedTables]);
|
||||
|
||||
const handleNextPage = useCallback(() => {
|
||||
if (currentPage < totalPages) {
|
||||
setCurrentPage(currentPage + 1);
|
||||
}
|
||||
}, [currentPage, totalPages]);
|
||||
|
||||
const handlePrevPage = useCallback(() => {
|
||||
if (currentPage > 1) {
|
||||
setCurrentPage(currentPage - 1);
|
||||
}
|
||||
}, [currentPage]);
|
||||
|
||||
const handleClearSelection = useCallback(() => {
|
||||
setSelectedTables(new Set());
|
||||
}, []);
|
||||
|
||||
const handleConfirm = useCallback(() => {
|
||||
const selectedTableObjects: SelectedTable[] = Array.from(selectedTables)
|
||||
.map((key): SelectedTable | null => {
|
||||
const table = allTables.find((t) => t.key === key);
|
||||
if (!table) return null;
|
||||
|
||||
return {
|
||||
schema: table.schema,
|
||||
table: table.tableName,
|
||||
type: table.type,
|
||||
} satisfies SelectedTable;
|
||||
})
|
||||
.filter((t): t is SelectedTable => t !== null);
|
||||
|
||||
onImport({ selectedTables: selectedTableObjects, databaseMetadata });
|
||||
}, [selectedTables, allTables, onImport, databaseMetadata]);
|
||||
|
||||
const { isMd: isDesktop } = useBreakpoint('md');
|
||||
|
||||
const renderPagination = useCallback(
|
||||
() => (
|
||||
<Pagination>
|
||||
<PaginationContent>
|
||||
<PaginationItem>
|
||||
<PaginationPrevious
|
||||
onClick={handlePrevPage}
|
||||
className={cn(
|
||||
'cursor-pointer',
|
||||
currentPage === 1 &&
|
||||
'pointer-events-none opacity-50'
|
||||
)}
|
||||
/>
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<span className="px-3 text-sm text-muted-foreground">
|
||||
Page {currentPage} of {totalPages}
|
||||
</span>
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationNext
|
||||
onClick={handleNextPage}
|
||||
className={cn(
|
||||
'cursor-pointer',
|
||||
(currentPage >= totalPages ||
|
||||
filteredTables.length === 0) &&
|
||||
'pointer-events-none opacity-50'
|
||||
)}
|
||||
/>
|
||||
</PaginationItem>
|
||||
</PaginationContent>
|
||||
</Pagination>
|
||||
),
|
||||
[
|
||||
currentPage,
|
||||
totalPages,
|
||||
handlePrevPage,
|
||||
handleNextPage,
|
||||
filteredTables.length,
|
||||
]
|
||||
);
|
||||
|
||||
if (isLoading) {
|
||||
return (
|
||||
<div className="flex h-[400px] items-center justify-center">
|
||||
<div className="text-center">
|
||||
<Spinner className="mb-4" />
|
||||
<p className="text-sm text-muted-foreground">
|
||||
Parsing database metadata...
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<DialogHeader>
|
||||
<DialogTitle>Select Tables to Import</DialogTitle>
|
||||
<DialogDescription>
|
||||
{tableCount} {tableCount === 1 ? 'table' : 'tables'}
|
||||
{viewCount > 0 && (
|
||||
<>
|
||||
{' and '}
|
||||
{viewCount} {viewCount === 1 ? 'view' : 'views'}
|
||||
</>
|
||||
)}
|
||||
{' found. '}
|
||||
{allTables.length > MAX_TABLES_IN_DIAGRAM
|
||||
? `Select up to ${MAX_TABLES_IN_DIAGRAM} to import.`
|
||||
: 'Choose which ones to import.'}
|
||||
</DialogDescription>
|
||||
</DialogHeader>
|
||||
<DialogInternalContent>
|
||||
<div className="flex h-full flex-col space-y-4">
|
||||
{/* Warning/Info Banner */}
|
||||
{allTables.length > MAX_TABLES_IN_DIAGRAM ? (
|
||||
<div
|
||||
className={cn(
|
||||
'flex items-center gap-2 rounded-lg p-3 text-sm',
|
||||
'bg-amber-50 text-amber-800 dark:bg-amber-950 dark:text-amber-200'
|
||||
)}
|
||||
>
|
||||
<AlertCircle className="size-4 shrink-0" />
|
||||
<span>
|
||||
Due to performance limitations, you can import a
|
||||
maximum of {MAX_TABLES_IN_DIAGRAM} tables.
|
||||
</span>
|
||||
</div>
|
||||
) : null}
|
||||
{/* Search Input */}
|
||||
<div className="relative">
|
||||
<Search className="absolute left-3 top-1/2 size-4 -translate-y-1/2 text-muted-foreground" />
|
||||
<Input
|
||||
placeholder="Search tables..."
|
||||
value={searchTerm}
|
||||
onChange={(e) => setSearchTerm(e.target.value)}
|
||||
className="px-9"
|
||||
/>
|
||||
{searchTerm && (
|
||||
<button
|
||||
onClick={() => setSearchTerm('')}
|
||||
className="absolute right-3 top-1/2 -translate-y-1/2 text-muted-foreground hover:text-foreground"
|
||||
>
|
||||
<X className="size-4" />
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Selection Status and Actions - Responsive layout */}
|
||||
<div className="flex flex-col items-center gap-3 sm:flex-row sm:items-center sm:justify-between sm:gap-4">
|
||||
{/* Left side: selection count -> checkboxes -> results found */}
|
||||
<div className="flex flex-col items-center gap-3 text-sm sm:flex-row sm:items-center sm:gap-4">
|
||||
<div className="flex flex-col items-center gap-1 sm:flex-row sm:items-center sm:gap-4">
|
||||
<span className="text-center font-medium">
|
||||
{selectedTables.size} /{' '}
|
||||
{Math.min(
|
||||
MAX_TABLES_IN_DIAGRAM,
|
||||
allTables.length
|
||||
)}{' '}
|
||||
items selected
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center gap-3 sm:border-x sm:px-4">
|
||||
<div className="flex items-center gap-2">
|
||||
<Checkbox
|
||||
checked={showTables}
|
||||
onCheckedChange={(checked) => {
|
||||
// Prevent unchecking if it's the only one checked
|
||||
if (!checked && !showViews) return;
|
||||
setShowTables(!!checked);
|
||||
}}
|
||||
/>
|
||||
<Table
|
||||
className="size-4"
|
||||
strokeWidth={1.5}
|
||||
/>
|
||||
<span>tables</span>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<Checkbox
|
||||
checked={showViews}
|
||||
onCheckedChange={(checked) => {
|
||||
// Prevent unchecking if it's the only one checked
|
||||
if (!checked && !showTables) return;
|
||||
setShowViews(!!checked);
|
||||
}}
|
||||
/>
|
||||
<View
|
||||
className="size-4"
|
||||
strokeWidth={1.5}
|
||||
/>
|
||||
<span>views</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<span className="hidden text-muted-foreground sm:inline">
|
||||
{filteredTables.length}{' '}
|
||||
{filteredTables.length === 1
|
||||
? 'result'
|
||||
: 'results'}{' '}
|
||||
found
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{/* Right side: action buttons */}
|
||||
<div className="flex flex-wrap items-center justify-center gap-2">
|
||||
{hasSearchResults && (
|
||||
<>
|
||||
{/* Show page selection button when not searching and no selection */}
|
||||
{!searchTerm &&
|
||||
selectedTables.size === 0 && (
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
onClick={
|
||||
handleTogglePageSelection
|
||||
}
|
||||
disabled={
|
||||
paginatedTables.length === 0
|
||||
}
|
||||
>
|
||||
{allVisibleSelected
|
||||
? 'Deselect'
|
||||
: 'Select'}{' '}
|
||||
page
|
||||
</Button>
|
||||
)}
|
||||
{/* Show Select all button when there are unselected tables */}
|
||||
{canSelectAllFiltered &&
|
||||
selectedTables.size === 0 && (
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
onClick={
|
||||
handleSelectAllFiltered
|
||||
}
|
||||
disabled={!canSelectAllFiltered}
|
||||
title={(() => {
|
||||
const unselectedCount =
|
||||
filteredTables.filter(
|
||||
(table) =>
|
||||
!selectedTables.has(
|
||||
table.key
|
||||
)
|
||||
).length;
|
||||
const remainingCapacity =
|
||||
MAX_TABLES_IN_DIAGRAM -
|
||||
selectedTables.size;
|
||||
if (
|
||||
unselectedCount >
|
||||
remainingCapacity
|
||||
) {
|
||||
return `Can only select ${remainingCapacity} more tables (${MAX_TABLES_IN_DIAGRAM} max limit)`;
|
||||
}
|
||||
return undefined;
|
||||
})()}
|
||||
>
|
||||
{(() => {
|
||||
const unselectedCount =
|
||||
filteredTables.filter(
|
||||
(table) =>
|
||||
!selectedTables.has(
|
||||
table.key
|
||||
)
|
||||
).length;
|
||||
const remainingCapacity =
|
||||
MAX_TABLES_IN_DIAGRAM -
|
||||
selectedTables.size;
|
||||
if (
|
||||
unselectedCount >
|
||||
remainingCapacity
|
||||
) {
|
||||
return `Select ${remainingCapacity} of ${unselectedCount}`;
|
||||
}
|
||||
return `Select all ${unselectedCount}`;
|
||||
})()}
|
||||
</Button>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
{selectedTables.size > 0 && (
|
||||
<>
|
||||
{/* Show page selection/deselection button when user has selections */}
|
||||
{paginatedTables.length > 0 && (
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
onClick={handleTogglePageSelection}
|
||||
>
|
||||
{allVisibleSelected
|
||||
? 'Deselect'
|
||||
: 'Select'}{' '}
|
||||
page
|
||||
</Button>
|
||||
)}
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
onClick={handleClearSelection}
|
||||
>
|
||||
Clear selection
|
||||
</Button>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Table List */}
|
||||
<div className="flex min-h-[428px] flex-1 flex-col">
|
||||
{hasSearchResults ? (
|
||||
<>
|
||||
<div className="flex-1 py-4">
|
||||
<div className="space-y-1">
|
||||
{paginatedTables.map((table) => {
|
||||
const isSelected = selectedTables.has(
|
||||
table.key
|
||||
);
|
||||
const isDisabled =
|
||||
!isSelected &&
|
||||
selectedTables.size >=
|
||||
MAX_TABLES_IN_DIAGRAM;
|
||||
|
||||
return (
|
||||
<div
|
||||
key={table.key}
|
||||
className={cn(
|
||||
'flex items-center gap-3 rounded-md px-3 py-2 text-sm transition-colors',
|
||||
{
|
||||
'cursor-not-allowed':
|
||||
isDisabled,
|
||||
|
||||
'bg-muted hover:bg-muted/80':
|
||||
isSelected,
|
||||
'hover:bg-accent':
|
||||
!isSelected &&
|
||||
!isDisabled,
|
||||
}
|
||||
)}
|
||||
>
|
||||
<Checkbox
|
||||
checked={isSelected}
|
||||
disabled={isDisabled}
|
||||
onCheckedChange={() =>
|
||||
handleTableToggle(
|
||||
table.key
|
||||
)
|
||||
}
|
||||
/>
|
||||
{table.type === 'view' ? (
|
||||
<View
|
||||
className="size-4"
|
||||
strokeWidth={1.5}
|
||||
/>
|
||||
) : (
|
||||
<Table
|
||||
className="size-4"
|
||||
strokeWidth={1.5}
|
||||
/>
|
||||
)}
|
||||
<span className="flex-1">
|
||||
{table.schema ? (
|
||||
<span className="text-muted-foreground">
|
||||
{table.schema}.
|
||||
</span>
|
||||
) : null}
|
||||
<span className="font-medium">
|
||||
{table.tableName}
|
||||
</span>
|
||||
{table.type === 'view' && (
|
||||
<span className="ml-2 text-xs text-muted-foreground">
|
||||
(view)
|
||||
</span>
|
||||
)}
|
||||
</span>
|
||||
{isSelected && (
|
||||
<Check className="size-4 text-pink-600" />
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
) : (
|
||||
<div className="flex h-full items-center justify-center py-4">
|
||||
<p className="text-sm text-muted-foreground">
|
||||
{searchTerm
|
||||
? 'No tables found matching your search.'
|
||||
: 'Start typing to search for tables...'}
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
{isDesktop ? renderPagination() : null}
|
||||
</DialogInternalContent>
|
||||
<DialogFooter
|
||||
// className={cn(
|
||||
// 'gap-2',
|
||||
// isDesktop
|
||||
// ? 'flex items-center justify-between'
|
||||
// : 'flex flex-col'
|
||||
// )}
|
||||
className="flex flex-col-reverse gap-2 sm:flex-row sm:justify-end sm:space-x-2 md:justify-between md:gap-0"
|
||||
>
|
||||
{/* Desktop layout */}
|
||||
|
||||
<Button type="button" variant="secondary" onClick={onBack}>
|
||||
{t('new_diagram_dialog.back')}
|
||||
</Button>
|
||||
|
||||
<Button
|
||||
onClick={handleConfirm}
|
||||
disabled={selectedTables.size === 0}
|
||||
className="bg-pink-500 text-white hover:bg-pink-600"
|
||||
>
|
||||
Import {selectedTables.size} Tables
|
||||
</Button>
|
||||
|
||||
{!isDesktop ? renderPagination() : null}
|
||||
</DialogFooter>
|
||||
</>
|
||||
);
|
||||
};
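
The bulk-selection handlers above (handleTogglePageSelection, handleSelectAllFiltered) enforce the same cap; a condensed sketch of that shared pattern (the helper name is assumed and does not appear in the component):

const addUpToLimit = (
    selected: Set<string>,
    candidateKeys: string[],
    limit: number = MAX_TABLES_IN_DIAGRAM
): Set<string> => {
    const next = new Set(selected);
    for (const key of candidateKeys) {
        if (next.size >= limit) break; // never exceed the import cap
        next.add(key);
    }
    return next;
};

// e.g. handleSelectAllFiltered is roughly:
// setSelectedTables(addUpToLimit(selectedTables, filteredTables.map((t) => t.key)));
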
@@ -1,4 +1,5 @@
|
||||
export enum CreateDiagramDialogStep {
|
||||
SELECT_DATABASE = 'SELECT_DATABASE',
|
||||
IMPORT_DATABASE = 'IMPORT_DATABASE',
|
||||
SELECT_TABLES = 'SELECT_TABLES',
|
||||
}
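
A sketch (assumed helper, not in the diff) of the wizard order the new SELECT_TABLES value implies; the dialog below only enters that step for large schemas:

import { CreateDiagramDialogStep } from './create-diagram-dialog-step';

// Assumed illustration: which step follows each step of the create-diagram wizard.
const nextStep = (
    step: CreateDiagramDialogStep,
    needsTableFilter: boolean
): CreateDiagramDialogStep | undefined => {
    switch (step) {
        case CreateDiagramDialogStep.SELECT_DATABASE:
            return CreateDiagramDialogStep.IMPORT_DATABASE;
        case CreateDiagramDialogStep.IMPORT_DATABASE:
            // SELECT_TABLES is only entered when the schema is too large to import as-is
            return needsTableFilter
                ? CreateDiagramDialogStep.SELECT_TABLES
                : undefined; // undefined = import immediately and close the dialog
        default:
            return undefined;
    }
};
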
@@ -15,9 +15,13 @@ import type { DatabaseEdition } from '@/lib/domain/database-edition';
|
||||
import { SelectDatabase } from './select-database/select-database';
|
||||
import { CreateDiagramDialogStep } from './create-diagram-dialog-step';
|
||||
import { ImportDatabase } from '../common/import-database/import-database';
|
||||
import { SelectTables } from '../common/select-tables/select-tables';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import type { BaseDialogProps } from '../common/base-dialog-props';
|
||||
import { sqlImportToDiagram } from '@/lib/data/sql-import';
|
||||
import type { SelectedTable } from '@/lib/data/import-metadata/filter-metadata';
|
||||
import { filterMetadataByTables } from '@/lib/data/import-metadata/filter-metadata';
|
||||
import { MAX_TABLES_WITHOUT_SHOWING_FILTER } from '../common/select-tables/constants';
|
||||
|
||||
export interface CreateDiagramDialogProps extends BaseDialogProps {}
|
||||
|
||||
@@ -42,6 +46,8 @@ export const CreateDiagramDialog: React.FC<CreateDiagramDialogProps> = ({
|
||||
const { listDiagrams, addDiagram } = useStorage();
|
||||
const [diagramNumber, setDiagramNumber] = useState<number>(1);
|
||||
const navigate = useNavigate();
|
||||
const [parsedMetadata, setParsedMetadata] = useState<DatabaseMetadata>();
|
||||
const [isParsingMetadata, setIsParsingMetadata] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
setDatabaseEdition(undefined);
|
||||
@@ -62,49 +68,72 @@ export const CreateDiagramDialog: React.FC<CreateDiagramDialogProps> = ({
|
||||
setDatabaseEdition(undefined);
|
||||
setScriptResult('');
|
||||
setImportMethod('query');
|
||||
setParsedMetadata(undefined);
|
||||
}, [dialog.open]);
|
||||
|
||||
const hasExistingDiagram = (diagramId ?? '').trim().length !== 0;
|
||||
|
||||
const importNewDiagram = useCallback(async () => {
|
||||
let diagram: Diagram | undefined;
|
||||
const importNewDiagram = useCallback(
|
||||
async ({
|
||||
selectedTables,
|
||||
databaseMetadata,
|
||||
}: {
|
||||
selectedTables?: SelectedTable[];
|
||||
databaseMetadata?: DatabaseMetadata;
|
||||
} = {}) => {
|
||||
let diagram: Diagram | undefined;
|
||||
|
||||
if (importMethod === 'ddl') {
|
||||
diagram = await sqlImportToDiagram({
|
||||
sqlContent: scriptResult,
|
||||
sourceDatabaseType: databaseType,
|
||||
targetDatabaseType: databaseType,
|
||||
if (importMethod === 'ddl') {
|
||||
diagram = await sqlImportToDiagram({
|
||||
sqlContent: scriptResult,
|
||||
sourceDatabaseType: databaseType,
|
||||
targetDatabaseType: databaseType,
|
||||
});
|
||||
} else {
|
||||
let metadata: DatabaseMetadata | undefined = databaseMetadata;
|
||||
|
||||
if (!metadata) {
|
||||
metadata = loadDatabaseMetadata(scriptResult);
|
||||
}
|
||||
|
||||
if (selectedTables && selectedTables.length > 0) {
|
||||
metadata = filterMetadataByTables({
|
||||
metadata,
|
||||
selectedTables,
|
||||
});
|
||||
}
|
||||
|
||||
diagram = await loadFromDatabaseMetadata({
|
||||
databaseType,
|
||||
databaseMetadata: metadata,
|
||||
diagramNumber,
|
||||
databaseEdition:
|
||||
databaseEdition?.trim().length === 0
|
||||
? undefined
|
||||
: databaseEdition,
|
||||
});
|
||||
}
|
||||
|
||||
await addDiagram({ diagram });
|
||||
await updateConfig({
|
||||
config: { defaultDiagramId: diagram.id },
|
||||
});
|
||||
} else {
|
||||
const databaseMetadata: DatabaseMetadata =
|
||||
loadDatabaseMetadata(scriptResult);
|
||||
|
||||
diagram = await loadFromDatabaseMetadata({
|
||||
databaseType,
|
||||
databaseMetadata,
|
||||
diagramNumber,
|
||||
databaseEdition:
|
||||
databaseEdition?.trim().length === 0
|
||||
? undefined
|
||||
: databaseEdition,
|
||||
});
|
||||
}
|
||||
|
||||
await addDiagram({ diagram });
|
||||
await updateConfig({ config: { defaultDiagramId: diagram.id } });
|
||||
closeCreateDiagramDialog();
|
||||
navigate(`/diagrams/${diagram.id}`);
|
||||
}, [
|
||||
importMethod,
|
||||
databaseType,
|
||||
addDiagram,
|
||||
databaseEdition,
|
||||
closeCreateDiagramDialog,
|
||||
navigate,
|
||||
updateConfig,
|
||||
scriptResult,
|
||||
diagramNumber,
|
||||
]);
|
||||
closeCreateDiagramDialog();
|
||||
navigate(`/diagrams/${diagram.id}`);
|
||||
},
|
||||
[
|
||||
importMethod,
|
||||
databaseType,
|
||||
addDiagram,
|
||||
databaseEdition,
|
||||
closeCreateDiagramDialog,
|
||||
navigate,
|
||||
updateConfig,
|
||||
scriptResult,
|
||||
diagramNumber,
|
||||
]
|
||||
);
|
||||
|
||||
const createEmptyDiagram = useCallback(async () => {
|
||||
const diagram: Diagram = {
|
||||
@@ -138,10 +167,56 @@ export const CreateDiagramDialog: React.FC<CreateDiagramDialogProps> = ({
|
||||
openImportDBMLDialog,
|
||||
]);
|
||||
|
||||
const importNewDiagramOrFilterTables = useCallback(async () => {
|
||||
try {
|
||||
setIsParsingMetadata(true);
|
||||
|
||||
if (importMethod === 'ddl') {
|
||||
await importNewDiagram();
|
||||
} else {
|
||||
// Parse metadata asynchronously to avoid blocking the UI
|
||||
const metadata = await new Promise<DatabaseMetadata>(
|
||||
(resolve, reject) => {
|
||||
setTimeout(() => {
|
||||
try {
|
||||
const result =
|
||||
loadDatabaseMetadata(scriptResult);
|
||||
resolve(result);
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
}, 0);
|
||||
}
|
||||
);
|
||||
|
||||
const totalTablesAndViews =
|
||||
metadata.tables.length + (metadata.views?.length || 0);
|
||||
|
||||
setParsedMetadata(metadata);
|
||||
|
||||
// Check if it's a large database that needs table selection
|
||||
if (totalTablesAndViews > MAX_TABLES_WITHOUT_SHOWING_FILTER) {
|
||||
setStep(CreateDiagramDialogStep.SELECT_TABLES);
|
||||
} else {
|
||||
await importNewDiagram({
|
||||
databaseMetadata: metadata,
|
||||
});
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
setIsParsingMetadata(false);
|
||||
}
|
||||
}, [importMethod, scriptResult, importNewDiagram]);
|
||||
|
||||
return (
|
||||
<Dialog
|
||||
{...dialog}
|
||||
onOpenChange={(open) => {
|
||||
// Don't allow closing while parsing metadata
|
||||
if (isParsingMetadata) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!hasExistingDiagram) {
|
||||
return;
|
||||
}
|
||||
@@ -154,6 +229,8 @@ export const CreateDiagramDialog: React.FC<CreateDiagramDialogProps> = ({
|
||||
<DialogContent
|
||||
className="flex max-h-dvh w-full flex-col md:max-w-[900px]"
|
||||
showClose={hasExistingDiagram}
|
||||
onInteractOutside={(e) => e.preventDefault()}
|
||||
onEscapeKeyDown={(e) => e.preventDefault()}
|
||||
>
|
||||
{step === CreateDiagramDialogStep.SELECT_DATABASE ? (
|
||||
<SelectDatabase
|
||||
@@ -165,9 +242,9 @@ export const CreateDiagramDialog: React.FC<CreateDiagramDialogProps> = ({
|
||||
setStep(CreateDiagramDialogStep.IMPORT_DATABASE)
|
||||
}
|
||||
/>
|
||||
) : (
|
||||
) : step === CreateDiagramDialogStep.IMPORT_DATABASE ? (
|
||||
<ImportDatabase
|
||||
onImport={importNewDiagram}
|
||||
onImport={importNewDiagramOrFilterTables}
|
||||
onCreateEmptyDiagram={createEmptyDiagram}
|
||||
databaseEdition={databaseEdition}
|
||||
databaseType={databaseType}
|
||||
@@ -180,8 +257,18 @@ export const CreateDiagramDialog: React.FC<CreateDiagramDialogProps> = ({
|
||||
title={t('new_diagram_dialog.import_database.title')}
|
||||
importMethod={importMethod}
|
||||
setImportMethod={setImportMethod}
|
||||
keepDialogAfterImport={true}
|
||||
/>
|
||||
)}
|
||||
) : step === CreateDiagramDialogStep.SELECT_TABLES ? (
|
||||
<SelectTables
|
||||
isLoading={isParsingMetadata || !parsedMetadata}
|
||||
databaseMetadata={parsedMetadata}
|
||||
onImport={importNewDiagram}
|
||||
onBack={() =>
|
||||
setStep(CreateDiagramDialogStep.IMPORT_DATABASE)
|
||||
}
|
||||
/>
|
||||
) : null}
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
);
|
||||
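
One detail worth noting in importNewDiagramOrFilterTables above: loadDatabaseMetadata appears to be synchronous, so it is wrapped in a setTimeout-based Promise purely to yield to the event loop and let the loading state paint first. A generic sketch of that pattern (helper name assumed):

const deferToNextTick = <T>(work: () => T): Promise<T> =>
    new Promise<T>((resolve, reject) => {
        setTimeout(() => {
            try {
                resolve(work()); // heavy synchronous work runs after the UI has updated
            } catch (err) {
                reject(err);
            }
        }, 0);
    });

// equivalent to the inline version above:
// const metadata = await deferToNextTick(() => loadDatabaseMetadata(scriptResult));
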
@@ -23,11 +23,16 @@ import { useTranslation } from 'react-i18next';
|
||||
import { Editor } from '@/components/code-snippet/code-snippet';
|
||||
import { useTheme } from '@/hooks/use-theme';
|
||||
import { AlertCircle } from 'lucide-react';
|
||||
import { importDBMLToDiagram, sanitizeDBML } from '@/lib/dbml-import';
|
||||
import {
|
||||
importDBMLToDiagram,
|
||||
sanitizeDBML,
|
||||
preprocessDBML,
|
||||
} from '@/lib/dbml/dbml-import/dbml-import';
|
||||
import { useChartDB } from '@/hooks/use-chartdb';
|
||||
import { Parser } from '@dbml/core';
|
||||
import { useCanvas } from '@/hooks/use-canvas';
|
||||
import { setupDBMLLanguage } from '@/components/code-snippet/languages/dbml-language';
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import { useToast } from '@/components/toast/use-toast';
|
||||
import { Spinner } from '@/components/spinner/spinner';
|
||||
import { debounce } from '@/lib/utils';
|
||||
@@ -189,7 +194,8 @@ Ref: comments.user_id > users.id // Each comment is written by one user`;
|
||||
if (!content.trim()) return;
|
||||
|
||||
try {
|
||||
const sanitizedContent = sanitizeDBML(content);
|
||||
const preprocessedContent = preprocessDBML(content);
|
||||
const sanitizedContent = sanitizeDBML(preprocessedContent);
|
||||
const parser = new Parser();
|
||||
parser.parse(sanitizedContent, 'dbml');
|
||||
} catch (e) {
|
||||
@@ -242,13 +248,11 @@ Ref: comments.user_id > users.id // Each comment is written by one user`;
|
||||
if (!dbmlContent.trim() || errorMessage) return;
|
||||
|
||||
try {
|
||||
// Sanitize DBML content before importing
|
||||
const sanitizedContent = sanitizeDBML(dbmlContent);
|
||||
const importedDiagram = await importDBMLToDiagram(sanitizedContent);
|
||||
const importedDiagram = await importDBMLToDiagram(dbmlContent);
|
||||
const tableIdsToRemove = tables
|
||||
.filter((table) =>
|
||||
importedDiagram.tables?.some(
|
||||
(t) =>
|
||||
(t: DBTable) =>
|
||||
t.name === table.name && t.schema === table.schema
|
||||
)
|
||||
)
|
||||
@@ -257,19 +261,21 @@ Ref: comments.user_id > users.id // Each comment is written by one user`;
|
||||
const relationshipIdsToRemove = relationships
|
||||
.filter((relationship) => {
|
||||
const sourceTable = tables.find(
|
||||
(table) => table.id === relationship.sourceTableId
|
||||
(table: DBTable) =>
|
||||
table.id === relationship.sourceTableId
|
||||
);
|
||||
const targetTable = tables.find(
|
||||
(table) => table.id === relationship.targetTableId
|
||||
(table: DBTable) =>
|
||||
table.id === relationship.targetTableId
|
||||
);
|
||||
if (!sourceTable || !targetTable) return true;
|
||||
const replacementSourceTable = importedDiagram.tables?.find(
|
||||
(table) =>
|
||||
(table: DBTable) =>
|
||||
table.name === sourceTable.name &&
|
||||
table.schema === sourceTable.schema
|
||||
);
|
||||
const replacementTargetTable = importedDiagram.tables?.find(
|
||||
(table) =>
|
||||
(table: DBTable) =>
|
||||
table.name === targetTable.name &&
|
||||
table.schema === targetTable.schema
|
||||
);
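
The validation hunk above now runs preprocessDBML before sanitizeDBML and only then hands the result to the parser; a condensed sketch of that order (function name assumed, imports as shown in the hunk):

import { preprocessDBML, sanitizeDBML } from '@/lib/dbml/dbml-import/dbml-import';
import { Parser } from '@dbml/core';

// Throws if the DBML is syntactically invalid; returns the cleaned source otherwise.
const validateDBML = (content: string): string => {
    const preprocessed = preprocessDBML(content); // preprocess first, mirroring the order in the hunk above
    const sanitized = sanitizeDBML(preprocessed); // then sanitize the preprocessed source
    new Parser().parse(sanitized, 'dbml');
    return sanitized;
};
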
@@ -21,7 +21,7 @@ import { useTranslation } from 'react-i18next';
|
||||
export interface TableSchemaDialogProps extends BaseDialogProps {
|
||||
table?: DBTable;
|
||||
schemas: DBSchema[];
|
||||
onConfirm: (schema: string) => void;
|
||||
onConfirm: ({ schema }: { schema: DBSchema }) => void;
|
||||
}
|
||||
|
||||
export const TableSchemaDialog: React.FC<TableSchemaDialogProps> = ({
|
||||
@@ -31,7 +31,7 @@ export const TableSchemaDialog: React.FC<TableSchemaDialogProps> = ({
|
||||
onConfirm,
|
||||
}) => {
|
||||
const { t } = useTranslation();
|
||||
const [selectedSchema, setSelectedSchema] = React.useState<string>(
|
||||
const [selectedSchemaId, setSelectedSchemaId] = React.useState<string>(
|
||||
table?.schema
|
||||
? schemaNameToSchemaId(table.schema)
|
||||
: (schemas?.[0]?.id ?? '')
|
||||
@@ -39,7 +39,7 @@ export const TableSchemaDialog: React.FC<TableSchemaDialogProps> = ({
|
||||
|
||||
useEffect(() => {
|
||||
if (!dialog.open) return;
|
||||
setSelectedSchema(
|
||||
setSelectedSchemaId(
|
||||
table?.schema
|
||||
? schemaNameToSchemaId(table.schema)
|
||||
: (schemas?.[0]?.id ?? '')
|
||||
@@ -48,8 +48,11 @@ export const TableSchemaDialog: React.FC<TableSchemaDialogProps> = ({
|
||||
const { closeTableSchemaDialog } = useDialog();
|
||||
|
||||
const handleConfirm = useCallback(() => {
|
||||
onConfirm(selectedSchema);
|
||||
}, [onConfirm, selectedSchema]);
|
||||
const schema = schemas.find((s) => s.id === selectedSchemaId);
|
||||
if (!schema) return;
|
||||
|
||||
onConfirm({ schema });
|
||||
}, [onConfirm, selectedSchemaId, schemas]);
|
||||
|
||||
const schemaOptions: SelectBoxOption[] = useMemo(
|
||||
() =>
|
||||
@@ -89,9 +92,9 @@ export const TableSchemaDialog: React.FC<TableSchemaDialogProps> = ({
|
||||
<SelectBox
|
||||
options={schemaOptions}
|
||||
multiple={false}
|
||||
value={selectedSchema}
|
||||
value={selectedSchemaId}
|
||||
onChange={(value) =>
|
||||
setSelectedSchema(value as string)
|
||||
setSelectedSchemaId(value as string)
|
||||
}
|
||||
/>
|
||||
</div>
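
The TableSchemaDialog hunks above change the onConfirm contract from a raw schema id string to the resolved DBSchema object. A hypothetical caller-side handler (names and surrounding code assumed) showing the new shape:

import type { DBSchema } from '@/lib/domain/db-schema';

// Hypothetical handler: the dialog already resolved the selected id to a DBSchema,
// so callers receive the full object and no longer need their own lookup
// (assuming DBSchema exposes a name, as the select box options suggest).
const handleSchemaConfirm = ({ schema }: { schema: DBSchema }) => {
    console.log(`table will move to schema "${schema.name}"`);
};
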
@@ -74,8 +74,8 @@ export const ar: LanguageTranslation = {
|
||||
title: 'مخططات متعددة',
|
||||
description:
|
||||
'{{formattedSchemas}} :مخططات في هذا الرسم البياني. يتم حاليا عرض {{schemasCount}} هناك',
|
||||
dont_show_again: 'لا تظهره مجدداً',
|
||||
change_schema: 'تغيير',
|
||||
// TODO: Translate
|
||||
show_me: 'Show me',
|
||||
none: 'لا شيء',
|
||||
},
|
||||
|
||||
@@ -151,6 +151,8 @@ export const ar: LanguageTranslation = {
|
||||
delete_field: 'حذف الحقل',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
},
|
||||
index_actions: {
|
||||
title: 'خصائص الفهرس',
|
||||
@@ -269,6 +271,8 @@ export const ar: LanguageTranslation = {
|
||||
redo: 'إعادة',
|
||||
reorder_diagram: 'إعادة ترتيب الرسم البياني',
|
||||
highlight_overlapping_tables: 'تمييز الجداول المتداخلة',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
|
||||
@@ -75,8 +75,8 @@ export const bn: LanguageTranslation = {
|
||||
title: 'বহু স্কিমা',
|
||||
description:
|
||||
'{{schemasCount}} স্কিমা এই ডায়াগ্রামে রয়েছে। বর্তমানে প্রদর্শিত: {{formattedSchemas}}।',
|
||||
dont_show_again: 'পুনরায় দেখাবেন না',
|
||||
change_schema: 'পরিবর্তন করুন',
|
||||
// TODO: Translate
|
||||
show_me: 'Show me',
|
||||
none: 'কিছুই না',
|
||||
},
|
||||
|
||||
@@ -151,6 +151,9 @@ export const bn: LanguageTranslation = {
|
||||
no_comments: 'কোনো মন্তব্য নেই',
|
||||
delete_field: 'ফিল্ড মুছুন',
|
||||
// TODO: Translate
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
@@ -269,6 +272,8 @@ export const bn: LanguageTranslation = {
|
||||
redo: 'পুনরায় করুন',
|
||||
reorder_diagram: 'ডায়াগ্রাম পুনর্বিন্যাস করুন',
|
||||
highlight_overlapping_tables: 'ওভারল্যাপিং টেবিল হাইলাইট করুন',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
|
||||
@@ -75,8 +75,8 @@ export const de: LanguageTranslation = {
|
||||
title: 'Mehrere Schemas',
|
||||
description:
|
||||
'{{schemasCount}} Schemas in diesem Diagramm. Derzeit angezeigt: {{formattedSchemas}}.',
|
||||
dont_show_again: 'Nicht erneut anzeigen',
|
||||
change_schema: 'Schema ändern',
|
||||
// TODO: Translate
|
||||
show_me: 'Show me',
|
||||
none: 'Keine',
|
||||
},
|
||||
|
||||
@@ -152,6 +152,9 @@ export const de: LanguageTranslation = {
|
||||
no_comments: 'Keine Kommentare',
|
||||
delete_field: 'Feld löschen',
|
||||
// TODO: Translate
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
@@ -271,6 +274,8 @@ export const de: LanguageTranslation = {
|
||||
redo: 'Wiederholen',
|
||||
reorder_diagram: 'Diagramm neu anordnen',
|
||||
highlight_overlapping_tables: 'Überlappende Tabellen hervorheben',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
|
||||
@@ -73,8 +73,7 @@ export const en = {
|
||||
title: 'Multiple Schemas',
|
||||
description:
|
||||
'{{schemasCount}} schemas in this diagram. Currently displaying: {{formattedSchemas}}.',
|
||||
dont_show_again: "Don't show again",
|
||||
change_schema: 'Change',
|
||||
show_me: 'Show me',
|
||||
none: 'none',
|
||||
},
|
||||
|
||||
@@ -146,6 +145,8 @@ export const en = {
|
||||
character_length: 'Max Length',
|
||||
comments: 'Comments',
|
||||
no_comments: 'No comments',
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
delete_field: 'Delete Field',
|
||||
},
|
||||
index_actions: {
|
||||
@@ -263,6 +264,7 @@ export const en = {
|
||||
redo: 'Redo',
|
||||
reorder_diagram: 'Reorder Diagram',
|
||||
highlight_overlapping_tables: 'Highlight Overlapping Tables',
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
|
||||
@@ -141,6 +141,9 @@ export const es: LanguageTranslation = {
|
||||
no_comments: 'Sin comentarios',
|
||||
delete_field: 'Eliminar Campo',
|
||||
// TODO: Translate
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
@@ -259,6 +262,8 @@ export const es: LanguageTranslation = {
|
||||
redo: 'Rehacer',
|
||||
reorder_diagram: 'Reordenar Diagrama',
|
||||
highlight_overlapping_tables: 'Resaltar tablas superpuestas',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
@@ -405,8 +410,8 @@ export const es: LanguageTranslation = {
|
||||
title: 'Múltiples Esquemas',
|
||||
description:
|
||||
'{{schemasCount}} esquemas en este diagrama. Actualmente mostrando: {{formattedSchemas}}.',
|
||||
dont_show_again: 'No mostrar de nuevo',
|
||||
change_schema: 'Cambiar',
|
||||
// TODO: Translate
|
||||
show_me: 'Show me',
|
||||
none: 'nada',
|
||||
},
|
||||
// TODO: Translate
|
||||
|
||||
@@ -139,6 +139,9 @@ export const fr: LanguageTranslation = {
|
||||
no_comments: 'Pas de commentaires',
|
||||
delete_field: 'Supprimer le Champ',
|
||||
// TODO: Translate
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
@@ -257,6 +260,8 @@ export const fr: LanguageTranslation = {
|
||||
redo: 'Rétablir',
|
||||
reorder_diagram: 'Réorganiser le Diagramme',
|
||||
highlight_overlapping_tables: 'Surligner les tables chevauchées',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
@@ -345,8 +350,8 @@ export const fr: LanguageTranslation = {
|
||||
title: 'Schémas Multiples',
|
||||
description:
|
||||
'{{schemasCount}} schémas dans ce diagramme. Actuellement affiché(s) : {{formattedSchemas}}.',
|
||||
dont_show_again: 'Ne plus afficher',
|
||||
change_schema: 'Changer',
|
||||
// TODO: Translate
|
||||
show_me: 'Show me',
|
||||
none: 'Aucun',
|
||||
},
|
||||
|
||||
|
||||
@@ -75,8 +75,8 @@ export const gu: LanguageTranslation = {
|
||||
title: 'કઈંક વધારે સ્કીમા',
|
||||
description:
|
||||
'{{schemasCount}} સ્કીમા આ ડાયાગ્રામમાં છે. હાલમાં દર્શાવેલ છે: {{formattedSchemas}}.',
|
||||
dont_show_again: 'ફરીથી ન બતાવો',
|
||||
change_schema: 'બદલો',
|
||||
// TODO: Translate
|
||||
show_me: 'Show me',
|
||||
none: 'કઈ નહીં',
|
||||
},
|
||||
|
||||
@@ -152,6 +152,9 @@ export const gu: LanguageTranslation = {
|
||||
no_comments: 'કોઈ ટિપ્પણીઓ નથી',
|
||||
delete_field: 'ફીલ્ડ કાઢી નાખો',
|
||||
// TODO: Translate
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
@@ -270,6 +273,8 @@ export const gu: LanguageTranslation = {
|
||||
redo: 'રીડુ',
|
||||
reorder_diagram: 'ડાયાગ્રામ ફરીથી વ્યવસ્થિત કરો',
|
||||
highlight_overlapping_tables: 'ઓવરલેપ કરતો ટેબલ હાઇલાઇટ કરો',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
|
||||
@@ -74,8 +74,8 @@ export const hi: LanguageTranslation = {
|
||||
title: 'एकाधिक स्कीमा',
|
||||
description:
|
||||
'{{schemasCount}} स्कीमा इस आरेख में हैं। वर्तमान में प्रदर्शित: {{formattedSchemas}}।',
|
||||
dont_show_again: 'फिर से न दिखाएँ',
|
||||
change_schema: 'बदलें',
|
||||
// TODO: Translate
|
||||
show_me: 'Show me',
|
||||
none: 'कोई नहीं',
|
||||
},
|
||||
|
||||
@@ -151,6 +151,9 @@ export const hi: LanguageTranslation = {
|
||||
no_comments: 'कोई टिप्पणी नहीं',
|
||||
delete_field: 'फ़ील्ड हटाएँ',
|
||||
// TODO: Translate
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
@@ -270,6 +273,8 @@ export const hi: LanguageTranslation = {
|
||||
redo: 'पुनः करें',
|
||||
reorder_diagram: 'आरेख पुनः व्यवस्थित करें',
|
||||
highlight_overlapping_tables: 'ओवरलैपिंग तालिकाओं को हाइलाइट करें',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
|
||||
@@ -74,8 +74,8 @@ export const id_ID: LanguageTranslation = {
|
||||
title: 'Schema Lebih dari satu',
|
||||
description:
|
||||
'{{schemasCount}} schema di diagram ini. Sedang ditampilkan: {{formattedSchemas}}.',
|
||||
dont_show_again: 'Jangan tampilkan lagi',
|
||||
change_schema: 'Ubah',
|
||||
// TODO: Translate
|
||||
show_me: 'Show me',
|
||||
none: 'Tidak ada',
|
||||
},
|
||||
|
||||
@@ -150,6 +150,9 @@ export const id_ID: LanguageTranslation = {
|
||||
no_comments: 'Tidak ada komentar',
|
||||
delete_field: 'Hapus Kolom',
|
||||
// TODO: Translate
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
@@ -268,6 +271,8 @@ export const id_ID: LanguageTranslation = {
|
||||
redo: 'Redo',
|
||||
reorder_diagram: 'Atur Ulang Diagram',
|
||||
highlight_overlapping_tables: 'Sorot Tabel yang Tumpang Tindih',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
|
||||
@@ -76,8 +76,8 @@ export const ja: LanguageTranslation = {
|
||||
title: '複数のスキーマ',
|
||||
description:
|
||||
'このダイアグラムには{{schemasCount}}個のスキーマがあります。現在表示中: {{formattedSchemas}}。',
|
||||
dont_show_again: '再表示しない',
|
||||
change_schema: '変更',
|
||||
// TODO: Translate
|
||||
show_me: 'Show me',
|
||||
none: 'なし',
|
||||
},
|
||||
|
||||
@@ -154,6 +154,9 @@ export const ja: LanguageTranslation = {
|
||||
no_comments: 'コメントがありません',
|
||||
delete_field: 'フィールドを削除',
|
||||
// TODO: Translate
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
@@ -275,6 +278,8 @@ export const ja: LanguageTranslation = {
|
||||
reorder_diagram: 'ダイアグラムを並べ替え',
|
||||
// TODO: Translate
|
||||
highlight_overlapping_tables: 'Highlight Overlapping Tables',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
|
||||
@@ -74,8 +74,8 @@ export const ko_KR: LanguageTranslation = {
|
||||
title: '다중 스키마',
|
||||
description:
|
||||
'현재 다이어그램에 {{schemasCount}}개의 스키마가 있습니다. Currently displaying: {{formattedSchemas}}.',
|
||||
dont_show_again: '다시 보여주지 마세요',
|
||||
change_schema: '변경',
|
||||
// TODO: Translate
|
||||
show_me: 'Show me',
|
||||
none: '없음',
|
||||
},
|
||||
|
||||
@@ -150,6 +150,9 @@ export const ko_KR: LanguageTranslation = {
|
||||
no_comments: '주석 없음',
|
||||
delete_field: '필드 삭제',
|
||||
// TODO: Translate
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
@@ -268,6 +271,8 @@ export const ko_KR: LanguageTranslation = {
|
||||
redo: '다시 실행',
|
||||
reorder_diagram: '다이어그램 재정렬',
|
||||
highlight_overlapping_tables: '겹치는 테이블 강조 표시',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
|
||||
@@ -75,8 +75,8 @@ export const mr: LanguageTranslation = {
|
||||
title: 'एकाधिक स्कीमा',
|
||||
description:
|
||||
'{{schemasCount}} स्कीमा या आरेखात आहेत. सध्या दाखवत आहोत: {{formattedSchemas}}.',
|
||||
dont_show_again: 'पुन्हा दाखवू नका',
|
||||
change_schema: 'बदला',
|
||||
// TODO: Translate
|
||||
show_me: 'Show me',
|
||||
none: 'काहीही नाही',
|
||||
},
|
||||
|
||||
@@ -153,6 +153,9 @@ export const mr: LanguageTranslation = {
|
||||
no_comments: 'कोणत्याही टिप्पणी नाहीत',
|
||||
delete_field: 'फील्ड हटवा',
|
||||
// TODO: Translate
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
@@ -273,6 +276,8 @@ export const mr: LanguageTranslation = {
|
||||
redo: 'पुन्हा करा',
|
||||
reorder_diagram: 'आरेख पुनःक्रमित करा',
|
||||
highlight_overlapping_tables: 'ओव्हरलॅपिंग टेबल्स हायलाइट करा',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
|
||||
@@ -75,8 +75,8 @@ export const ne: LanguageTranslation = {
|
||||
title: 'विविध स्कीमहरू',
|
||||
description:
|
||||
'{{schemasCount}} डायाग्राममा स्कीमहरू। हालको रूपमा देखाइएको छ: {{formattedSchemas}}।',
|
||||
dont_show_again: 'फेरि देखाउन नदिनुहोस्',
|
||||
change_schema: 'स्कीम परिवर्तन गर्नुहोस्',
|
||||
// TODO: Translate
|
||||
show_me: 'Show me',
|
||||
none: 'कुनै पनि छैन',
|
||||
},
|
||||
|
||||
@@ -151,6 +151,9 @@ export const ne: LanguageTranslation = {
|
||||
no_comments: 'कुनै टिप्पणीहरू छैनन्',
|
||||
delete_field: 'क्षेत्र हटाउनुहोस्',
|
||||
// TODO: Translate
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
@@ -271,6 +274,8 @@ export const ne: LanguageTranslation = {
|
||||
reorder_diagram: 'पुनः क्रमबद्ध गर्नुहोस्',
|
||||
highlight_overlapping_tables:
|
||||
'अतिरिक्त तालिकाहरू हाइलाइट गर्नुहोस्',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
|
||||
@@ -75,8 +75,8 @@ export const pt_BR: LanguageTranslation = {
|
||||
title: 'Múltiplos Esquemas',
|
||||
description:
|
||||
'{{schemasCount}} esquemas neste diagrama. Atualmente exibindo: {{formattedSchemas}}.',
|
||||
dont_show_again: 'Não mostrar novamente',
|
||||
change_schema: 'Alterar',
|
||||
// TODO: Translate
|
||||
show_me: 'Show me',
|
||||
none: 'nenhum',
|
||||
},
|
||||
|
||||
@@ -151,6 +151,9 @@ export const pt_BR: LanguageTranslation = {
|
||||
no_comments: 'Sem comentários',
|
||||
delete_field: 'Excluir Campo',
|
||||
// TODO: Translate
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
@@ -269,6 +272,8 @@ export const pt_BR: LanguageTranslation = {
|
||||
redo: 'Refazer',
|
||||
reorder_diagram: 'Reordenar Diagrama',
|
||||
highlight_overlapping_tables: 'Destacar Tabelas Sobrepostas',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
|
||||
@@ -73,8 +73,8 @@ export const ru: LanguageTranslation = {
|
||||
title: 'Множественные схемы',
|
||||
description:
|
||||
'{{schemasCount}} схем в этой диаграмме. В данный момент отображается: {{formattedSchemas}}.',
|
||||
dont_show_again: 'Больше не показывать',
|
||||
change_schema: 'Изменить',
|
||||
// TODO: Translate
|
||||
show_me: 'Show me',
|
||||
none: 'никто',
|
||||
},
|
||||
|
||||
@@ -147,6 +147,9 @@ export const ru: LanguageTranslation = {
|
||||
comments: 'Комментарии',
|
||||
no_comments: 'Нет комментария',
|
||||
delete_field: 'Удалить поле',
|
||||
// TODO: Translate
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
character_length: 'Макс. длина',
|
||||
},
|
||||
index_actions: {
|
||||
@@ -266,6 +269,8 @@ export const ru: LanguageTranslation = {
|
||||
redo: 'Вернуть',
|
||||
reorder_diagram: 'Переупорядочить диаграмму',
|
||||
highlight_overlapping_tables: 'Выделение перекрывающихся таблиц',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
|
||||
@@ -75,8 +75,8 @@ export const te: LanguageTranslation = {
|
||||
title: 'బహుళ స్కీమాలు',
|
||||
description:
|
||||
'{{schemasCount}} స్కీమాలు ఈ చిత్రంలో ఉన్నాయి. ప్రస్తుత స్కీమాలు: {{formattedSchemas}}.',
|
||||
dont_show_again: 'మరలా చూపించవద్దు',
|
||||
change_schema: 'మార్చు',
|
||||
// TODO: Translate
|
||||
show_me: 'Show me',
|
||||
none: 'ఎదరికాదు',
|
||||
},
|
||||
|
||||
@@ -151,6 +151,9 @@ export const te: LanguageTranslation = {
|
||||
no_comments: 'వ్యాఖ్యలు లేవు',
|
||||
delete_field: 'ఫీల్డ్ తొలగించు',
|
||||
// TODO: Translate
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
@@ -270,6 +273,8 @@ export const te: LanguageTranslation = {
|
||||
redo: 'మరలా చేయు',
|
||||
reorder_diagram: 'చిత్రాన్ని పునఃసరిచేయండి',
|
||||
highlight_overlapping_tables: 'అవకాశించు పట్టికలను హైలైట్ చేయండి',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
|
||||
@@ -75,8 +75,8 @@ export const tr: LanguageTranslation = {
|
||||
title: 'Birden Fazla Şema',
|
||||
description:
|
||||
'Bu diyagramda {{schemasCount}} şema var. Şu anda görüntülenen: {{formattedSchemas}}.',
|
||||
dont_show_again: 'Tekrar gösterme',
|
||||
change_schema: 'Değiştir',
|
||||
// TODO: Translate
|
||||
show_me: 'Show me',
|
||||
none: 'yok',
|
||||
},
|
||||
|
||||
@@ -150,6 +150,9 @@ export const tr: LanguageTranslation = {
|
||||
no_comments: 'Yorum yok',
|
||||
delete_field: 'Alanı Sil',
|
||||
// TODO: Translate
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
@@ -268,6 +271,8 @@ export const tr: LanguageTranslation = {
|
||||
redo: 'Yinele',
|
||||
reorder_diagram: 'Diyagramı Yeniden Sırala',
|
||||
highlight_overlapping_tables: 'Çakışan Tabloları Vurgula',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
new_diagram_dialog: {
|
||||
database_selection: {
|
||||
|
||||
@@ -73,8 +73,8 @@ export const uk: LanguageTranslation = {
|
||||
title: 'Кілька схем',
|
||||
description:
|
||||
'{{schemasCount}} схеми на цій діаграмі. Зараз відображається: {{formattedSchemas}}.',
|
||||
dont_show_again: 'Більше не показувати',
|
||||
change_schema: 'Зміна',
|
||||
// TODO: Translate
|
||||
show_me: 'Show me',
|
||||
none: 'немає',
|
||||
},
|
||||
|
||||
@@ -149,6 +149,9 @@ export const uk: LanguageTranslation = {
|
||||
no_comments: 'Немає коментарів',
|
||||
delete_field: 'Видалити поле',
|
||||
// TODO: Translate
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
@@ -267,6 +270,8 @@ export const uk: LanguageTranslation = {
|
||||
redo: 'Повторити',
|
||||
reorder_diagram: 'Перевпорядкувати діаграму',
|
||||
highlight_overlapping_tables: 'Показати таблиці, що перекриваються',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
|
||||
@@ -74,8 +74,8 @@ export const vi: LanguageTranslation = {
|
||||
title: 'Có nhiều lược đồ',
|
||||
description:
|
||||
'Có {{schemasCount}} lược đồ trong sơ đồ này. Hiện đang hiển thị: {{formattedSchemas}}.',
|
||||
dont_show_again: 'Không hiển thị lại',
|
||||
change_schema: 'Thay đổi',
|
||||
// TODO: Translate
|
||||
show_me: 'Show me',
|
||||
none: 'không có',
|
||||
},
|
||||
|
||||
@@ -150,6 +150,9 @@ export const vi: LanguageTranslation = {
|
||||
no_comments: 'Không có bình luận',
|
||||
delete_field: 'Xóa trường',
|
||||
// TODO: Translate
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
@@ -268,6 +271,8 @@ export const vi: LanguageTranslation = {
|
||||
redo: 'Làm lại',
|
||||
reorder_diagram: 'Sắp xếp lại sơ đồ',
|
||||
highlight_overlapping_tables: 'Làm nổi bật các bảng chồng chéo',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
|
||||
@@ -71,8 +71,8 @@ export const zh_CN: LanguageTranslation = {
|
||||
title: '多个模式',
|
||||
description:
|
||||
'此关系图中有 {{schemasCount}} 个模式,当前显示:{{formattedSchemas}}。',
|
||||
dont_show_again: '不再展示',
|
||||
change_schema: '更改',
|
||||
// TODO: Translate
|
||||
show_me: 'Show me',
|
||||
none: '无',
|
||||
},
|
||||
|
||||
@@ -147,6 +147,9 @@ export const zh_CN: LanguageTranslation = {
|
||||
no_comments: '空',
|
||||
delete_field: '删除字段',
|
||||
// TODO: Translate
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
@@ -265,6 +268,8 @@ export const zh_CN: LanguageTranslation = {
|
||||
redo: '重做',
|
||||
reorder_diagram: '重新排列关系图',
|
||||
highlight_overlapping_tables: '突出显示重叠的表',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {
|
||||
|
||||
@@ -71,8 +71,8 @@ export const zh_TW: LanguageTranslation = {
|
||||
title: '多重 Schema',
|
||||
description:
|
||||
'此圖表中包含 {{schemasCount}} 個 Schema,目前顯示:{{formattedSchemas}}。',
|
||||
dont_show_again: '不再顯示',
|
||||
change_schema: '變更',
|
||||
// TODO: Translate
|
||||
show_me: 'Show me',
|
||||
none: '無',
|
||||
},
|
||||
|
||||
@@ -147,6 +147,9 @@ export const zh_TW: LanguageTranslation = {
|
||||
no_comments: '無註解',
|
||||
delete_field: '刪除欄位',
|
||||
// TODO: Translate
|
||||
default_value: 'Default Value',
|
||||
no_default: 'No default',
|
||||
// TODO: Translate
|
||||
character_length: 'Max Length',
|
||||
},
|
||||
index_actions: {
|
||||
@@ -265,6 +268,8 @@ export const zh_TW: LanguageTranslation = {
|
||||
redo: '重做',
|
||||
reorder_diagram: '重新排列圖表',
|
||||
highlight_overlapping_tables: '突出顯示重疊表格',
|
||||
// TODO: Translate
|
||||
filter: 'Filter Tables',
|
||||
},
|
||||
|
||||
new_diagram_dialog: {

870
src/lib/data/export-metadata/__tests__/export-sql-dbml.test.ts
Normal file
@@ -0,0 +1,870 @@
|
||||
import { describe, it, expect, vi } from 'vitest';
|
||||
import { exportBaseSQL } from '../export-sql-script';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import type { Diagram } from '@/lib/domain/diagram';
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import type { DBField } from '@/lib/domain/db-field';
|
||||
|
||||
// Mock the dbml/core importer
|
||||
vi.mock('@dbml/core', () => ({
|
||||
importer: {
|
||||
import: vi.fn((sql: string) => {
|
||||
// Return a simplified DBML for testing
|
||||
return sql;
|
||||
}),
|
||||
},
|
||||
}));
|
||||
|
||||
describe('DBML Export - SQL Generation Tests', () => {
|
||||
// Helper to generate test IDs and timestamps
|
||||
let idCounter = 0;
|
||||
const testId = () => `test-id-${++idCounter}`;
|
||||
const testTime = Date.now();
|
||||
|
||||
// Helper to create a field with all required properties
|
||||
const createField = (overrides: Partial<DBField>): DBField =>
|
||||
({
|
||||
id: testId(),
|
||||
name: 'field',
|
||||
type: { id: 'text', name: 'text' },
|
||||
primaryKey: false,
|
||||
nullable: true,
|
||||
unique: false,
|
||||
createdAt: testTime,
|
||||
...overrides,
|
||||
}) as DBField;
|
||||
|
||||
// Helper to create a table with all required properties
|
||||
const createTable = (overrides: Partial<DBTable>): DBTable =>
|
||||
({
|
||||
id: testId(),
|
||||
name: 'table',
|
||||
fields: [],
|
||||
indexes: [],
|
||||
createdAt: testTime,
|
||||
x: 0,
|
||||
y: 0,
|
||||
width: 200,
|
||||
...overrides,
|
||||
}) as DBTable;
|
||||
|
||||
// Helper to create a diagram with all required properties
|
||||
const createDiagram = (overrides: Partial<Diagram>): Diagram =>
|
||||
({
|
||||
id: testId(),
|
||||
name: 'diagram',
|
||||
databaseType: DatabaseType.GENERIC,
|
||||
tables: [],
|
||||
relationships: [],
|
||||
createdAt: testTime,
|
||||
updatedAt: testTime,
|
||||
...overrides,
|
||||
}) as Diagram;
|
||||
|
||||
describe('Composite Primary Keys', () => {
|
||||
it('should handle tables with composite primary keys correctly', () => {
|
||||
const tableId = testId();
|
||||
const field1Id = testId();
|
||||
const field2Id = testId();
|
||||
|
||||
const diagram: Diagram = createDiagram({
|
||||
id: testId(),
|
||||
name: 'Enchanted Library',
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
tables: [
|
||||
createTable({
|
||||
id: tableId,
|
||||
name: 'spell_components',
|
||||
fields: [
|
||||
createField({
|
||||
id: field1Id,
|
||||
name: 'spell_id',
|
||||
type: { id: 'uuid', name: 'uuid' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: field2Id,
|
||||
name: 'component_id',
|
||||
type: { id: 'uuid', name: 'uuid' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'quantity',
|
||||
type: { id: 'integer', name: 'integer' },
|
||||
primaryKey: false,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
default: '1',
|
||||
}),
|
||||
],
|
||||
indexes: [],
|
||||
color: '#FFD700',
|
||||
}),
|
||||
],
|
||||
relationships: [],
|
||||
});
|
||||
|
||||
const sql = exportBaseSQL({
|
||||
diagram,
|
||||
targetDatabaseType: DatabaseType.POSTGRESQL,
|
||||
isDBMLFlow: true,
|
||||
});
|
||||
|
||||
// Should contain composite primary key syntax
|
||||
expect(sql).toContain('PRIMARY KEY (spell_id, component_id)');
|
||||
// Should NOT contain individual PRIMARY KEY constraints
|
||||
expect(sql).not.toMatch(/spell_id\s+uuid\s+NOT NULL\s+PRIMARY KEY/);
|
||||
expect(sql).not.toMatch(
|
||||
/component_id\s+uuid\s+NOT NULL\s+PRIMARY KEY/
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle single primary keys inline', () => {
|
||||
const diagram: Diagram = createDiagram({
|
||||
id: testId(),
|
||||
name: 'Wizard Academy',
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
tables: [
|
||||
createTable({
|
||||
id: testId(),
|
||||
name: 'wizards',
|
||||
fields: [
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'id',
|
||||
type: { id: 'uuid', name: 'uuid' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'name',
|
||||
type: { id: 'varchar', name: 'varchar' },
|
||||
primaryKey: false,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
],
|
||||
indexes: [],
|
||||
color: '#9370DB',
|
||||
}),
|
||||
],
|
||||
relationships: [],
|
||||
});
|
||||
|
||||
const sql = exportBaseSQL({
|
||||
diagram,
|
||||
targetDatabaseType: DatabaseType.POSTGRESQL,
|
||||
isDBMLFlow: true,
|
||||
});
|
||||
|
||||
// Should contain inline PRIMARY KEY
|
||||
expect(sql).toMatch(/id\s+uuid\s+NOT NULL\s+PRIMARY KEY/);
|
||||
// Should NOT contain separate PRIMARY KEY constraint
|
||||
expect(sql).not.toContain('PRIMARY KEY (id)');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Default Value Handling', () => {
|
||||
it('should skip invalid default values like "has default"', () => {
|
||||
const diagram: Diagram = createDiagram({
|
||||
id: testId(),
|
||||
name: 'Potion Shop',
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
tables: [
|
||||
createTable({
|
||||
id: testId(),
|
||||
name: 'potions',
|
||||
fields: [
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'id',
|
||||
type: { id: 'uuid', name: 'uuid' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'is_active',
|
||||
type: { id: 'boolean', name: 'boolean' },
|
||||
primaryKey: false,
|
||||
nullable: true,
|
||||
unique: false,
|
||||
default: 'has default',
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'stock_count',
|
||||
type: { id: 'integer', name: 'integer' },
|
||||
primaryKey: false,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
default: 'DEFAULT has default',
|
||||
}),
|
||||
],
|
||||
indexes: [],
|
||||
color: '#98FB98',
|
||||
}),
|
||||
],
|
||||
relationships: [],
|
||||
});
|
||||
|
||||
const sql = exportBaseSQL({
|
||||
diagram,
|
||||
targetDatabaseType: DatabaseType.POSTGRESQL,
|
||||
isDBMLFlow: true,
|
||||
});
|
||||
|
||||
// Should not contain invalid default values
|
||||
expect(sql).not.toContain('DEFAULT has default');
|
||||
expect(sql).not.toContain('DEFAULT DEFAULT has default');
|
||||
// The fields should still be in the table
|
||||
expect(sql).toContain('is_active boolean');
|
||||
expect(sql).toContain('stock_count int NOT NULL'); // integer gets simplified to int
|
||||
});
|
||||
|
||||
it('should handle valid default values correctly', () => {
|
||||
const diagram: Diagram = createDiagram({
|
||||
id: testId(),
|
||||
name: 'Treasure Vault',
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
tables: [
|
||||
createTable({
|
||||
id: testId(),
|
||||
name: 'treasures',
|
||||
fields: [
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'id',
|
||||
type: { id: 'uuid', name: 'uuid' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'gold_value',
|
||||
type: { id: 'numeric', name: 'numeric' },
|
||||
primaryKey: false,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
default: '100.50',
|
||||
precision: 10,
|
||||
scale: 2,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'created_at',
|
||||
type: { id: 'timestamp', name: 'timestamp' },
|
||||
primaryKey: false,
|
||||
nullable: true,
|
||||
unique: false,
|
||||
default: 'now()',
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'currency',
|
||||
type: { id: 'char', name: 'char' },
|
||||
characterMaximumLength: '3',
|
||||
primaryKey: false,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
default: 'EUR',
|
||||
}),
|
||||
],
|
||||
indexes: [],
|
||||
color: '#FFD700',
|
||||
}),
|
||||
],
|
||||
relationships: [],
|
||||
});
|
||||
|
||||
const sql = exportBaseSQL({
|
||||
diagram,
|
||||
targetDatabaseType: DatabaseType.POSTGRESQL,
|
||||
isDBMLFlow: true,
|
||||
});
|
||||
|
||||
// Should contain valid defaults
|
||||
expect(sql).toContain('DEFAULT 100.50');
|
||||
expect(sql).toContain('DEFAULT now()');
|
||||
expect(sql).toContain('DEFAULT EUR');
|
||||
});
|
||||
|
||||
it('should handle NOW and similar default values', () => {
|
||||
const diagram: Diagram = createDiagram({
|
||||
id: testId(),
|
||||
name: 'Quest Log',
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
tables: [
|
||||
createTable({
|
||||
id: testId(),
|
||||
name: 'quests',
|
||||
fields: [
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'id',
|
||||
type: { id: 'uuid', name: 'uuid' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'created_at',
|
||||
type: { id: 'timestamp', name: 'timestamp' },
|
||||
primaryKey: false,
|
||||
nullable: true,
|
||||
unique: false,
|
||||
default: 'NOW',
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'updated_at',
|
||||
type: { id: 'timestamp', name: 'timestamp' },
|
||||
primaryKey: false,
|
||||
nullable: true,
|
||||
unique: false,
|
||||
default: "('now')",
|
||||
}),
|
||||
],
|
||||
indexes: [],
|
||||
color: '#4169E1',
|
||||
}),
|
||||
],
|
||||
relationships: [],
|
||||
});
|
||||
|
||||
const sql = exportBaseSQL({
|
||||
diagram,
|
||||
targetDatabaseType: DatabaseType.POSTGRESQL,
|
||||
isDBMLFlow: true,
|
||||
});
|
||||
|
||||
// Should keep a bare NOW default and normalize ('now') to now()
|
||||
expect(sql).toContain('created_at timestamp DEFAULT NOW');
|
||||
expect(sql).toContain('updated_at timestamp DEFAULT now()');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Character Type Handling', () => {
|
||||
it('should handle char types with and without length correctly', () => {
|
||||
const diagram: Diagram = createDiagram({
|
||||
id: testId(),
|
||||
name: 'Dragon Registry',
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
tables: [
|
||||
createTable({
|
||||
id: testId(),
|
||||
name: 'dragons',
|
||||
fields: [
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'id',
|
||||
type: { id: 'uuid', name: 'uuid' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'element_code',
|
||||
type: { id: 'char', name: 'char' },
|
||||
characterMaximumLength: '2',
|
||||
primaryKey: false,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'status',
|
||||
type: { id: 'char', name: 'char' },
|
||||
primaryKey: false,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
],
|
||||
indexes: [],
|
||||
color: '#FF6347',
|
||||
}),
|
||||
],
|
||||
relationships: [],
|
||||
});
|
||||
|
||||
const sql = exportBaseSQL({
|
||||
diagram,
|
||||
targetDatabaseType: DatabaseType.POSTGRESQL,
|
||||
isDBMLFlow: true,
|
||||
});
|
||||
|
||||
// Should handle char with explicit length
|
||||
expect(sql).toContain('element_code char(2)');
|
||||
// Should add default length for char without length
|
||||
expect(sql).toContain('status char(1)');
|
||||
});
|
||||
|
||||
it('should not have spaces between char and parentheses', () => {
|
||||
const diagram: Diagram = createDiagram({
|
||||
id: testId(),
|
||||
name: 'Rune Inscriptions',
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
tables: [
|
||||
createTable({
|
||||
id: testId(),
|
||||
name: 'runes',
|
||||
fields: [
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'id',
|
||||
type: { id: 'integer', name: 'integer' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'symbol',
|
||||
type: { id: 'char', name: 'char' },
|
||||
characterMaximumLength: '5',
|
||||
primaryKey: false,
|
||||
nullable: false,
|
||||
unique: true,
|
||||
}),
|
||||
],
|
||||
indexes: [],
|
||||
color: '#8B4513',
|
||||
}),
|
||||
],
|
||||
relationships: [],
|
||||
});
|
||||
|
||||
const sql = exportBaseSQL({
|
||||
diagram,
|
||||
targetDatabaseType: DatabaseType.POSTGRESQL,
|
||||
isDBMLFlow: true,
|
||||
});
|
||||
|
||||
// Should not contain "char (" with space
|
||||
expect(sql).not.toContain('char (');
|
||||
expect(sql).toContain('char(5)');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Complex Table Structures', () => {
|
||||
it('should handle tables with no primary key', () => {
|
||||
const diagram: Diagram = createDiagram({
|
||||
id: testId(),
|
||||
name: 'Alchemy Log',
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
tables: [
|
||||
createTable({
|
||||
id: testId(),
|
||||
name: 'experiment_logs',
|
||||
fields: [
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'experiment_id',
|
||||
type: { id: 'uuid', name: 'uuid' },
|
||||
primaryKey: false,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'alchemist_id',
|
||||
type: { id: 'uuid', name: 'uuid' },
|
||||
primaryKey: false,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'result',
|
||||
type: { id: 'text', name: 'text' },
|
||||
primaryKey: false,
|
||||
nullable: true,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'logged_at',
|
||||
type: { id: 'timestamp', name: 'timestamp' },
|
||||
primaryKey: false,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
default: 'now()',
|
||||
}),
|
||||
],
|
||||
indexes: [],
|
||||
color: '#32CD32',
|
||||
}),
|
||||
],
|
||||
relationships: [],
|
||||
});
|
||||
|
||||
const sql = exportBaseSQL({
|
||||
diagram,
|
||||
targetDatabaseType: DatabaseType.POSTGRESQL,
|
||||
isDBMLFlow: true,
|
||||
});
|
||||
|
||||
// Should create a valid table without primary key
|
||||
expect(sql).toContain('CREATE TABLE experiment_logs');
|
||||
expect(sql).not.toContain('PRIMARY KEY');
|
||||
});
|
||||
|
||||
it('should handle multiple tables with relationships', () => {
|
||||
const guildTableId = testId();
|
||||
const memberTableId = testId();
|
||||
const guildIdFieldId = testId();
|
||||
const memberGuildIdFieldId = testId();
|
||||
|
||||
const diagram: Diagram = createDiagram({
|
||||
id: testId(),
|
||||
name: 'Adventurer Guild System',
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
tables: [
|
||||
createTable({
|
||||
id: guildTableId,
|
||||
name: 'guilds',
|
||||
fields: [
|
||||
createField({
|
||||
id: guildIdFieldId,
|
||||
name: 'id',
|
||||
type: { id: 'uuid', name: 'uuid' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'name',
|
||||
type: { id: 'varchar', name: 'varchar' },
|
||||
primaryKey: false,
|
||||
nullable: false,
|
||||
unique: true,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'founded_year',
|
||||
type: { id: 'integer', name: 'integer' },
|
||||
primaryKey: false,
|
||||
nullable: true,
|
||||
unique: false,
|
||||
}),
|
||||
],
|
||||
indexes: [],
|
||||
x: 0,
|
||||
y: 0,
|
||||
color: '#4169E1',
|
||||
}),
|
||||
createTable({
|
||||
id: memberTableId,
|
||||
name: 'guild_members',
|
||||
fields: [
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'id',
|
||||
type: { id: 'uuid', name: 'uuid' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: memberGuildIdFieldId,
|
||||
name: 'guild_id',
|
||||
type: { id: 'uuid', name: 'uuid' },
|
||||
primaryKey: false,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'member_name',
|
||||
type: { id: 'varchar', name: 'varchar' },
|
||||
primaryKey: false,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'rank',
|
||||
type: { id: 'varchar', name: 'varchar' },
|
||||
primaryKey: false,
|
||||
nullable: true,
|
||||
unique: false,
|
||||
default: "'Novice'",
|
||||
}),
|
||||
],
|
||||
indexes: [],
|
||||
x: 250,
|
||||
y: 0,
|
||||
color: '#FFD700',
|
||||
}),
|
||||
],
|
||||
relationships: [
|
||||
{
|
||||
id: testId(),
|
||||
name: 'fk_guild_members_guild',
|
||||
sourceTableId: memberTableId,
|
||||
targetTableId: guildTableId,
|
||||
sourceFieldId: memberGuildIdFieldId,
|
||||
targetFieldId: guildIdFieldId,
|
||||
sourceCardinality: 'many',
|
||||
targetCardinality: 'one',
|
||||
createdAt: testTime,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const sql = exportBaseSQL({
|
||||
diagram,
|
||||
targetDatabaseType: DatabaseType.POSTGRESQL,
|
||||
isDBMLFlow: true,
|
||||
});
|
||||
|
||||
// Should create both tables
|
||||
expect(sql).toContain('CREATE TABLE guilds');
|
||||
expect(sql).toContain('CREATE TABLE guild_members');
|
||||
// Should create foreign key
|
||||
expect(sql).toContain(
|
||||
'ALTER TABLE guild_members ADD CONSTRAINT fk_guild_members_guild FOREIGN KEY (guild_id) REFERENCES guilds (id)'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Schema Support', () => {
|
||||
it('should handle tables with schemas correctly', () => {
|
||||
const diagram: Diagram = createDiagram({
|
||||
id: testId(),
|
||||
name: 'Multi-Realm Database',
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
tables: [
|
||||
createTable({
|
||||
id: testId(),
|
||||
name: 'portals',
|
||||
schema: 'transportation',
|
||||
fields: [
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'id',
|
||||
type: { id: 'uuid', name: 'uuid' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'destination',
|
||||
type: { id: 'varchar', name: 'varchar' },
|
||||
primaryKey: false,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
],
|
||||
indexes: [],
|
||||
color: '#9370DB',
|
||||
}),
|
||||
createTable({
|
||||
id: testId(),
|
||||
name: 'spells',
|
||||
schema: 'magic',
|
||||
fields: [
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'id',
|
||||
type: { id: 'integer', name: 'integer' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'name',
|
||||
type: { id: 'varchar', name: 'varchar' },
|
||||
primaryKey: false,
|
||||
nullable: false,
|
||||
unique: true,
|
||||
}),
|
||||
],
|
||||
indexes: [],
|
||||
x: 250,
|
||||
y: 0,
|
||||
color: '#FF1493',
|
||||
}),
|
||||
],
|
||||
relationships: [],
|
||||
});
|
||||
|
||||
const sql = exportBaseSQL({
|
||||
diagram,
|
||||
targetDatabaseType: DatabaseType.POSTGRESQL,
|
||||
isDBMLFlow: true,
|
||||
});
|
||||
|
||||
// Should create schemas
|
||||
expect(sql).toContain('CREATE SCHEMA IF NOT EXISTS transportation');
|
||||
expect(sql).toContain('CREATE SCHEMA IF NOT EXISTS magic');
|
||||
// Should use schema-qualified table names
|
||||
expect(sql).toContain('CREATE TABLE transportation.portals');
|
||||
expect(sql).toContain('CREATE TABLE magic.spells');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge Cases', () => {
|
||||
it('should handle empty tables array', () => {
|
||||
const diagram: Diagram = createDiagram({
|
||||
id: testId(),
|
||||
name: 'Empty Realm',
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
tables: [],
|
||||
relationships: [],
|
||||
});
|
||||
|
||||
const sql = exportBaseSQL({
|
||||
diagram,
|
||||
targetDatabaseType: DatabaseType.POSTGRESQL,
|
||||
isDBMLFlow: true,
|
||||
});
|
||||
|
||||
expect(sql).toBe('');
|
||||
});
|
||||
|
||||
it('should handle tables with empty fields', () => {
|
||||
const diagram: Diagram = createDiagram({
|
||||
id: testId(),
|
||||
name: 'Void Space',
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
tables: [
|
||||
createTable({
|
||||
id: testId(),
|
||||
name: 'empty_table',
|
||||
fields: [],
|
||||
indexes: [],
|
||||
color: '#000000',
|
||||
}),
|
||||
],
|
||||
relationships: [],
|
||||
});
|
||||
|
||||
const sql = exportBaseSQL({
|
||||
diagram,
|
||||
targetDatabaseType: DatabaseType.POSTGRESQL,
|
||||
isDBMLFlow: true,
|
||||
});
|
||||
|
||||
// Should still create table structure
|
||||
expect(sql).toContain('CREATE TABLE empty_table');
|
||||
expect(sql).toContain('(\n\n)');
|
||||
});
|
||||
|
||||
it('should handle special characters in default values', () => {
|
||||
const diagram: Diagram = createDiagram({
|
||||
id: testId(),
|
||||
name: 'Mystic Scrolls',
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
tables: [
|
||||
createTable({
|
||||
id: testId(),
|
||||
name: 'scrolls',
|
||||
fields: [
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'id',
|
||||
type: { id: 'uuid', name: 'uuid' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'inscription',
|
||||
type: { id: 'text', name: 'text' },
|
||||
primaryKey: false,
|
||||
nullable: true,
|
||||
unique: false,
|
||||
default: "'Ancient\\'s Wisdom'",
|
||||
}),
|
||||
],
|
||||
indexes: [],
|
||||
color: '#8B4513',
|
||||
}),
|
||||
],
|
||||
relationships: [],
|
||||
});
|
||||
|
||||
const sql = exportBaseSQL({
|
||||
diagram,
|
||||
targetDatabaseType: DatabaseType.POSTGRESQL,
|
||||
isDBMLFlow: true,
|
||||
});
|
||||
|
||||
// Should preserve escaped quotes
|
||||
expect(sql).toContain("DEFAULT 'Ancient\\'s Wisdom'");
|
||||
});
|
||||
|
||||
it('should handle numeric precision and scale', () => {
|
||||
const diagram: Diagram = createDiagram({
|
||||
id: testId(),
|
||||
name: 'Treasury',
|
||||
databaseType: DatabaseType.POSTGRESQL,
|
||||
tables: [
|
||||
createTable({
|
||||
id: testId(),
|
||||
name: 'gold_reserves',
|
||||
fields: [
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'id',
|
||||
type: { id: 'integer', name: 'integer' },
|
||||
primaryKey: true,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'amount',
|
||||
type: { id: 'numeric', name: 'numeric' },
|
||||
primaryKey: false,
|
||||
nullable: false,
|
||||
unique: false,
|
||||
precision: 15,
|
||||
scale: 2,
|
||||
}),
|
||||
createField({
|
||||
id: testId(),
|
||||
name: 'interest_rate',
|
||||
type: { id: 'numeric', name: 'numeric' },
|
||||
primaryKey: false,
|
||||
nullable: true,
|
||||
unique: false,
|
||||
precision: 5,
|
||||
}),
|
||||
],
|
||||
indexes: [],
|
||||
color: '#FFD700',
|
||||
}),
|
||||
],
|
||||
relationships: [],
|
||||
});
|
||||
|
||||
const sql = exportBaseSQL({
|
||||
diagram,
|
||||
targetDatabaseType: DatabaseType.POSTGRESQL,
|
||||
isDBMLFlow: true,
|
||||
});
|
||||
|
||||
// Should include precision and scale
|
||||
expect(sql).toContain('amount numeric(15, 2)');
|
||||
// Should include precision only when scale is not provided
|
||||
expect(sql).toContain('interest_rate numeric(5)');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -48,6 +48,50 @@ export function exportFieldComment(comment: string): string {
|
||||
.join('');
|
||||
}
|
||||
|
||||
export function escapeSQLComment(comment: string): string {
|
||||
if (!comment) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Escape single quotes by doubling them
|
||||
let escaped = comment.replace(/'/g, "''");
|
||||
|
||||
// Replace newlines with spaces to prevent breaking SQL syntax
|
||||
// Some databases support multi-line comments with specific syntax,
|
||||
// but for maximum compatibility, we'll replace newlines with spaces
|
||||
escaped = escaped.replace(/[\r\n]+/g, ' ');
|
||||
|
||||
// Trim any excessive whitespace
|
||||
escaped = escaped.replace(/\s+/g, ' ').trim();
|
||||
|
||||
return escaped;
|
||||
}
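// Illustrative example (not part of the original source), following the escaping rules above:
//   escapeSQLComment("It's a\nmulti-line note") -> "It''s a multi-line note"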
|
||||
|
||||
export function formatTableComment(comment: string): string {
|
||||
if (!comment) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Split by newlines and add -- to each line
|
||||
return (
|
||||
comment
|
||||
.split('\n')
|
||||
.map((line) => `-- ${line}`)
|
||||
.join('\n') + '\n'
|
||||
);
|
||||
}
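// Illustrative example (added for clarity): each line of a multi-line comment gets its own `--` prefix:
//   formatTableComment('Stores guilds\nand their members') -> '-- Stores guilds\n-- and their members\n'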
|
||||
|
||||
export function formatMSSQLTableComment(comment: string): string {
|
||||
if (!comment) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// For MSSQL, we use multi-line comment syntax
|
||||
// Escape */ to prevent breaking the comment block
|
||||
const escaped = comment.replace(/\*\//g, '* /');
|
||||
return `/**\n${escaped}\n*/\n`;
|
||||
}
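// Illustrative example (added for clarity): a stray "*/" inside the comment is defused so the
// block comment stays balanced:
//   formatMSSQLTableComment('Legacy table */ do not drop') -> '/**\nLegacy table * / do not drop\n*/\n'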
|
||||
|
||||
export function getInlineFK(table: DBTable, diagram: Diagram): string {
|
||||
if (!diagram.relationships) {
|
||||
return '';
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import {
|
||||
exportFieldComment,
|
||||
formatMSSQLTableComment,
|
||||
isFunction,
|
||||
isKeyword,
|
||||
strHasQuotes,
|
||||
@@ -108,7 +109,7 @@ export function exportMSSQL(diagram: Diagram): string {
|
||||
: `[${table.name}]`;
|
||||
|
||||
return `${
|
||||
table.comments ? `/**\n${table.comments}\n*/\n` : ''
|
||||
table.comments ? formatMSSQLTableComment(table.comments) : ''
|
||||
}CREATE TABLE ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `[${field.name}]`;
|
||||
@@ -231,14 +232,50 @@ export function exportMSSQL(diagram: Diagram): string {
|
||||
return '';
|
||||
}
|
||||
|
||||
const sourceTableName = sourceTable.schema
|
||||
? `[${sourceTable.schema}].[${sourceTable.name}]`
|
||||
: `[${sourceTable.name}]`;
|
||||
const targetTableName = targetTable.schema
|
||||
? `[${targetTable.schema}].[${targetTable.name}]`
|
||||
: `[${targetTable.name}]`;
|
||||
// Determine which table should have the foreign key based on cardinality
|
||||
let fkTable, fkField, refTable, refField;
|
||||
|
||||
return `ALTER TABLE ${sourceTableName}\nADD CONSTRAINT [${r.name}] FOREIGN KEY([${sourceField.name}]) REFERENCES ${targetTableName}([${targetField.name}]);\n`;
|
||||
if (
|
||||
r.sourceCardinality === 'one' &&
|
||||
r.targetCardinality === 'many'
|
||||
) {
|
||||
// FK goes on target table
|
||||
fkTable = targetTable;
|
||||
fkField = targetField;
|
||||
refTable = sourceTable;
|
||||
refField = sourceField;
|
||||
} else if (
|
||||
r.sourceCardinality === 'many' &&
|
||||
r.targetCardinality === 'one'
|
||||
) {
|
||||
// FK goes on source table
|
||||
fkTable = sourceTable;
|
||||
fkField = sourceField;
|
||||
refTable = targetTable;
|
||||
refField = targetField;
|
||||
} else if (
|
||||
r.sourceCardinality === 'one' &&
|
||||
r.targetCardinality === 'one'
|
||||
) {
|
||||
// For 1:1, FK can go on either side, but typically goes on the table that references the other
|
||||
// We'll keep the current behavior for 1:1
|
||||
fkTable = sourceTable;
|
||||
fkField = sourceField;
|
||||
refTable = targetTable;
|
||||
refField = targetField;
|
||||
} else {
|
||||
// Many-to-many relationships need a junction table, skip for now
|
||||
return '';
|
||||
}
|
||||
|
||||
const fkTableName = fkTable.schema
|
||||
? `[${fkTable.schema}].[${fkTable.name}]`
|
||||
: `[${fkTable.name}]`;
|
||||
const refTableName = refTable.schema
|
||||
? `[${refTable.schema}].[${refTable.name}]`
|
||||
: `[${refTable.name}]`;
|
||||
|
||||
return `ALTER TABLE ${fkTableName}\nADD CONSTRAINT [${r.name}] FOREIGN KEY([${fkField.name}]) REFERENCES ${refTableName}([${refField.name}]);\n`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings
|
||||
.join('\n')}`;
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
import {
|
||||
exportFieldComment,
|
||||
escapeSQLComment,
|
||||
formatTableComment,
|
||||
isFunction,
|
||||
isKeyword,
|
||||
strHasQuotes,
|
||||
@@ -215,7 +217,7 @@ export function exportMySQL(diagram: Diagram): string {
|
||||
const primaryKeyFields = table.fields.filter((f) => f.primaryKey);
|
||||
|
||||
return `${
|
||||
table.comments ? `-- ${table.comments}\n` : ''
|
||||
table.comments ? formatTableComment(table.comments) : ''
|
||||
}CREATE TABLE IF NOT EXISTS ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `\`${field.name}\``;
|
||||
@@ -289,7 +291,7 @@ export function exportMySQL(diagram: Diagram): string {
|
||||
|
||||
// MySQL supports inline comments
|
||||
const comment = field.comments
|
||||
? ` COMMENT '${field.comments.replace(/'/g, "''")}'`
|
||||
? ` COMMENT '${escapeSQLComment(field.comments)}'`
|
||||
: '';
|
||||
|
||||
return `${exportFieldComment(field.comments ?? '')} ${fieldName} ${typeWithSize}${notNull}${autoIncrement}${unique}${defaultValue}${comment}`;
|
||||
@@ -304,7 +306,7 @@ export function exportMySQL(diagram: Diagram): string {
|
||||
}\n)${
|
||||
// MySQL supports table comments
|
||||
table.comments
|
||||
? ` COMMENT='${table.comments.replace(/'/g, "''")}'`
|
||||
? ` COMMENT='${escapeSQLComment(table.comments)}'`
|
||||
: ''
|
||||
};\n\n${
|
||||
// Add indexes - MySQL creates them separately from the table definition
|
||||
@@ -423,18 +425,54 @@ export function exportMySQL(diagram: Diagram): string {
|
||||
return '';
|
||||
}
|
||||
|
||||
const sourceTableName = sourceTable.schema
|
||||
? `\`${sourceTable.schema}\`.\`${sourceTable.name}\``
|
||||
: `\`${sourceTable.name}\``;
|
||||
const targetTableName = targetTable.schema
|
||||
? `\`${targetTable.schema}\`.\`${targetTable.name}\``
|
||||
: `\`${targetTable.name}\``;
|
||||
// Determine which table should have the foreign key based on cardinality
|
||||
let fkTable, fkField, refTable, refField;
|
||||
|
||||
if (
|
||||
r.sourceCardinality === 'one' &&
|
||||
r.targetCardinality === 'many'
|
||||
) {
|
||||
// FK goes on target table
|
||||
fkTable = targetTable;
|
||||
fkField = targetField;
|
||||
refTable = sourceTable;
|
||||
refField = sourceField;
|
||||
} else if (
|
||||
r.sourceCardinality === 'many' &&
|
||||
r.targetCardinality === 'one'
|
||||
) {
|
||||
// FK goes on source table
|
||||
fkTable = sourceTable;
|
||||
fkField = sourceField;
|
||||
refTable = targetTable;
|
||||
refField = targetField;
|
||||
} else if (
|
||||
r.sourceCardinality === 'one' &&
|
||||
r.targetCardinality === 'one'
|
||||
) {
|
||||
// For 1:1, FK can go on either side, but typically goes on the table that references the other
|
||||
// We'll keep the current behavior for 1:1
|
||||
fkTable = sourceTable;
|
||||
fkField = sourceField;
|
||||
refTable = targetTable;
|
||||
refField = targetField;
|
||||
} else {
|
||||
// Many-to-many relationships need a junction table, skip for now
|
||||
return '';
|
||||
}
|
||||
|
||||
const fkTableName = fkTable.schema
|
||||
? `\`${fkTable.schema}\`.\`${fkTable.name}\``
|
||||
: `\`${fkTable.name}\``;
|
||||
const refTableName = refTable.schema
|
||||
? `\`${refTable.schema}\`.\`${refTable.name}\``
|
||||
: `\`${refTable.name}\``;
|
||||
|
||||
// Create a descriptive constraint name
|
||||
const constraintName = `\`fk_${sourceTable.name}_${sourceField.name}\``;
|
||||
const constraintName = `\`fk_${fkTable.name}_${fkField.name}\``;
|
||||
|
||||
// MySQL supports ON DELETE and ON UPDATE actions
|
||||
return `ALTER TABLE ${sourceTableName}\nADD CONSTRAINT ${constraintName} FOREIGN KEY(\`${sourceField.name}\`) REFERENCES ${targetTableName}(\`${targetField.name}\`)\nON UPDATE CASCADE ON DELETE RESTRICT;\n`;
|
||||
return `ALTER TABLE ${fkTableName}\nADD CONSTRAINT ${constraintName} FOREIGN KEY(\`${fkField.name}\`) REFERENCES ${refTableName}(\`${refField.name}\`)\nON UPDATE CASCADE ON DELETE RESTRICT;\n`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings
|
||||
.join('\n');
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
import {
|
||||
exportFieldComment,
|
||||
escapeSQLComment,
|
||||
formatTableComment,
|
||||
isFunction,
|
||||
isKeyword,
|
||||
strHasQuotes,
|
||||
@@ -216,7 +218,7 @@ export function exportPostgreSQL(diagram: Diagram): string {
|
||||
const primaryKeyFields = table.fields.filter((f) => f.primaryKey);
|
||||
|
||||
return `${
|
||||
table.comments ? `-- ${table.comments}\n` : ''
|
||||
table.comments ? formatTableComment(table.comments) : ''
|
||||
}CREATE TABLE ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `"${field.name}"`;
|
||||
@@ -311,7 +313,7 @@ export function exportPostgreSQL(diagram: Diagram): string {
|
||||
}\n);\n\n${
|
||||
// Add table comments
|
||||
table.comments
|
||||
? `COMMENT ON TABLE ${tableName} IS '${table.comments.replace(/'/g, "''")}';\n\n`
|
||||
? `COMMENT ON TABLE ${tableName} IS '${escapeSQLComment(table.comments)}';\n\n`
|
||||
: ''
|
||||
}${
|
||||
// Add column comments
|
||||
@@ -319,7 +321,7 @@ export function exportPostgreSQL(diagram: Diagram): string {
|
||||
.filter((f) => f.comments)
|
||||
.map(
|
||||
(f) =>
|
||||
`COMMENT ON COLUMN ${tableName}."${f.name}" IS '${f.comments?.replace(/'/g, "''")}';\n`
|
||||
`COMMENT ON COLUMN ${tableName}."${f.name}" IS '${escapeSQLComment(f.comments || '')}';\n`
|
||||
)
|
||||
.join('')
|
||||
}\n${
|
||||
@@ -415,17 +417,53 @@ export function exportPostgreSQL(diagram: Diagram): string {
|
||||
return '';
|
||||
}
|
||||
|
||||
const sourceTableName = sourceTable.schema
|
||||
? `"${sourceTable.schema}"."${sourceTable.name}"`
|
||||
: `"${sourceTable.name}"`;
|
||||
const targetTableName = targetTable.schema
|
||||
? `"${targetTable.schema}"."${targetTable.name}"`
|
||||
: `"${targetTable.name}"`;
|
||||
// Determine which table should have the foreign key based on cardinality
|
||||
let fkTable, fkField, refTable, refField;
|
||||
|
||||
if (
|
||||
r.sourceCardinality === 'one' &&
|
||||
r.targetCardinality === 'many'
|
||||
) {
|
||||
// FK goes on target table
|
||||
fkTable = targetTable;
|
||||
fkField = targetField;
|
||||
refTable = sourceTable;
|
||||
refField = sourceField;
|
||||
} else if (
|
||||
r.sourceCardinality === 'many' &&
|
||||
r.targetCardinality === 'one'
|
||||
) {
|
||||
// FK goes on source table
|
||||
fkTable = sourceTable;
|
||||
fkField = sourceField;
|
||||
refTable = targetTable;
|
||||
refField = targetField;
|
||||
} else if (
|
||||
r.sourceCardinality === 'one' &&
|
||||
r.targetCardinality === 'one'
|
||||
) {
|
||||
// For 1:1, FK can go on either side, but typically goes on the table that references the other
|
||||
// We'll keep the current behavior for 1:1
|
||||
fkTable = sourceTable;
|
||||
fkField = sourceField;
|
||||
refTable = targetTable;
|
||||
refField = targetField;
|
||||
} else {
|
||||
// Many-to-many relationships need a junction table, skip for now
|
||||
return '';
|
||||
}
|
||||
|
||||
const fkTableName = fkTable.schema
|
||||
? `"${fkTable.schema}"."${fkTable.name}"`
|
||||
: `"${fkTable.name}"`;
|
||||
const refTableName = refTable.schema
|
||||
? `"${refTable.schema}"."${refTable.name}"`
|
||||
: `"${refTable.name}"`;
|
||||
|
||||
// Create a unique constraint name by combining table and field names
|
||||
// Ensure it stays within PostgreSQL's 63-character limit for identifiers
|
||||
// and doesn't get truncated in a way that breaks SQL syntax
|
||||
const baseName = `fk_${sourceTable.name}_${sourceField.name}_${targetTable.name}_${targetField.name}`;
|
||||
const baseName = `fk_${fkTable.name}_${fkField.name}_${refTable.name}_${refField.name}`;
|
||||
// Limit to 60 chars (63 minus quotes) to ensure the whole identifier stays within limits
|
||||
const safeConstraintName =
|
||||
baseName.length > 60
|
||||
@@ -434,7 +472,7 @@ export function exportPostgreSQL(diagram: Diagram): string {
|
||||
|
||||
const constraintName = `"${safeConstraintName}"`;
|
||||
|
||||
return `ALTER TABLE ${sourceTableName}\nADD CONSTRAINT ${constraintName} FOREIGN KEY("${sourceField.name}") REFERENCES ${targetTableName}("${targetField.name}");\n`;
|
||||
return `ALTER TABLE ${fkTableName}\nADD CONSTRAINT ${constraintName} FOREIGN KEY("${fkField.name}") REFERENCES ${refTableName}("${refField.name}");\n`;
|
||||
})
|
||||
.filter(Boolean) // Remove empty strings
|
||||
.join('\n')}`;
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import {
|
||||
exportFieldComment,
|
||||
formatTableComment,
|
||||
isFunction,
|
||||
isKeyword,
|
||||
strHasQuotes,
|
||||
@@ -195,7 +196,7 @@ export function exportSQLite(diagram: Diagram): string {
|
||||
primaryKeyFields[0].type.name.toLowerCase() === 'int');
|
||||
|
||||
return `${schemaComment}${
|
||||
table.comments ? `-- ${table.comments}\n` : ''
|
||||
table.comments ? formatTableComment(table.comments) : ''
|
||||
}CREATE TABLE IF NOT EXISTS ${tableName} (\n${table.fields
|
||||
.map((field: DBField) => {
|
||||
const fieldName = `"${field.name}"`;
|
||||
@@ -346,8 +347,44 @@ export function exportSQLite(diagram: Diagram): string {
|
||||
return;
|
||||
}
|
||||
|
||||
// Determine which table should have the foreign key based on cardinality
|
||||
let fkTable, fkField, refTable, refField;
|
||||
|
||||
if (
|
||||
r.sourceCardinality === 'one' &&
|
||||
r.targetCardinality === 'many'
|
||||
) {
|
||||
// FK goes on target table
|
||||
fkTable = targetTable;
|
||||
fkField = targetField;
|
||||
refTable = sourceTable;
|
||||
refField = sourceField;
|
||||
} else if (
|
||||
r.sourceCardinality === 'many' &&
|
||||
r.targetCardinality === 'one'
|
||||
) {
|
||||
// FK goes on source table
|
||||
fkTable = sourceTable;
|
||||
fkField = sourceField;
|
||||
refTable = targetTable;
|
||||
refField = targetField;
|
||||
} else if (
|
||||
r.sourceCardinality === 'one' &&
|
||||
r.targetCardinality === 'one'
|
||||
) {
|
||||
// For 1:1, FK can go on either side, but typically goes on the table that references the other
|
||||
// We'll keep the current behavior for 1:1
|
||||
fkTable = sourceTable;
|
||||
fkField = sourceField;
|
||||
refTable = targetTable;
|
||||
refField = targetField;
|
||||
} else {
|
||||
// Many-to-many relationships need a junction table, skip for now
|
||||
return;
|
||||
}
|
||||
|
||||
// Create commented out version of what would be ALTER TABLE statement
|
||||
sqlScript += `-- ALTER TABLE "${sourceTable.name}" ADD CONSTRAINT "fk_${sourceTable.name}_${sourceField.name}" FOREIGN KEY("${sourceField.name}") REFERENCES "${targetTable.name}"("${targetField.name}");\n`;
|
||||
sqlScript += `-- ALTER TABLE "${fkTable.name}" ADD CONSTRAINT "fk_${fkTable.name}_${fkField.name}" FOREIGN KEY("${fkField.name}") REFERENCES "${refTable.name}"("${refField.name}");\n`;
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -163,6 +163,12 @@ export const exportBaseSQL = ({
|
||||
: table.name;
|
||||
sqlScript += `CREATE TABLE ${tableName} (\n`;
|
||||
|
||||
// Check for composite primary keys
|
||||
const primaryKeyFields = table.fields.filter(
|
||||
(field) => field.primaryKey
|
||||
);
|
||||
const hasCompositePrimaryKey = primaryKeyFields.length > 1;
|
||||
|
||||
table.fields.forEach((field, index) => {
|
||||
let typeName = simplifyDataType(field.type.name);
|
||||
|
||||
@@ -214,6 +220,15 @@ export const exportBaseSQL = ({
|
||||
typeName = 'text[]';
|
||||
}
|
||||
|
||||
// Handle special types
|
||||
if (
|
||||
typeName.toLowerCase() === 'char' &&
|
||||
!field.characterMaximumLength
|
||||
) {
|
||||
// Default char without length to char(1)
|
||||
typeName = 'char';
|
||||
}
|
||||
|
||||
sqlScript += ` ${field.name} ${typeName}`;
|
||||
|
||||
// Add size for character types
|
||||
@@ -225,6 +240,12 @@ export const exportBaseSQL = ({
|
||||
} else if (field.type.name.toLowerCase().includes('varchar')) {
|
||||
// Keep varchar sizing, but don't apply to TEXT (previously enum)
|
||||
sqlScript += `(500)`;
|
||||
} else if (
|
||||
typeName.toLowerCase() === 'char' &&
|
||||
!field.characterMaximumLength
|
||||
) {
|
||||
// Default char without explicit length to char(1) for compatibility
|
||||
sqlScript += `(1)`;
|
||||
}
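// Illustrative effect (added for clarity): a bare `char` column is emitted as `char(1)`,
// while a column with characterMaximumLength '2' keeps `char(2)` via the branch above.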
|
||||
|
||||
// Add precision and scale for numeric types
|
||||
@@ -249,49 +270,63 @@ export const exportBaseSQL = ({
|
||||
// Temporarily strip user-defined default values when present
|
||||
let fieldDefault = field.default;
|
||||
|
||||
// Remove the type cast part after :: if it exists
|
||||
if (fieldDefault.includes('::')) {
|
||||
const endedWithParentheses = fieldDefault.endsWith(')');
|
||||
fieldDefault = fieldDefault.split('::')[0];
|
||||
// Skip invalid default values for DBML export
|
||||
if (
|
||||
fieldDefault === 'has default' ||
|
||||
fieldDefault === 'DEFAULT has default'
|
||||
) {
|
||||
// Skip this default value as it's invalid SQL
|
||||
} else {
|
||||
// Remove the type cast part after :: if it exists
|
||||
if (fieldDefault.includes('::')) {
|
||||
const endedWithParentheses = fieldDefault.endsWith(')');
|
||||
fieldDefault = fieldDefault.split('::')[0];
|
||||
|
||||
if (
|
||||
(fieldDefault.startsWith('(') &&
|
||||
!fieldDefault.endsWith(')')) ||
|
||||
endedWithParentheses
|
||||
) {
|
||||
fieldDefault += ')';
|
||||
if (
|
||||
(fieldDefault.startsWith('(') &&
|
||||
!fieldDefault.endsWith(')')) ||
|
||||
endedWithParentheses
|
||||
) {
|
||||
fieldDefault += ')';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (fieldDefault === `('now')`) {
|
||||
fieldDefault = `now()`;
|
||||
}
|
||||
if (fieldDefault === `('now')`) {
|
||||
fieldDefault = `now()`;
|
||||
}
|
||||
|
||||
sqlScript += ` DEFAULT ${fieldDefault}`;
|
||||
sqlScript += ` DEFAULT ${fieldDefault}`;
|
||||
}
|
||||
}
|
||||
|
||||
// Handle PRIMARY KEY constraint
|
||||
if (field.primaryKey) {
|
||||
// Handle PRIMARY KEY constraint - only add inline if not composite
|
||||
if (field.primaryKey && !hasCompositePrimaryKey) {
|
||||
sqlScript += ' PRIMARY KEY';
|
||||
}
|
||||
|
||||
// Add a comma after each field except the last one
|
||||
if (index < table.fields.length - 1) {
|
||||
// Add a comma after each field except the last one (or before composite primary key)
|
||||
if (index < table.fields.length - 1 || hasCompositePrimaryKey) {
|
||||
sqlScript += ',\n';
|
||||
}
|
||||
});
|
||||
|
||||
// Add composite primary key constraint if needed
|
||||
if (hasCompositePrimaryKey) {
|
||||
const pkFieldNames = primaryKeyFields.map((f) => f.name).join(', ');
|
||||
sqlScript += `\n PRIMARY KEY (${pkFieldNames})`;
|
||||
}
|
||||
|
||||
sqlScript += '\n);\n\n';
|
||||
|
||||
// Add table comment
|
||||
if (table.comments) {
|
||||
sqlScript += `COMMENT ON TABLE ${tableName} IS '${table.comments}';\n`;
|
||||
sqlScript += `COMMENT ON TABLE ${tableName} IS '${table.comments.replace(/'/g, "''")}';\n`;
|
||||
}
|
||||
|
||||
table.fields.forEach((field) => {
|
||||
// Add column comment
|
||||
if (field.comments) {
|
||||
sqlScript += `COMMENT ON COLUMN ${tableName}.${field.name} IS '${field.comments}';\n`;
|
||||
sqlScript += `COMMENT ON COLUMN ${tableName}.${field.name} IS '${field.comments.replace(/'/g, "''")}';\n`;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -338,13 +373,52 @@ export const exportBaseSQL = ({
|
||||
sourceTableField &&
|
||||
targetTableField
|
||||
) {
|
||||
const sourceTableName = sourceTable.schema
|
||||
? `${sourceTable.schema}.${sourceTable.name}`
|
||||
: sourceTable.name;
|
||||
const targetTableName = targetTable.schema
|
||||
? `${targetTable.schema}.${targetTable.name}`
|
||||
: targetTable.name;
|
||||
sqlScript += `ALTER TABLE ${sourceTableName} ADD CONSTRAINT ${relationship.name} FOREIGN KEY (${sourceTableField.name}) REFERENCES ${targetTableName} (${targetTableField.name});\n`;
|
||||
// Determine which table should have the foreign key based on cardinality
|
||||
// In a 1:many relationship, the foreign key goes on the "many" side
|
||||
// If source is "one" and target is "many", FK goes on target table
|
||||
// If source is "many" and target is "one", FK goes on source table
|
||||
let fkTable, fkField, refTable, refField;
|
||||
|
||||
if (
|
||||
relationship.sourceCardinality === 'one' &&
|
||||
relationship.targetCardinality === 'many'
|
||||
) {
|
||||
// FK goes on target table
|
||||
fkTable = targetTable;
|
||||
fkField = targetTableField;
|
||||
refTable = sourceTable;
|
||||
refField = sourceTableField;
|
||||
} else if (
|
||||
relationship.sourceCardinality === 'many' &&
|
||||
relationship.targetCardinality === 'one'
|
||||
) {
|
||||
// FK goes on source table
|
||||
fkTable = sourceTable;
|
||||
fkField = sourceTableField;
|
||||
refTable = targetTable;
|
||||
refField = targetTableField;
|
||||
} else if (
|
||||
relationship.sourceCardinality === 'one' &&
|
||||
relationship.targetCardinality === 'one'
|
||||
) {
|
||||
// For 1:1, FK can go on either side, but typically goes on the table that references the other
|
||||
// We'll keep the current behavior for 1:1
|
||||
fkTable = sourceTable;
|
||||
fkField = sourceTableField;
|
||||
refTable = targetTable;
|
||||
refField = targetTableField;
|
||||
} else {
|
||||
// Many-to-many relationships need a junction table, skip for now
|
||||
return;
|
||||
}
|
||||
|
||||
const fkTableName = fkTable.schema
|
||||
? `${fkTable.schema}.${fkTable.name}`
|
||||
: fkTable.name;
|
||||
const refTableName = refTable.schema
|
||||
? `${refTable.schema}.${refTable.name}`
|
||||
: refTable.name;
|
||||
sqlScript += `ALTER TABLE ${fkTableName} ADD CONSTRAINT ${relationship.name} FOREIGN KEY (${fkField.name}) REFERENCES ${refTableName} (${refField.name});\n`;
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
126
src/lib/data/import-metadata/filter-metadata.ts
Normal file
@@ -0,0 +1,126 @@
|
||||
import type { DatabaseMetadata } from './metadata-types/database-metadata';
|
||||
import { schemaNameToDomainSchemaName } from '@/lib/domain/db-schema';
|
||||
|
||||
export interface SelectedTable {
|
||||
schema?: string | null;
|
||||
table: string;
|
||||
type: 'table' | 'view';
|
||||
}
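// Illustrative input (table names are made up): the caller might pass
//   [{ schema: 'public', table: 'wizards', type: 'table' },
//    { schema: 'public', table: 'active_wizards', type: 'view' }]
// and only metadata belonging to those objects is kept.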
|
||||
|
||||
export function filterMetadataByTables({
|
||||
metadata,
|
||||
selectedTables: inputSelectedTables,
|
||||
}: {
|
||||
metadata: DatabaseMetadata;
|
||||
selectedTables: SelectedTable[];
|
||||
}): DatabaseMetadata {
|
||||
const selectedTables = inputSelectedTables.map((st) => {
|
||||
// Normalize schema names to ensure consistent filtering
|
||||
const schema = schemaNameToDomainSchemaName(st.schema) ?? '';
|
||||
return {
|
||||
...st,
|
||||
schema,
|
||||
};
|
||||
});
|
||||
|
||||
// Create sets for faster lookup
|
||||
const selectedTableSet = new Set(
|
||||
selectedTables
|
||||
.filter((st) => st.type === 'table')
|
||||
.map((st) => `${st.schema}.${st.table}`)
|
||||
);
|
||||
const selectedViewSet = new Set(
|
||||
selectedTables
|
||||
.filter((st) => st.type === 'view')
|
||||
.map((st) => `${st.schema}.${st.table}`)
|
||||
);
|
||||
|
||||
// Filter tables
|
||||
const filteredTables = metadata.tables.filter((table) => {
|
||||
const schema = schemaNameToDomainSchemaName(table.schema) ?? '';
|
||||
const tableId = `${schema}.${table.table}`;
|
||||
return selectedTableSet.has(tableId);
|
||||
});
|
||||
|
||||
// Filter views - include views that were explicitly selected
|
||||
const filteredViews =
|
||||
metadata.views?.filter((view) => {
|
||||
const schema = schemaNameToDomainSchemaName(view.schema) ?? '';
|
||||
const viewName = view.view_name ?? '';
|
||||
const viewId = `${schema}.${viewName}`;
|
||||
return selectedViewSet.has(viewId);
|
||||
}) || [];
|
||||
|
||||
// Filter columns - include columns from both tables and views
|
||||
const filteredColumns = metadata.columns.filter((col) => {
|
||||
const fromTable = filteredTables.some(
|
||||
(tb) => tb.schema === col.schema && tb.table === col.table
|
||||
);
|
||||
// For views, the column.table field might contain the view name
|
||||
const fromView = filteredViews.some(
|
||||
(view) => view.schema === col.schema && view.view_name === col.table
|
||||
);
|
||||
return fromTable || fromView;
|
||||
});
|
||||
|
||||
// Filter primary keys
|
||||
const filteredPrimaryKeys = metadata.pk_info.filter((pk) =>
|
||||
filteredTables.some(
|
||||
(tb) => tb.schema === pk.schema && tb.table === pk.table
|
||||
)
|
||||
);
|
||||
|
||||
// Filter indexes
|
||||
const filteredIndexes = metadata.indexes.filter((idx) =>
|
||||
filteredTables.some(
|
||||
(tb) => tb.schema === idx.schema && tb.table === idx.table
|
||||
)
|
||||
);
|
||||
|
||||
// Filter foreign keys - include if either source or target table is selected
|
||||
// This ensures all relationships related to selected tables are preserved
|
||||
const filteredForeignKeys = metadata.fk_info.filter((fk) => {
|
||||
// Handle reference_schema and reference_table fields from the JSON
|
||||
const targetSchema = fk.reference_schema;
|
||||
const targetTable = (fk.reference_table || '').replace(/^"+|"+$/g, ''); // Remove extra quotes
|
||||
|
||||
const sourceIncluded = filteredTables.some(
|
||||
(tb) => tb.schema === fk.schema && tb.table === fk.table
|
||||
);
|
||||
const targetIncluded = filteredTables.some(
|
||||
(tb) => tb.schema === targetSchema && tb.table === targetTable
|
||||
);
|
||||
return sourceIncluded || targetIncluded;
|
||||
});
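// Note (added for clarity): keeping a FK when only one side is selected preserves every
// relationship that touches the selection; referenced tables that were not selected are
// not pulled back in by this filter.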
|
||||
|
||||
const schemasWithTables = new Set(filteredTables.map((tb) => tb.schema));
|
||||
const schemasWithViews = new Set(filteredViews.map((view) => view.schema));
|
||||
|
||||
// Filter custom types if they exist
|
||||
const filteredCustomTypes =
|
||||
metadata.custom_types?.filter((customType) => {
|
||||
// Also check if the type is used by any of the selected tables' columns
|
||||
const typeUsedInColumns = filteredColumns.some(
|
||||
(col) =>
|
||||
col.type === customType.type ||
|
||||
col.type.includes(customType.type) // Handle array types like "custom_type[]"
|
||||
);
|
||||
|
||||
return (
|
||||
schemasWithTables.has(customType.schema) ||
|
||||
schemasWithViews.has(customType.schema) ||
|
||||
typeUsedInColumns
|
||||
);
|
||||
}) || [];
|
||||
|
||||
return {
|
||||
...metadata,
|
||||
tables: filteredTables,
|
||||
columns: filteredColumns,
|
||||
pk_info: filteredPrimaryKeys,
|
||||
indexes: filteredIndexes,
|
||||
fk_info: filteredForeignKeys,
|
||||
views: filteredViews,
|
||||
custom_types: filteredCustomTypes,
|
||||
};
|
||||
}
|
||||
@@ -1,4 +1,3 @@
|
||||
import { schemaNameToDomainSchemaName } from '@/lib/domain/db-schema';
|
||||
import type { TableInfo } from './table-info';
|
||||
import { z } from 'zod';
|
||||
|
||||
@@ -33,20 +32,12 @@ export type AggregatedIndexInfo = Omit<IndexInfo, 'column'> & {
|
||||
};
|
||||
|
||||
export const createAggregatedIndexes = ({
|
||||
tableInfo,
|
||||
tableSchema,
|
||||
indexes,
|
||||
tableIndexes,
|
||||
}: {
|
||||
tableInfo: TableInfo;
|
||||
indexes: IndexInfo[];
|
||||
tableIndexes: IndexInfo[];
|
||||
tableSchema?: string;
|
||||
}): AggregatedIndexInfo[] => {
|
||||
const tableIndexes = indexes.filter((idx) => {
|
||||
const indexSchema = schemaNameToDomainSchemaName(idx.schema);
|
||||
|
||||
return idx.table === tableInfo.table && indexSchema === tableSchema;
|
||||
});
|
||||
|
||||
return Object.values(
|
||||
tableIndexes.reduce(
|
||||
(acc, idx) => {
|
||||
|
||||
132
src/lib/data/sql-import/__tests__/sql-validator-autofix.test.ts
Normal file
@@ -0,0 +1,132 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { validateSQL } from '../sql-validator';
|
||||
import { DatabaseType } from '@/lib/domain';
|
||||
|
||||
describe('SQL Validator Auto-fix', () => {
|
||||
it('should provide auto-fix for cast operator errors', () => {
|
||||
const sql = `
|
||||
CREATE TABLE dragons (
|
||||
id UUID PRIMARY KEY,
|
||||
lair_location GEOGRAPHY(POINT, 4326)
|
||||
);
|
||||
|
||||
-- Problematic queries with cast operator errors
|
||||
SELECT id: :text FROM dragons;
|
||||
SELECT ST_X(lair_location: :geometry) AS longitude FROM dragons;
|
||||
`;
|
||||
|
||||
const result = validateSQL(sql, DatabaseType.POSTGRESQL);
|
||||
|
||||
// Should detect errors
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors.length).toBeGreaterThan(0);
|
||||
|
||||
// Should provide fixed SQL
|
||||
expect(result.fixedSQL).toBeDefined();
|
||||
|
||||
// Fixed SQL should have correct cast operators
|
||||
expect(result.fixedSQL).toContain('::text');
|
||||
expect(result.fixedSQL).toContain('::geometry');
|
||||
expect(result.fixedSQL).not.toContain(': :');
|
||||
|
||||
// The CREATE TABLE should remain intact
|
||||
expect(result.fixedSQL).toContain('GEOGRAPHY(POINT, 4326)');
|
||||
});
|
||||
|
||||
it('should handle multi-line cast operator errors', () => {
|
||||
const sql = `
|
||||
SELECT AVG(power_level): :DECIMAL(3,
|
||||
2) FROM enchantments;
|
||||
`;
|
||||
|
||||
const result = validateSQL(sql, DatabaseType.POSTGRESQL);
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.fixedSQL).toBeDefined();
|
||||
expect(result.fixedSQL).toContain('::DECIMAL(3,');
|
||||
expect(result.fixedSQL).not.toContain(': :');
|
||||
});
|
||||
|
||||
it('should auto-fix split DECIMAL declarations', () => {
|
||||
const sql = `
|
||||
CREATE TABLE potions (
|
||||
id INTEGER PRIMARY KEY,
|
||||
strength DECIMAL(10,
|
||||
2) NOT NULL,
|
||||
effectiveness NUMERIC(5,
|
||||
3) DEFAULT 0.000
|
||||
);`;
|
||||
|
||||
const result = validateSQL(sql, DatabaseType.POSTGRESQL);
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors.length).toBeGreaterThan(0);
|
||||
|
||||
// Should provide fixed SQL
|
||||
expect(result.fixedSQL).toBeDefined();
|
||||
|
||||
// Fixed SQL should have DECIMAL on one line
|
||||
expect(result.fixedSQL).toContain('DECIMAL(10,2)');
|
||||
expect(result.fixedSQL).toContain('NUMERIC(5,3)');
|
||||
expect(result.fixedSQL).not.toMatch(
|
||||
/DECIMAL\s*\(\s*\d+\s*,\s*\n\s*\d+\s*\)/
|
||||
);
|
||||
|
||||
// Should have warning about auto-fix
|
||||
expect(
|
||||
result.warnings.some((w) =>
|
||||
w.message.includes('Auto-fixed split DECIMAL/NUMERIC')
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle multiple auto-fixes together', () => {
|
||||
const sql = `
|
||||
CREATE TABLE enchantments (
|
||||
id INTEGER PRIMARY KEY,
|
||||
power_level DECIMAL(10,
|
||||
2) NOT NULL,
|
||||
magic_type VARCHAR(50)
|
||||
);
|
||||
|
||||
SELECT AVG(power_level): :DECIMAL(3,
|
||||
2) FROM enchantments;
|
||||
`;
|
||||
|
||||
const result = validateSQL(sql, DatabaseType.POSTGRESQL);
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.fixedSQL).toBeDefined();
|
||||
|
||||
// Should fix both issues
|
||||
expect(result.fixedSQL).toContain('DECIMAL(10,2)');
|
||||
expect(result.fixedSQL).toContain('::DECIMAL(3,');
|
||||
expect(result.fixedSQL).not.toContain(': :');
|
||||
|
||||
// Should have warnings for both fixes
|
||||
expect(
|
||||
result.warnings.some((w) =>
|
||||
w.message.includes('Auto-fixed cast operator')
|
||||
)
|
||||
).toBe(true);
|
||||
expect(
|
||||
result.warnings.some((w) =>
|
||||
w.message.includes('Auto-fixed split DECIMAL/NUMERIC')
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should preserve original SQL when no errors', () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (
|
||||
id UUID PRIMARY KEY,
|
||||
name VARCHAR(100)
|
||||
);`;
|
||||
|
||||
const result = validateSQL(sql, DatabaseType.POSTGRESQL);
|
||||
|
||||
expect(result.isValid).toBe(true);
|
||||
expect(result.errors).toHaveLength(0);
|
||||
expect(result.fixedSQL).toBeUndefined();
|
||||
});
|
||||
});
|
||||
145
src/lib/data/sql-import/__tests__/sql-validator.test.ts
Normal file
@@ -0,0 +1,145 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { validateSQL } from '../sql-validator';
|
||||
import { DatabaseType } from '@/lib/domain';
|
||||
|
||||
describe('SQL Validator', () => {
|
||||
it('should detect cast operator errors (: :)', () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (
|
||||
id UUID PRIMARY KEY,
|
||||
spellbook JSONB,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
SELECT id: :text FROM wizards;
|
||||
SELECT COUNT(*): :integer FROM wizards;
|
||||
`;
|
||||
|
||||
const result = validateSQL(sql, DatabaseType.POSTGRESQL);
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors).toHaveLength(2);
|
||||
expect(result.errors[0].message).toContain('Invalid cast operator');
|
||||
expect(result.errors[0].suggestion).toBe('Replace ": :" with "::"');
|
||||
expect(result.fixedSQL).toBeDefined();
|
||||
expect(result.fixedSQL).toContain('::text');
|
||||
expect(result.fixedSQL).toContain('::integer');
|
||||
});
|
||||
|
||||
it('should detect split DECIMAL declarations', () => {
|
||||
const sql = `
|
||||
CREATE TABLE potions (
|
||||
id INTEGER PRIMARY KEY,
|
||||
power_level DECIMAL(10,
|
||||
2) NOT NULL
|
||||
);`;
|
||||
|
||||
const result = validateSQL(sql, DatabaseType.POSTGRESQL);
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(
|
||||
result.errors.some((e) =>
|
||||
e.message.includes('DECIMAL type declaration is split')
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should warn about extensions', () => {
|
||||
const sql = `
|
||||
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
|
||||
CREATE EXTENSION postgis;
|
||||
CREATE TABLE dragons (id UUID PRIMARY KEY);
|
||||
`;
|
||||
|
||||
const result = validateSQL(sql, DatabaseType.POSTGRESQL);
|
||||
|
||||
expect(
|
||||
result.warnings.some((w) => w.message.includes('CREATE EXTENSION'))
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should warn about functions and triggers', () => {
|
||||
const sql = `
|
||||
CREATE OR REPLACE FUNCTION update_timestamp()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
NEW.updated_at = CURRENT_TIMESTAMP;
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE TRIGGER update_wizards_timestamp
|
||||
BEFORE UPDATE ON wizards
|
||||
FOR EACH ROW EXECUTE FUNCTION update_timestamp();
|
||||
`;
|
||||
|
||||
const result = validateSQL(sql, DatabaseType.POSTGRESQL);
|
||||
|
||||
expect(
|
||||
result.warnings.some((w) =>
|
||||
w.message.includes('Function definitions')
|
||||
)
|
||||
).toBe(true);
|
||||
expect(
|
||||
result.warnings.some((w) =>
|
||||
w.message.includes('Trigger definitions')
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should validate clean SQL as valid', () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
magic_email VARCHAR(255) UNIQUE NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE spells (
|
||||
id SERIAL PRIMARY KEY,
|
||||
wizard_id UUID REFERENCES wizards(id),
|
||||
name VARCHAR(200) NOT NULL,
|
||||
incantation TEXT
|
||||
);
|
||||
`;
|
||||
|
||||
const result = validateSQL(sql, DatabaseType.POSTGRESQL);
|
||||
|
||||
expect(result.isValid).toBe(true);
|
||||
expect(result.errors).toHaveLength(0);
|
||||
expect(result.fixedSQL).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should handle the fifth example file issues', () => {
|
||||
const sql = `
|
||||
-- Sample from the problematic file
|
||||
UPDATE magic_towers
|
||||
SET
|
||||
power_average = (
|
||||
SELECT AVG(power): :DECIMAL(3,
|
||||
2)
|
||||
FROM enchantments
|
||||
WHERE tower_id = NEW.tower_id
|
||||
);
|
||||
|
||||
SELECT
|
||||
ST_X(t.location: :geometry) AS longitude,
|
||||
ST_Y(t.location: :geometry) AS latitude
|
||||
FROM towers t;
|
||||
`;
|
||||
|
||||
const result = validateSQL(sql, DatabaseType.POSTGRESQL);
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
// Should find multiple cast operator errors
|
||||
expect(
|
||||
result.errors.filter((e) =>
|
||||
e.message.includes('Invalid cast operator')
|
||||
).length
|
||||
).toBeGreaterThan(0);
|
||||
expect(result.fixedSQL).toBeDefined();
|
||||
expect(result.fixedSQL).not.toContain(': :');
|
||||
expect(result.fixedSQL).toContain('::DECIMAL');
|
||||
expect(result.fixedSQL).toContain('::geometry');
|
||||
});
|
||||
});
|
||||
@@ -3,10 +3,13 @@ import { generateDiagramId, generateId } from '@/lib/utils';
|
||||
import type { DBTable } from '@/lib/domain/db-table';
|
||||
import type { Cardinality, DBRelationship } from '@/lib/domain/db-relationship';
|
||||
import type { DBField } from '@/lib/domain/db-field';
|
||||
import type { DBIndex } from '@/lib/domain/db-index';
|
||||
import type { DataType } from '@/lib/data/data-types/data-types';
|
||||
import { genericDataTypes } from '@/lib/data/data-types/generic-data-types';
|
||||
import { randomColor } from '@/lib/colors';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
import type { DBCustomType } from '@/lib/domain/db-custom-type';
|
||||
import { DBCustomTypeKind } from '@/lib/domain/db-custom-type';
|
||||
|
||||
// Common interfaces for SQL entities
|
||||
export interface SQLColumn {
|
||||
@@ -62,6 +65,7 @@ export interface SQLParserResult {
|
||||
relationships: SQLForeignKey[];
|
||||
types?: SQLCustomType[];
|
||||
enums?: SQLEnumType[];
|
||||
warnings?: string[];
|
||||
}
|
||||
|
||||
// Define more specific types for SQL AST nodes
|
||||
@@ -543,6 +547,18 @@ export function convertToChartDBDiagram(
|
||||
) {
|
||||
// Ensure integer types are preserved
|
||||
mappedType = { id: 'integer', name: 'integer' };
|
||||
} else if (
|
||||
sourceDatabaseType === DatabaseType.POSTGRESQL &&
|
||||
parserResult.enums &&
|
||||
parserResult.enums.some(
|
||||
(e) => e.name.toLowerCase() === column.type.toLowerCase()
|
||||
)
|
||||
) {
|
||||
// If the column type matches a custom enum type, preserve it
|
||||
mappedType = {
|
||||
id: column.type.toLowerCase(),
|
||||
name: column.type,
|
||||
};
|
||||
} else {
|
||||
// Use the standard mapping for other types
|
||||
mappedType = mapSQLTypeToGenericType(
|
||||
@@ -588,25 +604,38 @@ export function convertToChartDBDiagram(
|
||||
});
|
||||
|
||||
// Create indexes
|
||||
const indexes = table.indexes.map((sqlIndex) => {
|
||||
const fieldIds = sqlIndex.columns.map((columnName) => {
|
||||
const field = fields.find((f) => f.name === columnName);
|
||||
if (!field) {
|
||||
throw new Error(
|
||||
`Index references non-existent column: ${columnName}`
|
||||
);
|
||||
}
|
||||
return field.id;
|
||||
});
|
||||
const indexes = table.indexes
|
||||
.map((sqlIndex) => {
|
||||
const fieldIds = sqlIndex.columns
|
||||
.map((columnName) => {
|
||||
const field = fields.find((f) => f.name === columnName);
|
||||
if (!field) {
|
||||
console.warn(
|
||||
`Index ${sqlIndex.name} references non-existent column: ${columnName} in table ${table.name}. Skipping this column.`
|
||||
);
|
||||
return null;
|
||||
}
|
||||
return field.id;
|
||||
})
|
||||
.filter((id): id is string => id !== null);
|
||||
|
||||
return {
|
||||
id: generateId(),
|
||||
name: sqlIndex.name,
|
||||
fieldIds,
|
||||
unique: sqlIndex.unique,
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
});
|
||||
// Only create index if at least one column was found
|
||||
if (fieldIds.length === 0) {
|
||||
console.warn(
|
||||
`Index ${sqlIndex.name} has no valid columns. Skipping index.`
|
||||
);
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
id: generateId(),
|
||||
name: sqlIndex.name,
|
||||
fieldIds,
|
||||
unique: sqlIndex.unique,
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
})
|
||||
.filter((idx): idx is DBIndex => idx !== null);
|
||||
|
||||
return {
|
||||
id: newId,
|
||||
@@ -708,12 +737,29 @@ export function convertToChartDBDiagram(
|
||||
});
|
||||
});
|
||||
|
||||
// Convert SQL enum types to ChartDB custom types
|
||||
const customTypes: DBCustomType[] = [];
|
||||
|
||||
if (parserResult.enums) {
|
||||
parserResult.enums.forEach((enumType, index) => {
|
||||
customTypes.push({
|
||||
id: generateId(),
|
||||
name: enumType.name,
|
||||
schema: 'public', // Default to public schema for now
|
||||
kind: DBCustomTypeKind.enum,
|
||||
values: enumType.values,
|
||||
order: index,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
const diagram = {
|
||||
id: generateDiagramId(),
|
||||
name: `SQL Import (${sourceDatabaseType})`,
|
||||
databaseType: targetDatabaseType,
|
||||
tables,
|
||||
relationships,
|
||||
customTypes: customTypes.length > 0 ? customTypes : undefined,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
@@ -0,0 +1,458 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('PostgreSQL Core Parser Tests', () => {
|
||||
it('should parse basic tables', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('wizards');
|
||||
expect(result.tables[0].columns).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should parse foreign key relationships', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE guilds (id INTEGER PRIMARY KEY);
|
||||
CREATE TABLE mages (
|
||||
id INTEGER PRIMARY KEY,
|
||||
guild_id INTEGER REFERENCES guilds(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
expect(result.relationships[0].sourceTable).toBe('mages');
|
||||
expect(result.relationships[0].targetTable).toBe('guilds');
|
||||
});
|
||||
|
||||
it('should skip functions with warnings', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE test_table (id INTEGER PRIMARY KEY);
|
||||
|
||||
CREATE FUNCTION test_func() RETURNS VOID AS $$
|
||||
BEGIN
|
||||
NULL;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(result.warnings!.some((w) => w.includes('Function'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle tables that fail to parse', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE valid_table (id INTEGER PRIMARY KEY);
|
||||
|
||||
-- This table has syntax that might fail parsing
|
||||
CREATE TABLE complex_table (
|
||||
id INTEGER PRIMARY KEY,
|
||||
value NUMERIC(10,
|
||||
2) GENERATED ALWAYS AS (1 + 1) STORED
|
||||
);
|
||||
|
||||
CREATE TABLE another_valid (
|
||||
id INTEGER PRIMARY KEY,
|
||||
complex_ref INTEGER REFERENCES complex_table(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
// Should find all 3 tables even if complex_table fails to parse
|
||||
expect(result.tables).toHaveLength(3);
|
||||
expect(result.tables.map((t) => t.name).sort()).toEqual([
|
||||
'another_valid',
|
||||
'complex_table',
|
||||
'valid_table',
|
||||
]);
|
||||
|
||||
// Should still find the foreign key relationship
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'another_valid' &&
|
||||
r.targetTable === 'complex_table'
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should parse the magical academy system fixture', async () => {
|
||||
const sql = `-- Magical Academy System Database Schema
|
||||
-- This is a test fixture representing a typical magical academy system
|
||||
|
||||
CREATE TABLE magic_schools(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name text NOT NULL,
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
updated_at timestamptz NOT NULL DEFAULT now()
|
||||
);
|
||||
|
||||
CREATE TABLE towers(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES magic_schools(id) ON DELETE CASCADE,
|
||||
name text NOT NULL,
|
||||
location text,
|
||||
crystal_frequency varchar(20),
|
||||
created_at timestamptz NOT NULL DEFAULT now()
|
||||
);
|
||||
|
||||
CREATE TABLE magical_ranks(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES magic_schools(id) ON DELETE CASCADE,
|
||||
name text NOT NULL,
|
||||
description text,
|
||||
is_system boolean NOT NULL DEFAULT false,
|
||||
created_at timestamptz NOT NULL DEFAULT now()
|
||||
);
|
||||
|
||||
CREATE TABLE spell_permissions(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
spell_school text NOT NULL,
|
||||
spell_action text NOT NULL,
|
||||
description text,
|
||||
UNIQUE (spell_school, spell_action)
|
||||
);
|
||||
|
||||
CREATE TABLE rank_permissions(
|
||||
rank_id uuid NOT NULL REFERENCES magical_ranks(id) ON DELETE CASCADE,
|
||||
permission_id uuid NOT NULL REFERENCES spell_permissions(id) ON DELETE CASCADE,
|
||||
granted_at timestamptz NOT NULL DEFAULT now(),
|
||||
PRIMARY KEY (rank_id, permission_id)
|
||||
);
|
||||
|
||||
CREATE TABLE grimoire_types(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES magic_schools(id) ON DELETE CASCADE,
|
||||
name text NOT NULL,
|
||||
description text,
|
||||
is_active boolean NOT NULL DEFAULT true
|
||||
);
|
||||
|
||||
CREATE TABLE wizards(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES magic_schools(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
username text NOT NULL,
|
||||
email text NOT NULL,
|
||||
password_hash text NOT NULL,
|
||||
first_name text NOT NULL,
|
||||
last_name text NOT NULL,
|
||||
is_active boolean NOT NULL DEFAULT true,
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
UNIQUE (school_id, username),
|
||||
UNIQUE (email)
|
||||
);
|
||||
|
||||
-- This function should not prevent the next table from being parsed
|
||||
CREATE FUNCTION enforce_wizard_tower_school()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (
|
||||
SELECT 1 FROM towers
|
||||
WHERE id = NEW.tower_id AND school_id = NEW.school_id
|
||||
) THEN
|
||||
RAISE EXCEPTION 'Tower does not belong to magic school';
|
||||
END IF;
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE TABLE wizard_ranks(
|
||||
wizard_id uuid NOT NULL REFERENCES wizards(id) ON DELETE CASCADE,
|
||||
rank_id uuid NOT NULL REFERENCES magical_ranks(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
assigned_at timestamptz NOT NULL DEFAULT now(),
|
||||
assigned_by uuid REFERENCES wizards(id),
|
||||
PRIMARY KEY (wizard_id, rank_id, tower_id)
|
||||
);
|
||||
|
||||
CREATE TABLE apprentices(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES magic_schools(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
apprentice_id text NOT NULL, -- Magical Apprentice Identifier
|
||||
first_name text NOT NULL,
|
||||
last_name text NOT NULL,
|
||||
date_of_birth date NOT NULL,
|
||||
magical_affinity varchar(10),
|
||||
email text,
|
||||
crystal_phone varchar(20),
|
||||
dormitory text,
|
||||
emergency_contact jsonb,
|
||||
patron_info jsonb,
|
||||
primary_mentor uuid REFERENCES wizards(id),
|
||||
referring_wizard uuid REFERENCES wizards(id),
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
UNIQUE (school_id, apprentice_id)
|
||||
);
|
||||
|
||||
CREATE TABLE spell_lessons(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES magic_schools(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
apprentice_id uuid NOT NULL REFERENCES apprentices(id) ON DELETE CASCADE,
|
||||
instructor_id uuid NOT NULL REFERENCES wizards(id),
|
||||
lesson_date timestamptz NOT NULL,
|
||||
duration_minutes integer NOT NULL DEFAULT 30,
|
||||
status text NOT NULL DEFAULT 'scheduled',
|
||||
notes text,
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
created_by uuid NOT NULL REFERENCES wizards(id),
|
||||
CONSTRAINT valid_status CHECK (status IN ('scheduled', 'confirmed', 'in_progress', 'completed', 'cancelled', 'no_show'))
|
||||
);
|
||||
|
||||
CREATE TABLE grimoires(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES magic_schools(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
apprentice_id uuid NOT NULL REFERENCES apprentices(id) ON DELETE CASCADE,
|
||||
lesson_id uuid REFERENCES spell_lessons(id),
|
||||
grimoire_type_id uuid NOT NULL REFERENCES grimoire_types(id),
|
||||
instructor_id uuid NOT NULL REFERENCES wizards(id),
|
||||
content jsonb NOT NULL,
|
||||
enchantments jsonb,
|
||||
is_sealed boolean NOT NULL DEFAULT false,
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
updated_at timestamptz NOT NULL DEFAULT now()
|
||||
);
|
||||
|
||||
CREATE TABLE tuition_scrolls(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES magic_schools(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
apprentice_id uuid NOT NULL REFERENCES apprentices(id) ON DELETE CASCADE,
|
||||
scroll_number text NOT NULL,
|
||||
scroll_date date NOT NULL DEFAULT CURRENT_DATE,
|
||||
due_date date NOT NULL,
|
||||
subtotal numeric(10,2) NOT NULL,
|
||||
magical_tax numeric(10,2) NOT NULL DEFAULT 0,
|
||||
scholarship_amount numeric(10,2) NOT NULL DEFAULT 0,
|
||||
total_gold numeric(10,2) NOT NULL,
|
||||
status text NOT NULL DEFAULT 'draft',
|
||||
notes text,
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
created_by uuid NOT NULL REFERENCES wizards(id),
|
||||
UNIQUE (school_id, scroll_number),
|
||||
CONSTRAINT valid_scroll_status CHECK (status IN ('draft', 'sent', 'paid', 'overdue', 'cancelled'))
|
||||
);
|
||||
|
||||
CREATE TABLE scroll_line_items(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
scroll_id uuid NOT NULL REFERENCES tuition_scrolls(id) ON DELETE CASCADE,
|
||||
description text NOT NULL,
|
||||
quantity numeric(10,2) NOT NULL DEFAULT 1,
|
||||
gold_per_unit numeric(10,2) NOT NULL,
|
||||
total_gold numeric(10,2) NOT NULL,
|
||||
lesson_id uuid REFERENCES spell_lessons(id),
|
||||
created_at timestamptz NOT NULL DEFAULT now()
|
||||
);
|
||||
|
||||
CREATE TABLE patron_sponsorships(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
scroll_id uuid NOT NULL REFERENCES tuition_scrolls(id) ON DELETE CASCADE,
|
||||
patron_house text NOT NULL,
|
||||
sponsorship_code text NOT NULL,
|
||||
claim_number text NOT NULL,
|
||||
claim_date date NOT NULL DEFAULT CURRENT_DATE,
|
||||
gold_requested numeric(10,2) NOT NULL,
|
||||
gold_approved numeric(10,2),
|
||||
status text NOT NULL DEFAULT 'submitted',
|
||||
denial_reason text,
|
||||
notes text,
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
updated_at timestamptz NOT NULL DEFAULT now(),
|
||||
UNIQUE (claim_number),
|
||||
CONSTRAINT valid_sponsorship_status CHECK (status IN ('draft', 'submitted', 'in_review', 'approved', 'partial', 'denied', 'appealed'))
|
||||
);
|
||||
|
||||
CREATE TABLE gold_payments(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
scroll_id uuid NOT NULL REFERENCES tuition_scrolls(id) ON DELETE CASCADE,
|
||||
payment_date timestamptz NOT NULL DEFAULT now(),
|
||||
gold_amount numeric(10,2) NOT NULL,
|
||||
payment_method text NOT NULL,
|
||||
reference_rune text,
|
||||
notes text,
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
created_by uuid NOT NULL REFERENCES wizards(id),
|
||||
CONSTRAINT valid_payment_method CHECK (payment_method IN ('gold_coins', 'crystal_transfer', 'mithril_card', 'dragon_scale', 'patron_sponsorship', 'other'))
|
||||
);
|
||||
|
||||
CREATE TABLE arcane_logs(
|
||||
id bigserial PRIMARY KEY,
|
||||
school_id uuid,
|
||||
wizard_id uuid,
|
||||
tower_id uuid,
|
||||
table_name text NOT NULL,
|
||||
record_id uuid,
|
||||
spell_operation text NOT NULL,
|
||||
old_values jsonb,
|
||||
new_values jsonb,
|
||||
casting_source inet,
|
||||
magical_signature text,
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
FOREIGN KEY (school_id) REFERENCES magic_schools(id) ON DELETE SET NULL,
|
||||
FOREIGN KEY (wizard_id) REFERENCES wizards(id) ON DELETE SET NULL,
|
||||
FOREIGN KEY (tower_id) REFERENCES towers(id) ON DELETE SET NULL,
|
||||
CONSTRAINT valid_spell_operation CHECK (spell_operation IN ('INSERT', 'UPDATE', 'DELETE'))
|
||||
);
|
||||
|
||||
-- Enable Row Level Security
|
||||
ALTER TABLE wizards ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE apprentices ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE grimoires ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE spell_lessons ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE tuition_scrolls ENABLE ROW LEVEL SECURITY;
|
||||
|
||||
-- Create RLS Policies
|
||||
CREATE POLICY school_isolation_wizards ON wizards
|
||||
FOR ALL TO authenticated
|
||||
USING (school_id = current_setting('app.current_school')::uuid);
|
||||
|
||||
CREATE POLICY school_isolation_apprentices ON apprentices
|
||||
FOR ALL TO authenticated
|
||||
USING (school_id = current_setting('app.current_school')::uuid);
|
||||
|
||||
-- Create arcane audit trigger function
|
||||
CREATE FUNCTION arcane_audit_trigger()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
INSERT INTO arcane_logs (
|
||||
school_id,
|
||||
wizard_id,
|
||||
tower_id,
|
||||
table_name,
|
||||
record_id,
|
||||
spell_operation,
|
||||
old_values,
|
||||
new_values
|
||||
) VALUES (
|
||||
current_setting('app.current_school', true)::uuid,
|
||||
current_setting('app.current_wizard', true)::uuid,
|
||||
current_setting('app.current_tower', true)::uuid,
|
||||
TG_TABLE_NAME,
|
||||
COALESCE(NEW.id, OLD.id),
|
||||
TG_OP,
|
||||
CASE WHEN TG_OP IN ('UPDATE', 'DELETE') THEN to_jsonb(OLD) ELSE NULL END,
|
||||
CASE WHEN TG_OP IN ('INSERT', 'UPDATE') THEN to_jsonb(NEW) ELSE NULL END
|
||||
);
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Create triggers
|
||||
CREATE TRIGGER arcane_audit_wizards AFTER INSERT OR UPDATE OR DELETE ON wizards
|
||||
FOR EACH ROW EXECUTE FUNCTION arcane_audit_trigger();
|
||||
|
||||
CREATE TRIGGER arcane_audit_apprentices AFTER INSERT OR UPDATE OR DELETE ON apprentices
|
||||
FOR EACH ROW EXECUTE FUNCTION arcane_audit_trigger();`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
// Should find all 16 tables
|
||||
expect(result.tables).toHaveLength(16);
|
||||
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
const expectedTables = [
|
||||
'apprentices',
|
||||
'arcane_logs',
|
||||
'gold_payments',
|
||||
'grimoire_types',
|
||||
'grimoires',
|
||||
'magic_schools',
|
||||
'magical_ranks',
|
||||
'patron_sponsorships',
|
||||
'rank_permissions',
|
||||
'scroll_line_items',
|
||||
'spell_lessons',
|
||||
'spell_permissions',
|
||||
'towers',
|
||||
'tuition_scrolls',
|
||||
'wizard_ranks',
|
||||
'wizards',
|
||||
];
|
||||
|
||||
expect(tableNames).toEqual(expectedTables);
|
||||
|
||||
// Should have many relationships
|
||||
expect(result.relationships.length).toBeGreaterThan(30);
|
||||
|
||||
// Should have warnings about unsupported features
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(result.warnings!.length).toBeGreaterThan(0);
|
||||
|
||||
// Verify specific critical relationships exist
|
||||
const hasWizardSchoolFK = result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'wizards' &&
|
||||
r.targetTable === 'magic_schools' &&
|
||||
r.sourceColumn === 'school_id'
|
||||
);
|
||||
expect(hasWizardSchoolFK).toBe(true);
|
||||
|
||||
const hasApprenticeMentorFK = result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'apprentices' &&
|
||||
r.targetTable === 'wizards' &&
|
||||
r.sourceColumn === 'primary_mentor'
|
||||
);
|
||||
expect(hasApprenticeMentorFK).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle ALTER TABLE ENABLE ROW LEVEL SECURITY', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE secure_table (id INTEGER PRIMARY KEY);
|
||||
ALTER TABLE secure_table ENABLE ROW LEVEL SECURITY;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.warnings).toBeDefined();
|
||||
// The warning should mention row level security
|
||||
expect(
|
||||
result.warnings!.some((w) =>
|
||||
w.toLowerCase().includes('row level security')
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should extract foreign keys even from unparsed tables', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE base (id UUID PRIMARY KEY);
|
||||
|
||||
-- Intentionally malformed to fail parsing
|
||||
CREATE TABLE malformed (
|
||||
id UUID PRIMARY KEY,
|
||||
base_id UUID REFERENCES base(id),
|
||||
FOREIGN KEY (base_id) REFERENCES base(id) ON DELETE CASCADE,
|
||||
value NUMERIC(10,
|
||||
2) -- Missing closing paren will cause parse failure
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
// Should still create the table entry
|
||||
expect(result.tables.map((t) => t.name)).toContain('malformed');
|
||||
|
||||
// Should extract the foreign key
|
||||
const fks = result.relationships.filter(
|
||||
(r) => r.sourceTable === 'malformed'
|
||||
);
|
||||
expect(fks.length).toBeGreaterThan(0);
|
||||
expect(fks[0].targetTable).toBe('base');
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,330 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('PostgreSQL Real-World Examples', () => {
|
||||
describe('Magical Academy Example', () => {
|
||||
it('should parse the magical academy example with all 16 tables', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE schools(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name text NOT NULL,
|
||||
created_at timestamptz NOT NULL DEFAULT now()
|
||||
);
|
||||
|
||||
CREATE TABLE towers(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES schools(id) ON DELETE CASCADE,
|
||||
name text NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE ranks(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES schools(id) ON DELETE CASCADE,
|
||||
name text NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE spell_permissions(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
spell_type text NOT NULL,
|
||||
casting_level text NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE rank_spell_permissions(
|
||||
rank_id uuid NOT NULL REFERENCES ranks(id) ON DELETE CASCADE,
|
||||
spell_permission_id uuid NOT NULL REFERENCES spell_permissions(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (rank_id, spell_permission_id)
|
||||
);
|
||||
|
||||
CREATE TABLE grimoire_types(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES schools(id) ON DELETE CASCADE,
|
||||
name text NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE wizards(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES schools(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
wizard_name text NOT NULL,
|
||||
email text NOT NULL,
|
||||
UNIQUE (school_id, wizard_name)
|
||||
);
|
||||
|
||||
CREATE FUNCTION enforce_wizard_tower_school()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
-- Function body
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE TABLE wizard_ranks(
|
||||
wizard_id uuid NOT NULL REFERENCES wizards(id) ON DELETE CASCADE,
|
||||
rank_id uuid NOT NULL REFERENCES ranks(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
assigned_at timestamptz NOT NULL DEFAULT now(),
|
||||
PRIMARY KEY (wizard_id, rank_id, tower_id)
|
||||
);
|
||||
|
||||
CREATE TABLE apprentices(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES schools(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
first_name text NOT NULL,
|
||||
last_name text NOT NULL,
|
||||
enrollment_date date NOT NULL,
|
||||
primary_mentor uuid REFERENCES wizards(id),
|
||||
sponsoring_wizard uuid REFERENCES wizards(id)
|
||||
);
|
||||
|
||||
CREATE TABLE spell_lessons(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES schools(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
apprentice_id uuid NOT NULL REFERENCES apprentices(id) ON DELETE CASCADE,
|
||||
instructor_id uuid NOT NULL REFERENCES wizards(id),
|
||||
lesson_date timestamptz NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE grimoires(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES schools(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
apprentice_id uuid NOT NULL REFERENCES apprentices(id) ON DELETE CASCADE,
|
||||
grimoire_type_id uuid NOT NULL REFERENCES grimoire_types(id),
|
||||
author_wizard_id uuid NOT NULL REFERENCES wizards(id),
|
||||
content jsonb NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE tuition_scrolls(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES schools(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
apprentice_id uuid NOT NULL REFERENCES apprentices(id) ON DELETE CASCADE,
|
||||
total_amount numeric(10,2) NOT NULL,
|
||||
status text NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE tuition_items(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
tuition_scroll_id uuid NOT NULL REFERENCES tuition_scrolls(id) ON DELETE CASCADE,
|
||||
description text NOT NULL,
|
||||
amount numeric(10,2) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE patron_sponsorships(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
tuition_scroll_id uuid NOT NULL REFERENCES tuition_scrolls(id) ON DELETE CASCADE,
|
||||
patron_house text NOT NULL,
|
||||
sponsorship_code text NOT NULL,
|
||||
status text NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE gold_payments(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
tuition_scroll_id uuid NOT NULL REFERENCES tuition_scrolls(id) ON DELETE CASCADE,
|
||||
amount numeric(10,2) NOT NULL,
|
||||
payment_date timestamptz NOT NULL DEFAULT now()
|
||||
);
|
||||
|
||||
CREATE TABLE arcane_logs(
|
||||
id bigserial PRIMARY KEY,
|
||||
school_id uuid,
|
||||
wizard_id uuid,
|
||||
tower_id uuid,
|
||||
table_name text NOT NULL,
|
||||
operation text NOT NULL,
|
||||
record_id uuid,
|
||||
changes jsonb,
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
FOREIGN KEY (school_id) REFERENCES schools(id) ON DELETE SET NULL,
|
||||
FOREIGN KEY (wizard_id) REFERENCES wizards(id) ON DELETE SET NULL,
|
||||
FOREIGN KEY (tower_id) REFERENCES towers(id) ON DELETE SET NULL
|
||||
);
|
||||
|
||||
-- Enable RLS
|
||||
ALTER TABLE wizards ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE apprentices ENABLE ROW LEVEL SECURITY;
|
||||
|
||||
-- Create policies
|
||||
CREATE POLICY school_isolation ON wizards
|
||||
FOR ALL TO public
|
||||
USING (school_id = current_setting('app.current_school')::uuid);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
// Should find all 16 tables
|
||||
const expectedTables = [
|
||||
'apprentices',
|
||||
'arcane_logs',
|
||||
'gold_payments',
|
||||
'grimoire_types',
|
||||
'grimoires',
|
||||
'patron_sponsorships',
|
||||
'rank_spell_permissions',
|
||||
'ranks',
|
||||
'schools',
|
||||
'spell_lessons',
|
||||
'spell_permissions',
|
||||
'towers',
|
||||
'tuition_items',
|
||||
'tuition_scrolls',
|
||||
'wizard_ranks',
|
||||
'wizards',
|
||||
];
|
||||
|
||||
expect(result.tables).toHaveLength(16);
|
||||
expect(result.tables.map((t) => t.name).sort()).toEqual(
|
||||
expectedTables
|
||||
);
|
||||
|
||||
// Verify key relationships exist
|
||||
const relationships = result.relationships;
|
||||
|
||||
// Check some critical relationships
|
||||
expect(
|
||||
relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'wizards' &&
|
||||
r.targetTable === 'schools' &&
|
||||
r.sourceColumn === 'school_id'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'wizard_ranks' &&
|
||||
r.targetTable === 'wizards' &&
|
||||
r.sourceColumn === 'wizard_id'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'apprentices' &&
|
||||
r.targetTable === 'wizards' &&
|
||||
r.sourceColumn === 'primary_mentor'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
// Should have warnings about functions, policies, and RLS
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(result.warnings!.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Enchanted Bazaar Example', () => {
|
||||
it('should parse the enchanted bazaar example with functions and policies', async () => {
|
||||
const sql = `
|
||||
-- Enchanted Bazaar tables with complex features
|
||||
CREATE TABLE merchants(
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE artifacts(
|
||||
id SERIAL PRIMARY KEY,
|
||||
merchant_id INTEGER REFERENCES merchants(id) ON DELETE CASCADE,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
price DECIMAL(10, 2) NOT NULL CHECK (price >= 0),
|
||||
enchantment_charges INTEGER DEFAULT 0 CHECK (enchantment_charges >= 0)
|
||||
);
|
||||
|
||||
-- Function that should be skipped
|
||||
CREATE FUNCTION consume_charges(artifact_id INTEGER, charges_used INTEGER)
|
||||
RETURNS VOID AS $$
|
||||
BEGIN
|
||||
UPDATE artifacts SET enchantment_charges = enchantment_charges - charges_used WHERE id = artifact_id;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE TABLE trades(
|
||||
id SERIAL PRIMARY KEY,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
status VARCHAR(50) DEFAULT 'negotiating'
|
||||
);
|
||||
|
||||
CREATE TABLE trade_items(
|
||||
trade_id INTEGER REFERENCES trades(id) ON DELETE CASCADE,
|
||||
artifact_id INTEGER REFERENCES artifacts(id),
|
||||
quantity INTEGER NOT NULL CHECK (quantity > 0),
|
||||
agreed_price DECIMAL(10, 2) NOT NULL,
|
||||
PRIMARY KEY (trade_id, artifact_id)
|
||||
);
|
||||
|
||||
-- Enable RLS
|
||||
ALTER TABLE artifacts ENABLE ROW LEVEL SECURITY;
|
||||
|
||||
-- Create policy
|
||||
CREATE POLICY merchant_artifacts ON artifacts
|
||||
FOR ALL TO merchants
|
||||
USING (merchant_id = current_user_id());
|
||||
|
||||
-- Create trigger
|
||||
CREATE TRIGGER charge_consumption_trigger
|
||||
AFTER INSERT ON trade_items
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION consume_charges();
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
// Should parse all tables despite functions, policies, and triggers
|
||||
expect(result.tables.length).toBeGreaterThanOrEqual(4);
|
||||
|
||||
// Check for specific tables
|
||||
const tableNames = result.tables.map((t) => t.name);
|
||||
expect(tableNames).toContain('merchants');
|
||||
expect(tableNames).toContain('artifacts');
|
||||
expect(tableNames).toContain('trades');
|
||||
expect(tableNames).toContain('trade_items');
|
||||
|
||||
// Check relationships
|
||||
if (tableNames.includes('marketplace_tokens')) {
|
||||
// Real file relationships
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'marketplace_listings' &&
|
||||
r.targetTable === 'inventory_items'
|
||||
)
|
||||
).toBe(true);
|
||||
} else {
|
||||
// Mock data relationships
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'artifacts' &&
|
||||
r.targetTable === 'merchants'
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) =>
|
||||
r.sourceTable === 'trade_items' &&
|
||||
r.targetTable === 'trades'
|
||||
)
|
||||
).toBe(true);
|
||||
}
|
||||
|
||||
// Should have warnings about unsupported features
|
||||
if (result.warnings) {
|
||||
expect(
|
||||
result.warnings.some(
|
||||
(w) =>
|
||||
w.includes('Function') ||
|
||||
w.includes('Policy') ||
|
||||
w.includes('Trigger') ||
|
||||
w.includes('ROW LEVEL SECURITY')
|
||||
)
|
||||
).toBe(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,116 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('PostgreSQL Parser Integration', () => {
|
||||
it('should parse simple SQL', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name VARCHAR(255)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('wizards');
|
||||
});
|
||||
|
||||
it('should handle functions correctly', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (id INTEGER PRIMARY KEY);
|
||||
|
||||
CREATE FUNCTION get_wizard() RETURNS INTEGER AS $$
|
||||
BEGIN
|
||||
RETURN 1;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('wizards');
|
||||
});
|
||||
|
||||
it('should handle policies correctly', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE ancient_scrolls (id INTEGER PRIMARY KEY);
|
||||
|
||||
CREATE POLICY wizard_policy ON ancient_scrolls
|
||||
FOR SELECT
|
||||
USING (true);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should handle RLS correctly', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE enchanted_vault (id INTEGER PRIMARY KEY);
|
||||
ALTER TABLE enchanted_vault ENABLE ROW LEVEL SECURITY;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should handle triggers correctly', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE spell_log (id INTEGER PRIMARY KEY);
|
||||
|
||||
CREATE TRIGGER spell_trigger
|
||||
AFTER INSERT ON spell_log
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION spell_func();
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should preserve all relationships', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE guilds (id INTEGER PRIMARY KEY);
|
||||
CREATE TABLE wizards (
|
||||
id INTEGER PRIMARY KEY,
|
||||
guild_id INTEGER REFERENCES guilds(id)
|
||||
);
|
||||
|
||||
-- This function should trigger improved parser
|
||||
CREATE FUNCTION dummy() RETURNS VOID AS $$ BEGIN END; $$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE TABLE quests (
|
||||
id INTEGER PRIMARY KEY,
|
||||
wizard_id INTEGER REFERENCES wizards(id),
|
||||
guild_id INTEGER REFERENCES guilds(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(3);
|
||||
expect(result.relationships).toHaveLength(3);
|
||||
|
||||
// Verify all relationships are preserved
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) => r.sourceTable === 'wizards' && r.targetTable === 'guilds'
|
||||
)
|
||||
).toBe(true);
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) => r.sourceTable === 'quests' && r.targetTable === 'wizards'
|
||||
)
|
||||
).toBe(true);
|
||||
expect(
|
||||
result.relationships.some(
|
||||
(r) => r.sourceTable === 'quests' && r.targetTable === 'guilds'
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,491 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('PostgreSQL Parser', () => {
|
||||
describe('Basic Table Parsing', () => {
|
||||
it('should parse simple tables with basic data types', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
magic_email TEXT UNIQUE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('wizards');
|
||||
expect(result.tables[0].columns).toHaveLength(4);
|
||||
expect(result.tables[0].columns[0].name).toBe('id');
|
||||
expect(result.tables[0].columns[0].type).toBe('INTEGER');
|
||||
expect(result.tables[0].columns[0].primaryKey).toBe(true);
|
||||
});
|
||||
|
||||
it('should parse multiple tables', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE guilds (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE mages (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL,
|
||||
guild_id INTEGER REFERENCES guilds(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.tables.map((t) => t.name).sort()).toEqual([
|
||||
'guilds',
|
||||
'mages',
|
||||
]);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
expect(result.relationships[0].sourceTable).toBe('mages');
|
||||
expect(result.relationships[0].targetTable).toBe('guilds');
|
||||
});
|
||||
|
||||
it('should handle IF NOT EXISTS clause', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE IF NOT EXISTS potions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name TEXT NOT NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('potions');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Complex Data Types', () => {
|
||||
it('should handle UUID and special PostgreSQL types', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE special_types (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
data JSONB,
|
||||
tags TEXT[],
|
||||
location POINT,
|
||||
mana_cost MONEY,
|
||||
binary_data BYTEA
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const columns = result.tables[0].columns;
|
||||
expect(columns.find((c) => c.name === 'id')?.type).toBe('UUID');
|
||||
expect(columns.find((c) => c.name === 'data')?.type).toBe('JSONB');
|
||||
expect(columns.find((c) => c.name === 'tags')?.type).toBe('TEXT[]');
|
||||
});
|
||||
|
||||
it('should handle numeric with precision', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE treasury (
|
||||
id SERIAL PRIMARY KEY,
|
||||
amount NUMERIC(10, 2),
|
||||
percentage DECIMAL(5, 2),
|
||||
big_number BIGINT
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const columns = result.tables[0].columns;
|
||||
// Parser limitation: scale on separate line is not captured
|
||||
const amountType = columns.find((c) => c.name === 'amount')?.type;
|
||||
expect(amountType).toMatch(/^NUMERIC/);
|
||||
});
|
||||
|
||||
it('should handle multi-line numeric definitions', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE multi_line (
|
||||
id INTEGER PRIMARY KEY,
|
||||
value NUMERIC(10,
|
||||
2),
|
||||
another_col TEXT
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].columns).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Foreign Key Relationships', () => {
|
||||
it('should parse inline foreign keys', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE realms (id INTEGER PRIMARY KEY);
|
||||
CREATE TABLE sanctuaries (
|
||||
id INTEGER PRIMARY KEY,
|
||||
realm_id INTEGER REFERENCES realms(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
expect(result.relationships[0].sourceTable).toBe('sanctuaries');
|
||||
expect(result.relationships[0].targetTable).toBe('realms');
|
||||
expect(result.relationships[0].sourceColumn).toBe('realm_id');
|
||||
expect(result.relationships[0].targetColumn).toBe('id');
|
||||
});
|
||||
|
||||
it('should parse table-level foreign key constraints', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE enchantment_orders (id INTEGER PRIMARY KEY);
|
||||
CREATE TABLE enchantment_items (
|
||||
id INTEGER PRIMARY KEY,
|
||||
order_id INTEGER,
|
||||
CONSTRAINT fk_order FOREIGN KEY (order_id) REFERENCES enchantment_orders(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
expect(result.relationships[0].sourceTable).toBe(
|
||||
'enchantment_items'
|
||||
);
|
||||
expect(result.relationships[0].targetTable).toBe(
|
||||
'enchantment_orders'
|
||||
);
|
||||
});
|
||||
|
||||
it('should parse composite foreign keys', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE magic_schools (id UUID PRIMARY KEY);
|
||||
CREATE TABLE quests (
|
||||
school_id UUID,
|
||||
quest_id UUID,
|
||||
name TEXT,
|
||||
PRIMARY KEY (school_id, quest_id),
|
||||
FOREIGN KEY (school_id) REFERENCES magic_schools(id)
|
||||
);
|
||||
CREATE TABLE rituals (
|
||||
id UUID PRIMARY KEY,
|
||||
school_id UUID,
|
||||
quest_id UUID,
|
||||
FOREIGN KEY (school_id, quest_id) REFERENCES quests(school_id, quest_id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(3);
|
||||
// Composite foreign keys are not fully supported
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
expect(result.relationships[0].sourceTable).toBe('quests');
|
||||
expect(result.relationships[0].targetTable).toBe('magic_schools');
|
||||
});
|
||||
|
||||
it('should handle ON DELETE and ON UPDATE clauses', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (id INTEGER PRIMARY KEY);
|
||||
CREATE TABLE scrolls (
|
||||
id INTEGER PRIMARY KEY,
|
||||
wizard_id INTEGER REFERENCES wizards(id) ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
// ON DELETE/UPDATE clauses are not preserved in output
|
||||
});
|
||||
});
|
||||
|
||||
describe('Constraints', () => {
|
||||
it('should parse unique constraints', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (
|
||||
id INTEGER PRIMARY KEY,
|
||||
magic_email TEXT UNIQUE,
|
||||
wizard_name TEXT,
|
||||
UNIQUE (wizard_name)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const columns = result.tables[0].columns;
|
||||
expect(columns.find((c) => c.name === 'magic_email')?.unique).toBe(
|
||||
true
|
||||
);
|
||||
});
|
||||
|
||||
it('should parse check constraints', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE potions (
|
||||
id INTEGER PRIMARY KEY,
|
||||
mana_cost DECIMAL CHECK (mana_cost > 0),
|
||||
quantity INTEGER,
|
||||
CONSTRAINT positive_quantity CHECK (quantity >= 0)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].columns).toHaveLength(3);
|
||||
});
|
||||
|
||||
it('should parse composite primary keys', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE enchantment_items (
|
||||
order_id INTEGER,
|
||||
potion_id INTEGER,
|
||||
quantity INTEGER,
|
||||
PRIMARY KEY (order_id, potion_id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
const columns = result.tables[0].columns;
|
||||
expect(columns.filter((c) => c.primaryKey)).toHaveLength(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Generated Columns', () => {
|
||||
it('should handle GENERATED ALWAYS AS IDENTITY', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE items (
|
||||
id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
|
||||
name TEXT
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].columns[0].increment).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle GENERATED BY DEFAULT AS IDENTITY', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE items (
|
||||
id INTEGER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY,
|
||||
name TEXT
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].columns[0].increment).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle computed columns', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE calculations (
|
||||
id INTEGER PRIMARY KEY,
|
||||
value1 NUMERIC,
|
||||
value2 NUMERIC,
|
||||
total NUMERIC GENERATED ALWAYS AS (value1 + value2) STORED
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].columns).toHaveLength(4);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Unsupported Statements', () => {
|
||||
it('should skip and warn about functions', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE wizards (id INTEGER PRIMARY KEY);
|
||||
|
||||
CREATE FUNCTION get_wizard_name(wizard_id INTEGER)
|
||||
RETURNS TEXT AS $$
|
||||
BEGIN
|
||||
RETURN 'test';
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE TABLE scrolls (
|
||||
id INTEGER PRIMARY KEY,
|
||||
wizard_id INTEGER REFERENCES wizards(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(result.warnings!.some((w) => w.includes('Function'))).toBe(
|
||||
true
|
||||
);
|
||||
});
|
||||
|
||||
it('should skip and warn about triggers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE spell_audit_log (id SERIAL PRIMARY KEY);
|
||||
|
||||
CREATE TRIGGER spell_audit_trigger
|
||||
AFTER INSERT ON spell_audit_log
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION spell_audit_function();
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(result.warnings!.some((w) => w.includes('Trigger'))).toBe(
|
||||
true
|
||||
);
|
||||
});
|
||||
|
||||
it('should skip and warn about policies', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE arcane_secrets (id INTEGER PRIMARY KEY);
|
||||
|
||||
CREATE POLICY wizard_policy ON arcane_secrets
|
||||
FOR SELECT
|
||||
TO public
|
||||
USING (true);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(result.warnings!.some((w) => w.includes('Policy'))).toBe(
|
||||
true
|
||||
);
|
||||
});
|
||||
|
||||
it('should skip and warn about RLS', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE enchanted_vault (id INTEGER PRIMARY KEY);
|
||||
ALTER TABLE enchanted_vault ENABLE ROW LEVEL SECURITY;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(
|
||||
result.warnings!.some((w) =>
|
||||
w.toLowerCase().includes('row level security')
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge Cases', () => {
|
||||
it('should handle tables after failed function parsing', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE before_enchantment (id INTEGER PRIMARY KEY);
|
||||
|
||||
CREATE FUNCTION complex_spell()
|
||||
RETURNS TABLE(id INTEGER, name TEXT) AS $$
|
||||
BEGIN
|
||||
RETURN QUERY SELECT 1, 'test';
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE TABLE after_enchantment (
|
||||
id INTEGER PRIMARY KEY,
|
||||
ref_id INTEGER REFERENCES before_enchantment(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.tables.map((t) => t.name).sort()).toEqual([
|
||||
'after_enchantment',
|
||||
'before_enchantment',
|
||||
]);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should handle empty or null input', async () => {
|
||||
const result1 = await fromPostgres('');
|
||||
expect(result1.tables).toHaveLength(0);
|
||||
expect(result1.relationships).toHaveLength(0);
|
||||
|
||||
const result2 = await fromPostgres(' \n ');
|
||||
expect(result2.tables).toHaveLength(0);
|
||||
expect(result2.relationships).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should handle comments in various positions', async () => {
|
||||
const sql = `
|
||||
-- This is a comment
|
||||
CREATE TABLE /* inline comment */ wizards (
|
||||
id INTEGER PRIMARY KEY, -- end of line comment
|
||||
/* multi-line
|
||||
comment */
|
||||
name TEXT
|
||||
);
|
||||
-- Another comment
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('wizards');
|
||||
expect(result.tables[0].columns).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should handle dollar-quoted strings', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE spell_messages (
|
||||
id INTEGER PRIMARY KEY,
|
||||
template TEXT DEFAULT $tag$Hello, 'world'!$tag$,
|
||||
content TEXT
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].columns).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Foreign Key Extraction from Unparsed Tables', () => {
|
||||
it('should extract foreign keys from tables that fail to parse', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE ancient_artifact (id UUID PRIMARY KEY);
|
||||
|
||||
-- This table has syntax that might fail parsing
|
||||
CREATE TABLE mystical_formula (
|
||||
id UUID PRIMARY KEY,
|
||||
artifact_ref UUID REFERENCES ancient_artifact(id),
|
||||
value NUMERIC(10,
|
||||
2) GENERATED ALWAYS AS (1 + 1) STORED,
|
||||
FOREIGN KEY (artifact_ref) REFERENCES ancient_artifact(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE enchanted_relic (
|
||||
id UUID PRIMARY KEY,
|
||||
formula_ref UUID REFERENCES mystical_formula(id)
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(3);
|
||||
// Should find foreign keys even if mystical_formula fails to parse
|
||||
expect(result.relationships.length).toBeGreaterThanOrEqual(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,199 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('PostgreSQL Parser Regression Tests', () => {
|
||||
it('should parse all 16 tables from the magical academy example', async () => {
|
||||
// This is a regression test for the issue where 3 tables were missing
|
||||
const sql = `
|
||||
-- Core tables
|
||||
CREATE TABLE magic_schools(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name text NOT NULL,
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
updated_at timestamptz NOT NULL DEFAULT now()
|
||||
);
|
||||
|
||||
CREATE TABLE towers(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES magic_schools(id) ON DELETE CASCADE,
|
||||
name text NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE wizards(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES magic_schools(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
wizard_name text NOT NULL,
|
||||
magic_email text NOT NULL,
|
||||
UNIQUE (school_id, wizard_name)
|
||||
);
|
||||
|
||||
-- This function should not prevent the wizards table from being parsed
|
||||
CREATE FUNCTION enforce_wizard_tower_school()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE TABLE wizard_ranks(
|
||||
wizard_id uuid NOT NULL REFERENCES wizards(id) ON DELETE CASCADE,
|
||||
rank_id uuid NOT NULL REFERENCES magical_ranks(id) ON DELETE CASCADE,
|
||||
tower_id uuid NOT NULL REFERENCES towers(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (wizard_id, rank_id, tower_id)
|
||||
);
|
||||
|
||||
-- Another function that should be skipped
|
||||
CREATE FUNCTION another_function() RETURNS void AS $$
|
||||
BEGIN
|
||||
-- Do nothing
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE TABLE magical_ranks(
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
school_id uuid NOT NULL REFERENCES magic_schools(id) ON DELETE CASCADE,
|
||||
name text NOT NULL
|
||||
);
|
||||
|
||||
-- Row level security should not break parsing
|
||||
ALTER TABLE wizards ENABLE ROW LEVEL SECURITY;
|
||||
|
||||
CREATE TABLE spell_logs(
|
||||
id bigserial PRIMARY KEY,
|
||||
school_id uuid,
|
||||
wizard_id uuid,
|
||||
action text NOT NULL
|
||||
);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
// Should find all 6 tables
|
||||
expect(result.tables).toHaveLength(6);
|
||||
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
expect(tableNames).toEqual([
|
||||
'magic_schools',
|
||||
'magical_ranks',
|
||||
'spell_logs',
|
||||
'towers',
|
||||
'wizard_ranks',
|
||||
'wizards',
|
||||
]);
|
||||
|
||||
if (result.warnings) {
|
||||
expect(result.warnings.length).toBeGreaterThan(0);
|
||||
expect(
|
||||
result.warnings.some(
|
||||
(w) => w.includes('Function') || w.includes('security')
|
||||
)
|
||||
).toBe(true);
|
||||
} else {
|
||||
expect(result.tables).toHaveLength(6);
|
||||
}
|
||||
});
|
||||
|
||||
it('should handle tables with complex syntax that fail parsing', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE simple_table (
|
||||
id uuid PRIMARY KEY,
|
||||
                name text NOT NULL
            );

            -- This table has complex syntax that might fail parsing
            CREATE TABLE complex_table (
                id uuid PRIMARY KEY,
                value numeric(10,
                    2), -- Multi-line numeric
                computed numeric(5,2) GENERATED ALWAYS AS (value * 2) STORED,
                UNIQUE (id, value)
            );

            CREATE TABLE another_table (
                id uuid PRIMARY KEY,
                complex_id uuid REFERENCES complex_table(id),
                simple_id uuid REFERENCES simple_table(id)
            );
        `;

        const result = await fromPostgres(sql);

        // Should find all 3 tables even if complex_table fails to parse
        expect(result.tables).toHaveLength(3);
        expect(result.tables.map((t) => t.name).sort()).toEqual([
            'another_table',
            'complex_table',
            'simple_table',
        ]);

        // Should extract foreign keys even from unparsed tables
        const fksFromAnother = result.relationships.filter(
            (r) => r.sourceTable === 'another_table'
        );
        expect(fksFromAnother).toHaveLength(2);
        expect(
            fksFromAnother.some((fk) => fk.targetTable === 'complex_table')
        ).toBe(true);
        expect(
            fksFromAnother.some((fk) => fk.targetTable === 'simple_table')
        ).toBe(true);
    });

    it('should count relationships correctly for multi-tenant system', async () => {
        // Simplified version focusing on relationship counting
        const sql = `
            CREATE TABLE tenants(id uuid PRIMARY KEY);
            CREATE TABLE branches(
                id uuid PRIMARY KEY,
                tenant_id uuid NOT NULL REFERENCES tenants(id)
            );
            CREATE TABLE roles(
                id uuid PRIMARY KEY,
                tenant_id uuid NOT NULL REFERENCES tenants(id)
            );
            CREATE TABLE permissions(id uuid PRIMARY KEY);
            CREATE TABLE role_permissions(
                role_id uuid NOT NULL REFERENCES roles(id),
                permission_id uuid NOT NULL REFERENCES permissions(id),
                PRIMARY KEY (role_id, permission_id)
            );
            CREATE TABLE record_types(
                id uuid PRIMARY KEY,
                tenant_id uuid NOT NULL REFERENCES tenants(id)
            );
            CREATE TABLE users(
                id uuid PRIMARY KEY,
                tenant_id uuid NOT NULL REFERENCES tenants(id),
                branch_id uuid NOT NULL REFERENCES branches(id)
            );
            CREATE TABLE user_roles(
                user_id uuid NOT NULL REFERENCES users(id),
                role_id uuid NOT NULL REFERENCES roles(id),
                branch_id uuid NOT NULL REFERENCES branches(id),
                PRIMARY KEY (user_id, role_id, branch_id)
            );
            CREATE TABLE patients(
                id uuid PRIMARY KEY,
                tenant_id uuid NOT NULL REFERENCES tenants(id),
                branch_id uuid NOT NULL REFERENCES branches(id),
                primary_physician uuid REFERENCES users(id),
                referring_physician uuid REFERENCES users(id)
            );
        `;

        const result = await fromPostgres(sql);

        // Count expected relationships:
        // branches: 1 (tenant_id -> tenants)
        // roles: 1 (tenant_id -> tenants)
        // role_permissions: 2 (role_id -> roles, permission_id -> permissions)
        // record_types: 1 (tenant_id -> tenants)
        // users: 2 (tenant_id -> tenants, branch_id -> branches)
        // user_roles: 3 (user_id -> users, role_id -> roles, branch_id -> branches)
        // patients: 4 (tenant_id -> tenants, branch_id -> branches, primary_physician -> users, referring_physician -> users)
        // Total: 14

        expect(result.relationships).toHaveLength(14);
    });
});
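The 14 expected relationships above can be cross-checked without the parser at all. The sketch below is only an illustration (countInlineReferences is a made-up helper, not part of fromPostgres): it counts inline column-level REFERENCES clauses in a DDL string, which for the multi-tenant script also comes to 14.

const countInlineReferences = (ddl: string): number => {
    // Counts "<column> <type> ... REFERENCES <table>(<column>)" fragments.
    const pattern = /\bREFERENCES\s+"?[\w.]+"?\s*\(\s*"?\w+"?\s*\)/gi;
    return ddl.match(pattern)?.length ?? 0;
};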
@@ -0,0 +1,149 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('Activities table import - PostgreSQL specific types', () => {
|
||||
it('should correctly parse the activities table with PostgreSQL-specific types', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE public.activities (
|
||||
id serial4 NOT NULL,
|
||||
user_id int4 NOT NULL,
|
||||
workflow_id int4 NULL,
|
||||
task_id int4 NULL,
|
||||
"action" character varying(50) NOT NULL,
|
||||
description text NOT NULL,
|
||||
created_at timestamp DEFAULT now() NOT NULL,
|
||||
is_read bool DEFAULT false NOT NULL,
|
||||
CONSTRAINT activities_pkey PRIMARY KEY (id)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
|
||||
const table = result.tables[0];
|
||||
expect(table.name).toBe('activities');
|
||||
expect(table.columns).toHaveLength(8);
|
||||
|
||||
// Check each column
|
||||
const columns = table.columns;
|
||||
|
||||
// id column - serial4 should become INTEGER with auto-increment
|
||||
const idCol = columns.find((c) => c.name === 'id');
|
||||
expect(idCol).toBeDefined();
|
||||
expect(idCol?.type).toBe('INTEGER');
|
||||
expect(idCol?.primaryKey).toBe(true);
|
||||
expect(idCol?.increment).toBe(true);
|
||||
expect(idCol?.nullable).toBe(false);
|
||||
|
||||
// user_id column - int4 should become INTEGER
|
||||
const userIdCol = columns.find((c) => c.name === 'user_id');
|
||||
expect(userIdCol).toBeDefined();
|
||||
expect(userIdCol?.type).toBe('INTEGER');
|
||||
expect(userIdCol?.nullable).toBe(false);
|
||||
|
||||
// workflow_id column - int4 NULL
|
||||
const workflowIdCol = columns.find((c) => c.name === 'workflow_id');
|
||||
expect(workflowIdCol).toBeDefined();
|
||||
expect(workflowIdCol?.type).toBe('INTEGER');
|
||||
expect(workflowIdCol?.nullable).toBe(true);
|
||||
|
||||
// task_id column - int4 NULL
|
||||
const taskIdCol = columns.find((c) => c.name === 'task_id');
|
||||
expect(taskIdCol).toBeDefined();
|
||||
expect(taskIdCol?.type).toBe('INTEGER');
|
||||
expect(taskIdCol?.nullable).toBe(true);
|
||||
|
||||
// action column - character varying(50)
|
||||
const actionCol = columns.find((c) => c.name === 'action');
|
||||
expect(actionCol).toBeDefined();
|
||||
expect(actionCol?.type).toBe('VARCHAR(50)');
|
||||
expect(actionCol?.nullable).toBe(false);
|
||||
|
||||
// description column - text
|
||||
const descriptionCol = columns.find((c) => c.name === 'description');
|
||||
expect(descriptionCol).toBeDefined();
|
||||
expect(descriptionCol?.type).toBe('TEXT');
|
||||
expect(descriptionCol?.nullable).toBe(false);
|
||||
|
||||
// created_at column - timestamp with default
|
||||
const createdAtCol = columns.find((c) => c.name === 'created_at');
|
||||
expect(createdAtCol).toBeDefined();
|
||||
expect(createdAtCol?.type).toBe('TIMESTAMP');
|
||||
expect(createdAtCol?.nullable).toBe(false);
|
||||
expect(createdAtCol?.default).toContain('NOW');
|
||||
|
||||
// is_read column - bool with default
|
||||
const isReadCol = columns.find((c) => c.name === 'is_read');
|
||||
expect(isReadCol).toBeDefined();
|
||||
expect(isReadCol?.type).toBe('BOOLEAN');
|
||||
expect(isReadCol?.nullable).toBe(false);
|
||||
expect(isReadCol?.default).toBe('FALSE');
|
||||
});
|
||||
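The default-value assertions above (a default containing 'NOW', a literal 'FALSE') imply some normalization of raw PostgreSQL default expressions. A minimal sketch of that idea, assuming only simple keyword and function-call defaults; normalizeDefault is an illustrative name, not the parser's actual code:

const normalizeDefault = (raw: string): string => {
    const trimmed = raw.trim();
    if (/^now\(\)$/i.test(trimmed)) return 'NOW()'; // satisfies toContain('NOW')
    if (/^(true|false)$/i.test(trimmed)) return trimmed.toUpperCase(); // 'FALSE'
    if (/^current_timestamp$/i.test(trimmed)) return 'CURRENT_TIMESTAMP';
    return trimmed;
};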
|
||||
it('should handle PostgreSQL type aliases correctly', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE type_test (
|
||||
id serial4,
|
||||
small_id serial2,
|
||||
big_id serial8,
|
||||
int_col int4,
|
||||
small_int smallint,
|
||||
big_int int8,
|
||||
bool_col bool,
|
||||
boolean_col boolean,
|
||||
varchar_col character varying(100),
|
||||
char_col character(10),
|
||||
text_col text,
|
||||
timestamp_col timestamp,
|
||||
timestamptz_col timestamptz,
|
||||
date_col date,
|
||||
time_col time,
|
||||
json_col json,
|
||||
jsonb_col jsonb
|
||||
);`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
const table = result.tables[0];
|
||||
const cols = table.columns;
|
||||
|
||||
// Check serial types
|
||||
expect(cols.find((c) => c.name === 'id')?.type).toBe('INTEGER');
|
||||
expect(cols.find((c) => c.name === 'id')?.increment).toBe(true);
|
||||
expect(cols.find((c) => c.name === 'small_id')?.type).toBe('SMALLINT');
|
||||
expect(cols.find((c) => c.name === 'small_id')?.increment).toBe(true);
|
||||
expect(cols.find((c) => c.name === 'big_id')?.type).toBe('BIGINT');
|
||||
expect(cols.find((c) => c.name === 'big_id')?.increment).toBe(true);
|
||||
|
||||
// Check integer types
|
||||
expect(cols.find((c) => c.name === 'int_col')?.type).toBe('INTEGER');
|
||||
expect(cols.find((c) => c.name === 'small_int')?.type).toBe('SMALLINT');
|
||||
expect(cols.find((c) => c.name === 'big_int')?.type).toBe('BIGINT');
|
||||
|
||||
// Check boolean types
|
||||
expect(cols.find((c) => c.name === 'bool_col')?.type).toBe('BOOLEAN');
|
||||
expect(cols.find((c) => c.name === 'boolean_col')?.type).toBe(
|
||||
'BOOLEAN'
|
||||
);
|
||||
|
||||
// Check string types
|
||||
expect(cols.find((c) => c.name === 'varchar_col')?.type).toBe(
|
||||
'VARCHAR(100)'
|
||||
);
|
||||
expect(cols.find((c) => c.name === 'char_col')?.type).toBe('CHAR(10)');
|
||||
expect(cols.find((c) => c.name === 'text_col')?.type).toBe('TEXT');
|
||||
|
||||
// Check timestamp types
|
||||
expect(cols.find((c) => c.name === 'timestamp_col')?.type).toBe(
|
||||
'TIMESTAMP'
|
||||
);
|
||||
expect(cols.find((c) => c.name === 'timestamptz_col')?.type).toBe(
|
||||
'TIMESTAMPTZ'
|
||||
);
|
||||
|
||||
// Check other types
|
||||
expect(cols.find((c) => c.name === 'date_col')?.type).toBe('DATE');
|
||||
expect(cols.find((c) => c.name === 'time_col')?.type).toBe('TIME');
|
||||
expect(cols.find((c) => c.name === 'json_col')?.type).toBe('JSON');
|
||||
expect(cols.find((c) => c.name === 'jsonb_col')?.type).toBe('JSONB');
|
||||
});
|
||||
});
|
||||
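The alias assertions above read naturally against a small lookup table. The following is only a sketch of the expected mapping, not the table fromPostgres actually ships:

// serial* aliases imply auto-increment; int*/bool/timestamptz are plain renames.
const PG_TYPE_ALIASES: Record<string, { type: string; increment?: boolean }> = {
    serial2: { type: 'SMALLINT', increment: true },
    serial4: { type: 'INTEGER', increment: true },
    serial8: { type: 'BIGINT', increment: true },
    int2: { type: 'SMALLINT' },
    int4: { type: 'INTEGER' },
    int8: { type: 'BIGINT' },
    bool: { type: 'BOOLEAN' },
    timestamptz: { type: 'TIMESTAMPTZ' },
};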
@@ -0,0 +1,307 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('ALTER TABLE FOREIGN KEY parsing with fallback', () => {
|
||||
it('should parse foreign keys from ALTER TABLE ONLY statements with DEFERRABLE', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "public"."wizard" (
|
||||
"id" bigint NOT NULL,
|
||||
"name" character varying(255) NOT NULL,
|
||||
CONSTRAINT "wizard_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
CREATE TABLE "public"."spellbook" (
|
||||
"id" integer NOT NULL,
|
||||
"wizard_id" bigint NOT NULL,
|
||||
"title" character varying(254) NOT NULL,
|
||||
CONSTRAINT "spellbook_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
ALTER TABLE ONLY "public"."spellbook" ADD CONSTRAINT "spellbook_wizard_id_fk" FOREIGN KEY (wizard_id) REFERENCES wizard(id) DEFERRABLE INITIALLY DEFERRED DEFERRABLE;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const fk = result.relationships[0];
|
||||
expect(fk.sourceTable).toBe('spellbook');
|
||||
expect(fk.targetTable).toBe('wizard');
|
||||
expect(fk.sourceColumn).toBe('wizard_id');
|
||||
expect(fk.targetColumn).toBe('id');
|
||||
expect(fk.name).toBe('spellbook_wizard_id_fk');
|
||||
});
|
||||
|
||||
it('should parse foreign keys without schema qualification', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE dragon (
|
||||
id UUID PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE dragon_rider (
|
||||
id UUID PRIMARY KEY,
|
||||
rider_name VARCHAR(100) NOT NULL,
|
||||
dragon_id UUID NOT NULL
|
||||
);
|
||||
|
||||
-- Without ONLY keyword and without schema
|
||||
ALTER TABLE dragon_rider ADD CONSTRAINT dragon_rider_dragon_fk FOREIGN KEY (dragon_id) REFERENCES dragon(id);
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const fk = result.relationships[0];
|
||||
expect(fk.sourceTable).toBe('dragon_rider');
|
||||
expect(fk.targetTable).toBe('dragon');
|
||||
expect(fk.sourceColumn).toBe('dragon_id');
|
||||
expect(fk.targetColumn).toBe('id');
|
||||
expect(fk.sourceSchema).toBe('public');
|
||||
expect(fk.targetSchema).toBe('public');
|
||||
});
|
||||
|
||||
it('should parse foreign keys with mixed schema specifications', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "magic_school"."instructor" (
|
||||
"id" bigint NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
CONSTRAINT "instructor_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
CREATE TABLE "public"."apprentice" (
|
||||
"id" integer NOT NULL,
|
||||
"name" varchar(255) NOT NULL,
|
||||
"instructor_id" bigint NOT NULL,
|
||||
CONSTRAINT "apprentice_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- Source table with public schema, target table with magic_school schema
|
||||
ALTER TABLE ONLY "public"."apprentice" ADD CONSTRAINT "apprentice_instructor_fk" FOREIGN KEY (instructor_id) REFERENCES "magic_school"."instructor"(id) ON DELETE CASCADE;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const fk = result.relationships[0];
|
||||
expect(fk.sourceTable).toBe('apprentice');
|
||||
expect(fk.targetTable).toBe('instructor');
|
||||
expect(fk.sourceSchema).toBe('public');
|
||||
expect(fk.targetSchema).toBe('magic_school');
|
||||
expect(fk.sourceColumn).toBe('instructor_id');
|
||||
expect(fk.targetColumn).toBe('id');
|
||||
});
|
||||
|
||||
it('should parse foreign keys with various constraint options', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE potion (
|
||||
id UUID PRIMARY KEY,
|
||||
name VARCHAR(100)
|
||||
);
|
||||
|
||||
CREATE TABLE ingredient (
|
||||
id UUID PRIMARY KEY,
|
||||
name VARCHAR(100)
|
||||
);
|
||||
|
||||
CREATE TABLE potion_ingredient (
|
||||
id SERIAL PRIMARY KEY,
|
||||
potion_id UUID NOT NULL,
|
||||
ingredient_id UUID NOT NULL,
|
||||
quantity INTEGER DEFAULT 1
|
||||
);
|
||||
|
||||
-- Different variations of ALTER TABLE foreign key syntax
|
||||
ALTER TABLE potion_ingredient ADD CONSTRAINT potion_ingredient_potion_fk FOREIGN KEY (potion_id) REFERENCES potion(id) ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
ALTER TABLE ONLY potion_ingredient ADD CONSTRAINT potion_ingredient_ingredient_fk FOREIGN KEY (ingredient_id) REFERENCES ingredient(id) DEFERRABLE;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(3);
|
||||
expect(result.relationships).toHaveLength(2);
|
||||
|
||||
// Check first FK (with ON DELETE CASCADE ON UPDATE CASCADE)
|
||||
const potionFK = result.relationships.find(
|
||||
(r) => r.sourceColumn === 'potion_id'
|
||||
);
|
||||
expect(potionFK).toBeDefined();
|
||||
expect(potionFK?.targetTable).toBe('potion');
|
||||
|
||||
// Check second FK (with DEFERRABLE)
|
||||
const ingredientFK = result.relationships.find(
|
||||
(r) => r.sourceColumn === 'ingredient_id'
|
||||
);
|
||||
expect(ingredientFK).toBeDefined();
|
||||
expect(ingredientFK?.targetTable).toBe('ingredient');
|
||||
});
|
||||
|
||||
it('should handle quoted and unquoted identifiers', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "wizard_tower" (
|
||||
id BIGINT PRIMARY KEY,
|
||||
"tower_name" VARCHAR(255)
|
||||
);
|
||||
|
||||
CREATE TABLE wizard_resident (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100),
|
||||
tower_id BIGINT
|
||||
);
|
||||
|
||||
-- First ALTER TABLE statement
|
||||
ALTER TABLE wizard_resident ADD CONSTRAINT wizard_tower_fk FOREIGN KEY (tower_id) REFERENCES "wizard_tower"(id) DEFERRABLE INITIALLY DEFERRED DEFERRABLE;
|
||||
|
||||
-- Second ALTER TABLE statement
|
||||
ALTER TABLE ONLY "wizard_resident" ADD CONSTRAINT "wizard_tower_fk2" FOREIGN KEY ("tower_id") REFERENCES "wizard_tower"("id") ON DELETE SET NULL DEFERRABLE INITIALLY DEFERRED DEFERRABLE;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
console.log('Relationships found:', result.relationships.length);
|
||||
result.relationships.forEach((rel, i) => {
|
||||
console.log(
|
||||
`FK ${i + 1}: ${rel.sourceTable}.${rel.sourceColumn} -> ${rel.targetTable}.${rel.targetColumn}`
|
||||
);
|
||||
});
|
||||
console.log('Warnings:', result.warnings);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
|
||||
// At least one relationship should be found (the regex fallback should catch at least one)
|
||||
expect(result.relationships.length).toBeGreaterThanOrEqual(1);
|
||||
|
||||
// Check the first relationship
|
||||
const fk = result.relationships[0];
|
||||
expect(fk.sourceTable).toBe('wizard_resident');
|
||||
expect(fk.targetTable).toBe('wizard_tower');
|
||||
expect(fk.sourceColumn).toBe('tower_id');
|
||||
expect(fk.targetColumn).toBe('id');
|
||||
});
|
||||
|
||||
it('should handle the exact problematic syntax from postgres_seven', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE "public"."users_user" (
|
||||
"id" bigint NOT NULL,
|
||||
"email" character varying(254) NOT NULL,
|
||||
CONSTRAINT "users_user_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
CREATE TABLE "public"."account_emailaddress" (
|
||||
"id" integer DEFAULT GENERATED BY DEFAULT AS IDENTITY NOT NULL,
|
||||
"email" character varying(254) NOT NULL,
|
||||
"user_id" bigint NOT NULL,
|
||||
CONSTRAINT "account_emailaddress_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- Exact syntax from the problematic file with double DEFERRABLE
|
||||
ALTER TABLE ONLY "public"."account_emailaddress" ADD CONSTRAINT "account_emailaddress_user_id_2c513194_fk_users_user_id" FOREIGN KEY (user_id) REFERENCES users_user(id) DEFERRABLE INITIALLY DEFERRED DEFERRABLE;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
console.log('Warnings:', result.warnings);
|
||||
console.log('Relationships:', result.relationships);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const fk = result.relationships[0];
|
||||
expect(fk.name).toBe(
|
||||
'account_emailaddress_user_id_2c513194_fk_users_user_id'
|
||||
);
|
||||
expect(fk.sourceTable).toBe('account_emailaddress');
|
||||
expect(fk.targetTable).toBe('users_user');
|
||||
});
|
||||
|
||||
it('should handle multiple foreign keys in different formats', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE realm (
|
||||
id UUID PRIMARY KEY,
|
||||
name VARCHAR(100)
|
||||
);
|
||||
|
||||
CREATE TABLE region (
|
||||
id UUID PRIMARY KEY,
|
||||
name VARCHAR(100),
|
||||
realm_id UUID
|
||||
);
|
||||
|
||||
CREATE TABLE city (
|
||||
id UUID PRIMARY KEY,
|
||||
name VARCHAR(100),
|
||||
region_id UUID,
|
||||
realm_id UUID
|
||||
);
|
||||
|
||||
-- Mix of syntaxes that might fail parsing
|
||||
ALTER TABLE ONLY region ADD CONSTRAINT region_realm_fk FOREIGN KEY (realm_id) REFERENCES realm(id) DEFERRABLE INITIALLY DEFERRED DEFERRABLE;
|
||||
ALTER TABLE city ADD CONSTRAINT city_region_fk FOREIGN KEY (region_id) REFERENCES region(id) ON DELETE CASCADE;
|
||||
ALTER TABLE ONLY "public"."city" ADD CONSTRAINT "city_realm_fk" FOREIGN KEY ("realm_id") REFERENCES "public"."realm"("id");
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(3);
|
||||
expect(result.relationships).toHaveLength(3);
|
||||
|
||||
// Verify all three relationships were captured
|
||||
const regionRealmFK = result.relationships.find(
|
||||
(r) => r.sourceTable === 'region' && r.targetTable === 'realm'
|
||||
);
|
||||
const cityRegionFK = result.relationships.find(
|
||||
(r) => r.sourceTable === 'city' && r.targetTable === 'region'
|
||||
);
|
||||
const cityRealmFK = result.relationships.find(
|
||||
(r) => r.sourceTable === 'city' && r.targetTable === 'realm'
|
||||
);
|
||||
|
||||
expect(regionRealmFK).toBeDefined();
|
||||
expect(cityRegionFK).toBeDefined();
|
||||
expect(cityRealmFK).toBeDefined();
|
||||
});
|
||||
|
||||
it('should use regex fallback for unparseable ALTER TABLE statements', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE magical_item (
|
||||
id UUID PRIMARY KEY,
|
||||
name VARCHAR(255)
|
||||
);
|
||||
|
||||
CREATE TABLE enchantment (
|
||||
id UUID PRIMARY KEY,
|
||||
name VARCHAR(255),
|
||||
item_id UUID NOT NULL
|
||||
);
|
||||
|
||||
-- This should fail to parse due to syntax variations and trigger regex fallback
|
||||
ALTER TABLE ONLY enchantment ADD CONSTRAINT enchantment_item_fk FOREIGN KEY (item_id) REFERENCES magical_item(id) ON DELETE CASCADE ON UPDATE CASCADE DEFERRABLE INITIALLY DEFERRED DEFERRABLE;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
// Should find the foreign key even if parser fails
|
||||
expect(result.relationships).toHaveLength(1);
|
||||
|
||||
const fk = result.relationships[0];
|
||||
expect(fk.name).toBe('enchantment_item_fk');
|
||||
expect(fk.sourceTable).toBe('enchantment');
|
||||
expect(fk.targetTable).toBe('magical_item');
|
||||
expect(fk.sourceColumn).toBe('item_id');
|
||||
expect(fk.targetColumn).toBe('id');
|
||||
|
||||
// Should have a warning about the failed parse
|
||||
expect(result.warnings).toBeDefined();
|
||||
const hasAlterWarning = result.warnings!.some(
|
||||
(w) =>
|
||||
w.includes('Failed to parse statement') &&
|
||||
w.includes('ALTER TABLE')
|
||||
);
|
||||
expect(hasAlterWarning).toBe(true);
|
||||
});
|
||||
});
|
||||
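The tests above lean on a regex fallback for ALTER TABLE ... ADD CONSTRAINT ... FOREIGN KEY statements that the SQL parser rejects (for example the doubled DEFERRABLE). The sketch below shows one plausible shape for such a fallback; the interface and function names are illustrative, not the production code:

interface ExtractedForeignKey {
    name: string;
    sourceSchema: string;
    sourceTable: string;
    sourceColumn: string;
    targetSchema: string;
    targetTable: string;
    targetColumn: string;
}

const extractAlterTableFk = (stmt: string): ExtractedForeignKey | null => {
    // Tolerates ONLY, optional schema qualification, quotes, and trailing
    // options such as ON DELETE ... / DEFERRABLE ... that the parser chokes on.
    const re =
        /ALTER\s+TABLE\s+(?:ONLY\s+)?(?:"?(\w+)"?\.)?"?(\w+)"?\s+ADD\s+CONSTRAINT\s+"?(\w+)"?\s+FOREIGN\s+KEY\s*\(\s*"?(\w+)"?\s*\)\s*REFERENCES\s+(?:"?(\w+)"?\.)?"?(\w+)"?\s*\(\s*"?(\w+)"?\s*\)/i;
    const m = stmt.match(re);
    if (!m) return null;
    return {
        name: m[3],
        sourceSchema: m[1] ?? 'public',
        sourceTable: m[2],
        sourceColumn: m[4],
        targetSchema: m[5] ?? 'public',
        targetTable: m[6],
        targetColumn: m[7],
    };
};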
@@ -0,0 +1,84 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('Table with Comment Before CREATE TABLE', () => {
|
||||
it('should parse table with single-line comment before CREATE TABLE', async () => {
|
||||
const sql = `
|
||||
-- Junction table for tracking which crystals power which enchantments.
|
||||
CREATE TABLE crystal_enchantments (
|
||||
crystal_id UUID NOT NULL REFERENCES crystals(id) ON DELETE CASCADE,
|
||||
enchantment_id UUID NOT NULL REFERENCES enchantments(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (crystal_id, enchantment_id)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
console.log('\nDebug info:');
|
||||
console.log('Tables found:', result.tables.length);
|
||||
console.log(
|
||||
'Table names:',
|
||||
result.tables.map((t) => t.name)
|
||||
);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('crystal_enchantments');
|
||||
expect(result.tables[0].columns).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should handle various comment formats before CREATE TABLE', async () => {
|
||||
const sql = `
|
||||
-- This is a wizards table
|
||||
CREATE TABLE wizards (
|
||||
id UUID PRIMARY KEY
|
||||
);
|
||||
|
||||
-- This table stores
|
||||
-- multiple artifacts
|
||||
CREATE TABLE artifacts (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100)
|
||||
);
|
||||
|
||||
/* This is a multi-line
|
||||
comment before table */
|
||||
CREATE TABLE quests (
|
||||
id BIGSERIAL PRIMARY KEY
|
||||
);
|
||||
|
||||
-- Comment 1
|
||||
-- Comment 2
|
||||
-- Comment 3
|
||||
CREATE TABLE spell_schools (
|
||||
id INTEGER PRIMARY KEY
|
||||
);`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(4);
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
expect(tableNames).toEqual([
|
||||
'artifacts',
|
||||
'quests',
|
||||
'spell_schools',
|
||||
'wizards',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should not confuse comment-only statements with tables', async () => {
|
||||
const sql = `
|
||||
-- This is just a comment, not a table
|
||||
-- Even though it mentions CREATE TABLE in the comment
|
||||
-- It should not be parsed as a table
|
||||
|
||||
CREATE TABLE ancient_tome (
|
||||
id INTEGER PRIMARY KEY
|
||||
);
|
||||
|
||||
-- Another standalone comment`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('ancient_tome');
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,113 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('Comment removal before formatting', () => {
|
||||
it('should remove single-line comments', async () => {
|
||||
const sql = `
|
||||
-- This is a comment that will be removed
|
||||
CREATE TABLE magic_items (
|
||||
item_id INTEGER PRIMARY KEY, -- unique identifier
|
||||
spell_power VARCHAR(100) -- mystical energy level
|
||||
);`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('magic_items');
|
||||
expect(result.tables[0].columns).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should remove multi-line comments', async () => {
|
||||
const sql = `
|
||||
/* This is a multi-line comment
|
||||
that spans multiple lines
|
||||
and will be removed */
|
||||
CREATE TABLE wizard_inventory (
|
||||
wizard_id INTEGER PRIMARY KEY,
|
||||
/* Stores the magical
|
||||
artifacts collected */
|
||||
artifact_name VARCHAR(100)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('wizard_inventory');
|
||||
});
|
||||
|
||||
it('should preserve strings that contain comment-like patterns', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE potion_recipes (
|
||||
recipe_id INTEGER PRIMARY KEY,
|
||||
brewing_note VARCHAR(100) DEFAULT '--shake before use',
|
||||
ingredient_source VARCHAR(200) DEFAULT 'https://alchemy.store',
|
||||
instructions TEXT DEFAULT '/* mix carefully */'
|
||||
);`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].columns).toHaveLength(4);
|
||||
|
||||
// Check that defaults are preserved
|
||||
const brewingNoteCol = result.tables[0].columns.find(
|
||||
(c) => c.name === 'brewing_note'
|
||||
);
|
||||
expect(brewingNoteCol?.default).toBeDefined();
|
||||
});
|
||||
|
||||
it('should handle complex scenarios with comments before tables', async () => {
|
||||
const sql = `
|
||||
-- Dragon types catalog
|
||||
CREATE TABLE dragons (dragon_id INTEGER PRIMARY KEY);
|
||||
|
||||
/* Knights registry
|
||||
for the kingdom */
|
||||
CREATE TABLE knights (knight_id INTEGER PRIMARY KEY);
|
||||
|
||||
-- Battle records junction
|
||||
-- Tracks dragon-knight encounters
|
||||
CREATE TABLE dragon_battles (
|
||||
dragon_id INTEGER REFERENCES dragons(dragon_id),
|
||||
knight_id INTEGER REFERENCES knights(knight_id),
|
||||
PRIMARY KEY (dragon_id, knight_id)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(3);
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
expect(tableNames).toEqual(['dragon_battles', 'dragons', 'knights']);
|
||||
});
|
||||
|
||||
it('should handle the exact forth example scenario', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE spell_books (
|
||||
book_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
title VARCHAR(100) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE spells (
|
||||
spell_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
incantation VARCHAR(255) NOT NULL,
|
||||
effect TEXT, -- Magical effect description
|
||||
element VARCHAR(50) NOT NULL -- fire, water, earth, air
|
||||
);
|
||||
|
||||
-- Junction table linking spells to their books.
|
||||
CREATE TABLE book_spells (
|
||||
book_id UUID NOT NULL REFERENCES spell_books(book_id) ON DELETE CASCADE,
|
||||
spell_id UUID NOT NULL REFERENCES spells(spell_id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (book_id, spell_id)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(3);
|
||||
expect(result.tables.map((t) => t.name).sort()).toEqual([
|
||||
'book_spells',
|
||||
'spell_books',
|
||||
'spells',
|
||||
]);
|
||||
});
|
||||
});
|
||||
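The string-preservation test above is the interesting case: comments must be stripped without touching '--' or '/* */' sequences that live inside string literals. A minimal character-walking sketch of that behaviour, assuming only single-quoted strings matter; not necessarily how fromPostgres pre-processes its input:

const stripSqlComments = (sql: string): string => {
    let out = '';
    let i = 0;
    while (i < sql.length) {
        const rest = sql.slice(i);
        if (sql[i] === "'") {
            // Copy a single-quoted literal verbatim (handles '' escapes).
            const match = rest.match(/^'(?:[^']|'')*'/);
            const literal = match ? match[0] : sql[i];
            out += literal;
            i += literal.length;
        } else if (rest.startsWith('--')) {
            const nl = sql.indexOf('\n', i);
            i = nl === -1 ? sql.length : nl; // keep the newline itself
        } else if (rest.startsWith('/*')) {
            const close = sql.indexOf('*/', i + 2);
            i = close === -1 ? sql.length : close + 2;
        } else {
            out += sql[i];
            i += 1;
        }
    }
    return out;
};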
@@ -0,0 +1,247 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('Full Database Import - Quest Management System', () => {
|
||||
it('should parse all 20 tables including quest_sample_rewards', async () => {
|
||||
const sql = `-- Quest Management System Database
|
||||
-- Enums for quest system
|
||||
CREATE TYPE quest_status AS ENUM ('draft', 'active', 'on_hold', 'completed', 'abandoned');
|
||||
CREATE TYPE difficulty_level AS ENUM ('novice', 'apprentice', 'journeyman', 'expert', 'master');
|
||||
CREATE TYPE reward_type AS ENUM ('gold', 'item', 'experience', 'reputation', 'special');
|
||||
CREATE TYPE adventurer_rank AS ENUM ('bronze', 'silver', 'gold', 'platinum', 'legendary');
|
||||
CREATE TYPE region_climate AS ENUM ('temperate', 'arctic', 'desert', 'tropical', 'magical');
|
||||
|
||||
CREATE TABLE adventurers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
rank adventurer_rank DEFAULT 'bronze',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE guild_masters (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
specialization VARCHAR(100),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE regions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(100) NOT NULL,
|
||||
climate region_climate NOT NULL,
|
||||
danger_level INTEGER CHECK (danger_level BETWEEN 1 AND 10),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE outposts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
region_id UUID REFERENCES regions(id),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
location_coordinates POINT,
|
||||
is_active BOOLEAN DEFAULT true,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE scouts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
outpost_id UUID REFERENCES outposts(id),
|
||||
scouting_range INTEGER DEFAULT 50,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE scout_region_assignments (
|
||||
scout_id UUID REFERENCES scouts(id),
|
||||
region_id UUID REFERENCES regions(id),
|
||||
assigned_date DATE NOT NULL,
|
||||
PRIMARY KEY (scout_id, region_id)
|
||||
);
|
||||
|
||||
CREATE TABLE quest_givers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
title VARCHAR(100),
|
||||
location VARCHAR(255),
|
||||
reputation_required INTEGER DEFAULT 0,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE quest_templates (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
title VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
difficulty difficulty_level NOT NULL,
|
||||
base_reward_gold INTEGER DEFAULT 0,
|
||||
quest_giver_id UUID REFERENCES quest_givers(id),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE quests (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
quest_template_id UUID REFERENCES quest_templates(id),
|
||||
title VARCHAR(255) NOT NULL,
|
||||
status quest_status DEFAULT 'draft',
|
||||
reward_multiplier DECIMAL(3,2) DEFAULT 1.0,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE quest_sample_rewards (
|
||||
quest_template_id UUID REFERENCES quest_templates(id),
|
||||
reward_id UUID REFERENCES rewards(id),
|
||||
PRIMARY KEY (quest_template_id, reward_id)
|
||||
);
|
||||
|
||||
CREATE TABLE quest_rotations (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
rotation_name VARCHAR(100) NOT NULL,
|
||||
start_date DATE NOT NULL,
|
||||
end_date DATE NOT NULL,
|
||||
is_active BOOLEAN DEFAULT false,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE rotation_quests (
|
||||
rotation_id UUID REFERENCES quest_rotations(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
day_of_week INTEGER CHECK (day_of_week BETWEEN 1 AND 7),
|
||||
PRIMARY KEY (rotation_id, quest_id, day_of_week)
|
||||
);
|
||||
|
||||
CREATE TABLE contracts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
adventurer_id UUID REFERENCES adventurers(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
status quest_status DEFAULT 'active',
|
||||
started_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
completed_at TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE completion_events (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
scout_id UUID REFERENCES scouts(id),
|
||||
verification_notes TEXT,
|
||||
event_timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE bounties (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
amount_gold INTEGER NOT NULL,
|
||||
payment_status VARCHAR(50) DEFAULT 'pending',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE guild_ledgers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
entry_type VARCHAR(50) NOT NULL,
|
||||
amount INTEGER NOT NULL,
|
||||
balance_after INTEGER NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE reputation_logs (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
adventurer_id UUID REFERENCES adventurers(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
reputation_change INTEGER NOT NULL,
|
||||
reason VARCHAR(255),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE quest_suspensions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
suspension_date DATE NOT NULL,
|
||||
reason VARCHAR(255),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE guild_master_actions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
guild_master_id UUID REFERENCES guild_masters(id),
|
||||
action_type VARCHAR(100) NOT NULL,
|
||||
target_table VARCHAR(100),
|
||||
target_id UUID,
|
||||
details JSONB,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE rewards (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
adventurer_id UUID REFERENCES adventurers(id),
|
||||
reward_type reward_type NOT NULL,
|
||||
value INTEGER NOT NULL,
|
||||
claimed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);`;
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
console.log('\nParsing results:');
|
||||
console.log(`- Tables found: ${result.tables.length}`);
|
||||
console.log(`- Enums found: ${result.enums?.length || 0}`);
|
||||
console.log(`- Warnings: ${result.warnings?.length || 0}`);
|
||||
|
||||
// List all table names
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
console.log('\nTable names:');
|
||||
tableNames.forEach((name, i) => {
|
||||
console.log(` ${i + 1}. ${name}`);
|
||||
});
|
||||
|
||||
// Should have all 20 tables
|
||||
expect(result.tables).toHaveLength(20);
|
||||
|
||||
// Check for quest_sample_rewards specifically
|
||||
const questSampleRewards = result.tables.find(
|
||||
(t) => t.name === 'quest_sample_rewards'
|
||||
);
|
||||
expect(questSampleRewards).toBeDefined();
|
||||
|
||||
if (questSampleRewards) {
|
||||
console.log('\nquest_sample_rewards table details:');
|
||||
console.log(`- Columns: ${questSampleRewards.columns.length}`);
|
||||
questSampleRewards.columns.forEach((col) => {
|
||||
console.log(
|
||||
` - ${col.name}: ${col.type} (nullable: ${col.nullable})`
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
// Expected tables
|
||||
const expectedTables = [
|
||||
'adventurers',
|
||||
'guild_masters',
|
||||
'regions',
|
||||
'outposts',
|
||||
'scouts',
|
||||
'scout_region_assignments',
|
||||
'quest_givers',
|
||||
'quest_templates',
|
||||
'quests',
|
||||
'quest_sample_rewards',
|
||||
'quest_rotations',
|
||||
'rotation_quests',
|
||||
'contracts',
|
||||
'completion_events',
|
||||
'bounties',
|
||||
'guild_ledgers',
|
||||
'reputation_logs',
|
||||
'quest_suspensions',
|
||||
'guild_master_actions',
|
||||
'rewards',
|
||||
];
|
||||
|
||||
expect(tableNames).toEqual(expectedTables.sort());
|
||||
|
||||
// Check that quest_sample_rewards has the expected columns
|
||||
expect(questSampleRewards!.columns).toHaveLength(2);
|
||||
const columnNames = questSampleRewards!.columns
|
||||
.map((c) => c.name)
|
||||
.sort();
|
||||
expect(columnNames).toEqual(['quest_template_id', 'reward_id']);
|
||||
});
|
||||
});
|
||||
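quest_sample_rewards references rewards(id) before rewards is created, so the 20-table expectation only holds if forward references survive. One hedged way to read that requirement is a two-pass approach, sketched below with an illustrative helper name:

const collectTableNames = (ddl: string): Set<string> => {
    const names = new Set<string>();
    const re =
        /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(?:"?\w+"?\.)?"?(\w+)"?/gi;
    for (const match of ddl.matchAll(re)) {
        names.add(match[1]);
    }
    return names;
};

// Pass 1: collectTableNames(sql) yields all 20 names, including 'rewards'.
// Pass 2: resolve each inline REFERENCES against that set, so the FK on
// quest_sample_rewards.reward_id is kept even though rewards is defined later.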
@@ -0,0 +1,157 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('Complex enum scenarios from real files', () => {
|
||||
it('should handle multiple schema-qualified enums with various syntax issues', async () => {
|
||||
// This test mimics the issues found in postgres_six_example_sql_script.sql
|
||||
const sql = `
|
||||
CREATE TYPE "public"."wizard_status" AS ENUM('active', 'suspended', 'banned', 'inactive');
|
||||
CREATE TYPE "public"."magic_school" AS ENUM('fire', 'water', 'earth', 'air', 'spirit');
|
||||
CREATE TYPE "public"."spell_tier" AS ENUM('cantrip', 'novice', 'adept', 'expert', 'master', 'legendary');
|
||||
CREATE TYPE "public"."potion_type" AS ENUM('healing', 'mana', 'strength', 'speed', 'invisibility', 'flying', 'resistance');
|
||||
CREATE TYPE "public"."creature_type" AS ENUM('beast', 'dragon', 'elemental', 'undead', 'demon', 'fey', 'construct', 'aberration');
|
||||
CREATE TYPE "public"."quest_status" AS ENUM('available', 'accepted', 'in_progress', 'completed', 'failed', 'abandoned');
|
||||
CREATE TYPE "public"."item_rarity" AS ENUM('common', 'uncommon', 'rare', 'epic', 'legendary', 'mythic');
|
||||
|
||||
CREATE TABLE "wizard_account" (
|
||||
"id" text PRIMARY KEY NOT NULL,
|
||||
"wizardId" text NOT NULL,
|
||||
"account_id" text NOT NULL,
|
||||
"provider_id" text NOT NULL,
|
||||
"created_at" timestamp with time zone NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE "wizard" (
|
||||
"id" text PRIMARY KEY NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
"username" text,
|
||||
"email" text NOT NULL,
|
||||
"email_verified" boolean DEFAULT false NOT NULL,
|
||||
"status""wizard_status" DEFAULT 'active' NOT NULL,
|
||||
"primary_school""magic_school" DEFAULT 'fire' NOT NULL,
|
||||
"created_at" timestamp with time zone NOT NULL,
|
||||
CONSTRAINT "wizard_username_unique" UNIQUE("username"),
|
||||
CONSTRAINT "wizard_email_unique" UNIQUE("email")
|
||||
);
|
||||
|
||||
CREATE TABLE "spells" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"wizard_id" text NOT NULL,
|
||||
"name" varchar(255) NOT NULL,
|
||||
"tier""spell_tier" DEFAULT 'cantrip' NOT NULL,
|
||||
"school""magic_school" DEFAULT 'fire' NOT NULL,
|
||||
"mana_cost" integer DEFAULT 10 NOT NULL,
|
||||
"metadata" jsonb DEFAULT '{}',
|
||||
"created_at" timestamp with time zone DEFAULT now()
|
||||
);
|
||||
|
||||
CREATE TABLE "items" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
"description" text,
|
||||
"rarity""item_rarity" DEFAULT 'common' NOT NULL,
|
||||
"metadata" jsonb DEFAULT '{}': :jsonb,
|
||||
"created_at" timestamp DEFAULT now() NOT NULL
|
||||
);
|
||||
|
||||
ALTER TABLE "wizard_account" ADD CONSTRAINT "wizard_account_wizardId_wizard_id_fk"
|
||||
FOREIGN KEY ("wizardId") REFERENCES "public"."wizard"("id") ON DELETE cascade ON UPDATE no action;
|
||||
ALTER TABLE "spells" ADD CONSTRAINT "spells_wizard_id_wizard_id_fk"
|
||||
FOREIGN KEY ("wizard_id") REFERENCES "public"."wizard"("id") ON DELETE cascade ON UPDATE no action;
|
||||
`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
// Check enum parsing
|
||||
console.log('\n=== ENUMS FOUND ===');
|
||||
console.log('Count:', result.enums?.length || 0);
|
||||
if (result.enums) {
|
||||
result.enums.forEach((e) => {
|
||||
console.log(` - ${e.name}: ${e.values.length} values`);
|
||||
});
|
||||
}
|
||||
|
||||
// Should find all 7 enums
|
||||
expect(result.enums).toHaveLength(7);
|
||||
|
||||
// Check specific enums
|
||||
const wizardStatus = result.enums?.find(
|
||||
(e) => e.name === 'wizard_status'
|
||||
);
|
||||
expect(wizardStatus).toBeDefined();
|
||||
expect(wizardStatus?.values).toEqual([
|
||||
'active',
|
||||
'suspended',
|
||||
'banned',
|
||||
'inactive',
|
||||
]);
|
||||
|
||||
const itemRarity = result.enums?.find((e) => e.name === 'item_rarity');
|
||||
expect(itemRarity).toBeDefined();
|
||||
expect(itemRarity?.values).toEqual([
|
||||
'common',
|
||||
'uncommon',
|
||||
'rare',
|
||||
'epic',
|
||||
'legendary',
|
||||
'mythic',
|
||||
]);
|
||||
|
||||
// Check table parsing
|
||||
console.log('\n=== TABLES FOUND ===');
|
||||
console.log('Count:', result.tables.length);
|
||||
console.log('Names:', result.tables.map((t) => t.name).join(', '));
|
||||
|
||||
// Should find all 4 tables
|
||||
expect(result.tables).toHaveLength(4);
|
||||
expect(result.tables.map((t) => t.name).sort()).toEqual([
|
||||
'items',
|
||||
'spells',
|
||||
'wizard',
|
||||
'wizard_account',
|
||||
]);
|
||||
|
||||
// Check warnings for syntax issues
|
||||
console.log('\n=== WARNINGS ===');
|
||||
console.log('Count:', result.warnings?.length || 0);
|
||||
if (result.warnings) {
|
||||
result.warnings.forEach((w) => {
|
||||
console.log(` - ${w}`);
|
||||
});
|
||||
}
|
||||
|
||||
// Should have warnings about custom types and parsing failures
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(result.warnings!.length).toBeGreaterThan(0);
|
||||
|
||||
// Check that the tables with missing spaces in column definitions still got parsed
|
||||
const wizardTable = result.tables.find((t) => t.name === 'wizard');
|
||||
expect(wizardTable).toBeDefined();
|
||||
|
||||
const spellsTable = result.tables.find((t) => t.name === 'spells');
|
||||
expect(spellsTable).toBeDefined();
|
||||
});
|
||||
|
||||
it('should parse enums used in column definitions even with syntax errors', async () => {
|
||||
const sql = `
|
||||
CREATE TYPE "public"."dragon_element" AS ENUM('fire', 'ice', 'lightning', 'poison', 'shadow');
|
||||
|
||||
CREATE TABLE "dragons" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
"name" varchar(255) NOT NULL,
|
||||
"element""dragon_element" NOT NULL,
|
||||
"power_level" integer DEFAULT 100,
|
||||
"metadata" jsonb DEFAULT '{}'::jsonb
|
||||
);`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
// Enum should be parsed
|
||||
expect(result.enums).toHaveLength(1);
|
||||
expect(result.enums?.[0].name).toBe('dragon_element');
|
||||
|
||||
// Table might have issues due to missing space
|
||||
console.log('Tables:', result.tables.length);
|
||||
console.log('Warnings:', result.warnings);
|
||||
});
|
||||
});
|
||||
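Several columns above are written without a space between the quoted column name and the quoted enum type ("status""wizard_status"). A possible normalization, shown only as a sketch and assuming identifiers never contain escaped double quotes:

const fixMissingTypeSpace = (line: string): string =>
    // '"status""wizard_status"' -> '"status" "wizard_status"'
    line.replace(/"(\w+)""(\w+)"/g, '"$1" "$2"');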
@@ -0,0 +1,74 @@
import { describe, it, expect } from 'vitest';
import { fromPostgres } from '../postgresql';

describe('Minimal junction table test', () => {
    it('should parse junction table with exact SQL structure', async () => {
        // Junction table for tracking which dragons have been tamed by which dragon masters
        const sql = `-- Junction table for tracking dragon-master bonds.
CREATE TABLE dragon_bonds (
    dragon_master_id UUID NOT NULL REFERENCES dragon_masters(id) ON DELETE CASCADE,
    dragon_id UUID NOT NULL REFERENCES dragons(id) ON DELETE CASCADE,
    PRIMARY KEY (dragon_master_id, dragon_id)
);`;

        console.log('Testing with SQL:', sql);

        const result = await fromPostgres(sql);

        console.log('Result:', {
            tableCount: result.tables.length,
            tables: result.tables.map((t) => ({
                name: t.name,
                columns: t.columns.length,
            })),
            warnings: result.warnings,
        });

        expect(result.tables).toHaveLength(1);
        expect(result.tables[0].name).toBe('dragon_bonds');
    });

    it('should parse without the comment', async () => {
        const sql = `CREATE TABLE dragon_bonds (
    dragon_master_id UUID NOT NULL REFERENCES dragon_masters(id) ON DELETE CASCADE,
    dragon_id UUID NOT NULL REFERENCES dragons(id) ON DELETE CASCADE,
    PRIMARY KEY (dragon_master_id, dragon_id)
);`;

        const result = await fromPostgres(sql);

        expect(result.tables).toHaveLength(1);
        expect(result.tables[0].name).toBe('dragon_bonds');
    });

    it('should parse with dependencies', async () => {
        const sql = `
            CREATE TABLE dragon_masters (
                id UUID PRIMARY KEY
            );

            CREATE TABLE dragons (
                id UUID PRIMARY KEY
            );

            -- Junction table for tracking dragon-master bonds.
            CREATE TABLE dragon_bonds (
                dragon_master_id UUID NOT NULL REFERENCES dragon_masters(id) ON DELETE CASCADE,
                dragon_id UUID NOT NULL REFERENCES dragons(id) ON DELETE CASCADE,
                PRIMARY KEY (dragon_master_id, dragon_id)
            );`;

        const result = await fromPostgres(sql);

        console.log('With dependencies:', {
            tableCount: result.tables.length,
            tableNames: result.tables.map((t) => t.name),
        });

        expect(result.tables).toHaveLength(3);
        const dragonBonds = result.tables.find(
            (t) => t.name === 'dragon_bonds'
        );
        expect(dragonBonds).toBeDefined();
    });
});
@@ -0,0 +1,66 @@
import { describe, it, expect } from 'vitest';
import { fromPostgres } from '../postgresql';

describe('Dragon Status Enum Test', () => {
    it('should parse dragon_status enum specifically', async () => {
        const sql = `
            CREATE TYPE dragon_status AS ENUM ('sleeping', 'hunting', 'guarding', 'hibernating', 'enraged');

            CREATE TABLE dragons (
                id UUID PRIMARY KEY,
                status dragon_status DEFAULT 'sleeping'
            );`;

        const result = await fromPostgres(sql);

        // Check that the enum was parsed
        expect(result.enums).toBeDefined();
        expect(result.enums).toHaveLength(1);
        expect(result.enums![0].name).toBe('dragon_status');
        expect(result.enums![0].values).toEqual([
            'sleeping',
            'hunting',
            'guarding',
            'hibernating',
            'enraged',
        ]);

        // Check that the table uses the enum
        const table = result.tables.find((t) => t.name === 'dragons');
        expect(table).toBeDefined();

        const statusColumn = table!.columns.find((c) => c.name === 'status');
        expect(statusColumn).toBeDefined();
        expect(statusColumn!.type).toBe('dragon_status');
    });

    it('should handle multiple enums including dragon_status', async () => {
        const sql = `
            CREATE TYPE dragon_status AS ENUM ('sleeping', 'hunting', 'guarding', 'hibernating', 'enraged');
            CREATE TYPE spell_power AS ENUM ('weak', 'strong');
            CREATE TYPE magic_element AS ENUM ('fire', 'ice', 'both');

            CREATE TABLE dragons (
                id UUID PRIMARY KEY,
                status dragon_status DEFAULT 'sleeping',
                breath_power spell_power NOT NULL,
                breath_element magic_element NOT NULL
            );`;

        const result = await fromPostgres(sql);

        console.log(
            'Parsed enums:',
            result.enums?.map((e) => e.name)
        );

        expect(result.enums).toHaveLength(3);

        // Specifically check for dragon_status
        const dragonStatus = result.enums!.find(
            (e) => e.name === 'dragon_status'
        );
        expect(dragonStatus).toBeDefined();
        expect(dragonStatus!.name).toBe('dragon_status');
    });
});
@@ -0,0 +1,37 @@
import { describe, it, expect } from 'vitest';
import { fromPostgres } from '../postgresql';

describe('Empty table parsing', () => {
    it('should parse empty tables', async () => {
        const sql = `CREATE TABLE empty_table ();`;

        const result = await fromPostgres(sql);

        expect(result.tables).toHaveLength(1);
        expect(result.tables[0].name).toBe('empty_table');
        expect(result.tables[0].columns).toHaveLength(0);
    });

    it('should parse mix of empty and non-empty tables', async () => {
        const sql = `
            CREATE TABLE normal_table (
                id INTEGER PRIMARY KEY
            );

            CREATE TABLE empty_table ();

            CREATE TABLE another_table (
                name VARCHAR(100)
            );`;

        const result = await fromPostgres(sql);

        expect(result.tables).toHaveLength(3);
        const tableNames = result.tables.map((t) => t.name).sort();
        expect(tableNames).toEqual([
            'another_table',
            'empty_table',
            'normal_table',
        ]);
    });
});
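A zero-column table is easy to miss if the parser insists on at least one column definition. As a sketch only (not the actual fallback in fromPostgres), an empty column list can be recognized directly:

const matchEmptyTable = (stmt: string): string | null => {
    // Returns the table name for "CREATE TABLE <name> ();", else null.
    const m = stmt.match(
        /CREATE\s+TABLE\s+(?:"?\w+"?\.)?"?(\w+)"?\s*\(\s*\)\s*;?/i
    );
    return m ? m[1] : null;
};

// matchEmptyTable('CREATE TABLE empty_table ();') === 'empty_table'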
@@ -0,0 +1,160 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
import { convertToChartDBDiagram } from '../../../common';
|
||||
import { DatabaseType } from '@/lib/domain/database-type';
|
||||
|
||||
describe('Complete Enum Test with Fantasy Example', () => {
|
||||
it('should parse all enums and use them in tables', async () => {
|
||||
const sql = `
|
||||
-- Fantasy realm database with multiple enum types
|
||||
CREATE TYPE wizard_rank AS ENUM ('apprentice', 'journeyman', 'master', 'archmage', 'legendary');
|
||||
CREATE TYPE spell_frequency AS ENUM ('hourly', 'daily');
|
||||
CREATE TYPE magic_school AS ENUM ('fire', 'water', 'earth');
|
||||
CREATE TYPE quest_status AS ENUM ('pending', 'active', 'completed');
|
||||
CREATE TYPE dragon_mood AS ENUM ('happy', 'grumpy', 'sleepy');
|
||||
|
||||
CREATE TABLE wizards (
|
||||
id UUID PRIMARY KEY,
|
||||
name VARCHAR(100),
|
||||
rank wizard_rank DEFAULT 'apprentice'
|
||||
);
|
||||
|
||||
CREATE TABLE spellbooks (
|
||||
id UUID PRIMARY KEY,
|
||||
wizard_id UUID REFERENCES wizards(id),
|
||||
cast_frequency spell_frequency NOT NULL,
|
||||
primary_school magic_school NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE dragon_quests (
|
||||
id UUID PRIMARY KEY,
|
||||
status quest_status DEFAULT 'pending',
|
||||
dragon_mood dragon_mood
|
||||
);
|
||||
`;
|
||||
|
||||
// Parse the SQL
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
// Check enums
|
||||
console.log('\nEnum parsing results:');
|
||||
console.log(`Found ${result.enums?.length || 0} enum types`);
|
||||
|
||||
if (result.enums) {
|
||||
result.enums.forEach((e) => {
|
||||
console.log(` - ${e.name}: ${e.values.length} values`);
|
||||
});
|
||||
}
|
||||
|
||||
// Expected enums
|
||||
const expectedEnums = [
|
||||
'wizard_rank',
|
||||
'spell_frequency',
|
||||
'magic_school',
|
||||
'quest_status',
|
||||
'dragon_mood',
|
||||
];
|
||||
|
||||
// Check which are missing
|
||||
const foundEnumNames = result.enums?.map((e) => e.name) || [];
|
||||
const missingEnums = expectedEnums.filter(
|
||||
(e) => !foundEnumNames.includes(e)
|
||||
);
|
||||
|
||||
if (missingEnums.length > 0) {
|
||||
console.log('\nMissing enums:', missingEnums);
|
||||
|
||||
// Let's check if they're in the SQL at all
|
||||
missingEnums.forEach((enumName) => {
|
||||
const regex = new RegExp(`CREATE\\s+TYPE\\s+${enumName}`, 'i');
|
||||
if (regex.test(sql)) {
|
||||
console.log(
|
||||
` ${enumName} exists in SQL but wasn't parsed`
|
||||
);
|
||||
|
||||
// Find the line
|
||||
const lines = sql.split('\n');
|
||||
const lineIndex = lines.findIndex((line) =>
|
||||
regex.test(line)
|
||||
);
|
||||
if (lineIndex !== -1) {
|
||||
console.log(
|
||||
` Line ${lineIndex + 1}: ${lines[lineIndex].trim()}`
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Convert to diagram
|
||||
const diagram = convertToChartDBDiagram(
|
||||
result,
|
||||
DatabaseType.POSTGRESQL,
|
||||
DatabaseType.POSTGRESQL
|
||||
);
|
||||
|
||||
// Check custom types in diagram
|
||||
console.log(
|
||||
'\nCustom types in diagram:',
|
||||
diagram.customTypes?.length || 0
|
||||
);
|
||||
|
||||
// Check wizards table
|
||||
const wizardsTable = diagram.tables?.find((t) => t.name === 'wizards');
|
||||
if (wizardsTable) {
|
||||
console.log('\nWizards table:');
|
||||
const rankField = wizardsTable.fields.find(
|
||||
(f) => f.name === 'rank'
|
||||
);
|
||||
if (rankField) {
|
||||
console.log(
|
||||
` rank field type: ${rankField.type.name} (id: ${rankField.type.id})`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Check spellbooks table
|
||||
const spellbooksTable = diagram.tables?.find(
|
||||
(t) => t.name === 'spellbooks'
|
||||
);
|
||||
if (spellbooksTable) {
|
||||
console.log('\nSpellbooks table:');
|
||||
const frequencyField = spellbooksTable.fields.find(
|
||||
(f) => f.name === 'cast_frequency'
|
||||
);
|
||||
if (frequencyField) {
|
||||
console.log(
|
||||
` cast_frequency field type: ${frequencyField.type.name}`
|
||||
);
|
||||
}
|
||||
|
||||
const schoolField = spellbooksTable.fields.find(
|
||||
(f) => f.name === 'primary_school'
|
||||
);
|
||||
if (schoolField) {
|
||||
console.log(
|
||||
` primary_school field type: ${schoolField.type.name}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Assertions
|
||||
expect(result.enums).toBeDefined();
|
||||
expect(result.enums).toHaveLength(5);
|
||||
expect(diagram.customTypes).toHaveLength(5);
|
||||
|
||||
// Check that wizard_rank is present
|
||||
const wizardRankEnum = result.enums!.find(
|
||||
(e) => e.name === 'wizard_rank'
|
||||
);
|
||||
expect(wizardRankEnum).toBeDefined();
|
||||
|
||||
// Check that the rank field uses wizard_rank type
|
||||
if (wizardsTable) {
|
||||
const rankField = wizardsTable.fields.find(
|
||||
(f) => f.name === 'rank'
|
||||
);
|
||||
expect(rankField?.type.name.toLowerCase()).toBe('wizard_rank');
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,64 @@
import { describe, it, expect } from 'vitest';
import { fromPostgres } from '../postgresql';
import { convertToChartDBDiagram } from '../../../common';
import { DatabaseType } from '@/lib/domain/database-type';

describe('Enum to Diagram Conversion', () => {
    it('should convert all enums and use them in table columns', async () => {
        const sql = `
            CREATE TYPE wizard_rank AS ENUM ('apprentice', 'journeyman', 'master', 'archmage', 'legendary');
            CREATE TYPE spell_frequency AS ENUM ('daily', 'weekly');
            CREATE TYPE magic_school AS ENUM ('fire', 'water', 'both');

            CREATE TABLE spellbooks (
                id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
                wizard_id UUID NOT NULL,
                cast_frequency spell_frequency NOT NULL,
                primary_school magic_school NOT NULL,
                rank wizard_rank DEFAULT 'apprentice',
                created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
            );`;

        // Parse SQL
        const parserResult = await fromPostgres(sql);

        // Should find all 3 enums
        expect(parserResult.enums).toHaveLength(3);

        // Convert to diagram
        const diagram = convertToChartDBDiagram(
            parserResult,
            DatabaseType.POSTGRESQL,
            DatabaseType.POSTGRESQL
        );

        // Should have 3 custom types
        expect(diagram.customTypes).toHaveLength(3);

        // Check spellbooks table
        const spellbooksTable = diagram.tables?.find(
            (t) => t.name === 'spellbooks'
        );
        expect(spellbooksTable).toBeDefined();

        // Check that enum columns use the correct types
        const rankField = spellbooksTable!.fields.find(
            (f) => f.name === 'rank'
        );
        expect(rankField).toBeDefined();
        expect(rankField!.type.name).toBe('wizard_rank');
        expect(rankField!.type.id).toBe('wizard_rank');

        const frequencyField = spellbooksTable!.fields.find(
            (f) => f.name === 'cast_frequency'
        );
        expect(frequencyField).toBeDefined();
        expect(frequencyField!.type.name).toBe('spell_frequency');

        const schoolField = spellbooksTable!.fields.find(
            (f) => f.name === 'primary_school'
        );
        expect(schoolField).toBeDefined();
        expect(schoolField!.type.name).toBe('magic_school');
    });
});
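The type.name / type.id assertions above suggest that each parsed enum becomes one diagram custom type keyed by its name. The sketch below illustrates that mapping with assumed, simplified shapes; the structures actually used by convertToChartDBDiagram may differ:

interface ParsedEnum {
    name: string;
    values: string[];
}

interface DiagramCustomType {
    id: string;
    name: string;
    values: string[];
}

const toCustomTypes = (enums: ParsedEnum[]): DiagramCustomType[] =>
    enums.map((e) => ({
        id: e.name, // matches the `type.id === 'wizard_rank'` assertion above
        name: e.name,
        values: e.values,
    }));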
@@ -0,0 +1,133 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('PostgreSQL Enum Type Parsing', () => {
|
||||
    it('should parse CREATE TYPE ENUM statements', async () => {
        const sql = `
        CREATE TYPE quest_status AS ENUM ('pending', 'in_progress', 'completed');
        CREATE TYPE difficulty_level AS ENUM ('easy', 'medium', 'hard');

        CREATE TABLE adventurers (
            id UUID PRIMARY KEY,
            name VARCHAR(255) NOT NULL
        );

        CREATE TABLE quests (
            id UUID PRIMARY KEY,
            adventurer_id UUID REFERENCES adventurers(id),
            status quest_status DEFAULT 'pending',
            difficulty difficulty_level NOT NULL
        );`;

        const result = await fromPostgres(sql);

        // Check that enum types were parsed
        expect(result.enums).toBeDefined();
        expect(result.enums).toHaveLength(2);

        // Check first enum
        const questStatus = result.enums!.find(
            (e) => e.name === 'quest_status'
        );
        expect(questStatus).toBeDefined();
        expect(questStatus!.values).toEqual([
            'pending',
            'in_progress',
            'completed',
        ]);

        // Check second enum
        const difficultyLevel = result.enums!.find(
            (e) => e.name === 'difficulty_level'
        );
        expect(difficultyLevel).toBeDefined();
        expect(difficultyLevel!.values).toEqual(['easy', 'medium', 'hard']);

        // Check that tables were parsed
        expect(result.tables).toHaveLength(2);

        // Check that columns have the correct enum types
        const questsTable = result.tables.find((t) => t.name === 'quests');
        expect(questsTable).toBeDefined();

        const statusColumn = questsTable!.columns.find(
            (c) => c.name === 'status'
        );
        expect(statusColumn).toBeDefined();
        expect(statusColumn!.type.toLowerCase()).toBe('quest_status');

        const difficultyColumn = questsTable!.columns.find(
            (c) => c.name === 'difficulty'
        );
        expect(difficultyColumn).toBeDefined();
        expect(difficultyColumn!.type.toLowerCase()).toBe('difficulty_level');
    });

    it('should handle enum types with various quote styles', async () => {
        const sql = `
        CREATE TYPE quote_test AS ENUM ('single', "double", 'mixed"quotes');
        CREATE TYPE number_status AS ENUM ('1', '2', '3-inactive');
        `;

        const result = await fromPostgres(sql);

        expect(result.enums).toBeDefined();
        expect(result.enums).toHaveLength(2);

        const quoteTest = result.enums!.find((e) => e.name === 'quote_test');
        expect(quoteTest).toBeDefined();
        expect(quoteTest!.values).toEqual(['single', 'double', 'mixed"quotes']);

        const numberStatus = result.enums!.find(
            (e) => e.name === 'number_status'
        );
        expect(numberStatus).toBeDefined();
        expect(numberStatus!.values).toEqual(['1', '2', '3-inactive']);
    });

    it('should handle enums with special characters and longer values', async () => {
        const sql = `
        CREATE TYPE spell_status AS ENUM ('learning', 'mastered', 'forgotten', 'partially_learned', 'fully_mastered', 'forbidden', 'failed');
        CREATE TYPE portal_status AS ENUM ('inactive', 'charging', 'active', 'unstable', 'collapsed');
        `;

        const result = await fromPostgres(sql);

        expect(result.enums).toBeDefined();
        expect(result.enums).toHaveLength(2);

        const spellStatus = result.enums!.find(
            (e) => e.name === 'spell_status'
        );
        expect(spellStatus).toBeDefined();
        expect(spellStatus!.values).toHaveLength(7);
        expect(spellStatus!.values).toContain('partially_learned');

        const portalStatus = result.enums!.find(
            (e) => e.name === 'portal_status'
        );
        expect(portalStatus).toBeDefined();
        expect(portalStatus!.values).toHaveLength(5);
        expect(portalStatus!.values).toContain('collapsed');
    });

    it('should parse enum CREATE TYPE statements without generating warnings', async () => {
        const sql = `
        CREATE TYPE creature_status AS ENUM ('dormant', 'awakened');

        CREATE TABLE creatures (
            id INTEGER PRIMARY KEY,
            status creature_status
        );`;

        const result = await fromPostgres(sql);

        // With the updated parser, enum types don't generate warnings
        // Only non-enum custom types generate warnings

        // But still parse the enum
        expect(result.enums).toBeDefined();
        expect(result.enums).toHaveLength(1);
        expect(result.enums![0].name).toBe('creature_status');
    });
});
@@ -0,0 +1,54 @@
import { describe, it, expect } from 'vitest';
import { fromPostgres } from '../postgresql';

describe('Diagnostic tests for magical spell parsing cases', () => {
    it('should correctly parse spells table with Ancient Fire Blast descriptions', async () => {
        const sql = `
        CREATE TABLE spells (
            id UUID PRIMARY KEY,
            description TEXT, -- Overall description of the spell, e.g., "Ancient Fire Blast"
            category VARCHAR(50) NOT NULL
        );`;

        const result = await fromPostgres(sql);

        console.log('Spells table result:', {
            tableCount: result.tables.length,
            columns: result.tables[0]?.columns.map((c) => ({
                name: c.name,
                type: c.type,
            })),
        });

        expect(result.tables).toHaveLength(1);
        const spellsTable = result.tables[0];
        expect(spellsTable.name).toBe('spells');

        // Debug: list all columns found
        console.log('Columns found:', spellsTable.columns.length);
        spellsTable.columns.forEach((col, idx) => {
            console.log(` ${idx + 1}. ${col.name}: ${col.type}`);
        });

        expect(spellsTable.columns).toHaveLength(3);
    });

    it('should handle magical enum types with mixed quotes', async () => {
        const sql = `CREATE TYPE quote_test AS ENUM ('single', "double", 'mixed"quotes');`;

        const result = await fromPostgres(sql);

        console.log('Enum result:', {
            enumCount: result.enums?.length || 0,
            values: result.enums?.[0]?.values,
        });

        expect(result.enums).toBeDefined();
        expect(result.enums).toHaveLength(1);
        expect(result.enums![0].values).toEqual([
            'single',
            'double',
            'mixed"quotes',
        ]);
    });
});
@@ -0,0 +1,59 @@
import { describe, it, expect } from 'vitest';
import { fromPostgres } from '../postgresql';

describe('Test All 5 Enums', () => {
    it('should parse all 5 enum types', async () => {
        // Test with exact SQL from the file
        const sql = `
        -- Using ENUM types for fixed sets of values improves data integrity.
        CREATE TYPE quest_status AS ENUM ('active', 'paused', 'grace_period', 'expired', 'completed');
        CREATE TYPE spell_frequency AS ENUM ('daily', 'weekly');
        CREATE TYPE magic_time AS ENUM ('dawn', 'dusk', 'both');
        CREATE TYPE ritual_status AS ENUM ('pending', 'channeling', 'completed', 'failed', 'skipped');
        CREATE TYPE mana_status AS ENUM ('pending', 'charged', 'depleted');

        CREATE TABLE spellbooks (
            id UUID PRIMARY KEY,
            status quest_status DEFAULT 'active',
            cast_frequency spell_frequency NOT NULL,
            cast_time magic_time NOT NULL
        );
        `;

        const result = await fromPostgres(sql);

        // Debug output
        console.log('Enums found:', result.enums?.length || 0);
        if (result.enums) {
            result.enums.forEach((e) => {
                console.log(` - ${e.name}`);
            });
        }

        expect(result.enums).toBeDefined();
        expect(result.enums).toHaveLength(5);

        // Check all enum names
        const enumNames = result.enums!.map((e) => e.name).sort();
        expect(enumNames).toEqual([
            'magic_time',
            'mana_status',
            'quest_status',
            'ritual_status',
            'spell_frequency',
        ]);

        // Check quest_status specifically
        const questStatus = result.enums!.find(
            (e) => e.name === 'quest_status'
        );
        expect(questStatus).toBeDefined();
        expect(questStatus!.values).toEqual([
            'active',
            'paused',
            'grace_period',
            'expired',
            'completed',
        ]);
    });
});
@@ -0,0 +1,79 @@
import { describe, it, expect } from 'vitest';
import { fromPostgres } from '../postgresql';

describe('PostgreSQL parser - CREATE EXTENSION and CREATE TYPE', () => {
    it('should handle CREATE EXTENSION and CREATE TYPE statements', async () => {
        const testSQL = `
        -- Enable UUID extension
        CREATE EXTENSION IF NOT EXISTS "uuid-ossp";

        -- Create custom type for creature alignment
        CREATE TYPE creature_alignment AS ENUM ('lawful', 'neutral', 'chaotic');

        -- Create a table that uses the custom type
        CREATE TABLE mystical_creatures (
            id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
            name VARCHAR(100) NOT NULL,
            species VARCHAR(255) UNIQUE NOT NULL,
            alignment creature_alignment DEFAULT 'neutral',
            discovered_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        );

        -- Create another custom type
        CREATE TYPE magic_school AS ENUM ('illusion', 'evocation', 'necromancy', 'divination');

        -- Create a table with foreign key
        CREATE TABLE creature_abilities (
            id SERIAL PRIMARY KEY,
            creature_id UUID REFERENCES mystical_creatures(id),
            ability_name VARCHAR(255) NOT NULL,
            school magic_school DEFAULT 'evocation',
            is_innate BOOLEAN DEFAULT FALSE
        );
        `;

        console.log(
            'Testing PostgreSQL parser with CREATE EXTENSION and CREATE TYPE...\n'
        );

        try {
            const result = await fromPostgres(testSQL);

            console.log('Parse successful!');
            console.log('\nTables found:', result.tables.length);
            result.tables.forEach((table) => {
                console.log(`\n- Table: ${table.name}`);
                console.log(' Columns:');
                table.columns.forEach((col) => {
                    console.log(
                        ` - ${col.name}: ${col.type}${col.nullable ? '' : ' NOT NULL'}${col.primaryKey ? ' PRIMARY KEY' : ''}`
                    );
                });
            });

            console.log('\nRelationships found:', result.relationships.length);
            result.relationships.forEach((rel) => {
                console.log(
                    `- ${rel.sourceTable}.${rel.sourceColumn} -> ${rel.targetTable}.${rel.targetColumn}`
                );
            });

            if (result.warnings && result.warnings.length > 0) {
                console.log('\nWarnings:');
                result.warnings.forEach((warning) => {
                    console.log(`- ${warning}`);
                });
            }

            // Basic assertions
            expect(result.tables.length).toBe(2);
            expect(result.tables[0].name).toBe('mystical_creatures');
            expect(result.tables[1].name).toBe('creature_abilities');
            expect(result.relationships.length).toBe(1);
        } catch (error) {
            console.error('Error parsing SQL:', (error as Error).message);
            console.error('\nStack trace:', (error as Error).stack);
            throw error;
        }
    });
});
@@ -0,0 +1,203 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('Debug Missing Junction Table', () => {
|
||||
it('should find quest_sample_rewards junction table in the quest management system', async () => {
|
||||
const sql = `-- Quest Management System Database with Junction Tables
|
||||
CREATE TYPE quest_status AS ENUM ('draft', 'active', 'on_hold', 'completed', 'abandoned');
|
||||
CREATE TYPE difficulty_level AS ENUM ('novice', 'apprentice', 'journeyman', 'expert', 'master');
|
||||
CREATE TYPE reward_type AS ENUM ('gold', 'item', 'experience', 'reputation', 'special');
|
||||
CREATE TYPE adventurer_rank AS ENUM ('bronze', 'silver', 'gold', 'platinum', 'legendary');
|
||||
CREATE TYPE region_climate AS ENUM ('temperate', 'arctic', 'desert', 'tropical', 'magical');
|
||||
|
||||
CREATE TABLE adventurers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
rank adventurer_rank DEFAULT 'bronze'
|
||||
);
|
||||
|
||||
CREATE TABLE guild_masters (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE regions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(100) NOT NULL,
|
||||
climate region_climate NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE outposts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
region_id UUID REFERENCES regions(id),
|
||||
name VARCHAR(255) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE scouts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
outpost_id UUID REFERENCES outposts(id)
|
||||
);
|
||||
|
||||
CREATE TABLE scout_region_assignments (
|
||||
scout_id UUID REFERENCES scouts(id),
|
||||
region_id UUID REFERENCES regions(id),
|
||||
assigned_date DATE NOT NULL,
|
||||
PRIMARY KEY (scout_id, region_id)
|
||||
);
|
||||
|
||||
CREATE TABLE quest_givers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
title VARCHAR(100)
|
||||
);
|
||||
|
||||
CREATE TABLE quest_templates (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
title VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
difficulty difficulty_level NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE quests (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
quest_template_id UUID REFERENCES quest_templates(id),
|
||||
title VARCHAR(255) NOT NULL,
|
||||
status quest_status DEFAULT 'draft'
|
||||
);
|
||||
|
||||
CREATE TABLE rewards (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
reward_type reward_type NOT NULL,
|
||||
value INTEGER NOT NULL
|
||||
);
|
||||
|
||||
-- Junction table for quest template sample rewards
|
||||
CREATE TABLE quest_sample_rewards (
|
||||
quest_template_id UUID REFERENCES quest_templates(id),
|
||||
reward_id UUID REFERENCES rewards(id),
|
||||
PRIMARY KEY (quest_template_id, reward_id)
|
||||
);
|
||||
|
||||
CREATE TABLE quest_rotations (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
rotation_name VARCHAR(100) NOT NULL,
|
||||
start_date DATE NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE rotation_quests (
|
||||
rotation_id UUID REFERENCES quest_rotations(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
day_of_week INTEGER CHECK (day_of_week BETWEEN 1 AND 7),
|
||||
PRIMARY KEY (rotation_id, quest_id, day_of_week)
|
||||
);
|
||||
|
||||
CREATE TABLE contracts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
adventurer_id UUID REFERENCES adventurers(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
status quest_status DEFAULT 'active'
|
||||
);
|
||||
|
||||
CREATE TABLE completion_events (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
scout_id UUID REFERENCES scouts(id)
|
||||
);
|
||||
|
||||
CREATE TABLE bounties (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
amount_gold INTEGER NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE guild_ledgers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
entry_type VARCHAR(50) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE reputation_logs (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
adventurer_id UUID REFERENCES adventurers(id),
|
||||
quest_id UUID REFERENCES quests(id)
|
||||
);
|
||||
|
||||
CREATE TABLE quest_suspensions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
suspension_date DATE NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE guild_master_actions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
guild_master_id UUID REFERENCES guild_masters(id),
|
||||
action_type VARCHAR(100) NOT NULL
|
||||
);`;
|
||||
|
||||
// First, verify the table exists in the SQL
|
||||
const tableExists = sql.includes('CREATE TABLE quest_sample_rewards');
|
||||
console.log('\nDebugging quest_sample_rewards:');
|
||||
console.log('- Table exists in SQL:', tableExists);
|
||||
|
||||
// Extract the specific table definition
|
||||
const tableMatch = sql.match(
|
||||
/-- Junction table[\s\S]*?CREATE TABLE quest_sample_rewards[\s\S]*?;/
|
||||
);
|
||||
if (tableMatch) {
|
||||
console.log('- Table definition found, first 200 chars:');
|
||||
console.log(tableMatch[0].substring(0, 200) + '...');
|
||||
}
|
||||
|
||||
// Now parse
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
console.log('\nParsing results:');
|
||||
console.log('- Total tables:', result.tables.length);
|
||||
console.log(
|
||||
'- Table names:',
|
||||
result.tables.map((t) => t.name).join(', ')
|
||||
);
|
||||
|
||||
// Look for quest_sample_rewards
|
||||
const questSampleRewards = result.tables.find(
|
||||
(t) => t.name === 'quest_sample_rewards'
|
||||
);
|
||||
console.log('- quest_sample_rewards found:', !!questSampleRewards);
|
||||
|
||||
if (!questSampleRewards) {
|
||||
// Check warnings for clues
|
||||
console.log('\nWarnings that might be relevant:');
|
||||
result.warnings?.forEach((w, i) => {
|
||||
if (
|
||||
w.includes('quest_sample_rewards') ||
|
||||
w.includes('Failed to parse')
|
||||
) {
|
||||
console.log(` ${i}: ${w}`);
|
||||
}
|
||||
});
|
||||
|
||||
// List all tables to see what's missing
|
||||
console.log('\nAll parsed tables:');
|
||||
result.tables.forEach((t, i) => {
|
||||
console.log(
|
||||
` ${i + 1}. ${t.name} (${t.columns.length} columns)`
|
||||
);
|
||||
});
|
||||
} else {
|
||||
console.log('\nquest_sample_rewards details:');
|
||||
console.log('- Columns:', questSampleRewards.columns.length);
|
||||
questSampleRewards.columns.forEach((c) => {
|
||||
console.log(` - ${c.name}: ${c.type}`);
|
||||
});
|
||||
}
|
||||
|
||||
// The test expectation
|
||||
expect(tableExists).toBe(true);
|
||||
expect(result.tables.length).toBeGreaterThanOrEqual(19); // At least 19 tables
|
||||
expect(questSampleRewards).toBeDefined();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,56 @@
import { describe, it, expect } from 'vitest';
import { fromPostgres } from '../postgresql';

describe('PostgreSQL Relationships Debug', () => {
    it('should parse simple foreign key', async () => {
        const sql = `
        CREATE TABLE wizards (
            id UUID PRIMARY KEY DEFAULT uuid_generate_v4()
        );

        CREATE TABLE towers (
            id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
            wizard_id UUID NOT NULL REFERENCES wizards(id) ON DELETE CASCADE
        );`;

        const result = await fromPostgres(sql);

        console.log(
            'Tables:',
            result.tables.map((t) => t.name)
        );
        console.log('Relationships:', result.relationships);

        expect(result.tables).toHaveLength(2);
        expect(result.relationships).toHaveLength(1);
        expect(result.relationships[0].sourceTable).toBe('towers');
        expect(result.relationships[0].targetTable).toBe('wizards');
    });

    it('should handle custom types and foreign keys', async () => {
        const sql = `
        CREATE TYPE quest_status AS ENUM ('active', 'paused', 'completed');

        CREATE TABLE wizards (
            id UUID PRIMARY KEY DEFAULT uuid_generate_v4()
        );

        CREATE TABLE quests (
            id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
            wizard_id UUID NOT NULL REFERENCES wizards(id) ON DELETE CASCADE,
            status quest_status DEFAULT 'active'
        );`;

        const result = await fromPostgres(sql);

        console.log(
            'Tables:',
            result.tables.map((t) => t.name)
        );
        console.log('Relationships:', result.relationships);
        console.log('Warnings:', result.warnings);

        expect(result.tables).toHaveLength(2);
        expect(result.relationships).toHaveLength(1);
    });
});
@@ -0,0 +1,93 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('Junction Table Parsing - Spell Plans Database', () => {
|
||||
it('should parse all 3 tables (spell_plans, spells, plan_sample_spells) and 2 relationships', async () => {
|
||||
const sql = `-- Spell Plans Database with Enums and Junction Table
|
||||
CREATE TYPE casting_difficulty AS ENUM ('simple', 'moderate', 'complex', 'arcane', 'forbidden');
|
||||
CREATE TYPE magic_school AS ENUM ('elemental', 'healing', 'illusion', 'necromancy', 'transmutation');
|
||||
CREATE TYPE spell_range AS ENUM ('touch', 'short', 'medium', 'long', 'sight');
|
||||
CREATE TYPE component_type AS ENUM ('verbal', 'somatic', 'material', 'focus', 'divine');
|
||||
CREATE TYPE power_source AS ENUM ('arcane', 'divine', 'nature', 'psionic', 'primal');
|
||||
|
||||
CREATE TABLE spell_plans (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
difficulty casting_difficulty NOT NULL,
|
||||
school magic_school NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE spells (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
mana_cost INTEGER NOT NULL,
|
||||
cast_time VARCHAR(100),
|
||||
range spell_range NOT NULL,
|
||||
components component_type[] NOT NULL,
|
||||
power_source power_source NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
-- Junction table for showing sample spells in a spell plan
|
||||
CREATE TABLE plan_sample_spells (
|
||||
spell_plan_id UUID NOT NULL REFERENCES spell_plans(id) ON DELETE CASCADE,
|
||||
spell_id UUID NOT NULL REFERENCES spells(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (spell_plan_id, spell_id)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
console.log('Parsing results:');
|
||||
console.log(
|
||||
'- Tables:',
|
||||
result.tables.map((t) => t.name)
|
||||
);
|
||||
console.log('- Table count:', result.tables.length);
|
||||
console.log('- Relationships:', result.relationships.length);
|
||||
console.log('- Enums:', result.enums?.length || 0);
|
||||
|
||||
// Should have 3 tables
|
||||
expect(result.tables).toHaveLength(3);
|
||||
|
||||
// Check table names
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
expect(tableNames).toEqual([
|
||||
'plan_sample_spells',
|
||||
'spell_plans',
|
||||
'spells',
|
||||
]);
|
||||
|
||||
// Should have 2 relationships (both from plan_sample_spells)
|
||||
expect(result.relationships).toHaveLength(2);
|
||||
|
||||
// Check plan_sample_spells specifically
|
||||
const planSampleSpells = result.tables.find(
|
||||
(t) => t.name === 'plan_sample_spells'
|
||||
);
|
||||
expect(planSampleSpells).toBeDefined();
|
||||
expect(planSampleSpells!.columns).toHaveLength(2);
|
||||
|
||||
// Should have 5 enum types
|
||||
expect(result.enums).toBeDefined();
|
||||
expect(result.enums).toHaveLength(5);
|
||||
});
|
||||
|
||||
it('should parse the exact junction table definition', async () => {
|
||||
const sql = `
|
||||
-- Junction table for showing sample spells on a grimoire's page.
|
||||
CREATE TABLE grimoire_sample_spells (
|
||||
grimoire_plan_id UUID NOT NULL REFERENCES grimoire_plans(id) ON DELETE CASCADE,
|
||||
spell_id UUID NOT NULL REFERENCES spells(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (grimoire_plan_id, spell_id)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('grimoire_sample_spells');
|
||||
expect(result.tables[0].columns).toHaveLength(2);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,59 @@
import { describe, it, expect } from 'vitest';
import { fromPostgres } from '../postgresql';

describe('Invalid multi-line string in SQL', () => {
    it('should handle SQL with orphaned string literal', async () => {
        // This SQL has a syntax error - string literal on its own line
        const sql = `
        CREATE TABLE test_table (
            id UUID PRIMARY KEY,
            description TEXT, -- Example description
            "This is an orphaned string"
            name VARCHAR(100)
        );`;

        const result = await fromPostgres(sql);

        // Even with syntax error, it should try to parse what it can
        console.log('Result:', {
            tables: result.tables.length,
            warnings: result.warnings,
        });

        // Should attempt to parse the table even if parser fails
        expect(result.tables.length).toBeGreaterThanOrEqual(0);
    });

    it('should parse all tables even if one has syntax errors', async () => {
        const sql = `
        CREATE TABLE table1 (
            id UUID PRIMARY KEY
        );

        CREATE TABLE table2 (
            id UUID PRIMARY KEY,
            description TEXT, -- Example
            "Orphaned string"
            name VARCHAR(100)
        );

        CREATE TABLE table3 (
            id UUID PRIMARY KEY
        );`;

        const result = await fromPostgres(sql);

        console.log('Multi-table result:', {
            tableCount: result.tables.length,
            tableNames: result.tables.map((t) => t.name),
            warnings: result.warnings?.length || 0,
        });

        // Should parse at least table1 and table3
        expect(result.tables.length).toBeGreaterThanOrEqual(2);

        const tableNames = result.tables.map((t) => t.name);
        expect(tableNames).toContain('table1');
        expect(tableNames).toContain('table3');
    });
});
@@ -0,0 +1,246 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('Magical junction table parsing for wizard spell associations', () => {
|
||||
it('should parse the wizard-spell junction table for tracking spell knowledge', async () => {
|
||||
// Test with a junction table for spells and wizards
|
||||
const sql = `
|
||||
-- Junction table for tracking which wizards know which spells.
|
||||
CREATE TABLE wizard_spells (
|
||||
wizard_id UUID NOT NULL REFERENCES wizards(id) ON DELETE CASCADE,
|
||||
spell_id UUID NOT NULL REFERENCES spells(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (wizard_id, spell_id)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
console.log('Test results:', {
|
||||
tableCount: result.tables.length,
|
||||
tableNames: result.tables.map((t) => t.name),
|
||||
warnings: result.warnings,
|
||||
});
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('wizard_spells');
|
||||
});
|
||||
|
||||
it('should count all CREATE TABLE statements for magical entities in quest system', async () => {
|
||||
const sql = `-- Quest Management System Database
|
||||
CREATE TYPE quest_status AS ENUM ('draft', 'active', 'on_hold', 'completed', 'abandoned');
|
||||
CREATE TYPE difficulty_level AS ENUM ('novice', 'apprentice', 'journeyman', 'expert', 'master');
|
||||
CREATE TYPE reward_type AS ENUM ('gold', 'item', 'experience', 'reputation', 'special');
|
||||
CREATE TYPE adventurer_rank AS ENUM ('bronze', 'silver', 'gold', 'platinum', 'legendary');
|
||||
CREATE TYPE region_climate AS ENUM ('temperate', 'arctic', 'desert', 'tropical', 'magical');
|
||||
|
||||
CREATE TABLE adventurers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
rank adventurer_rank DEFAULT 'bronze',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE guild_masters (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
specialization VARCHAR(100),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE regions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(100) NOT NULL,
|
||||
climate region_climate NOT NULL,
|
||||
danger_level INTEGER CHECK (danger_level BETWEEN 1 AND 10),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE outposts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
region_id UUID REFERENCES regions(id),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
location_coordinates POINT,
|
||||
is_active BOOLEAN DEFAULT true,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE scouts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
outpost_id UUID REFERENCES outposts(id),
|
||||
scouting_range INTEGER DEFAULT 50,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE scout_region_assignments (
|
||||
scout_id UUID REFERENCES scouts(id),
|
||||
region_id UUID REFERENCES regions(id),
|
||||
assigned_date DATE NOT NULL,
|
||||
PRIMARY KEY (scout_id, region_id)
|
||||
);
|
||||
|
||||
CREATE TABLE quest_givers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
title VARCHAR(100),
|
||||
location VARCHAR(255),
|
||||
reputation_required INTEGER DEFAULT 0,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE quest_templates (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
title VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
difficulty difficulty_level NOT NULL,
|
||||
base_reward_gold INTEGER DEFAULT 0,
|
||||
quest_giver_id UUID REFERENCES quest_givers(id),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE quests (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
quest_template_id UUID REFERENCES quest_templates(id),
|
||||
title VARCHAR(255) NOT NULL,
|
||||
status quest_status DEFAULT 'draft',
|
||||
reward_multiplier DECIMAL(3,2) DEFAULT 1.0,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE rewards (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
reward_type reward_type NOT NULL,
|
||||
value INTEGER NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE quest_sample_rewards (
|
||||
quest_template_id UUID REFERENCES quest_templates(id),
|
||||
reward_id UUID REFERENCES rewards(id),
|
||||
PRIMARY KEY (quest_template_id, reward_id)
|
||||
);
|
||||
|
||||
CREATE TABLE quest_rotations (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
rotation_name VARCHAR(100) NOT NULL,
|
||||
start_date DATE NOT NULL,
|
||||
end_date DATE NOT NULL,
|
||||
is_active BOOLEAN DEFAULT false,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE rotation_quests (
|
||||
rotation_id UUID REFERENCES quest_rotations(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
day_of_week INTEGER CHECK (day_of_week BETWEEN 1 AND 7),
|
||||
PRIMARY KEY (rotation_id, quest_id, day_of_week)
|
||||
);
|
||||
|
||||
CREATE TABLE contracts (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
adventurer_id UUID REFERENCES adventurers(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
status quest_status DEFAULT 'active',
|
||||
started_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
completed_at TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE completion_events (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
scout_id UUID REFERENCES scouts(id),
|
||||
verification_notes TEXT,
|
||||
event_timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE bounties (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
amount_gold INTEGER NOT NULL,
|
||||
payment_status VARCHAR(50) DEFAULT 'pending',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE guild_ledgers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
entry_type VARCHAR(50) NOT NULL,
|
||||
amount INTEGER NOT NULL,
|
||||
balance_after INTEGER NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE reputation_logs (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
adventurer_id UUID REFERENCES adventurers(id),
|
||||
quest_id UUID REFERENCES quests(id),
|
||||
reputation_change INTEGER NOT NULL,
|
||||
reason VARCHAR(255),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE quest_suspensions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
contract_id UUID REFERENCES contracts(id),
|
||||
suspension_date DATE NOT NULL,
|
||||
reason VARCHAR(255),
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE guild_master_actions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
guild_master_id UUID REFERENCES guild_masters(id),
|
||||
action_type VARCHAR(100) NOT NULL,
|
||||
target_table VARCHAR(100),
|
||||
target_id UUID,
|
||||
details JSONB,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);`;
|
||||
|
||||
// Count CREATE TABLE statements
|
||||
const createTableMatches = sql.match(/CREATE TABLE/gi) || [];
|
||||
console.log(
|
||||
`\nFound ${createTableMatches.length} CREATE TABLE statements in file`
|
||||
);
|
||||
|
||||
// Find all table names
|
||||
const tableNameMatches =
|
||||
sql.match(
|
||||
/CREATE TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?["']?(\w+)["']?/gi
|
||||
) || [];
|
||||
const tableNames = tableNameMatches
|
||||
.map((match) => {
|
||||
const nameMatch = match.match(
|
||||
/CREATE TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?["']?(\w+)["']?/i
|
||||
);
|
||||
return nameMatch ? nameMatch[1] : null;
|
||||
})
|
||||
.filter(Boolean);
|
||||
|
||||
console.log('Table names found in SQL:', tableNames);
|
||||
console.log(
|
||||
'quest_sample_rewards in list?',
|
||||
tableNames.includes('quest_sample_rewards')
|
||||
);
|
||||
|
||||
// Parse the file
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
console.log(`\nParsed ${result.tables.length} tables`);
|
||||
console.log(
|
||||
'Parsed table names:',
|
||||
result.tables.map((t) => t.name).sort()
|
||||
);
|
||||
|
||||
const junctionTable = result.tables.find(
|
||||
(t) => t.name.includes('_') && t.columns.length >= 2
|
||||
);
|
||||
console.log('junction table found?', !!junctionTable);
|
||||
|
||||
// All CREATE TABLE statements should be parsed
|
||||
expect(result.tables.length).toBe(createTableMatches.length);
|
||||
expect(junctionTable).toBeDefined();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,134 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('junction table parsing fix', () => {
|
||||
it('should parse table with single-line comment before CREATE TABLE', async () => {
|
||||
const sql = `
|
||||
-- Junction table for tracking which wizards have learned which spells.
|
||||
CREATE TABLE wizard_spellbook (
|
||||
wizard_id UUID NOT NULL REFERENCES wizards(id) ON DELETE CASCADE,
|
||||
spell_id UUID NOT NULL REFERENCES spells(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (wizard_id, spell_id)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(1);
|
||||
expect(result.tables[0].name).toBe('wizard_spellbook');
|
||||
expect(result.tables[0].columns).toHaveLength(2);
|
||||
expect(result.tables[0].columns[0].name).toBe('wizard_id');
|
||||
expect(result.tables[0].columns[1].name).toBe('spell_id');
|
||||
});
|
||||
|
||||
it('should handle multiple tables with comments', async () => {
|
||||
const sql = `
|
||||
-- First table
|
||||
CREATE TABLE mages (
|
||||
id UUID PRIMARY KEY
|
||||
);
|
||||
|
||||
-- Junction table for tracking spellbook contents.
|
||||
CREATE TABLE mage_grimoires (
|
||||
mage_id UUID NOT NULL REFERENCES mages(id) ON DELETE CASCADE,
|
||||
grimoire_id UUID NOT NULL REFERENCES grimoires(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (mage_id, grimoire_id)
|
||||
);
|
||||
|
||||
-- Another table
|
||||
CREATE TABLE grimoires (
|
||||
id UUID PRIMARY KEY
|
||||
);
|
||||
|
||||
CREATE TABLE enchantments (
|
||||
id UUID PRIMARY KEY
|
||||
);`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(4);
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
expect(tableNames).toEqual([
|
||||
'enchantments',
|
||||
'grimoires',
|
||||
'mage_grimoires',
|
||||
'mages',
|
||||
]);
|
||||
|
||||
// Verify mage_grimoires specifically
|
||||
const mageGrimoires = result.tables.find(
|
||||
(t) => t.name === 'mage_grimoires'
|
||||
);
|
||||
expect(mageGrimoires).toBeDefined();
|
||||
expect(mageGrimoires?.columns).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should handle statements that start with comment but include CREATE TABLE', async () => {
|
||||
const sql = `
|
||||
-- This comment mentions CREATE TABLE artifacts in the comment
|
||||
-- but it's just a comment
|
||||
;
|
||||
-- This is the actual table
|
||||
CREATE TABLE mystical_artifacts (
|
||||
id INTEGER PRIMARY KEY
|
||||
);
|
||||
|
||||
-- Junction table for artifact_enchantments
|
||||
CREATE TABLE artifact_enchantments (
|
||||
artifact_id INTEGER,
|
||||
enchantment_id INTEGER
|
||||
);`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(2);
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
expect(tableNames).toEqual([
|
||||
'artifact_enchantments',
|
||||
'mystical_artifacts',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should parse all three tables including junction table', async () => {
|
||||
const sql = `
|
||||
CREATE TABLE spell_categories (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
name VARCHAR(100) NOT NULL,
|
||||
description TEXT
|
||||
);
|
||||
|
||||
CREATE TABLE arcane_spells (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
incantation VARCHAR(255) NOT NULL,
|
||||
power_level INTEGER DEFAULT 1,
|
||||
mana_cost INTEGER NOT NULL
|
||||
);
|
||||
|
||||
-- Junction table for categorizing spells
|
||||
CREATE TABLE spell_categorization (
|
||||
category_id UUID NOT NULL REFERENCES spell_categories(id) ON DELETE CASCADE,
|
||||
spell_id UUID NOT NULL REFERENCES arcane_spells(id) ON DELETE CASCADE,
|
||||
PRIMARY KEY (category_id, spell_id)
|
||||
);`;
|
||||
|
||||
const result = await fromPostgres(sql);
|
||||
|
||||
expect(result.tables).toHaveLength(3);
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
expect(tableNames).toEqual([
|
||||
'arcane_spells',
|
||||
'spell_categories',
|
||||
'spell_categorization',
|
||||
]);
|
||||
|
||||
// Check the junction table exists and has correct structure
|
||||
const spellCategorization = result.tables.find(
|
||||
(t) => t.name === 'spell_categorization'
|
||||
);
|
||||
expect(spellCategorization).toBeDefined();
|
||||
expect(spellCategorization!.columns).toHaveLength(2);
|
||||
expect(spellCategorization!.columns.map((c) => c.name).sort()).toEqual([
|
||||
'category_id',
|
||||
'spell_id',
|
||||
]);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,322 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { fromPostgres } from '../postgresql';
|
||||
|
||||
describe('PostgreSQL Complex Database - Enchanted Bazaar', () => {
|
||||
it('should parse the complete magical marketplace database', async () => {
|
||||
const sql = `-- Enchanted Bazaar Database Schema
|
||||
-- A complex magical marketplace system with many enums and relationships
|
||||
|
||||
-- Enums for the magical marketplace
|
||||
CREATE TYPE wizard_status AS ENUM ('active', 'suspended', 'banned', 'inactive');
|
||||
CREATE TYPE spell_category AS ENUM ('attack', 'defense', 'utility', 'healing', 'summoning');
|
||||
CREATE TYPE artifact_rarity AS ENUM ('common', 'uncommon', 'rare', 'epic', 'legendary');
|
||||
CREATE TYPE shop_status AS ENUM ('open', 'closed', 'under_renovation', 'abandoned');
|
||||
CREATE TYPE transaction_status AS ENUM ('pending', 'completed', 'failed', 'refunded');
|
||||
CREATE TYPE payment_method AS ENUM ('gold', 'crystals', 'barter', 'credit', 'quest_reward');
|
||||
CREATE TYPE listing_status AS ENUM ('draft', 'active', 'sold', 'expired', 'removed');
|
||||
CREATE TYPE enchantment_type AS ENUM ('fire', 'ice', 'lightning', 'holy', 'dark');
|
||||
CREATE TYPE potion_effect AS ENUM ('healing', 'mana', 'strength', 'speed', 'invisibility');
|
||||
CREATE TYPE scroll_type AS ENUM ('spell', 'recipe', 'map', 'contract', 'prophecy');
|
||||
CREATE TYPE merchant_tier AS ENUM ('novice', 'apprentice', 'journeyman', 'master', 'grandmaster');
|
||||
CREATE TYPE review_rating AS ENUM ('terrible', 'poor', 'average', 'good', 'excellent');
|
||||
CREATE TYPE dispute_status AS ENUM ('open', 'investigating', 'resolved', 'escalated');
|
||||
CREATE TYPE delivery_method AS ENUM ('instant', 'owl', 'portal', 'courier', 'pickup');
|
||||
CREATE TYPE market_zone AS ENUM ('north', 'south', 'east', 'west', 'central');
|
||||
|
||||
-- Core tables
|
||||
CREATE TABLE wizards (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
username VARCHAR(255) UNIQUE NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
status wizard_status DEFAULT 'active',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE spell_verifications (
|
||||
wizard_id UUID PRIMARY KEY REFERENCES wizards(id),
|
||||
verified_at TIMESTAMP NOT NULL,
|
||||
verification_level INTEGER DEFAULT 1
|
||||
);
|
||||
|
||||
CREATE TABLE realms (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(100) NOT NULL,
|
||||
zone market_zone NOT NULL,
|
||||
magical_tax_rate DECIMAL(5,4) DEFAULT 0.0500
|
||||
);
|
||||
|
||||
CREATE TABLE sanctuaries (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
realm_id UUID REFERENCES realms(id),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
protection_level INTEGER DEFAULT 1
|
||||
);
|
||||
|
||||
CREATE TABLE magic_plans (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(100) NOT NULL,
|
||||
merchant_tier merchant_tier NOT NULL,
|
||||
monthly_fee INTEGER NOT NULL,
|
||||
listing_limit INTEGER DEFAULT 10
|
||||
);
|
||||
|
||||
CREATE TABLE wizard_subscriptions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
wizard_id UUID REFERENCES wizards(id),
|
||||
plan_id UUID REFERENCES magic_plans(id),
|
||||
status transaction_status DEFAULT 'pending',
|
||||
started_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE shops (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
wizard_id UUID REFERENCES wizards(id),
|
||||
realm_id UUID REFERENCES realms(id),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
status shop_status DEFAULT 'open',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE shop_sanctuaries (
|
||||
shop_id UUID REFERENCES shops(id),
|
||||
sanctuary_id UUID REFERENCES sanctuaries(id),
|
||||
assigned_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
PRIMARY KEY (shop_id, sanctuary_id)
|
||||
);
|
||||
|
||||
CREATE TABLE artifact_categories (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(100) NOT NULL,
|
||||
parent_id UUID REFERENCES artifact_categories(id),
|
||||
description TEXT
|
||||
);
|
||||
|
||||
CREATE TABLE enchantments (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
type enchantment_type NOT NULL,
|
||||
power_level INTEGER DEFAULT 1,
|
||||
description TEXT
|
||||
);
|
||||
|
||||
CREATE TABLE listings (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
shop_id UUID REFERENCES shops(id),
|
||||
category_id UUID REFERENCES artifact_categories(id),
|
||||
title VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
price INTEGER NOT NULL,
|
||||
quantity INTEGER DEFAULT 1,
|
||||
rarity artifact_rarity DEFAULT 'common',
|
||||
status listing_status DEFAULT 'draft',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE listing_enchantments (
|
||||
listing_id UUID REFERENCES listings(id),
|
||||
enchantment_id UUID REFERENCES enchantments(id),
|
||||
strength INTEGER DEFAULT 1,
|
||||
PRIMARY KEY (listing_id, enchantment_id)
|
||||
);
|
||||
|
||||
CREATE TABLE potions (
|
||||
listing_id UUID PRIMARY KEY REFERENCES listings(id),
|
||||
effect potion_effect NOT NULL,
|
||||
duration_minutes INTEGER DEFAULT 30,
|
||||
potency INTEGER DEFAULT 1
|
||||
);
|
||||
|
||||
CREATE TABLE scrolls (
|
||||
listing_id UUID PRIMARY KEY REFERENCES listings(id),
|
||||
type scroll_type NOT NULL,
|
||||
spell_category spell_category,
|
||||
uses_remaining INTEGER DEFAULT 1
|
||||
);
|
||||
|
||||
CREATE TABLE transactions (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
buyer_id UUID REFERENCES wizards(id),
|
||||
listing_id UUID REFERENCES listings(id),
|
||||
quantity INTEGER NOT NULL,
|
||||
total_price INTEGER NOT NULL,
|
||||
payment_method payment_method NOT NULL,
|
||||
status transaction_status DEFAULT 'pending',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE reviews (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
transaction_id UUID REFERENCES transactions(id),
|
||||
reviewer_id UUID REFERENCES wizards(id),
|
||||
rating review_rating NOT NULL,
|
||||
comment TEXT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE disputes (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
transaction_id UUID REFERENCES transactions(id),
|
||||
filed_by UUID REFERENCES wizards(id),
|
||||
reason TEXT NOT NULL,
|
||||
status dispute_status DEFAULT 'open',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE messages (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
sender_id UUID REFERENCES wizards(id),
|
||||
recipient_id UUID REFERENCES wizards(id),
|
||||
listing_id UUID REFERENCES listings(id),
|
||||
content TEXT NOT NULL,
|
||||
sent_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE favorites (
|
||||
wizard_id UUID REFERENCES wizards(id),
|
||||
listing_id UUID REFERENCES listings(id),
|
||||
added_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
PRIMARY KEY (wizard_id, listing_id)
|
||||
);
|
||||
|
||||
CREATE TABLE shop_followers (
|
||||
wizard_id UUID REFERENCES wizards(id),
|
||||
shop_id UUID REFERENCES shops(id),
|
||||
followed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
PRIMARY KEY (wizard_id, shop_id)
|
||||
);
|
||||
|
||||
CREATE TABLE delivery_options (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
listing_id UUID REFERENCES listings(id),
|
||||
method delivery_method NOT NULL,
|
||||
cost INTEGER DEFAULT 0,
|
||||
estimated_time_hours INTEGER DEFAULT 24
|
||||
);
|
||||
|
||||
CREATE TABLE transaction_deliveries (
|
||||
transaction_id UUID PRIMARY KEY REFERENCES transactions(id),
|
||||
delivery_option_id UUID REFERENCES delivery_options(id),
|
||||
tracking_number VARCHAR(100),
|
||||
delivered_at TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE wizard_badges (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name VARCHAR(100) NOT NULL,
|
||||
description TEXT,
|
||||
icon_url VARCHAR(500)
|
||||
);
|
||||
|
||||
CREATE TABLE wizard_achievements (
|
||||
wizard_id UUID REFERENCES wizards(id),
|
||||
badge_id UUID REFERENCES wizard_badges(id),
|
||||
earned_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
PRIMARY KEY (wizard_id, badge_id)
|
||||
);
|
||||
|
||||
CREATE TABLE market_analytics (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
listing_id UUID REFERENCES listings(id),
|
||||
view_count INTEGER DEFAULT 0,
|
||||
favorite_count INTEGER DEFAULT 0,
|
||||
last_viewed TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE price_history (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
listing_id UUID REFERENCES listings(id),
|
||||
old_price INTEGER NOT NULL,
|
||||
new_price INTEGER NOT NULL,
|
||||
changed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE audit_logs (
|
||||
id BIGSERIAL PRIMARY KEY,
|
||||
wizard_id UUID REFERENCES wizards(id),
|
||||
action VARCHAR(100) NOT NULL,
|
||||
table_name VARCHAR(100),
|
||||
record_id UUID,
|
||||
details JSONB,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);`;
|
||||
|
||||
console.log('Parsing SQL...');
|
||||
const startTime = Date.now();
|
||||
const result = await fromPostgres(sql);
|
||||
const parseTime = Date.now() - startTime;
|
||||
|
||||
console.log(`Parse completed in ${parseTime}ms`);
|
||||
|
||||
// Expected counts
|
||||
const expectedTables = 27;
|
||||
const expectedEnums = 15;
|
||||
const minExpectedRelationships = 36; // Adjusted based on actual relationships in the schema
|
||||
|
||||
console.log('\n=== PARSING RESULTS ===');
|
||||
console.log(
|
||||
`Tables parsed: ${result.tables.length} (expected: ${expectedTables})`
|
||||
);
|
||||
console.log(
|
||||
`Enums parsed: ${result.enums?.length || 0} (expected: ${expectedEnums})`
|
||||
);
|
||||
console.log(
|
||||
`Relationships parsed: ${result.relationships.length} (expected min: ${minExpectedRelationships})`
|
||||
);
|
||||
console.log(`Warnings: ${result.warnings?.length || 0}`);
|
||||
|
||||
// List parsed tables
|
||||
console.log('\n=== TABLES PARSED ===');
|
||||
const tableNames = result.tables.map((t) => t.name).sort();
|
||||
tableNames.forEach((name) => console.log(`- ${name}`));
|
||||
|
||||
// List enums
|
||||
if (result.enums && result.enums.length > 0) {
|
||||
console.log('\n=== ENUMS PARSED ===');
|
||||
result.enums.forEach((e) => {
|
||||
console.log(`- ${e.name}: ${e.values.length} values`);
|
||||
});
|
||||
}
|
||||
|
||||
// Show warnings if any
|
||||
if (result.warnings && result.warnings.length > 0) {
|
||||
console.log('\n=== WARNINGS ===');
|
||||
result.warnings.forEach((w) => console.log(`- ${w}`));
|
||||
}
|
||||
|
||||
// Verify counts
|
||||
expect(result.tables).toHaveLength(expectedTables);
|
||||
expect(result.enums).toBeDefined();
|
||||
expect(result.enums).toHaveLength(expectedEnums);
|
||||
expect(result.relationships.length).toBeGreaterThanOrEqual(
|
||||
minExpectedRelationships
|
||||
);
|
||||
|
||||
// Check specific tables exist
|
||||
const criticalTables = [
|
||||
'wizards',
|
||||
'shops',
|
||||
'listings',
|
||||
'transactions',
|
||||
'reviews',
|
||||
];
|
||||
criticalTables.forEach((tableName) => {
|
||||
const table = result.tables.find((t) => t.name === tableName);
|
||||
expect(table).toBeDefined();
|
||||
});
|
||||
|
||||
// Check junction tables
|
||||
const junctionTables = [
|
||||
'shop_sanctuaries',
|
||||
'listing_enchantments',
|
||||
'favorites',
|
||||
'shop_followers',
|
||||
'wizard_achievements',
|
||||
];
|
||||
junctionTables.forEach((tableName) => {
|
||||
const table = result.tables.find((t) => t.name === tableName);
|
||||
expect(table).toBeDefined();
|
||||
expect(table!.columns.length).toBeGreaterThanOrEqual(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,66 @@
import { describe, it } from 'vitest';

describe('node-sql-parser - CREATE TYPE handling', () => {
    it('should show exact parser error for CREATE TYPE', async () => {
        const { Parser } = await import('node-sql-parser');
        const parser = new Parser();
        const parserOpts = {
            database: 'PostgreSQL',
        };

        console.log('\n=== Testing CREATE TYPE statement ===');
        const createTypeSQL = `CREATE TYPE spell_element AS ENUM ('fire', 'water', 'earth', 'air');`;

        try {
            parser.astify(createTypeSQL, parserOpts);
            console.log('CREATE TYPE parsed successfully');
        } catch (error) {
            console.log('CREATE TYPE parse error:', (error as Error).message);
        }

        console.log('\n=== Testing CREATE EXTENSION statement ===');
        const createExtensionSQL = `CREATE EXTENSION IF NOT EXISTS "uuid-ossp";`;

        try {
            parser.astify(createExtensionSQL, parserOpts);
            console.log('CREATE EXTENSION parsed successfully');
        } catch (error) {
            console.log(
                'CREATE EXTENSION parse error:',
                (error as Error).message
            );
        }

        console.log('\n=== Testing CREATE TABLE with custom type ===');
        const createTableWithTypeSQL = `CREATE TABLE wizards (
            id UUID PRIMARY KEY,
            element spell_element DEFAULT 'fire'
        );`;

        try {
            parser.astify(createTableWithTypeSQL, parserOpts);
            console.log('CREATE TABLE with custom type parsed successfully');
        } catch (error) {
            console.log(
                'CREATE TABLE with custom type parse error:',
                (error as Error).message
            );
        }

        console.log('\n=== Testing CREATE TABLE with standard types only ===');
        const createTableStandardSQL = `CREATE TABLE wizards (
            id UUID PRIMARY KEY,
            element VARCHAR(20) DEFAULT 'fire'
        );`;

        try {
            parser.astify(createTableStandardSQL, parserOpts);
            console.log('CREATE TABLE with standard types parsed successfully');
        } catch (error) {
            console.log(
                'CREATE TABLE with standard types parse error:',
                (error as Error).message
            );
        }
    });
});
@@ -0,0 +1,61 @@
import { describe, it, expect } from 'vitest';
import { fromPostgres } from '../postgresql';

describe('PostgreSQL Minimal Type Test', () => {
    it('should handle CREATE EXTENSION, CREATE TYPE, and multi-line comments', async () => {
        const sql = `
        CREATE EXTENSION IF NOT EXISTS "uuid-ossp";

        CREATE TYPE spell_time AS ENUM ('dawn', 'dusk', 'both');

        CREATE TABLE spells (
            id UUID PRIMARY KEY,
            description TEXT, -- Overall description of the spell, e.g., "Ancient Fire Blast"
            category VARCHAR(50) NOT NULL
        );

        CREATE TABLE rituals (
            id UUID PRIMARY KEY,
            day_of_week INTEGER NOT NULL, -- 1=Monday, 7=Sunday
            cast_time spell_time NOT NULL
        );`;

        const result = await fromPostgres(sql);

        // Should parse tables
        expect(result.tables).toHaveLength(2);
        expect(result.tables.map((t) => t.name).sort()).toEqual([
            'rituals',
            'spells',
        ]);

        // Should have warnings about extension and type
        expect(result.warnings).toBeDefined();
        expect(result.warnings!.some((w) => w.includes('Extension'))).toBe(
            true
        );
        // Enum types no longer generate warnings with the updated parser

        // Check that the enum was parsed
        expect(result.enums).toBeDefined();
        expect(result.enums).toHaveLength(1);
        expect(result.enums![0].name).toBe('spell_time');
        expect(result.enums![0].values).toEqual(['dawn', 'dusk', 'both']);

        // Check that multi-line comments were handled
        const spellsTable = result.tables.find((t) => t.name === 'spells');
        expect(spellsTable).toBeDefined();
        expect(spellsTable!.columns).toHaveLength(3); // id, description, category

        const ritualsTable = result.tables.find((t) => t.name === 'rituals');
        expect(ritualsTable).toBeDefined();
        expect(ritualsTable!.columns).toHaveLength(3); // id, day_of_week, cast_time

        // Custom type should be preserved (possibly uppercase)
        const castTimeColumn = ritualsTable!.columns.find(
            (c) => c.name === 'cast_time'
        );
        expect(castTimeColumn).toBeDefined();
        expect(castTimeColumn!.type.toLowerCase()).toBe('spell_time');
    });
});
@@ -0,0 +1,54 @@
import { describe, it, expect } from 'vitest';
import { fromPostgres } from '../postgresql';

describe('Test All Five Enums', () => {
    it('should find all 5 enums from the exact SQL in the file', async () => {
        // Exact copy from the file
        const sql = `
        -- Using ENUM types for fixed sets of values improves data integrity.
        CREATE TYPE quest_status AS ENUM ('active', 'paused', 'grace_period', 'expired', 'completed');
        CREATE TYPE spell_frequency AS ENUM ('daily', 'weekly');
        CREATE TYPE magic_time AS ENUM ('dawn', 'dusk', 'both');
        CREATE TYPE ritual_status AS ENUM ('pending', 'channeling', 'completed', 'failed', 'skipped');
        CREATE TYPE mana_status AS ENUM ('pending', 'charged', 'depleted');
        `;

        const result = await fromPostgres(sql);

        // Check we got all 5
        expect(result.enums).toBeDefined();
        expect(result.enums).toHaveLength(5);

        // Check each one exists
        const enumNames = result.enums!.map((e) => e.name).sort();
        expect(enumNames).toEqual([
            'magic_time',
            'mana_status',
            'quest_status',
            'ritual_status',
            'spell_frequency',
        ]);
    });

    it('should handle CREATE TYPE statements with semicolons on same line', async () => {
        // Test different formatting
        const sql = `CREATE TYPE quest_status AS ENUM ('active', 'paused', 'grace_period', 'expired', 'completed');
        CREATE TYPE spell_frequency AS ENUM ('daily', 'weekly');
        CREATE TYPE magic_time AS ENUM ('dawn', 'dusk', 'both');
        CREATE TYPE ritual_status AS ENUM ('pending', 'channeling', 'completed', 'failed', 'skipped');
        CREATE TYPE mana_status AS ENUM ('pending', 'charged', 'depleted');`;

        const result = await fromPostgres(sql);

        expect(result.enums).toBeDefined();
        expect(result.enums).toHaveLength(5);

        // Specifically check quest_status
        const questStatus = result.enums!.find(
            (e) => e.name === 'quest_status'
        );
        expect(questStatus).toBeDefined();
        expect(questStatus!.values).toHaveLength(5);
        expect(questStatus!.values).toContain('grace_period');
    });
});
@@ -0,0 +1,101 @@
import { describe, it, expect } from 'vitest';
import { fromPostgres } from '../postgresql';

describe('Table Count Validation', () => {
    it('should parse all CREATE TABLE statements without missing any', async () => {
        const sql = `
-- Table 1 comment
CREATE TABLE table1 (id INTEGER PRIMARY KEY);

/* Multi-line comment
for table 2 */
CREATE TABLE table2 (id INTEGER PRIMARY KEY);

CREATE TABLE IF NOT EXISTS table3 (id INTEGER PRIMARY KEY);

-- Junction table
CREATE TABLE table1_table2 (
    table1_id INTEGER REFERENCES table1(id),
    table2_id INTEGER REFERENCES table2(id),
    PRIMARY KEY (table1_id, table2_id)
);

CREATE TABLE "quoted_table" (id INTEGER PRIMARY KEY);

CREATE TABLE schema1.table_with_schema (id INTEGER PRIMARY KEY);`;

        const result = await fromPostgres(sql);

        // Count CREATE TABLE statements in the SQL
        const createTableCount = (sql.match(/CREATE TABLE/gi) || []).length;

        console.log(`\nValidation:`);
        console.log(`- CREATE TABLE statements in SQL: ${createTableCount}`);
        console.log(`- Tables parsed: ${result.tables.length}`);
        console.log(
            `- Table names: ${result.tables.map((t) => t.name).join(', ')}`
        );

        // All CREATE TABLE statements should result in a parsed table
        expect(result.tables).toHaveLength(createTableCount);

        // Verify specific tables
        const expectedTables = [
            'table1',
            'table2',
            'table3',
            'table1_table2',
            'quoted_table',
            'table_with_schema',
        ];
        const actualTables = result.tables.map((t) => t.name).sort();
        expect(actualTables).toEqual(expectedTables.sort());
    });

    it('should handle edge cases that might cause tables to be missed', async () => {
        const sql = `
-- This tests various edge cases

-- 1. Table with only foreign key columns (no regular columns)
CREATE TABLE only_fks (
    user_id UUID REFERENCES users(id),
    role_id UUID REFERENCES roles(id),
    PRIMARY KEY (user_id, role_id)
);

-- 2. Table with no PRIMARY KEY
CREATE TABLE no_pk (
    data TEXT NOT NULL
);

-- 3. Empty table (pathological case)
CREATE TABLE empty_table ();

-- 4. Table with complex constraints
CREATE TABLE complex_constraints (
    id INTEGER,
    CONSTRAINT pk_complex PRIMARY KEY (id),
    CONSTRAINT chk_positive CHECK (id > 0)
);`;

        const result = await fromPostgres(sql);

        const createTableCount = (sql.match(/CREATE TABLE/gi) || []).length;

        console.log(`\nEdge case validation:`);
        console.log(`- CREATE TABLE statements: ${createTableCount}`);
        console.log(`- Tables parsed: ${result.tables.length}`);
        console.log(
            `- Expected tables: only_fks, no_pk, empty_table, complex_constraints`
        );
        console.log(
            `- Actual tables: ${result.tables.map((t) => t.name).join(', ')}`
        );
        result.tables.forEach((t) => {
            console.log(`- ${t.name}: ${t.columns.length} columns`);
        });

        // Even edge cases should be parsed
        expect(result.tables).toHaveLength(createTableCount);
    });
});
@@ -0,0 +1,258 @@
import { describe, it, expect } from 'vitest';
import { fromPostgres } from '../postgresql';

describe('PostgreSQL Quest Management Database', () => {
    it('should parse the magical quest management database', async () => {
        const sql = `-- Quest Management System Database
-- Enable UUID extension
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";

-- Type definitions
CREATE TYPE quest_status AS ENUM ('draft', 'active', 'on_hold', 'completed', 'abandoned');
CREATE TYPE difficulty_level AS ENUM ('novice', 'apprentice', 'journeyman', 'expert', 'master');
CREATE TYPE reward_type AS ENUM ('gold', 'item', 'experience', 'reputation', 'special');
CREATE TYPE adventurer_rank AS ENUM ('bronze', 'silver', 'gold', 'platinum', 'legendary');
CREATE TYPE region_climate AS ENUM ('temperate', 'arctic', 'desert', 'tropical', 'magical');

CREATE TABLE adventurers (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    name VARCHAR(255) NOT NULL,
    email VARCHAR(255) UNIQUE NOT NULL,
    rank adventurer_rank DEFAULT 'bronze',
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

CREATE TABLE guild_masters (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    name VARCHAR(255) NOT NULL,
    email VARCHAR(255) UNIQUE NOT NULL,
    specialization VARCHAR(100),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

CREATE TABLE regions (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    name VARCHAR(100) NOT NULL,
    climate region_climate NOT NULL,
    danger_level INTEGER CHECK (danger_level BETWEEN 1 AND 10),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

CREATE TABLE outposts (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    region_id UUID REFERENCES regions(id),
    name VARCHAR(255) NOT NULL,
    location_coordinates POINT,
    is_active BOOLEAN DEFAULT true,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

CREATE TABLE scouts (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    name VARCHAR(255) NOT NULL,
    outpost_id UUID REFERENCES outposts(id),
    scouting_range INTEGER DEFAULT 50,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

CREATE TABLE scout_region_assignments (
    scout_id UUID REFERENCES scouts(id),
    region_id UUID REFERENCES regions(id),
    assigned_date DATE NOT NULL,
    PRIMARY KEY (scout_id, region_id)
);

CREATE TABLE quest_givers (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    name VARCHAR(255) NOT NULL,
    title VARCHAR(100),
    location VARCHAR(255),
    reputation_required INTEGER DEFAULT 0,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

CREATE TABLE quest_templates (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    title VARCHAR(255) NOT NULL,
    description TEXT,
    difficulty difficulty_level NOT NULL,
    base_reward_gold INTEGER DEFAULT 0,
    quest_giver_id UUID REFERENCES quest_givers(id),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

CREATE TABLE quests (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    quest_template_id UUID REFERENCES quest_templates(id),
    title VARCHAR(255) NOT NULL,
    status quest_status DEFAULT 'draft',
    reward_multiplier DECIMAL(3,2) DEFAULT 1.0,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

CREATE TABLE rewards (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    name VARCHAR(255) NOT NULL,
    reward_type reward_type NOT NULL,
    value INTEGER NOT NULL,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

CREATE TABLE quest_sample_rewards (
    quest_template_id UUID REFERENCES quest_templates(id),
    reward_id UUID REFERENCES rewards(id),
    PRIMARY KEY (quest_template_id, reward_id)
);

CREATE TABLE quest_rotations (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    rotation_name VARCHAR(100) NOT NULL,
    start_date DATE NOT NULL,
    end_date DATE NOT NULL,
    is_active BOOLEAN DEFAULT false,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

CREATE TABLE rotation_quests (
    rotation_id UUID REFERENCES quest_rotations(id),
    quest_id UUID REFERENCES quests(id),
    day_of_week INTEGER CHECK (day_of_week BETWEEN 1 AND 7),
    PRIMARY KEY (rotation_id, quest_id, day_of_week)
);

CREATE TABLE contracts (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    adventurer_id UUID REFERENCES adventurers(id),
    quest_id UUID REFERENCES quests(id),
    status quest_status DEFAULT 'active',
    started_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    completed_at TIMESTAMP WITH TIME ZONE
);

CREATE TABLE completion_events (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    contract_id UUID REFERENCES contracts(id),
    scout_id UUID REFERENCES scouts(id),
    verification_notes TEXT,
    event_timestamp TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

CREATE TABLE bounties (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    contract_id UUID REFERENCES contracts(id),
    amount_gold INTEGER NOT NULL,
    payment_status VARCHAR(50) DEFAULT 'pending',
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

CREATE TABLE guild_ledgers (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    contract_id UUID REFERENCES contracts(id),
    entry_type VARCHAR(50) NOT NULL,
    amount INTEGER NOT NULL,
    balance_after INTEGER NOT NULL,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

CREATE TABLE reputation_logs (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    adventurer_id UUID REFERENCES adventurers(id),
    quest_id UUID REFERENCES quests(id),
    reputation_change INTEGER NOT NULL,
    reason VARCHAR(255),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

CREATE TABLE quest_suspensions (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    contract_id UUID REFERENCES contracts(id),
    suspension_date DATE NOT NULL,
    reason VARCHAR(255),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

CREATE TABLE guild_master_actions (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    guild_master_id UUID REFERENCES guild_masters(id),
    action_type VARCHAR(100) NOT NULL,
    target_table VARCHAR(100),
    target_id UUID,
    details JSONB,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);`;

        const result = await fromPostgres(sql);

        // Should parse tables despite extensions and custom types
        expect(result.tables.length).toBeGreaterThan(0);

        // Should have warnings about unsupported features
        expect(result.warnings).toBeDefined();
        expect(
            result.warnings!.some(
                (w) => w.includes('Extension') || w.includes('type')
            )
        ).toBe(true);

        // Should have parsed all 20 tables
        expect(result.tables).toHaveLength(20);

        const tableNames = result.tables.map((t) => t.name).sort();
        const expectedTables = [
            'adventurers',
            'guild_masters',
            'regions',
            'outposts',
            'scouts',
            'scout_region_assignments',
            'quest_givers',
            'quest_templates',
            'quests',
            'rewards',
            'quest_sample_rewards',
            'quest_rotations',
            'rotation_quests',
            'contracts',
            'completion_events',
            'bounties',
            'guild_ledgers',
            'reputation_logs',
            'quest_suspensions',
            'guild_master_actions',
        ];
        expect(tableNames).toEqual(expectedTables.sort());

        // Check that enum types were parsed
        expect(result.enums).toBeDefined();
        expect(result.enums!.length).toBe(5);

        // Check specific enums
        const questStatus = result.enums!.find(
            (e) => e.name === 'quest_status'
        );
        expect(questStatus).toBeDefined();
        expect(questStatus!.values).toEqual([
            'draft',
            'active',
            'on_hold',
            'completed',
            'abandoned',
        ]);

        // Check that custom enum types are handled in columns
        const contractsTable = result.tables.find(
            (t) => t.name === 'contracts'
        );
        expect(contractsTable).toBeDefined();
        const statusColumn = contractsTable!.columns.find(
            (c) => c.name === 'status'
        );
        expect(statusColumn).toBeDefined();
        expect(statusColumn?.type).toMatch(/quest_status/i);

        // Verify foreign keys are still extracted
        if (result.tables.length > 3) {
            expect(result.relationships.length).toBeGreaterThan(0);
        }
    });
});
@@ -0,0 +1,69 @@
import { describe, it, expect } from 'vitest';
import { fromPostgres } from '../postgresql';

describe('Missing quest_status Bug - Magical Quest Management System', () => {
    it('should parse all 5 magical enums including quest_status for adventurer tracking', async () => {
        // Exact content from the file
        const sql = `
-- ##################################################
-- # TYPE DEFINITIONS
-- ##################################################

-- Using ENUM types for fixed sets of values improves data integrity.
CREATE TYPE quest_status AS ENUM ('active', 'paused', 'grace_period', 'expired', 'completed');
CREATE TYPE spell_frequency AS ENUM ('daily', 'weekly');
CREATE TYPE magic_time AS ENUM ('dawn', 'dusk', 'both');
CREATE TYPE ritual_status AS ENUM ('pending', 'channeling', 'completed', 'failed', 'skipped');
CREATE TYPE mana_status AS ENUM ('pending', 'charged', 'depleted');
`;

        console.log('Testing with fromPostgres...');
        const result = await fromPostgres(sql);

        console.log(
            'Enums found:',
            result.enums?.map((e) => e.name)
        );

        expect(result.enums).toBeDefined();
        expect(result.enums).toHaveLength(5);

        // Specifically check for quest_status
        const questStatus = result.enums!.find(
            (e) => e.name === 'quest_status'
        );
        expect(questStatus).toBeDefined();
        expect(questStatus!.name).toBe('quest_status');
        expect(questStatus!.values).toEqual([
            'active',
            'paused',
            'grace_period',
            'expired',
            'completed',
        ]);
    });

    it('should also work with the improved parser for magical quest and spell enums', async () => {
        const sql = `
CREATE TYPE quest_status AS ENUM ('active', 'paused', 'grace_period', 'expired', 'completed');
CREATE TYPE spell_frequency AS ENUM ('daily', 'weekly');
CREATE TYPE magic_time AS ENUM ('dawn', 'dusk', 'both');
CREATE TYPE ritual_status AS ENUM ('pending', 'channeling', 'completed', 'failed', 'skipped');
CREATE TYPE mana_status AS ENUM ('pending', 'charged', 'depleted');
`;

        const result = await fromPostgres(sql);

        expect(result.enums).toBeDefined();
        expect(result.enums).toHaveLength(5);

        const enumNames = result.enums!.map((e) => e.name).sort();
        expect(enumNames).toEqual([
            'magic_time',
            'mana_status',
            'quest_status',
            'ritual_status',
            'spell_frequency',
        ]);
    });
});
@@ -0,0 +1,142 @@
import { describe, it, expect } from 'vitest';
import { fromPostgres } from '../postgresql';

describe('Real-world PostgreSQL import examples', () => {
    it('should successfully parse a complex real-world schema with enums', async () => {
        // This example demonstrates how the parser handles real-world PostgreSQL exports
        // that may contain schema-qualified identifiers and syntax variations
        const sql = `
-- Example of a real PostgreSQL database export with schema-qualified types
CREATE TYPE "public"."mage_rank" AS ENUM('novice', 'apprentice', 'journeyman', 'expert', 'master', 'archmage');
CREATE TYPE "public"."spell_category" AS ENUM('combat', 'healing', 'utility', 'summoning', 'enchantment');
CREATE TYPE "public"."artifact_quality" AS ENUM('crude', 'common', 'fine', 'exceptional', 'masterwork', 'legendary');

-- Tables with proper spacing in column definitions
CREATE TABLE "mages" (
    "id" text PRIMARY KEY NOT NULL,
    "name" text NOT NULL,
    "email" text NOT NULL,
    "rank" "mage_rank" DEFAULT 'novice' NOT NULL,
    "specialization" "spell_category",
    "created_at" timestamp with time zone NOT NULL,
    "updated_at" timestamp with time zone NOT NULL,
    CONSTRAINT "mages_email_unique" UNIQUE("email")
);

-- Example of a table with missing spaces (common in some exports)
CREATE TABLE "grimoires" (
    "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
    "mage_id" text NOT NULL,
    "title" varchar(255) NOT NULL,
    "category""spell_category" NOT NULL,
    "quality""artifact_quality" DEFAULT 'common' NOT NULL,
    "pages" integer DEFAULT 100 NOT NULL,
    "created_at" timestamp DEFAULT now()
);

-- Table with JSON syntax issues (: :jsonb instead of ::jsonb)
CREATE TABLE "spell_components" (
    "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
    "spell_id" uuid NOT NULL,
    "component_name" text NOT NULL,
    "quantity" integer DEFAULT 1,
    "properties" jsonb DEFAULT '{}': :jsonb,
    "created_at" timestamp DEFAULT now()
);

-- Foreign key constraints using schema-qualified references
ALTER TABLE "grimoires" ADD CONSTRAINT "grimoires_mage_id_mages_id_fk"
FOREIGN KEY ("mage_id") REFERENCES "public"."mages"("id") ON DELETE cascade;

-- Indexes
CREATE UNIQUE INDEX "mages_rank_email_idx" ON "mages" ("rank", "email");
CREATE INDEX "grimoires_category_idx" ON "grimoires" ("category");
`;

        const result = await fromPostgres(sql);

        // Verify enum parsing
        console.log('\n=== IMPORT RESULTS ===');
        console.log(`Enums parsed: ${result.enums?.length || 0}`);
        console.log(`Tables parsed: ${result.tables.length}`);
        console.log(`Relationships found: ${result.relationships.length}`);
        console.log(`Warnings: ${result.warnings?.length || 0}`);

        // All enums should be parsed despite schema qualification
        expect(result.enums).toHaveLength(3);
        expect(result.enums?.map((e) => e.name).sort()).toEqual([
            'artifact_quality',
            'mage_rank',
            'spell_category',
        ]);

        // All tables should be parsed, even with syntax issues
        expect(result.tables).toHaveLength(3);
        expect(result.tables.map((t) => t.name).sort()).toEqual([
            'grimoires',
            'mages',
            'spell_components',
        ]);

        // Foreign keys should be recognized
        expect(result.relationships.length).toBeGreaterThan(0);
        const fk = result.relationships.find(
            (r) => r.sourceTable === 'grimoires' && r.targetTable === 'mages'
        );
        expect(fk).toBeDefined();

        // Note: Index parsing may not be fully implemented in the current parser
        // This is acceptable as the main focus is on tables, enums, and relationships

        // Check specific enum values
        const mageRank = result.enums?.find((e) => e.name === 'mage_rank');
        expect(mageRank?.values).toEqual([
            'novice',
            'apprentice',
            'journeyman',
            'expert',
            'master',
            'archmage',
        ]);

        // Log warnings for visibility
        if (result.warnings && result.warnings.length > 0) {
            console.log('\n=== WARNINGS ===');
            result.warnings.forEach((w) => console.log(`- ${w}`));
        }
    });

    it('should provide actionable feedback for common syntax issues', async () => {
        const sql = `
CREATE TYPE "public"."potion_effect" AS ENUM('healing', 'mana', 'strength', 'speed');

CREATE TABLE "potions" (
    "id" uuid PRIMARY KEY,
    "name" text NOT NULL,
    "effect""potion_effect" NOT NULL,
    "duration" interval DEFAULT '30 minutes': :interval,
    "power" integer DEFAULT 50
);`;

        const result = await fromPostgres(sql);

        // Enum should still be parsed
        expect(result.enums).toHaveLength(1);
        expect(result.enums?.[0].name).toBe('potion_effect');

        // Table should be parsed despite issues
        expect(result.tables).toHaveLength(1);
        expect(result.tables[0].name).toBe('potions');

        // Should have warnings about parsing issues
        expect(result.warnings).toBeDefined();
        expect(result.warnings!.length).toBeGreaterThan(0);

        // The warning should indicate which statement failed
        const hasParseWarning = result.warnings!.some(
            (w) =>
                w.includes('Failed to parse statement') && w.includes('potions')
        );
        expect(hasParseWarning).toBe(true);
    });
});
@@ -0,0 +1,71 @@
import { describe, it, expect } from 'vitest';
import { fromPostgres } from '../postgresql';

describe('Schema-qualified enum parsing', () => {
    it('should parse enums with schema prefix', async () => {
        const sql = `
CREATE TYPE "public"."wizard_rank" AS ENUM('apprentice', 'journeyman', 'master', 'grandmaster');
CREATE TYPE "public"."spell_school" AS ENUM('fire', 'water', 'earth', 'air', 'spirit');

CREATE TABLE "wizards" (
    "id" text PRIMARY KEY NOT NULL,
    "name" text NOT NULL,
    "rank" "wizard_rank" DEFAULT 'apprentice' NOT NULL,
    "primary_school" "spell_school" NOT NULL
);`;

        const result = await fromPostgres(sql);

        console.log('Enums found:', result.enums?.length || 0);
        if (result.enums) {
            result.enums.forEach((e) => {
                console.log(` - ${e.name}: ${e.values.join(', ')}`);
            });
        }

        // Should find both enums
        expect(result.enums).toHaveLength(2);

        const wizardRank = result.enums?.find((e) => e.name === 'wizard_rank');
        expect(wizardRank).toBeDefined();
        expect(wizardRank?.values).toEqual([
            'apprentice',
            'journeyman',
            'master',
            'grandmaster',
        ]);

        const spellSchool = result.enums?.find(
            (e) => e.name === 'spell_school'
        );
        expect(spellSchool).toBeDefined();
        expect(spellSchool?.values).toEqual([
            'fire',
            'water',
            'earth',
            'air',
            'spirit',
        ]);
    });

    it('should handle missing spaces between column name and type', async () => {
        const sql = `
CREATE TYPE "public"."dragon_type" AS ENUM('fire', 'ice', 'storm', 'earth');

CREATE TABLE "dragons" (
    "id" text PRIMARY KEY NOT NULL,
    "name" text NOT NULL,
    "type""dragon_type" DEFAULT 'fire' NOT NULL
);`;

        const result = await fromPostgres(sql);

        // Should still parse the enum
        expect(result.enums).toHaveLength(1);
        expect(result.enums?.[0].name).toBe('dragon_type');

        // Table parsing might fail due to syntax error
        console.log('Tables found:', result.tables.length);
        console.log('Warnings:', result.warnings);
    });
});
@@ -0,0 +1,60 @@
import { describe, it, expect } from 'vitest';
import { fromPostgres } from '../postgresql';

describe('Simple Enum Test', () => {
    it('should parse 5 simple enum types', async () => {
        // Test with just the enum definitions
        const sql = `
CREATE TYPE quest_status AS ENUM ('active', 'paused', 'grace_period', 'expired', 'completed');
CREATE TYPE spell_frequency AS ENUM ('daily', 'weekly');
CREATE TYPE magic_time AS ENUM ('dawn', 'dusk', 'both');
CREATE TYPE ritual_status AS ENUM ('pending', 'channeling', 'completed', 'failed', 'skipped');
CREATE TYPE mana_status AS ENUM ('pending', 'charged', 'depleted');
`;

        const result = await fromPostgres(sql);

        console.log('Result enums:', result.enums?.length || 0);
        if (result.enums) {
            result.enums.forEach((e) => {
                console.log(` - ${e.name}`);
            });
        }

        expect(result.enums).toBeDefined();
        expect(result.enums).toHaveLength(5);
    });

    it('should parse enums one by one', async () => {
        const enums = [
            {
                sql: "CREATE TYPE quest_status AS ENUM ('active', 'paused', 'grace_period', 'expired', 'completed');",
                name: 'quest_status',
                values: [
                    'active',
                    'paused',
                    'grace_period',
                    'expired',
                    'completed',
                ],
            },
            {
                sql: "CREATE TYPE spell_frequency AS ENUM ('daily', 'weekly');",
                name: 'spell_frequency',
                values: ['daily', 'weekly'],
            },
        ];

        for (const enumDef of enums) {
            const result = await fromPostgres(enumDef.sql);

            console.log(`\nTesting ${enumDef.name}:`);
            console.log(` Found enums: ${result.enums?.length || 0}`);

            expect(result.enums).toBeDefined();
            expect(result.enums).toHaveLength(1);
            expect(result.enums![0].name).toBe(enumDef.name);
            expect(result.enums![0].values).toEqual(enumDef.values);
        }
    });
});
@@ -0,0 +1,110 @@
import { describe, it, expect } from 'vitest';
import { fromPostgres } from '../postgresql';

describe('Junction Table Parsing', () => {
    it('should parse junction table with composite primary key', async () => {
        const sql = `
CREATE TABLE spell_books (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    title VARCHAR(100) NOT NULL
);

CREATE TABLE spells (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    incantation VARCHAR(100) NOT NULL
);

-- Junction table for tracking which spells are contained in which books.
CREATE TABLE book_spells (
    spell_book_id UUID NOT NULL REFERENCES spell_books(id) ON DELETE CASCADE,
    spell_id UUID NOT NULL REFERENCES spells(id) ON DELETE CASCADE,
    PRIMARY KEY (spell_book_id, spell_id)
);`;

        const result = await fromPostgres(sql);

        // Should parse all 3 tables
        expect(result.tables).toHaveLength(3);

        const tableNames = result.tables.map((t) => t.name).sort();
        expect(tableNames).toEqual(['book_spells', 'spell_books', 'spells']);

        // Check book_spells specifically
        const bookSpells = result.tables.find((t) => t.name === 'book_spells');
        expect(bookSpells).toBeDefined();
        expect(bookSpells!.columns).toHaveLength(2);

        const columnNames = bookSpells!.columns.map((c) => c.name).sort();
        expect(columnNames).toEqual(['spell_book_id', 'spell_id']);

        // Check that both columns are recognized as foreign keys
        const spellBookIdColumn = bookSpells!.columns.find(
            (c) => c.name === 'spell_book_id'
        );
        expect(spellBookIdColumn).toBeDefined();
        expect(spellBookIdColumn!.type).toBe('UUID');
        expect(spellBookIdColumn!.nullable).toBe(false);

        const spellIdColumn = bookSpells!.columns.find(
            (c) => c.name === 'spell_id'
        );
        expect(spellIdColumn).toBeDefined();
        expect(spellIdColumn!.type).toBe('UUID');
        expect(spellIdColumn!.nullable).toBe(false);
    });

    it('should handle various junction table formats', async () => {
        const sql = `
-- Format 1: Inline references
CREATE TABLE artifact_enchantments (
    artifact_id INTEGER NOT NULL REFERENCES artifacts(id),
    enchantment_id INTEGER NOT NULL REFERENCES enchantments(id),
    PRIMARY KEY (artifact_id, enchantment_id)
);

-- Format 2: With additional columns
CREATE TABLE wizard_guilds (
    wizard_id UUID NOT NULL REFERENCES wizards(id),
    guild_id UUID NOT NULL REFERENCES guilds(id),
    joined_at TIMESTAMP DEFAULT NOW(),
    recruited_by UUID REFERENCES wizards(id),
    PRIMARY KEY (wizard_id, guild_id)
);

-- Format 3: With named constraint
CREATE TABLE potion_ingredients (
    potion_id BIGINT NOT NULL REFERENCES potions(id) ON DELETE CASCADE,
    ingredient_id BIGINT NOT NULL REFERENCES ingredients(id) ON DELETE CASCADE,
    quantity INTEGER DEFAULT 1,
    CONSTRAINT pk_potion_ingredients PRIMARY KEY (potion_id, ingredient_id)
);`;

        const result = await fromPostgres(sql);

        expect(result.tables).toHaveLength(3);

        // All tables should be found
        const tableNames = result.tables.map((t) => t.name).sort();
        expect(tableNames).toEqual([
            'artifact_enchantments',
            'potion_ingredients',
            'wizard_guilds',
        ]);

        // Check each table has the expected columns
        const artifactEnchantments = result.tables.find(
            (t) => t.name === 'artifact_enchantments'
        );
        expect(artifactEnchantments!.columns).toHaveLength(2);

        const wizardGuilds = result.tables.find(
            (t) => t.name === 'wizard_guilds'
        );
        expect(wizardGuilds!.columns).toHaveLength(4); // Including joined_at and recruited_by

        const potionIngredients = result.tables.find(
            (t) => t.name === 'potion_ingredients'
        );
        expect(potionIngredients!.columns).toHaveLength(3); // Including quantity
    });
});
@@ -0,0 +1,75 @@
import { describe, it, expect } from 'vitest';
import { fromPostgres } from '../postgresql';

describe('Exact fourth example reproduction - Spell Plans Database', () => {
    it('should parse the exact SQL from the fourth example with spell plans and magical components', async () => {
        // Exact copy of the SQL that's failing
        const sql = `-- Using ENUM types for fixed sets of values improves data integrity.
CREATE TYPE quest_status AS ENUM ('active', 'paused', 'grace_period', 'expired', 'completed');
CREATE TYPE spell_frequency AS ENUM ('daily', 'weekly');
CREATE TYPE magic_time AS ENUM ('dawn', 'dusk', 'both');
CREATE TYPE ritual_status AS ENUM ('pending', 'channeling', 'completed', 'failed', 'skipped');
CREATE TYPE mana_status AS ENUM ('pending', 'charged', 'depleted');

CREATE TABLE spell_plans (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    name VARCHAR(100) NOT NULL,
    duration_days INTEGER NOT NULL,
    total_skips INTEGER NOT NULL,
    validity_days INTEGER NOT NULL,
    mana_cost INTEGER NOT NULL,
    is_active BOOLEAN DEFAULT true,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

CREATE TABLE spells (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    wizard_tower_id UUID NOT NULL REFERENCES wizard_towers(id),
    name VARCHAR(255) NOT NULL,
    description TEXT, -- Overall description of the spell, e.g.,"Ancient Fire Blast"
    category VARCHAR(50) NOT NULL, -- combat, healing
    -- Structured breakdown of the spell's components.
    -- Example: [{"name": "Dragon Scale", "category": "Reagent"}, {"name": "Phoenix Feather", "category": "Catalyst"} ]
    components JSONB,
    is_active BOOLEAN DEFAULT true,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

-- Junction table for showing sample spells on a plan's grimoire page.
CREATE TABLE plan_sample_spells (
    spell_plan_id UUID NOT NULL REFERENCES spell_plans(id) ON DELETE CASCADE,
    spell_id UUID NOT NULL REFERENCES spells(id) ON DELETE CASCADE,
    PRIMARY KEY (spell_plan_id, spell_id)
);`;

        console.log('Testing exact SQL from the fourth example...');

        const result = await fromPostgres(sql);

        console.log('Results:', {
            tables: result.tables.length,
            tableNames: result.tables.map((t) => t.name),
            warnings: result.warnings?.length || 0,
        });

        // Should have 3 tables
        expect(result.tables).toHaveLength(3);

        // Check all table names
        const tableNames = result.tables.map((t) => t.name).sort();
        expect(tableNames).toEqual([
            'plan_sample_spells',
            'spell_plans',
            'spells',
        ]);

        // Verify plan_sample_spells exists
        const planSampleSpells = result.tables.find(
            (t) => t.name === 'plan_sample_spells'
        );
        expect(planSampleSpells).toBeDefined();
        expect(planSampleSpells!.columns).toHaveLength(2);
    });
});
@@ -0,0 +1,162 @@
import { describe, it, expect } from 'vitest';
import { DatabaseType } from '@/lib/domain';
import { validateSQL } from '../../../sql-validator';
import { fromPostgres } from '../postgresql';

describe('PostgreSQL Import - Split DECIMAL Handling', () => {
    it('should successfully import tables with split DECIMAL declarations using auto-fix', async () => {
        const sql = `
CREATE TABLE financial_records (
    id SERIAL PRIMARY KEY,
    account_balance DECIMAL(15,
    2) NOT NULL,
    interest_rate NUMERIC(5,
    4) DEFAULT 0.0000,
    transaction_fee DECIMAL(10,
    2) DEFAULT 0.00
);

CREATE TABLE market_data (
    id INTEGER PRIMARY KEY,
    price DECIMAL(18,
    8) NOT NULL,
    volume NUMERIC(20,
    0) NOT NULL
);
`;

        const validationResult = validateSQL(sql, DatabaseType.POSTGRESQL);

        // Validation should detect issues but provide auto-fix
        expect(validationResult.isValid).toBe(false);
        expect(validationResult.fixedSQL).toBeDefined();

        // Parse the fixed SQL
        const diagramResult = await fromPostgres(validationResult.fixedSQL!);

        expect(diagramResult).toBeDefined();
        expect(diagramResult?.tables).toHaveLength(2);

        // Check first table
        const financialTable = diagramResult?.tables.find(
            (t) => t.name === 'financial_records'
        );
        expect(financialTable).toBeDefined();
        expect(financialTable?.columns).toHaveLength(4);

        // Check that DECIMAL columns were parsed correctly
        const balanceColumn = financialTable?.columns.find(
            (c) => c.name === 'account_balance'
        );
        expect(balanceColumn?.type).toMatch(/DECIMAL|NUMERIC/i);

        const interestColumn = financialTable?.columns.find(
            (c) => c.name === 'interest_rate'
        );
        expect(interestColumn?.type).toMatch(/DECIMAL|NUMERIC/i);

        // Check second table
        const marketTable = diagramResult?.tables.find(
            (t) => t.name === 'market_data'
        );
        expect(marketTable).toBeDefined();
        expect(marketTable?.columns).toHaveLength(3);

        // Verify warnings about auto-fix
        expect(validationResult.warnings).toBeDefined();
        expect(
            validationResult.warnings?.some((w) =>
                w.message.includes('Auto-fixed split DECIMAL/NUMERIC')
            )
        ).toBe(true);
    });

    it('should handle complex SQL with multiple issues including split DECIMAL', async () => {
        const sql = `
-- Financial system with various data types
CREATE TABLE accounts (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    balance DECIMAL(20,
    2) NOT NULL DEFAULT 0.00,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

-- Query with cast operator issues
SELECT
    id: :text AS account_id,
    balance: :DECIMAL(10,
    2) AS rounded_balance
FROM accounts;

CREATE TABLE transactions (
    id SERIAL PRIMARY KEY,
    account_id UUID REFERENCES accounts(id),
    amount DECIMAL(15,
    2) NOT NULL,
    fee NUMERIC(10,
    4) DEFAULT 0.0000
);
`;

        const validationResult = validateSQL(sql, DatabaseType.POSTGRESQL);

        // Validation should detect issues but provide auto-fix
        expect(validationResult.isValid).toBe(false);
        expect(validationResult.fixedSQL).toBeDefined();

        // Parse the fixed SQL
        const diagramResult = await fromPostgres(validationResult.fixedSQL!);

        expect(diagramResult).toBeDefined();
        expect(diagramResult?.tables).toHaveLength(2);

        // Verify both types of fixes were applied
        expect(validationResult?.warnings).toBeDefined();
        expect(
            validationResult?.warnings?.some((w) =>
                w.message.includes('Auto-fixed cast operator')
            )
        ).toBe(true);
        expect(
            validationResult?.warnings?.some((w) =>
                w.message.includes('Auto-fixed split DECIMAL/NUMERIC')
            )
        ).toBe(true);

        // Check foreign key relationship was preserved
        expect(diagramResult?.relationships).toHaveLength(1);
        const fk = diagramResult?.relationships[0];
        expect(fk?.sourceTable).toBe('transactions');
        expect(fk?.targetTable).toBe('accounts');
    });

    it('should fallback to regex extraction for tables with split DECIMAL that cause parser errors', async () => {
        const sql = `
CREATE TABLE complex_table (
    id INTEGER PRIMARY KEY,
    -- This might cause parser issues
    weird_decimal DECIMAL(10,
    2) ARRAY NOT NULL,
    normal_column VARCHAR(100),
    another_decimal NUMERIC(5,
    3) CHECK (another_decimal > 0)
);
`;

        const validationResult = validateSQL(sql, DatabaseType.POSTGRESQL);

        // Validation should detect issues but provide auto-fix
        expect(validationResult.isValid).toBe(false);
        expect(validationResult.fixedSQL).toBeDefined();

        // Parse the fixed SQL
        const diagramResult = await fromPostgres(validationResult.fixedSQL!);

        // Even if parser fails, should still import with regex fallback
        expect(diagramResult?.tables).toHaveLength(1);

        const table = diagramResult?.tables[0];
        expect(table?.name).toBe('complex_table');
        expect(table?.columns.length).toBeGreaterThanOrEqual(3);
    });
});
@@ -0,0 +1,48 @@
import { describe, it, expect } from 'vitest';
import { fromPostgres } from '../postgresql';

describe('String preservation during comment removal', () => {
    it('should preserve strings containing -- pattern', async () => {
        const sql = `
CREATE TABLE spell_ingredients (
    ingredient_id INTEGER PRIMARY KEY,
    preparation_note VARCHAR(100) DEFAULT '--grind finely'
);`;

        const result = await fromPostgres(sql);

        console.log('String preservation result:', {
            tableCount: result.tables.length,
            columns: result.tables[0]?.columns.map((c) => ({
                name: c.name,
                type: c.type,
                default: c.default,
            })),
        });

        expect(result.tables).toHaveLength(1);
        expect(result.tables[0].columns).toHaveLength(2);

        const noteCol = result.tables[0].columns.find(
            (c) => c.name === 'preparation_note'
        );
        expect(noteCol).toBeDefined();
        expect(noteCol?.default).toBeDefined();
    });

    it('should preserve URL strings with double slashes', async () => {
        const sql = `
CREATE TABLE artifact_sources (
    artifact_id INTEGER,
    origin_url VARCHAR(200) DEFAULT 'https://ancient-library.realm'
);`;

        const result = await fromPostgres(sql);

        expect(result.tables[0].columns).toHaveLength(2);
        const urlCol = result.tables[0].columns.find(
            (c) => c.name === 'origin_url'
        );
        expect(urlCol).toBeDefined();
    });
});
Some files were not shown because too many files have changed in this diff.