refactor: rename temp-chunks to temp-uploads and update related configurations

- Renamed 'temp-chunks' to 'temp-uploads' across .dockerignore, the Dockerfile, and related configuration files for consistency.
- Introduced a new directories configuration file to centralize directory path management.
- Updated server file handling to stream uploads and downloads, improving performance and memory usage.
- Enhanced cleanup of temporary directories to remove stale files and empty folders.
Author: Daniel Luiz Alves
Date: 2025-07-06 00:06:09 -03:00
parent c9a9f1d6cf
commit 95939f8f47
21 changed files with 296 additions and 217 deletions

View File

@@ -62,9 +62,9 @@ docker-compose*
# Storage directories (created at runtime)
uploads/
-temp-chunks/
+temp-uploads/
apps/server/uploads/
-apps/server/temp-chunks/
+apps/server/temp-uploads/
# Static files
apps/server/prisma/*.db

View File

@@ -137,11 +137,9 @@ echo "Database: SQLite"
# Set global environment variables
export DATABASE_URL="file:/app/server/prisma/palmr.db"
export UPLOAD_PATH="/app/server/uploads"
-export TEMP_CHUNKS_PATH="/app/server/temp-chunks"
# Ensure /app/server directory exists for bind mounts
-mkdir -p /app/server/uploads /app/server/temp-chunks /app/server/uploads/logo /app/server/prisma
+mkdir -p /app/server/uploads /app/server/temp-uploads /app/server/prisma
echo "Data directories ready for first run..."

View File

@@ -2,5 +2,5 @@ node_modules
.env
dist/*
uploads/*
-temp-chunks/*
+temp-uploads/*
prisma/*.db

View File

@@ -2,5 +2,6 @@
/dist
/build
/uploads
-/temp-chunks
+/temp-uploads
/logs
+/prisma/migrations

View File

@@ -0,0 +1,52 @@
import * as path from "path";
import { IS_RUNNING_IN_CONTAINER } from "../utils/container-detection";
/**
* Directory Configuration for Palmr Server
*
* This configuration manages all directory paths used by the server,
* including temporary directories for uploads.
*/
export interface DirectoryConfig {
baseDir: string;
uploads: string;
tempUploads: string;
}
const BASE_DIR = IS_RUNNING_IN_CONTAINER ? "/app/server" : process.cwd();
export const directoriesConfig: DirectoryConfig = {
baseDir: BASE_DIR,
uploads: path.join(BASE_DIR, "uploads"),
tempUploads: path.join(BASE_DIR, "temp-uploads"),
};
/**
* Get the temporary directory for upload operations
* This is where files are temporarily stored during streaming uploads
*/
export function getTempUploadDir(): string {
return directoriesConfig.tempUploads;
}
/**
* Get the uploads directory
* This is where final files are stored
*/
export function getUploadsDir(): string {
return directoriesConfig.uploads;
}
/**
* Get temporary path for a file during upload
* This ensures unique temporary file names to avoid conflicts
* Files are stored directly in temp-uploads with timestamp + random suffix
*/
export function getTempFilePath(objectName: string): string {
const sanitizedName = objectName.replace(/[^a-zA-Z0-9\-_./]/g, "_");
const timestamp = Date.now();
const randomSuffix = Math.random().toString(36).substring(2, 8);
return path.join(getTempUploadDir(), `${timestamp}-${randomSuffix}-${sanitizedName}.tmp`);
}
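A quick usage sketch for this config module (hypothetical caller, not part of the commit): getTempFilePath produces a collision-resistant path under temp-uploads, and a finished temp file can be renamed into place — atomic as long as temp-uploads and uploads live on the same filesystem, which they do under BASE_DIR.

import * as fs from "fs/promises";
import * as path from "path";
import { getTempFilePath, getUploadsDir } from "./directories.config";

// Hypothetical helper: write-then-rename so readers never observe a partial file.
async function writeAtomically(objectName: string, data: Buffer): Promise<void> {
  const tempPath = getTempFilePath(objectName); // e.g. <base>/temp-uploads/1751770000000-a1b2c3-photo.png.tmp
  const finalPath = path.join(getUploadsDir(), objectName);
  await fs.mkdir(path.dirname(finalPath), { recursive: true });
  await fs.writeFile(tempPath, data);
  await fs.rename(tempPath, finalPath); // atomic on the same filesystem
}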

View File

@@ -1,30 +1,9 @@
import fs from "fs";
import path from "path";
import { FastifyReply, FastifyRequest } from "fastify";
import { EmailService } from "../email/service";
import { LogoService } from "./logo.service";
import { AppService } from "./service";
-const isDocker = (() => {
-try {
-require("fs").statSync("/.dockerenv");
-return true;
-} catch {
-try {
-return require("fs").readFileSync("/proc/self/cgroup", "utf8").includes("docker");
-} catch {
-return false;
-}
-}
-})();
-const baseDir = isDocker ? "/app/server" : process.cwd();
-const uploadsDir = path.join(baseDir, "uploads/logo");
-if (!fs.existsSync(uploadsDir)) {
-fs.mkdirSync(uploadsDir, { recursive: true });
-}
export class AppController {
private appService = new AppService();
private logoService = new LogoService();
@@ -102,7 +81,20 @@ export class AppController {
return reply.status(400).send({ error: "Only images are allowed" });
}
-const buffer = await file.toBuffer();
+// Logo files should be small (max 5MB), so we can safely use streaming to buffer
+const chunks: Buffer[] = [];
+const maxLogoSize = 5 * 1024 * 1024; // 5MB
+let totalSize = 0;
+for await (const chunk of file.file) {
+totalSize += chunk.length;
+if (totalSize > maxLogoSize) {
+throw new Error("Logo file too large. Maximum size is 5MB.");
+}
+chunks.push(chunk);
+}
+const buffer = Buffer.concat(chunks);
const base64Logo = await this.logoService.uploadLogo(buffer);
await this.appService.updateConfig("appLogo", base64Logo);
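The same capped stream-to-buffer pattern appears again in the avatar upload further down; a sketch of how it could be factored into a shared helper (the helper and its name are hypothetical, not part of the commit):

import { Readable } from "stream";

// Hypothetical helper: buffer a small multipart stream while enforcing a size cap.
async function readStreamToBuffer(stream: Readable, maxBytes: number, label: string): Promise<Buffer> {
  const chunks: Buffer[] = [];
  let totalSize = 0;
  for await (const chunk of stream) {
    totalSize += (chunk as Buffer).length;
    if (totalSize > maxBytes) {
      throw new Error(`${label} file too large. Maximum size is ${maxBytes / (1024 * 1024)}MB.`);
    }
    chunks.push(chunk as Buffer);
  }
  return Buffer.concat(chunks);
}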

View File

@@ -65,14 +65,8 @@ export class FilesystemController {
return reply.status(400).send({ error: "Invalid or expired upload token" });
}
-const contentLength = parseInt(request.headers["content-length"] || "0");
-const isLargeFile = contentLength > 50 * 1024 * 1024;
-if (isLargeFile) {
-await this.uploadLargeFile(request, provider, tokenData.objectName);
-} else {
-await this.uploadSmallFile(request, provider, tokenData.objectName);
-}
+// Use streaming for all files to avoid loading into RAM
+await this.uploadFileStream(request, provider, tokenData.objectName);
provider.consumeUploadToken(token);
reply.status(200).send({ message: "File uploaded successfully" });
@@ -82,99 +76,9 @@ export class FilesystemController {
}
}
-private async uploadLargeFile(request: FastifyRequest, provider: FilesystemStorageProvider, objectName: string) {
-const filePath = provider.getFilePath(objectName);
-const dir = path.dirname(filePath);
-await fs.promises.mkdir(dir, { recursive: true });
-const tempPath = `${filePath}.tmp`;
-const writeStream = fs.createWriteStream(tempPath);
-const encryptStream = provider.createEncryptStream();
-try {
-await pipeline(request.raw, encryptStream, writeStream);
-await fs.promises.rename(tempPath, filePath);
-} catch (error) {
-try {
-await fs.promises.unlink(tempPath);
-} catch (cleanupError) {
-console.error("Error deleting temp file:", cleanupError);
-}
-throw error;
-}
-}
-private async uploadSmallFile(request: FastifyRequest, provider: FilesystemStorageProvider, objectName: string) {
-const body = request.body as any;
-if (Buffer.isBuffer(body)) {
-if (body.length === 0) {
-throw new Error("No file data received");
-}
-await provider.uploadFile(objectName, body);
-return;
-}
-if (typeof body === "string") {
-const buffer = Buffer.from(body, "utf8");
-if (buffer.length === 0) {
-throw new Error("No file data received");
-}
-await provider.uploadFile(objectName, buffer);
-return;
-}
-if (typeof body === "object" && body !== null && !body.on) {
-const buffer = Buffer.from(JSON.stringify(body), "utf8");
-if (buffer.length === 0) {
-throw new Error("No file data received");
-}
-await provider.uploadFile(objectName, buffer);
-return;
-}
-if (body && typeof body.on === "function") {
-const chunks: Buffer[] = [];
-return new Promise<void>((resolve, reject) => {
-body.on("data", (chunk: Buffer) => {
-chunks.push(chunk);
-});
-body.on("end", async () => {
-try {
-const buffer = Buffer.concat(chunks);
-if (buffer.length === 0) {
-throw new Error("No file data received");
-}
-await provider.uploadFile(objectName, buffer);
-resolve();
-} catch (error) {
-console.error("Error uploading small file:", error);
-reject(error);
-}
-});
-body.on("error", (error: Error) => {
-console.error("Error reading upload stream:", error);
-reject(error);
-});
-});
-}
-try {
-const buffer = Buffer.from(body);
-if (buffer.length === 0) {
-throw new Error("No file data received");
-}
-await provider.uploadFile(objectName, buffer);
-} catch (error) {
-throw new Error(`Unsupported request body type: ${typeof body}. Expected stream, buffer, string, or object.`);
-}
+private async uploadFileStream(request: FastifyRequest, provider: FilesystemStorageProvider, objectName: string) {
+// Use the provider's streaming upload method directly
+await provider.uploadFileFromStream(objectName, request.raw);
+}
async download(request: FastifyRequest, reply: FastifyReply) {
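Streaming request.raw like this assumes Fastify's body parsing is bypassed for the upload route; a minimal sketch of one way to do that (an assumption about the app setup, not shown in this diff):

import Fastify from "fastify";

const app = Fastify();
// Pass the raw payload stream through untouched so the handler can pipe it to disk.
app.addContentTypeParser("*", (request, payload, done) => {
  done(null, payload);
});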

View File

@@ -533,8 +533,23 @@ export class ReverseShareService {
const { FilesystemStorageProvider } = await import("../../providers/filesystem-storage.provider.js");
const provider = FilesystemStorageProvider.getInstance();
-const sourceBuffer = await provider.downloadFile(file.objectName);
-await provider.uploadFile(newObjectName, sourceBuffer);
+// Use streaming copy for filesystem mode
+const sourcePath = provider.getFilePath(file.objectName);
+const fs = await import("fs");
+const { pipeline } = await import("stream/promises");
+const sourceStream = fs.createReadStream(sourcePath);
+const decryptStream = provider.createDecryptStream();
+// Create a passthrough stream to get the decrypted content
+const { PassThrough } = await import("stream");
+const passThrough = new PassThrough();
+// First, decrypt the source file into the passthrough stream
+await pipeline(sourceStream, decryptStream, passThrough);
+// Then upload the decrypted content
+await provider.uploadFileFromStream(newObjectName, passThrough);
} else {
const downloadUrl = await this.fileService.getPresignedGetUrl(file.objectName, 300);
const uploadUrl = await this.fileService.getPresignedPutUrl(newObjectName, 300);
@@ -544,11 +559,13 @@ export class ReverseShareService {
throw new Error(`Failed to download file: ${response.statusText}`);
}
-const fileBuffer = Buffer.from(await response.arrayBuffer());
+if (!response.body) {
+throw new Error("No response body received");
+}
const uploadResponse = await fetch(uploadUrl, {
method: "PUT",
-body: fileBuffer,
+body: response.body,
headers: {
"Content-Type": "application/octet-stream",
},
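One caveat worth noting for the streamed PUT above: Node's built-in fetch (undici, Node 18+) requires the duplex option when the request body is a stream. A hedged sketch, assuming the server runs on Node's native fetch:

// Sketch: without duplex: "half", undici rejects stream bodies at call time.
const uploadResponse = await fetch(uploadUrl, {
  method: "PUT",
  body: response.body,
  headers: { "Content-Type": "application/octet-stream" },
  duplex: "half", // may require a cast, depending on the TypeScript fetch typings
} as RequestInit);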

View File

@@ -106,7 +106,20 @@ export class UserController {
return reply.status(400).send({ error: "Only images are allowed" });
}
-const buffer = await file.toBuffer();
+// Avatar files should be small (max 5MB), so we can safely use streaming to buffer
+const chunks: Buffer[] = [];
+const maxAvatarSize = 5 * 1024 * 1024; // 5MB
+let totalSize = 0;
+for await (const chunk of file.file) {
+totalSize += chunk.length;
+if (totalSize > maxAvatarSize) {
+throw new Error("Avatar file too large. Maximum size is 5MB.");
+}
+chunks.push(chunk);
+}
+const buffer = Buffer.concat(chunks);
const base64Image = await this.avatarService.uploadAvatar(buffer);
const updatedUser = await this.userService.updateUserImage(userId, base64Image);

View File

@@ -5,6 +5,7 @@ import * as path from "path";
import { Transform } from "stream";
import { pipeline } from "stream/promises";
+import { directoriesConfig, getTempFilePath } from "../config/directories.config";
import { env } from "../env";
import { StorageProvider } from "../types/storage";
import { IS_RUNNING_IN_CONTAINER } from "../utils/container-detection";
@@ -17,10 +18,11 @@ export class FilesystemStorageProvider implements StorageProvider {
private downloadTokens = new Map<string, { objectName: string; expiresAt: number; fileName?: string }>();
private constructor() {
-this.uploadsDir = IS_RUNNING_IN_CONTAINER ? "/app/server/uploads" : path.join(process.cwd(), "uploads");
+this.uploadsDir = directoriesConfig.uploads;
this.ensureUploadsDir();
setInterval(() => this.cleanExpiredTokens(), 5 * 60 * 1000);
+setInterval(() => this.cleanupEmptyTempDirs(), 10 * 60 * 1000); // Every 10 minutes
}
public static getInstance(): FilesystemStorageProvider {
@@ -177,28 +179,40 @@ export class FilesystemStorageProvider implements StorageProvider {
}
async uploadFile(objectName: string, buffer: Buffer): Promise<void> {
+// For backward compatibility, convert buffer to stream and use streaming upload
-const filePath = this.getFilePath(objectName);
-const dir = path.dirname(filePath);
-await fs.mkdir(dir, { recursive: true });
-if (buffer.length > 50 * 1024 * 1024) {
-await this.uploadFileStream(objectName, buffer);
-} else {
-const encryptedBuffer = this.encryptFileBuffer(buffer);
-await fs.writeFile(filePath, encryptedBuffer);
-}
+const { Readable } = await import("stream");
+const readable = Readable.from(buffer);
+await this.uploadFileFromStream(objectName, readable);
}
-private async uploadFileStream(objectName: string, buffer: Buffer): Promise<void> {
+async uploadFileFromStream(objectName: string, inputStream: NodeJS.ReadableStream): Promise<void> {
const filePath = this.getFilePath(objectName);
-const { Readable } = await import("stream");
const dir = path.dirname(filePath);
-const readable = Readable.from(buffer);
-const writeStream = fsSync.createWriteStream(filePath);
await fs.mkdir(dir, { recursive: true });
+// Use the new temp file system for better organization
+const tempPath = getTempFilePath(objectName);
+const tempDir = path.dirname(tempPath);
+await fs.mkdir(tempDir, { recursive: true });
+const writeStream = fsSync.createWriteStream(tempPath);
const encryptStream = this.createEncryptStream();
-await pipeline(readable, encryptStream, writeStream);
+try {
+await pipeline(inputStream, encryptStream, writeStream);
+await fs.rename(tempPath, filePath);
+} catch (error) {
+await this.cleanupTempFile(tempPath);
+throw error;
+}
}
private encryptFileBuffer(buffer: Buffer): Buffer {
@@ -288,4 +302,81 @@ export class FilesystemStorageProvider implements StorageProvider {
consumeDownloadToken(token: string): void {
this.downloadTokens.delete(token);
}
/**
* Clean up temporary file and its parent directory if empty
*/
private async cleanupTempFile(tempPath: string): Promise<void> {
try {
// Remove the temp file
await fs.unlink(tempPath);
// Try to remove the parent directory if it's empty
const tempDir = path.dirname(tempPath);
try {
const files = await fs.readdir(tempDir);
if (files.length === 0) {
await fs.rmdir(tempDir);
}
} catch (dirError: any) {
// Ignore errors when trying to remove directory (might not be empty or might not exist)
if (dirError.code !== "ENOTEMPTY" && dirError.code !== "ENOENT") {
console.warn("Warning: Could not remove temp directory:", dirError.message);
}
}
} catch (cleanupError: any) {
if (cleanupError.code !== "ENOENT") {
console.error("Error deleting temp file:", cleanupError);
}
}
}
/**
* Clean up empty temporary directories periodically
*/
private async cleanupEmptyTempDirs(): Promise<void> {
try {
const tempUploadsDir = directoriesConfig.tempUploads;
// Check if temp-uploads directory exists
try {
await fs.access(tempUploadsDir);
} catch {
return; // Directory doesn't exist, nothing to clean
}
const items = await fs.readdir(tempUploadsDir);
for (const item of items) {
const itemPath = path.join(tempUploadsDir, item);
try {
const stat = await fs.stat(itemPath);
if (stat.isDirectory()) {
// Check if directory is empty
const dirContents = await fs.readdir(itemPath);
if (dirContents.length === 0) {
await fs.rmdir(itemPath);
console.log(`🧹 Cleaned up empty temp directory: ${itemPath}`);
}
} else if (stat.isFile()) {
// Check if file is older than 1 hour (stale temp files)
const oneHourAgo = Date.now() - 60 * 60 * 1000;
if (stat.mtime.getTime() < oneHourAgo) {
await fs.unlink(itemPath);
console.log(`🧹 Cleaned up stale temp file: ${itemPath}`);
}
}
} catch (error: any) {
// Ignore errors for individual items
if (error.code !== "ENOENT") {
console.warn(`Warning: Could not process temp item ${itemPath}:`, error.message);
}
}
}
} catch (error) {
console.error("Error during temp directory cleanup:", error);
}
}
}
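A usage sketch for the new streaming API (the file path and object name here are hypothetical):

import * as fsSync from "fs";
import { FilesystemStorageProvider } from "../providers/filesystem-storage.provider";

async function example(): Promise<void> {
  const provider = FilesystemStorageProvider.getInstance();
  // Pipe a local file into encrypted storage without buffering it in memory.
  const source = fsSync.createReadStream("/tmp/example-large-file.bin");
  await provider.uploadFileFromStream("user-123/example-large-file.bin", source);
}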

View File

@@ -5,6 +5,7 @@ import fastifyMultipart from "@fastify/multipart";
import fastifyStatic from "@fastify/static";
import { buildApp } from "./app";
+import { directoriesConfig } from "./config/directories.config";
import { env } from "./env";
import { appRoutes } from "./modules/app/routes";
import { authProvidersRoutes } from "./modules/auth-providers/routes";
@@ -27,22 +28,18 @@ if (typeof global.crypto === "undefined") {
}
async function ensureDirectories() {
-const baseDir = IS_RUNNING_IN_CONTAINER ? "/app/server" : process.cwd();
-const uploadsDir = path.join(baseDir, "uploads");
-const tempChunksDir = path.join(baseDir, "temp-chunks");
+const dirsToCreate = [
+{ path: directoriesConfig.uploads, name: "uploads" },
+{ path: directoriesConfig.tempUploads, name: "temp-uploads" },
+];
-try {
-await fs.access(uploadsDir);
-} catch {
-await fs.mkdir(uploadsDir, { recursive: true });
-console.log(`📁 Created uploads directory: ${uploadsDir}`);
-}
-try {
-await fs.access(tempChunksDir);
-} catch {
-await fs.mkdir(tempChunksDir, { recursive: true });
-console.log(`📁 Created temp-chunks directory: ${tempChunksDir}`);
+for (const dir of dirsToCreate) {
+try {
+await fs.access(dir.path);
+} catch {
+await fs.mkdir(dir.path, { recursive: true });
+console.log(`📁 Created ${dir.name} directory: ${dir.path}`);
+}
+}
}
@@ -63,11 +60,8 @@ async function startServer() {
});
if (env.ENABLE_S3 !== "true") {
-const baseDir = IS_RUNNING_IN_CONTAINER ? "/app/server" : process.cwd();
-const uploadsPath = path.join(baseDir, "uploads");
await app.register(fastifyStatic, {
-root: uploadsPath,
+root: directoriesConfig.uploads,
prefix: "/uploads/",
decorateReply: false,
});

View File

@@ -21,12 +21,6 @@ const nextConfig: NextConfig = {
bodySizeLimit: "1pb",
},
},
-api: {
-bodyParser: {
-sizeLimit: "1pb",
-},
-responseLimit: false,
-},
};
const withNextIntl = createNextIntlPlugin();
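The removed api block is not a recognized next.config option (body-parser limits belong to per-route configuration), so dropping it is a cleanup; with the App Router, limits are declared in each route handler via route segment config, as the route files below show. A representative sketch:

// Route segment config (App Router): declared per route handler.
export const maxDuration = 300; // seconds the handler may run
export const dynamic = "force-dynamic"; // opt out of static optimization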

View File

@@ -1,5 +1,8 @@
import { NextRequest, NextResponse } from "next/server";
+export const maxDuration = 300; // 5 minutes for logo uploads
+export const dynamic = "force-dynamic";
const API_BASE_URL = process.env.API_BASE_URL || "http://localhost:3333";
export async function POST(req: NextRequest) {

View File

@@ -1,6 +1,6 @@
import { NextRequest, NextResponse } from "next/server";
-export const maxDuration = 3000;
+export const maxDuration = 30000;
export const dynamic = "force-dynamic";
const API_BASE_URL = process.env.API_BASE_URL || "http://localhost:3333";

View File

@@ -1,5 +1,7 @@
import { NextRequest, NextResponse } from "next/server";
+export const dynamic = "force-dynamic";
const API_BASE_URL = process.env.API_BASE_URL || "http://localhost:3333";
export async function POST(req: NextRequest, { params }: { params: Promise<{ alias: string }> }) {

View File

@@ -1,5 +1,7 @@
import { NextRequest, NextResponse } from "next/server";
+export const dynamic = "force-dynamic";
const API_BASE_URL = process.env.API_BASE_URL || "http://localhost:3333";
export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {

View File

@@ -1,5 +1,8 @@
import { NextRequest, NextResponse } from "next/server";
+export const maxDuration = 300; // 5 minutes for avatar uploads
+export const dynamic = "force-dynamic";
const API_BASE_URL = process.env.API_BASE_URL || "http://localhost:3333";
export async function POST(req: NextRequest) {

View File

@@ -1,62 +1,51 @@
import { useEffect, useState } from "react";
import { Input } from "@/components/ui/input";
+import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select";
export interface FileSizeInputProps {
value: string;
onChange: (value: string) => void;
disabled?: boolean;
error?: any;
+placeholder?: string;
}
-type Unit = "MB" | "GB" | "TB";
+type Unit = "MB" | "GB" | "TB" | "PB";
const UNIT_MULTIPLIERS: Record<Unit, number> = {
MB: 1024 * 1024,
GB: 1024 * 1024 * 1024,
TB: 1024 * 1024 * 1024 * 1024,
+PB: 1024 * 1024 * 1024 * 1024 * 1024,
};
function bytesToHumanReadable(bytes: string): { value: string; unit: Unit } {
const numBytes = parseInt(bytes, 10);
if (!numBytes || numBytes <= 0) {
return { value: "0", unit: "GB" };
return { value: "0", unit: "MB" };
}
-if (numBytes >= UNIT_MULTIPLIERS.TB) {
-const tbValue = numBytes / UNIT_MULTIPLIERS.TB;
-if (tbValue === Math.floor(tbValue)) {
-return {
-value: tbValue.toString(),
-unit: "TB",
-};
+const units: Unit[] = ["PB", "TB", "GB", "MB"];
+for (const unit of units) {
+const multiplier = UNIT_MULTIPLIERS[unit];
+const value = numBytes / multiplier;
+if (value >= 1) {
+const rounded = Math.round(value * 100) / 100;
+if (Math.abs(rounded - Math.round(rounded)) < 0.01) {
+return { value: Math.round(rounded).toString(), unit };
+} else {
+return { value: rounded.toFixed(2), unit };
+}
+}
+}
-if (numBytes >= UNIT_MULTIPLIERS.GB) {
-const gbValue = numBytes / UNIT_MULTIPLIERS.GB;
-if (gbValue === Math.floor(gbValue)) {
-return {
-value: gbValue.toString(),
-unit: "GB",
-};
-}
-}
-if (numBytes >= UNIT_MULTIPLIERS.MB) {
-const mbValue = numBytes / UNIT_MULTIPLIERS.MB;
-return {
-value: mbValue === Math.floor(mbValue) ? mbValue.toString() : mbValue.toFixed(2),
-unit: "MB",
-};
-}
const mbValue = numBytes / UNIT_MULTIPLIERS.MB;
-return {
-value: mbValue.toFixed(3),
-unit: "MB",
-};
+return { value: mbValue.toFixed(2), unit: "MB" as Unit };
}
function humanReadableToBytes(value: string, unit: Unit): string {
@@ -68,9 +57,9 @@ function humanReadableToBytes(value: string, unit: Unit): string {
return Math.floor(numValue * UNIT_MULTIPLIERS[unit]).toString();
}
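For concreteness, expected round-trips through these helpers (values derived from the code above):

// bytesToHumanReadable picks the largest unit with a value >= 1,
// rounding to two decimals and collapsing near-integers.
bytesToHumanReadable("1073741824"); // { value: "1", unit: "GB" }
bytesToHumanReadable("1649267441664"); // { value: "1.50", unit: "TB" }
humanReadableToBytes("1.50", "TB"); // "1649267441664"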
-export function FileSizeInput({ value, onChange, disabled = false, error }: FileSizeInputProps) {
+export function FileSizeInput({ value, onChange, disabled = false, error, placeholder = "0" }: FileSizeInputProps) {
const [displayValue, setDisplayValue] = useState("");
-const [selectedUnit, setSelectedUnit] = useState<Unit>("GB");
+const [selectedUnit, setSelectedUnit] = useState<Unit>("MB");
useEffect(() => {
if (value && value !== "0") {
@@ -79,7 +68,7 @@ export function FileSizeInput({ value, onChange, disabled = false, error }: File
setSelectedUnit(unit);
} else {
setDisplayValue("");
-setSelectedUnit("GB");
+setSelectedUnit("MB");
}
}, [value]);
@@ -100,6 +89,10 @@ export function FileSizeInput({ value, onChange, disabled = false, error }: File
};
const handleUnitChange = (newUnit: Unit) => {
+if (!newUnit || !["MB", "GB", "TB", "PB"].includes(newUnit)) {
+return;
+}
setSelectedUnit(newUnit);
if (displayValue && displayValue !== "0") {
@@ -114,21 +107,27 @@ export function FileSizeInput({ value, onChange, disabled = false, error }: File
type="text"
value={displayValue}
onChange={(e) => handleValueChange(e.target.value)}
-placeholder="0"
+placeholder={placeholder}
className="flex-1"
disabled={disabled}
aria-invalid={!!error}
/>
-<select
+<Select
+key={`${selectedUnit}-${displayValue}`}
value={selectedUnit}
-onChange={(e) => handleUnitChange(e.target.value as Unit)}
-className="w-20 rounded-md border border-input bg-transparent px-3 py-2 text-sm"
+onValueChange={handleUnitChange}
disabled={disabled}
>
-<option value="MB">MB</option>
-<option value="GB">GB</option>
-<option value="TB">TB</option>
-</select>
+<SelectTrigger className="w-20">
+<SelectValue />
+</SelectTrigger>
+<SelectContent>
+<SelectItem value="MB">MB</SelectItem>
+<SelectItem value="GB">GB</SelectItem>
+<SelectItem value="TB">TB</SelectItem>
+<SelectItem value="PB">PB</SelectItem>
+</SelectContent>
+</Select>
</div>
);
}
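A usage sketch for the reworked component (the surrounding form state is hypothetical):

import { useState } from "react";
import { FileSizeInput } from "./file-size-input";

export function MaxFileSizeField() {
  // The component holds bytes as a string; 1073741824 renders as "1 GB".
  const [maxFileSize, setMaxFileSize] = useState("1073741824");
  return <FileSizeInput value={maxFileSize} onChange={setMaxFileSize} placeholder="0" />;
}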

View File

@@ -8,6 +8,7 @@ import { Label } from "@/components/ui/label";
import { Switch } from "@/components/ui/switch";
import { Textarea } from "@/components/ui/textarea";
import { Config } from "../types";
+import { FileSizeInput } from "./file-size-input";
const HIDDEN_FIELDS = ["serverUrl", "firstUserAccess"];
@@ -64,6 +65,19 @@ export function SettingsInput({
);
}
+// Use FileSizeInput for storage size fields
+if (config.key === "maxFileSize" || config.key === "maxTotalStoragePerUser") {
+const currentValue = watch(`configs.${config.key}`) || "0";
+return (
+<FileSizeInput
+value={currentValue}
+onChange={(value) => setValue(`configs.${config.key}`, value)}
+disabled={isDisabled}
+placeholder="0"
+/>
+);
+}
if (config.type === "number" || config.type === "bigint") {
return (
<Input

View File

@@ -28,7 +28,7 @@ echo "📂 Data directory: /app/server"
echo "💾 Database: $DATABASE_URL"
echo "📁 Creating data directories..."
-mkdir -p /app/server/prisma /app/server/uploads /app/server/temp-chunks /app/server/uploads/logo
+mkdir -p /app/server/prisma /app/server/uploads /app/server/temp-uploads
if [ "$(id -u)" = "0" ]; then
echo "🔐 Ensuring proper ownership before database operations..."

View File

@@ -7,7 +7,7 @@ pidfile=/var/run/supervisord.pid
loglevel=info
[program:server]
-command=/bin/sh -c "export DATABASE_URL='file:/app/server/prisma/palmr.db' && export UPLOAD_PATH='/app/server/uploads' && export TEMP_CHUNKS_PATH='/app/server/temp-chunks' && /app/server-start.sh"
+command=/bin/sh -c "export DATABASE_URL='file:/app/server/prisma/palmr.db' && /app/server-start.sh"
directory=/app/palmr-app
user=root
autostart=true