mirror of https://github.com/kyantech/Palmr.git (synced 2025-10-23 06:11:58 +00:00)

refactor: rename temp-chunks to temp-uploads and update related configurations

- Changed references from 'temp-chunks' to 'temp-uploads' across .dockerignore, Dockerfile, and various configuration files for consistency.
- Introduced a new directories configuration file to manage directory paths more effectively.
- Updated file handling in the server code to use streaming for uploads and downloads, improving performance and memory management.
- Enhanced cleanup processes for temporary directories to maintain a tidy file structure.
@@ -62,9 +62,9 @@ docker-compose*

 # Storage directories (created at runtime)
 uploads/
-temp-chunks/
+temp-uploads/
 apps/server/uploads/
-apps/server/temp-chunks/
+apps/server/temp-uploads/

 # Static files
 apps/server/prisma/*.db
@@ -137,11 +137,9 @@ echo "Database: SQLite"

 # Set global environment variables
 export DATABASE_URL="file:/app/server/prisma/palmr.db"
-export UPLOAD_PATH="/app/server/uploads"
-export TEMP_CHUNKS_PATH="/app/server/temp-chunks"

 # Ensure /app/server directory exists for bind mounts
-mkdir -p /app/server/uploads /app/server/temp-chunks /app/server/uploads/logo /app/server/prisma
+mkdir -p /app/server/uploads /app/server/temp-uploads /app/server/prisma

 echo "Data directories ready for first run..."
apps/server/.gitignore (vendored)
@@ -2,5 +2,5 @@ node_modules
 .env
 dist/*
 uploads/*
-temp-chunks/*
+temp-uploads/*
 prisma/*.db
@@ -2,5 +2,6 @@
 /dist
 /build
 /uploads
-/temp-chunks
+/temp-uploads
+/logs
 /prisma/migrations
apps/server/src/config/directories.config.ts (new file, 52 lines)
@@ -0,0 +1,52 @@
+import * as path from "path";
+
+import { IS_RUNNING_IN_CONTAINER } from "../utils/container-detection";
+
+/**
+ * Directory Configuration for Palmr Server
+ *
+ * This configuration manages all directory paths used by the server,
+ * including temporary directories for uploads.
+ */
+
+export interface DirectoryConfig {
+  baseDir: string;
+  uploads: string;
+  tempUploads: string;
+}
+
+const BASE_DIR = IS_RUNNING_IN_CONTAINER ? "/app/server" : process.cwd();
+
+export const directoriesConfig: DirectoryConfig = {
+  baseDir: BASE_DIR,
+  uploads: path.join(BASE_DIR, "uploads"),
+  tempUploads: path.join(BASE_DIR, "temp-uploads"),
+};
+
+/**
+ * Get the temporary directory for upload operations
+ * This is where files are temporarily stored during streaming uploads
+ */
+export function getTempUploadDir(): string {
+  return directoriesConfig.tempUploads;
+}
+
+/**
+ * Get the uploads directory
+ * This is where final files are stored
+ */
+export function getUploadsDir(): string {
+  return directoriesConfig.uploads;
+}
+
+/**
+ * Get temporary path for a file during upload
+ * This ensures unique temporary file names to avoid conflicts
+ * Files are stored directly in temp-uploads with timestamp + random suffix
+ */
+export function getTempFilePath(objectName: string): string {
+  const sanitizedName = objectName.replace(/[^a-zA-Z0-9\-_./]/g, "_");
+  const timestamp = Date.now();
+  const randomSuffix = Math.random().toString(36).substring(2, 8);
+  return path.join(getTempUploadDir(), `${timestamp}-${randomSuffix}-${sanitizedName}.tmp`);
+}
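Note: a minimal usage sketch of the new configuration module (the import path and the example paths assume a non-container run where BASE_DIR is process.cwd(); both are illustrative):

import { directoriesConfig, getTempFilePath, getUploadsDir } from "./config/directories.config";

// Final files live under <baseDir>/uploads; in-flight files under <baseDir>/temp-uploads.
console.log(getUploadsDir());               // e.g. /home/palmr/uploads
console.log(directoriesConfig.tempUploads); // e.g. /home/palmr/temp-uploads

// Each call yields a unique temp path,
// e.g. /home/palmr/temp-uploads/1719310000000-a1b2c3-photo.png.tmp
const tmp = getTempFilePath("photo.png");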
@@ -1,30 +1,9 @@
-import fs from "fs";
-import path from "path";
-
 import { FastifyReply, FastifyRequest } from "fastify";

 import { EmailService } from "../email/service";
 import { LogoService } from "./logo.service";
 import { AppService } from "./service";

-const isDocker = (() => {
-  try {
-    require("fs").statSync("/.dockerenv");
-    return true;
-  } catch {
-    try {
-      return require("fs").readFileSync("/proc/self/cgroup", "utf8").includes("docker");
-    } catch {
-      return false;
-    }
-  }
-})();
-
-const baseDir = isDocker ? "/app/server" : process.cwd();
-const uploadsDir = path.join(baseDir, "uploads/logo");
-if (!fs.existsSync(uploadsDir)) {
-  fs.mkdirSync(uploadsDir, { recursive: true });
-}
-
 export class AppController {
   private appService = new AppService();
   private logoService = new LogoService();
@@ -102,7 +81,20 @@ export class AppController {
       return reply.status(400).send({ error: "Only images are allowed" });
     }

-    const buffer = await file.toBuffer();
+    // Logo files should be small (max 5MB), so we can safely use streaming to buffer
+    const chunks: Buffer[] = [];
+    const maxLogoSize = 5 * 1024 * 1024; // 5MB
+    let totalSize = 0;
+
+    for await (const chunk of file.file) {
+      totalSize += chunk.length;
+      if (totalSize > maxLogoSize) {
+        throw new Error("Logo file too large. Maximum size is 5MB.");
+      }
+      chunks.push(chunk);
+    }
+
+    const buffer = Buffer.concat(chunks);
     const base64Logo = await this.logoService.uploadLogo(buffer);
     await this.appService.updateConfig("appLogo", base64Logo);
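The same size-capped stream-to-buffer pattern appears again in the avatar upload further down. As a sketch, it could be factored into a shared helper (bufferStreamWithLimit is a hypothetical name, not part of this commit):

import { Readable } from "stream";

// Hypothetical shared helper: buffer a readable stream, failing fast once a byte cap is exceeded.
async function bufferStreamWithLimit(stream: Readable, maxBytes: number): Promise<Buffer> {
  const chunks: Buffer[] = [];
  let totalSize = 0;

  for await (const chunk of stream) {
    totalSize += (chunk as Buffer).length;
    if (totalSize > maxBytes) {
      throw new Error(`File too large. Maximum size is ${maxBytes} bytes.`);
    }
    chunks.push(chunk as Buffer);
  }

  return Buffer.concat(chunks);
}

// Usage in either controller: const buffer = await bufferStreamWithLimit(file.file, 5 * 1024 * 1024);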
@@ -65,14 +65,8 @@ export class FilesystemController {
       return reply.status(400).send({ error: "Invalid or expired upload token" });
     }

-    const contentLength = parseInt(request.headers["content-length"] || "0");
-    const isLargeFile = contentLength > 50 * 1024 * 1024;
-
-    if (isLargeFile) {
-      await this.uploadLargeFile(request, provider, tokenData.objectName);
-    } else {
-      await this.uploadSmallFile(request, provider, tokenData.objectName);
-    }
+    // Use streaming for all files to avoid loading into RAM
+    await this.uploadFileStream(request, provider, tokenData.objectName);

     provider.consumeUploadToken(token);
     reply.status(200).send({ message: "File uploaded successfully" });
@@ -82,99 +76,9 @@ export class FilesystemController {
     }
   }

-  private async uploadLargeFile(request: FastifyRequest, provider: FilesystemStorageProvider, objectName: string) {
-    const filePath = provider.getFilePath(objectName);
-    const dir = path.dirname(filePath);
-
-    await fs.promises.mkdir(dir, { recursive: true });
-
-    const tempPath = `${filePath}.tmp`;
-    const writeStream = fs.createWriteStream(tempPath);
-    const encryptStream = provider.createEncryptStream();
-
-    try {
-      await pipeline(request.raw, encryptStream, writeStream);
-
-      await fs.promises.rename(tempPath, filePath);
-    } catch (error) {
-      try {
-        await fs.promises.unlink(tempPath);
-      } catch (cleanupError) {
-        console.error("Error deleting temp file:", cleanupError);
-      }
-      throw error;
-    }
-  }
-
-  private async uploadSmallFile(request: FastifyRequest, provider: FilesystemStorageProvider, objectName: string) {
-    const body = request.body as any;
-
-    if (Buffer.isBuffer(body)) {
-      if (body.length === 0) {
-        throw new Error("No file data received");
-      }
-      await provider.uploadFile(objectName, body);
-      return;
-    }
-
-    if (typeof body === "string") {
-      const buffer = Buffer.from(body, "utf8");
-      if (buffer.length === 0) {
-        throw new Error("No file data received");
-      }
-      await provider.uploadFile(objectName, buffer);
-      return;
-    }
-
-    if (typeof body === "object" && body !== null && !body.on) {
-      const buffer = Buffer.from(JSON.stringify(body), "utf8");
-      if (buffer.length === 0) {
-        throw new Error("No file data received");
-      }
-      await provider.uploadFile(objectName, buffer);
-      return;
-    }
-
-    if (body && typeof body.on === "function") {
-      const chunks: Buffer[] = [];
-
-      return new Promise<void>((resolve, reject) => {
-        body.on("data", (chunk: Buffer) => {
-          chunks.push(chunk);
-        });
-
-        body.on("end", async () => {
-          try {
-            const buffer = Buffer.concat(chunks);
-
-            if (buffer.length === 0) {
-              throw new Error("No file data received");
-            }
-
-            await provider.uploadFile(objectName, buffer);
-            resolve();
-          } catch (error) {
-            console.error("Error uploading small file:", error);
-            reject(error);
-          }
-        });
-
-        body.on("error", (error: Error) => {
-          console.error("Error reading upload stream:", error);
-          reject(error);
-        });
-      });
-    }
-
-    try {
-      const buffer = Buffer.from(body);
-      if (buffer.length === 0) {
-        throw new Error("No file data received");
-      }
-      await provider.uploadFile(objectName, buffer);
-    } catch (error) {
-      throw new Error(`Unsupported request body type: ${typeof body}. Expected stream, buffer, string, or object.`);
-    }
-  }
+  private async uploadFileStream(request: FastifyRequest, provider: FilesystemStorageProvider, objectName: string) {
+    // Use the provider's streaming upload method directly
+    await provider.uploadFileFromStream(objectName, request.raw);
+  }

   async download(request: FastifyRequest, reply: FastifyReply) {
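Streaming request.raw like this assumes Fastify has not already consumed the body. A common setup (a sketch, not necessarily Palmr's exact configuration) is a content-type parser on the upload routes that leaves the payload stream untouched:

// Sketch: register a parser that skips buffering so the handler can read request.raw itself.
app.addContentTypeParser("application/octet-stream", (request, payload, done) => {
  // Do not consume `payload`; the route handler will stream request.raw directly.
  done(null, null);
});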
@@ -533,8 +533,23 @@ export class ReverseShareService {
       const { FilesystemStorageProvider } = await import("../../providers/filesystem-storage.provider.js");
       const provider = FilesystemStorageProvider.getInstance();

-      const sourceBuffer = await provider.downloadFile(file.objectName);
-      await provider.uploadFile(newObjectName, sourceBuffer);
+      // Use streaming copy for filesystem mode
+      const sourcePath = provider.getFilePath(file.objectName);
+      const fs = await import("fs");
+      const { pipeline } = await import("stream/promises");
+
+      const sourceStream = fs.createReadStream(sourcePath);
+      const decryptStream = provider.createDecryptStream();
+
+      // Create a passthrough stream to get the decrypted content
+      const { PassThrough } = await import("stream");
+      const passThrough = new PassThrough();
+
+      // First, decrypt the source file into the passthrough stream
+      await pipeline(sourceStream, decryptStream, passThrough);
+
+      // Then upload the decrypted content
+      await provider.uploadFileFromStream(newObjectName, passThrough);
     } else {
       const downloadUrl = await this.fileService.getPresignedGetUrl(file.objectName, 300);
       const uploadUrl = await this.fileService.getPresignedPutUrl(newObjectName, 300);
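One caveat with the sequential awaits above: pipeline() only resolves once the PassThrough's writable side finishes, yet nothing drains the PassThrough until the next line runs, so copies larger than the stream's internal buffer can stall on backpressure. A sketch that runs the decrypt pipeline and the upload concurrently instead, using the same provider API:

const { PassThrough } = await import("stream");
const passThrough = new PassThrough();

// Drain the PassThrough while it is being written by running both halves together.
await Promise.all([
  pipeline(sourceStream, decryptStream, passThrough),
  provider.uploadFileFromStream(newObjectName, passThrough),
]);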
@@ -544,11 +559,13 @@ export class ReverseShareService {
         throw new Error(`Failed to download file: ${response.statusText}`);
       }

-      const fileBuffer = Buffer.from(await response.arrayBuffer());
+      if (!response.body) {
+        throw new Error("No response body received");
+      }

       const uploadResponse = await fetch(uploadUrl, {
         method: "PUT",
-        body: fileBuffer,
+        body: response.body,
         headers: {
           "Content-Type": "application/octet-stream",
         },
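Passing response.body (a web ReadableStream) straight through as the PUT body avoids buffering, but note that Node's built-in fetch (undici, Node 18+) requires the half-duplex opt-in when the request body is a stream; without it the call throws. A sketch of the extra option:

const uploadResponse = await fetch(uploadUrl, {
  method: "PUT",
  body: response.body,
  // Required by undici when streaming a request body; TS DOM typings may not know it yet.
  duplex: "half",
  headers: {
    "Content-Type": "application/octet-stream",
  },
} as RequestInit & { duplex: "half" });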
@@ -106,7 +106,20 @@ export class UserController {
       return reply.status(400).send({ error: "Only images are allowed" });
     }

-    const buffer = await file.toBuffer();
+    // Avatar files should be small (max 5MB), so we can safely use streaming to buffer
+    const chunks: Buffer[] = [];
+    const maxAvatarSize = 5 * 1024 * 1024; // 5MB
+    let totalSize = 0;
+
+    for await (const chunk of file.file) {
+      totalSize += chunk.length;
+      if (totalSize > maxAvatarSize) {
+        throw new Error("Avatar file too large. Maximum size is 5MB.");
+      }
+      chunks.push(chunk);
+    }
+
+    const buffer = Buffer.concat(chunks);
     const base64Image = await this.avatarService.uploadAvatar(buffer);
     const updatedUser = await this.userService.updateUserImage(userId, base64Image);
@@ -5,6 +5,7 @@ import * as path from "path";
 import { Transform } from "stream";
 import { pipeline } from "stream/promises";

+import { directoriesConfig, getTempFilePath } from "../config/directories.config";
 import { env } from "../env";
 import { StorageProvider } from "../types/storage";
 import { IS_RUNNING_IN_CONTAINER } from "../utils/container-detection";
@@ -17,10 +18,11 @@ export class FilesystemStorageProvider implements StorageProvider {
   private downloadTokens = new Map<string, { objectName: string; expiresAt: number; fileName?: string }>();

   private constructor() {
-    this.uploadsDir = IS_RUNNING_IN_CONTAINER ? "/app/server/uploads" : path.join(process.cwd(), "uploads");
+    this.uploadsDir = directoriesConfig.uploads;

     this.ensureUploadsDir();
     setInterval(() => this.cleanExpiredTokens(), 5 * 60 * 1000);
+    setInterval(() => this.cleanupEmptyTempDirs(), 10 * 60 * 1000); // Every 10 minutes
   }

   public static getInstance(): FilesystemStorageProvider {
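A small hardening idea, not part of this commit: calling .unref() on both intervals keeps the cleanup timers from holding the process open (useful in test runs and one-shot scripts):

setInterval(() => this.cleanExpiredTokens(), 5 * 60 * 1000).unref();
setInterval(() => this.cleanupEmptyTempDirs(), 10 * 60 * 1000).unref(); // Every 10 minutes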
@@ -177,28 +179,40 @@ export class FilesystemStorageProvider implements StorageProvider {
   }

   async uploadFile(objectName: string, buffer: Buffer): Promise<void> {
+    // For backward compatibility, convert buffer to stream and use streaming upload
     const filePath = this.getFilePath(objectName);
     const dir = path.dirname(filePath);

     await fs.mkdir(dir, { recursive: true });

-    if (buffer.length > 50 * 1024 * 1024) {
-      await this.uploadFileStream(objectName, buffer);
-    } else {
-      const encryptedBuffer = this.encryptFileBuffer(buffer);
-      await fs.writeFile(filePath, encryptedBuffer);
-    }
-  }
-
-  private async uploadFileStream(objectName: string, buffer: Buffer): Promise<void> {
-    const filePath = this.getFilePath(objectName);
     const { Readable } = await import("stream");
-
     const readable = Readable.from(buffer);
-    const writeStream = fsSync.createWriteStream(filePath);
+    await this.uploadFileFromStream(objectName, readable);
+  }
+
+  async uploadFileFromStream(objectName: string, inputStream: NodeJS.ReadableStream): Promise<void> {
+    const filePath = this.getFilePath(objectName);
+    const dir = path.dirname(filePath);
+
+    await fs.mkdir(dir, { recursive: true });
+
+    // Use the new temp file system for better organization
+    const tempPath = getTempFilePath(objectName);
+    const tempDir = path.dirname(tempPath);
+
+    await fs.mkdir(tempDir, { recursive: true });
+
+    const writeStream = fsSync.createWriteStream(tempPath);
     const encryptStream = this.createEncryptStream();

-    await pipeline(readable, encryptStream, writeStream);
+    try {
+      await pipeline(inputStream, encryptStream, writeStream);
+      await fs.rename(tempPath, filePath);
+    } catch (error) {
+      await this.cleanupTempFile(tempPath);
+      throw error;
+    }
   }

   private encryptFileBuffer(buffer: Buffer): Buffer {
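The write-to-temp-then-rename sequence is what makes the upload atomic: readers of the uploads directory only ever see fully written, fully encrypted files, because rename() on the same filesystem is atomic. This does assume temp-uploads and uploads sit on the same filesystem (rename cannot cross mount points). A usage sketch of the new streaming entry point, with illustrative file names and import path:

import * as fsSync from "fs";
import { FilesystemStorageProvider } from "./providers/filesystem-storage.provider";

const provider = FilesystemStorageProvider.getInstance();

// Stream a local file into encrypted storage without buffering it in memory.
await provider.uploadFileFromStream("backups/dump.sql", fsSync.createReadStream("/tmp/dump.sql"));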
@@ -288,4 +302,81 @@ export class FilesystemStorageProvider implements StorageProvider {
   consumeDownloadToken(token: string): void {
     this.downloadTokens.delete(token);
   }
+
+  /**
+   * Clean up temporary file and its parent directory if empty
+   */
+  private async cleanupTempFile(tempPath: string): Promise<void> {
+    try {
+      // Remove the temp file
+      await fs.unlink(tempPath);
+
+      // Try to remove the parent directory if it's empty
+      const tempDir = path.dirname(tempPath);
+      try {
+        const files = await fs.readdir(tempDir);
+        if (files.length === 0) {
+          await fs.rmdir(tempDir);
+        }
+      } catch (dirError: any) {
+        // Ignore errors when trying to remove directory (might not be empty or might not exist)
+        if (dirError.code !== "ENOTEMPTY" && dirError.code !== "ENOENT") {
+          console.warn("Warning: Could not remove temp directory:", dirError.message);
+        }
+      }
+    } catch (cleanupError: any) {
+      if (cleanupError.code !== "ENOENT") {
+        console.error("Error deleting temp file:", cleanupError);
+      }
+    }
+  }
+
+  /**
+   * Clean up empty temporary directories periodically
+   */
+  private async cleanupEmptyTempDirs(): Promise<void> {
+    try {
+      const tempUploadsDir = directoriesConfig.tempUploads;
+
+      // Check if temp-uploads directory exists
+      try {
+        await fs.access(tempUploadsDir);
+      } catch {
+        return; // Directory doesn't exist, nothing to clean
+      }
+
+      const items = await fs.readdir(tempUploadsDir);
+
+      for (const item of items) {
+        const itemPath = path.join(tempUploadsDir, item);
+
+        try {
+          const stat = await fs.stat(itemPath);
+
+          if (stat.isDirectory()) {
+            // Check if directory is empty
+            const dirContents = await fs.readdir(itemPath);
+            if (dirContents.length === 0) {
+              await fs.rmdir(itemPath);
+              console.log(`🧹 Cleaned up empty temp directory: ${itemPath}`);
+            }
+          } else if (stat.isFile()) {
+            // Check if file is older than 1 hour (stale temp files)
+            const oneHourAgo = Date.now() - 60 * 60 * 1000;
+            if (stat.mtime.getTime() < oneHourAgo) {
+              await fs.unlink(itemPath);
+              console.log(`🧹 Cleaned up stale temp file: ${itemPath}`);
+            }
+          }
+        } catch (error: any) {
+          // Ignore errors for individual items
+          if (error.code !== "ENOENT") {
+            console.warn(`Warning: Could not process temp item ${itemPath}:`, error.message);
+          }
+        }
+      }
+    } catch (error) {
+      console.error("Error during temp directory cleanup:", error);
+    }
+  }
 }
@@ -5,6 +5,7 @@ import fastifyMultipart from "@fastify/multipart";
 import fastifyStatic from "@fastify/static";

 import { buildApp } from "./app";
+import { directoriesConfig } from "./config/directories.config";
 import { env } from "./env";
 import { appRoutes } from "./modules/app/routes";
 import { authProvidersRoutes } from "./modules/auth-providers/routes";
@@ -27,22 +28,18 @@ if (typeof global.crypto === "undefined") {
 }

 async function ensureDirectories() {
-  const baseDir = IS_RUNNING_IN_CONTAINER ? "/app/server" : process.cwd();
-  const uploadsDir = path.join(baseDir, "uploads");
-  const tempChunksDir = path.join(baseDir, "temp-chunks");
+  const dirsToCreate = [
+    { path: directoriesConfig.uploads, name: "uploads" },
+    { path: directoriesConfig.tempUploads, name: "temp-uploads" },
+  ];

-  try {
-    await fs.access(uploadsDir);
-  } catch {
-    await fs.mkdir(uploadsDir, { recursive: true });
-    console.log(`📁 Created uploads directory: ${uploadsDir}`);
-  }
-
-  try {
-    await fs.access(tempChunksDir);
-  } catch {
-    await fs.mkdir(tempChunksDir, { recursive: true });
-    console.log(`📁 Created temp-chunks directory: ${tempChunksDir}`);
+  for (const dir of dirsToCreate) {
+    try {
+      await fs.access(dir.path);
+    } catch {
+      await fs.mkdir(dir.path, { recursive: true });
+      console.log(`📁 Created ${dir.name} directory: ${dir.path}`);
+    }
   }
 }
@@ -63,11 +60,8 @@ async function startServer() {
   });

   if (env.ENABLE_S3 !== "true") {
-    const baseDir = IS_RUNNING_IN_CONTAINER ? "/app/server" : process.cwd();
-    const uploadsPath = path.join(baseDir, "uploads");
-
     await app.register(fastifyStatic, {
-      root: uploadsPath,
+      root: directoriesConfig.uploads,
       prefix: "/uploads/",
       decorateReply: false,
     });
@@ -21,12 +21,6 @@ const nextConfig: NextConfig = {
       bodySizeLimit: "1pb",
     },
   },
-  api: {
-    bodyParser: {
-      sizeLimit: "1pb",
-    },
-    responseLimit: false,
-  },
 };

 const withNextIntl = createNextIntlPlugin();
@@ -1,5 +1,8 @@
 import { NextRequest, NextResponse } from "next/server";

+export const maxDuration = 300; // 5 minutes for logo uploads
+export const dynamic = "force-dynamic";
+
 const API_BASE_URL = process.env.API_BASE_URL || "http://localhost:3333";

 export async function POST(req: NextRequest) {
@@ -1,6 +1,6 @@
 import { NextRequest, NextResponse } from "next/server";

-export const maxDuration = 3000;
+export const maxDuration = 30000;
 export const dynamic = "force-dynamic";

 const API_BASE_URL = process.env.API_BASE_URL || "http://localhost:3333";
@@ -1,5 +1,7 @@
 import { NextRequest, NextResponse } from "next/server";

+export const dynamic = "force-dynamic";
+
 const API_BASE_URL = process.env.API_BASE_URL || "http://localhost:3333";

 export async function POST(req: NextRequest, { params }: { params: Promise<{ alias: string }> }) {
@@ -1,5 +1,7 @@
 import { NextRequest, NextResponse } from "next/server";

+export const dynamic = "force-dynamic";
+
 const API_BASE_URL = process.env.API_BASE_URL || "http://localhost:3333";

 export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
@@ -1,5 +1,8 @@
 import { NextRequest, NextResponse } from "next/server";

+export const maxDuration = 300; // 5 minutes for avatar uploads
+export const dynamic = "force-dynamic";
+
 const API_BASE_URL = process.env.API_BASE_URL || "http://localhost:3333";

 export async function POST(req: NextRequest) {
@@ -1,62 +1,51 @@
 import { useEffect, useState } from "react";

 import { Input } from "@/components/ui/input";
+import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select";

 export interface FileSizeInputProps {
   value: string;
   onChange: (value: string) => void;
   disabled?: boolean;
   error?: any;
+  placeholder?: string;
 }

-type Unit = "MB" | "GB" | "TB";
+type Unit = "MB" | "GB" | "TB" | "PB";

 const UNIT_MULTIPLIERS: Record<Unit, number> = {
   MB: 1024 * 1024,
   GB: 1024 * 1024 * 1024,
   TB: 1024 * 1024 * 1024 * 1024,
+  PB: 1024 * 1024 * 1024 * 1024 * 1024,
 };

 function bytesToHumanReadable(bytes: string): { value: string; unit: Unit } {
   const numBytes = parseInt(bytes, 10);

   if (!numBytes || numBytes <= 0) {
-    return { value: "0", unit: "GB" };
+    return { value: "0", unit: "MB" };
   }

-  if (numBytes >= UNIT_MULTIPLIERS.TB) {
-    const tbValue = numBytes / UNIT_MULTIPLIERS.TB;
-    if (tbValue === Math.floor(tbValue)) {
-      return {
-        value: tbValue.toString(),
-        unit: "TB",
-      };
-    }
-  }
+  const units: Unit[] = ["PB", "TB", "GB", "MB"];

-  if (numBytes >= UNIT_MULTIPLIERS.GB) {
-    const gbValue = numBytes / UNIT_MULTIPLIERS.GB;
-    if (gbValue === Math.floor(gbValue)) {
-      return {
-        value: gbValue.toString(),
-        unit: "GB",
-      };
-    }
-  }
+  for (const unit of units) {
+    const multiplier = UNIT_MULTIPLIERS[unit];
+    const value = numBytes / multiplier;

-  if (numBytes >= UNIT_MULTIPLIERS.MB) {
-    const mbValue = numBytes / UNIT_MULTIPLIERS.MB;
-    return {
-      value: mbValue === Math.floor(mbValue) ? mbValue.toString() : mbValue.toFixed(2),
-      unit: "MB",
-    };
+    if (value >= 1) {
+      const rounded = Math.round(value * 100) / 100;
+      if (Math.abs(rounded - Math.round(rounded)) < 0.01) {
+        return { value: Math.round(rounded).toString(), unit };
+      } else {
+        return { value: rounded.toFixed(2), unit };
+      }
+    }
   }

   const mbValue = numBytes / UNIT_MULTIPLIERS.MB;
-  return {
-    value: mbValue.toFixed(3),
-    unit: "MB",
-  };
+  return { value: mbValue.toFixed(2), unit: "MB" as Unit };
 }

 function humanReadableToBytes(value: string, unit: Unit): string {
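A quick worked example of the new conversion loop: units are scanned largest-first, the first unit where the value reaches 1 wins, and values within 0.01 of an integer are snapped to whole numbers:

bytesToHumanReadable("1073741824"); // exactly 1 GiB        -> { value: "1", unit: "GB" }
bytesToHumanReadable("1610612736"); // 1.5 * 1024^3 bytes   -> { value: "1.50", unit: "GB" }
bytesToHumanReadable("524288");     // 0.5 MiB, below 1 MB  -> falls through to { value: "0.50", unit: "MB" }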
@@ -68,9 +57,9 @@ function humanReadableToBytes(value: string, unit: Unit): string {
   return Math.floor(numValue * UNIT_MULTIPLIERS[unit]).toString();
 }

-export function FileSizeInput({ value, onChange, disabled = false, error }: FileSizeInputProps) {
+export function FileSizeInput({ value, onChange, disabled = false, error, placeholder = "0" }: FileSizeInputProps) {
   const [displayValue, setDisplayValue] = useState("");
-  const [selectedUnit, setSelectedUnit] = useState<Unit>("GB");
+  const [selectedUnit, setSelectedUnit] = useState<Unit>("MB");

   useEffect(() => {
     if (value && value !== "0") {
@@ -79,7 +68,7 @@ export function FileSizeInput({ value, onChange, disabled = false, error }: FileSizeInputProps) {
       setSelectedUnit(unit);
     } else {
       setDisplayValue("");
-      setSelectedUnit("GB");
+      setSelectedUnit("MB");
     }
   }, [value]);

@@ -100,6 +89,10 @@ export function FileSizeInput({ value, onChange, disabled = false, error }: FileSizeInputProps) {
   };

   const handleUnitChange = (newUnit: Unit) => {
+    if (!newUnit || !["MB", "GB", "TB", "PB"].includes(newUnit)) {
+      return;
+    }
+
     setSelectedUnit(newUnit);

     if (displayValue && displayValue !== "0") {
|
|||||||
type="text"
|
type="text"
|
||||||
value={displayValue}
|
value={displayValue}
|
||||||
onChange={(e) => handleValueChange(e.target.value)}
|
onChange={(e) => handleValueChange(e.target.value)}
|
||||||
placeholder="0"
|
placeholder={placeholder}
|
||||||
className="flex-1"
|
className="flex-1"
|
||||||
disabled={disabled}
|
disabled={disabled}
|
||||||
aria-invalid={!!error}
|
aria-invalid={!!error}
|
||||||
/>
|
/>
|
||||||
<select
|
<Select
|
||||||
|
key={`${selectedUnit}-${displayValue}`}
|
||||||
value={selectedUnit}
|
value={selectedUnit}
|
||||||
onChange={(e) => handleUnitChange(e.target.value as Unit)}
|
onValueChange={handleUnitChange}
|
||||||
className="w-20 rounded-md border border-input bg-transparent px-3 py-2 text-sm"
|
|
||||||
disabled={disabled}
|
disabled={disabled}
|
||||||
>
|
>
|
||||||
<option value="MB">MB</option>
|
<SelectTrigger className="w-20">
|
||||||
<option value="GB">GB</option>
|
<SelectValue />
|
||||||
<option value="TB">TB</option>
|
</SelectTrigger>
|
||||||
</select>
|
<SelectContent>
|
||||||
|
<SelectItem value="MB">MB</SelectItem>
|
||||||
|
<SelectItem value="GB">GB</SelectItem>
|
||||||
|
<SelectItem value="TB">TB</SelectItem>
|
||||||
|
<SelectItem value="PB">PB</SelectItem>
|
||||||
|
</SelectContent>
|
||||||
|
</Select>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@@ -8,6 +8,7 @@ import { Label } from "@/components/ui/label";
 import { Switch } from "@/components/ui/switch";
 import { Textarea } from "@/components/ui/textarea";
 import { Config } from "../types";
+import { FileSizeInput } from "./file-size-input";

 const HIDDEN_FIELDS = ["serverUrl", "firstUserAccess"];

@@ -64,6 +65,19 @@ export function SettingsInput({
     );
   }

+  // Use FileSizeInput for storage size fields
+  if (config.key === "maxFileSize" || config.key === "maxTotalStoragePerUser") {
+    const currentValue = watch(`configs.${config.key}`) || "0";
+    return (
+      <FileSizeInput
+        value={currentValue}
+        onChange={(value) => setValue(`configs.${config.key}`, value)}
+        disabled={isDisabled}
+        placeholder="0"
+      />
+    );
+  }
+
   if (config.type === "number" || config.type === "bigint") {
     return (
       <Input
@@ -28,7 +28,7 @@ echo "📂 Data directory: /app/server"
 echo "💾 Database: $DATABASE_URL"

 echo "📁 Creating data directories..."
-mkdir -p /app/server/prisma /app/server/uploads /app/server/temp-chunks /app/server/uploads/logo
+mkdir -p /app/server/prisma /app/server/uploads /app/server/temp-uploads

 if [ "$(id -u)" = "0" ]; then
   echo "🔐 Ensuring proper ownership before database operations..."
@@ -7,7 +7,7 @@ pidfile=/var/run/supervisord.pid
 loglevel=info

 [program:server]
-command=/bin/sh -c "export DATABASE_URL='file:/app/server/prisma/palmr.db' && export UPLOAD_PATH='/app/server/uploads' && export TEMP_CHUNKS_PATH='/app/server/temp-chunks' && /app/server-start.sh"
+command=/bin/sh -c "export DATABASE_URL='file:/app/server/prisma/palmr.db' && /app/server-start.sh"
 directory=/app/palmr-app
 user=root
 autostart=true