Merge branch 'feat/chunked-uploads' into next

This commit is contained in:
Daniel Luiz Alves
2025-07-10 18:03:29 -03:00
45 changed files with 1336 additions and 243 deletions

View File

@@ -0,0 +1,349 @@
import * as fs from "fs";
import * as path from "path";
import { getTempFilePath } from "../../config/directories.config";
import { FilesystemStorageProvider } from "../../providers/filesystem-storage.provider";
export interface ChunkMetadata {
fileId: string;
chunkIndex: number;
totalChunks: number;
chunkSize: number;
totalSize: number;
fileName: string;
isLastChunk: boolean;
}
export interface ChunkInfo {
fileId: string;
fileName: string;
totalSize: number;
totalChunks: number;
uploadedChunks: Set<number>;
tempPath: string;
createdAt: number;
}
export class ChunkManager {
private static instance: ChunkManager;
private activeUploads = new Map<string, ChunkInfo>();
private finalizingUploads = new Set<string>(); // Track uploads currently being finalized
private cleanupInterval: NodeJS.Timeout;
private constructor() {
// Cleanup expired uploads every 30 minutes
this.cleanupInterval = setInterval(
() => {
this.cleanupExpiredUploads();
},
30 * 60 * 1000
);
}
public static getInstance(): ChunkManager {
if (!ChunkManager.instance) {
ChunkManager.instance = new ChunkManager();
}
return ChunkManager.instance;
}
/**
* Process a chunk upload with streaming
*/
async processChunk(
metadata: ChunkMetadata,
inputStream: NodeJS.ReadableStream,
originalObjectName: string
): Promise<{ isComplete: boolean; finalPath?: string }> {
const startTime = Date.now();
const { fileId, chunkIndex, totalChunks, fileName, totalSize, isLastChunk } = metadata;
console.log(`Processing chunk ${chunkIndex + 1}/${totalChunks} for file ${fileName} (${fileId})`);
let chunkInfo = this.activeUploads.get(fileId);
if (!chunkInfo) {
if (chunkIndex !== 0) {
throw new Error("First chunk must be chunk 0");
}
const tempPath = getTempFilePath(fileId);
chunkInfo = {
fileId,
fileName,
totalSize,
totalChunks,
uploadedChunks: new Set(),
tempPath,
createdAt: Date.now(),
};
this.activeUploads.set(fileId, chunkInfo);
console.log(`Created new upload session for ${fileName} at ${tempPath}`);
}
console.log(
`Validating chunk ${chunkIndex} (total: ${totalChunks}, uploaded: ${Array.from(chunkInfo.uploadedChunks).join(",")})`
);
if (chunkIndex < 0 || chunkIndex >= totalChunks) {
throw new Error(`Invalid chunk index: ${chunkIndex} (must be 0-${totalChunks - 1})`);
}
if (chunkInfo.uploadedChunks.has(chunkIndex)) {
console.log(`Chunk ${chunkIndex} already uploaded, treating as success`);
if (isLastChunk && chunkInfo.uploadedChunks.size === totalChunks) {
// Check if already finalizing to prevent race condition
if (this.finalizingUploads.has(fileId)) {
console.log(`Upload ${fileId} is already being finalized, waiting...`);
return { isComplete: false };
}
console.log(`All chunks uploaded, finalizing ${fileName}`);
return await this.finalizeUpload(chunkInfo, metadata, originalObjectName);
}
return { isComplete: false };
}
const tempDir = path.dirname(chunkInfo.tempPath);
await fs.promises.mkdir(tempDir, { recursive: true });
console.log(`Temp directory ensured: ${tempDir}`);
await this.writeChunkToFile(chunkInfo.tempPath, inputStream, chunkIndex === 0);
chunkInfo.uploadedChunks.add(chunkIndex);
try {
const stats = await fs.promises.stat(chunkInfo.tempPath);
const processingTime = Date.now() - startTime;
console.log(
`Chunk ${chunkIndex + 1}/${totalChunks} uploaded successfully in ${processingTime}ms. Temp file size: ${stats.size} bytes`
);
} catch (error) {
console.warn(`Could not get temp file stats:`, error);
}
console.log(
`Checking completion: isLastChunk=${isLastChunk}, uploadedChunks.size=${chunkInfo.uploadedChunks.size}, totalChunks=${totalChunks}`
);
if (isLastChunk && chunkInfo.uploadedChunks.size === totalChunks) {
// Check if already finalizing to prevent race condition
if (this.finalizingUploads.has(fileId)) {
console.log(`Upload ${fileId} is already being finalized, waiting...`);
return { isComplete: false };
}
console.log(`All chunks uploaded, finalizing ${fileName}`);
const uploadedChunksArray = Array.from(chunkInfo.uploadedChunks).sort((a, b) => a - b);
console.log(`Uploaded chunks in order: ${uploadedChunksArray.join(", ")}`);
const expectedChunks = Array.from({ length: totalChunks }, (_, i) => i);
const missingChunks = expectedChunks.filter((chunk) => !chunkInfo.uploadedChunks.has(chunk));
if (missingChunks.length > 0) {
throw new Error(`Missing chunks: ${missingChunks.join(", ")}`);
}
return await this.finalizeUpload(chunkInfo, metadata, originalObjectName);
} else {
console.log(
`Not ready for finalization: isLastChunk=${isLastChunk}, uploadedChunks.size=${chunkInfo.uploadedChunks.size}, totalChunks=${totalChunks}`
);
}
return { isComplete: false };
}
/**
* Write chunk to file using streaming
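* Note: chunks after the first are appended in arrival order, so the uploader must send them sequentially (chunk 0 first).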
*/
private async writeChunkToFile(
filePath: string,
inputStream: NodeJS.ReadableStream,
isFirstChunk: boolean
): Promise<void> {
return new Promise((resolve, reject) => {
console.log(`Writing chunk to ${filePath} (first: ${isFirstChunk})`);
if (isFirstChunk) {
const writeStream = fs.createWriteStream(filePath, {
highWaterMark: 64 * 1024 * 1024, // 64MB buffer for better performance
});
writeStream.on("error", (error) => {
console.error("Write stream error:", error);
reject(error);
});
writeStream.on("finish", () => {
console.log("Write stream finished successfully");
resolve();
});
inputStream.pipe(writeStream);
} else {
const writeStream = fs.createWriteStream(filePath, {
flags: "a",
highWaterMark: 64 * 1024 * 1024, // 64MB buffer for better performance
});
writeStream.on("error", (error) => {
console.error("Write stream error:", error);
reject(error);
});
writeStream.on("finish", () => {
console.log("Write stream finished successfully");
resolve();
});
inputStream.pipe(writeStream);
}
});
}
/**
* Finalize upload by moving temp file to final location and encrypting
*/
private async finalizeUpload(
chunkInfo: ChunkInfo,
metadata: ChunkMetadata,
originalObjectName: string
): Promise<{ isComplete: boolean; finalPath: string }> {
// Mark as finalizing to prevent race conditions
this.finalizingUploads.add(chunkInfo.fileId);
try {
console.log(`Finalizing upload for ${chunkInfo.fileName}`);
const tempStats = await fs.promises.stat(chunkInfo.tempPath);
console.log(`Temp file size: ${tempStats.size} bytes, expected: ${chunkInfo.totalSize} bytes`);
if (tempStats.size !== chunkInfo.totalSize) {
console.warn(`Size mismatch! Temp: ${tempStats.size}, Expected: ${chunkInfo.totalSize}`);
}
const provider = FilesystemStorageProvider.getInstance();
const finalObjectName = originalObjectName;
const filePath = provider.getFilePath(finalObjectName);
const dir = path.dirname(filePath);
console.log(`Starting encryption and finalization: ${finalObjectName}`);
await fs.promises.mkdir(dir, { recursive: true });
const tempReadStream = fs.createReadStream(chunkInfo.tempPath, {
highWaterMark: 64 * 1024 * 1024, // 64MB buffer for better performance
});
const writeStream = fs.createWriteStream(filePath, {
highWaterMark: 64 * 1024 * 1024,
});
const encryptStream = provider.createEncryptStream();
// Wait for encryption to complete BEFORE cleaning up temp file
await new Promise<void>((resolve, reject) => {
const startTime = Date.now();
tempReadStream
.pipe(encryptStream)
.pipe(writeStream)
.on("finish", () => {
const duration = Date.now() - startTime;
console.log(`File encrypted and saved to: ${filePath} in ${duration}ms`);
resolve();
})
.on("error", (error) => {
console.error("Error during encryption:", error);
reject(error);
});
});
console.log(`File successfully uploaded and encrypted: ${finalObjectName}`);
// Clean up temp file AFTER encryption is complete
await this.cleanupTempFile(chunkInfo.tempPath);
this.activeUploads.delete(chunkInfo.fileId);
this.finalizingUploads.delete(chunkInfo.fileId);
return { isComplete: true, finalPath: finalObjectName };
} catch (error) {
console.error("Error during finalization:", error);
await this.cleanupTempFile(chunkInfo.tempPath);
this.activeUploads.delete(chunkInfo.fileId);
this.finalizingUploads.delete(chunkInfo.fileId);
throw error;
}
}
/**
* Cleanup temporary file
*/
private async cleanupTempFile(tempPath: string): Promise<void> {
try {
await fs.promises.access(tempPath);
await fs.promises.unlink(tempPath);
console.log(`Temp file cleaned up: ${tempPath}`);
} catch (error: any) {
if (error.code === "ENOENT") {
console.log(`Temp file already cleaned up: ${tempPath}`);
} else {
console.warn(`Failed to cleanup temp file ${tempPath}:`, error);
}
}
}
/**
* Cleanup expired uploads (older than 2 hours)
*/
private async cleanupExpiredUploads(): Promise<void> {
const now = Date.now();
const maxAge = 2 * 60 * 60 * 1000; // 2 hours
for (const [fileId, chunkInfo] of this.activeUploads.entries()) {
if (now - chunkInfo.createdAt > maxAge) {
console.log(`Cleaning up expired upload: ${fileId}`);
await this.cleanupTempFile(chunkInfo.tempPath);
this.activeUploads.delete(fileId);
this.finalizingUploads.delete(fileId);
}
}
}
/**
* Get upload progress
*/
getUploadProgress(fileId: string): { uploaded: number; total: number; percentage: number } | null {
const chunkInfo = this.activeUploads.get(fileId);
if (!chunkInfo) return null;
return {
uploaded: chunkInfo.uploadedChunks.size,
total: chunkInfo.totalChunks,
percentage: Math.round((chunkInfo.uploadedChunks.size / chunkInfo.totalChunks) * 100),
};
}
/**
* Cancel upload
*/
async cancelUpload(fileId: string): Promise<void> {
const chunkInfo = this.activeUploads.get(fileId);
if (chunkInfo) {
await this.cleanupTempFile(chunkInfo.tempPath);
this.activeUploads.delete(fileId);
this.finalizingUploads.delete(fileId);
}
}
/**
* Cleanup on shutdown
*/
destroy(): void {
if (this.cleanupInterval) {
clearInterval(this.cleanupInterval);
}
for (const [fileId, chunkInfo] of this.activeUploads.entries()) {
this.cleanupTempFile(chunkInfo.tempPath);
}
this.activeUploads.clear();
this.finalizingUploads.clear();
}
}
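For illustration, a minimal client-side sketch of the header protocol this manager consumes (the x-* header names mirror extractChunkMetadata in the controller below; the helper name, upload URL shape, and fetch usage are assumptions, not part of this commit):
// Hypothetical helper: sends one chunk with the headers the backend parses.
async function sendChunk(
uploadUrl: string, // assumed shape: /filesystem/upload/<token>
fileId: string,
file: File,
chunkIndex: number,
totalChunks: number,
chunkSize: number
): Promise<Response> {
const start = chunkIndex * chunkSize;
const chunk = file.slice(start, Math.min(start + chunkSize, file.size));
return fetch(uploadUrl, {
method: "PUT",
headers: {
"x-file-id": fileId,
"x-chunk-index": String(chunkIndex),
"x-total-chunks": String(totalChunks),
"x-chunk-size": String(chunk.size),
"x-total-size": String(file.size),
"x-file-name": file.name,
"x-is-last-chunk": String(chunkIndex === totalChunks - 1),
},
body: chunk,
});
}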

View File

@@ -1,13 +1,12 @@
import * as fs from "fs";
import * as path from "path";
import { pipeline } from "stream/promises";
import { FastifyReply, FastifyRequest } from "fastify";
import { FilesystemStorageProvider } from "../../providers/filesystem-storage.provider";
import { FileService } from "../file/service";
import { ChunkManager, ChunkMetadata } from "./chunk-manager";
export class FilesystemController {
private fileService = new FileService();
private chunkManager = ChunkManager.getInstance();
/**
* Safely encode filename for Content-Disposition header
@@ -65,22 +64,121 @@ export class FilesystemController {
return reply.status(400).send({ error: "Invalid or expired upload token" });
}
const chunkMetadata = this.extractChunkMetadata(request);
if (chunkMetadata) {
try {
const result = await this.handleChunkedUpload(request, chunkMetadata, tokenData.objectName);
if (result.isComplete) {
provider.consumeUploadToken(token);
reply.status(200).send({
message: "File uploaded successfully",
objectName: result.finalPath,
finalObjectName: result.finalPath,
});
} else {
reply.status(200).send({
message: "Chunk uploaded successfully",
progress: this.chunkManager.getUploadProgress(chunkMetadata.fileId),
});
}
} catch (chunkError: any) {
return reply.status(400).send({
error: chunkError.message || "Chunked upload failed",
details: chunkError.toString(),
});
}
} else {
await this.uploadFileStream(request, provider, tokenData.objectName);
provider.consumeUploadToken(token);
reply.status(200).send({ message: "File uploaded successfully" });
}
} catch (error) {
console.error("Error in filesystem upload:", error);
return reply.status(500).send({ error: "Internal server error" });
}
}
private async uploadFileStream(request: FastifyRequest, provider: FilesystemStorageProvider, objectName: string) {
// Use the provider's streaming upload method directly
await provider.uploadFileFromStream(objectName, request.raw);
}
/**
* Extract chunk metadata from request headers
*/
private extractChunkMetadata(request: FastifyRequest): ChunkMetadata | null {
const fileId = request.headers["x-file-id"] as string;
const chunkIndex = request.headers["x-chunk-index"] as string;
const totalChunks = request.headers["x-total-chunks"] as string;
const chunkSize = request.headers["x-chunk-size"] as string;
const totalSize = request.headers["x-total-size"] as string;
const fileName = request.headers["x-file-name"] as string;
const isLastChunk = request.headers["x-is-last-chunk"] as string;
if (!fileId || !chunkIndex || !totalChunks || !chunkSize || !totalSize || !fileName) {
return null;
}
const metadata = {
fileId,
chunkIndex: parseInt(chunkIndex, 10),
totalChunks: parseInt(totalChunks, 10),
chunkSize: parseInt(chunkSize, 10),
totalSize: parseInt(totalSize, 10),
fileName,
isLastChunk: isLastChunk === "true",
};
return metadata;
}
/**
* Handle chunked upload with streaming
*/
private async handleChunkedUpload(request: FastifyRequest, metadata: ChunkMetadata, originalObjectName: string) {
const stream = request.raw;
stream.on("error", (error) => {
console.error("Request stream error:", error);
});
return await this.chunkManager.processChunk(metadata, stream, originalObjectName);
}
/**
* Get upload progress for chunked uploads
*/
async getUploadProgress(request: FastifyRequest, reply: FastifyReply) {
try {
const { fileId } = request.params as { fileId: string };
const progress = this.chunkManager.getUploadProgress(fileId);
if (!progress) {
return reply.status(404).send({ error: "Upload not found" });
}
reply.status(200).send(progress);
} catch (error) {
return reply.status(500).send({ error: "Internal server error" });
}
}
/**
* Cancel chunked upload
*/
async cancelUpload(request: FastifyRequest, reply: FastifyReply) {
try {
const { fileId } = request.params as { fileId: string };
await this.chunkManager.cancelUpload(fileId);
reply.status(200).send({ message: "Upload cancelled successfully" });
} catch (error) {
return reply.status(500).send({ error: "Internal server error" });
}
}
async download(request: FastifyRequest, reply: FastifyReply) {
try {
const { token } = request.params as { token: string };
@@ -135,7 +233,6 @@ export class FilesystemController {
provider.consumeDownloadToken(token);
} catch (error) {
console.error("Error in filesystem download:", error);
return reply.status(500).send({ error: "Internal server error" });
}
}
@@ -147,7 +244,6 @@ export class FilesystemController {
try {
await pipeline(readStream, decryptStream, reply.raw);
} catch (error) {
console.error("Error streaming large file:", error);
throw error;
}
}

View File

@@ -67,4 +67,57 @@ export async function filesystemRoutes(app: FastifyInstance) {
},
filesystemController.download.bind(filesystemController)
);
app.get(
"/filesystem/upload-progress/:fileId",
{
schema: {
tags: ["Filesystem"],
operationId: "getUploadProgress",
summary: "Get chunked upload progress",
description: "Get the progress of a chunked upload",
params: z.object({
fileId: z.string().describe("File ID"),
}),
response: {
200: z.object({
uploaded: z.number(),
total: z.number(),
percentage: z.number(),
}),
404: z.object({
error: z.string(),
}),
500: z.object({
error: z.string(),
}),
},
},
},
filesystemController.getUploadProgress.bind(filesystemController)
);
app.delete(
"/filesystem/cancel-upload/:fileId",
{
schema: {
tags: ["Filesystem"],
operationId: "cancelUpload",
summary: "Cancel chunked upload",
description: "Cancel an ongoing chunked upload",
params: z.object({
fileId: z.string().describe("File ID"),
}),
response: {
200: z.object({
message: z.string(),
}),
500: z.object({
error: z.string(),
}),
},
},
},
filesystemController.cancelUpload.bind(filesystemController)
);
}
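As a usage sketch, a client could poll the progress route registered above until the upload finishes; since ChunkManager deletes the session on finalize, a 404 can also mean completion. The helper name and the 1-second cadence are assumptions:
// Hypothetical polling loop against GET /filesystem/upload-progress/:fileId
async function waitForUpload(fileId: string): Promise<void> {
for (;;) {
const res = await fetch(`/filesystem/upload-progress/${fileId}`);
if (res.status === 404) return; // session gone: finalized or expired
const { uploaded, total, percentage } = (await res.json()) as {
uploaded: number;
total: number;
percentage: number;
};
console.log(`uploaded ${uploaded}/${total} chunks (${percentage}%)`);
if (uploaded === total) return;
await new Promise((r) => setTimeout(r, 1000)); // assumed 1s cadence
}
}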

View File

@@ -2,7 +2,6 @@ import { FastifyReply, FastifyRequest } from "fastify";
import {
CreateReverseShareSchema,
GetPresignedUrlSchema,
ReverseSharePasswordSchema,
UpdateReverseSharePasswordSchema,
UpdateReverseShareSchema,
@@ -454,6 +453,7 @@ export class ReverseShareController {
async copyFileToUserFiles(request: FastifyRequest, reply: FastifyReply) {
try {
await request.jwtVerify();
const { fileId } = request.params as { fileId: string };
const userId = (request as any).user?.userId;
@@ -461,9 +461,16 @@ export class ReverseShareController {
return reply.status(401).send({ error: "Unauthorized" });
}
console.log(`Copy to my files: User ${userId} copying file ${fileId}`);
const file = await this.reverseShareService.copyReverseShareFileToUserFiles(fileId, userId);
console.log(`Copy to my files: Successfully copied file ${fileId}`);
return reply.send({ file, message: "File copied to your files successfully" });
} catch (error: any) {
console.error(`Copy to my files: Error:`, error.message);
if (error.message === "File not found") {
return reply.status(404).send({ error: "File not found" });
}

View File

@@ -514,7 +514,6 @@ export class ReverseShareService {
}
const maxTotalStorage = BigInt(await configService.getValue("maxTotalStoragePerUser"));
const userFiles = await prisma.file.findMany({
where: { userId: creatorId },
select: { size: true },
@@ -535,40 +534,70 @@ export class ReverseShareService {
const sourcePath = provider.getFilePath(file.objectName);
const fs = await import("fs");
const { pipeline } = await import("stream/promises");
const targetPath = provider.getFilePath(newObjectName);
const path = await import("path");
const targetDir = path.dirname(targetPath);
if (!fs.existsSync(targetDir)) {
fs.mkdirSync(targetDir, { recursive: true });
}
const { copyFile } = await import("fs/promises");
await copyFile(sourcePath, targetPath);
} else {
const fileSizeMB = Number(file.size) / (1024 * 1024);
const needsStreaming = fileSizeMB > 100;
const downloadUrl = await this.fileService.getPresignedGetUrl(file.objectName, 300);
const uploadUrl = await this.fileService.getPresignedPutUrl(newObjectName, 300);
let retries = 0;
const maxRetries = 3;
let success = false;
while (retries < maxRetries && !success) {
try {
const response = await fetch(downloadUrl, {
signal: AbortSignal.timeout(600000), // 10 minutes timeout
});
if (!response.ok) {
throw new Error(`Failed to download file: ${response.statusText}`);
}
if (!response.body) {
throw new Error("No response body received");
}
const uploadOptions: any = {
method: "PUT",
body: response.body,
headers: {
"Content-Type": "application/octet-stream",
"Content-Length": file.size.toString(),
},
signal: AbortSignal.timeout(600000), // 10 minutes timeout
};
const uploadResponse = await fetch(uploadUrl, uploadOptions);
if (!uploadResponse.ok) {
const errorText = await uploadResponse.text();
throw new Error(`Failed to upload file: ${uploadResponse.statusText} - ${errorText}`);
}
success = true;
} catch (error: any) {
retries++;
if (retries >= maxRetries) {
throw new Error(`Failed to copy file after ${maxRetries} attempts: ${error.message}`);
}
const delay = Math.min(1000 * Math.pow(2, retries - 1), 10000);
await new Promise((resolve) => setTimeout(resolve, delay));
}
}
}
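For clarity, the retry delay above doubles per attempt and is capped at 10 seconds; with maxRetries = 3 the loop actually sleeps 1 s and then 2 s before the final failed attempt throws. A minimal sketch of the same formula, assuming it were factored out:
// Same formula as the service code: 1s, 2s, 4s, ... capped at 10s.
function backoffDelay(retries: number): number {
return Math.min(1000 * Math.pow(2, retries - 1), 10000);
}
// backoffDelay(1) === 1000; backoffDelay(2) === 2000; backoffDelay(3) === 4000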

View File

@@ -1,7 +1,7 @@
import { FastifyReply, FastifyRequest } from "fastify";
import { prisma } from "shared/prisma";
import { z } from "zod";
import { prisma } from "../../shared/prisma";
import { ConfigService } from "../config/service";
import { TwoFactorService } from "./service";

View File

@@ -11,6 +11,7 @@ import { appRoutes } from "./modules/app/routes";
import { authProvidersRoutes } from "./modules/auth-providers/routes";
import { authRoutes } from "./modules/auth/routes";
import { fileRoutes } from "./modules/file/routes";
import { ChunkManager } from "./modules/filesystem/chunk-manager";
import { filesystemRoutes } from "./modules/filesystem/routes";
import { healthRoutes } from "./modules/health/routes";
import { reverseShareRoutes } from "./modules/reverse-share/routes";
@@ -105,6 +106,18 @@ async function startServer() {
console.log("\n📚 API Documentation:");
console.log(` - API Reference: http://localhost:3333/docs\n`);
process.on("SIGINT", async () => {
const chunkManager = ChunkManager.getInstance();
chunkManager.destroy();
process.exit(0);
});
process.on("SIGTERM", async () => {
const chunkManager = ChunkManager.getInstance();
chunkManager.destroy();
process.exit(0);
});
}
startServer().catch((err) => {

View File

@@ -1,10 +0,0 @@
# Next.js generated files
.next/types/**/*
.next/build/**/*
# Node modules
node_modules/**/*
# Build outputs
dist/**/*
build/**/*

View File

@@ -770,7 +770,12 @@
"selectAll": "تحديد الكل",
"selectFile": "تحديد الملف {fileName}",
"deleteError": "خطأ في حذف الملف",
"deleteSuccess": "تم حذف الملف بنجاح"
"deleteSuccess": "تم حذف الملف بنجاح",
"copyErrors": {
"timeout": "انتهت مهلة عملية النسخ. يرجى المحاولة مرة أخرى باستخدام ملف أصغر أو التحقق من اتصالك.",
"failed": "فشلت عملية النسخ. يرجى المحاولة مرة أخرى.",
"aborted": "تم إلغاء عملية النسخ بسبب انتهاء المهلة."
}
}
},
"form": {
@@ -921,7 +926,8 @@
"fileList": {
"title": "الملفات المختارة:",
"statusUploaded": "تم الرفع",
"statusError": "خطأ"
"statusError": "خطأ",
"retry": "إعادة المحاولة"
},
"form": {
"nameLabel": "الاسم",

View File

@@ -770,7 +770,12 @@
"selectAll": "Alle auswählen",
"selectFile": "Datei {fileName} auswählen",
"deleteError": "Fehler beim Löschen der Datei",
"deleteSuccess": "Datei erfolgreich gelöscht"
"deleteSuccess": "Datei erfolgreich gelöscht",
"copyErrors": {
"timeout": "Zeitüberschreitung beim Kopiervorgang. Bitte versuchen Sie es erneut mit einer kleineren Datei oder überprüfen Sie Ihre Verbindung.",
"failed": "Kopiervorgang fehlgeschlagen. Bitte versuchen Sie es erneut.",
"aborted": "Kopiervorgang wurde wegen Zeitüberschreitung abgebrochen."
}
}
},
"form": {
@@ -921,7 +926,8 @@
"fileList": {
"title": "Ausgewählte Dateien:",
"statusUploaded": "Hochgeladen",
"statusError": "Fehler"
"statusError": "Fehler",
"retry": "Wiederholen"
},
"form": {
"nameLabel": "Name",

View File

@@ -770,7 +770,12 @@
"delete": "Delete Selected"
},
"selectAll": "Select all",
"selectFile": "Select file {fileName}"
"selectFile": "Select file {fileName}",
"copyErrors": {
"timeout": "Copy operation timed out. Please try again with a smaller file or check your connection.",
"failed": "Copy operation failed. Please try again.",
"aborted": "Copy operation was cancelled due to timeout."
}
}
},
"form": {
@@ -922,7 +927,8 @@
"fileList": {
"title": "Selected files:",
"statusUploaded": "Uploaded",
"statusError": "Error"
"statusError": "Error",
"retry": "Retry"
},
"form": {
"nameLabel": "Name",
@@ -968,8 +974,6 @@
},
"fileActions": {
"edit": "Edit",
"save": "Save",
"cancel": "Cancel",
"preview": "Preview",
"download": "Download",
"delete": "Delete",

View File

@@ -770,7 +770,12 @@
"selectAll": "Seleccionar todo",
"selectFile": "Seleccionar archivo {fileName}",
"deleteError": "Error al eliminar el archivo",
"deleteSuccess": "Archivo eliminado correctamente"
"deleteSuccess": "Archivo eliminado correctamente",
"copyErrors": {
"timeout": "La operación de copia expiró. Por favor, inténtalo de nuevo con un archivo más pequeño o verifica tu conexión.",
"failed": "La operación de copia falló. Por favor, inténtalo de nuevo.",
"aborted": "La operación de copia fue cancelada debido al tiempo de espera."
}
}
},
"form": {
@@ -921,7 +926,8 @@
"fileList": {
"title": "Archivos seleccionados:",
"statusUploaded": "Enviado",
"statusError": "Error"
"statusError": "Error",
"retry": "Reintentar"
},
"form": {
"nameLabel": "Nombre",

View File

@@ -770,7 +770,12 @@
"selectAll": "Tout sélectionner",
"selectFile": "Sélectionner le fichier {fileName}",
"deleteError": "Erreur lors de la suppression du fichier",
"deleteSuccess": "Fichier supprimé avec succès"
"deleteSuccess": "Fichier supprimé avec succès",
"copyErrors": {
"timeout": "L'opération de copie a expiré. Veuillez réessayer avec un fichier plus petit ou vérifier votre connexion.",
"failed": "L'opération de copie a échoué. Veuillez réessayer.",
"aborted": "L'opération de copie a été annulée en raison d'un délai d'attente dépassé."
}
}
},
"form": {
@@ -921,7 +926,8 @@
"fileList": {
"title": "Fichiers sélectionnés :",
"statusUploaded": "Envoyé",
"statusError": "Erreur"
"statusError": "Erreur",
"retry": "Réessayer"
},
"form": {
"nameLabel": "Nom",

View File

@@ -770,7 +770,12 @@
"selectAll": "सभी चुनें",
"selectFile": "फ़ाइल {fileName} चुनें",
"deleteError": "फ़ाइल हटाने में त्रुटि",
"deleteSuccess": "फ़ाइल सफलतापूर्वक हटा दी गई"
"deleteSuccess": "फ़ाइल सफलतापूर्वक हटा दी गई",
"copyErrors": {
"timeout": "कॉपी ऑपरेशन का समय समाप्त हो गया। कृपया छोटी फ़ाइल के साथ पुनः प्रयास करें या अपना कनेक्शन जांचें।",
"failed": "कॉपी ऑपरेशन विफल हो गया। कृपया पुनः प्रयास करें।",
"aborted": "टाइमआउट के कारण कॉपी ऑपरेशन रद्द कर दिया गया।"
}
}
},
"form": {
@@ -921,7 +926,8 @@
"fileList": {
"title": "चयनित फ़ाइलें:",
"statusUploaded": "अपलोड की गई",
"statusError": "त्रुटि"
"statusError": "त्रुटि",
"retry": "पुनः प्रयास करें"
},
"form": {
"nameLabel": "नाम",

View File

@@ -770,7 +770,12 @@
"selectAll": "Seleziona tutto",
"selectFile": "Seleziona file {fileName}",
"deleteError": "Errore durante l'eliminazione del file",
"deleteSuccess": "File eliminato con successo"
"deleteSuccess": "File eliminato con successo",
"copyErrors": {
"timeout": "Operazione di copia scaduta. Riprova con un file più piccolo o controlla la tua connessione.",
"failed": "Operazione di copia fallita. Per favore riprova.",
"aborted": "L'operazione di copia è stata annullata per timeout."
}
}
},
"form": {
@@ -921,7 +926,8 @@
"fileList": {
"title": "File selezionati:",
"statusUploaded": "Inviato",
"statusError": "Errore"
"statusError": "Errore",
"retry": "Riprova"
},
"form": {
"nameLabel": "Nome",

View File

@@ -770,7 +770,12 @@
"selectAll": "すべて選択",
"selectFile": "ファイル{fileName}を選択",
"deleteError": "ファイルの削除に失敗しました",
"deleteSuccess": "ファイルを正常に削除しました"
"deleteSuccess": "ファイルを正常に削除しました",
"copyErrors": {
"timeout": "コピー操作がタイムアウトしました。より小さいファイルで再試行するか、接続を確認してください。",
"failed": "コピー操作に失敗しました。もう一度お試しください。",
"aborted": "タイムアウトによりコピー操作がキャンセルされました。"
}
}
},
"form": {
@@ -921,7 +926,8 @@
"fileList": {
"title": "選択されたファイル:",
"statusUploaded": "アップロード済み",
"statusError": "エラー"
"statusError": "エラー",
"retry": "再試行"
},
"form": {
"nameLabel": "名前",

View File

@@ -770,7 +770,12 @@
"selectAll": "모두 선택",
"selectFile": "{fileName} 파일 선택",
"deleteError": "파일 삭제 오류",
"deleteSuccess": "파일이 성공적으로 삭제됨"
"deleteSuccess": "파일이 성공적으로 삭제됨",
"copyErrors": {
"timeout": "복사 작업 시간이 초과되었습니다. 더 작은 파일로 다시 시도하거나 연결을 확인하십시오.",
"failed": "복사 작업이 실패했습니다. 다시 시도해 주세요.",
"aborted": "시간 초과로 인해 복사 작업이 취소되었습니다."
}
}
},
"form": {
@@ -921,7 +926,8 @@
"fileList": {
"title": "선택된 파일:",
"statusUploaded": "업로드됨",
"statusError": "오류"
"statusError": "오류",
"retry": "다시 시도"
},
"form": {
"nameLabel": "이름",

View File

@@ -770,7 +770,12 @@
"selectAll": "Alles selecteren",
"selectFile": "Selecteer bestand {fileName}",
"deleteError": "Fout bij verwijderen bestand",
"deleteSuccess": "Bestand succesvol verwijderd"
"deleteSuccess": "Bestand succesvol verwijderd",
"copyErrors": {
"timeout": "Kopieeroperatie verlopen. Probeer het opnieuw met een kleiner bestand of controleer uw verbinding.",
"failed": "Kopieeroperatie mislukt. Probeer het opnieuw.",
"aborted": "Kopieeroperatie is geannuleerd vanwege een time-out."
}
}
},
"form": {
@@ -921,7 +926,8 @@
"fileList": {
"title": "Geselecteerde bestanden:",
"statusUploaded": "Geüpload",
"statusError": "Fout"
"statusError": "Fout",
"retry": "Opnieuw proberen"
},
"form": {
"nameLabel": "Naam",

View File

@@ -770,7 +770,12 @@
"selectAll": "Zaznacz wszystko",
"selectFile": "Wybierz plik {fileName}",
"deleteError": "Błąd usuwania pliku",
"deleteSuccess": "Plik usunięty pomyślnie"
"deleteSuccess": "Plik usunięty pomyślnie",
"copyErrors": {
"timeout": "Operacja kopiowania przekroczyła limit czasu. Spróbuj ponownie z mniejszym plikiem lub sprawdź swoje połączenie.",
"failed": "Operacja kopiowania nie powiodła się. Spróbuj ponownie.",
"aborted": "Operacja kopiowania została anulowana z powodu przekroczenia limitu czasu."
}
}
},
"form": {
@@ -921,7 +926,8 @@
"fileList": {
"title": "Wybrane pliki:",
"statusUploaded": "Przesłano",
"statusError": "Błąd"
"statusError": "Błąd",
"retry": "Spróbuj Ponownie"
},
"form": {
"nameLabel": "Imię",

View File

@@ -770,7 +770,12 @@
"selectAll": "Selecionar todos",
"selectFile": "Selecionar arquivo {fileName}",
"deleteError": "Erro ao excluir arquivo",
"deleteSuccess": "Arquivo excluído com sucesso"
"deleteSuccess": "Arquivo excluído com sucesso",
"copyErrors": {
"timeout": "A operação de cópia expirou. Por favor, tente novamente com um arquivo menor ou verifique sua conexão.",
"failed": "A operação de cópia falhou. Por favor, tente novamente.",
"aborted": "A operação de cópia foi cancelada devido ao tempo limite."
}
}
},
"form": {
@@ -921,7 +926,8 @@
"fileList": {
"title": "Arquivos selecionados:",
"statusUploaded": "Enviado",
"statusError": "Erro"
"statusError": "Erro",
"retry": "Tentar Novamente"
},
"form": {
"nameLabel": "Nome",

View File

@@ -770,7 +770,12 @@
"selectAll": "Выбрать все",
"selectFile": "Выбрать файл {fileName}",
"deleteError": "Ошибка при удалении файла",
"deleteSuccess": "Файл успешно удален"
"deleteSuccess": "Файл успешно удален",
"copyErrors": {
"timeout": "Время операции копирования истекло. Пожалуйста, попробуйте еще раз с файлом меньшего размера или проверьте подключение.",
"failed": "Ошибка операции копирования. Пожалуйста, попробуйте еще раз.",
"aborted": "Операция копирования была отменена из-за истечения времени ожидания."
}
}
},
"form": {
@@ -921,7 +926,8 @@
"fileList": {
"title": "Выбранные файлы:",
"statusUploaded": "Загружено",
"statusError": "Ошибка"
"statusError": "Ошибка",
"retry": "Повторить"
},
"form": {
"nameLabel": "Имя",

View File

@@ -770,7 +770,12 @@
"selectAll": "Tümünü seç",
"selectFile": "{fileName} dosyasını seç",
"deleteError": "Dosya silinirken hata oluştu",
"deleteSuccess": "Dosya başarıyla silindi"
"deleteSuccess": "Dosya başarıyla silindi",
"copyErrors": {
"timeout": "Kopyalama işlemi zaman aşımına uğradı. Lütfen daha küçük bir dosya ile tekrar deneyin veya bağlantınızı kontrol edin.",
"failed": "Kopyalama işlemi başarısız oldu. Lütfen tekrar deneyin.",
"aborted": "Kopyalama işlemi zaman aşımı nedeniyle iptal edildi."
}
}
},
"form": {
@@ -921,7 +926,8 @@
"fileList": {
"title": "Seçilen dosyalar:",
"statusUploaded": "Yüklendi",
"statusError": "Hata"
"statusError": "Hata",
"retry": "Tekrar Dene"
},
"form": {
"nameLabel": "Ad",

View File

@@ -770,7 +770,12 @@
"selectAll": "全选",
"selectFile": "选择文件 {fileName}",
"deleteError": "删除文件时出错",
"deleteSuccess": "文件已成功删除"
"deleteSuccess": "文件已成功删除",
"copyErrors": {
"timeout": "复制操作超时。请尝试使用较小的文件或检查您的连接。",
"failed": "复制操作失败。请重试。",
"aborted": "由于超时,复制操作已取消。"
}
}
},
"form": {
@@ -921,7 +926,8 @@
"fileList": {
"title": "已选择的文件:",
"statusUploaded": "已上传",
"statusError": "错误"
"statusError": "错误",
"retry": "重试"
},
"form": {
"nameLabel": "姓名",

View File

@@ -14,6 +14,7 @@ import { Label } from "@/components/ui/label";
import { Progress } from "@/components/ui/progress";
import { Textarea } from "@/components/ui/textarea";
import { getPresignedUrlForUploadByAlias, registerFileUploadByAlias } from "@/http/endpoints";
import { ChunkedUploader } from "@/utils/chunked-upload";
import { formatFileSize } from "@/utils/format-file-size";
import { FILE_STATUS, UPLOAD_CONFIG, UPLOAD_PROGRESS } from "../constants";
import { FileUploadSectionProps, FileWithProgress } from "../types";
@@ -138,17 +139,34 @@ export function FileUploadSection({ reverseShare, password, alias, onUploadSucce
presignedUrl: string,
onProgress?: (progress: number) => void
): Promise<void> => {
const shouldUseChunked = ChunkedUploader.shouldUseChunkedUpload(file.size);
if (shouldUseChunked) {
const chunkSize = ChunkedUploader.calculateOptimalChunkSize(file.size);
const result = await ChunkedUploader.uploadFile({
file,
url: presignedUrl,
chunkSize,
onProgress,
});
if (!result.success) {
throw new Error(result.error || "Chunked upload failed");
}
} else {
await axios.put(presignedUrl, file, {
headers: {
"Content-Type": file.type,
},
onUploadProgress: (progressEvent) => {
if (onProgress && progressEvent.total) {
const progress = (progressEvent.loaded / progressEvent.total) * 100;
onProgress(Math.round(progress));
}
},
});
}
};
const registerUploadedFile = async (file: File, objectName: string): Promise<void> => {
@@ -195,7 +213,6 @@ export function FileUploadSection({ reverseShare, password, alias, onUploadSucce
updateFileStatus(index, { status: FILE_STATUS.SUCCESS });
} catch (error: any) {
console.error("Upload error:", error);
const errorMessage = error.response?.data?.error || t("reverseShares.upload.errors.uploadFailed");
updateFileStatus(index, {

View File

@@ -557,16 +557,21 @@ export function ReceivedFilesModal({
} catch (error: any) {
console.error("Error copying file:", error);
let errorMessage = t("reverseShares.modals.receivedFiles.copyError");
if (error.message?.includes("timeout") || error.code === "UND_ERR_SOCKET") {
errorMessage = t("reverseShares.modals.receivedFiles.copyErrors.timeout");
} else if (error.response?.data?.error) {
const serverError = error.response.data.error;
if (serverError.includes("File size exceeds") || serverError.includes("Insufficient storage")) {
errorMessage = serverError;
} else if (serverError.includes("Copy operation failed")) {
errorMessage = t("reverseShares.modals.receivedFiles.copyErrors.failed");
}
} else if (error.name === "AbortError") {
errorMessage = t("reverseShares.modals.receivedFiles.copyErrors.aborted");
}
toast.error(errorMessage);
} finally {
setCopyingFile(null);
}

View File

@@ -103,9 +103,6 @@ export function ReverseSharesModals({
onCopyLink={onCopyLink}
onUpdatePassword={onUpdatePassword}
onToggleActive={onToggleActive}
onSuccess={() => {
console.log("Operation completed successfully - data updated");
}}
/>
<ReceivedFilesModal

View File

@@ -0,0 +1,33 @@
import { NextRequest, NextResponse } from "next/server";
const API_BASE_URL = process.env.API_BASE_URL || "http://localhost:3333";
export async function DELETE(req: NextRequest, { params }: { params: Promise<{ fileId: string }> }) {
const { fileId } = await params;
const cookieHeader = req.headers.get("cookie");
const url = `${API_BASE_URL}/filesystem/cancel-upload/${fileId}`;
const apiRes = await fetch(url, {
method: "DELETE",
headers: {
cookie: cookieHeader || "",
},
});
const contentType = apiRes.headers.get("Content-Type") || "application/json";
const resBody = await apiRes.text();
const res = new NextResponse(resBody, {
status: apiRes.status,
headers: {
"Content-Type": contentType,
},
});
const setCookie = apiRes.headers.getSetCookie?.() || [];
if (setCookie.length > 0) {
res.headers.set("Set-Cookie", setCookie.join(","));
}
return res;
}

View File

@@ -0,0 +1,33 @@
import { NextRequest, NextResponse } from "next/server";
const API_BASE_URL = process.env.API_BASE_URL || "http://localhost:3333";
export async function GET(req: NextRequest, { params }: { params: Promise<{ fileId: string }> }) {
const { fileId } = await params;
const cookieHeader = req.headers.get("cookie");
const url = `${API_BASE_URL}/filesystem/upload-progress/${fileId}`;
const apiRes = await fetch(url, {
method: "GET",
headers: {
cookie: cookieHeader || "",
},
});
const contentType = apiRes.headers.get("Content-Type") || "application/json";
const resBody = await apiRes.text();
const res = new NextResponse(resBody, {
status: apiRes.status,
headers: {
"Content-Type": contentType,
},
});
const setCookie = apiRes.headers.getSetCookie?.() || [];
if (setCookie.length > 0) {
res.headers.set("Set-Cookie", setCookie.join(","));
}
return res;
}

View File

@@ -1,6 +1,6 @@
import { NextRequest, NextResponse } from "next/server";
export const maxDuration = 120000; // 2 minutes to handle large files
export const dynamic = "force-dynamic";
const API_BASE_URL = process.env.API_BASE_URL || "http://localhost:3333";
@@ -10,37 +10,60 @@ export async function PUT(req: NextRequest, { params }: { params: Promise<{ toke
const cookieHeader = req.headers.get("cookie");
const url = `${API_BASE_URL}/filesystem/upload/${token}`;
const headers: Record<string, string> = {
cookie: cookieHeader || "",
"Content-Type": req.headers.get("Content-Type") || "application/octet-stream",
"Content-Length": req.headers.get("Content-Length") || "0",
};
req.headers.forEach((value, key) => {
if (key.startsWith("x-") || key.startsWith("X-")) {
headers[key] = value;
}
});
try {
const apiRes = await fetch(url, {
method: "PUT",
headers,
body: req.body,
duplex: "half",
} as RequestInit);
const contentType = apiRes.headers.get("Content-Type") || "application/json";
let resBody;
if (contentType.includes("application/json")) {
resBody = await apiRes.text();
} else {
resBody = await apiRes.arrayBuffer();
}
const res = new NextResponse(resBody, {
status: apiRes.status,
headers: {
"Content-Type": contentType,
},
});
const setCookie = apiRes.headers.getSetCookie?.() || [];
if (setCookie.length > 0) {
res.headers.set("Set-Cookie", setCookie.join(","));
}
return res;
} catch (error) {
return new NextResponse(
JSON.stringify({
error: "Proxy request failed",
details: error instanceof Error ? error.message : "Unknown error",
}),
{
status: 500,
headers: {
"Content-Type": "application/json",
},
}
);
}
}

View File

@@ -1,5 +1,8 @@
import { NextRequest, NextResponse } from "next/server";
export const maxDuration = 600000; // 10 minutes timeout for large file copies
export const dynamic = "force-dynamic";
const API_BASE_URL = process.env.API_BASE_URL || "http://localhost:3333";
export async function POST(req: NextRequest, { params }: { params: Promise<{ fileId: string }> }) {
@@ -7,27 +10,83 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ fil
const cookieHeader = req.headers.get("cookie");
const url = `${API_BASE_URL}/reverse-shares/files/${fileId}/copy`;
try {
const testResponse = await fetch(`${API_BASE_URL}/health`, {
method: "GET",
signal: AbortSignal.timeout(5000), // 5 seconds timeout
});
if (!testResponse.ok) {
throw new Error(`Backend health check failed: ${testResponse.status}`);
}
const controller = new AbortController();
const timeoutId = setTimeout(() => {
controller.abort();
}, 600000); // 10 minutes
const apiRes = await fetch(url, {
method: "POST",
headers: {
cookie: cookieHeader || "",
},
redirect: "manual",
signal: controller.signal,
});
clearTimeout(timeoutId);
const resBody = await apiRes.text();
const res = new NextResponse(resBody, {
status: apiRes.status,
headers: {
"Content-Type": "application/json",
},
});
const setCookie = apiRes.headers.getSetCookie?.() || [];
if (setCookie.length > 0) {
res.headers.set("Set-Cookie", setCookie.join(","));
}
return res;
} catch (error: any) {
console.error(`Copy to my files proxy error details:`, {
name: error.name,
message: error.message,
code: error.code,
cause: error.cause,
});
if (error.name === "AbortError") {
return new NextResponse(
JSON.stringify({
error: "Copy operation timed out",
details: "The operation took too long to complete",
fileId,
}),
{
status: 408,
headers: {
"Content-Type": "application/json",
},
}
);
}
return new NextResponse(
JSON.stringify({
error: "Copy operation failed",
details: error.message || "Unknown error",
fileId,
}),
{
status: 500,
headers: {
"Content-Type": "application/json",
},
}
);
}
}

View File

@@ -75,10 +75,8 @@ export function useTrustedDevices() {
(device: TrustedDevice) => {
const userAgent = device.userAgent;
// Extract browser and OS info from user agent
let deviceInfo = t("twoFactor.deviceNames.unknownDevice");
// Null safety check
if (!userAgent) {
return deviceInfo;
}

View File

@@ -29,7 +29,6 @@ export function useAuthProviders() {
} | null>(null);
const [isDeleting, setIsDeleting] = useState(false);
// Load initial state from localStorage
useEffect(() => {
const savedState = localStorage.getItem("hideDisabledProviders");
if (savedState !== null) {
@@ -56,7 +55,6 @@ export function useAuthProviders() {
}
}, [t]);
// Load providers on mount
useEffect(() => {
loadProviders();
}, [loadProviders]);
@@ -226,7 +224,6 @@ export function useAuthProviders() {
setEditingFormData({});
};
// Computed values
const enabledCount = providers.filter((p) => p.enabled).length;
const filteredProviders = hideDisabledProviders ? providers.filter((p) => p.enabled) : providers;

View File

@@ -9,6 +9,7 @@ import { toast } from "sonner";
import { Button } from "@/components/ui/button";
import { Progress } from "@/components/ui/progress";
import { checkFile, getPresignedUrl, registerFile } from "@/http/endpoints";
import { ChunkedUploader } from "@/utils/chunked-upload";
import { getFileIcon } from "@/utils/file-icons";
import { generateSafeFileName } from "@/utils/file-utils";
import { formatFileSize } from "@/utils/format-file-size";
@@ -84,7 +85,7 @@ export function GlobalDropZone({ onSuccess, children }: GlobalDropZoneProps) {
try {
await checkFile({
name: fileName,
objectName: "checkFile",
objectName: safeObjectName,
size: file.size,
extension: extension,
});
@@ -94,9 +95,9 @@ export function GlobalDropZone({ onSuccess, children }: GlobalDropZoneProps) {
let errorMessage = t("uploadFile.error");
if (errorData.code === "fileSizeExceeded") {
errorMessage = t(`uploadFile.${errorData.code}`, { maxsizemb: errorData.details || "0" });
} else if (errorData.code === "insufficientStorage") {
errorMessage = t(`uploadFile.${errorData.code}`, { availablespace: errorData.details || "0" });
} else if (errorData.code) {
errorMessage = t(`uploadFile.${errorData.code}`);
}
@@ -123,23 +124,51 @@ export function GlobalDropZone({ onSuccess, children }: GlobalDropZoneProps) {
const abortController = new AbortController();
setFileUploads((prev) => prev.map((u) => (u.id === id ? { ...u, abortController } : u)));
const shouldUseChunked = ChunkedUploader.shouldUseChunkedUpload(file.size);
const chunkSize = ChunkedUploader.calculateOptimalChunkSize(file.size);
if (shouldUseChunked) {
const result = await ChunkedUploader.uploadFile({
file,
url,
chunkSize,
signal: abortController.signal,
onProgress: (progress) => {
setFileUploads((prev) => prev.map((u) => (u.id === id ? { ...u, progress } : u)));
},
});
if (!result.success) {
throw new Error(result.error || "Chunked upload failed");
}
const finalObjectName = result.finalObjectName || objectName;
await registerFile({
name: fileName,
objectName: finalObjectName,
size: file.size,
extension: extension,
});
} else {
await axios.put(url, file, {
headers: {
"Content-Type": file.type,
},
signal: abortController.signal,
onUploadProgress: (progressEvent: any) => {
const progress = (progressEvent.loaded / (progressEvent.total || file.size)) * 100;
setFileUploads((prev) => prev.map((u) => (u.id === id ? { ...u, progress: Math.round(progress) } : u)));
},
});
await registerFile({
name: fileName,
objectName: objectName,
size: file.size,
extension: extension,
});
}
setFileUploads((prev) =>
prev.map((u) =>

View File

@@ -10,6 +10,7 @@ import { Button } from "@/components/ui/button";
import { Dialog, DialogContent, DialogFooter, DialogHeader, DialogTitle } from "@/components/ui/dialog";
import { Progress } from "@/components/ui/progress";
import { checkFile, getPresignedUrl, registerFile } from "@/http/endpoints";
import { ChunkedUploader } from "@/utils/chunked-upload";
import { getFileIcon } from "@/utils/file-icons";
import { generateSafeFileName } from "@/utils/file-utils";
import { formatFileSize } from "@/utils/format-file-size";
@@ -212,7 +213,7 @@ export function UploadFileModal({ isOpen, onClose, onSuccess }: UploadFileModalP
try {
await checkFile({
name: fileName,
objectName: "checkFile",
objectName: safeObjectName,
size: file.size,
extension: extension,
});
@@ -222,9 +223,9 @@ export function UploadFileModal({ isOpen, onClose, onSuccess }: UploadFileModalP
let errorMessage = t("uploadFile.error");
if (errorData.code === "fileSizeExceeded") {
errorMessage = t(`uploadFile.${errorData.code}`, { maxsizemb: errorData.details || "0" });
} else if (errorData.code === "insufficientStorage") {
errorMessage = t(`uploadFile.${errorData.code}`, { availablespace: errorData.details || "0" });
} else if (errorData.code) {
errorMessage = t(`uploadFile.${errorData.code}`);
}
@@ -251,23 +252,54 @@ export function UploadFileModal({ isOpen, onClose, onSuccess }: UploadFileModalP
const abortController = new AbortController();
setFileUploads((prev) => prev.map((u) => (u.id === id ? { ...u, abortController } : u)));
const shouldUseChunked = ChunkedUploader.shouldUseChunkedUpload(file.size);
const chunkSize = ChunkedUploader.calculateOptimalChunkSize(file.size);
if (shouldUseChunked) {
const result = await ChunkedUploader.uploadFile({
file,
url,
chunkSize,
signal: abortController.signal,
onProgress: (progress) => {
setFileUploads((prev) => prev.map((u) => (u.id === id ? { ...u, progress } : u)));
},
});
if (!result.success) {
throw new Error(result.error || "Chunked upload failed");
}
const finalObjectName = result.finalObjectName || objectName;
await registerFile({
name: fileName,
objectName: finalObjectName,
size: file.size,
extension: extension,
});
} else {
await axios.put(url, file, {
headers: {
"Content-Type": file.type,
},
signal: abortController.signal,
timeout: 300000, // 5 minutes timeout for direct uploads
maxContentLength: Infinity,
maxBodyLength: Infinity,
onUploadProgress: (progressEvent) => {
const progress = (progressEvent.loaded / (progressEvent.total || file.size)) * 100;
setFileUploads((prev) => prev.map((u) => (u.id === id ? { ...u, progress: Math.round(progress) } : u)));
},
});
await registerFile({
name: fileName,
objectName: objectName,
size: file.size,
extension: extension,
});
}
setFileUploads((prev) =>
prev.map((u) =>
@@ -298,7 +330,6 @@ export function UploadFileModal({ isOpen, onClose, onSuccess }: UploadFileModalP
const startUploads = async () => {
const pendingUploads = fileUploads.filter((u) => u.status === UploadStatus.PENDING);
// Reset the toast flag when starting new uploads
setHasShownSuccessToast(false);
const uploadPromises = pendingUploads.map((upload) => uploadFile(upload));
@@ -346,7 +377,7 @@ export function UploadFileModal({ isOpen, onClose, onSuccess }: UploadFileModalP
setFileUploads([]);
setShowConfirmation(false);
setHasShownSuccessToast(false);
onClose();
};

View File

@@ -311,7 +311,6 @@ export function IconPicker({ value, onChange, placeholder }: IconPickerProps) {
iconSets.forEach(({ icons: iconSet, prefix, category }) => {
Object.entries(iconSet).forEach(([name, component]) => {
if (typeof component === "function" && name.startsWith(prefix)) {
// Skip duplicates - keep only the first occurrence
if (seenNames.has(name)) {
return;
}

View File

@@ -1,6 +1,5 @@
import type { AxiosResponse } from "axios";
// Base types that are reused across different operations
export interface FileSizeInfo {
bytes: number;
kb: number;
@@ -15,7 +14,6 @@ export interface DiskSpaceInfo {
uploadAllowed: boolean;
}
// Response types using base types
export interface CheckHealth200 {
status: string;
timestamp: string;
@@ -42,7 +40,6 @@ export interface UploadLogo200 {
logo: string;
}
// Request body and parameter types
export interface CheckUploadAllowedParams {
fileSize: string;
}
@@ -51,7 +48,6 @@ export interface UploadLogoBody {
file?: unknown;
}
// Axios response types
export type GetAppInfoResult = AxiosResponse<GetAppInfo200>;
export type UploadLogoResult = AxiosResponse<UploadLogo200>;
export type RemoveLogoResult = AxiosResponse<RemoveLogo200>;

View File

@@ -5,7 +5,6 @@ import type {
AuthProvider,
CreateProviderResult,
DeleteProviderResult,
// Auth Providers types
GetAllProvidersResult,
GetCurrentUserResult,
GetEnabledProvidersResult,
@@ -62,7 +61,6 @@ export const initiateOIDCLogin = (state?: string, redirectUri?: string): string
return `/api/auth/oidc/authorize${queryString ? `?${queryString}` : ""}`;
};
// Auth Providers endpoints
export const getEnabledProviders = <TData = GetEnabledProvidersResult>(
options?: AxiosRequestConfig
): Promise<TData> => {

View File

@@ -1,6 +1,5 @@
import type { AxiosResponse } from "axios";
// Base types that are reused across different operations
export interface BaseUser {
id: string;
firstName: string;
@@ -19,7 +18,6 @@ export interface User extends BaseUser {
export type LoginUser = BaseUser;
// Common API response patterns
export interface ApiResponse<T> {
success: boolean;
data: T;
@@ -34,7 +32,6 @@ export interface SimpleMessageResponse {
message: string;
}
// Auth response types using base types
export interface GetCurrentUser200 {
user: User;
}
@@ -50,7 +47,6 @@ export interface OidcConfig200 {
scopes?: string[];
}
// Request body types
export interface LoginBody {
emailOrUsername: string;
password: string;
@@ -66,7 +62,6 @@ export interface ResetPasswordBody {
password: string;
}
// Auth Provider types
export interface AuthProvider {
id: string;
name: string;
@@ -117,7 +112,6 @@ export interface UpdateProvidersOrderBody {
}>;
}
// Response types using common patterns
export type AuthProvidersResponse = ApiResponse<AuthProvider[]>;
export type EnabledProvidersResponse = ApiResponse<EnabledAuthProvider[]>;
export type AuthProviderResponse = ApiResponse<AuthProvider>;
@@ -126,7 +120,6 @@ export type Logout200 = SimpleMessageResponse;
export type RequestPasswordReset200 = SimpleMessageResponse;
export type ResetPassword200 = SimpleMessageResponse;
// Axios response types
export type GetEnabledProvidersResult = AxiosResponse<EnabledProvidersResponse>;
export type GetAllProvidersResult = AxiosResponse<AuthProvidersResponse>;
export type CreateProviderResult = AxiosResponse<AuthProviderResponse>;

View File

@@ -1,6 +1,5 @@
import type { AxiosResponse } from "axios";
// Base types that are reused across different operations
export interface ConfigItem {
key: string;
value: string;
@@ -14,7 +13,6 @@ export interface ConfigUpdateItem {
value: string;
}
// Response types using base types
export interface UpdateConfig200 {
config: ConfigItem;
}
@@ -27,14 +25,12 @@ export interface BulkUpdateConfigs200 {
configs: ConfigItem[];
}
// Request body types
export interface UpdateConfigBody {
value: string;
}
export type BulkUpdateConfigsBody = ConfigUpdateItem[];
// Axios response types
export type UpdateConfigResult = AxiosResponse<UpdateConfig200>;
export type GetAllConfigsResult = AxiosResponse<GetAllConfigs200>;
export type BulkUpdateConfigsResult = AxiosResponse<BulkUpdateConfigs200>;

View File

@@ -1,6 +1,5 @@
import type { AxiosResponse } from "axios";
// Base types that are reused across different operations
export interface FileItem {
id: string;
name: string;
@@ -21,7 +20,6 @@ export interface FileOperationRequest {
objectName: string;
}
// Common response patterns
export interface FileOperationResponse {
file: FileItem;
message: string;
@@ -43,12 +41,10 @@ export interface DownloadUrlResponse extends UrlResponse {
expiresIn: number;
}
// Response types using base types
export interface ListFiles200 {
files: FileItem[];
}
// Request body types
export type CheckFileBody = FileOperationRequest;
export type RegisterFileBody = FileOperationRequest;
@@ -57,7 +53,6 @@ export interface UpdateFileBody {
description?: string | null;
}
// Query parameter types
export interface GetPresignedUrlParams {
filename: string;
extension: string;
@@ -70,7 +65,6 @@ export type CheckFile201 = MessageOnlyResponse;
export type GetPresignedUrl200 = PresignedUrlResponse;
export type GetDownloadUrl200 = DownloadUrlResponse;
// Axios response types
export type GetPresignedUrlResult = AxiosResponse<GetPresignedUrl200>;
export type RegisterFileResult = AxiosResponse<RegisterFile201>;
export type CheckFileResult = AxiosResponse<CheckFile201>;

View File

@@ -1,10 +1,8 @@
import type { AxiosResponse } from "axios";
// Base types and enums
export type FieldRequirement = "HIDDEN" | "OPTIONAL" | "REQUIRED";
export type PageLayout = "WETRANSFER" | "DEFAULT";
// Base interfaces that are reused across different operations
export interface ReverseShareFile {
id: string;
name: string;
@@ -63,7 +61,6 @@ export interface ReverseShareForUpload {
emailFieldRequired: string;
}
// Response interfaces using base types
export interface CreateReverseShare201 {
reverseShare: BaseReverseShare;
}
@@ -126,7 +123,6 @@ export interface UpdateReverseShareFile200 {
file: ReverseShareFile;
}
// Request body interfaces
export interface CreateReverseShareBody {
name?: string;
description?: string;
@@ -182,7 +178,6 @@ export interface UpdateReverseShareFileBody {
description?: string | null;
}
// Query parameter interfaces
export interface GetReverseShareForUploadParams {
password?: string;
}
@@ -191,7 +186,6 @@ export interface RegisterFileUploadParams {
password?: string;
}
// Axios response types
export type CreateReverseShareResult = AxiosResponse<CreateReverseShare201>;
export type UpdateReverseShareResult = AxiosResponse<UpdateReverseShare200>;
export type ListUserReverseSharesResult = AxiosResponse<ListUserReverseShares200>;

View File

@@ -32,7 +32,6 @@ export interface ShareSecurity {
hasPassword: boolean;
}
// Full share object used in most responses
export interface Share {
id: string;
name: string | null;
@@ -48,14 +47,12 @@ export interface Share {
alias: ShareAlias;
}
// Simplified share object for specific operations
export interface SimpleShare {
id: string;
name: string;
description: string | null;
}
// Response types using base types
export interface CreateShare201 {
share: Share;
}
@@ -115,7 +112,6 @@ export interface NotifyRecipients200 {
notifiedRecipients: string[];
}
// Request body types
export interface CreateShareBody {
name?: string;
description?: string;
@@ -164,7 +160,6 @@ export interface NotifyRecipientsBody {
shareLink: string;
}
// Query parameter types
export interface GetShareParams {
password?: string;
}
@@ -173,7 +168,6 @@ export interface GetShareByAliasParams {
password?: string;
}
// Axios response types
export type CreateShareResult = AxiosResponse<CreateShare201>;
export type UpdateShareResult = AxiosResponse<UpdateShare200>;
export type ListUserSharesResult = AxiosResponse<ListUserShares200>;
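For orientation, these result aliases pair an Axios response with the payload types above. A consumer might use them roughly as sketched here; the "/api/shares" route and the "./shares" module path are assumptions for illustration, while the type names come from this file:

import axios from "axios";
import type { CreateShareBody, CreateShare201, CreateShareResult } from "./shares"; // assumed module path

async function createShare(body: CreateShareBody): Promise<string> {
  // Axios types the response payload via the generic parameter
  const res: CreateShareResult = await axios.post<CreateShare201>("/api/shares", body); // assumed route
  return res.data.share.id;
}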

View File

@@ -1,6 +1,5 @@
import type { AxiosResponse } from "axios";
// Base interface that is reused across all user operations
export interface User {
id: string;
firstName: string;
@@ -14,18 +13,15 @@ export interface User {
updatedAt: string;
}
// Common response patterns
export interface UserWithMessageResponse {
user: User;
message: string;
}
// Response interfaces using base User type
export interface ListUsers200 {
users: User[];
}
// Request body interfaces
export interface RegisterUserBody {
firstName: string;
lastName: string;
@@ -64,7 +60,6 @@ export type UpdateUserImage200 = User;
export type UploadAvatar200 = User;
export type RegisterUser201 = UserWithMessageResponse;
// Axios response types
export type RegisterUserResult = AxiosResponse<RegisterUser201>;
export type ListUsersResult = AxiosResponse<User[]>;
export type UpdateUserResult = AxiosResponse<UpdateUser200>;

View File

@@ -0,0 +1,272 @@
import axios from "axios";
export interface ChunkedUploadOptions {
file: File;
url: string;
chunkSize?: number;
onProgress?: (progress: number) => void;
onChunkComplete?: (chunkIndex: number, totalChunks: number) => void;
signal?: AbortSignal;
}
export interface ChunkedUploadResult {
success: boolean;
objectName?: string;
finalObjectName?: string;
error?: string;
}
export class ChunkedUploader {
/**
* Upload a file in chunks with streaming
*/
static async uploadFile(options: ChunkedUploadOptions): Promise<ChunkedUploadResult> {
const { file, url, chunkSize, onProgress, onChunkComplete, signal } = options;
if (!this.shouldUseChunkedUpload(file.size)) {
throw new Error(
`File ${file.name} (${(file.size / (1024 * 1024)).toFixed(2)}MB) should not use chunked upload. Use regular upload instead.`
);
}
const optimalChunkSize = chunkSize || this.calculateOptimalChunkSize(file.size);
try {
const fileId = this.generateFileId();
const totalChunks = Math.ceil(file.size / optimalChunkSize);
const uploadedChunks = new Set<number>();
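// Recorded locally for bookkeeping; the server remains the source of truth for which chunks it has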
let completedChunks = 0;
let lastChunkResponse: any = null;
for (let chunkIndex = 0; chunkIndex < totalChunks; chunkIndex++) {
if (signal?.aborted) {
throw new Error("Upload cancelled");
}
const start = chunkIndex * optimalChunkSize;
const end = Math.min(start + optimalChunkSize, file.size);
const chunk = file.slice(start, end);
const isLastChunk = chunkIndex === totalChunks - 1;
let retries = 0;
const maxRetries = 3;
let chunkUploaded = false;
while (retries < maxRetries && !chunkUploaded) {
try {
const response = await this.uploadChunk({
fileId,
chunk,
chunkIndex,
totalChunks,
chunkSize: optimalChunkSize,
totalSize: file.size,
fileName: file.name,
isLastChunk,
url,
signal,
});
if (isLastChunk) {
lastChunkResponse = response;
}
chunkUploaded = true;
} catch (error: any) {
retries++;
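// A 400 "already uploaded" response means the server already has this chunk; count it as success instead of retrying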
if (
error.response?.status === 400 &&
(error.response?.data?.error?.includes("already uploaded") ||
error.response?.data?.details?.includes("already uploaded"))
) {
chunkUploaded = true;
break;
}
console.warn(`Chunk ${chunkIndex + 1} failed (attempt ${retries}/${maxRetries}):`, error.message);
if (retries >= maxRetries) {
throw error;
}
const backoffDelay = error.message?.includes("timeout") ? 2000 * retries : 1000 * retries;
await new Promise((resolve) => setTimeout(resolve, backoffDelay));
}
}
if (!chunkUploaded) {
throw new Error(`Failed to upload chunk ${chunkIndex + 1} after ${maxRetries} attempts`);
}
uploadedChunks.add(chunkIndex);
completedChunks++;
const progress = Math.round((completedChunks / totalChunks) * 100);
onProgress?.(progress);
onChunkComplete?.(chunkIndex, totalChunks);
if (!isLastChunk) {
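// Brief pause between chunks so sequential requests do not hammer the server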
await new Promise((resolve) => setTimeout(resolve, 100));
}
}
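// Give the backend a moment to finish assembling the chunks before reporting success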
await new Promise((resolve) => setTimeout(resolve, 500));
return {
success: true,
finalObjectName: lastChunkResponse?.finalObjectName || lastChunkResponse?.objectName,
};
} catch (error: any) {
console.error("Chunked upload failed:", error);
return {
success: false,
error: error.message || "Upload failed",
};
}
}
/**
* Upload a single chunk
*/
private static async uploadChunk({
fileId,
chunk,
chunkIndex,
totalChunks,
chunkSize,
totalSize,
fileName,
isLastChunk,
url,
signal,
}: {
fileId: string;
chunk: Blob;
chunkIndex: number;
totalChunks: number;
chunkSize: number;
totalSize: number;
fileName: string;
isLastChunk: boolean;
url: string;
signal?: AbortSignal;
}): Promise<any> {
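// Chunk metadata travels in custom headers; the request body is the raw chunk bytes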
const headers = {
"Content-Type": "application/octet-stream",
"X-File-Id": fileId,
"X-Chunk-Index": chunkIndex.toString(),
"X-Total-Chunks": totalChunks.toString(),
"X-Chunk-Size": chunkSize.toString(),
"X-Total-Size": totalSize.toString(),
"X-File-Name": fileName,
"X-Is-Last-Chunk": isLastChunk.toString(),
};
try {
// Scale the timeout with chunk size: at least 60 seconds, plus 120 seconds for every started 100MB
const timeoutPer100MB = 120000;
const chunkSizeMB = chunk.size / (1024 * 1024);
const calculatedTimeout = Math.max(60000, Math.ceil(chunkSizeMB / 100) * timeoutPer100MB);
const response = await axios.put(url, chunk, {
headers,
signal,
timeout: calculatedTimeout,
maxContentLength: Infinity,
maxBodyLength: Infinity,
});
// Axios only resolves for 2xx statuses by default; accept the whole 2xx range so a 201 is not treated as a failure
if (response.status < 200 || response.status >= 300) {
throw new Error(`Failed to upload chunk ${chunkIndex}: ${response.statusText}`);
}
return response.data;
} catch (error: any) {
if (
error.response?.status === 400 &&
(error.response?.data?.error?.includes("already uploaded") ||
error.response?.data?.details?.includes("already uploaded"))
) {
return error.response.data;
}
if (error.code === "ECONNABORTED" || error.message?.includes("timeout")) {
console.warn(`Chunk ${chunkIndex + 1} upload timed out, will retry`);
throw new Error(`Upload timeout for chunk ${chunkIndex + 1}`);
}
throw error;
}
}
/**
* Get upload progress
*/
static async getUploadProgress(fileId: string): Promise<{
uploaded: number;
total: number;
percentage: number;
} | null> {
try {
const response = await axios.get(`/api/filesystem/upload-progress/${fileId}`);
return response.data;
} catch (error) {
console.warn("Failed to get upload progress:", error);
return null;
}
}
/**
* Cancel upload
*/
static async cancelUpload(fileId: string): Promise<void> {
try {
await axios.delete(`/api/filesystem/cancel-upload/${fileId}`);
} catch (error) {
console.warn("Failed to cancel upload:", error);
}
}
/**
* Generate a reasonably unique file ID (timestamp plus a random base-36 suffix)
*/
private static generateFileId(): string {
return `${Date.now()}-${Math.random().toString(36).substring(2, 15)}`;
}
/**
* Check if file should use chunked upload
*/
static shouldUseChunkedUpload(fileSize: number): boolean {
const threshold = 100 * 1024 * 1024; // 100MB
return fileSize > threshold;
}
/**
* Calculate optimal chunk size based on file size
*/
static calculateOptimalChunkSize(fileSize: number): number {
if (fileSize <= 100 * 1024 * 1024) {
throw new Error(
`calculateOptimalChunkSize should not be called for files <= 100MB. File size: ${(fileSize / (1024 * 1024)).toFixed(2)}MB`
);
}
// For files > 1GB, use 150MB chunks
if (fileSize > 1024 * 1024 * 1024) {
return 150 * 1024 * 1024;
}
// For files > 500MB, use 100MB chunks
if (fileSize > 500 * 1024 * 1024) {
return 100 * 1024 * 1024;
}
// For files between 100MB and 500MB, use 75MB chunks (the smallest chunk size this uploader uses)
return 75 * 1024 * 1024;
}
}
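A minimal sketch of how a caller might drive this class, for review context. The route string and the uploadLargeFile wrapper are illustrative assumptions; only the ChunkedUploader calls come from the file above:

// Hypothetical caller; replace the URL with the real chunked-upload route
const controller = new AbortController();

async function uploadLargeFile(file: File): Promise<string | undefined> {
  if (!ChunkedUploader.shouldUseChunkedUpload(file.size)) {
    throw new Error("Files at or under 100MB should take the regular upload path");
  }
  const result = await ChunkedUploader.uploadFile({
    file,
    url: "/api/filesystem/upload", // assumed endpoint, not confirmed by this diff
    onProgress: (pct) => console.log(`Upload ${pct}% complete`),
    onChunkComplete: (i, total) => console.log(`Chunk ${i + 1}/${total} done`),
    signal: controller.signal,
  });
  if (!result.success) {
    throw new Error(result.error ?? "Upload failed");
  }
  return result.finalObjectName;
}

For scale: a 2GB file falls in the over-1GB tier, so calculateOptimalChunkSize picks 150MB chunks and uploadFile issues Math.ceil(2048 / 150) = 14 sequential PUTs.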

View File

@@ -1,7 +1,11 @@
{
"compilerOptions": {
"target": "ES2017",
"lib": ["dom", "dom.iterable", "esnext"],
"lib": [
"dom",
"dom.iterable",
"esnext"
],
"allowJs": true,
"skipLibCheck": true,
"strict": true,
@@ -19,9 +23,20 @@
}
],
"paths": {
"@/*": ["./src/*"]
"@/*": [
"./src/*"
]
}
},
"exclude": ["node_modules", ".next/types/app/api/(proxy)/**/*", ".next/types/**/*.ts"],
"include": ["**/*.ts", "**/*.tsx", "next-env.d.ts", ".next/types/**/*.ts"]
}
"exclude": [
"node_modules",
".next/types/app/api/(proxy)/**/*",
".next/types/**/*.ts"
],
"include": [
"**/*.ts",
"**/*.tsx",
"next-env.d.ts",
".next/types/**/*.ts"
]
}