From a3389b8b0df54ea0cfa75f5b0553529dc9dc6179 Mon Sep 17 00:00:00 2001
From: Daniel Luiz Alves
Date: Tue, 8 Jul 2025 15:40:25 -0300
Subject: [PATCH] feat: implement chunked file upload and progress tracking

- Introduced a new ChunkManager class to handle chunked uploads, including
  methods for processing chunks, tracking upload progress, and cleaning up
  temporary files.
- Updated the FilesystemController to support chunked uploads and expose
  endpoints for checking upload progress and canceling uploads.
- Added a ChunkedUploader client utility that chooses between direct and
  chunked upload based on file size.
- Added API routes for querying upload progress and canceling in-flight
  uploads.
- Updated the frontend upload components (global drop zone, upload modal,
  reverse-share upload) to use chunked uploads for large files.
---
 .../src/modules/filesystem/chunk-manager.ts   | 332 ++++++++++++++++++
 .../src/modules/filesystem/controller.ts      | 118 ++++++-
 apps/server/src/modules/filesystem/routes.ts  |  53 +++
 .../src/modules/reverse-share/controller.ts   |   9 +-
 .../src/modules/reverse-share/service.ts      |  79 +++--
 apps/server/src/server.ts                     |  13 +
 apps/web/.eslintignore                        |  10 -
 apps/web/messages/en-US.json                  |  12 +-
 .../components/file-upload-section.tsx        |  44 ++-
 .../components/received-files-modal.tsx       |  21 +-
 .../cancel-upload/[fileId]/route.ts           |  33 ++
 .../upload-progress/[fileId]/route.ts         |  33 ++
 .../filesystem/upload/[token]/route.ts        |  83 +++--
 .../files/[fileId]/copy/route.ts              |  97 ++++-
 .../components/general/global-drop-zone.tsx   |  64 +++-
 .../components/modals/upload-file-modal.tsx   |  69 +++-
 apps/web/src/utils/chunked-upload.ts          | 328 +++++++++++++++++
 apps/web/tsconfig.json                        |   7 +-
 18 files changed, 1250 insertions(+), 155 deletions(-)
 create mode 100644 apps/server/src/modules/filesystem/chunk-manager.ts
 delete mode 100644 apps/web/.eslintignore
 create mode 100644 apps/web/src/app/api/(proxy)/filesystem/cancel-upload/[fileId]/route.ts
 create mode 100644 apps/web/src/app/api/(proxy)/filesystem/upload-progress/[fileId]/route.ts
 create mode 100644 apps/web/src/utils/chunked-upload.ts

diff --git a/apps/server/src/modules/filesystem/chunk-manager.ts b/apps/server/src/modules/filesystem/chunk-manager.ts
new file mode 100644
index 0000000..af7ea98
--- /dev/null
+++ b/apps/server/src/modules/filesystem/chunk-manager.ts
@@ -0,0 +1,332 @@
+import * as fs from "fs";
+import * as path from "path";
+import { Transform } from "stream";
+import { pipeline } from "stream/promises";
+
+import { getTempFilePath } from "../../config/directories.config";
+import { FilesystemStorageProvider } from "../../providers/filesystem-storage.provider";
+
+export interface ChunkMetadata {
+  fileId: string;
+  chunkIndex: number;
+  totalChunks: number;
+  chunkSize: number;
+  totalSize: number;
+  fileName: string;
+  isLastChunk: boolean;
+}
+
+export interface ChunkInfo {
+  fileId: string;
+  fileName: string;
+  totalSize: number;
+  totalChunks: number;
+  uploadedChunks: Set<number>;
+  tempPath: string;
+  createdAt: number;
+}
+
+export class ChunkManager {
+  private static instance: ChunkManager;
+  private activeUploads = new Map<string, ChunkInfo>();
+  private cleanupInterval: NodeJS.Timeout;
+
+  private constructor() {
+    // Cleanup expired uploads every 30 minutes
+    this.cleanupInterval = setInterval(
+      () => {
+        this.cleanupExpiredUploads();
+      },
+      30 * 60 * 1000
+    );
+  }
+
+  public static getInstance(): ChunkManager {
+    if (!ChunkManager.instance) {
+      ChunkManager.instance = new ChunkManager();
+    }
+    return ChunkManager.instance;
+ } + + /** + * Process a chunk upload with streaming + */ + async processChunk( + metadata: ChunkMetadata, + inputStream: NodeJS.ReadableStream, + originalObjectName: string + ): Promise<{ isComplete: boolean; finalPath?: string }> { + const startTime = Date.now(); + const { fileId, chunkIndex, totalChunks, fileName, totalSize, isLastChunk } = metadata; + + console.log(`Processing chunk ${chunkIndex + 1}/${totalChunks} for file ${fileName} (${fileId})`); + + let chunkInfo = this.activeUploads.get(fileId); + if (!chunkInfo) { + if (chunkIndex !== 0) { + throw new Error("First chunk must be chunk 0"); + } + + const tempPath = getTempFilePath(fileId); + chunkInfo = { + fileId, + fileName, + totalSize, + totalChunks, + uploadedChunks: new Set(), + tempPath, + createdAt: Date.now(), + }; + this.activeUploads.set(fileId, chunkInfo); + console.log(`Created new upload session for ${fileName} at ${tempPath}`); + } + + console.log( + `Validating chunk ${chunkIndex} (total: ${totalChunks}, uploaded: ${Array.from(chunkInfo.uploadedChunks).join(",")})` + ); + + if (chunkIndex < 0 || chunkIndex >= totalChunks) { + throw new Error(`Invalid chunk index: ${chunkIndex} (must be 0-${totalChunks - 1})`); + } + + if (chunkInfo.uploadedChunks.has(chunkIndex)) { + console.log(`Chunk ${chunkIndex} already uploaded, treating as success`); + + if (isLastChunk && chunkInfo.uploadedChunks.size === totalChunks) { + console.log(`All chunks uploaded, finalizing ${fileName}`); + return await this.finalizeUpload(chunkInfo, metadata, originalObjectName); + } + + return { isComplete: false }; + } + + const tempDir = path.dirname(chunkInfo.tempPath); + await fs.promises.mkdir(tempDir, { recursive: true }); + console.log(`Temp directory ensured: ${tempDir}`); + + await this.writeChunkToFile(chunkInfo.tempPath, inputStream, chunkIndex === 0); + + chunkInfo.uploadedChunks.add(chunkIndex); + + try { + const stats = await fs.promises.stat(chunkInfo.tempPath); + const processingTime = Date.now() - startTime; + console.log( + `Chunk ${chunkIndex + 1}/${totalChunks} uploaded successfully in ${processingTime}ms. 
Temp file size: ${stats.size} bytes` + ); + } catch (error) { + console.warn(`Could not get temp file stats:`, error); + } + + console.log( + `Checking completion: isLastChunk=${isLastChunk}, uploadedChunks.size=${chunkInfo.uploadedChunks.size}, totalChunks=${totalChunks}` + ); + + if (isLastChunk && chunkInfo.uploadedChunks.size === totalChunks) { + console.log(`All chunks uploaded, finalizing ${fileName}`); + + const uploadedChunksArray = Array.from(chunkInfo.uploadedChunks).sort((a, b) => a - b); + console.log(`Uploaded chunks in order: ${uploadedChunksArray.join(", ")}`); + + const expectedChunks = Array.from({ length: totalChunks }, (_, i) => i); + const missingChunks = expectedChunks.filter((chunk) => !chunkInfo.uploadedChunks.has(chunk)); + + if (missingChunks.length > 0) { + throw new Error(`Missing chunks: ${missingChunks.join(", ")}`); + } + + return await this.finalizeUpload(chunkInfo, metadata, originalObjectName); + } else { + console.log( + `Not ready for finalization: isLastChunk=${isLastChunk}, uploadedChunks.size=${chunkInfo.uploadedChunks.size}, totalChunks=${totalChunks}` + ); + } + + return { isComplete: false }; + } + + /** + * Write chunk to file using streaming + */ + private async writeChunkToFile( + filePath: string, + inputStream: NodeJS.ReadableStream, + isFirstChunk: boolean + ): Promise { + return new Promise((resolve, reject) => { + console.log(`Writing chunk to ${filePath} (first: ${isFirstChunk})`); + + if (isFirstChunk) { + const writeStream = fs.createWriteStream(filePath, { + highWaterMark: 64 * 1024 * 1024, // 64MB buffer for better performance + }); + writeStream.on("error", (error) => { + console.error("Write stream error:", error); + reject(error); + }); + writeStream.on("finish", () => { + console.log("Write stream finished successfully"); + resolve(); + }); + inputStream.pipe(writeStream); + } else { + const writeStream = fs.createWriteStream(filePath, { + flags: "a", + highWaterMark: 64 * 1024 * 1024, // 64MB buffer for better performance + }); + writeStream.on("error", (error) => { + console.error("Write stream error:", error); + reject(error); + }); + writeStream.on("finish", () => { + console.log("Write stream finished successfully"); + resolve(); + }); + inputStream.pipe(writeStream); + } + }); + } + + /** + * Finalize upload by moving temp file to final location and encrypting + */ + private async finalizeUpload( + chunkInfo: ChunkInfo, + metadata: ChunkMetadata, + originalObjectName: string + ): Promise<{ isComplete: boolean; finalPath: string }> { + try { + console.log(`Finalizing upload for ${chunkInfo.fileName}`); + + const tempStats = await fs.promises.stat(chunkInfo.tempPath); + console.log(`Temp file size: ${tempStats.size} bytes, expected: ${chunkInfo.totalSize} bytes`); + + if (tempStats.size !== chunkInfo.totalSize) { + console.warn(`Size mismatch! 
Temp: ${tempStats.size}, Expected: ${chunkInfo.totalSize}`); + } + + const provider = FilesystemStorageProvider.getInstance(); + const finalObjectName = originalObjectName; + const filePath = provider.getFilePath(finalObjectName); + const dir = path.dirname(filePath); + + console.log(`Starting encryption and finalization: ${finalObjectName}`); + + await fs.promises.mkdir(dir, { recursive: true }); + + const tempReadStream = fs.createReadStream(chunkInfo.tempPath, { + highWaterMark: 64 * 1024 * 1024, // 64MB buffer for better performance + }); + const writeStream = fs.createWriteStream(filePath, { + highWaterMark: 64 * 1024 * 1024, + }); + const encryptStream = provider.createEncryptStream(); + + const cleanupPromise = this.cleanupTempFile(chunkInfo.tempPath); + + await new Promise((resolve, reject) => { + const startTime = Date.now(); + + tempReadStream + .pipe(encryptStream) + .pipe(writeStream) + .on("finish", () => { + const duration = Date.now() - startTime; + console.log(`File encrypted and saved to: ${filePath} in ${duration}ms`); + resolve(); + }) + .on("error", (error) => { + console.error("Error during encryption:", error); + reject(error); + }); + }); + + console.log(`File successfully uploaded and encrypted: ${finalObjectName}`); + + cleanupPromise.catch((error) => { + console.warn("Background cleanup failed:", error); + }); + + this.activeUploads.delete(chunkInfo.fileId); + + return { isComplete: true, finalPath: finalObjectName }; + } catch (error) { + console.error("Error during finalization:", error); + await this.cleanupTempFile(chunkInfo.tempPath); + this.activeUploads.delete(chunkInfo.fileId); + throw error; + } + } + + /** + * Cleanup temporary file + */ + private async cleanupTempFile(tempPath: string): Promise { + try { + await fs.promises.access(tempPath); + await fs.promises.unlink(tempPath); + console.log(`Temp file cleaned up: ${tempPath}`); + } catch (error: any) { + if (error.code === "ENOENT") { + console.log(`Temp file already cleaned up: ${tempPath}`); + } else { + console.warn(`Failed to cleanup temp file ${tempPath}:`, error); + } + } + } + + /** + * Cleanup expired uploads (older than 2 hours) + */ + private async cleanupExpiredUploads(): Promise { + const now = Date.now(); + const maxAge = 2 * 60 * 60 * 1000; // 2 hours + + for (const [fileId, chunkInfo] of this.activeUploads.entries()) { + if (now - chunkInfo.createdAt > maxAge) { + console.log(`Cleaning up expired upload: ${fileId}`); + await this.cleanupTempFile(chunkInfo.tempPath); + this.activeUploads.delete(fileId); + } + } + } + + /** + * Get upload progress + */ + getUploadProgress(fileId: string): { uploaded: number; total: number; percentage: number } | null { + const chunkInfo = this.activeUploads.get(fileId); + if (!chunkInfo) return null; + + return { + uploaded: chunkInfo.uploadedChunks.size, + total: chunkInfo.totalChunks, + percentage: Math.round((chunkInfo.uploadedChunks.size / chunkInfo.totalChunks) * 100), + }; + } + + /** + * Cancel upload + */ + async cancelUpload(fileId: string): Promise { + const chunkInfo = this.activeUploads.get(fileId); + if (chunkInfo) { + await this.cleanupTempFile(chunkInfo.tempPath); + this.activeUploads.delete(fileId); + } + } + + /** + * Cleanup on shutdown + */ + destroy(): void { + if (this.cleanupInterval) { + clearInterval(this.cleanupInterval); + } + + for (const [fileId, chunkInfo] of this.activeUploads.entries()) { + this.cleanupTempFile(chunkInfo.tempPath); + } + this.activeUploads.clear(); + } +} diff --git 
a/apps/server/src/modules/filesystem/controller.ts b/apps/server/src/modules/filesystem/controller.ts index bbb74a7..07be54d 100644 --- a/apps/server/src/modules/filesystem/controller.ts +++ b/apps/server/src/modules/filesystem/controller.ts @@ -1,13 +1,12 @@ import * as fs from "fs"; -import * as path from "path"; import { pipeline } from "stream/promises"; import { FastifyReply, FastifyRequest } from "fastify"; import { FilesystemStorageProvider } from "../../providers/filesystem-storage.provider"; -import { FileService } from "../file/service"; +import { ChunkManager, ChunkMetadata } from "./chunk-manager"; export class FilesystemController { - private fileService = new FileService(); + private chunkManager = ChunkManager.getInstance(); /** * Safely encode filename for Content-Disposition header @@ -65,22 +64,121 @@ export class FilesystemController { return reply.status(400).send({ error: "Invalid or expired upload token" }); } - // Use streaming for all files to avoid loading into RAM - await this.uploadFileStream(request, provider, tokenData.objectName); + const chunkMetadata = this.extractChunkMetadata(request); - provider.consumeUploadToken(token); - reply.status(200).send({ message: "File uploaded successfully" }); + if (chunkMetadata) { + try { + const result = await this.handleChunkedUpload(request, chunkMetadata, tokenData.objectName); + + if (result.isComplete) { + provider.consumeUploadToken(token); + reply.status(200).send({ + message: "File uploaded successfully", + objectName: result.finalPath, + finalObjectName: result.finalPath, + }); + } else { + reply.status(200).send({ + message: "Chunk uploaded successfully", + progress: this.chunkManager.getUploadProgress(chunkMetadata.fileId), + }); + } + } catch (chunkError: any) { + return reply.status(400).send({ + error: chunkError.message || "Chunked upload failed", + details: chunkError.toString(), + }); + } + } else { + await this.uploadFileStream(request, provider, tokenData.objectName); + provider.consumeUploadToken(token); + reply.status(200).send({ message: "File uploaded successfully" }); + } } catch (error) { - console.error("Error in filesystem upload:", error); return reply.status(500).send({ error: "Internal server error" }); } } private async uploadFileStream(request: FastifyRequest, provider: FilesystemStorageProvider, objectName: string) { - // Use the provider's streaming upload method directly await provider.uploadFileFromStream(objectName, request.raw); } + /** + * Extract chunk metadata from request headers + */ + private extractChunkMetadata(request: FastifyRequest): ChunkMetadata | null { + const fileId = request.headers["x-file-id"] as string; + const chunkIndex = request.headers["x-chunk-index"] as string; + const totalChunks = request.headers["x-total-chunks"] as string; + const chunkSize = request.headers["x-chunk-size"] as string; + const totalSize = request.headers["x-total-size"] as string; + const fileName = request.headers["x-file-name"] as string; + const isLastChunk = request.headers["x-is-last-chunk"] as string; + + if (!fileId || !chunkIndex || !totalChunks || !chunkSize || !totalSize || !fileName) { + return null; + } + + const metadata = { + fileId, + chunkIndex: parseInt(chunkIndex, 10), + totalChunks: parseInt(totalChunks, 10), + chunkSize: parseInt(chunkSize, 10), + totalSize: parseInt(totalSize, 10), + fileName, + isLastChunk: isLastChunk === "true", + }; + + return metadata; + } + + /** + * Handle chunked upload with streaming + */ + private async handleChunkedUpload(request: 
FastifyRequest, metadata: ChunkMetadata, originalObjectName: string) { + const stream = request.raw; + + stream.on("error", (error) => { + console.error("Request stream error:", error); + }); + + return await this.chunkManager.processChunk(metadata, stream, originalObjectName); + } + + /** + * Get upload progress for chunked uploads + */ + async getUploadProgress(request: FastifyRequest, reply: FastifyReply) { + try { + const { fileId } = request.params as { fileId: string }; + + const progress = this.chunkManager.getUploadProgress(fileId); + + if (!progress) { + return reply.status(404).send({ error: "Upload not found" }); + } + + reply.status(200).send(progress); + } catch (error) { + return reply.status(500).send({ error: "Internal server error" }); + } + } + + /** + * Cancel chunked upload + */ + async cancelUpload(request: FastifyRequest, reply: FastifyReply) { + try { + const { fileId } = request.params as { fileId: string }; + + await this.chunkManager.cancelUpload(fileId); + + reply.status(200).send({ message: "Upload cancelled successfully" }); + } catch (error) { + return reply.status(500).send({ error: "Internal server error" }); + } + } + async download(request: FastifyRequest, reply: FastifyReply) { try { const { token } = request.params as { token: string }; @@ -135,7 +233,6 @@ export class FilesystemController { provider.consumeDownloadToken(token); } catch (error) { - console.error("Error in filesystem download:", error); return reply.status(500).send({ error: "Internal server error" }); } } @@ -147,7 +244,6 @@ export class FilesystemController { try { await pipeline(readStream, decryptStream, reply.raw); } catch (error) { - console.error("Error streaming large file:", error); throw error; } } diff --git a/apps/server/src/modules/filesystem/routes.ts b/apps/server/src/modules/filesystem/routes.ts index 76faa42..bf23c96 100644 --- a/apps/server/src/modules/filesystem/routes.ts +++ b/apps/server/src/modules/filesystem/routes.ts @@ -67,4 +67,57 @@ export async function filesystemRoutes(app: FastifyInstance) { }, filesystemController.download.bind(filesystemController) ); + + app.get( + "/filesystem/upload-progress/:fileId", + { + schema: { + tags: ["Filesystem"], + operationId: "getUploadProgress", + summary: "Get chunked upload progress", + description: "Get the progress of a chunked upload", + params: z.object({ + fileId: z.string().describe("File ID"), + }), + response: { + 200: z.object({ + uploaded: z.number(), + total: z.number(), + percentage: z.number(), + }), + 404: z.object({ + error: z.string(), + }), + 500: z.object({ + error: z.string(), + }), + }, + }, + }, + filesystemController.getUploadProgress.bind(filesystemController) + ); + + app.delete( + "/filesystem/cancel-upload/:fileId", + { + schema: { + tags: ["Filesystem"], + operationId: "cancelUpload", + summary: "Cancel chunked upload", + description: "Cancel an ongoing chunked upload", + params: z.object({ + fileId: z.string().describe("File ID"), + }), + response: { + 200: z.object({ + message: z.string(), + }), + 500: z.object({ + error: z.string(), + }), + }, + }, + }, + filesystemController.cancelUpload.bind(filesystemController) + ); } diff --git a/apps/server/src/modules/reverse-share/controller.ts b/apps/server/src/modules/reverse-share/controller.ts index d08d641..35ee7da 100644 --- a/apps/server/src/modules/reverse-share/controller.ts +++ b/apps/server/src/modules/reverse-share/controller.ts @@ -2,7 +2,6 @@ import { FastifyReply, FastifyRequest } from "fastify"; import { CreateReverseShareSchema, - 
GetPresignedUrlSchema, ReverseSharePasswordSchema, UpdateReverseSharePasswordSchema, UpdateReverseShareSchema, @@ -454,6 +453,7 @@ export class ReverseShareController { async copyFileToUserFiles(request: FastifyRequest, reply: FastifyReply) { try { await request.jwtVerify(); + const { fileId } = request.params as { fileId: string }; const userId = (request as any).user?.userId; @@ -461,9 +461,16 @@ export class ReverseShareController { return reply.status(401).send({ error: "Unauthorized" }); } + console.log(`Copy to my files: User ${userId} copying file ${fileId}`); + const file = await this.reverseShareService.copyReverseShareFileToUserFiles(fileId, userId); + + console.log(`Copy to my files: Successfully copied file ${fileId}`); + return reply.send({ file, message: "File copied to your files successfully" }); } catch (error: any) { + console.error(`Copy to my files: Error:`, error.message); + if (error.message === "File not found") { return reply.status(404).send({ error: "File not found" }); } diff --git a/apps/server/src/modules/reverse-share/service.ts b/apps/server/src/modules/reverse-share/service.ts index 3f26eba..de45469 100644 --- a/apps/server/src/modules/reverse-share/service.ts +++ b/apps/server/src/modules/reverse-share/service.ts @@ -514,7 +514,6 @@ export class ReverseShareService { } const maxTotalStorage = BigInt(await configService.getValue("maxTotalStoragePerUser")); - const userFiles = await prisma.file.findMany({ where: { userId: creatorId }, select: { size: true }, @@ -535,40 +534,70 @@ export class ReverseShareService { const sourcePath = provider.getFilePath(file.objectName); const fs = await import("fs"); - const { pipeline } = await import("stream/promises"); - const sourceStream = fs.createReadStream(sourcePath); - const decryptStream = provider.createDecryptStream(); + const targetPath = provider.getFilePath(newObjectName); - const { PassThrough } = await import("stream"); - const passThrough = new PassThrough(); + const path = await import("path"); + const targetDir = path.dirname(targetPath); + if (!fs.existsSync(targetDir)) { + fs.mkdirSync(targetDir, { recursive: true }); + } - await pipeline(sourceStream, decryptStream, passThrough); - - await provider.uploadFileFromStream(newObjectName, passThrough); + const { copyFile } = await import("fs/promises"); + await copyFile(sourcePath, targetPath); } else { + const fileSizeMB = Number(file.size) / (1024 * 1024); + const needsStreaming = fileSizeMB > 100; + const downloadUrl = await this.fileService.getPresignedGetUrl(file.objectName, 300); const uploadUrl = await this.fileService.getPresignedPutUrl(newObjectName, 300); - const response = await fetch(downloadUrl); - if (!response.ok) { - throw new Error(`Failed to download file: ${response.statusText}`); - } + let retries = 0; + const maxRetries = 3; + let success = false; - if (!response.body) { - throw new Error("No response body received"); - } + while (retries < maxRetries && !success) { + try { + const response = await fetch(downloadUrl, { + signal: AbortSignal.timeout(600000), // 10 minutes timeout + }); - const uploadResponse = await fetch(uploadUrl, { - method: "PUT", - body: response.body, - headers: { - "Content-Type": "application/octet-stream", - }, - }); + if (!response.ok) { + throw new Error(`Failed to download file: ${response.statusText}`); + } - if (!uploadResponse.ok) { - throw new Error(`Failed to upload file: ${uploadResponse.statusText}`); + if (!response.body) { + throw new Error("No response body received"); + } + + const uploadOptions: 
any = { + method: "PUT", + body: response.body, + headers: { + "Content-Type": "application/octet-stream", + "Content-Length": file.size.toString(), + }, + signal: AbortSignal.timeout(600000), // 10 minutes timeout + }; + + const uploadResponse = await fetch(uploadUrl, uploadOptions); + + if (!uploadResponse.ok) { + const errorText = await uploadResponse.text(); + throw new Error(`Failed to upload file: ${uploadResponse.statusText} - ${errorText}`); + } + + success = true; + } catch (error: any) { + retries++; + + if (retries >= maxRetries) { + throw new Error(`Failed to copy file after ${maxRetries} attempts: ${error.message}`); + } + + const delay = Math.min(1000 * Math.pow(2, retries - 1), 10000); + await new Promise((resolve) => setTimeout(resolve, delay)); + } } } diff --git a/apps/server/src/server.ts b/apps/server/src/server.ts index 2a605f1..6141d1d 100644 --- a/apps/server/src/server.ts +++ b/apps/server/src/server.ts @@ -11,6 +11,7 @@ import { appRoutes } from "./modules/app/routes"; import { authProvidersRoutes } from "./modules/auth-providers/routes"; import { authRoutes } from "./modules/auth/routes"; import { fileRoutes } from "./modules/file/routes"; +import { ChunkManager } from "./modules/filesystem/chunk-manager"; import { filesystemRoutes } from "./modules/filesystem/routes"; import { healthRoutes } from "./modules/health/routes"; import { reverseShareRoutes } from "./modules/reverse-share/routes"; @@ -105,6 +106,18 @@ async function startServer() { console.log("\nšŸ“š API Documentation:"); console.log(` - API Reference: http://localhost:3333/docs\n`); + + process.on("SIGINT", async () => { + const chunkManager = ChunkManager.getInstance(); + chunkManager.destroy(); + process.exit(0); + }); + + process.on("SIGTERM", async () => { + const chunkManager = ChunkManager.getInstance(); + chunkManager.destroy(); + process.exit(0); + }); } startServer().catch((err) => { diff --git a/apps/web/.eslintignore b/apps/web/.eslintignore deleted file mode 100644 index b897ac5..0000000 --- a/apps/web/.eslintignore +++ /dev/null @@ -1,10 +0,0 @@ -# Next.js generated files -.next/types/**/* -.next/build/**/* - -# Node modules -node_modules/**/* - -# Build outputs -dist/**/* -build/**/* \ No newline at end of file diff --git a/apps/web/messages/en-US.json b/apps/web/messages/en-US.json index 36a5bce..679e85e 100644 --- a/apps/web/messages/en-US.json +++ b/apps/web/messages/en-US.json @@ -753,7 +753,12 @@ "delete": "Delete Selected" }, "selectAll": "Select all", - "selectFile": "Select file {fileName}" + "selectFile": "Select file {fileName}", + "copyErrors": { + "timeout": "Copy operation timed out. Please try again with a smaller file or check your connection.", + "failed": "Copy operation failed. Please try again.", + "aborted": "Copy operation was cancelled due to timeout." 
+ } } }, "form": { @@ -905,7 +910,8 @@ "fileList": { "title": "Selected files:", "statusUploaded": "Uploaded", - "statusError": "Error" + "statusError": "Error", + "retry": "Retry" }, "form": { "nameLabel": "Name", @@ -951,8 +957,6 @@ }, "fileActions": { "edit": "Edit", - "save": "Save", - "cancel": "Cancel", "preview": "Preview", "download": "Download", "delete": "Delete", diff --git a/apps/web/src/app/(shares)/r/[alias]/components/file-upload-section.tsx b/apps/web/src/app/(shares)/r/[alias]/components/file-upload-section.tsx index 9f4f7f2..4967f4b 100644 --- a/apps/web/src/app/(shares)/r/[alias]/components/file-upload-section.tsx +++ b/apps/web/src/app/(shares)/r/[alias]/components/file-upload-section.tsx @@ -14,6 +14,7 @@ import { Label } from "@/components/ui/label"; import { Progress } from "@/components/ui/progress"; import { Textarea } from "@/components/ui/textarea"; import { getPresignedUrlForUploadByAlias, registerFileUploadByAlias } from "@/http/endpoints"; +import { ChunkedUploader } from "@/utils/chunked-upload"; import { formatFileSize } from "@/utils/format-file-size"; import { FILE_STATUS, UPLOAD_CONFIG, UPLOAD_PROGRESS } from "../constants"; import { FileUploadSectionProps, FileWithProgress } from "../types"; @@ -138,17 +139,37 @@ export function FileUploadSection({ reverseShare, password, alias, onUploadSucce presignedUrl: string, onProgress?: (progress: number) => void ): Promise => { - await axios.put(presignedUrl, file, { - headers: { - "Content-Type": file.type, - }, - onUploadProgress: (progressEvent) => { - if (onProgress && progressEvent.total) { - const progress = (progressEvent.loaded / progressEvent.total) * 100; - onProgress(Math.round(progress)); - } - }, - }); + const shouldUseChunked = ChunkedUploader.shouldUseChunkedUpload(file.size); + + if (shouldUseChunked) { + const chunkSize = ChunkedUploader.calculateOptimalChunkSize(file.size); + + const result = await ChunkedUploader.uploadFile({ + file, + url: presignedUrl, + chunkSize, + onProgress, + onChunkComplete: (chunkIndex, totalChunks) => { + console.log(`Chunk ${chunkIndex + 1}/${totalChunks} completed`); + }, + }); + + if (!result.success) { + throw new Error(result.error || "Chunked upload failed"); + } + } else { + await axios.put(presignedUrl, file, { + headers: { + "Content-Type": file.type, + }, + onUploadProgress: (progressEvent) => { + if (onProgress && progressEvent.total) { + const progress = (progressEvent.loaded / progressEvent.total) * 100; + onProgress(Math.round(progress)); + } + }, + }); + } }; const registerUploadedFile = async (file: File, objectName: string): Promise => { @@ -195,7 +216,6 @@ export function FileUploadSection({ reverseShare, password, alias, onUploadSucce updateFileStatus(index, { status: FILE_STATUS.SUCCESS }); } catch (error: any) { - console.error("Upload error:", error); const errorMessage = error.response?.data?.error || t("reverseShares.upload.errors.uploadFailed"); updateFileStatus(index, { diff --git a/apps/web/src/app/(shares)/reverse-shares/components/received-files-modal.tsx b/apps/web/src/app/(shares)/reverse-shares/components/received-files-modal.tsx index d55b919..5278292 100644 --- a/apps/web/src/app/(shares)/reverse-shares/components/received-files-modal.tsx +++ b/apps/web/src/app/(shares)/reverse-shares/components/received-files-modal.tsx @@ -557,16 +557,21 @@ export function ReceivedFilesModal({ } catch (error: any) { console.error("Error copying file:", error); - if (error.response?.data?.error) { - const errorMessage = error.response.data.error; - if 
(errorMessage.includes("File size exceeds") || errorMessage.includes("Insufficient storage")) { - toast.error(errorMessage); - } else { - toast.error(t("reverseShares.modals.receivedFiles.copyError")); + let errorMessage = t("reverseShares.modals.receivedFiles.copyError"); + + if (error.message?.includes("timeout") || error.code === "UND_ERR_SOCKET") { + errorMessage = t("reverseShares.modals.receivedFiles.copyErrors.timeout"); + } else if (error.response?.data?.error) { + const serverError = error.response.data.error; + if (serverError.includes("File size exceeds") || serverError.includes("Insufficient storage")) { + errorMessage = serverError; + } else if (serverError.includes("Copy operation failed")) { + errorMessage = t("reverseShares.modals.receivedFiles.copyErrors.failed"); } - } else { - toast.error(t("reverseShares.modals.receivedFiles.copyError")); + } else if (error.name === "AbortError") { + errorMessage = t("reverseShares.modals.receivedFiles.copyErrors.aborted"); } + toast.error(errorMessage); } finally { setCopyingFile(null); } diff --git a/apps/web/src/app/api/(proxy)/filesystem/cancel-upload/[fileId]/route.ts b/apps/web/src/app/api/(proxy)/filesystem/cancel-upload/[fileId]/route.ts new file mode 100644 index 0000000..7b7138f --- /dev/null +++ b/apps/web/src/app/api/(proxy)/filesystem/cancel-upload/[fileId]/route.ts @@ -0,0 +1,33 @@ +import { NextRequest, NextResponse } from "next/server"; + +const API_BASE_URL = process.env.API_BASE_URL || "http://localhost:3333"; + +export async function DELETE(req: NextRequest, { params }: { params: Promise<{ fileId: string }> }) { + const { fileId } = await params; + const cookieHeader = req.headers.get("cookie"); + const url = `${API_BASE_URL}/filesystem/cancel-upload/${fileId}`; + + const apiRes = await fetch(url, { + method: "DELETE", + headers: { + cookie: cookieHeader || "", + }, + }); + + const contentType = apiRes.headers.get("Content-Type") || "application/json"; + const resBody = await apiRes.text(); + + const res = new NextResponse(resBody, { + status: apiRes.status, + headers: { + "Content-Type": contentType, + }, + }); + + const setCookie = apiRes.headers.getSetCookie?.() || []; + if (setCookie.length > 0) { + res.headers.set("Set-Cookie", setCookie.join(",")); + } + + return res; +} diff --git a/apps/web/src/app/api/(proxy)/filesystem/upload-progress/[fileId]/route.ts b/apps/web/src/app/api/(proxy)/filesystem/upload-progress/[fileId]/route.ts new file mode 100644 index 0000000..d228355 --- /dev/null +++ b/apps/web/src/app/api/(proxy)/filesystem/upload-progress/[fileId]/route.ts @@ -0,0 +1,33 @@ +import { NextRequest, NextResponse } from "next/server"; + +const API_BASE_URL = process.env.API_BASE_URL || "http://localhost:3333"; + +export async function GET(req: NextRequest, { params }: { params: Promise<{ fileId: string }> }) { + const { fileId } = await params; + const cookieHeader = req.headers.get("cookie"); + const url = `${API_BASE_URL}/filesystem/upload-progress/${fileId}`; + + const apiRes = await fetch(url, { + method: "GET", + headers: { + cookie: cookieHeader || "", + }, + }); + + const contentType = apiRes.headers.get("Content-Type") || "application/json"; + const resBody = await apiRes.text(); + + const res = new NextResponse(resBody, { + status: apiRes.status, + headers: { + "Content-Type": contentType, + }, + }); + + const setCookie = apiRes.headers.getSetCookie?.() || []; + if (setCookie.length > 0) { + res.headers.set("Set-Cookie", setCookie.join(",")); + } + + return res; +} diff --git 
a/apps/web/src/app/api/(proxy)/filesystem/upload/[token]/route.ts b/apps/web/src/app/api/(proxy)/filesystem/upload/[token]/route.ts index 7f768ea..a6ea3dd 100644 --- a/apps/web/src/app/api/(proxy)/filesystem/upload/[token]/route.ts +++ b/apps/web/src/app/api/(proxy)/filesystem/upload/[token]/route.ts @@ -1,6 +1,6 @@ import { NextRequest, NextResponse } from "next/server"; -export const maxDuration = 30000; +export const maxDuration = 120000; // 2 minutes to handle large files export const dynamic = "force-dynamic"; const API_BASE_URL = process.env.API_BASE_URL || "http://localhost:3333"; @@ -10,37 +10,60 @@ export async function PUT(req: NextRequest, { params }: { params: Promise<{ toke const cookieHeader = req.headers.get("cookie"); const url = `${API_BASE_URL}/filesystem/upload/${token}`; - const apiRes = await fetch(url, { - method: "PUT", - headers: { - cookie: cookieHeader || "", - "Content-Type": req.headers.get("Content-Type") || "application/octet-stream", - "Content-Length": req.headers.get("Content-Length") || "0", - }, - body: req.body, - duplex: "half", - } as RequestInit); + const headers: Record = { + cookie: cookieHeader || "", + "Content-Type": req.headers.get("Content-Type") || "application/octet-stream", + "Content-Length": req.headers.get("Content-Length") || "0", + }; - const contentType = apiRes.headers.get("Content-Type") || "application/json"; - - let resBody; - if (contentType.includes("application/json")) { - resBody = await apiRes.text(); - } else { - resBody = await apiRes.arrayBuffer(); - } - - const res = new NextResponse(resBody, { - status: apiRes.status, - headers: { - "Content-Type": contentType, - }, + req.headers.forEach((value, key) => { + if (key.startsWith("x-") || key.startsWith("X-")) { + headers[key] = value; + } }); - const setCookie = apiRes.headers.getSetCookie?.() || []; - if (setCookie.length > 0) { - res.headers.set("Set-Cookie", setCookie.join(",")); - } + try { + const apiRes = await fetch(url, { + method: "PUT", + headers, + body: req.body, + duplex: "half", + } as RequestInit); - return res; + const contentType = apiRes.headers.get("Content-Type") || "application/json"; + + let resBody; + if (contentType.includes("application/json")) { + resBody = await apiRes.text(); + } else { + resBody = await apiRes.arrayBuffer(); + } + + const res = new NextResponse(resBody, { + status: apiRes.status, + headers: { + "Content-Type": contentType, + }, + }); + + const setCookie = apiRes.headers.getSetCookie?.() || []; + if (setCookie.length > 0) { + res.headers.set("Set-Cookie", setCookie.join(",")); + } + + return res; + } catch (error) { + return new NextResponse( + JSON.stringify({ + error: "Proxy request failed", + details: error instanceof Error ? 
error.message : "Unknown error", + }), + { + status: 500, + headers: { + "Content-Type": "application/json", + }, + } + ); + } } diff --git a/apps/web/src/app/api/(proxy)/reverse-shares/files/[fileId]/copy/route.ts b/apps/web/src/app/api/(proxy)/reverse-shares/files/[fileId]/copy/route.ts index 42d97a5..c977c52 100644 --- a/apps/web/src/app/api/(proxy)/reverse-shares/files/[fileId]/copy/route.ts +++ b/apps/web/src/app/api/(proxy)/reverse-shares/files/[fileId]/copy/route.ts @@ -1,5 +1,8 @@ import { NextRequest, NextResponse } from "next/server"; +export const maxDuration = 600000; // 10 minutes timeout for large file copies +export const dynamic = "force-dynamic"; + const API_BASE_URL = process.env.API_BASE_URL || "http://localhost:3333"; export async function POST(req: NextRequest, { params }: { params: Promise<{ fileId: string }> }) { @@ -7,27 +10,83 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ fil const cookieHeader = req.headers.get("cookie"); const url = `${API_BASE_URL}/reverse-shares/files/${fileId}/copy`; - const apiRes = await fetch(url, { - method: "POST", - headers: { - cookie: cookieHeader || "", - }, - redirect: "manual", - }); + try { + const testResponse = await fetch(`${API_BASE_URL}/health`, { + method: "GET", + signal: AbortSignal.timeout(5000), // 5 seconds timeout + }); - const resBody = await apiRes.text(); + if (!testResponse.ok) { + throw new Error(`Backend health check failed: ${testResponse.status}`); + } - const res = new NextResponse(resBody, { - status: apiRes.status, - headers: { - "Content-Type": "application/json", - }, - }); + const controller = new AbortController(); + const timeoutId = setTimeout(() => { + controller.abort(); + }, 600000); // 10 minutes - const setCookie = apiRes.headers.getSetCookie?.() || []; - if (setCookie.length > 0) { - res.headers.set("Set-Cookie", setCookie.join(",")); + const apiRes = await fetch(url, { + method: "POST", + headers: { + cookie: cookieHeader || "", + }, + redirect: "manual", + signal: controller.signal, + }); + + clearTimeout(timeoutId); + + const resBody = await apiRes.text(); + + const res = new NextResponse(resBody, { + status: apiRes.status, + headers: { + "Content-Type": "application/json", + }, + }); + + const setCookie = apiRes.headers.getSetCookie?.() || []; + if (setCookie.length > 0) { + res.headers.set("Set-Cookie", setCookie.join(",")); + } + + return res; + } catch (error: any) { + console.error(`Copy to my files proxy error details:`, { + name: error.name, + message: error.message, + code: error.code, + cause: error.cause, + }); + + if (error.name === "AbortError") { + return new NextResponse( + JSON.stringify({ + error: "Copy operation timed out", + details: "The operation took too long to complete", + fileId, + }), + { + status: 408, + headers: { + "Content-Type": "application/json", + }, + } + ); + } + + return new NextResponse( + JSON.stringify({ + error: "Copy operation failed", + details: error.message || "Unknown error", + fileId, + }), + { + status: 500, + headers: { + "Content-Type": "application/json", + }, + } + ); } - - return res; } diff --git a/apps/web/src/components/general/global-drop-zone.tsx b/apps/web/src/components/general/global-drop-zone.tsx index c816883..989bdca 100644 --- a/apps/web/src/components/general/global-drop-zone.tsx +++ b/apps/web/src/components/general/global-drop-zone.tsx @@ -9,6 +9,7 @@ import { toast } from "sonner"; import { Button } from "@/components/ui/button"; import { Progress } from "@/components/ui/progress"; import { 
checkFile, getPresignedUrl, registerFile } from "@/http/endpoints"; +import { ChunkedUploader } from "@/utils/chunked-upload"; import { getFileIcon } from "@/utils/file-icons"; import { generateSafeFileName } from "@/utils/file-utils"; import { formatFileSize } from "@/utils/format-file-size"; @@ -123,23 +124,54 @@ export function GlobalDropZone({ onSuccess, children }: GlobalDropZoneProps) { const abortController = new AbortController(); setFileUploads((prev) => prev.map((u) => (u.id === id ? { ...u, abortController } : u))); - await axios.put(url, file, { - headers: { - "Content-Type": file.type, - }, - signal: abortController.signal, - onUploadProgress: (progressEvent: any) => { - const progress = (progressEvent.loaded / (progressEvent.total || file.size)) * 100; - setFileUploads((prev) => prev.map((u) => (u.id === id ? { ...u, progress: Math.round(progress) } : u))); - }, - }); + const shouldUseChunked = ChunkedUploader.shouldUseChunkedUpload(file.size); + const chunkSize = ChunkedUploader.calculateOptimalChunkSize(file.size); - await registerFile({ - name: fileName, - objectName: objectName, - size: file.size, - extension: extension, - }); + if (shouldUseChunked) { + const result = await ChunkedUploader.uploadFile({ + file, + url, + chunkSize, + signal: abortController.signal, + onProgress: (progress) => { + setFileUploads((prev) => prev.map((u) => (u.id === id ? { ...u, progress } : u))); + }, + onChunkComplete: (chunkIndex, totalChunks) => { + console.log(`Chunk ${chunkIndex + 1}/${totalChunks} completed`); + }, + }); + + if (!result.success) { + throw new Error(result.error || "Chunked upload failed"); + } + + const finalObjectName = result.finalObjectName || objectName; + + await registerFile({ + name: fileName, + objectName: finalObjectName, + size: file.size, + extension: extension, + }); + } else { + await axios.put(url, file, { + headers: { + "Content-Type": file.type, + }, + signal: abortController.signal, + onUploadProgress: (progressEvent: any) => { + const progress = (progressEvent.loaded / (progressEvent.total || file.size)) * 100; + setFileUploads((prev) => prev.map((u) => (u.id === id ? { ...u, progress: Math.round(progress) } : u))); + }, + }); + + await registerFile({ + name: fileName, + objectName: objectName, + size: file.size, + extension: extension, + }); + } setFileUploads((prev) => prev.map((u) => diff --git a/apps/web/src/components/modals/upload-file-modal.tsx b/apps/web/src/components/modals/upload-file-modal.tsx index 150e7e5..27f2a0e 100644 --- a/apps/web/src/components/modals/upload-file-modal.tsx +++ b/apps/web/src/components/modals/upload-file-modal.tsx @@ -10,6 +10,7 @@ import { Button } from "@/components/ui/button"; import { Dialog, DialogContent, DialogFooter, DialogHeader, DialogTitle } from "@/components/ui/dialog"; import { Progress } from "@/components/ui/progress"; import { checkFile, getPresignedUrl, registerFile } from "@/http/endpoints"; +import { ChunkedUploader } from "@/utils/chunked-upload"; import { getFileIcon } from "@/utils/file-icons"; import { generateSafeFileName } from "@/utils/file-utils"; import { formatFileSize } from "@/utils/format-file-size"; @@ -251,23 +252,59 @@ export function UploadFileModal({ isOpen, onClose, onSuccess }: UploadFileModalP const abortController = new AbortController(); setFileUploads((prev) => prev.map((u) => (u.id === id ? 
{ ...u, abortController } : u))); - await axios.put(url, file, { - headers: { - "Content-Type": file.type, - }, - signal: abortController.signal, - onUploadProgress: (progressEvent) => { - const progress = (progressEvent.loaded / (progressEvent.total || file.size)) * 100; - setFileUploads((prev) => prev.map((u) => (u.id === id ? { ...u, progress: Math.round(progress) } : u))); - }, - }); + const shouldUseChunked = ChunkedUploader.shouldUseChunkedUpload(file.size); + const chunkSize = ChunkedUploader.calculateOptimalChunkSize(file.size); - await registerFile({ - name: fileName, - objectName: objectName, - size: file.size, - extension: extension, - }); + if (shouldUseChunked) { + const result = await ChunkedUploader.uploadFile({ + file, + url, + chunkSize, + signal: abortController.signal, + onProgress: (progress) => { + setFileUploads((prev) => prev.map((u) => (u.id === id ? { ...u, progress } : u))); + }, + onChunkComplete: (chunkIndex, totalChunks) => { + console.log(`Chunk ${chunkIndex + 1}/${totalChunks} completed`); + }, + }); + + if (!result.success) { + throw new Error(result.error || "Chunked upload failed"); + } + + const finalObjectName = result.finalObjectName || objectName; + console.log("Chunked upload result:", result); + console.log("Using final object name:", finalObjectName); + + await registerFile({ + name: fileName, + objectName: finalObjectName, + size: file.size, + extension: extension, + }); + } else { + await axios.put(url, file, { + headers: { + "Content-Type": file.type, + }, + signal: abortController.signal, + timeout: 300000, // 5 minutes timeout for direct uploads + maxContentLength: Infinity, + maxBodyLength: Infinity, + onUploadProgress: (progressEvent) => { + const progress = (progressEvent.loaded / (progressEvent.total || file.size)) * 100; + setFileUploads((prev) => prev.map((u) => (u.id === id ? { ...u, progress: Math.round(progress) } : u))); + }, + }); + + await registerFile({ + name: fileName, + objectName: objectName, + size: file.size, + extension: extension, + }); + } setFileUploads((prev) => prev.map((u) => diff --git a/apps/web/src/utils/chunked-upload.ts b/apps/web/src/utils/chunked-upload.ts new file mode 100644 index 0000000..ba62fcf --- /dev/null +++ b/apps/web/src/utils/chunked-upload.ts @@ -0,0 +1,328 @@ +import axios from "axios"; + +export interface ChunkedUploadOptions { + file: File; + url: string; + chunkSize?: number; // Default 1GB + onProgress?: (progress: number) => void; + onChunkComplete?: (chunkIndex: number, totalChunks: number) => void; + signal?: AbortSignal; +} + +export interface ChunkedUploadResult { + success: boolean; + objectName?: string; + finalObjectName?: string; + error?: string; +} + +export class ChunkedUploader { + private static readonly MAX_CONCURRENT_CHUNKS = 3; + + /** + * Upload a file in chunks with streaming + */ + static async uploadFile(options: ChunkedUploadOptions): Promise { + const { file, url, chunkSize, onProgress, onChunkComplete, signal } = options; + + if (!this.shouldUseChunkedUpload(file.size)) { + throw new Error( + `File ${file.name} (${(file.size / (1024 * 1024)).toFixed(2)}MB) should not use chunked upload. 
Use regular upload instead.` + ); + } + + const optimalChunkSize = chunkSize || this.calculateOptimalChunkSize(file.size); + + try { + const fileId = this.generateFileId(); + + const totalChunks = Math.ceil(file.size / optimalChunkSize); + + console.log(`Chunked upload debug:`, { + fileName: file.name, + fileSize: file.size, + fileSizeMB: (file.size / (1024 * 1024)).toFixed(2) + "MB", + chunkSize: optimalChunkSize, + chunkSizeMB: (optimalChunkSize / (1024 * 1024)).toFixed(2) + "MB", + totalChunks: totalChunks, + expectedChunkSize: (file.size / totalChunks / (1024 * 1024)).toFixed(2) + "MB per chunk", + }); + const uploadedChunks = new Set(); + let completedChunks = 0; + let lastChunkResponse: any = null; + + for (let chunkIndex = 0; chunkIndex < totalChunks; chunkIndex++) { + if (signal?.aborted) { + throw new Error("Upload cancelled"); + } + + const start = chunkIndex * optimalChunkSize; + const end = Math.min(start + optimalChunkSize, file.size); + const chunk = file.slice(start, end); + const isLastChunk = chunkIndex === totalChunks - 1; + + let retries = 0; + const maxRetries = 3; + let chunkUploaded = false; + + while (retries < maxRetries && !chunkUploaded) { + try { + const response = await this.uploadChunk({ + fileId, + chunk, + chunkIndex, + totalChunks, + chunkSize: optimalChunkSize, + totalSize: file.size, + fileName: file.name, + isLastChunk, + url, + signal, + }); + + if (isLastChunk) { + lastChunkResponse = response; + console.log("Last chunk response captured:", response); + } + + chunkUploaded = true; + } catch (error: any) { + retries++; + + if ( + error.response?.status === 400 && + (error.response?.data?.error?.includes("already uploaded") || + error.response?.data?.details?.includes("already uploaded")) + ) { + console.log(`Chunk ${chunkIndex + 1} was already uploaded successfully`); + chunkUploaded = true; + break; + } + + console.warn(`Chunk ${chunkIndex + 1} failed (attempt ${retries}/${maxRetries}):`, error.message); + + if (retries >= maxRetries) { + throw error; + } + + const backoffDelay = error.message?.includes("timeout") ? 2000 * retries : 1000 * retries; + console.log(`Waiting ${backoffDelay}ms before retry ${retries + 1}/${maxRetries}`); + await new Promise((resolve) => setTimeout(resolve, backoffDelay)); + } + } + + if (!chunkUploaded) { + throw new Error(`Failed to upload chunk ${chunkIndex + 1} after ${maxRetries} attempts`); + } + + uploadedChunks.add(chunkIndex); + completedChunks++; + + const progress = Math.round((completedChunks / totalChunks) * 100); + onProgress?.(progress); + onChunkComplete?.(chunkIndex, totalChunks); + + if (!isLastChunk) { + await new Promise((resolve) => setTimeout(resolve, 100)); + } + } + + await new Promise((resolve) => setTimeout(resolve, 500)); + + console.log("Chunked upload completed. 
Last chunk response:", lastChunkResponse); + + return { + success: true, + finalObjectName: lastChunkResponse?.finalObjectName || lastChunkResponse?.objectName, + }; + } catch (error: any) { + console.error("Chunked upload failed:", error); + return { + success: false, + error: error.message || "Upload failed", + }; + } + } + + /** + * Upload a single chunk + */ + private static async uploadChunk({ + fileId, + chunk, + chunkIndex, + totalChunks, + chunkSize, + totalSize, + fileName, + isLastChunk, + url, + signal, + }: { + fileId: string; + chunk: Blob; + chunkIndex: number; + totalChunks: number; + chunkSize: number; + totalSize: number; + fileName: string; + isLastChunk: boolean; + url: string; + signal?: AbortSignal; + }): Promise { + const headers = { + "Content-Type": "application/octet-stream", + "X-File-Id": fileId, + "X-Chunk-Index": chunkIndex.toString(), + "X-Total-Chunks": totalChunks.toString(), + "X-Chunk-Size": chunkSize.toString(), + "X-Total-Size": totalSize.toString(), + "X-File-Name": fileName, + "X-Is-Last-Chunk": isLastChunk.toString(), + }; + + console.log(`Uploading chunk ${chunkIndex + 1}/${totalChunks} with headers:`, headers); + + try { + const timeoutPer100MB = 60000; // 60 seconds per 100MB + const chunkSizeMB = chunk.size / (1024 * 1024); + const calculatedTimeout = Math.max(30000, Math.ceil(chunkSizeMB / 100) * timeoutPer100MB); + + console.log(`Chunk ${chunkIndex + 1} size: ${chunkSizeMB.toFixed(2)}MB, timeout: ${calculatedTimeout}ms`); + + const response = await axios.put(url, chunk, { + headers, + signal, + timeout: calculatedTimeout, + maxContentLength: Infinity, + maxBodyLength: Infinity, + }); + + if (response.status !== 200) { + throw new Error(`Failed to upload chunk ${chunkIndex}: ${response.statusText}`); + } + + return response.data; + } catch (error: any) { + if ( + error.response?.status === 400 && + (error.response?.data?.error?.includes("already uploaded") || + error.response?.data?.details?.includes("already uploaded")) + ) { + console.log(`Chunk ${chunkIndex + 1} was already uploaded successfully, treating as success`); + return error.response.data; + } + + if (error.code === "ECONNABORTED" || error.message?.includes("timeout")) { + console.warn(`Chunk ${chunkIndex + 1} upload timed out, will retry`); + throw new Error(`Upload timeout for chunk ${chunkIndex + 1}`); + } + + throw error; + } + } + + /** + * Get upload progress + */ + static async getUploadProgress(fileId: string): Promise<{ + uploaded: number; + total: number; + percentage: number; + } | null> { + try { + const response = await axios.get(`/api/filesystem/upload-progress/${fileId}`); + return response.data; + } catch (error) { + console.warn("Failed to get upload progress:", error); + return null; + } + } + + /** + * Cancel upload + */ + static async cancelUpload(fileId: string): Promise { + try { + await axios.delete(`/api/filesystem/cancel-upload/${fileId}`); + } catch (error) { + console.warn("Failed to cancel upload:", error); + } + } + + /** + * Generate unique file ID + */ + private static generateFileId(): string { + return `${Date.now()}-${Math.random().toString(36).substring(2, 15)}`; + } + + /** + * Check if file should use chunked upload + */ + static shouldUseChunkedUpload(fileSize: number): boolean { + const threshold = 100 * 1024 * 1024; // 100MB + const shouldUse = fileSize > threshold; + + console.log(`Chunked upload decision:`, { + fileSize, + fileSizeMB: (fileSize / (1024 * 1024)).toFixed(2) + "MB", + threshold, + thresholdMB: (threshold / (1024 * 
1024)).toFixed(2) + "MB", + shouldUse, + comparison: `${fileSize} > ${threshold} = ${shouldUse}`, + }); + + return shouldUse; + } + + /** + * Calculate optimal chunk size based on file size + */ + static calculateOptimalChunkSize(fileSize: number): number { + console.log(`Calculating chunk size for file:`, { + fileSize, + fileSizeMB: (fileSize / (1024 * 1024)).toFixed(2) + "MB", + }); + + if (fileSize <= 100 * 1024 * 1024) { + throw new Error( + `calculateOptimalChunkSize should not be called for files <= 100MB. File size: ${(fileSize / (1024 * 1024)).toFixed(2)}MB` + ); + } + + // For files > 10GB, use 500MB chunks + if (fileSize > 10 * 1024 * 1024 * 1024) { + console.log(`File > 10GB, using 500MB chunks`); + return 500 * 1024 * 1024; + } + + // For files > 5GB, use 300MB chunks + if (fileSize > 5 * 1024 * 1024 * 1024) { + console.log(`File > 5GB, using 300MB chunks`); + return 300 * 1024 * 1024; + } + + // For files > 2GB, use 200MB chunks + if (fileSize > 2 * 1024 * 1024 * 1024) { + console.log(`File > 2GB, using 200MB chunks`); + return 200 * 1024 * 1024; + } + + // For files > 1GB, use 150MB chunks + if (fileSize > 1024 * 1024 * 1024) { + console.log(`File > 1GB, using 150MB chunks`); + return 150 * 1024 * 1024; + } + + // For files > 500MB, use 100MB chunks + if (fileSize > 500 * 1024 * 1024) { + console.log(`File > 500MB, using 100MB chunks`); + return 100 * 1024 * 1024; + } + + // For files > 100MB, use 75MB chunks (minimum for chunked upload) + console.log(`File > 100MB, using 75MB chunks (minimum for chunked upload)`); + return 75 * 1024 * 1024; + } +} diff --git a/apps/web/tsconfig.json b/apps/web/tsconfig.json index f85d9b9..f78de47 100644 --- a/apps/web/tsconfig.json +++ b/apps/web/tsconfig.json @@ -34,8 +34,9 @@ ".next/types/**/*.ts" ], "include": [ - "next-env.d.ts", "**/*.ts", - "**/*.tsx" + "**/*.tsx", + "next-env.d.ts", + ".next/types/**/*.ts" ] -} \ No newline at end of file +}
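
Note for reviewers: the sketch below summarizes how the client-side pieces added in this patch are intended to fit together. It is illustrative only — `getUploadUrl` and `registerUploadedFile` are hypothetical stand-ins for the existing `getPresignedUrl`/`registerFile` calls made in the touched components, while `ChunkedUploader.shouldUseChunkedUpload`, `ChunkedUploader.calculateOptimalChunkSize`, and `ChunkedUploader.uploadFile` are the real APIs introduced in apps/web/src/utils/chunked-upload.ts.

    import axios from "axios";

    import { ChunkedUploader } from "@/utils/chunked-upload";
    // Hypothetical helpers for this sketch; the real components call
    // getPresignedUrl/checkFile/registerFile from "@/http/endpoints".
    import { getUploadUrl, registerUploadedFile } from "./upload-helpers";

    export async function uploadWithChunking(file: File, onProgress: (p: number) => void) {
      // Assumption: returns a PUT upload URL plus the object name the server expects.
      const { url, objectName } = await getUploadUrl(file.name);

      if (ChunkedUploader.shouldUseChunkedUpload(file.size)) {
        // Files above the 100MB threshold go through the chunked path.
        const result = await ChunkedUploader.uploadFile({
          file,
          url,
          chunkSize: ChunkedUploader.calculateOptimalChunkSize(file.size),
          onProgress,
        });
        if (!result.success) {
          throw new Error(result.error || "Chunked upload failed");
        }
        // The filesystem backend may report a finalized object name once all
        // chunks are assembled, so prefer it when registering the file.
        await registerUploadedFile(file, result.finalObjectName || objectName);
      } else {
        // Small files keep the original single-request PUT.
        await axios.put(url, file, { headers: { "Content-Type": file.type } });
        await registerUploadedFile(file, objectName);
      }
    }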