Mirror of https://github.com/kyantech/Palmr.git (synced 2025-10-23)
refactor(filesystem): improve file download handling and memory management
- Replace separate large/small file download methods with unified stream handling
- Add memory usage tracking and garbage collection for download operations
- Implement proper stream cleanup and error handling
- Remove redundant comments and simplify interval configuration
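The diff below touches two files: the filesystem controller and the filesystem storage provider. The core change is that every download now goes through Node's stream.pipeline instead of branching into a buffer-everything path for small files and a separate streaming path for large ones. A rough sketch of that pattern (not the exact Palmr code; sendFile and the bare fs source are illustrative stand-ins):

import fs from "fs";
import { pipeline } from "stream/promises";
import type { FastifyReply } from "fastify";

// One code path for every file size: pipeline() applies backpressure and
// destroys both streams if either side errors, so nothing is buffered whole.
async function sendFile(reply: FastifyReply, filePath: string): Promise<void> {
  const source = fs.createReadStream(filePath);
  reply.raw.on("close", () => source.destroy()); // stop reading on client disconnect
  try {
    await pipeline(source, reply.raw);
  } catch {
    if (!reply.sent) {
      reply.status(500).send({ error: "Download failed" });
    }
  }
}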
@@ -8,9 +8,6 @@ import { ChunkManager, ChunkMetadata } from "./chunk-manager";
 export class FilesystemController {
   private chunkManager = ChunkManager.getInstance();

-  /**
-   * Safely encode filename for Content-Disposition header
-   */
   private encodeFilenameForHeader(filename: string): string {
     if (!filename || filename.trim() === "") {
       return 'attachment; filename="download"';
@@ -103,9 +100,6 @@ export class FilesystemController {
     await provider.uploadFileFromStream(objectName, request.raw);
   }

-  /**
-   * Extract chunk metadata from request headers
-   */
   private extractChunkMetadata(request: FastifyRequest): ChunkMetadata | null {
     const fileId = request.headers["x-file-id"] as string;
     const chunkIndex = request.headers["x-chunk-index"] as string;
@@ -132,9 +126,6 @@ export class FilesystemController {
     return metadata;
   }

-  /**
-   * Handle chunked upload with streaming
-   */
   private async handleChunkedUpload(request: FastifyRequest, metadata: ChunkMetadata, originalObjectName: string) {
     const stream = request.raw;

@@ -145,9 +136,6 @@ export class FilesystemController {
     return await this.chunkManager.processChunk(metadata, stream, originalObjectName);
   }

-  /**
-   * Get upload progress for chunked uploads
-   */
   async getUploadProgress(request: FastifyRequest, reply: FastifyReply) {
     try {
       const { fileId } = request.params as { fileId: string };
@@ -164,9 +152,6 @@ export class FilesystemController {
     }
   }

-  /**
-   * Cancel chunked upload
-   */
   async cancelUpload(request: FastifyRequest, reply: FastifyReply) {
     try {
       const { fileId } = request.params as { fileId: string };
@@ -194,7 +179,6 @@ export class FilesystemController {
       const filePath = provider.getFilePath(tokenData.objectName);
       const stats = await fs.promises.stat(filePath);
       const fileSize = stats.size;
-      const isLargeFile = fileSize > 50 * 1024 * 1024;

       const fileName = tokenData.fileName || "download";
       const range = request.headers.range;
@@ -207,28 +191,15 @@ export class FilesystemController {
         const parts = range.replace(/bytes=/, "").split("-");
         const start = parseInt(parts[0], 10);
         const end = parts[1] ? parseInt(parts[1], 10) : fileSize - 1;
-        const chunkSize = end - start + 1;

         reply.status(206);
         reply.header("Content-Range", `bytes ${start}-${end}/${fileSize}`);
-        reply.header("Content-Length", chunkSize);
+        reply.header("Content-Length", end - start + 1);

-        if (isLargeFile) {
-          await this.downloadLargeFileRange(reply, provider, tokenData.objectName, start, end);
-        } else {
-          const buffer = await provider.downloadFile(tokenData.objectName);
-          const chunk = buffer.slice(start, end + 1);
-          reply.send(chunk);
-        }
+        await this.downloadFileRange(reply, provider, tokenData.objectName, start, end);
       } else {
         reply.header("Content-Length", fileSize);
-        if (isLargeFile) {
-          await this.downloadLargeFile(reply, provider, filePath);
-        } else {
-          const stream = provider.createDecryptedReadStream(tokenData.objectName);
-          reply.send(stream);
-        }
+        await this.downloadFileStream(reply, provider, tokenData.objectName);
       }

       provider.consumeDownloadToken(token);
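To sanity-check the Content-Length fix above: for a request with Range: bytes=0-1023 against a 5000-byte file, the handler should answer 206 with Content-Range: bytes 0-1023/5000 and Content-Length: 1024. A standalone replay of the parsing arithmetic (a sketch only, using the same expressions as the hunk above, outside Fastify):

const fileSize = 5000;
const range = "bytes=0-1023";
const parts = range.replace(/bytes=/, "").split("-");
const start = parseInt(parts[0], 10); // 0
const end = parts[1] ? parseInt(parts[1], 10) : fileSize - 1; // 1023
console.log(`bytes ${start}-${end}/${fileSize}`); // Content-Range: "bytes 0-1023/5000"
console.log(end - start + 1); // Content-Length: 1024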
@@ -237,32 +208,75 @@ export class FilesystemController {
     }
   }

-  private async downloadLargeFile(reply: FastifyReply, provider: FilesystemStorageProvider, filePath: string) {
-    const readStream = fs.createReadStream(filePath);
-    const decryptStream = provider.createDecryptStream();
-
+  private async downloadFileStream(reply: FastifyReply, provider: FilesystemStorageProvider, objectName: string) {
     try {
-      await pipeline(readStream, decryptStream, reply.raw);
+      FilesystemStorageProvider.logMemoryUsage(`Download start: ${objectName}`);
+
+      const downloadStream = provider.createDownloadStream(objectName);
+
+      downloadStream.on("error", (error) => {
+        console.error("Download stream error:", error);
+        FilesystemStorageProvider.logMemoryUsage(`Download error: ${objectName}`);
+        if (!reply.sent) {
+          reply.status(500).send({ error: "Download failed" });
+        }
+      });
+
+      reply.raw.on("close", () => {
+        if (downloadStream.readable && typeof (downloadStream as any).destroy === "function") {
+          (downloadStream as any).destroy();
+        }
+        FilesystemStorageProvider.logMemoryUsage(`Download client disconnect: ${objectName}`);
+      });
+
+      await pipeline(downloadStream, reply.raw);
+
+      FilesystemStorageProvider.logMemoryUsage(`Download complete: ${objectName}`);
     } catch (error) {
-      throw error;
+      console.error("Download error:", error);
+      FilesystemStorageProvider.logMemoryUsage(`Download failed: ${objectName}`);
+      if (!reply.sent) {
+        reply.status(500).send({ error: "Download failed" });
+      }
     }
   }

-  private async downloadLargeFileRange(
+  private async downloadFileRange(
     reply: FastifyReply,
     provider: FilesystemStorageProvider,
     objectName: string,
     start: number,
     end: number
   ) {
-    const filePath = provider.getFilePath(objectName);
-    const readStream = fs.createReadStream(filePath, { start, end });
-    const decryptStream = provider.createDecryptStream();
-
     try {
-      await pipeline(readStream, decryptStream, reply.raw);
+      FilesystemStorageProvider.logMemoryUsage(`Range download start: ${objectName} (${start}-${end})`);
+
+      const rangeStream = await provider.createDownloadRangeStream(objectName, start, end);
+
+      rangeStream.on("error", (error) => {
+        console.error("Range download stream error:", error);
+        FilesystemStorageProvider.logMemoryUsage(`Range download error: ${objectName} (${start}-${end})`);
+        if (!reply.sent) {
+          reply.status(500).send({ error: "Download failed" });
+        }
+      });
+
+      reply.raw.on("close", () => {
+        if (rangeStream.readable && typeof (rangeStream as any).destroy === "function") {
+          (rangeStream as any).destroy();
+        }
+        FilesystemStorageProvider.logMemoryUsage(`Range download client disconnect: ${objectName} (${start}-${end})`);
+      });
+
+      await pipeline(rangeStream, reply.raw);
+
+      FilesystemStorageProvider.logMemoryUsage(`Range download complete: ${objectName} (${start}-${end})`);
     } catch (error) {
-      throw error;
+      console.error("Range download error:", error);
+      FilesystemStorageProvider.logMemoryUsage(`Range download failed: ${objectName} (${start}-${end})`);
+      if (!reply.sent) {
+        reply.status(500).send({ error: "Download failed" });
+      }
     }
   }
 }
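The rewritten controller methods split cleanly: downloadFileStream serves whole files, downloadFileRange serves 206 partial content, and both log memory usage, guard reply.sent before emitting a 500, and destroy the source stream when reply.raw closes so a client disconnect cannot leave a decrypt pipeline running. Assuming a token-based download route (the URL shape and port below are illustrative, not taken from this diff), both paths can be exercised with fetch:

// Whole-file download -> downloadFileStream (expect 200)
const full = await fetch("http://localhost:3333/filesystem/download/<token>");
console.log(full.status, full.headers.get("content-length"));

// Ranged download -> downloadFileRange (expect 206)
const part = await fetch("http://localhost:3333/filesystem/download/<token>", {
  headers: { Range: "bytes=0-1023" },
});
console.log(part.status, part.headers.get("content-range")); // "bytes 0-1023/<size>"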
@@ -22,7 +22,7 @@ export class FilesystemStorageProvider implements StorageProvider {

     this.ensureUploadsDir();
     setInterval(() => this.cleanExpiredTokens(), 5 * 60 * 1000);
-    setInterval(() => this.cleanupEmptyTempDirs(), 10 * 60 * 1000); // Every 10 minutes
+    setInterval(() => this.cleanupEmptyTempDirs(), 10 * 60 * 1000);
   }

   public static getInstance(): FilesystemStorageProvider {
@@ -32,14 +32,6 @@ export class FilesystemStorageProvider implements StorageProvider {
     return FilesystemStorageProvider.instance;
   }

-  public createDecryptedReadStream(objectName: string): NodeJS.ReadableStream {
-    const filePath = this.getFilePath(objectName);
-    const fileStream = fsSync.createReadStream(filePath);
-    const decryptStream = this.createDecryptStream();
-
-    return fileStream.pipe(decryptStream);
-  }
-
   private async ensureUploadsDir(): Promise<void> {
     try {
       await fs.access(this.uploadsDir);
@@ -261,6 +253,183 @@ export class FilesystemStorageProvider implements StorageProvider {
     return this.decryptFileLegacy(fileBuffer);
   }

+  createDownloadStream(objectName: string): NodeJS.ReadableStream {
+    const filePath = this.getFilePath(objectName);
+    const fileStream = fsSync.createReadStream(filePath);
+
+    if (this.isEncryptionDisabled) {
+      fileStream.on("end", () => {
+        if (global.gc) {
+          global.gc();
+        }
+      });
+
+      fileStream.on("close", () => {
+        if (global.gc) {
+          global.gc();
+        }
+      });
+
+      return fileStream;
+    }
+
+    const decryptStream = this.createDecryptStream();
+    let isDestroyed = false;
+
+    const cleanup = () => {
+      if (isDestroyed) return;
+      isDestroyed = true;
+
+      try {
+        if (fileStream && !fileStream.destroyed) {
+          fileStream.destroy();
+        }
+        if (decryptStream && !decryptStream.destroyed) {
+          decryptStream.destroy();
+        }
+      } catch (error) {
+        console.warn("Error during download stream cleanup:", error);
+      }
+
+      if (global.gc) {
+        global.gc();
+      }
+    };
+
+    fileStream.on("error", cleanup);
+    decryptStream.on("error", cleanup);
+    decryptStream.on("end", cleanup);
+    decryptStream.on("close", cleanup);
+
+    decryptStream.on("pipe", (src: any) => {
+      if (src && src.on) {
+        src.on("close", cleanup);
+        src.on("error", cleanup);
+      }
+    });
+
+    return fileStream.pipe(decryptStream);
+  }
+
+  async createDownloadRangeStream(objectName: string, start: number, end: number): Promise<NodeJS.ReadableStream> {
+    if (!this.isEncryptionDisabled) {
+      return this.createRangeStreamFromDecrypted(objectName, start, end);
+    }
+
+    const filePath = this.getFilePath(objectName);
+    return fsSync.createReadStream(filePath, { start, end });
+  }
+
+  private createRangeStreamFromDecrypted(objectName: string, start: number, end: number): NodeJS.ReadableStream {
+    const { Transform, PassThrough } = require("stream");
+    const filePath = this.getFilePath(objectName);
+    const fileStream = fsSync.createReadStream(filePath);
+    const decryptStream = this.createDecryptStream();
+    const rangeStream = new PassThrough();
+
+    let bytesRead = 0;
+    let rangeEnded = false;
+    let isDestroyed = false;
+
+    const rangeTransform = new Transform({
+      transform(chunk: Buffer, encoding: any, callback: any) {
+        if (rangeEnded || isDestroyed) {
+          callback();
+          return;
+        }
+
+        const chunkStart = bytesRead;
+        const chunkEnd = bytesRead + chunk.length - 1;
+        bytesRead += chunk.length;
+
+        if (chunkEnd < start) {
+          callback();
+          return;
+        }
+
+        if (chunkStart > end) {
+          rangeEnded = true;
+          this.end();
+          callback();
+          return;
+        }
+
+        let sliceStart = 0;
+        let sliceEnd = chunk.length;
+
+        if (chunkStart < start) {
+          sliceStart = start - chunkStart;
+        }
+
+        if (chunkEnd > end) {
+          sliceEnd = end - chunkStart + 1;
+          rangeEnded = true;
+        }
+
+        const slicedChunk = chunk.slice(sliceStart, sliceEnd);
+        this.push(slicedChunk);
+
+        if (rangeEnded) {
+          this.end();
+        }
+
+        callback();
+      },
+
+      flush(callback: any) {
+        if (global.gc) {
+          global.gc();
+        }
+        callback();
+      },
+    });
+
+    const cleanup = () => {
+      if (isDestroyed) return;
+      isDestroyed = true;
+
+      try {
+        if (fileStream && !fileStream.destroyed) {
+          fileStream.destroy();
+        }
+        if (decryptStream && !decryptStream.destroyed) {
+          decryptStream.destroy();
+        }
+        if (rangeTransform && !rangeTransform.destroyed) {
+          rangeTransform.destroy();
+        }
+        if (rangeStream && !rangeStream.destroyed) {
+          rangeStream.destroy();
+        }
+      } catch (error) {
+        console.warn("Error during stream cleanup:", error);
+      }
+
+      if (global.gc) {
+        global.gc();
+      }
+    };
+
+    fileStream.on("error", cleanup);
+    decryptStream.on("error", cleanup);
+    rangeTransform.on("error", cleanup);
+    rangeStream.on("error", cleanup);
+
+    rangeStream.on("close", cleanup);
+    rangeStream.on("end", cleanup);
+
+    rangeStream.on("pipe", (src: any) => {
+      if (src && src.on) {
+        src.on("close", cleanup);
+        src.on("error", cleanup);
+      }
+    });
+
+    fileStream.pipe(decryptStream).pipe(rangeTransform).pipe(rangeStream);
+
+    return rangeStream;
+  }
+
   private decryptFileBuffer(encryptedBuffer: Buffer): Buffer {
     const key = this.createEncryptionKey();
     const iv = encryptedBuffer.slice(0, 16);
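Because the decrypt stream cannot seek, createRangeStreamFromDecrypted above decrypts from byte 0 and slices the requested window out of each chunk, ending the stream as soon as the range is passed, so a ranged request costs roughly O(end) decryption work regardless of start. A standalone replay of its slice arithmetic (same formulas as the hunk above, no stream machinery):

// Byte accounting from createRangeStreamFromDecrypted, for Range bytes=100-199
// over 64-byte decrypted chunks.
function sliceForRange(chunkStart: number, chunkLen: number, start: number, end: number): [number, number] | null {
  const chunkEnd = chunkStart + chunkLen - 1;
  if (chunkEnd < start || chunkStart > end) return null; // outside the range (past `end`, the real code also ends the stream)
  const sliceStart = chunkStart < start ? start - chunkStart : 0;
  const sliceEnd = chunkEnd > end ? end - chunkStart + 1 : chunkLen;
  return [sliceStart, sliceEnd];
}

for (let offset = 0; offset < 256; offset += 64) {
  console.log(offset, sliceForRange(offset, 64, 100, 199));
  // 0 -> null, 64 -> [36, 64] (bytes 100-127), 128 -> [0, 64], 192 -> [0, 8] (bytes 192-199)
}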
@@ -277,6 +446,59 @@ export class FilesystemStorageProvider implements StorageProvider {
     return Buffer.from(decrypted.toString(CryptoJS.enc.Utf8), "base64");
   }

+  static logMemoryUsage(context: string = "Unknown"): void {
+    const memUsage = process.memoryUsage();
+    const formatBytes = (bytes: number) => {
+      const mb = bytes / 1024 / 1024;
+      return `${mb.toFixed(2)} MB`;
+    };
+
+    const rssInMB = memUsage.rss / 1024 / 1024;
+    const heapUsedInMB = memUsage.heapUsed / 1024 / 1024;
+
+    if (rssInMB > 1024 || heapUsedInMB > 512) {
+      console.warn(`[MEMORY WARNING] ${context} - High memory usage detected:`);
+      console.warn(`  RSS: ${formatBytes(memUsage.rss)}`);
+      console.warn(`  Heap Used: ${formatBytes(memUsage.heapUsed)}`);
+      console.warn(`  Heap Total: ${formatBytes(memUsage.heapTotal)}`);
+      console.warn(`  External: ${formatBytes(memUsage.external)}`);
+
+      if (global.gc) {
+        console.warn("  Forcing garbage collection...");
+        global.gc();
+
+        const afterGC = process.memoryUsage();
+        console.warn(`  After GC - RSS: ${formatBytes(afterGC.rss)}, Heap: ${formatBytes(afterGC.heapUsed)}`);
+      }
+    } else {
+      console.log(
+        `[MEMORY INFO] ${context} - RSS: ${formatBytes(memUsage.rss)}, Heap: ${formatBytes(memUsage.heapUsed)}`
+      );
+    }
+  }
+
+  static forceGarbageCollection(context: string = "Manual"): void {
+    if (global.gc) {
+      const beforeGC = process.memoryUsage();
+      global.gc();
+      const afterGC = process.memoryUsage();
+
+      const formatBytes = (bytes: number) => `${(bytes / 1024 / 1024).toFixed(2)} MB`;
+
+      console.log(`[GC] ${context} - Before: RSS ${formatBytes(beforeGC.rss)}, Heap ${formatBytes(beforeGC.heapUsed)}`);
+      console.log(`[GC] ${context} - After: RSS ${formatBytes(afterGC.rss)}, Heap ${formatBytes(afterGC.heapUsed)}`);
+
+      const rssSaved = beforeGC.rss - afterGC.rss;
+      const heapSaved = beforeGC.heapUsed - afterGC.heapUsed;
+
+      if (rssSaved > 0 || heapSaved > 0) {
+        console.log(`[GC] ${context} - Freed: RSS ${formatBytes(rssSaved)}, Heap ${formatBytes(heapSaved)}`);
+      }
+    } else {
+      console.warn(`[GC] ${context} - Garbage collection not available. Start Node.js with --expose-gc flag.`);
+    }
+  }
+
   async fileExists(objectName: string): Promise<boolean> {
     const filePath = this.getFilePath(objectName);
     try {
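Both helpers above, and the guarded global.gc() calls in the stream cleanup paths, are no-ops unless V8's manual GC hook is exposed, which requires launching Node with the --expose-gc flag. A minimal standalone check (the entry path in the comment is illustrative):

// node --expose-gc dist/server.js
const gc = (globalThis as any).gc as (() => void) | undefined;

const before = process.memoryUsage().heapUsed;
if (gc) {
  gc(); // same guarded pattern as the diff
  const after = process.memoryUsage().heapUsed;
  console.log(`heap: ${(before / 1048576).toFixed(1)} MB -> ${(after / 1048576).toFixed(1)} MB`);
} else {
  console.warn("global.gc unavailable; start Node.js with --expose-gc");
}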
@@ -321,9 +543,6 @@ export class FilesystemStorageProvider implements StorageProvider {
     this.downloadTokens.delete(token);
   }

-  /**
-   * Clean up temporary file and its parent directory if empty
-   */
   private async cleanupTempFile(tempPath: string): Promise<void> {
     try {
       await fs.unlink(tempPath);
@@ -346,9 +565,6 @@ export class FilesystemStorageProvider implements StorageProvider {
     }
   }

-  /**
-   * Clean up empty temporary directories periodically
-   */
   private async cleanupEmptyTempDirs(): Promise<void> {
     try {
       const tempUploadsDir = directoriesConfig.tempUploads;