fix: file upload failure for utf8 names over 100MiB (#290)

Author: Copilot
Committed by: GitHub
Date: 2025-10-20 10:50:37 -03:00
Commit: 39dc94b7f8 (parent ab5ea156a3)
2 changed files with 16 additions and 3 deletions

@@ -122,13 +122,22 @@ export class FilesystemController {
 const totalChunks = request.headers["x-total-chunks"] as string;
 const chunkSize = request.headers["x-chunk-size"] as string;
 const totalSize = request.headers["x-total-size"] as string;
-const fileName = request.headers["x-file-name"] as string;
+const encodedFileName = request.headers["x-file-name"] as string;
 const isLastChunk = request.headers["x-is-last-chunk"] as string;
-if (!fileId || !chunkIndex || !totalChunks || !chunkSize || !totalSize || !fileName) {
+if (!fileId || !chunkIndex || !totalChunks || !chunkSize || !totalSize || !encodedFileName) {
   return null;
 }
+// Decode the base64-encoded filename to handle UTF-8 characters
+let fileName: string;
+try {
+  fileName = decodeURIComponent(escape(Buffer.from(encodedFileName, "base64").toString("binary")));
+} catch (error) {
+  // Fallback to the encoded value if decoding fails (for backward compatibility)
+  fileName = encodedFileName;
+}
 const metadata = {
   fileId,
   chunkIndex: parseInt(chunkIndex, 10),

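For reference, a minimal sketch of the decode step the controller performs after this change, assuming a Node.js runtime; the standalone decodeFileNameHeader helper and the sample header value are illustrative only, not part of the actual controller.

// Hypothetical helper mirroring the controller's new decode logic (Node.js).
function decodeFileNameHeader(encodedFileName: string): string {
  try {
    // base64 -> latin1 string -> percent-escape each byte -> decode the bytes as UTF-8
    return decodeURIComponent(escape(Buffer.from(encodedFileName, "base64").toString("binary")));
  } catch {
    // Older clients may still send the raw filename; fall back to it unchanged.
    return encodedFileName;
  }
}

console.log(decodeFileNameHeader("csOpc3Vtw6kucGRm")); // "résumé.pdf" (that name UTF-8-encoded, then base64-encoded)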

@@ -157,6 +157,10 @@ export class ChunkedUploader {
 url: string;
 signal?: AbortSignal;
 }): Promise<any> {
+// Encode filename as base64 to handle UTF-8 characters in HTTP headers
+// This prevents errors when setting headers with non-ASCII characters
+const encodedFileName = btoa(unescape(encodeURIComponent(fileName)));
 const headers = {
   "Content-Type": "application/octet-stream",
   "X-File-Id": fileId,
@@ -164,7 +168,7 @@ export class ChunkedUploader {
   "X-Total-Chunks": totalChunks.toString(),
   "X-Chunk-Size": chunkSize.toString(),
   "X-Total-Size": totalSize.toString(),
-  "X-File-Name": fileName,
+  "X-File-Name": encodedFileName,
   "X-Is-Last-Chunk": isLastChunk.toString(),
 };
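
A matching sketch of the client-side encode step, assuming an environment where btoa and unescape are globals (browsers, recent Node.js); the encodeFileNameHeader helper name and the sample filename are illustrative only.

// Hypothetical helper mirroring the uploader's new encode step.
function encodeFileNameHeader(fileName: string): string {
  // UTF-8 percent-encode, collapse the %XX escapes into single latin1 characters, then base64.
  return btoa(unescape(encodeURIComponent(fileName)));
}

// The header value is now plain ASCII, so setting it no longer throws for non-ASCII
// names, and the server-side decode shown earlier recovers the original string.
console.log(encodeFileNameHeader("файл.txt")); // "0YTQsNC50LsudHh0"

The escape/unescape pair is deprecated; on the server, Buffer.from(encodedFileName, "base64").toString("utf8") would decode the same value directly, and TextEncoder/TextDecoder offer a non-deprecated way to get at the UTF-8 bytes on the client.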