feat: make upload chunk size configurable (#273)

Hakim Bawa
2025-10-02 22:04:52 +09:00
committed by GitHub
parent 6086d2a0ac
commit 59f9e19ffb
3 changed files with 57 additions and 4 deletions


@@ -165,6 +165,27 @@ cp .env.example .env
This creates a `.env` file with the necessary configurations for the frontend.
##### Upload Configuration
Palmr. supports configurable chunked uploading for large files. You can customize the chunk size by setting the following environment variable in your `.env` file:
```bash
NEXT_PUBLIC_UPLOAD_CHUNK_SIZE_MB=100
```
**How it works:**
- If `NEXT_PUBLIC_UPLOAD_CHUNK_SIZE_MB` is set, Palmr. uses this value (in megabytes) as both the chunking threshold and the chunk size: any upload larger than this value is split into chunks of this size.
- If not set or left empty, Palmr. automatically calculates an optimal chunk size based on file size (sketched after this list):
- Files ≤ 100MB: uploaded without chunking
- Files > 100MB and ≤ 1GB: 75MB chunks
- Files > 1GB: 150MB chunks
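
The automatic selection above can be summarized in a few lines. This is only an illustrative sketch of the documented policy, not Palmr.'s actual source; `pickChunkSize` is a hypothetical helper name.

```typescript
// Sketch of the documented automatic chunk-size policy (sizes in bytes).
const MB = 1024 * 1024;
const GB = 1024 * MB;

function pickChunkSize(fileSize: number): number | null {
  if (fileSize <= 100 * MB) return null; // uploaded in a single request
  if (fileSize <= 1 * GB) return 75 * MB; // > 100MB and <= 1GB
  return 150 * MB; // > 1GB
}
```
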
**When to configure:**
- **Default (not set):** Recommended for most use cases. Palmr. will intelligently determine the best chunk size.
- **Custom value:** Set this if you have specific network conditions or want to optimize for your infrastructure. For example, slower connections may benefit from smaller chunks such as 50MB, fast networks can handle larger chunks such as 200MB, and a proxy such as Cloudflare may cap the request body size, in which case the chunk size must stay below that limit (see the sketch after this list).
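
To get a feel for the trade-off, the snippet below (illustrative only, using a hypothetical 4GB upload) shows how the chunk size translates into the number of upload requests: smaller chunks mean more requests but smaller payloads per request, while larger chunks mean the opposite.

```typescript
// Requests needed to upload a 4GB file at various chunk sizes.
const MB = 1024 * 1024;
const fileSize = 4 * 1024 * MB; // 4GB

for (const chunkMb of [50, 100, 150, 200]) {
  const requests = Math.ceil(fileSize / (chunkMb * MB));
  console.log(`${chunkMb}MB chunks -> ${requests} requests`);
}
// 50MB -> 82, 100MB -> 41, 150MB -> 28, 200MB -> 21
```
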
#### Install dependencies
Install all the frontend dependencies:


@@ -1,2 +1,5 @@
API_BASE_URL=http://localhost:3333
NEXT_PUBLIC_DEFAULT_LANGUAGE=en-US
# Configuration options
NEXT_PUBLIC_UPLOAD_CHUNK_SIZE_MB=


@@ -18,6 +18,8 @@ export interface ChunkedUploadResult {
}
export class ChunkedUploader {
private static defaultChunkSizeInBytes = 100 * 1024 * 1024; // 100MB
/**
* Upload a file in chunks with streaming
*/
@@ -246,7 +248,7 @@ export class ChunkedUploader {
return false;
}
- const threshold = 100 * 1024 * 1024; // 100MB
+ const threshold = this.getConfiguredChunkSize() || this.defaultChunkSizeInBytes;
const shouldUse = fileSize > threshold;
return shouldUse;
@@ -256,12 +258,19 @@ export class ChunkedUploader {
* Calculate optimal chunk size based on file size
*/
static calculateOptimalChunkSize(fileSize: number): number {
- if (fileSize <= 100 * 1024 * 1024) {
+ const configuredChunkSize = this.getConfiguredChunkSize();
+ const chunkSize = configuredChunkSize || this.defaultChunkSizeInBytes;
+ if (fileSize <= chunkSize) {
throw new Error(
- `calculateOptimalChunkSize should not be called for files <= 100MB. File size: ${(fileSize / (1024 * 1024)).toFixed(2)}MB`
+ `calculateOptimalChunkSize should not be called for files <= ${(chunkSize / (1024 * 1024)).toFixed(0)}MB. File size: ${(fileSize / (1024 * 1024)).toFixed(2)}MB`
);
}
if (configuredChunkSize) {
return configuredChunkSize;
}
// For files > 1GB, use 150MB chunks
if (fileSize > 1024 * 1024 * 1024) {
return 150 * 1024 * 1024;
@@ -275,4 +284,24 @@ export class ChunkedUploader {
// For files > 100MB, use 75MB chunks (minimum for chunked upload)
return 75 * 1024 * 1024;
}
private static getConfiguredChunkSize(): number | null {
const configuredChunkSizeMb = process.env.NEXT_PUBLIC_UPLOAD_CHUNK_SIZE_MB;
if (!configuredChunkSizeMb) {
return null;
}
const parsedValue = Number(configuredChunkSizeMb);
if (Number.isNaN(parsedValue) || parsedValue <= 0) {
console.warn(
`Invalid NEXT_PUBLIC_UPLOAD_CHUNK_SIZE_MB value: ${configuredChunkSizeMb}. Falling back to optimal chunk size.`
);
return null;
}
return Math.floor(parsedValue * 1024 * 1024);
}
}
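
For context, here is a quick sketch of how the configured value feeds into `calculateOptimalChunkSize`. The import path is an assumption, and mutating `process.env` at runtime like this only works in a Node context such as a unit test; in a Next.js client bundle, `NEXT_PUBLIC_` variables are inlined at build time, so the value must be set when the frontend is built.

```typescript
// Hypothetical import path; adjust to wherever ChunkedUploader lives.
import { ChunkedUploader } from "./chunked-upload";

const MB = 1024 * 1024;
const GB = 1024 * MB;

// No override configured: a 4GB file falls into the automatic > 1GB tier.
delete process.env.NEXT_PUBLIC_UPLOAD_CHUNK_SIZE_MB;
console.log(ChunkedUploader.calculateOptimalChunkSize(4 * GB) / MB); // 150

// Override set: every chunked upload uses the configured size.
process.env.NEXT_PUBLIC_UPLOAD_CHUNK_SIZE_MB = "50";
console.log(ChunkedUploader.calculateOptimalChunkSize(4 * GB) / MB); // 50
```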