Mirror of https://github.com/DumbWareio/DumbDrop.git (synced 2025-10-23 07:41:58 +00:00)

Commit: Merge branch 'main' into security-enhancements

@@ -61,9 +61,15 @@
 const MAX_RETRIES = 3;
 const RETRY_DELAY = 1000;
 
+// Utility function to generate a unique batch ID
+function generateBatchId() {
+    return `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+}
+
 class FileUploader {
-    constructor(file) {
+    constructor(file, batchId) {
         this.file = file;
+        this.batchId = batchId;
         this.uploadId = null;
         this.position = 0;
         this.progressElement = null;
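
Note: generateBatchId() produces epoch milliseconds, a dash, and (usually) nine base-36 characters, which is the shape the server-side isValidBatchId() check added further down expects. A minimal sketch of the format (values illustrative, not part of the commit):

    const id = generateBatchId(); // e.g. "1729667000000-k3j9x2m1q"
    // Matches the server's /^\d+-[a-z0-9]{9}$/ validation, except in the rare
    // case where Math.random().toString(36) yields fewer than 9 characters.
    console.log(/^\d+-[a-z0-9]{9}$/.test(id));
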
@@ -88,7 +94,10 @@
 
         const response = await fetch('/upload/init', {
             method: 'POST',
-            headers: { 'Content-Type': 'application/json' },
+            headers: {
+                'Content-Type': 'application/json',
+                'X-Batch-ID': this.batchId
+            },
             body: JSON.stringify({
                 filename: uploadPath,
                 fileSize: this.file.size
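
Note: every file in a drop now sends the same X-Batch-ID header to /upload/init. A hedged sketch of the resulting request (file name and size are illustrative):

    await fetch('/upload/init', {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            'X-Batch-ID': '1729667000000-k3j9x2m1q' // shared by the whole drop
        },
        body: JSON.stringify({ filename: 'photos/cat.jpg', fileSize: 1048576 })
    });
    // The server answers { uploadId } on success, or 400 for a malformed batch ID.
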
@@ -207,10 +216,16 @@
                 name: folderName,
                 isFolder: true,
                 totalSize: 0,
-                files: []
+                files: [],
+                // Use the first file's batch ID or generate a new one
+                batchId: file.batchId
             });
         }
         const group = groups.get(folderName);
+        // If group doesn't have a batch ID yet, use the file's batch ID
+        if (!group.batchId) {
+            group.batchId = file.batchId;
+        }
         group.files.push(file);
         group.totalSize += file.size;
     } else {
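
Note: after grouping, a folder entry carries the batch ID of the first file seen, and later files only fill it in if it is still unset. The resulting group shape looks roughly like this (values illustrative):

    const exampleGroup = {
        name: 'photos',
        isFolder: true,
        totalSize: 2097152,           // running sum of member file sizes
        files: [/* File objects */],
        batchId: '1729667000000-k3j9x2m1q'
    };
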
@@ -219,7 +234,8 @@
             name: file.name,
             isFolder: false,
             totalSize: file.size,
-            files: [file]
+            files: [file],
+            batchId: file.batchId
         });
     }
 });
@@ -230,11 +246,13 @@
 // Helper function to process directory entries
 async function getAllFileEntries(dataTransferItems) {
     let fileEntries = [];
+    const batchId = generateBatchId();
 
     async function traverseEntry(entry, path = '') {
         if (entry.isFile) {
             const file = await new Promise((resolve) => entry.file(resolve));
             file.relativePath = path;
+            file.batchId = batchId; // Use the same batch ID for all files in this drop
             fileEntries.push(file);
         } else if (entry.isDirectory) {
             const reader = entry.createReader();
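
Note: stamping one batch ID per drop is what lets the server tell two drops of an identically named folder apart. A sketch of the consequence (server behavior shown in server.js below):

    const dropA = generateBatchId(); // first drop of "photos"
    const dropB = generateBatchId(); // second drop of "photos", different ID
    // Server side, the per-batch folder mappings stay separate:
    //   folderMappings.get(`photos-${dropA}`) → "photos"
    //   folderMappings.get(`photos-${dropB}`) → "photos (1)"
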
@@ -293,30 +311,41 @@
 function handleDrop(e) {
     const items = e.dataTransfer.items;
     if (items && items[0].webkitGetAsEntry) {
+        // Handle folder/file drop using DataTransferItemList
         getAllFileEntries(items).then(newFiles => {
             files = newFiles;
             updateFileList();
         });
     } else {
+        // Handle single file drop
+        const batchId = generateBatchId();
         files = [...e.dataTransfer.files];
+        files.forEach(file => {
+            file.relativePath = ''; // No relative path for dropped files
+            file.batchId = batchId;
+        });
         updateFileList();
     }
 }
 
 function handleFiles(e) {
+    const batchId = generateBatchId();
     files = [...e.target.files];
     files.forEach(file => {
         file.relativePath = ''; // No relative path for individual files
+        file.batchId = batchId;
     });
     updateFileList();
 }
 
 function handleFolders(e) {
+    const batchId = generateBatchId();
     files = [...e.target.files];
     files.forEach(file => {
         const pathParts = file.webkitRelativePath.split('/');
         pathParts.pop(); // Remove filename
         file.relativePath = pathParts.length > 0 ? pathParts.join('/') + '/' : '';
+        file.batchId = batchId;
     });
     updateFileList();
 }
@@ -354,11 +383,15 @@
     document.getElementById('uploadProgress').innerHTML = '';
 
     const groupedItems = groupFilesByFolder(files);
 
     const results = await Promise.all(
         groupedItems.map(async item => {
             let success = true;
+            // Use the group's batch ID for all files in the group
+            const groupBatchId = item.batchId || generateBatchId();
             for (const file of item.files) {
-                const uploader = new FileUploader(file);
+                // Always use the group's batch ID
+                const uploader = new FileUploader(file, groupBatchId);
                 if (!await uploader.start()) {
                     success = false;
                 }
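
Note: item.batchId || generateBatchId() is a defensive fallback; grouped items normally arrive with a batch ID already set, but a missing one still yields a single shared ID for the group rather than one per file. Sketch:

    const groupBatchId = item.batchId || generateBatchId();
    for (const file of item.files) {
        // One ID per group: per-file IDs would give every file its own
        // folder mapping on the server and split the folder apart.
        new FileUploader(file, groupBatchId);
    }
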

server.js | 163 lines changed

@@ -256,10 +256,105 @@ app.use('/upload', requirePin);
 
 // Store ongoing uploads
 const uploads = new Map();
+// Store folder name mappings for batch uploads with timestamps
+const folderMappings = new Map();
+// Store batch IDs for folder uploads
+const batchUploads = new Map();
+// Store batch activity timestamps
+const batchActivity = new Map();
+
+// Add cleanup interval for inactive batches
+setInterval(() => {
+    const now = Date.now();
+    for (const [batchId, lastActivity] of batchActivity.entries()) {
+        if (now - lastActivity >= 5 * 60 * 1000) { // 5 minutes of inactivity
+            // Clean up all folder mappings for this batch
+            for (const key of folderMappings.keys()) {
+                if (key.endsWith(`-${batchId}`)) {
+                    folderMappings.delete(key);
+                }
+            }
+            batchActivity.delete(batchId);
+            log.info(`Cleaned up folder mappings for inactive batch: ${batchId}`);
+        }
+    }
+}, 60000); // Check every minute
+
+// Add these helper functions before the routes
+async function getUniqueFilePath(filePath) {
+    const dir = path.dirname(filePath);
+    const ext = path.extname(filePath);
+    const baseName = path.basename(filePath, ext);
+    let counter = 1;
+    let finalPath = filePath;
+
+    while (true) {
+        try {
+            // Try to create the file exclusively - will fail if file exists
+            const fileHandle = await fs.promises.open(finalPath, 'wx');
+            // Return both the path and handle instead of closing it
+            return { path: finalPath, handle: fileHandle };
+        } catch (err) {
+            if (err.code === 'EEXIST') {
+                // File exists, try next number
+                finalPath = path.join(dir, `${baseName} (${counter})${ext}`);
+                counter++;
+            } else {
+                throw err; // Other errors should be handled by caller
+            }
+        }
+    }
+}
+
+async function getUniqueFolderPath(folderPath) {
+    let counter = 1;
+    let finalPath = folderPath;
+
+    while (true) {
+        try {
+            // Try to create the directory - mkdir with recursive:false is atomic
+            await fs.promises.mkdir(finalPath, { recursive: false });
+            return finalPath;
+        } catch (err) {
+            if (err.code === 'EEXIST') {
+                // Folder exists, try next number
+                finalPath = `${folderPath} (${counter})`;
+                counter++;
+            } else if (err.code === 'ENOENT') {
+                // Parent directory doesn't exist, create it first
+                await fs.promises.mkdir(path.dirname(finalPath), { recursive: true });
+                // Then try again with the same path
+                continue;
+            } else {
+                throw err; // Other errors should be handled by caller
+            }
+        }
+    }
+}
+
+// Validate batch ID format
+function isValidBatchId(batchId) {
+    // Batch ID should be in format: timestamp-randomstring
+    return /^\d+-[a-z0-9]{9}$/.test(batchId);
+}
+
 // Routes
 app.post('/upload/init', initUploadLimiter, async (req, res) => {
     const { filename, fileSize } = req.body;
+    let batchId = req.headers['x-batch-id'];
+
+    // For single file uploads without a batch ID, generate one
+    if (!batchId) {
+        const timestamp = Date.now();
+        const randomStr = crypto.randomBytes(4).toString('hex').substring(0, 9);
+        batchId = `${timestamp}-${randomStr}`;
+    } else if (!isValidBatchId(batchId)) {
+        log.error('Invalid batch ID format');
+        return res.status(400).json({ error: 'Invalid batch ID format' });
+    }
+
+    // Always update batch activity timestamp for any upload
+    batchActivity.set(batchId, Date.now());
+
     const safeFilename = path.normalize(filename).replace(/^(\.\.(\/|\\|$))+/, '');
 
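
Note: getUniqueFilePath() leans on fs.promises.open(path, 'wx') failing with EEXIST, so the existence check and the create are one atomic step with no race window, and the caller receives the still-open handle. A standalone usage sketch (paths illustrative):

    // Assuming "uploads/cat.jpg" already exists on disk:
    const { path: finalPath, handle } = await getUniqueFilePath('uploads/cat.jpg');
    console.log(finalPath); // "uploads/cat (1).jpg"
    const stream = handle.createWriteStream(); // write through the handle we hold
    stream.end(); // with the default autoClose, finishing the stream closes the handle
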
@@ -289,23 +384,72 @@ app.post('/upload/init', initUploadLimiter, async (req, res) => {
         });
     }
 
-    const uploadId = Date.now().toString();
-    const filePath = path.join(uploadDir, safeFilename);
+    const uploadId = crypto.randomBytes(16).toString('hex');
+    let filePath = path.join(uploadDir, safeFilename);
+    let fileHandle;
 
     try {
-        await ensureDirectoryExists(filePath);
+        // Handle file/folder duplication
+        const pathParts = safeFilename.split('/');
+
+        if (pathParts.length > 1) {
+            // This is a file within a folder
+            const originalFolderName = pathParts[0];
+            const folderPath = path.join(uploadDir, originalFolderName);
+
+            // Check if we already have a mapping for this folder in this batch
+            let newFolderName = folderMappings.get(`${originalFolderName}-${batchId}`);
+
+            if (!newFolderName) {
+                try {
+                    // Try to create the folder atomically first
+                    await fs.promises.mkdir(folderPath, { recursive: false });
+                    newFolderName = originalFolderName;
+                } catch (err) {
+                    if (err.code === 'EEXIST') {
+                        // Folder exists, get a unique name
+                        const uniqueFolderPath = await getUniqueFolderPath(folderPath);
+                        newFolderName = path.basename(uniqueFolderPath);
+                        log.info(`Folder "${originalFolderName}" exists, using "${newFolderName}" instead`);
+                    } else {
+                        throw err;
+                    }
+                }
+
+                folderMappings.set(`${originalFolderName}-${batchId}`, newFolderName);
+            }
+
+            // Replace the original folder path with the mapped one and keep original file name
+            pathParts[0] = newFolderName;
+            filePath = path.join(uploadDir, ...pathParts);
+
+            // Ensure parent directories exist
+            await fs.promises.mkdir(path.dirname(filePath), { recursive: true });
+        }
+
+        // For both single files and files in folders, get a unique path and file handle
+        const result = await getUniqueFilePath(filePath);
+        filePath = result.path;
+        fileHandle = result.handle;
+
+        // Create upload entry (using the file handle we already have)
         uploads.set(uploadId, {
-            safeFilename,
+            safeFilename: path.relative(uploadDir, filePath),
             filePath,
             fileSize,
             bytesReceived: 0,
-            writeStream: fs.createWriteStream(filePath)
+            writeStream: fileHandle.createWriteStream()
         });
+
-        log.info(`Initialized upload for ${safeFilename} (${fileSize} bytes)`);
+        log.info(`Initialized upload for ${path.relative(uploadDir, filePath)} (${fileSize} bytes)`);
         res.json({ uploadId });
     } catch (err) {
+        // Clean up file handle if something went wrong
+        if (fileHandle) {
+            await fileHandle.close().catch(() => {});
+            // Try to remove the file if it was created
+            fs.unlink(filePath).catch(() => {});
+        }
         log.error(`Failed to initialize upload: ${err.message}`);
         res.status(500).json({ error: 'Failed to initialize upload' });
     }
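
Note: the mapping key is `${originalFolderName}-${batchId}`, so the first init in a batch decides the on-disk folder name and every later file in that batch reuses it. A sketch of the lifecycle (values illustrative):

    // First init in the batch for "photos/a.jpg", when "uploads/photos" exists:
    //   mkdir fails with EEXIST → getUniqueFolderPath() → "photos (1)"
    folderMappings.set('photos-1729667000000-k3j9x2m1q', 'photos (1)');
    // Later inits ("photos/b.jpg", ...) hit the mapping and land in
    // "uploads/photos (1)" too, so the folder is never split across names.
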
@@ -324,6 +468,13 @@ app.post('/upload/chunk/:uploadId', express.raw({
     }
 
     try {
+        // Get the batch ID from the request headers
+        const batchId = req.headers['x-batch-id'];
+        if (batchId && isValidBatchId(batchId)) {
+            // Update batch activity timestamp
+            batchActivity.set(batchId, Date.now());
+        }
+
         upload.writeStream.write(Buffer.from(req.body));
         upload.bytesReceived += chunkSize;
 
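
Note: chunk requests also refresh the batch activity timestamp, so a long multi-file upload is not swept away by the 5-minute cleanup interval mid-batch. A hedged client-side sketch (the Content-Type is an assumption; the express.raw options are truncated above):

    await fetch(`/upload/chunk/${uploadId}`, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/octet-stream', // assumed raw body type
            'X-Batch-ID': batchId                       // keeps the batch alive
        },
        body: chunk // a Blob slice of the file
    });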