diff --git a/platforms/file-manager-api/package.json b/platforms/file-manager-api/package.json index 8f5c19ed..9c3f48ea 100644 --- a/platforms/file-manager-api/package.json +++ b/platforms/file-manager-api/package.json @@ -13,6 +13,7 @@ "migration:revert": "npm run typeorm migration:revert -- -d src/database/data-source.ts" }, "dependencies": { + "archiver": "^7.0.1", "axios": "^1.6.7", "cors": "^2.8.5", "dotenv": "^16.4.5", @@ -29,6 +30,7 @@ "web3-adapter": "workspace:*" }, "devDependencies": { + "@types/archiver": "^6.0.3", "@types/cors": "^2.8.17", "@types/express": "^4.17.21", "@types/jsonwebtoken": "^9.0.5", diff --git a/platforms/file-manager-api/src/controllers/FileController.ts b/platforms/file-manager-api/src/controllers/FileController.ts index 3913d4e3..77868ed1 100644 --- a/platforms/file-manager-api/src/controllers/FileController.ts +++ b/platforms/file-manager-api/src/controllers/FileController.ts @@ -1,5 +1,9 @@ import type { Request, Response } from "express"; import multer from "multer"; +import archiver from "archiver"; +import fs from "fs"; +import path from "path"; +import os from "os"; import { FileService } from "../services/FileService"; const upload = multer({ @@ -14,9 +18,15 @@ const uploadMultiple = multer({ export class FileController { private fileService: FileService; + private ZIP_TEMP_DIR = path.join(os.tmpdir(), 'file-manager-zips'); constructor() { this.fileService = new FileService(); + + // Ensure temp directory exists + if (!fs.existsSync(this.ZIP_TEMP_DIR)) { + fs.mkdirSync(this.ZIP_TEMP_DIR, { recursive: true }); + } } uploadFile = [ @@ -586,4 +596,318 @@ export class FileController { res.status(500).json({ error: "Failed to get storage usage" }); } }; + + /** + * Download multiple files as ZIP. Creates zip on disk then serves it. + * Zip is deleted after serving via finally block. 
+ */ + downloadFilesAsZip = async (req: Request, res: Response) => { + let output: fs.WriteStream | null = null; + let archive: archiver.Archiver | null = null; + let zipPath: string | null = null; + + try { + if (!req.user) { + return res + .status(401) + .json({ error: "Authentication required" }); + } + + let { files, fileIds } = req.body; + + // Handle form-encoded data where files is a JSON string + if (typeof files === 'string') { + try { + files = JSON.parse(files); + } catch { + return res.status(400).json({ error: "Invalid files JSON" }); + } + } + + // Support both formats: { files: [{id, path}] } or { fileIds: [id] } + let fileEntries: Array<{ id: string; path: string }>; + + if (Array.isArray(files) && files.length > 0) { + fileEntries = files.map((f: any) => ({ + id: typeof f === 'string' ? f : f.id, + path: (typeof f === 'object' && f.path) || '', + })); + } else if (Array.isArray(fileIds) && fileIds.length > 0) { + fileEntries = fileIds.map((id: string) => ({ id, path: '' })); + } else { + return res.status(400).json({ error: "files or fileIds array is required" }); + } + + if (fileEntries.length > 500) { + return res.status(400).json({ error: "Maximum 500 files per download" }); + } + + // Validate all file IDs are non-empty strings + for (const entry of fileEntries) { + if (!entry.id || typeof entry.id !== 'string' || entry.id.trim() === '') { + return res.status(400).json({ error: "Invalid file id in request" }); + } + } + + // Validate all files exist and user has access + const validatedFiles = await this.fileService.getFilesMetadataByIds( + fileEntries.map(f => f.id), + req.user.id + ); + + if (validatedFiles.length === 0) { + return res.status(404).json({ error: "No accessible files found" }); + } + + // Create a map of id -> metadata for quick lookup + const fileMetaMap = new Map(validatedFiles.map(f => [f.id, f])); + + // Create zip file on disk with timestamp + const timestamp = new Date().toISOString().replace(/[:.]/g, '-').slice(0, 19); + 
const zipFilename = `files-${timestamp}.zip`; + zipPath = path.join(this.ZIP_TEMP_DIR, zipFilename); + + console.log(`[ZIP] Creating zip file at: ${zipPath}`); + + output = fs.createWriteStream(zipPath); + archive = archiver('zip', { + store: true, // No compression for speed + }); + + console.log(`[ZIP] Archive and output stream initialized`); + + // Track if request was aborted + let aborted = false; + + // Handle client disconnect + req.on('close', () => { + if (!res.writableEnded) { + aborted = true; + if (archive) archive.abort(); + console.log('Download aborted by client'); + } + }); + + // Handle archive errors + archive.on('error', (err: Error) => { + if (aborted) return; + console.error('Archive error:', err); + if (!res.headersSent) { + res.status(500).json({ error: 'Failed to create archive' }); + } + }); + + // Pipe archive to file on disk + archive.pipe(output); + + // Set up promise to wait for file write completion (BEFORE finalize) + const writeComplete = new Promise((resolve, reject) => { + output!.on('finish', resolve); // 'finish' fires when all data written + output!.on('error', reject); + }); + + // Track full paths to handle duplicates + const usedPaths = new Map(); + + // Sanitize filename to prevent zip-slip attacks + const sanitizeFilename = (filename: string): string => { + if (!filename) return 'file'; + + let safe = filename; + + // Convert backslashes to forward slashes + safe = safe.replace(/\\/g, '/'); + + // Strip Windows drive letters (C:, D:, etc.) + safe = safe.replace(/^[a-zA-Z]:/, ''); + + // Strip any leading slashes or dots + safe = safe.replace(/^[\/\.]+/, ''); + + // Take only the basename (after last slash) + const lastSlash = safe.lastIndexOf('/'); + if (lastSlash !== -1) { + safe = safe.slice(lastSlash + 1); + } + + // Remove or replace dangerous characters + // Keep: alphanumeric, spaces, dots, dashes, underscores, parentheses + safe = safe.replace(/[^\w\s.\-()]/g, '_'); + + // Collapse multiple dots to prevent .. 
traversal + safe = safe.replace(/\.{2,}/g, '.'); + + // Remove leading/trailing dots and spaces + safe = safe.replace(/^[.\s]+|[.\s]+$/g, ''); + + // If empty after sanitization, use default + if (!safe) return 'file'; + + return safe; + }; + + // Sanitize path to prevent directory traversal attacks + const sanitizePath = (p: string): string => { + if (!p) return ''; + + // Normalize separators: convert backslashes to forward slashes + let normalized = p.replace(/\\/g, '/'); + + // Strip Windows drive letters (C:, D:, etc.) + normalized = normalized.replace(/^[a-zA-Z]:/, ''); + + // Strip UNC paths (//server/share or \\server\share already normalized) + normalized = normalized.replace(/^\/\/[^/]*\/[^/]*/, ''); + + // Strip any leading slashes + normalized = normalized.replace(/^\/+/, ''); + + // Split into segments and resolve . and .. + const segments = normalized.split('/'); + const resolved: string[] = []; + let escapedRoot = false; + + for (const segment of segments) { + // Skip empty segments and current directory references + if (segment === '' || segment === '.') { + continue; + } + + if (segment === '..') { + // Pop parent directory if possible + if (resolved.length > 0) { + resolved.pop(); + } else { + // Attempted to escape root - mark as invalid + escapedRoot = true; + } + } else { + // Regular segment - add it + resolved.push(segment); + } + } + + // If any attempt to escape root was detected, return empty string + if (escapedRoot) { + return ''; + } + + return resolved.join('/'); + }; + + // Stream each file into the archive one at a time + for (const entry of fileEntries) { + // Stop processing if client disconnected + if (aborted) break; + + const fileMeta = fileMetaMap.get(entry.id); + if (!fileMeta) continue; // User doesn't have access + + try { + const fileData = await this.fileService.getFileDataStream(entry.id, req.user.id); + + if (fileData) { + const sanitizedPath = sanitizePath(entry.path); + const baseName = sanitizeFilename(fileData.name); 
+ + // Build full path in zip + let fullPath = sanitizedPath ? `${sanitizedPath}/${baseName}` : baseName; + + // Handle duplicate paths by appending a number + const count = usedPaths.get(fullPath) || 0; + if (count > 0) { + const ext = baseName.lastIndexOf('.'); + let uniqueName: string; + if (ext > 0) { + uniqueName = `${baseName.slice(0, ext)} (${count})${baseName.slice(ext)}`; + } else { + uniqueName = `${baseName} (${count})`; + } + fullPath = sanitizedPath ? `${sanitizedPath}/${uniqueName}` : uniqueName; + } + usedPaths.set(sanitizedPath ? `${sanitizedPath}/${baseName}` : baseName, count + 1); + + // Append stream to archive + archive.append(fileData.stream, { name: fullPath }); + } + } catch (fileError) { + console.error(`Error adding file ${entry.id} to archive:`, fileError); + // Continue with other files + } + } + + // Finalize the archive (this is when the stream ends) + if (!aborted && output && archive) { + console.log(`[ZIP] Finalizing archive...`); + await archive.finalize(); + console.log(`[ZIP] Archive finalized, waiting for disk write...`); + + // Wait for file to be completely written to disk + await writeComplete; + console.log(`[ZIP] Disk write complete!`); + + // Send the file + console.log(`[ZIP] Starting to stream file: ${zipPath}, size: ${fs.statSync(zipPath).size} bytes`); + + res.setHeader('Content-Type', 'application/zip'); + res.setHeader('Content-Disposition', `attachment; filename="${zipFilename}"`); + res.setHeader('Content-Length', fs.statSync(zipPath).size.toString()); + + const fileStream = fs.createReadStream(zipPath); + + // Wait for the stream to finish BEFORE exiting try block (so finally doesn't delete file mid-stream) + await new Promise<void>((resolve, reject) => { + fileStream.on('end', () => { + console.log(`[ZIP] Finished streaming file: ${zipPath}`); + resolve(); + }); + fileStream.on('error', (err) => { + console.error('Error streaming zip file:', err); + if (!res.headersSent) { + res.status(500).json({ error: 'Failed to 
send zip file' }); + } + reject(err); + }); + + fileStream.pipe(res); + }); + } + + } catch (error) { + console.error("Error creating zip download:", error); + if (!res.headersSent) { + res.status(500).json({ error: "Failed to create zip download" }); + } + } finally { + console.log(`[ZIP] Cleanup starting for: ${zipPath}`); + + // Always cleanup resources + if (archive) { + try { + archive.abort(); + } catch (e) { + // Ignore abort errors (may already be finalized) + } + } + if (output) { + try { + output.close(); + } catch (e) { + // Ignore close errors + } + } + // Delete zip file - no longer needed after serving + if (zipPath && fs.existsSync(zipPath)) { + try { + console.log(`[ZIP] Deleting temp file: ${zipPath}`); + fs.unlinkSync(zipPath); + console.log(`[ZIP] Successfully deleted: ${zipPath}`); + } catch (e) { + console.error('[ZIP] Error deleting temp zip:', e); + } + } else { + console.log(`[ZIP] File already gone or path not set: ${zipPath}`); + } + } + }; } diff --git a/platforms/file-manager-api/src/index.ts b/platforms/file-manager-api/src/index.ts index e1a3372a..9b3993f3 100644 --- a/platforms/file-manager-api/src/index.ts +++ b/platforms/file-manager-api/src/index.ts @@ -86,6 +86,7 @@ app.use(authMiddleware); // File routes app.post("/api/files", authGuard, fileController.uploadFile); +app.post("/api/files/download-zip", authGuard, fileController.downloadFilesAsZip); app.get("/api/files", authGuard, fileController.getFiles); app.get("/api/files/:id", authGuard, fileController.getFile); app.get("/api/files/:id/download", authGuard, fileController.downloadFile); diff --git a/platforms/file-manager-api/src/services/FileService.ts b/platforms/file-manager-api/src/services/FileService.ts index 31fa6bb4..8ba0e401 100644 --- a/platforms/file-manager-api/src/services/FileService.ts +++ b/platforms/file-manager-api/src/services/FileService.ts @@ -6,6 +6,7 @@ import { FolderAccess } from "../database/entities/FolderAccess"; import { SignatureContainer } from 
"../database/entities/SignatureContainer"; import { In, IsNull, Not } from "typeorm"; import crypto from "crypto"; +import { Readable } from "stream"; /** Soft-delete marker: file is hidden and syncs to eSigner so they can hide it too (no delete webhook). */ export const SOFT_DELETED_FILE_NAME = "[[deleted]]"; @@ -358,5 +359,92 @@ export class FileService { return { used, limit, fileCount, folderCount }; } + + /** + * Get file metadata without the data blob (for bulk operations) + */ + async getFileMetadataById(id: string, userId: string): Promise<Omit<File, "data"> | null> { + const file = await this.fileRepository.findOne({ + where: { id }, + select: ["id", "name", "displayName", "mimeType", "size", "md5Hash", "ownerId", "folderId", "createdAt", "updatedAt"], + }); + + if (!file || file.name === SOFT_DELETED_FILE_NAME) { + return null; + } + + // Check access: owner or has access permission + if (file.ownerId === userId) { + return file; + } + + // Check for direct file access + const access = await this.fileAccessRepository.findOne({ + where: { fileId: id, userId }, + }); + + if (access) { + return file; + } + + // Check if user has access via parent folder (if file is in a folder) + if (file.folderId) { + const hasAccessViaParent = await this.hasAccessViaParentFolder(file.folderId, userId); + if (hasAccessViaParent) { + return file; + } + } + + return null; + } + + /** + * Stream file data as a Readable stream (fetches data blob separately) + * This avoids loading the entire file into memory at once for the caller, + * though the DB query still loads it. For true streaming, you'd need + * PostgreSQL large objects or file system storage. 
+ */ + async getFileDataStream(id: string, userId: string): Promise<{ stream: Readable; size: number; name: string; mimeType: string } | null> { + // First verify access with metadata only + const metadata = await this.getFileMetadataById(id, userId); + if (!metadata) { + return null; + } + + // Fetch only the data column + const result = await this.fileRepository + .createQueryBuilder('file') + .select('file.data') + .where('file.id = :id', { id }) + .getRawOne(); + + if (!result || !result.file_data) { + return null; + } + + // Convert Buffer to Readable stream + const stream = Readable.from(result.file_data); + + return { + stream, + size: Number(metadata.size), + name: metadata.displayName || metadata.name, + mimeType: metadata.mimeType, + }; + } + + /** + * Get multiple files' metadata by IDs (for bulk download validation) + */ + async getFilesMetadataByIds(ids: string[], userId: string): Promise<Array<Omit<File, "data">>> { + const files: Array<Omit<File, "data">> = []; + for (const id of ids) { + const file = await this.getFileMetadataById(id, userId); + if (file) { + files.push(file); + } + } + return files; + } } diff --git a/platforms/file-manager/package.json b/platforms/file-manager/package.json index 224fc217..a875e684 100644 --- a/platforms/file-manager/package.json +++ b/platforms/file-manager/package.json @@ -38,7 +38,6 @@ "dependencies": { "@sveltejs/adapter-node": "^5.2.12", "axios": "^1.6.7", - "jszip": "^3.10.1", "svelte-qrcode": "^1.0.1", "svelte-qrcode-action": "^1.0.2", "tailwind-merge": "^3.0.2" diff --git a/platforms/file-manager/src/routes/(protected)/files/+page.svelte b/platforms/file-manager/src/routes/(protected)/files/+page.svelte index 4a22aa70..531cc73d 100644 --- a/platforms/file-manager/src/routes/(protected)/files/+page.svelte +++ b/platforms/file-manager/src/routes/(protected)/files/+page.svelte @@ -29,7 +29,7 @@ } from "$lib/stores/folders"; import { toast } from "$lib/stores/toast"; import { apiClient } from "$lib/utils/axios"; - import JSZip from "jszip"; + // JSZip 
removed - using server-side zip generation import { onMount } from "svelte"; import { get } from "svelte/store"; @@ -141,34 +141,8 @@ let downloadUrl = $state(null); // Multi-file selection for download - const DOWNLOAD_BATCH_SIZE = 3; // Number of concurrent downloads let selectedFileIds = $state>(new Set()); - // Download modal state - let showDownloadModal = $state(false); - let downloadProgress = $state<{ - currentFile: string; - currentFileIndex: number; - totalFiles: number; - fileProgress: number; // 0-100 for current file - overallProgress: number; // 0-100 for all files - status: "preparing" | "downloading" | "zipping" | "complete" | "error"; - errorMessage?: string; - downloadedFiles: Array<{ - name: string; - size: number; - status: "done" | "downloading" | "pending" | "error"; - errorMessage?: string; - }>; - }>({ - currentFile: "", - currentFileIndex: 0, - totalFiles: 0, - fileProgress: 0, - overallProgress: 0, - status: "preparing", - downloadedFiles: [], - }); let breadcrumbs = $state>([ { id: null, name: "My Files" }, ]); @@ -1047,18 +1021,6 @@ return result; } - function resetDownloadProgress() { - downloadProgress = { - currentFile: "", - currentFileIndex: 0, - totalFiles: 0, - fileProgress: 0, - overallProgress: 0, - status: "preparing", - downloadedFiles: [], - }; - } - async function downloadSelectedFiles() { if (selectedFileIds.size === 0) return; @@ -1095,59 +1057,35 @@ return; } - // Show download modal - showDownloadModal = true; - resetDownloadProgress(); - - downloadProgress = { - ...downloadProgress, - status: "preparing", - currentFile: "Gathering files...", - downloadedFiles: [], - }; + // For multiple files/folders, use server-side zip with native browser download + toast.info("Preparing download..."); try { // Build the list of all files to download with their paths - // Structure: { file, path } where path is the directory path in the zip - const allFilesToDownload: Array<{ file: any; path: string; displayName: string }> = []; + 
const allFilesToDownload: Array<{ file: any; path: string }> = []; // Add directly selected files (at root level) for (const file of selectedFiles) { - allFilesToDownload.push({ - file, - path: "", - displayName: file.displayName || file.name, - }); + allFilesToDownload.push({ file, path: "" }); } // Process selected folders - gather all files recursively - // Use a shared visited set across all selected folders to detect cross-folder cycles const visitedFolders = new Set(); for (const folder of selectedFolders) { - downloadProgress = { - ...downloadProgress, - currentFile: `Scanning folder: ${folder.name}...`, - }; - try { const folderFiles = await getAllFilesFromFolder( folder.id, folder.name, "", - 100, // maxDepth - 0, // currentDepth + 100, + 0, visitedFolders ); for (const { file, path } of folderFiles) { - allFilesToDownload.push({ - file, - path, - displayName: file.displayName || file.name, - }); + allFilesToDownload.push({ file, path }); } } catch (folderError) { - // Re-throw with additional context about which top-level folder failed const errorMessage = folderError instanceof Error ? folderError.message : String(folderError); @@ -1155,297 +1093,44 @@ } } - // Guard against empty result (folders might be empty) if (allFilesToDownload.length === 0) { toast.info("No files found in selected items"); - showDownloadModal = false; - resetDownloadProgress(); clearSelection(); return; } - // Update progress with actual file count - downloadProgress = { - ...downloadProgress, - totalFiles: allFilesToDownload.length, - status: "downloading", - downloadedFiles: allFilesToDownload.map(({ displayName, path }) => ({ - name: path ? 
`${path}/${displayName}` : displayName, - size: 0, // Size not known for folder files until downloaded - status: "pending" as const, - })), - }; - - const zip = new JSZip(); - const downloadedBlobs: Array<{ name: string; path: string; blob: Blob }> = []; - - // Download files in batches - for ( - let i = 0; - i < allFilesToDownload.length; - i += DOWNLOAD_BATCH_SIZE - ) { - const batch = allFilesToDownload.slice(i, i + DOWNLOAD_BATCH_SIZE); - - // Mark batch files as downloading - downloadProgress = { - ...downloadProgress, - downloadedFiles: downloadProgress.downloadedFiles.map( - (f, idx) => ({ - ...f, - status: - idx >= i && idx < i + batch.length - ? ("downloading" as const) - : f.status, - }), - ), - }; - - // Download batch in parallel - const batchPromises = batch.map(async ({ file, path, displayName }, batchIdx) => { - const globalIdx = i + batchIdx; - const url = `${API_BASE_URL}/api/files/${file.id}/download?token=${token || ""}`; - - const response = await fetch(url); - if (!response.ok) { - throw new Error(`HTTP ${response.status}: Failed to download`); - } - - const blob = await response.blob(); - return { name: displayName, path, blob, globalIdx }; - }); - - const settledResults = await Promise.allSettled(batchPromises); - - // Process each settled result - for (let batchIdx = 0; batchIdx < settledResults.length; batchIdx++) { - const result = settledResults[batchIdx]; - const globalIdx = i + batchIdx; - const { displayName, path } = batch[batchIdx]; - const fullName = path ? 
`${path}/${displayName}` : displayName; - - if (result.status === "fulfilled") { - // Success - add to downloadedBlobs and mark as done - downloadedBlobs.push({ - name: result.value.name, - path: result.value.path, - blob: result.value.blob - }); - - downloadProgress = { - ...downloadProgress, - currentFile: fullName, - currentFileIndex: globalIdx + 1, - overallProgress: Math.round( - ((globalIdx + 1) / allFilesToDownload.length) * 80, - ), // 80% for downloads, 20% for zipping - downloadedFiles: - downloadProgress.downloadedFiles.map( - (f, idx) => - idx === globalIdx - ? { ...f, status: "done" as const } - : f, - ), - }; - } else { - // Failed - log error and mark as error - const errorMessage = result.reason instanceof Error - ? result.reason.message - : "Unknown error"; - console.error(`Error downloading ${fullName}:`, result.reason); - - downloadProgress = { - ...downloadProgress, - currentFile: fullName, - currentFileIndex: globalIdx + 1, - overallProgress: Math.round( - ((globalIdx + 1) / allFilesToDownload.length) * 80, - ), - downloadedFiles: - downloadProgress.downloadedFiles.map( - (f, idx) => - idx === globalIdx - ? 
{ ...f, status: "error" as const, errorMessage } - : f, - ), - }; - } - } - } - - // Count successful and failed downloads - const failedCount = downloadProgress.downloadedFiles.filter( - (f) => f.status === "error" - ).length; - const successCount = downloadedBlobs.length; - - // Check if all downloads failed - if (successCount === 0) { - downloadProgress = { - ...downloadProgress, - status: "error", - overallProgress: 100, - errorMessage: `All ${failedCount} file(s) failed to download`, - }; - return; - } - - // Zipping phase - downloadProgress = { - ...downloadProgress, - status: "zipping", - currentFile: "Creating zip file...", - overallProgress: 85, - }; - - // Add all successfully downloaded files to zip with unique filenames - // Track used full paths to handle collisions - const usedPaths = new Set(); - - /** - * Sanitize a filename to prevent path traversal and invalid paths. - */ - function sanitizeFilename(rawName: string): string { - let name = rawName; - name = name.replace(/^[a-zA-Z]:/, ''); - name = name.replace(/\\/g, '/'); - name = name.replace(/\.\./g, ''); - const lastSlashIndex = name.lastIndexOf('/'); - if (lastSlashIndex !== -1) { - name = name.slice(lastSlashIndex + 1); - } - name = name.replace(/[/\\]/g, ''); - name = name.replace(/^[.\s]+/, ''); - name = name.trim(); - // biome-ignore lint/suspicious/noControlCharactersInRegex: Intentional removal of control chars - name = name.replace(/[\x00-\x1f\x7f]/g, ''); - if (!name) { - name = 'file'; - } - return name; - } - - /** - * Sanitize a folder path component - */ - function sanitizePath(rawPath: string): string { - if (!rawPath) return ""; - // Split path, sanitize each component, rejoin - const parts = rawPath.split('/').filter(Boolean); - const sanitizedParts = parts.map(part => { - let p = part; - p = p.replace(/\.\./g, ''); - p = p.replace(/[\\:*?"<>|]/g, ''); - p = p.replace(/^[.\s]+/, ''); - p = p.trim(); - return p || 'folder'; - }); - return sanitizedParts.join('/'); - } - - 
function getUniqueFilePath(originalPath: string, originalName: string): string { - const sanitizedPath = sanitizePath(originalPath); - const sanitizedName = sanitizeFilename(originalName); - const fullPath = sanitizedPath ? `${sanitizedPath}/${sanitizedName}` : sanitizedName; - - if (!usedPaths.has(fullPath)) { - usedPaths.add(fullPath); - return fullPath; - } - - // Split name into base and extension - const lastDotIndex = sanitizedName.lastIndexOf('.'); - const hasExtension = lastDotIndex > 0 && lastDotIndex < sanitizedName.length - 1; - const baseName = hasExtension ? sanitizedName.slice(0, lastDotIndex) : sanitizedName; - const extension = hasExtension ? sanitizedName.slice(lastDotIndex) : ''; - - // Find a unique name by incrementing suffix - let counter = 1; - let uniqueName = `${baseName} (${counter})${extension}`; - let uniqueFullPath = sanitizedPath ? `${sanitizedPath}/${uniqueName}` : uniqueName; - while (usedPaths.has(uniqueFullPath)) { - counter++; - uniqueName = `${baseName} (${counter})${extension}`; - uniqueFullPath = sanitizedPath ? `${sanitizedPath}/${uniqueName}` : uniqueName; - } - - usedPaths.add(uniqueFullPath); - return uniqueFullPath; - } - - for (const { name, path, blob } of downloadedBlobs) { - const uniqueFullPath = getUniqueFilePath(path, name); - zip.file(uniqueFullPath, blob); - } - - downloadProgress = { - ...downloadProgress, - overallProgress: 95, - }; - - // Generate zip file - const zipBlob = await zip.generateAsync({ - type: "blob", - compression: "DEFLATE", - compressionOptions: { level: 1 }, - }); - - downloadProgress = { - ...downloadProgress, - status: "complete", - overallProgress: 100, - currentFile: failedCount > 0 - ? 
`Download ready (${failedCount} file(s) failed)` - : "Download ready!", - }; - - // Trigger download of the zip file - const zipUrl = URL.createObjectURL(zipBlob); - const link = document.createElement("a"); - link.href = zipUrl; - const timestamp = new Date().toISOString().slice(0, 10); - link.download = `files-${timestamp}.zip`; - link.style.display = "none"; - document.body.appendChild(link); - link.click(); - document.body.removeChild(link); - - // Delay revoking the object URL to ensure the download starts - // Some browsers may cancel the download if revoked immediately - setTimeout(() => { - URL.revokeObjectURL(zipUrl); - }, 500); - - // Close modal after a short delay (longer if there were failures) - setTimeout(() => { - showDownloadModal = false; - resetDownloadProgress(); - clearSelection(); - }, failedCount > 0 ? 3000 : 1500); + // Build payload and trigger server-side zip download via hidden form + const filesPayload = allFilesToDownload.map(({ file, path }) => ({ + id: file.id, + path: path, + })); - // Show appropriate toast message - if (failedCount > 0) { - toast.success(`Downloaded ${successCount} of ${allFilesToDownload.length} files as zip (${failedCount} failed)`); - } else { - toast.success(`Downloaded ${successCount} files as zip`); - } + // Create a hidden form to POST and trigger native browser download + const form = document.createElement("form"); + form.method = "POST"; + // Pass token in query param so auth middleware picks it up + form.action = `${API_BASE_URL}/api/files/download-zip?token=${encodeURIComponent(token || "")}`; + form.style.display = "none"; + + // Add files payload as hidden field (JSON string) + const filesInput = document.createElement("input"); + filesInput.type = "hidden"; + filesInput.name = "files"; + filesInput.value = JSON.stringify(filesPayload); + form.appendChild(filesInput); + + document.body.appendChild(form); + form.submit(); + document.body.removeChild(form); + + toast.success(`Downloading 
${allFilesToDownload.length} files as zip`); + clearSelection(); } catch (error) { console.error("Download error:", error); - downloadProgress = { - ...downloadProgress, - status: "error", - errorMessage: - error instanceof Error - ? error.message - : "Failed to download files", - }; + toast.error(error instanceof Error ? error.message : "Failed to prepare download"); + clearSelection(); } } - - function cancelDownload() { - showDownloadModal = false; - resetDownloadProgress(); - }
@@ -2640,286 +2325,3 @@
{/if} - - -{#if showDownloadModal} -
{ - if ( - e.target === e.currentTarget && - downloadProgress.status !== "downloading" && - downloadProgress.status !== "zipping" - ) { - cancelDownload(); - } - }} - > -
e.stopPropagation()} - > - -
-

- {#if downloadProgress.status === "preparing"} - Preparing Download... - {:else if downloadProgress.status === "downloading"} - Downloading Files - {:else if downloadProgress.status === "zipping"} - Creating Zip File - {:else if downloadProgress.status === "complete"} - Download Complete! - {:else if downloadProgress.status === "error"} - Download Failed - {/if} -

- {#if downloadProgress.status !== "downloading" && downloadProgress.status !== "zipping"} - - {/if} -
- - -
- {#if downloadProgress.status === "error"} -
- - - - {downloadProgress.errorMessage} -
- {:else if downloadProgress.status === "complete"} - {@const hasFailures = downloadProgress.downloadedFiles.some(f => f.status === "error")} -
- - {#if hasFailures} - - {:else} - - {/if} - - {downloadProgress.currentFile} -
- {:else} -

- {#if downloadProgress.status === "downloading"} - Downloading file {downloadProgress.currentFileIndex} of - {downloadProgress.totalFiles} - {:else if downloadProgress.status === "zipping"} - Compressing {downloadProgress.totalFiles} files into a - zip... - {:else} - Please wait while we prepare your download... - {/if} -

- {#if downloadProgress.currentFile && downloadProgress.status === "downloading"} -

- Current: {downloadProgress.currentFile} -

- {/if} - {/if} -
- - -
-
- Overall Progress - {downloadProgress.overallProgress}% -
-
-
-
-
- - - {#if downloadProgress.downloadedFiles.length > 0} -
-
- {#each downloadProgress.downloadedFiles as file, idx} -
- -
- {#if file.status === "done"} - - - - {:else if file.status === "downloading"} -
- {:else if file.status === "error"} - - - - {:else} -
- {/if} -
- -
- - {file.name} - - {#if file.status === "error" && file.errorMessage} - - {file.errorMessage} - - {/if} -
- - - {formatFileSize(file.size)} - -
- {/each} -
-
- {/if} - - - {#if downloadProgress.status === "downloading" || downloadProgress.status === "zipping"} -
-
- - - -

- Please don't close this window or navigate away - while the download is in progress. -

-
-
- {/if} - - -
- {#if downloadProgress.status === "error"} - - - {:else if downloadProgress.status === "complete"} - - {/if} -
-
-
-{/if} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 261d08a0..9073f325 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -2377,9 +2377,6 @@ importers: axios: specifier: ^1.6.7 version: 1.13.2 - jszip: - specifier: ^3.10.1 - version: 3.10.1 svelte-qrcode: specifier: ^1.0.1 version: 1.0.1 @@ -2453,6 +2450,9 @@ importers: platforms/file-manager-api: dependencies: + archiver: + specifier: ^7.0.1 + version: 7.0.1 axios: specifier: ^1.6.7 version: 1.13.2 @@ -2496,6 +2496,9 @@ importers: specifier: workspace:* version: link:../../infrastructure/web3-adapter devDependencies: + '@types/archiver': + specifier: ^6.0.3 + version: 6.0.4 '@types/cors': specifier: ^2.8.17 version: 2.8.19 @@ -8770,6 +8773,9 @@ packages: '@tybys/wasm-util@0.10.1': resolution: {integrity: sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==} + '@types/archiver@6.0.4': + resolution: {integrity: sha512-ULdQpARQ3sz9WH4nb98mJDYA0ft2A8C4f4fovvUcFwINa1cgGjY36JCAYuP5YypRq4mco1lJp1/7jEMS2oR0Hg==} + '@types/aria-query@5.0.4': resolution: {integrity: sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==} @@ -9111,6 +9117,9 @@ packages: '@types/react@18.3.27': resolution: {integrity: sha512-cisd7gxkzjBKU2GgdYrTdtQx1SORymWyaAFhaxQPK9bYO9ot3Y5OikQRvY0VYQtvwjeQnizCINJAenh/V7MK2w==} + '@types/readdir-glob@1.1.5': + resolution: {integrity: sha512-raiuEPUYqXu+nvtY2Pe8s8FEmZ3x5yAH4VkLdihcPdalvsHltomrRC9BzuStrJ9yk06470hS0Crw0f1pXqD+Hg==} + '@types/request@2.48.13': resolution: {integrity: sha512-FGJ6udDNUCjd19pp0Q3iTiDkwhYup7J8hpMW9c4k53NrccQFFWKRho6hvtPPEhnXWKvukfwAlB6DbDz4yhH5Gg==} @@ -13120,9 +13129,6 @@ packages: engines: {node: '>=16.x'} hasBin: true - immediate@3.0.6: - resolution: {integrity: sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==} - immutable@3.7.6: resolution: {integrity: 
sha512-AizQPcaofEtO11RZhPPHBOJRdo/20MKQF9mBLnVkBoyHi1/zXK8fzVdnEpSV9gxqtnh6Qomfp3F0xT5qP/vThw==} engines: {node: '>=0.8.0'} @@ -13916,9 +13922,6 @@ packages: resolution: {integrity: sha512-ZZow9HBI5O6EPgSJLUb8n2NKgmVWTwCvHGwFuJlMjvLFqlGG6pjirPhtdsseaLZjSibD8eegzmYpUZwoIlj2cQ==} engines: {node: '>=4.0'} - jszip@3.10.1: - resolution: {integrity: sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g==} - jwa@1.4.2: resolution: {integrity: sha512-eeH5JO+21J78qMvTIDdBXidBd6nG2kZjg5Ohz/1fpa28Z4CcsWUzJ1ZZyFq/3z3N17aZy+ZuBoHljASbL1WfOw==} @@ -14003,9 +14006,6 @@ packages: resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} engines: {node: '>= 0.8.0'} - lie@3.3.0: - resolution: {integrity: sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ==} - light-my-request@5.14.0: resolution: {integrity: sha512-aORPWntbpH5esaYpGOOmri0OHDOe3wC5M2MQxZ9dvMLZm6DnaAn0kJlcbU9hwsQgLzmZyReKwFwwPkR+nHu5kA==} @@ -26145,6 +26145,10 @@ snapshots: tslib: 2.8.1 optional: true + '@types/archiver@6.0.4': + dependencies: + '@types/readdir-glob': 1.1.5 + '@types/aria-query@5.0.4': {} '@types/babel__core@7.20.5': @@ -26555,6 +26559,10 @@ snapshots: '@types/prop-types': 15.7.15 csstype: 3.2.3 + '@types/readdir-glob@1.1.5': + dependencies: + '@types/node': 20.19.26 + '@types/request@2.48.13': dependencies: '@types/caseless': 0.12.5 @@ -31930,8 +31938,6 @@ snapshots: image-size@2.0.2: {} - immediate@3.0.6: {} - immutable@3.7.6: {} immutable@5.1.4: {} @@ -33213,13 +33219,6 @@ snapshots: object.assign: 4.1.7 object.values: 1.2.1 - jszip@3.10.1: - dependencies: - lie: 3.3.0 - pako: 1.0.11 - readable-stream: 2.3.8 - setimmediate: 1.0.5 - jwa@1.4.2: dependencies: buffer-equal-constant-time: 1.0.1 @@ -33316,10 +33315,6 @@ snapshots: prelude-ls: 1.2.1 type-check: 0.4.0 - lie@3.3.0: - dependencies: - immediate: 3.0.6 - light-my-request@5.14.0: dependencies: cookie: 
0.7.2