diff --git a/package.json b/package.json index 31f6fd04..5a4c4c57 100644 --- a/package.json +++ b/package.json @@ -57,6 +57,7 @@ "node-fetch": "2.7.0", "openpgp": "5.11.2", "pm2": "5.4.3", + "range-parser": "^1.2.1", "reflect-metadata": "0.2.2", "selfsigned": "2.4.1", "sequelize": "6.37.5", @@ -75,6 +76,7 @@ "@types/mime-types": "2.1.4", "@types/node": "22.10.2", "@types/node-fetch": "2.6.12", + "@types/range-parser": "^1.2.7", "@vitest/coverage-istanbul": "2.1.8", "@vitest/spy": "2.1.8", "eslint": "9.17.0", diff --git a/src/commands/download-file.ts b/src/commands/download-file.ts index 5f0180b8..a4d06ba0 100644 --- a/src/commands/download-file.ts +++ b/src/commands/download-file.ts @@ -80,6 +80,7 @@ export default class DownloadFile extends Command { user.mnemonic, driveFile.fileId, StreamUtils.writeStreamToWritableStream(fileWriteStream), + undefined, { abortController: new AbortController(), progressCallback: (progress) => { diff --git a/src/services/crypto.service.ts b/src/services/crypto.service.ts index 91e0d1e2..ad5829cb 100644 --- a/src/services/crypto.service.ts +++ b/src/services/crypto.service.ts @@ -1,6 +1,6 @@ import { CryptoProvider } from '@internxt/sdk'; import { Keys, Password } from '@internxt/sdk/dist/auth'; -import { createCipheriv, createDecipheriv, createHash, pbkdf2Sync, randomBytes } from 'node:crypto'; +import { createCipheriv, createDecipheriv, createHash, Decipher, pbkdf2Sync, randomBytes } from 'node:crypto'; import { Transform } from 'node:stream'; import { KeysService } from './keys.service'; import { ConfigService } from '../services/config.service'; @@ -116,8 +116,29 @@ export class CryptoService { return Buffer.concat([decipher.update(contentsToDecrypt), decipher.final()]).toString('utf8'); }; - public async decryptStream(inputSlices: ReadableStream[], key: Buffer, iv: Buffer) { - const decipher = createDecipheriv('aes-256-ctr', key, iv); + public async decryptStream( + inputSlices: ReadableStream[], + key: Buffer, + iv: Buffer, + startOffsetByte?: number, + ) { + let decipher: Decipher; + if (startOffsetByte) { + const aesBlockSize = 16; + const startOffset = startOffsetByte % aesBlockSize; + const startBlockFirstByte = startOffsetByte - startOffset; + const startBlockNumber = startBlockFirstByte / aesBlockSize; + + const ivForRange = (BigInt('0x' + iv.toString('hex')) + BigInt(startBlockNumber)).toString(16).padStart(32, '0'); + const newIv = Buffer.from(ivForRange, 'hex'); + + const skipBuffer = Buffer.alloc(startOffset, 0); + + decipher = createDecipheriv('aes-256-ctr', key, newIv); + decipher.update(skipBuffer); + } else { + decipher = createDecipheriv('aes-256-ctr', key, iv); + } const encryptedStream = StreamUtils.joinReadableBinaryStreams(inputSlices); let keepReading = true; diff --git a/src/services/network/download.service.ts b/src/services/network/download.service.ts index dde74946..1907e30a 100644 --- a/src/services/network/download.service.ts +++ b/src/services/network/download.service.ts @@ -5,7 +5,11 @@ export class DownloadService { async downloadFile( url: string, - options: { progressCallback?: (progress: number) => void; abortController?: AbortController }, + options: { + progressCallback?: (progress: number) => void; + abortController?: AbortController; + rangeHeader?: string; + }, ): Promise> { const response = await axios.get(url, { responseType: 'stream', @@ -16,6 +20,9 @@ export class DownloadService { options.progressCallback(reportedProgress); } }, + headers: { + range: options.rangeHeader, + }, }); const readable = new 
ReadableStream({ diff --git a/src/services/network/network-facade.service.ts b/src/services/network/network-facade.service.ts index bce2e742..339523f5 100644 --- a/src/services/network/network-facade.service.ts +++ b/src/services/network/network-facade.service.ts @@ -17,6 +17,7 @@ import { DownloadService } from './download.service'; import { ValidationService } from '../validation.service'; import { HashStream } from '../../utils/hash.utils'; import { ProgressTransform } from '../../utils/stream.utils'; +import { RangeOptions } from '../../utils/network.utils'; export class NetworkFacade { private readonly cryptoLib: Network.Crypto; @@ -54,6 +55,7 @@ export class NetworkFacade { mnemonic: string, fileId: string, to: WritableStream, + rangeOptions?: RangeOptions, options?: DownloadOptions, ): Promise<[Promise, AbortController]> { const encryptedContentStreams: ReadableStream[] = []; @@ -70,16 +72,25 @@ export class NetworkFacade { }; const decryptFile: DecryptFileFunction = async (_, key, iv) => { + let startOffsetByte; + if (rangeOptions) { + startOffsetByte = rangeOptions.parsed.start; + } fileStream = await this.cryptoService.decryptStream( encryptedContentStreams, Buffer.from(key as ArrayBuffer), Buffer.from(iv as ArrayBuffer), + startOffsetByte, ); await fileStream.pipeTo(to); }; const downloadFile: DownloadFileFunction = async (downloadables) => { + if (rangeOptions && downloadables.length > 1) { + throw new Error('Multi-Part Download with Range-Requests is not implemented'); + } + for (const downloadable of downloadables) { if (abortable.signal.aborted) { throw new Error('Download aborted'); @@ -88,6 +99,7 @@ export class NetworkFacade { const encryptedContentStream = await this.downloadService.downloadFile(downloadable.url, { progressCallback: onDownloadProgress, abortController: options?.abortController, + rangeHeader: rangeOptions?.range, }); encryptedContentStreams.push(encryptedContentStream); diff --git a/src/utils/errors.utils.ts b/src/utils/errors.utils.ts index 1c56a471..15e2226f 100644 --- a/src/utils/errors.utils.ts +++ b/src/utils/errors.utils.ts @@ -50,6 +50,16 @@ export class UnsupportedMediaTypeError extends Error { } } +export class MethodNotAllowed extends Error { + public statusCode = 405; + + constructor(message: string) { + super(message); + this.name = 'MethodNotAllowed'; + Object.setPrototypeOf(this, MethodNotAllowed.prototype); + } +} + export class NotImplementedError extends Error { public statusCode = 501; diff --git a/src/utils/logger.utils.ts b/src/utils/logger.utils.ts index ec5ead91..78f249a7 100644 --- a/src/utils/logger.utils.ts +++ b/src/utils/logger.utils.ts @@ -6,7 +6,7 @@ const maxLogsFiles = 5; export const logger = winston.createLogger({ level: 'info', - format: winston.format.json(), + format: winston.format.combine(winston.format.timestamp(), winston.format.json()), defaultMeta: { service: 'internxt-cli' }, transports: [ new winston.transports.File({ @@ -29,7 +29,7 @@ export const logger = winston.createLogger({ export const webdavLogger = winston.createLogger({ level: 'info', - format: winston.format.json(), + format: winston.format.combine(winston.format.timestamp(), winston.format.json()), defaultMeta: { service: 'internxt-webdav' }, transports: [ new winston.transports.File({ diff --git a/src/utils/network.utils.ts b/src/utils/network.utils.ts index 698adc16..c0237fb4 100644 --- a/src/utils/network.utils.ts +++ b/src/utils/network.utils.ts @@ -3,6 +3,7 @@ import { createHash, X509Certificate } from 'node:crypto'; import { readFile, stat, 
writeFile } from 'node:fs/promises'; import path from 'node:path'; import selfsigned from 'selfsigned'; +import parseRange from 'range-parser'; import { ConfigService } from '../services/config.service'; export class NetworkUtils { @@ -74,4 +75,41 @@ export class NetworkUtils { const pems = selfsigned.generate(attrs, { days: 365, algorithm: 'sha256', keySize: 2048, extensions }); return pems; } + + static parseRangeHeader(rangeOptions: { range?: string; totalFileSize: number }): RangeOptions | undefined { + if (!rangeOptions.range) { + return; + } + const parsed = parseRange(rangeOptions.totalFileSize, rangeOptions.range); + if (Array.isArray(parsed)) { + if (parsed.length > 1) { + throw new Error(`Multi Range-Requests functionality is not implemented. ${JSON.stringify(rangeOptions)}`); + } else if (parsed.length <= 0) { + throw new Error(`Empty Range-Request. ${JSON.stringify(rangeOptions)}`); + } else if (parsed.type !== 'bytes') { + throw new Error(`Unknown Range-Request type "${parsed.type}". ${JSON.stringify(rangeOptions)}`); + } else { + const rangeSize = parsed[0].end - parsed[0].start + 1; + return { + range: rangeOptions.range, + rangeSize: rangeSize, + totalFileSize: rangeOptions.totalFileSize, + parsed: parsed[0], + }; + } + } else if (parsed === -1) { + throw new Error(`Unsatisfiable Range-Request. ${JSON.stringify(rangeOptions)}`); + } else if (parsed === -2) { + throw new Error(`Malformed Range-Request. ${JSON.stringify(rangeOptions)}`); + } else { + throw new Error(`Unknown error from Range-Request. ${JSON.stringify(rangeOptions)}`); + } + } +} + +export interface RangeOptions { + range: string; + rangeSize: number; + totalFileSize: number; + parsed: parseRange.Range; } diff --git a/src/webdav/handlers/DELETE.handler.ts b/src/webdav/handlers/DELETE.handler.ts index 611c0a5d..92dac932 100644 --- a/src/webdav/handlers/DELETE.handler.ts +++ b/src/webdav/handlers/DELETE.handler.ts @@ -19,9 +19,8 @@ export class DELETERequestHandler implements WebDavMethodHandler { handle = async (req: Request, res: Response) => { const { driveDatabaseManager, driveFileService, driveFolderService, trashService } = this.dependencies; - webdavLogger.info('DELETE request received'); const resource = await WebDavUtils.getRequestedResource(req); - webdavLogger.info('Resource received for DELETE request', { resource }); + webdavLogger.info(`[DELETE] Request received for ${resource.type} at ${resource.url}`); const driveItem = await WebDavUtils.getAndSearchItemFromResource({ resource, @@ -30,7 +29,7 @@ export class DELETERequestHandler implements WebDavMethodHandler { driveFileService: driveFileService, }); - webdavLogger.info(`Trashing ${resource.type} with UUID ${driveItem.uuid}...`); + webdavLogger.info(`[DELETE] [${driveItem.uuid}] Trashing ${resource.type}`); await trashService.trashItems({ items: [{ type: resource.type, uuid: driveItem.uuid }], }); @@ -42,5 +41,7 @@ export class DELETERequestHandler implements WebDavMethodHandler { } res.status(204).send(); + const type = resource.type.charAt(0).toUpperCase() + resource.type.substring(1); + webdavLogger.info(`[DELETE] [${driveItem.uuid}] ${type} trashed successfully`); }; } diff --git a/src/webdav/handlers/GET.handler.ts b/src/webdav/handlers/GET.handler.ts index 82adaf32..9b72dceb 100644 --- a/src/webdav/handlers/GET.handler.ts +++ b/src/webdav/handlers/GET.handler.ts @@ -4,20 +4,19 @@ import { WebDavUtils } from '../../utils/webdav.utils'; import { DriveFileService } from '../../services/drive/drive-file.service'; import { DriveDatabaseManager 
} from '../../services/database/drive-database-manager.service'; import { NetworkFacade } from '../../services/network/network-facade.service'; -import { UploadService } from '../../services/network/upload.service'; import { DownloadService } from '../../services/network/download.service'; import { CryptoService } from '../../services/crypto.service'; import { AuthService } from '../../services/auth.service'; -import { NotFoundError, NotImplementedError } from '../../utils/errors.utils'; +import { NotFoundError } from '../../utils/errors.utils'; import { webdavLogger } from '../../utils/logger.utils'; import { DriveFileItem } from '../../types/drive.types'; +import { NetworkUtils } from '../../utils/network.utils'; export class GETRequestHandler implements WebDavMethodHandler { constructor( private readonly dependencies: { driveFileService: DriveFileService; driveDatabaseManager: DriveDatabaseManager; - uploadService: UploadService; downloadService: DownloadService; cryptoService: CryptoService; authService: AuthService; @@ -29,24 +28,20 @@ export class GETRequestHandler implements WebDavMethodHandler { const { driveDatabaseManager, driveFileService, authService, networkFacade } = this.dependencies; const resource = await WebDavUtils.getRequestedResource(req); - if (req.headers['content-range'] || req.headers['range']) - throw new NotImplementedError('Range requests not supported'); if (resource.name.startsWith('._')) throw new NotFoundError('File not found'); + if (resource.type === 'folder') throw new NotFoundError('Folders cannot be listed with GET. Use PROPFIND instead.'); - webdavLogger.info(`GET request received for file at ${resource.url}`); + webdavLogger.info(`[GET] Request received for ${resource.type} at ${resource.url}`); const driveFile = (await WebDavUtils.getAndSearchItemFromResource({ resource, driveDatabaseManager, driveFileService, })) as DriveFileItem; - webdavLogger.info(`✅ Found Drive File with uuid ${driveFile.uuid}`); - - res.set('Content-Type', 'application/octet-stream'); - res.set('Content-length', driveFile.size.toString()); + webdavLogger.info(`[GET] [${driveFile.uuid}] Found Drive File`); const { user } = await authService.getAuthDetails(); - webdavLogger.info('✅ Network ready for download'); + webdavLogger.info(`[GET] [${driveFile.uuid}] Network ready for download`); const writable = new WritableStream({ write(chunk) { @@ -57,28 +52,32 @@ export class GETRequestHandler implements WebDavMethodHandler { }, }); - let lastLoggedProgress = 0; + const range = req.headers['range']; + const rangeOptions = NetworkUtils.parseRangeHeader({ + range, + totalFileSize: driveFile.size, + }); + let contentLength = driveFile.size; + if (rangeOptions) { + webdavLogger.info(`[GET] [${driveFile.uuid}] Range request received:`, { rangeOptions }); + contentLength = rangeOptions.rangeSize; + } + + res.header('Content-Type', 'application/octet-stream'); + res.header('Content-length', contentLength.toString()); + const [executeDownload] = await networkFacade.downloadToStream( driveFile.bucket, user.mnemonic, driveFile.fileId, writable, - { - progressCallback: (progress) => { - const percentage = Math.floor(100 * progress); - - if (percentage >= lastLoggedProgress + 1) { - lastLoggedProgress = percentage; - webdavLogger.info(`Download progress for file ${resource.name}: ${percentage}%`); - } - }, - }, + rangeOptions, ); - webdavLogger.info('✅ Download prepared, executing...'); + webdavLogger.info(`[GET] [${driveFile.uuid}] Download prepared, executing...`); res.status(200); await 
executeDownload; - webdavLogger.info('✅ Download ready, replying to client'); + webdavLogger.info(`[GET] [${driveFile.uuid}] ✅ Download ready, replying to client`); }; } diff --git a/src/webdav/handlers/HEAD.handler.ts b/src/webdav/handlers/HEAD.handler.ts index 185d8e86..cf6d4562 100644 --- a/src/webdav/handlers/HEAD.handler.ts +++ b/src/webdav/handlers/HEAD.handler.ts @@ -1,9 +1,51 @@ -import { WebDavMethodHandler } from '../../types/webdav.types'; import { Request, Response } from 'express'; +import { WebDavMethodHandler } from '../../types/webdav.types'; +import { WebDavUtils } from '../../utils/webdav.utils'; +import { webdavLogger } from '../../utils/logger.utils'; +import { DriveFileService } from '../../services/drive/drive-file.service'; +import { DriveDatabaseManager } from '../../services/database/drive-database-manager.service'; +import { DriveFileItem } from '../../types/drive.types'; +import { NetworkUtils } from '../../utils/network.utils'; export class HEADRequestHandler implements WebDavMethodHandler { - handle = async (_: Request, res: Response) => { - // This is a NOOP request handler, clients like CyberDuck uses this. - res.status(405).send(); + constructor( + private readonly dependencies: { + driveFileService: DriveFileService; + driveDatabaseManager: DriveDatabaseManager; + }, + ) {} + + handle = async (req: Request, res: Response) => { + const { driveDatabaseManager, driveFileService } = this.dependencies; + const resource = await WebDavUtils.getRequestedResource(req); + + if (resource.type === 'folder') { + res.status(200).send(); + return; + } + + webdavLogger.info(`[HEAD] Request received for ${resource.type} at ${resource.url}`); + const driveFile = (await WebDavUtils.getAndSearchItemFromResource({ + resource, + driveDatabaseManager, + driveFileService, + })) as DriveFileItem; + + webdavLogger.info(`[HEAD] [${driveFile.uuid}] Found Drive File`); + + const range = req.headers['range']; + const rangeOptions = NetworkUtils.parseRangeHeader({ + range, + totalFileSize: driveFile.size, + }); + let contentLength = driveFile.size; + if (rangeOptions) { + webdavLogger.info(`[HEAD] [${driveFile.uuid}] Range request received:`, { rangeOptions }); + contentLength = rangeOptions.rangeSize; + } + + res.header('Content-Type', 'application/octet-stream'); + res.header('Content-length', contentLength.toString()); + res.status(200).send(); }; } diff --git a/src/webdav/handlers/MKCOL.handler.ts b/src/webdav/handlers/MKCOL.handler.ts index d3e56104..d847299d 100644 --- a/src/webdav/handlers/MKCOL.handler.ts +++ b/src/webdav/handlers/MKCOL.handler.ts @@ -7,6 +7,7 @@ import { webdavLogger } from '../../utils/logger.utils'; import { XMLUtils } from '../../utils/xml.utils'; import { AsyncUtils } from '../../utils/async.utils'; import { DriveFolderItem } from '../../types/drive.types'; +import { MethodNotAllowed } from '../../utils/errors.utils'; export class MKCOLRequestHandler implements WebDavMethodHandler { constructor( @@ -19,7 +20,10 @@ export class MKCOLRequestHandler implements WebDavMethodHandler { handle = async (req: Request, res: Response) => { const { driveDatabaseManager, driveFolderService } = this.dependencies; const resource = await WebDavUtils.getRequestedResource(req); - webdavLogger.info('Resource received for MKCOL request', { resource }); + + if (resource.type === 'file') throw new MethodNotAllowed('Files cannot be created with MKCOL. 
Use PUT instead.'); + + webdavLogger.info(`[MKCOL] Request received for ${resource.type} at ${resource.url}`); const parentResource = await WebDavUtils.getRequestedResource(resource.parentPath); @@ -36,7 +40,7 @@ export class MKCOLRequestHandler implements WebDavMethodHandler { const newFolder = await createFolder; - webdavLogger.info(`✅ Folder created with UUID ${newFolder.uuid}`); + webdavLogger.info(`[MKCOL] ✅ Folder created with UUID ${newFolder.uuid}`); await driveDatabaseManager.createFolder( { diff --git a/src/webdav/handlers/MOVE.handler.ts b/src/webdav/handlers/MOVE.handler.ts index 50441f96..83768a8f 100644 --- a/src/webdav/handlers/MOVE.handler.ts +++ b/src/webdav/handlers/MOVE.handler.ts @@ -21,11 +21,11 @@ export class MOVERequestHandler implements WebDavMethodHandler { const { driveDatabaseManager, driveFolderService, driveFileService } = this.dependencies; const resource = await WebDavUtils.getRequestedResource(req); - webdavLogger.info('[MOVE] Resource found', { resource }); + webdavLogger.info(`[MOVE] Request received for ${resource.type} at ${resource.url}`); const destinationUrl = req.header('destination'); if (!destinationUrl) { - throw new NotFoundError('Destination folder not received'); + throw new NotFoundError('[MOVE] Destination folder not received'); } const destinationPath = WebDavUtils.removeHostFromURL(destinationUrl); const destinationResource = await WebDavUtils.getRequestedResource(destinationPath); diff --git a/src/webdav/handlers/OPTIONS.handler.ts b/src/webdav/handlers/OPTIONS.handler.ts index bf90ff95..d6aea67d 100644 --- a/src/webdav/handlers/OPTIONS.handler.ts +++ b/src/webdav/handlers/OPTIONS.handler.ts @@ -1,10 +1,39 @@ import { WebDavMethodHandler } from '../../types/webdav.types'; import { Request, Response } from 'express'; +import { WebDavUtils } from '../../utils/webdav.utils'; +import { webdavLogger } from '../../utils/logger.utils'; export class OPTIONSRequestHandler implements WebDavMethodHandler { - handle = async (_: Request, res: Response) => { - res.header('Allow', 'OPTIONS, GET, HEAD, POST, PUT, DELETE, PROPFIND, PROPPATCH, MKCOL, COPY, MOVE, LOCK, UNLOCK'); - res.header('DAV', '1, 2, ordered-collections'); - res.status(200).send(); + handle = async (req: Request, res: Response) => { + const resource = await WebDavUtils.getRequestedResource(req); + + webdavLogger.info(`[OPTIONS] Request received for ${resource.type} at ${resource.url}`); + + if (resource.url === '/' || resource.url === '') { + const allowedMethods = 'DELETE, GET, HEAD, MKCOL, MOVE, OPTIONS, PROPFIND, PUT'; + webdavLogger.info(`[OPTIONS] Returning Allowed Options: ${allowedMethods}`); + res.header('Allow', 'DELETE, GET, HEAD, MKCOL, MOVE, OPTIONS, PROPFIND, PUT'); + res.header('DAV', '1, 2, ordered-collections'); + res.status(200).send(); + return; + } + + if (resource.type === 'folder') { + const allowedMethods = 'DELETE, HEAD, MKCOL, MOVE, OPTIONS, PROPFIND'; + webdavLogger.info(`[OPTIONS] Returning Allowed Options: ${allowedMethods}`); + res.header('Allow', allowedMethods); + res.header('DAV', '1, 2, ordered-collections'); + res.status(200).send(); + return; + } + + if (resource.type === 'file') { + const allowedMethods = 'DELETE, GET, HEAD, MOVE, OPTIONS, PROPFIND, PUT'; + webdavLogger.info(`[OPTIONS] Returning Allowed Options: ${allowedMethods}`); + res.header('Allow', allowedMethods); + res.header('DAV', '1, 2, ordered-collections'); + res.status(200).send(); + return; + } }; } diff --git a/src/webdav/handlers/PROPFIND.handler.ts 
b/src/webdav/handlers/PROPFIND.handler.ts index e63f0251..7a8a9e19 100644 --- a/src/webdav/handlers/PROPFIND.handler.ts +++ b/src/webdav/handlers/PROPFIND.handler.ts @@ -25,7 +25,7 @@ export class PROPFINDRequestHandler implements WebDavMethodHandler { const { driveDatabaseManager, driveFolderService, driveFileService } = this.dependencies; const resource = await WebDavUtils.getRequestedResource(req); - webdavLogger.info('[PROPFIND] Request received', { resource }); + webdavLogger.info(`[PROPFIND] Request received for ${resource.type} at ${resource.url}`); const driveItem = await WebDavUtils.getAndSearchItemFromResource({ resource, diff --git a/src/webdav/handlers/PUT.handler.ts b/src/webdav/handlers/PUT.handler.ts index 377a42e3..3e9ae149 100644 --- a/src/webdav/handlers/PUT.handler.ts +++ b/src/webdav/handlers/PUT.handler.ts @@ -3,7 +3,7 @@ import { DriveFileService } from '../../services/drive/drive-file.service'; import { NetworkFacade } from '../../services/network/network-facade.service'; import { AuthService } from '../../services/auth.service'; import { WebDavMethodHandler } from '../../types/webdav.types'; -import { UnsupportedMediaTypeError } from '../../utils/errors.utils'; +import { NotFoundError, UnsupportedMediaTypeError } from '../../utils/errors.utils'; import { WebDavUtils } from '../../utils/webdav.utils'; import { webdavLogger } from '../../utils/logger.utils'; import { DriveDatabaseManager } from '../../services/database/drive-database-manager.service'; @@ -33,7 +33,12 @@ export class PUTRequestHandler implements WebDavMethodHandler { } const resource = await WebDavUtils.getRequestedResource(req); - webdavLogger.info(`PUT request received for uploading file '${resource.name}' to '${resource.parentPath}'`); + + if (resource.type === 'folder') throw new NotFoundError('Folders cannot be created with PUT. 
Use MKCOL instead.'); + + webdavLogger.info(`[PUT] Request received for ${resource.type} at ${resource.url}`); + webdavLogger.info(`[PUT] Uploading '${resource.name}' to '${resource.parentPath}'`); + const parentResource = await WebDavUtils.getRequestedResource(resource.parentPath); const parentFolderItem = (await WebDavUtils.getAndSearchItemFromResource({ @@ -51,7 +56,7 @@ export class PUTRequestHandler implements WebDavMethodHandler { driveFileService, })) as DriveFileItem; if (driveFileItem && driveFileItem.status === 'EXISTS') { - webdavLogger.info(`File '${resource.name}' already exists in '${resource.path.dir}', trashing it before PUT`); + webdavLogger.info(`[PUT] File '${resource.name}' already exists in '${resource.path.dir}', trashing it...`); await driveDatabaseManager.deleteFileById(driveFileItem.id); await trashService.trashItems({ items: [{ type: resource.type, uuid: driveFileItem.uuid }], @@ -70,14 +75,14 @@ export class PUTRequestHandler implements WebDavMethodHandler { if (percentage >= lastLoggedProgress + 1) { lastLoggedProgress = percentage; - webdavLogger.info(`Upload progress for file ${resource.name}: ${percentage}%`); + webdavLogger.info(`[PUT] Upload progress for file ${resource.name}: ${percentage}%`); } }, }); const uploadResult = await uploadPromise; - webdavLogger.info('✅ File uploaded to network'); + webdavLogger.info('[PUT] ✅ File uploaded to network'); const file = await DriveFileService.instance.createFile({ plain_name: resource.path.name, @@ -90,7 +95,7 @@ export class PUTRequestHandler implements WebDavMethodHandler { name: '', }); - webdavLogger.info('✅ File uploaded to internxt drive'); + webdavLogger.info('[PUT] ✅ File uploaded to internxt drive'); await driveDatabaseManager.createFile(file, resource.path.dir + '/'); diff --git a/src/webdav/webdav-server.ts b/src/webdav/webdav-server.ts index 75e00be8..af8e1714 100644 --- a/src/webdav/webdav-server.ts +++ b/src/webdav/webdav-server.ts @@ -69,14 +69,21 @@ export class WebDavServer { private readonly registerHandlers = async () => { const networkFacade = await this.getNetworkFacade(); - this.app.head('*', asyncHandler(new HEADRequestHandler().handle)); + this.app.head( + '*', + asyncHandler( + new HEADRequestHandler({ + driveFileService: this.driveFileService, + driveDatabaseManager: this.driveDatabaseManager, + }).handle, + ), + ); this.app.get( '*', asyncHandler( new GETRequestHandler({ driveFileService: this.driveFileService, driveDatabaseManager: this.driveDatabaseManager, - uploadService: this.uploadService, downloadService: this.downloadService, cryptoService: this.cryptoService, authService: this.authService, diff --git a/test/services/network/network-facade.service.test.ts b/test/services/network/network-facade.service.test.ts index 1d18ba22..7998bbc6 100644 --- a/test/services/network/network-facade.service.test.ts +++ b/test/services/network/network-facade.service.test.ts @@ -224,6 +224,7 @@ describe('Network Facade Service', () => { 'index course habit soon assist dragon tragic helmet salute stuff later twice consider grit pulse cement obvious trick sponsor stereo hello win royal more', 'f1858bc9675f9e4f7ab29429', writable, + undefined, options, ); diff --git a/test/utils/network.utils.test.ts b/test/utils/network.utils.test.ts index f4aa4ec8..9b053217 100644 --- a/test/utils/network.utils.test.ts +++ b/test/utils/network.utils.test.ts @@ -1,9 +1,10 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'; -import { randomBytes, X509Certificate } from 'node:crypto'; +import { 
randomBytes, randomInt, X509Certificate } from 'node:crypto'; import selfsigned, { GenerateResult } from 'selfsigned'; import { readFile, stat, writeFile } from 'node:fs/promises'; import { NetworkUtils } from '../../src/utils/network.utils'; import { Stats } from 'node:fs'; +import { fail } from 'node:assert'; vi.mock('node:fs/promises', async () => { const actual = await vi.importActual('node:fs/promises'); @@ -142,4 +143,60 @@ describe('Network utils', () => { expect(mockWriteFile).toHaveBeenCalledTimes(2); expect(mockReadFile).toHaveBeenCalledTimes(2); }); + + it('When parsing a valid range, it should return the parsed range data', () => { + const mockSize = randomInt(500, 10000); + const rangeStart = randomInt(0, 450); + const range = `bytes=${rangeStart}-${mockSize}`; + + const result = NetworkUtils.parseRangeHeader({ range, totalFileSize: mockSize }); + + expect(result).to.deep.equal({ + range, + rangeSize: mockSize - rangeStart, + totalFileSize: mockSize, + parsed: { start: rangeStart, end: mockSize - 1 }, + }); + }); + + it('When parsing an invalid range, it should throw the corresponding error', () => { + const totalFileSize = randomInt(500, 10000); + + expect(NetworkUtils.parseRangeHeader({ range: undefined, totalFileSize })).to.be.equal(undefined); + + try { + NetworkUtils.parseRangeHeader({ range: 'range', totalFileSize }); + fail('Expected function to throw an error, but it did not.'); + } catch (error) { + expect((error as Error).message).to.contain('Malformed Range-Request.'); + } + + try { + NetworkUtils.parseRangeHeader({ range: 'whatever-range', totalFileSize }); + fail('Expected function to throw an error, but it did not.'); + } catch (error) { + expect((error as Error).message).to.contain('Malformed Range-Request.'); + } + + try { + NetworkUtils.parseRangeHeader({ range: 'bytes=', totalFileSize }); + fail('Expected function to throw an error, but it did not.'); + } catch (error) { + expect((error as Error).message).to.contain('Unsatisfiable Range-Request.'); + } + + try { + NetworkUtils.parseRangeHeader({ range: 'megabytes=50-55', totalFileSize }); + fail('Expected function to throw an error, but it did not.'); + } catch (error) { + expect((error as Error).message).to.contain('Unknown Range-Request type '); + } + + try { + NetworkUtils.parseRangeHeader({ range: 'bytes=50-55,0-10,5-10,56-60', totalFileSize }); + fail('Expected function to throw an error, but it did not.'); + } catch (error) { + expect((error as Error).message).to.contain('Multi Range-Requests functionality is not implemented.'); + } + }); }); diff --git a/test/webdav/handlers/GET.handler.test.ts b/test/webdav/handlers/GET.handler.test.ts index 7a824e2d..2be73414 100644 --- a/test/webdav/handlers/GET.handler.test.ts +++ b/test/webdav/handlers/GET.handler.test.ts @@ -12,7 +12,7 @@ import { CryptoService } from '../../../src/services/crypto.service'; import { DownloadService } from '../../../src/services/network/download.service'; import { UploadService } from '../../../src/services/network/upload.service'; import { AuthService } from '../../../src/services/auth.service'; -import { NotFoundError, NotImplementedError } from '../../../src/utils/errors.utils'; +import { NotFoundError } from '../../../src/utils/errors.utils'; import { SdkManager } from '../../../src/services/sdk-manager.service'; import { NetworkFacade } from '../../../src/services/network/network-facade.service'; import { WebDavUtils } from '../../../src/utils/webdav.utils'; @@ -20,6 +20,8 @@ import { WebDavRequestedResource } from '../../../src/types/webdav.types'; 
import { newFileItem } from '../../fixtures/drive.fixture'; import { LoginCredentials } from '../../../src/types/command.types'; import { UserCredentialsFixture } from '../../fixtures/login.fixture'; +import { randomInt } from 'node:crypto'; +import { NetworkUtils } from '../../../src/utils/network.utils'; describe('GET request handler', () => { const getNetworkMock = () => { @@ -33,54 +35,63 @@ describe('GET request handler', () => { vi.restoreAllMocks(); }); - it('When the request contains a content-range header, then it should throw a NotImplementedError', async () => { - const networkFacade = new NetworkFacade( - getNetworkMock(), - UploadService.instance, - DownloadService.instance, - CryptoService.instance, - ); - const sut = new GETRequestHandler({ + it('When the Drive file is not found, then it should throw a NotFoundError', async () => { + const driveDatabaseManager = getDriveDatabaseManager(); + const downloadService = DownloadService.instance; + const uploadService = UploadService.instance; + const cryptoService = CryptoService.instance; + const networkFacade = new NetworkFacade(getNetworkMock(), uploadService, downloadService, cryptoService); + const requestHandler = new GETRequestHandler({ driveFileService: DriveFileService.instance, - uploadService: UploadService.instance, - downloadService: DownloadService.instance, - driveDatabaseManager: getDriveDatabaseManager(), + downloadService, + driveDatabaseManager, authService: AuthService.instance, - cryptoService: CryptoService.instance, + cryptoService, networkFacade, }); + const requestedFileResource: WebDavRequestedResource = getRequestedFileResource(); + const request = createWebDavRequestFixture({ method: 'GET', - url: '/file.txt', - headers: { - 'content-range': 'bytes 0-100/200', - }, + url: requestedFileResource.url, + headers: {}, }); const response = createWebDavResponseFixture({ status: vi.fn().mockReturnValue({ send: vi.fn() }), }); + const expectedError = new NotFoundError(`Resource not found on Internxt Drive at ${requestedFileResource.url}`); + + const getRequestedResourceStub = vi + .spyOn(WebDavUtils, 'getRequestedResource') + .mockResolvedValue(requestedFileResource); + const getAndSearchItemFromResourceStub = vi + .spyOn(WebDavUtils, 'getAndSearchItemFromResource') + .mockRejectedValue(expectedError); + try { - await sut.handle(request, response); + await requestHandler.handle(request, response); fail('Expected function to throw an error, but it did not.'); } catch (error) { - expect(error).to.be.instanceOf(NotImplementedError); + expect(error).to.be.instanceOf(NotFoundError); } + expect(getRequestedResourceStub).toHaveBeenCalledOnce(); + expect(getAndSearchItemFromResourceStub).toHaveBeenCalledOnce(); }); - it('When the Drive file is not found, then it should throw a NotFoundError', async () => { + it('When file is requested, then it should write a response with the content', async () => { const driveDatabaseManager = getDriveDatabaseManager(); const downloadService = DownloadService.instance; const uploadService = UploadService.instance; const cryptoService = CryptoService.instance; + const authService = AuthService.instance; const networkFacade = new NetworkFacade(getNetworkMock(), uploadService, downloadService, cryptoService); const requestHandler = new GETRequestHandler({ driveFileService: DriveFileService.instance, - uploadService, downloadService, driveDatabaseManager, - authService: AuthService.instance, + authService, cryptoService, networkFacade, }); @@ -94,28 +105,41 @@ describe('GET request 
handler', () => { }); const response = createWebDavResponseFixture({ status: vi.fn().mockReturnValue({ send: vi.fn() }), + header: vi.fn(), }); - const expectedError = new NotFoundError(`Resource not found on Internxt Drive at ${requestedFileResource.url}`); + const mockFile = newFileItem(); + const mockAuthDetails: LoginCredentials = UserCredentialsFixture; const getRequestedResourceStub = vi .spyOn(WebDavUtils, 'getRequestedResource') .mockResolvedValue(requestedFileResource); const getAndSearchItemFromResourceStub = vi .spyOn(WebDavUtils, 'getAndSearchItemFromResource') - .mockRejectedValue(expectedError); + .mockResolvedValue(mockFile); + const authDetailsStub = vi.spyOn(authService, 'getAuthDetails').mockResolvedValue(mockAuthDetails); + const downloadStreamStub = vi + .spyOn(networkFacade, 'downloadToStream') + .mockResolvedValue([Promise.resolve(), new AbortController()]); - try { - await requestHandler.handle(request, response); - fail('Expected function to throw an error, but it did not.'); - } catch (error) { - expect(error).to.be.instanceOf(NotFoundError); - } + await requestHandler.handle(request, response); + + expect(response.status).toHaveBeenCalledWith(200); + expect(response.header).toHaveBeenCalledWith('Content-length', mockFile.size.toString()); + expect(response.header).toHaveBeenCalledWith('Content-Type', 'application/octet-stream'); expect(getRequestedResourceStub).toHaveBeenCalledOnce(); expect(getAndSearchItemFromResourceStub).toHaveBeenCalledOnce(); + expect(authDetailsStub).toHaveBeenCalledOnce(); + expect(downloadStreamStub).toHaveBeenCalledWith( + mockFile.bucket, + mockAuthDetails.user.mnemonic, + mockFile.fileId, + expect.any(Object), + undefined, + ); }); - it('When the Drive file is found, then it should write a response with the content', async () => { + it('When file is requested with Range, then it should write a response with the ranged content', async () => { const driveDatabaseManager = getDriveDatabaseManager(); const downloadService = DownloadService.instance; const uploadService = UploadService.instance; @@ -124,7 +148,6 @@ describe('GET request handler', () => { const networkFacade = new NetworkFacade(getNetworkMock(), uploadService, downloadService, cryptoService); const requestHandler = new GETRequestHandler({ driveFileService: DriveFileService.instance, - uploadService, downloadService, driveDatabaseManager, authService, @@ -134,16 +157,29 @@ describe('GET request handler', () => { const requestedFileResource: WebDavRequestedResource = getRequestedFileResource(); + const mockSize = randomInt(500, 10000); + const mockFile = newFileItem({ size: mockSize }); + const rangeStart = randomInt(0, 450); + + const range = `bytes=${rangeStart}-${mockSize}`; + + const expectedRangeOptions = NetworkUtils.parseRangeHeader({ + range, + totalFileSize: mockFile.size, + }); + const request = createWebDavRequestFixture({ method: 'GET', url: requestedFileResource.url, - headers: {}, + headers: { + range, + }, }); const response = createWebDavResponseFixture({ status: vi.fn().mockReturnValue({ send: vi.fn() }), + header: vi.fn(), }); - const mockFile = newFileItem(); const mockAuthDetails: LoginCredentials = UserCredentialsFixture; const getRequestedResourceStub = vi @@ -158,7 +194,10 @@ describe('GET request handler', () => { .mockResolvedValue([Promise.resolve(), new AbortController()]); await requestHandler.handle(request, response); + expect(response.status).toHaveBeenCalledWith(200); + expect(response.header).toHaveBeenCalledWith('Content-length', (mockSize - 
rangeStart).toString()); + expect(response.header).toHaveBeenCalledWith('Content-Type', 'application/octet-stream'); expect(getRequestedResourceStub).toHaveBeenCalledOnce(); expect(getAndSearchItemFromResourceStub).toHaveBeenCalledOnce(); expect(authDetailsStub).toHaveBeenCalledOnce(); @@ -167,7 +206,7 @@ describe('GET request handler', () => { mockAuthDetails.user.mnemonic, mockFile.fileId, expect.any(Object), - expect.any(Object), + expectedRangeOptions, ); }); }); diff --git a/test/webdav/handlers/HEAD.handler.test.ts b/test/webdav/handlers/HEAD.handler.test.ts index 34336c2f..b79725e7 100644 --- a/test/webdav/handlers/HEAD.handler.test.ts +++ b/test/webdav/handlers/HEAD.handler.test.ts @@ -1,23 +1,115 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'; import { HEADRequestHandler } from '../../../src/webdav/handlers/HEAD.handler'; -import { createWebDavRequestFixture, createWebDavResponseFixture } from '../../fixtures/webdav.fixture'; +import { + createWebDavRequestFixture, + createWebDavResponseFixture, + getRequestedFileResource, + getRequestedFolderResource, +} from '../../fixtures/webdav.fixture'; +import { DriveFileService } from '../../../src/services/drive/drive-file.service'; +import { getDriveDatabaseManager } from '../../fixtures/drive-database.fixture'; +import { WebDavRequestedResource } from '../../../src/types/webdav.types'; +import { newFileItem } from '../../fixtures/drive.fixture'; +import { WebDavUtils } from '../../../src/utils/webdav.utils'; +import { randomInt } from 'crypto'; describe('HEAD request handler', () => { beforeEach(() => { vi.restoreAllMocks(); }); - it('When a WebDav client sends a HEAD request, it should reply with a 405', async () => { - const requestHandler = new HEADRequestHandler(); + it('When a folder is requested, it should reply with a 200', async () => { + const requestHandler = new HEADRequestHandler({ + driveDatabaseManager: getDriveDatabaseManager(), + driveFileService: DriveFileService.instance, + }); + + const requestedFolderResource: WebDavRequestedResource = getRequestedFolderResource(); + + const request = createWebDavRequestFixture({ + method: 'HEAD', + url: requestedFolderResource.url, + headers: {}, + }); + const response = createWebDavResponseFixture({ + status: vi.fn().mockReturnValue({ send: vi.fn() }), + }); + + await requestHandler.handle(request, response); + expect(response.status).toHaveBeenCalledWith(200); + }); + + it('When a file is requested, it should reply with a 200 with the correct headers', async () => { + const requestHandler = new HEADRequestHandler({ + driveDatabaseManager: getDriveDatabaseManager(), + driveFileService: DriveFileService.instance, + }); + + const requestedFileResource: WebDavRequestedResource = getRequestedFileResource(); + + const request = createWebDavRequestFixture({ + method: 'HEAD', + url: requestedFileResource.url, + headers: {}, + }); + const response = createWebDavResponseFixture({ + status: vi.fn().mockReturnValue({ send: vi.fn() }), + header: vi.fn(), + }); + + const mockFile = newFileItem(); + + const getRequestedResourceStub = vi + .spyOn(WebDavUtils, 'getRequestedResource') + .mockResolvedValue(requestedFileResource); + const getAndSearchItemFromResourceStub = vi + .spyOn(WebDavUtils, 'getAndSearchItemFromResource') + .mockResolvedValue(mockFile); + + await requestHandler.handle(request, response); + expect(response.status).toHaveBeenCalledWith(200); + expect(response.header).toHaveBeenCalledWith('Content-Type', 'application/octet-stream'); + 
expect(response.header).toHaveBeenCalledWith('Content-length', mockFile.size.toString()); + expect(getRequestedResourceStub).toHaveBeenCalledOnce(); + expect(getAndSearchItemFromResourceStub).toHaveBeenCalledOnce(); + }); + + it('When a file is requested with range-request, it should reply with a 200 with the correct headers', async () => { + const requestHandler = new HEADRequestHandler({ + driveDatabaseManager: getDriveDatabaseManager(), + driveFileService: DriveFileService.instance, + }); + + const requestedFileResource: WebDavRequestedResource = getRequestedFileResource(); + + const mockSize = randomInt(500, 10000); + const mockFile = newFileItem({ size: mockSize }); + const rangeStart = randomInt(0, 450); const request = createWebDavRequestFixture({ method: 'HEAD', + url: requestedFileResource.url, + headers: { + range: `bytes=${rangeStart}-${mockSize}`, + }, }); const response = createWebDavResponseFixture({ status: vi.fn().mockReturnValue({ send: vi.fn() }), + header: vi.fn(), }); + const getRequestedResourceStub = vi + .spyOn(WebDavUtils, 'getRequestedResource') + .mockResolvedValue(requestedFileResource); + const getAndSearchItemFromResourceStub = vi + .spyOn(WebDavUtils, 'getAndSearchItemFromResource') + .mockResolvedValue(mockFile); + await requestHandler.handle(request, response); - expect(response.status).toHaveBeenCalledWith(405); + expect(response.status).toHaveBeenCalledWith(200); + expect(response.header).toHaveBeenCalledWith('Content-length', (mockSize - rangeStart).toString()); + expect(response.header).toHaveBeenCalledWith('Content-Type', 'application/octet-stream'); + expect(getRequestedResourceStub).toHaveBeenCalledOnce(); + expect(getAndSearchItemFromResourceStub).toHaveBeenCalledOnce(); }); }); diff --git a/test/webdav/handlers/OPTIONS.handler.test.ts b/test/webdav/handlers/OPTIONS.handler.test.ts index f1f49b8a..17e51d4c 100644 --- a/test/webdav/handlers/OPTIONS.handler.test.ts +++ b/test/webdav/handlers/OPTIONS.handler.test.ts @@ -8,12 +8,13 @@ describe('OPTIONS request handler', () => { vi.restoreAllMocks(); }); - it('When a WebDav client sends an OPTIONS request, it should return the allowed methods', async () => { + it('When the root folder is requested, it should return all of the server allowed methods', async () => { const requestHandler = new OPTIONSRequestHandler(); const request = createWebDavRequestFixture({ method: 'OPTIONS', user: UserSettingsFixture, + url: '/', }); const response = createWebDavResponseFixture({ header: vi.fn(), @@ -23,10 +24,47 @@ describe('OPTIONS request handler', () => { await requestHandler.handle(request, response); expect(response.status).toHaveBeenCalledWith(200); - expect(response.header).toHaveBeenCalledWith( - 'Allow', - 'OPTIONS, GET, HEAD, POST, PUT, DELETE, PROPFIND, PROPPATCH, MKCOL, COPY, MOVE, LOCK, UNLOCK', - ); + expect(response.header).toHaveBeenCalledWith('Allow', 'DELETE, GET, HEAD, MKCOL, MOVE, OPTIONS, PROPFIND, PUT'); + expect(response.header).toHaveBeenCalledWith('DAV', '1, 2, ordered-collections'); + }); + + it('When a folder is requested, it should return all of the folder allowed methods', async () => { + const requestHandler = new OPTIONSRequestHandler(); + + const request = createWebDavRequestFixture({ + method: 'OPTIONS', + user: UserSettingsFixture, + url: '/folder/', + }); + const response = createWebDavResponseFixture({ + header: vi.fn(), + status: vi.fn().mockReturnValue({ send: vi.fn() }), + }); + + await requestHandler.handle(request, response); + + 
expect(response.status).toHaveBeenCalledWith(200); + expect(response.header).toHaveBeenCalledWith('Allow', 'DELETE, HEAD, MKCOL, MOVE, OPTIONS, PROPFIND'); + expect(response.header).toHaveBeenCalledWith('DAV', '1, 2, ordered-collections'); + }); + + it('When a file is requested, it should return all of the file allowed methods', async () => { + const requestHandler = new OPTIONSRequestHandler(); + + const request = createWebDavRequestFixture({ + method: 'OPTIONS', + user: UserSettingsFixture, + url: '/file', + }); + const response = createWebDavResponseFixture({ + header: vi.fn(), + status: vi.fn().mockReturnValue({ send: vi.fn() }), + }); + + await requestHandler.handle(request, response); + + expect(response.status).toHaveBeenCalledWith(200); + expect(response.header).toHaveBeenCalledWith('Allow', 'DELETE, GET, HEAD, MOVE, OPTIONS, PROPFIND, PUT'); expect(response.header).toHaveBeenCalledWith('DAV', '1, 2, ordered-collections'); }); }); diff --git a/yarn.lock b/yarn.lock index 1c31aa7d..7861a29a 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2990,7 +2990,7 @@ resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.17.tgz#fc560f60946d0aeff2f914eb41679659d3310e1a" integrity sha512-rX4/bPcfmvxHDv0XjfJELTTr+iB+tn032nPILqHm5wbthUUUuVtNGGqzhya9XUxjTP8Fpr0qYgSZZKxGY++svQ== -"@types/range-parser@*": +"@types/range-parser@*", "@types/range-parser@^1.2.7": version "1.2.7" resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.7.tgz#50ae4353eaaddc04044279812f52c8c65857dbcb" integrity sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ== @@ -7230,7 +7230,7 @@ quick-lru@^5.1.1: resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932" integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA== -range-parser@~1.2.1: +range-parser@^1.2.1, range-parser@~1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==
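
Note on NetworkUtils.parseRangeHeader: it delegates to the range-parser package, whose documented contract is to return -2 for a syntactically malformed Range header, -1 for an unsatisfiable one, and otherwise an array of inclusive { start, end } ranges tagged with the requested unit. The following minimal sketch shows that contract and how rangeSize ends up as the Content-length of the GET/HEAD reply; the file size and header values are illustrative only.

import parseRange from 'range-parser';

const totalFileSize = 1000;

// A valid single range parses to inclusive start/end offsets plus the unit.
const ok = parseRange(totalFileSize, 'bytes=200-499');
if (Array.isArray(ok) && ok.type === 'bytes') {
  const { start, end } = ok[0];
  console.log(end - start + 1); // 300, reported back as the Content-length header
}

// An open-ended range is clamped to the last byte of the file.
const tail = parseRange(totalFileSize, 'bytes=900-');
if (Array.isArray(tail)) {
  console.log(tail[0]); // { start: 900, end: 999 }
}

console.log(parseRange(totalFileSize, 'not-a-range')); // -2: malformed header
console.log(parseRange(totalFileSize, 'bytes=1200-')); // -1: unsatisfiable range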
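
Note on the ranged decryption added in src/services/crypto.service.ts: AES-256-CTR treats the IV as a 128-bit big-endian block counter, so decryptStream can start mid-file by adding the index of the first whole 16-byte block covered by the range to the IV and then discarding the few keystream bytes that fall before the requested offset inside that block. The standalone sketch below illustrates that arithmetic with node:crypto; the helper name decryptFromOffset and the sample payload are illustrative only and are not part of the patch.

import { createCipheriv, createDecipheriv, randomBytes } from 'node:crypto';

const key = randomBytes(32);
const iv = randomBytes(16);
const plaintext = Buffer.from('The quick brown fox jumps over the lazy dog, twice over.', 'utf8');

// Encrypt the whole payload once, the way the file is stored on the network.
const cipher = createCipheriv('aes-256-ctr', key, iv);
const ciphertext = Buffer.concat([cipher.update(plaintext), cipher.final()]);

// Decrypt only the bytes from startOffsetByte onwards, mirroring the logic in decryptStream.
function decryptFromOffset(encryptedSlice: Buffer, startOffsetByte: number): Buffer {
  const aesBlockSize = 16;
  const startOffset = startOffsetByte % aesBlockSize; // bytes into the first covered block
  const startBlockNumber = (startOffsetByte - startOffset) / aesBlockSize;

  // CTR increments the IV once per 16-byte block, so the IV for the range is
  // the original IV plus the index of the first covered block.
  const ivForRange = (BigInt('0x' + iv.toString('hex')) + BigInt(startBlockNumber))
    .toString(16)
    .padStart(32, '0');
  const decipher = createDecipheriv('aes-256-ctr', key, Buffer.from(ivForRange, 'hex'));

  // Burn the keystream bytes that precede the offset inside the block, then
  // decrypt the slice that the ranged HTTP request actually fetched.
  decipher.update(Buffer.alloc(startOffset, 0));
  return Buffer.concat([decipher.update(encryptedSlice), decipher.final()]);
}

const startOffsetByte = 21;
const rangedPlaintext = decryptFromOffset(ciphertext.subarray(startOffsetByte), startOffsetByte);
console.log(rangedPlaintext.equals(plaintext.subarray(startOffsetByte))); // true

This only holds when the downloaded ciphertext itself starts at startOffsetByte, which is why downloadToStream rejects multi-part downloads whenever rangeOptions is set.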