diff --git a/package.json b/package.json index 67bdd9e..ee83b4f 100644 --- a/package.json +++ b/package.json @@ -44,6 +44,7 @@ "express-basic-auth": "^1.2.1", "fast-xml-parser": "^4.3.5", "mime-types": "^2.1.35", + "node-fetch": "2", "openpgp": "^5.11.1", "pm2": "^5.3.1", "realm": "^12.6.2", @@ -60,6 +61,7 @@ "@types/mime-types": "^2.1.4", "@types/mocha": "^10", "@types/node": "^18", + "@types/node-fetch": "^2.6.11", "@types/sinon-chai": "^3.2.12", "@types/sinon-express-mock": "^1.3.12", "@types/superagent": "^8.1.3", diff --git a/src/commands/upload.ts b/src/commands/upload.ts index 5ea51e6..7842994 100644 --- a/src/commands/upload.ts +++ b/src/commands/upload.ts @@ -11,7 +11,6 @@ import { DriveFileService } from '../services/drive/drive-file.service'; import { UploadService } from '../services/network/upload.service'; import { CryptoService } from '../services/crypto.service'; import { DownloadService } from '../services/network/download.service'; -import { StreamUtils } from '../utils/stream.utils'; import { ErrorUtils } from '../utils/errors.utils'; import { DriveFolderService } from '../services/drive/drive-folder.service'; @@ -76,7 +75,7 @@ export default class Upload extends Command { user.bucket, mnemonic, stat.size, - StreamUtils.readStreamToReadableStream(fileStream), + fileStream, { progressCallback: (progress) => { progressBar.update(progress); diff --git a/src/services/auth.service.ts b/src/services/auth.service.ts index 0d4b095..aa3beef 100644 --- a/src/services/auth.service.ts +++ b/src/services/auth.service.ts @@ -5,6 +5,7 @@ import { CryptoService } from './crypto.service'; import { ConfigService } from './config.service'; import { LoginCredentials } from '../types/command.types'; import { ValidationService } from './validation.service'; +import { UserSettings } from '@internxt/sdk/dist/shared/types/userSettings'; export class AuthService { public static readonly instance: AuthService = new AuthService(); @@ -88,7 +89,12 @@ export class AuthService 
{ * * @returns The user plain mnemonic and the auth tokens */ - public getAuthDetails = async (): Promise<{ token: string; newToken: string; mnemonic: string }> => { + public getAuthDetails = async (): Promise<{ + token: string; + newToken: string; + mnemonic: string; + user: UserSettings; + }> => { const loginCredentials = await ConfigService.instance.readUser(); if (!loginCredentials) { throw new Error('Credentials not found, please login first'); @@ -116,6 +122,7 @@ export class AuthService { token, newToken, mnemonic, + user: loginCredentials.user, }; }; } diff --git a/src/services/crypto.service.ts b/src/services/crypto.service.ts index e5bb59e..2ac03d3 100644 --- a/src/services/crypto.service.ts +++ b/src/services/crypto.service.ts @@ -1,9 +1,10 @@ import { CryptoProvider } from '@internxt/sdk'; import { Keys, Password } from '@internxt/sdk/dist/auth'; -import crypto, { Cipher, createCipheriv, createDecipheriv, createHash } from 'crypto'; +import crypto, { createCipheriv, createDecipheriv } from 'node:crypto'; import { KeysService } from './keys.service'; import { ConfigService } from '../services/config.service'; import { StreamUtils } from '../utils/stream.utils'; +import { Transform } from 'stream'; export class CryptoService { public static readonly instance: CryptoService = new CryptoService(); @@ -109,29 +110,6 @@ export class CryptoService { return Buffer.concat([decipher.update(contentsToDecrypt), decipher.final()]).toString('utf8'); }; - private encryptReadable(readable: ReadableStream, cipher: Cipher): ReadableStream { - const reader = readable.getReader(); - - const encryptedFileReadable = new ReadableStream({ - async start(controller) { - let done = false; - - while (!done) { - const status = await reader.read(); - - if (!status.done) { - controller.enqueue(cipher.update(status.value)); - } - - done = status.done; - } - controller.close(); - }, - }); - - return encryptedFileReadable; - } - public async decryptStream(inputSlices: ReadableStream[], 
key: Buffer, iv: Buffer) { const decipher = createDecipheriv('aes-256-ctr', key, iv); const encryptedStream = StreamUtils.joinReadableBinaryStreams(inputSlices); @@ -161,36 +139,11 @@ export class CryptoService { return decryptedStream; } - public async encryptStream( - input: ReadableStream, - key: Buffer, - iv: Buffer, - ): Promise<{ blob: Blob; hash: Buffer }> { + public async getEncryptionTransform(key: Buffer, iv: Buffer): Promise<Transform> { const cipher = createCipheriv('aes-256-ctr', key, iv); - const readable = this.encryptReadable(input, cipher).getReader(); - const hasher = createHash('sha256'); - const blobParts: ArrayBuffer[] = []; - - let done = false; - - while (!done) { - const status = await readable.read(); - - if (!status.done) { - hasher.update(status.value); - blobParts.push(status.value); - } - - done = status.done; - } - - return { - blob: new Blob(blobParts, { type: 'application/octet-stream' }), - hash: createHash('ripemd160').update(Buffer.from(hasher.digest())).digest(), - }; + return cipher; } - /** * Generates the key and the iv by transforming a secret and a salt. * It will generate the same key and iv if the same secret and salt is used. 
diff --git a/src/services/drive/drive-file.service.ts b/src/services/drive/drive-file.service.ts index 96452b1..ca1a701 100644 --- a/src/services/drive/drive-file.service.ts +++ b/src/services/drive/drive-file.service.ts @@ -36,7 +36,7 @@ export class DriveFileService { }); return { - size: driveFile.size, + size: Number(driveFile.size), uuid: driveFile.uuid, encryptedName, name: payload.name, diff --git a/src/services/network/network-facade.service.ts b/src/services/network/network-facade.service.ts index b628a19..0b77ac7 100644 --- a/src/services/network/network-facade.service.ts +++ b/src/services/network/network-facade.service.ts @@ -15,6 +15,9 @@ import { CryptoService } from '../crypto.service'; import { UploadService } from './upload.service'; import { DownloadService } from './download.service'; import { ValidationService } from '../validation.service'; +import { Readable } from 'node:stream'; +import { Transform } from 'stream'; +import { HashStream } from '../../utils/hash.utils'; export class NetworkFacade { private readonly cryptoLib: Network.Crypto; @@ -122,12 +125,13 @@ export class NetworkFacade { bucketId: string, mnemonic: string, size: number, - from: ReadableStream, + from: Readable, options?: UploadOptions, ): Promise<[Promise<{ fileId: string; hash: Buffer }>, AbortController]> { + const hashStream = new HashStream(); const abortable = options?.abortController ?? 
new AbortController(); let fileHash: Buffer; - let encryptedBlob: Blob; + let encryptionTransform: Transform; const onProgress: UploadProgressCallback = (progress: number) => { if (!options?.progressCallback) return; @@ -139,23 +143,23 @@ export class NetworkFacade { }; const encryptFile: EncryptFileFunction = async (_, key, iv) => { - const { blob, hash } = await this.cryptoService.encryptStream( - from, - Buffer.from(key as ArrayBuffer), - Buffer.from(iv as ArrayBuffer), - ); - - fileHash = hash; - encryptedBlob = blob; + encryptionTransform = from + .pipe( + await this.cryptoService.getEncryptionTransform( + Buffer.from(key as ArrayBuffer), + Buffer.from(iv as ArrayBuffer), + ), + ) + .pipe(hashStream); }; const uploadFile: UploadFileFunction = async (url) => { - await this.uploadService.uploadFile(url, encryptedBlob, { + await this.uploadService.uploadFile(url, encryptionTransform, { abortController: abortable, progressCallback: onUploadProgress, }); - return fileHash.toString('hex'); + return hashStream.getHash().toString('hex'); }; const uploadOperation = async () => { const uploadResult = await NetworkUpload.uploadFile( diff --git a/src/services/network/upload.service.ts b/src/services/network/upload.service.ts index a809738..199f353 100644 --- a/src/services/network/upload.service.ts +++ b/src/services/network/upload.service.ts @@ -1,29 +1,14 @@ +import { Readable } from 'node:stream'; import { UploadOptions } from '../../types/network.types'; -import superagent from 'superagent'; +import fetch from 'node-fetch'; export class UploadService { public static readonly instance: UploadService = new UploadService(); - async uploadFile(url: string, data: Blob, options: UploadOptions): Promise<{ etag: string }> { - const request = superagent - .put(url) - .set('Content-Length', data.size.toString()) - .set('Content-Type', data.type) - .send(Buffer.from(await data.arrayBuffer())) - .on('progress', (progressEvent) => { - if (options.progressCallback && 
progressEvent.total) { - const reportedProgress = progressEvent.loaded / parseInt(progressEvent.total); - options.progressCallback(reportedProgress); - } - }); + async uploadFile(url: string, from: Readable, options: UploadOptions): Promise<{ etag: string }> { + const response = await fetch(url, { method: 'PUT', body: from, signal: options.abortController?.signal }); - options.abortController?.signal.addEventListener('abort', () => { - request.abort(); - }); - - const response = await request; - - const etag = response.headers.etag; + const etag = response.headers.get('etag'); options.progressCallback(1); if (!etag) { throw new Error('Missing Etag in response when uploading file'); diff --git a/src/types/webdav.types.ts b/src/types/webdav.types.ts index 1407b8c..49f4706 100644 --- a/src/types/webdav.types.ts +++ b/src/types/webdav.types.ts @@ -10,7 +10,7 @@ export type WebDavMethodHandlerOptions = { }; export type WebDavRequestedResource = { - type: 'file' | 'folder' | 'root'; + type: 'file' | 'folder'; url: string; name: string; path: ParsedPath; diff --git a/src/utils/errors.utils.ts b/src/utils/errors.utils.ts index e0fc8a9..3e31567 100644 --- a/src/utils/errors.utils.ts +++ b/src/utils/errors.utils.ts @@ -30,6 +30,16 @@ export class BadRequestError extends Error { } } +export class UnsupportedMediaTypeError extends Error { + public statusCode = 415; + + constructor(message: string) { + super(message); + this.name = 'UnsupportedMediaTypeError'; + Object.setPrototypeOf(this, UnsupportedMediaTypeError.prototype); + } +} + export class NotImplementedError extends Error { public statusCode = 501; diff --git a/src/utils/hash.utils.ts b/src/utils/hash.utils.ts new file mode 100644 index 0000000..716f95d --- /dev/null +++ b/src/utils/hash.utils.ts @@ -0,0 +1,42 @@ +import { createHash, Hash } from 'crypto'; +import { Transform, TransformCallback, TransformOptions } from 'stream'; + +export class HashStream extends Transform { + hasher: Hash; + finalHash: Buffer; + + 
constructor(opts?: TransformOptions) { + super(opts); + this.hasher = createHash('sha256'); + this.finalHash = Buffer.alloc(0); + } + + _transform(chunk: Buffer, enc: BufferEncoding, cb: TransformCallback) { + this.hasher.update(chunk); + cb(null, chunk); + } + + _flush(cb: (err: Error | null) => void) { + return this.hasher.end(cb); + } + + reset() { + this.hasher = createHash('sha256'); + } + + readHash() { + if (!this.finalHash.length) { + this.finalHash = this.hasher.read(); + } + + return this.finalHash; + } + + getHash() { + if (!this.finalHash.length) { + this.readHash(); + } + + return createHash('ripemd160').update(this.finalHash).digest(); + } +} diff --git a/src/webdav/handlers/PROPFIND.handler.ts b/src/webdav/handlers/PROPFIND.handler.ts index 5ed3362..5953c60 100644 --- a/src/webdav/handlers/PROPFIND.handler.ts +++ b/src/webdav/handlers/PROPFIND.handler.ts @@ -22,7 +22,7 @@ export class PROPFINDRequestHandler implements WebDavMethodHandler { switch (resource.type) { case 'file': { - res.status(200).send(await this.getFileMetaXML(resource)); + res.status(207).send(await this.getFileMetaXML(resource)); break; } @@ -39,7 +39,7 @@ export class PROPFINDRequestHandler implements WebDavMethodHandler { createdAt: new Date(rootFolder.createdAt), updatedAt: new Date(rootFolder.updatedAt), }); - res.status(200).send(await this.getFolderContentXML('/', rootFolder.uuid, depth, true)); + res.status(207).send(await this.getFolderContentXML('/', rootFolder.uuid, depth, true)); break; } @@ -50,7 +50,7 @@ export class PROPFINDRequestHandler implements WebDavMethodHandler { return; } - res.status(200).send(await this.getFolderContentXML(resource.url, driveParentFolder.uuid, depth)); + res.status(207).send(await this.getFolderContentXML(resource.url, driveParentFolder.uuid, depth)); break; } } diff --git a/src/webdav/handlers/PUT.handler.ts b/src/webdav/handlers/PUT.handler.ts new file mode 100644 index 0000000..6882041 --- /dev/null +++ 
b/src/webdav/handlers/PUT.handler.ts @@ -0,0 +1,82 @@ +import { Request, Response } from 'express'; +import { DriveFileService } from '../../services/drive/drive-file.service'; +import { DriveRealmManager } from '../../services/realms/drive-realm-manager.service'; +import { NetworkFacade } from '../../services/network/network-facade.service'; +import { UploadService } from '../../services/network/upload.service'; +import { DownloadService } from '../../services/network/download.service'; +import { CryptoService } from '../../services/crypto.service'; +import { AuthService } from '../../services/auth.service'; +import { DriveFolderRealmSchema } from '../../services/realms/drive-folders.realm'; +import { WebDavMethodHandler, WebDavRequestedResource } from '../../types/webdav.types'; +import { NotFoundError, UnsupportedMediaTypeError } from '../../utils/errors.utils'; +import { WebDavUtils } from '../../utils/webdav.utils'; +import { webdavLogger } from '../../utils/logger.utils'; + +export class PUTRequestHandler implements WebDavMethodHandler { + constructor( + private dependencies: { + driveFileService: DriveFileService; + driveRealmManager: DriveRealmManager; + uploadService: UploadService; + downloadService: DownloadService; + cryptoService: CryptoService; + authService: AuthService; + networkFacade: NetworkFacade; + }, + ) {} + + handle = async (req: Request, res: Response) => { + const contentLength = Number(req.headers['content-length']); + if (!contentLength || isNaN(contentLength) || contentLength <= 0) { + throw new UnsupportedMediaTypeError('Empty files are not supported'); + } + + const resource = WebDavUtils.getRequestedResource(req, this.dependencies.driveRealmManager); + const driveFolder = await this.getDriveFolderRealmObject(resource); + + webdavLogger.info(`PUT request received for uploading file '${resource.name}' to '${resource.path.dir}'`); + if (!driveFolder) { + throw new NotFoundError('Drive destination folder not found'); + } + + const { 
user, mnemonic } = await this.dependencies.authService.getAuthDetails(); + + const [uploadPromise] = await this.dependencies.networkFacade.uploadFromStream( + user.bucket, + mnemonic, + contentLength, + req, + { + progressCallback: (progress) => { + webdavLogger.info(`Upload progress for file ${resource.name}: ${progress}%`); + }, + }, + ); + + const uploadResult = await uploadPromise; + + webdavLogger.info('✅ File uploaded to network'); + + const file = await DriveFileService.instance.createFile({ + name: resource.path.name, + type: resource.path.ext.replaceAll('.', ''), + size: contentLength, + folderId: driveFolder.id, + fileId: uploadResult.fileId, + bucket: user.bucket, + }); + + webdavLogger.info('✅ File uploaded to internxt drive'); + + this.dependencies.driveRealmManager.createFile(file); + + res.status(200); + res.send(); + }; + + private async getDriveFolderRealmObject(resource: WebDavRequestedResource) { + const { driveRealmManager } = this.dependencies; + const result = driveRealmManager.findByRelativePath(resource.path.dir); + return result as DriveFolderRealmSchema | null; + } +} diff --git a/src/webdav/webdav-server.ts b/src/webdav/webdav-server.ts index b2910e5..1a9c35c 100644 --- a/src/webdav/webdav-server.ts +++ b/src/webdav/webdav-server.ts @@ -21,6 +21,7 @@ import asyncHandler from 'express-async-handler'; import { SdkManager } from '../services/sdk-manager.service'; import { NetworkFacade } from '../services/network/network-facade.service'; import { NetworkUtils } from '../utils/network.utils'; +import { PUTRequestHandler } from './handlers/PUT.handler'; export class WebDavServer { constructor( @@ -85,6 +86,20 @@ export class WebDavServer { ).handle, ), ); + this.app.put( + '*', + asyncHandler( + new PUTRequestHandler({ + driveFileService: this.driveFileService, + driveRealmManager: this.driveRealmManager, + uploadService: this.uploadService, + downloadService: this.downloadService, + cryptoService: this.cryptoService, + authService: 
this.authService, + networkFacade: await this.getNetwork(), + }).handle, + ), + ); }; async start() { diff --git a/test/services/auth.service.test.ts b/test/services/auth.service.test.ts index fecc769..9bbc2fd 100644 --- a/test/services/auth.service.test.ts +++ b/test/services/auth.service.test.ts @@ -119,6 +119,7 @@ describe('Auth service', () => { token: 'test_auth_token', newToken: 'test_new_auth_token', mnemonic: 'test_mnemonic', + user: UserFixture, }); }); diff --git a/test/services/crypto.service.test.ts b/test/services/crypto.service.test.ts index 1a58e79..a7494fe 100644 --- a/test/services/crypto.service.test.ts +++ b/test/services/crypto.service.test.ts @@ -7,9 +7,6 @@ import { CryptoService } from '../../src/services/crypto.service'; import { ConfigKeys } from '../../src/types/config.types'; import { Keys } from '@internxt/sdk'; import { KeysService } from '../../src/services/keys.service'; -import { createReadStream } from 'fs'; -import { StreamUtils } from '../../src/utils/stream.utils'; -import path from 'path'; chai.use(sinonChai); @@ -121,22 +118,6 @@ describe('Crypto service', () => { expect(keysServiceStub).to.be.calledWith(password); }); - it('When encrypting an stream, the output hash should be correct', async () => { - const key = Buffer.from('4ba9058b2efc8c7c9c869b6573b725aa8bf67aecb26d3ebd678e624565570e9c', 'hex'); - const iv = Buffer.from('4ae6fcc4dd6ebcdb9076f2396d64da48', 'hex'); - const file = path.join(process.cwd(), 'test/fixtures/test-content.fixture.txt'); - const readStream = createReadStream(file); - - const result = await CryptoService.instance.encryptStream( - StreamUtils.readStreamToReadableStream(readStream), - key, - iv, - ); - - const buffer = Buffer.from(await result.blob.arrayBuffer()); - expect(buffer.toString('hex')).to.be.equal('5d14c5cf376caeb54154'); - }); - /** * This test is commented since the CryptoJS library is not available in the project. 
* diff --git a/test/services/network/network-facade.service.test.ts b/test/services/network/network-facade.service.test.ts index d09e76d..fcb615a 100644 --- a/test/services/network/network-facade.service.test.ts +++ b/test/services/network/network-facade.service.test.ts @@ -9,7 +9,6 @@ import { expect } from 'chai'; import { UploadService } from '../../../src/services/network/upload.service'; import { CryptoService } from '../../../src/services/crypto.service'; import { DownloadService } from '../../../src/services/network/download.service'; -import { StreamUtils } from '../../../src/utils/stream.utils'; import { Readable } from 'stream'; import axios from 'axios'; @@ -47,7 +46,7 @@ describe('Network Facade Service', () => { 'f1858bc9675f9e4f7ab29429', 'animal fog wink trade december thumb sight cousin crunch plunge captain enforce letter creek text', 100, - StreamUtils.readStreamToReadableStream(readStream), + readStream, options, ); @@ -74,7 +73,7 @@ describe('Network Facade Service', () => { 'f1858bc9675f9e4f7ab29429', 'animal fog wink trade december thumb sight cousin crunch plunge captain enforce letter creek text', 100, - StreamUtils.readStreamToReadableStream(readStream), + readStream, options, ); diff --git a/test/services/network/upload.service.test.ts b/test/services/network/upload.service.test.ts index c4902e8..4930e3b 100644 --- a/test/services/network/upload.service.test.ts +++ b/test/services/network/upload.service.test.ts @@ -1,8 +1,8 @@ import sinon from 'sinon'; -import superagent from 'superagent'; import { expect } from 'chai'; import { UploadService } from '../../../src/services/network/upload.service'; import nock from 'nock'; +import { Readable } from 'stream'; describe('Upload Service', () => { let sut: UploadService; @@ -16,7 +16,12 @@ describe('Upload Service', () => { it('When a file is uploaded and etag is missing, should throw an error', async () => { const url = 'https://example.com/upload'; - const data = new Blob(['test content'], { 
type: 'text/plain' }); + const data = new Readable({ + read() { + this.push('test content'); + this.push(null); + }, + }); const options = { progressCallback: sinon.stub(), abortController: new AbortController(), @@ -33,7 +38,12 @@ describe('Upload Service', () => { it('When a file is uploaded and etag is returned, the etag should be returned', async () => { const url = 'https://example.com/upload'; - const data = new Blob(['test content'], { type: 'text/plain' }); + const data = new Readable({ + read() { + this.push('test content'); + this.push(null); + }, + }); const options = { progressCallback: sinon.stub(), abortController: new AbortController(), @@ -49,7 +59,12 @@ describe('Upload Service', () => { it('When a file is uploaded, should update the progress', async () => { const url = 'https://example.com/upload'; - const data = new Blob(['test content'], { type: 'text/plain' }); + const data = new Readable({ + read() { + this.push('test content'); + this.push(null); + }, + }); const options = { progressCallback: sinon.stub(), abortController: new AbortController(), @@ -59,37 +74,18 @@ describe('Upload Service', () => { etag: 'test-etag', }); - sinon.stub(superagent, 'put').returns({ - // @ts-expect-error - Partiak Superagent request mock - set: () => { - return { - set: () => { - return { - send: () => { - return { - on: sinon - .stub() - .callsFake((event, callback) => { - if (event === 'progress') { - callback({ total: 100, loaded: 50 }); - } - }) - .resolves({ headers: { etag: 'test-etag' } }), - }; - }, - }; - }, - }; - }, - }); - await sut.uploadFile(url, data, options); sinon.assert.calledWithExactly(options.progressCallback, 1); }); it('When a file is uploaded and the upload is aborted, should cancel the request', async () => { const url = 'https://example.com/upload'; - const data = new Blob(['test content'], { type: 'text/plain' }); + const data = new Readable({ + read() { + this.push('test content'); + this.push(null); + }, + }); const options = { 
progressCallback: sinon.stub(), abortController: new AbortController(), @@ -99,20 +95,8 @@ describe('Upload Service', () => { etag: 'test-etag', }); - // @ts-expect-error - Partial mock response - const requestStub = sinon.stub(superagent, 'put').resolves({ - on: sinon.stub().callsFake((event, callback) => { - if (event === 'progress') { - callback({ total: 100, loaded: 50 }); - } - }), - }); - sut.uploadFile(url, data, options); options.abortController.abort(); - - expect(requestStub.called).to.be.true; - expect(requestStub.args[0][0]).to.equal(url); }); }); diff --git a/test/utils/hash.utils.test.ts b/test/utils/hash.utils.test.ts new file mode 100644 index 0000000..e961edb --- /dev/null +++ b/test/utils/hash.utils.test.ts @@ -0,0 +1,38 @@ +import { expect } from 'chai'; +import { HashStream } from '../../src/utils/hash.utils'; +import sinon from 'sinon'; + +describe('Hash Utils', () => { + let hashStream: HashStream; + const sandbox = sinon.createSandbox(); + beforeEach(() => { + hashStream = new HashStream(); + }); + + afterEach(() => { + sandbox.restore(); + }); + it('should update the hasher with data chunk on _transform call', (done) => { + const spy = sandbox.spy(hashStream.hasher, 'update'); + const chunk = Buffer.from('Test data'); + + hashStream._transform(chunk, 'utf8', () => { + expect(spy.calledOnce).to.be.true; + done(); + }); + }); + + it('should correctly calculate hash on readHash call', (done) => { + const testData = 'Some test data'; + hashStream.on('data', () => {}); + hashStream.on('end', () => { + const readHash = hashStream.readHash(); + expect(readHash).to.be.instanceof(Buffer); + expect(readHash.length).to.be.greaterThan(0); + done(); + }); + + hashStream.write(testData); + hashStream.end(); + }); +}); diff --git a/test/webdav/handlers/GET.handler.test.ts b/test/webdav/handlers/GET.handler.test.ts index 6c33819..8f145c4 100644 --- a/test/webdav/handlers/GET.handler.test.ts +++ b/test/webdav/handlers/GET.handler.test.ts @@ -11,6 +11,7 @@ 
import { expect } from 'chai'; import { NotFoundError, NotImplementedError } from '../../../src/utils/errors.utils'; import { SdkManager } from '../../../src/services/sdk-manager.service'; import { NetworkFacade } from '../../../src/services/network/network-facade.service'; +import { UserFixture } from '../../fixtures/auth.fixture'; describe('GET request handler', () => { const sandbox = sinon.createSandbox(); @@ -125,7 +126,7 @@ describe('GET request handler', () => { sandbox.stub(driveRealmManager, 'findByRelativePath').returns(driveFileRealmObject); sandbox .stub(authService, 'getAuthDetails') - .resolves({ mnemonic: 'MNEMONIC', token: 'TOKEN', newToken: 'NEW_TOKEN' }); + .resolves({ mnemonic: 'MNEMONIC', token: 'TOKEN', newToken: 'NEW_TOKEN', user: UserFixture }); sandbox.stub(networkFacade, 'downloadToStream').resolves([Promise.resolve(), new AbortController()]); const response = createWebDavResponseFixture({ diff --git a/test/webdav/handlers/PROPFIND.handler.test.ts b/test/webdav/handlers/PROPFIND.handler.test.ts index 06b3745..badf32e 100644 --- a/test/webdav/handlers/PROPFIND.handler.test.ts +++ b/test/webdav/handlers/PROPFIND.handler.test.ts @@ -58,7 +58,7 @@ describe('PROPFIND request handler', () => { }); await requestHandler.handle(request, response); - sinon.assert.calledWith(response.status, 200); + sinon.assert.calledWith(response.status, 207); sinon.assert.calledWith( sendStub, `/HTTP/1.1 200 OKapplication/octet-stream${FormatUtils.formatDateForWebDav(folderFixture.updatedAt)}F00000030`, @@ -111,7 +111,7 @@ describe('PROPFIND request handler', () => { }); await requestHandler.handle(request, response); - sinon.assert.calledWith(response.status, 200); + sinon.assert.calledWith(response.status, 207); sinon.assert.calledWith( sendStub, `/HTTP/1.1 200 OKapplication/octet-stream${FormatUtils.formatDateForWebDav(folderFixture.updatedAt)}F00000030/${paginatedFolder1.plainName}/HTTP/1.1 200 
OK${paginatedFolder1.plainName}${FormatUtils.formatDateForWebDav(paginatedFolder1.updatedAt)}0`, @@ -152,7 +152,7 @@ describe('PROPFIND request handler', () => { }); await requestHandler.handle(request, response); - sinon.assert.calledWith(response.status, 200); + sinon.assert.calledWith(response.status, 207); // TODO: Test the XML response }); @@ -196,7 +196,7 @@ describe('PROPFIND request handler', () => { }); await requestHandler.handle(request, response); - sinon.assert.calledWith(response.status, 200); + sinon.assert.calledWith(response.status, 207); // TODO: Test the XML response }); diff --git a/test/webdav/handlers/PUT.handler.test.ts b/test/webdav/handlers/PUT.handler.test.ts new file mode 100644 index 0000000..45d238e --- /dev/null +++ b/test/webdav/handlers/PUT.handler.test.ts @@ -0,0 +1,158 @@ +import sinon from 'sinon'; +import { createWebDavRequestFixture, createWebDavResponseFixture } from '../../fixtures/webdav.fixture'; +import { DriveFileService } from '../../../src/services/drive/drive-file.service'; +import { + getDriveFileRealmSchemaFixture, + getDriveFolderRealmSchemaFixture, + getDriveRealmManager, +} from '../../fixtures/drive-realm.fixture'; +import { CryptoService } from '../../../src/services/crypto.service'; +import { DownloadService } from '../../../src/services/network/download.service'; +import { UploadService } from '../../../src/services/network/upload.service'; +import { AuthService } from '../../../src/services/auth.service'; +import { expect } from 'chai'; +import { NotFoundError, UnsupportedMediaTypeError } from '../../../src/utils/errors.utils'; +import { SdkManager } from '../../../src/services/sdk-manager.service'; +import { NetworkFacade } from '../../../src/services/network/network-facade.service'; +import { UserFixture } from '../../fixtures/auth.fixture'; +import { PUTRequestHandler } from '../../../src/webdav/handlers/PUT.handler'; + +describe('PUT request handler', () => { + const sandbox = sinon.createSandbox(); + 
const getNetworkMock = () => { + return SdkManager.instance.getNetwork({ + user: 'user', + pass: 'pass', + }); + }; + + afterEach(() => { + sandbox.restore(); + }); + + it('When a WebDav client sends a PUT request and it contains a content-length of 0, then it should throw an UnsupportedMediaTypeError', async () => { + const networkFacade = new NetworkFacade( + getNetworkMock(), + UploadService.instance, + DownloadService.instance, + CryptoService.instance, + ); + const sut = new PUTRequestHandler({ + driveFileService: DriveFileService.instance, + uploadService: UploadService.instance, + downloadService: DownloadService.instance, + driveRealmManager: getDriveRealmManager(), + authService: AuthService.instance, + cryptoService: CryptoService.instance, + networkFacade, + }); + + const request = createWebDavRequestFixture({ + method: 'PUT', + url: '/file.txt', + headers: { + 'content-length': '0', + }, + }); + + const response = createWebDavResponseFixture({ + status: sandbox.stub().returns({ send: sandbox.stub() }), + }); + + try { + await sut.handle(request, response); + expect(true).to.be.false; + } catch (error) { + expect(error).to.be.instanceOf(UnsupportedMediaTypeError); + } + }); + + it('When a WebDav client sends a PUT request, and the Drive destination folder is not found, then it should throw a NotFoundError', async () => { + const driveRealmManager = getDriveRealmManager(); + const downloadService = DownloadService.instance; + const uploadService = UploadService.instance; + const cryptoService = CryptoService.instance; + const networkFacade = new NetworkFacade(getNetworkMock(), uploadService, downloadService, cryptoService); + const sut = new PUTRequestHandler({ + driveFileService: DriveFileService.instance, + uploadService, + downloadService, + driveRealmManager, + authService: AuthService.instance, + cryptoService, + networkFacade, + }); + + const request = createWebDavRequestFixture({ + method: 'PUT', + url: '/file.txt', + headers: { + 'content-length': 
'100', + }, + }); + + sandbox.stub(driveRealmManager, 'findByRelativePath').returns(null); + const response = createWebDavResponseFixture({ + status: sandbox.stub().returns({ send: sandbox.stub() }), + }); + + try { + await sut.handle(request, response); + expect(true).to.be.false; + } catch (error) { + expect(error).to.be.instanceOf(NotFoundError); + } + }); + + it('When a WebDav client sends a PUT request, and the Drive destination folder is found, then it should upload the file to the folder', async () => { + const driveRealmManager = getDriveRealmManager(); + const downloadService = DownloadService.instance; + const uploadService = UploadService.instance; + const cryptoService = CryptoService.instance; + const authService = AuthService.instance; + const networkFacade = new NetworkFacade(getNetworkMock(), uploadService, downloadService, cryptoService); + const sut = new PUTRequestHandler({ + driveFileService: DriveFileService.instance, + uploadService, + downloadService, + driveRealmManager, + authService, + cryptoService, + networkFacade, + }); + + const request = createWebDavRequestFixture({ + method: 'PUT', + url: '/file.txt', + headers: { + 'content-length': '150', + }, + }); + + const driveFileRealmObject = getDriveFileRealmSchemaFixture({ name: 'file' }); + const driveFolderRealmObject = getDriveFolderRealmSchemaFixture({}); + + sandbox + .stub(driveRealmManager, 'findByRelativePath') + .withArgs('/file.txt') + .returns(driveFileRealmObject) + .withArgs('/') + .returns(driveFolderRealmObject); + sandbox + .stub(authService, 'getAuthDetails') + .resolves({ mnemonic: 'MNEMONIC', token: 'TOKEN', newToken: 'NEW_TOKEN', user: UserFixture }); + + sandbox + .stub(networkFacade, 'uploadFromStream') + .resolves([Promise.resolve({ fileId: '09218313209', hash: Buffer.from('test') }), new AbortController()]); + sandbox.stub(DriveFileService.instance, 'createFile').resolves(); + sandbox.stub(driveRealmManager, 'createFile').resolves(); + + const response = 
createWebDavResponseFixture({ + status: sandbox.stub().returns({ send: sandbox.stub() }), + }); + + await sut.handle(request, response); + expect(response.status.calledWith(200)).to.be.true; + }); +}); diff --git a/yarn.lock b/yarn.lock index bdacdcb..4bc3760 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2925,6 +2925,14 @@ resolved "https://registry.yarnpkg.com/@types/mocha/-/mocha-10.0.6.tgz#818551d39113081048bdddbef96701b4e8bb9d1b" integrity sha512-dJvrYWxP/UcXm36Qn36fxhUKu8A/xMRXVT2cliFF1Z7UA9liG5Psj3ezNSZw+5puH2czDXRLcXQxf8JbJt0ejg== +"@types/node-fetch@^2.6.11": + version "2.6.11" + resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.11.tgz#9b39b78665dae0e82a08f02f4967d62c66f95d24" + integrity sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g== + dependencies: + "@types/node" "*" + form-data "^4.0.0" + "@types/node-forge@^1.3.0": version "1.3.11" resolved "https://registry.yarnpkg.com/@types/node-forge/-/node-forge-1.3.11.tgz#0972ea538ddb0f4d9c2fa0ec5db5724773a604da" @@ -7510,6 +7518,13 @@ node-abi@^3.3.0: dependencies: semver "^7.3.5" +node-fetch@2: + version "2.7.0" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" + integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== + dependencies: + whatwg-url "^5.0.0" + node-fetch@^2.6.7: version "2.6.7" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad"