From 734d85eed11fee6475730c2629bb3689af9fac3a Mon Sep 17 00:00:00 2001
From: Aidan Daly <74743624+dalyaidan1@users.noreply.github.com>
Date: Sat, 17 Aug 2024 15:36:22 -0400
Subject: [PATCH 01/19] feat: adjust the router for large buffers

---
 src/Routers/FilesRouter.js | 33 ++++++++++++++++++++++++++++-----
 1 file changed, 28 insertions(+), 5 deletions(-)

diff --git a/src/Routers/FilesRouter.js b/src/Routers/FilesRouter.js
index 332cd75748..9147e0145f 100644
--- a/src/Routers/FilesRouter.js
+++ b/src/Routers/FilesRouter.js
@@ -172,8 +172,22 @@ export class FilesRouter {
       }
     }

-    const base64 = req.body.toString('base64');
-    const file = new Parse.File(filename, { base64 }, contentType);
+    // If the request body is a buffer and its size is greater than or equal to the V8 string
+    // size limit, we need to use a Blob to avoid the V8 string size limit
+    const MAX_V8_STRING_SIZE_BYTES = 536_870_912;
+
+    let file;
+
+    if (
+      typeof Blob !== 'undefined' &&
+      Buffer.isBuffer(req.body) &&
+      req.body?.length >= MAX_V8_STRING_SIZE_BYTES
+    ) {
+      file = new Parse.File(filename, new Blob([req.body]), contentType);
+    } else {
+      file = new Parse.File(filename, { base64: req.body.toString('base64') }, contentType);
+    }
+
     const { metadata = {}, tags = {} } = req.fileData || {};
     try {
       // Scan request data for denied keywords
@@ -213,8 +227,17 @@
       // if the ParseFile returned is type uri, download the file before saving it
       await addFileDataIfNeeded(fileObject.file);
       // update fileSize
-      const bufferData = Buffer.from(fileObject.file._data, 'base64');
-      fileObject.fileSize = Buffer.byteLength(bufferData);
+      let fileData = fileObject.file._data;
+      // if the file is a blob, get the size from the blob
+      if (typeof Blob !== 'undefined' && fileObject.file._source?.file instanceof Blob) {
+        // get the size of the blob
+        fileObject.fileSize = fileObject.file._source.file.size;
+        // set the file data
+        fileData = fileObject.file._source?.file;
+      } else {
+        const bufferData = Buffer.from(fileObject.file._data, 'base64');
+        fileObject.fileSize = Buffer.byteLength(bufferData);
+      }
       // prepare file options
       const fileOptions = {
         metadata: fileObject.file._metadata,
@@ -228,7 +251,7 @@
       const createFileResult = await filesController.createFile(
         config,
         fileObject.file._name,
-        bufferData,
+        fileData,
         fileObject.file._source.type,
         fileOptions
       );

From 088c669a832859c9cd1c6276a468d9b906c3055f Mon Sep 17 00:00:00 2001
From: Aidan Daly <74743624+dalyaidan1@users.noreply.github.com>
Date: Sat, 17 Aug 2024 15:39:10 -0400
Subject: [PATCH 02/19] feat: adjust adapter for large blobs

---
 src/Adapters/Files/GridFSBucketAdapter.js | 55 ++++++++++++++++++-----
 1 file changed, 44 insertions(+), 11 deletions(-)

diff --git a/src/Adapters/Files/GridFSBucketAdapter.js b/src/Adapters/Files/GridFSBucketAdapter.js
index 76a8f25d1b..3b75703bf3 100644
--- a/src/Adapters/Files/GridFSBucketAdapter.js
+++ b/src/Adapters/Files/GridFSBucketAdapter.js
@@ -68,24 +68,57 @@ export class GridFSBucketAdapter extends FilesAdapter {
     const stream = await bucket.openUploadStream(filename, {
       metadata: options.metadata,
     });
-    if (this._encryptionKey !== null) {
+
+    // when working with a Blob, it could be over the max size of a buffer, so we need to stream it
+    if (typeof Blob !== 'undefined' && data instanceof Blob) {
+      const reader = data.stream().getReader();
+      const iv = crypto.randomBytes(16);
+      const cipher = this._encryptionKey !== null ?
crypto.createCipheriv(this._algorithm, this._encryptionKey, iv) : null; + + const processChunk = async ({ done, value }) => { + if (done) { + if (cipher) { + const finalChunk = Buffer.concat([cipher.final(), iv, cipher.getAuthTag()]); + await stream.write(finalChunk); + } + stream.end(); + return; + } + + if (cipher) { + value = cipher.update(value); + } + + await stream.write(value); + reader.read().then(processChunk); + }; try { - const iv = crypto.randomBytes(16); - const cipher = crypto.createCipheriv(this._algorithm, this._encryptionKey, iv); - const encryptedResult = Buffer.concat([ - cipher.update(data), - cipher.final(), - iv, - cipher.getAuthTag(), - ]); - await stream.write(encryptedResult); + reader.read().then(processChunk); } catch (err) { return new Promise((resolve, reject) => { return reject(err); }); } } else { - await stream.write(data); + if (this._encryptionKey !== null) { + try { + const iv = crypto.randomBytes(16); + const cipher = crypto.createCipheriv(this._algorithm, this._encryptionKey, iv); + const encryptedResult = Buffer.concat([ + cipher.update(data), + cipher.final(), + iv, + cipher.getAuthTag(), + ]); + await stream.write(encryptedResult); + } catch (err) { + return new Promise((resolve, reject) => { + return reject(err); + }); + } + } else { + await stream.write(data); + } } stream.end(); return new Promise((resolve, reject) => { From 3385d2f7f00cc794dd3f92d02abc0da68258ab6b Mon Sep 17 00:00:00 2001 From: Aidan Daly <74743624+dalyaidan1@users.noreply.github.com> Date: Sat, 17 Aug 2024 15:40:00 -0400 Subject: [PATCH 03/19] test: test file uploads --- package-lock.json | 35 +++++++++++++ package.json | 1 + spec/FilesController.spec.js | 95 ++++++++++++++++++++++++++++++++++++ 3 files changed, 131 insertions(+) diff --git a/package-lock.json b/package-lock.json index b113386d8d..7dca77e68d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -76,6 +76,7 @@ "@semantic-release/release-notes-generator": "14.0.1", "all-node-versions": "12.1.0", "apollo-upload-client": "17.0.0", + "axios": "^1.7.4", "clean-jsdoc-theme": "4.3.0", "cross-env": "7.0.2", "deep-diff": "1.0.2", @@ -6514,6 +6515,17 @@ "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz", "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==" }, + "node_modules/axios": { + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.4.tgz", + "integrity": "sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw==", + "dev": true, + "dependencies": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.0", + "proxy-from-env": "^1.1.0" + } + }, "node_modules/babel-plugin-polyfill-corejs2": { "version": "0.4.11", "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.11.tgz", @@ -17760,6 +17772,12 @@ "node": ">= 0.10" } }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "dev": true + }, "node_modules/pseudomap": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", @@ -25809,6 +25827,17 @@ "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz", "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==" }, + "axios": { + 
"version": "1.7.4", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.4.tgz", + "integrity": "sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw==", + "dev": true, + "requires": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.0", + "proxy-from-env": "^1.1.0" + } + }, "babel-plugin-polyfill-corejs2": { "version": "0.4.11", "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.11.tgz", @@ -33929,6 +33958,12 @@ "ipaddr.js": "1.9.1" } }, + "proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "dev": true + }, "pseudomap": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", diff --git a/package.json b/package.json index 81a44ded8a..76cd60758d 100644 --- a/package.json +++ b/package.json @@ -82,6 +82,7 @@ "@semantic-release/release-notes-generator": "14.0.1", "all-node-versions": "12.1.0", "apollo-upload-client": "17.0.0", + "axios": "^1.7.4", "clean-jsdoc-theme": "4.3.0", "cross-env": "7.0.2", "deep-diff": "1.0.2", diff --git a/spec/FilesController.spec.js b/spec/FilesController.spec.js index a16451f3ef..bb83414b7e 100644 --- a/spec/FilesController.spec.js +++ b/spec/FilesController.spec.js @@ -162,4 +162,99 @@ describe('FilesController', () => { expect(gridFSAdapter.validateFilename(fileName)).not.toBe(null); done(); }); + + it('should allow Parse.File uploads over and under 512MB', async done => { + // add required modules + const fs = require('fs'); + const path = require('path'); + const axios = require('axios'); + + + const ONE_GB_BYTES = 1024 * 1024 * 1024; + const V8_STRING_LIMIT_BYTES = 536_870_912; + // Add 50 MB to test the limit + const LARGE_FILE_BTYES = V8_STRING_LIMIT_BYTES + 50 * 1024 * 1024; + const SMALL_FILE_BTYES = 1024 * 1024; + + reconfigureServer({ + // Increase the max upload size to 1GB + maxUploadSize: ONE_GB_BYTES, + // Change to an available port to avoid + // "Uncaught exception: Error: listen EADDRINUSE: address already in use 0.0.0.0:8378" + port: 8384, + }); + + + /** + * Quick helper function to upload the file to the server via the REST API + * We do this becuase creating a Parse.File object with a file over 512MB + * will try to the the Web API FileReader API, which will fail the test + * + * @param {string} fileName the name of the file + * @param {string} filePath the path to the file locally + * @returns + */ + const postFile = async (fileName, filePath) => { + const url = `${Parse.serverURL}/files/${fileName}`; + const headers = { + 'X-Parse-Application-Id': Parse.applicationId, + 'X-Parse-Master-Key': Parse.masterKey, + 'Content-Type': 'multipart/form-data', + }; + + // Create a FormData object to send the file + const formData = new FormData(); + formData.append('file', fs.createReadStream(filePath)); + + // Use axios to send the file + return axios.post(url, formData, { headers }) + }; + + // Make a exact 512MB file + const exactFileRawData = Buffer.alloc(V8_STRING_LIMIT_BYTES); + const exactFileName = 'exactfile.txt'; + // Write the file to disk locally + await fs.promises.writeFile(exactFileName, exactFileRawData); + const exactFilePath = path.resolve(exactFileName); + + + // make a large file + const largeFileRawData = Buffer.alloc(LARGE_FILE_BTYES); + const largeFileName = 'bigfile.txt'; + // Write the file to 
disk locally + await fs.promises.writeFile(largeFileName, largeFileRawData); + const largeFilePath = path.resolve(largeFileName); + + // Make a 1MB file + const smallFileRawData = Buffer.alloc(SMALL_FILE_BTYES); + const smallFileName = 'smallfile.txt'; + // Write the file to disk locally + await fs.promises.writeFile(smallFileName, smallFileRawData); + const smallFilePath = path.resolve(smallFileName); + + try { + // Test a small file + const smallFileRes = await postFile(smallFileName, smallFilePath); + expect(smallFileRes.data.url).not.toBe(null); + + // Test a file that is exactly 512MB + const exactFileRes = await postFile(exactFileName, exactFilePath); + expect(exactFileRes.data.url).not.toBe(null); + + // Test a large file + const largeFileRes = await postFile(largeFileName, largeFilePath); + expect(largeFileRes.data.url).not.toBe(null); + + // Test a normal Parse.File object + const smallFile = new Parse.File(smallFileName, [...smallFileRawData]); + const normalSmallFile = await smallFile.save(); + expect(normalSmallFile.url()).not.toBe(null); + + } catch (error) { + fail(error); + } finally { + done(); + } + }); + }); From 0a2c90470a1e1ecb6ab22e4623573d837c1dae03 Mon Sep 17 00:00:00 2001 From: Aidan Daly <74743624+dalyaidan1@users.noreply.github.com> Date: Sat, 17 Aug 2024 18:57:53 -0400 Subject: [PATCH 04/19] fix: don't depend on axios --- package-lock.json | 35 ----------------------------------- package.json | 1 - spec/FilesController.spec.js | 17 ++++++++++------- 3 files changed, 10 insertions(+), 43 deletions(-) diff --git a/package-lock.json b/package-lock.json index 7dca77e68d..b113386d8d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -76,7 +76,6 @@ "@semantic-release/release-notes-generator": "14.0.1", "all-node-versions": "12.1.0", "apollo-upload-client": "17.0.0", - "axios": "^1.7.4", "clean-jsdoc-theme": "4.3.0", "cross-env": "7.0.2", "deep-diff": "1.0.2", @@ -6515,17 +6514,6 @@ "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz", "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==" }, - "node_modules/axios": { - "version": "1.7.4", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.4.tgz", - "integrity": "sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw==", - "dev": true, - "dependencies": { - "follow-redirects": "^1.15.6", - "form-data": "^4.0.0", - "proxy-from-env": "^1.1.0" - } - }, "node_modules/babel-plugin-polyfill-corejs2": { "version": "0.4.11", "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.11.tgz", @@ -17772,12 +17760,6 @@ "node": ">= 0.10" } }, - "node_modules/proxy-from-env": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", - "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", - "dev": true - }, "node_modules/pseudomap": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", @@ -25827,17 +25809,6 @@ "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz", "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==" }, - "axios": { - "version": "1.7.4", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.4.tgz", - "integrity": "sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw==", - "dev": true, - 
"requires": { - "follow-redirects": "^1.15.6", - "form-data": "^4.0.0", - "proxy-from-env": "^1.1.0" - } - }, "babel-plugin-polyfill-corejs2": { "version": "0.4.11", "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.11.tgz", @@ -33958,12 +33929,6 @@ "ipaddr.js": "1.9.1" } }, - "proxy-from-env": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", - "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", - "dev": true - }, "pseudomap": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", diff --git a/package.json b/package.json index 76cd60758d..81a44ded8a 100644 --- a/package.json +++ b/package.json @@ -82,7 +82,6 @@ "@semantic-release/release-notes-generator": "14.0.1", "all-node-versions": "12.1.0", "apollo-upload-client": "17.0.0", - "axios": "^1.7.4", "clean-jsdoc-theme": "4.3.0", "cross-env": "7.0.2", "deep-diff": "1.0.2", diff --git a/spec/FilesController.spec.js b/spec/FilesController.spec.js index bb83414b7e..2e41c4c3af 100644 --- a/spec/FilesController.spec.js +++ b/spec/FilesController.spec.js @@ -167,8 +167,6 @@ describe('FilesController', () => { // add required modules const fs = require('fs'); const path = require('path'); - const axios = require('axios'); - const ONE_GB_BYTES = 1024 * 1024 * 1024; const V8_STRING_LIMIT_BYTES = 536_870_912; @@ -206,8 +204,13 @@ describe('FilesController', () => { const formData = new FormData(); formData.append('file', fs.createReadStream(filePath)); - // Use axios to send the file - return axios.post(url, formData, { headers }) + // Use fetch to send the file + return fetch(url, { + method: 'POST', + headers: headers, + body: formData + }) + .then(response => response.json()); }; // Make a exact 512MB file @@ -235,15 +238,15 @@ describe('FilesController', () => { try { // Test a small file const smallFileRes = await postFile(smallFileName, smallFilePath); - expect(smallFileRes.data.url).not.toBe(null); + expect(smallFileRes.url).not.toBe(null); // Test a file that is exactly 512MB const exactFileRes = await postFile(exactFileName, exactFilePath); - expect(exactFileRes.data.url).not.toBe(null); + expect(exactFileRes.url).not.toBe(null); // Test a large file const largeFileRes = await postFile(largeFileName, largeFilePath); - expect(largeFileRes.data.url).not.toBe(null); + expect(largeFileRes.url).not.toBe(null); // Test a normal Parse.File object const smallFile = new Parse.File(smallFileName, [...smallFileRawData]); From d2c89af9d341812165fde0e44dc84ea00a5dad22 Mon Sep 17 00:00:00 2001 From: Aidan Daly <74743624+dalyaidan1@users.noreply.github.com> Date: Sat, 17 Aug 2024 18:58:56 -0400 Subject: [PATCH 05/19] feat: clean up files --- spec/FilesController.spec.js | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/spec/FilesController.spec.js b/spec/FilesController.spec.js index 2e41c4c3af..25fe56460d 100644 --- a/spec/FilesController.spec.js +++ b/spec/FilesController.spec.js @@ -256,6 +256,10 @@ describe('FilesController', () => { } catch (error) { fail(error); } finally { + // Clean up the files + await fs.promises.unlink(exactFilePath); + await fs.promises.unlink(largeFilePath); + await fs.promises.unlink(smallFilePath); done(); } }); From 216c07864abb156e31992a9df72a0fed6143b5f7 Mon Sep 17 00:00:00 2001 From: Aidan Daly <74743624+dalyaidan1@users.noreply.github.com> Date: Sat, 17 Aug 2024 19:27:17 -0400 Subject: [PATCH 06/19] 
fix: file default data and adapter comment --- src/Adapters/Files/FilesAdapter.js | 2 +- src/Routers/FilesRouter.js | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/Adapters/Files/FilesAdapter.js b/src/Adapters/Files/FilesAdapter.js index afd06942e9..9d632c6c3c 100644 --- a/src/Adapters/Files/FilesAdapter.js +++ b/src/Adapters/Files/FilesAdapter.js @@ -26,7 +26,7 @@ export class FilesAdapter { /** Responsible for storing the file in order to be retrieved later by its filename * * @param {string} filename - the filename to save - * @param {*} data - the buffer of data from the file + * @param {*} data - the repersentation of data from the file as buffer or a blob * @param {string} contentType - the supposed contentType * @discussion the contentType can be undefined if the controller was not able to determine it * @param {object} options - (Optional) options to be passed to file adapter (S3 File Adapter Only) diff --git a/src/Routers/FilesRouter.js b/src/Routers/FilesRouter.js index 9147e0145f..baff9c48a8 100644 --- a/src/Routers/FilesRouter.js +++ b/src/Routers/FilesRouter.js @@ -227,7 +227,7 @@ export class FilesRouter { // if the ParseFile returned is type uri, download the file before saving it await addFileDataIfNeeded(fileObject.file); // update fileSize - let fileData = fileObject.file._data; + let fileData; // if the file is a blob, get the size from the blob if (typeof Blob !== 'undefined' && fileObject.file._source?.file instanceof Blob) { // get the size of the blob @@ -237,6 +237,7 @@ export class FilesRouter { } else { const bufferData = Buffer.from(fileObject.file._data, 'base64'); fileObject.fileSize = Buffer.byteLength(bufferData); + fileData = bufferData; } // prepare file options const fileOptions = { From a5594537d513b25b9b1cc3f46be7ffc8d8c249e1 Mon Sep 17 00:00:00 2001 From: Aidan Daly <74743624+dalyaidan1@users.noreply.github.com> Date: Sat, 17 Aug 2024 19:52:45 -0400 Subject: [PATCH 07/19] test: split test and move to better file --- spec/FilesController.spec.js | 101 ----------------------------------- spec/FilesRouter.spec.js | 94 ++++++++++++++++++++++++++++++++ 2 files changed, 94 insertions(+), 101 deletions(-) create mode 100644 spec/FilesRouter.spec.js diff --git a/spec/FilesController.spec.js b/spec/FilesController.spec.js index 25fe56460d..5115d5d548 100644 --- a/spec/FilesController.spec.js +++ b/spec/FilesController.spec.js @@ -163,105 +163,4 @@ describe('FilesController', () => { done(); }); - it('should allow Parse.File uploads over and under 512MB', async done => { - // add required modules - const fs = require('fs'); - const path = require('path'); - - const ONE_GB_BYTES = 1024 * 1024 * 1024; - const V8_STRING_LIMIT_BYTES = 536_870_912; - // Add 50 MB to test the limit - const LARGE_FILE_BTYES = V8_STRING_LIMIT_BYTES + 50 * 1024 * 1024; - const SMALL_FILE_BTYES = 1024 * 1024; - - reconfigureServer({ - // Increase the max upload size to 1GB - maxUploadSize: ONE_GB_BYTES, - // Change to an available port to avoid - // "Uncaught exception: Error: listen EADDRINUSE: address already in use 0.0.0.0:8378" - port: 8384, - }); - - - /** - * Quick helper function to upload the file to the server via the REST API - * We do this becuase creating a Parse.File object with a file over 512MB - * will try to the the Web API FileReader API, which will fail the test - * - * @param {string} fileName the name of the file - * @param {string} filePath the path to the file locally - * @returns - */ - const postFile = async (fileName, filePath) => { - 
const url = `${Parse.serverURL}/files/${fileName}`; - const headers = { - 'X-Parse-Application-Id': Parse.applicationId, - 'X-Parse-Master-Key': Parse.masterKey, - 'Content-Type': 'multipart/form-data', - }; - - // Create a FormData object to send the file - const formData = new FormData(); - formData.append('file', fs.createReadStream(filePath)); - - // Use fetch to send the file - return fetch(url, { - method: 'POST', - headers: headers, - body: formData - }) - .then(response => response.json()); - }; - - // Make a exact 512MB file - const exactFileRawData = Buffer.alloc(V8_STRING_LIMIT_BYTES); - const exactFileName = 'exactfile.txt'; - // Write the file to disk locally - await fs.promises.writeFile(exactFileName, exactFileRawData); - const exactFilePath = path.resolve(exactFileName); - - - // make a large file - const largeFileRawData = Buffer.alloc(LARGE_FILE_BTYES); - const largeFileName = 'bigfile.txt'; - // Write the file to disk locally - await fs.promises.writeFile(largeFileName, largeFileRawData); - const largeFilePath = path.resolve(largeFileName); - - // Make a 1MB file - const smallFileRawData = Buffer.alloc(SMALL_FILE_BTYES); - const smallFileName = 'smallfile.txt'; - // Write the file to disk locally - await fs.promises.writeFile(smallFileName, smallFileRawData); - const smallFilePath = path.resolve(smallFileName); - - try { - // Test a small file - const smallFileRes = await postFile(smallFileName, smallFilePath); - expect(smallFileRes.url).not.toBe(null); - - // Test a file that is exactly 512MB - const exactFileRes = await postFile(exactFileName, exactFilePath); - expect(exactFileRes.url).not.toBe(null); - - // Test a large file - const largeFileRes = await postFile(largeFileName, largeFilePath); - expect(largeFileRes.url).not.toBe(null); - - // Test a normal Parse.File object - const smallFile = new Parse.File(smallFileName, [...smallFileRawData]); - const normalSmallFile = await smallFile.save(); - expect(normalSmallFile.url()).not.toBe(null); - - } catch (error) { - fail(error); - } finally { - // Clean up the files - await fs.promises.unlink(exactFilePath); - await fs.promises.unlink(largeFilePath); - await fs.promises.unlink(smallFilePath); - done(); - } - }); - }); diff --git a/spec/FilesRouter.spec.js b/spec/FilesRouter.spec.js new file mode 100644 index 0000000000..484ab241e6 --- /dev/null +++ b/spec/FilesRouter.spec.js @@ -0,0 +1,94 @@ +const fs = require('fs'); +const path = require('path'); + +describe('FilesRouter', () => { + describe('File Uploads', () => { + const V8_STRING_LIMIT_BYTES = 536_870_912; + + let server; + + beforeAll(async () => { + server = await reconfigureServer({ + maxUploadSize: '1GB', + port: 8384, + }); + }); + + afterAll(async () => { + // clean up the server for resuse + if (server && server.close) { + await new Promise((resolve, reject) => { + server.close(err => { + if (err) return reject(err); + resolve(); + }); + }); + } + }); + + /** + * Quick helper function to upload the file to the server via the REST API + * We do this because creating a Parse.File object with a file over 512MB + * will try to use the Web API FileReader API, which will fail the test + * + * @param {string} fileName the name of the file + * @param {string} filePath the path to the file locally + * @returns + */ + const postFile = async (fileName, filePath) => { + const url = `${Parse.serverURL}/files/${fileName}`; + const headers = { + 'X-Parse-Application-Id': Parse.applicationId, + 'X-Parse-Master-Key': Parse.masterKey, + 'Content-Type': 'multipart/form-data', 
+ }; + + // Create a FormData object to send the file + const formData = new FormData(); + formData.append('file', fs.createReadStream(filePath)); + + // Send the request + const response = await fetch(url, { + method: 'POST', + headers, + body: formData, + }); + + return response; + }; + + + it('should allow Parse.File uploads under 512MB', async done => { + const filePath = path.join(__dirname, 'file.txt'); + fs.writeFileSync(filePath, Buffer.alloc(1024 * 1024)); + + const response = await postFile('file.txt', filePath); + expect(response.ok).toBe(true); + + fs.unlinkSync(filePath); + done(); + }); + + it('should allow Parse.File uploads exactly 512MB', async done => { + const filePath = path.join(__dirname, 'file.txt'); + fs.writeFileSync(filePath, Buffer.alloc(V8_STRING_LIMIT_BYTES)); + + const response = await postFile('file.txt', filePath); + expect(response.ok).toBe(true); + + fs.unlinkSync(filePath); + done(); + }); + + it('should allow Parse.File uploads over 512MB', async done => { + const filePath = path.join(__dirname, 'file.txt'); + fs.writeFileSync(filePath, Buffer.alloc(V8_STRING_LIMIT_BYTES + 50 * 1024 * 1024)); + + const response = await postFile('file.txt', filePath); + expect(response.ok).toBe(true); + + fs.unlinkSync(filePath); + done(); + }); + }); +}); From 39face2e08e208f0140d17f8a915ca47ae84d5c6 Mon Sep 17 00:00:00 2001 From: Aidan Daly <74743624+dalyaidan1@users.noreply.github.com> Date: Sat, 17 Aug 2024 20:40:16 -0400 Subject: [PATCH 08/19] fix: remove unwanted line --- spec/FilesController.spec.js | 1 - 1 file changed, 1 deletion(-) diff --git a/spec/FilesController.spec.js b/spec/FilesController.spec.js index 5115d5d548..a16451f3ef 100644 --- a/spec/FilesController.spec.js +++ b/spec/FilesController.spec.js @@ -162,5 +162,4 @@ describe('FilesController', () => { expect(gridFSAdapter.validateFilename(fileName)).not.toBe(null); done(); }); - }); From aedd9845cc1d2a3ac24641ec1566ace012e72cac Mon Sep 17 00:00:00 2001 From: Manuel <5673677+mtrezza@users.noreply.github.com> Date: Sun, 18 Aug 2024 15:43:15 +0200 Subject: [PATCH 09/19] fix typo Signed-off-by: Manuel <5673677+mtrezza@users.noreply.github.com> --- src/Adapters/Files/FilesAdapter.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Adapters/Files/FilesAdapter.js b/src/Adapters/Files/FilesAdapter.js index 9d632c6c3c..146a2a90b5 100644 --- a/src/Adapters/Files/FilesAdapter.js +++ b/src/Adapters/Files/FilesAdapter.js @@ -26,7 +26,7 @@ export class FilesAdapter { /** Responsible for storing the file in order to be retrieved later by its filename * * @param {string} filename - the filename to save - * @param {*} data - the repersentation of data from the file as buffer or a blob + * @param {*} data - the representation of data from the file as Buffer or Blob * @param {string} contentType - the supposed contentType * @discussion the contentType can be undefined if the controller was not able to determine it * @param {object} options - (Optional) options to be passed to file adapter (S3 File Adapter Only) From 0cf139160fa14a271d22514a35a003fab82d28ac Mon Sep 17 00:00:00 2001 From: Aidan Daly <74743624+dalyaidan1@users.noreply.github.com> Date: Sun, 18 Aug 2024 10:12:09 -0400 Subject: [PATCH 10/19] fix: remove unneeded server reconfigure --- spec/FilesRouter.spec.js | 21 --------------------- 1 file changed, 21 deletions(-) diff --git a/spec/FilesRouter.spec.js b/spec/FilesRouter.spec.js index 484ab241e6..fed1bc250f 100644 --- a/spec/FilesRouter.spec.js +++ b/spec/FilesRouter.spec.js @@ -5,27 
+5,6 @@ describe('FilesRouter', () => { describe('File Uploads', () => { const V8_STRING_LIMIT_BYTES = 536_870_912; - let server; - - beforeAll(async () => { - server = await reconfigureServer({ - maxUploadSize: '1GB', - port: 8384, - }); - }); - - afterAll(async () => { - // clean up the server for resuse - if (server && server.close) { - await new Promise((resolve, reject) => { - server.close(err => { - if (err) return reject(err); - resolve(); - }); - }); - } - }); - /** * Quick helper function to upload the file to the server via the REST API * We do this because creating a Parse.File object with a file over 512MB From 92b5ec59b1e181139b2a0ab13d0a807f12bf428d Mon Sep 17 00:00:00 2001 From: Aidan Daly <74743624+dalyaidan1@users.noreply.github.com> Date: Sun, 18 Aug 2024 17:20:00 -0400 Subject: [PATCH 11/19] fix: actually upload the full files --- spec/FilesRouter.spec.js | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/spec/FilesRouter.spec.js b/spec/FilesRouter.spec.js index fed1bc250f..9f6d8a905c 100644 --- a/spec/FilesRouter.spec.js +++ b/spec/FilesRouter.spec.js @@ -3,12 +3,20 @@ const path = require('path'); describe('FilesRouter', () => { describe('File Uploads', () => { + + beforeEach(async () => { + // Set the maxUploadSize to 1GB + await reconfigureServer({ + maxUploadSize: '1GB', + }); + }); + const V8_STRING_LIMIT_BYTES = 536_870_912; /** * Quick helper function to upload the file to the server via the REST API * We do this because creating a Parse.File object with a file over 512MB - * will try to use the Web API FileReader API, which will fail the test + * will try to use the Web FileReader API, which will fail the test * * @param {string} fileName the name of the file * @param {string} filePath the path to the file locally @@ -22,24 +30,22 @@ describe('FilesRouter', () => { 'Content-Type': 'multipart/form-data', }; - // Create a FormData object to send the file - const formData = new FormData(); - formData.append('file', fs.createReadStream(filePath)); + const fileStream = fs.createReadStream(filePath); // Send the request const response = await fetch(url, { method: 'POST', headers, - body: formData, + body: fileStream, + duplex: 'half' // This is required to send a stream }); return response; }; - it('should allow Parse.File uploads under 512MB', async done => { const filePath = path.join(__dirname, 'file.txt'); - fs.writeFileSync(filePath, Buffer.alloc(1024 * 1024)); + await fs.promises.writeFile(filePath, Buffer.alloc(1024 * 1024)); const response = await postFile('file.txt', filePath); expect(response.ok).toBe(true); @@ -50,7 +56,7 @@ describe('FilesRouter', () => { it('should allow Parse.File uploads exactly 512MB', async done => { const filePath = path.join(__dirname, 'file.txt'); - fs.writeFileSync(filePath, Buffer.alloc(V8_STRING_LIMIT_BYTES)); + await fs.promises.writeFile(filePath, Buffer.alloc(V8_STRING_LIMIT_BYTES)); const response = await postFile('file.txt', filePath); expect(response.ok).toBe(true); @@ -61,7 +67,7 @@ describe('FilesRouter', () => { it('should allow Parse.File uploads over 512MB', async done => { const filePath = path.join(__dirname, 'file.txt'); - fs.writeFileSync(filePath, Buffer.alloc(V8_STRING_LIMIT_BYTES + 50 * 1024 * 1024)); + await fs.promises.writeFile(filePath, Buffer.alloc(V8_STRING_LIMIT_BYTES + 50 * 1024 * 1024)); const response = await postFile('file.txt', filePath); expect(response.ok).toBe(true); From 5bd46b66f291e45a307bb8784b9948ee363fbb9d Mon Sep 17 00:00:00 2001 From: Aidan Daly 
<74743624+dalyaidan1@users.noreply.github.com> Date: Sun, 18 Aug 2024 22:13:05 -0400 Subject: [PATCH 12/19] fix: encryption chunking --- src/Adapters/Files/GridFSBucketAdapter.js | 68 +++++++++++------------ 1 file changed, 34 insertions(+), 34 deletions(-) diff --git a/src/Adapters/Files/GridFSBucketAdapter.js b/src/Adapters/Files/GridFSBucketAdapter.js index 3b75703bf3..31ce582429 100644 --- a/src/Adapters/Files/GridFSBucketAdapter.js +++ b/src/Adapters/Files/GridFSBucketAdapter.js @@ -68,40 +68,39 @@ export class GridFSBucketAdapter extends FilesAdapter { const stream = await bucket.openUploadStream(filename, { metadata: options.metadata, }); + try { + // when working with a Blob, it could be over the max size of a buffer, so we need to stream it + if (typeof Blob !== 'undefined' && data instanceof Blob) { + const reader = data.stream().getReader(); + const iv = crypto.randomBytes(16); + const cipher = this._encryptionKey !== null + ? crypto.createCipheriv(this._algorithm, this._encryptionKey, iv) + : null; - // when working with a Blob, it could be over the max size of a buffer, so we need to stream it - if (typeof Blob !== 'undefined' && data instanceof Blob) { - const reader = data.stream().getReader(); - const iv = crypto.randomBytes(16); - const cipher = this._encryptionKey !== null ? crypto.createCipheriv(this._algorithm, this._encryptionKey, iv) : null; + const processChunk = async ({ done, value }) => { + if (done) { + if (cipher) { + const finalChunk = Buffer.concat([cipher.final()]); + await stream.write(finalChunk); + await stream.write(iv); + await stream.write(cipher.getAuthTag()); + } + stream.end(); + return; + } - const processChunk = async ({ done, value }) => { - if (done) { if (cipher) { - const finalChunk = Buffer.concat([cipher.final(), iv, cipher.getAuthTag()]); - await stream.write(finalChunk); + value = cipher.update(value); } - stream.end(); - return; - } - if (cipher) { - value = cipher.update(value); - } + await stream.write(value); + reader.read().then(processChunk); + }; - await stream.write(value); - reader.read().then(processChunk); - }; - try { reader.read().then(processChunk); - } catch (err) { - return new Promise((resolve, reject) => { - return reject(err); - }); - } - } else { - if (this._encryptionKey !== null) { - try { + + } else { + if (this._encryptionKey !== null) { const iv = crypto.randomBytes(16); const cipher = crypto.createCipheriv(this._algorithm, this._encryptionKey, iv); const encryptedResult = Buffer.concat([ @@ -111,16 +110,17 @@ export class GridFSBucketAdapter extends FilesAdapter { cipher.getAuthTag(), ]); await stream.write(encryptedResult); - } catch (err) { - return new Promise((resolve, reject) => { - return reject(err); - }); + + } else { + await stream.write(data); } - } else { - await stream.write(data); + stream.end(); } + } catch (err) { + return new Promise((resolve, reject) => { + return reject(err); + }); } - stream.end(); return new Promise((resolve, reject) => { stream.on('finish', resolve); stream.on('error', reject); From 01cce43cd01cef63db9f549021695280fcf6b957 Mon Sep 17 00:00:00 2001 From: Aidan Daly <74743624+dalyaidan1@users.noreply.github.com> Date: Sun, 18 Aug 2024 22:13:22 -0400 Subject: [PATCH 13/19] test: adjust to test blobs too --- spec/GridFSBucketStorageAdapter.spec.js | 567 +++++++++++++----------- 1 file changed, 305 insertions(+), 262 deletions(-) diff --git a/spec/GridFSBucketStorageAdapter.spec.js b/spec/GridFSBucketStorageAdapter.spec.js index 7e9c84a59e..e863ad97f5 100644 --- 
a/spec/GridFSBucketStorageAdapter.spec.js +++ b/spec/GridFSBucketStorageAdapter.spec.js @@ -9,7 +9,33 @@ async function expectMissingFile(gfsAdapter, name) { await gfsAdapter.getFileData(name); fail('should have thrown'); } catch (e) { - expect(e.message).toEqual('FileNotFound: file myFileName was not found'); + expect(e.message).toEqual(`FileNotFound: file ${name} was not found`); + } +} + +const TYPES = ['string', 'blob']; + +const createData = (type, data) => { + switch (type) { + case 'string': + return data; + case 'blob': + return new Blob([data]); + default: + throw new Error(`Invalid type: ${type}`); + } +} + +const getDataAsString = async (type, data, encoding = 'utf8') => { + switch (type) { + case 'string': + return data.toString(encoding); + case 'blob': + return (typeof Blob !== 'undefined' && data instanceof Blob) + ? await data.text(encoding) : + data.toString(encoding); + default: + throw new Error(`Invalid type: ${type}`); } } @@ -43,13 +69,21 @@ describe_only_db('mongo')('GridFSBucket', () => { {}, '89E4AFF1-DFE4-4603-9574-BFA16BB446FD' ); - await expectMissingFile(encryptedAdapter, 'myFileName'); - const originalString = 'abcdefghi'; - await encryptedAdapter.createFile('myFileName', originalString); - const unencryptedResult = await unencryptedAdapter.getFileData('myFileName'); - expect(unencryptedResult.toString('utf8')).not.toBe(originalString); - const encryptedResult = await encryptedAdapter.getFileData('myFileName'); - expect(encryptedResult.toString('utf8')).toBe(originalString); + + for (const type of TYPES) { + const fileName = `myFileName-${type}`; + await expectMissingFile(encryptedAdapter, fileName); + const rawData = 'abcdefghi'; + + const originalData = createData(type, rawData); + await encryptedAdapter.createFile(fileName, originalData); + + const unencryptedResult = await unencryptedAdapter.getFileData(fileName); + expect(unencryptedResult.toString('utf8')).not.toBe(rawData); + + const encryptedResult = await encryptedAdapter.getFileData(fileName); + expect(encryptedResult.toString('utf8')).toBe(rawData); + } }); it('should rotate key of all unencrypted GridFS files to encrypted files', async () => { @@ -59,155 +93,144 @@ describe_only_db('mongo')('GridFSBucket', () => { {}, '89E4AFF1-DFE4-4603-9574-BFA16BB446FD' ); - const fileName1 = 'file1.txt'; - const data1 = 'hello world'; - const fileName2 = 'file2.txt'; - const data2 = 'hello new world'; - //Store unecrypted files - await unencryptedAdapter.createFile(fileName1, data1); - const unencryptedResult1 = await unencryptedAdapter.getFileData(fileName1); - expect(unencryptedResult1.toString('utf8')).toBe(data1); - await unencryptedAdapter.createFile(fileName2, data2); - const unencryptedResult2 = await unencryptedAdapter.getFileData(fileName2); - expect(unencryptedResult2.toString('utf8')).toBe(data2); - //Check if encrypted adapter can read data and make sure it's not the same as unEncrypted adapter - const { rotated, notRotated } = await encryptedAdapter.rotateEncryptionKey(); - expect(rotated.length).toEqual(2); - expect( - rotated.filter(function (value) { - return value === fileName1; - }).length - ).toEqual(1); - expect( - rotated.filter(function (value) { - return value === fileName2; - }).length - ).toEqual(1); - expect(notRotated.length).toEqual(0); - let result = await encryptedAdapter.getFileData(fileName1); - expect(result instanceof Buffer).toBe(true); - expect(result.toString('utf-8')).toEqual(data1); - const encryptedData1 = await unencryptedAdapter.getFileData(fileName1); - 
expect(encryptedData1.toString('utf-8')).not.toEqual(unencryptedResult1); - result = await encryptedAdapter.getFileData(fileName2); - expect(result instanceof Buffer).toBe(true); - expect(result.toString('utf-8')).toEqual(data2); - const encryptedData2 = await unencryptedAdapter.getFileData(fileName2); - expect(encryptedData2.toString('utf-8')).not.toEqual(unencryptedResult2); + + for (const type of TYPES) { + const rawData = [`hello world ${type}`, `hello new world ${type}`]; + const fileNames = ['file1.txt', 'file2.txt']; + + // Store unencrypted files and verify + for (let i = 0; i < fileNames.length; i++) { + const data = createData(type, rawData[i]); + await unencryptedAdapter.createFile(fileNames[i], data); + const unencryptedResult = await unencryptedAdapter.getFileData(fileNames[i]); + expect(await getDataAsString(type, unencryptedResult)).toBe(rawData[i]); + } + + // Rotate encryption key and verify + const { rotated, notRotated } = await encryptedAdapter.rotateEncryptionKey(); + expect(rotated.length).toEqual(fileNames.length); + fileNames.forEach(fileName => { + expect(rotated.includes(fileName)).toBe(true); + }); + expect(notRotated.length).toEqual(0); + + // clear files for next iteration + for (let i = 0; i < fileNames.length; i++) { + await unencryptedAdapter.deleteFile(fileNames[i]); + expectMissingFile(unencryptedAdapter, fileNames[i]); + } + } }); it('should rotate key of all old encrypted GridFS files to encrypted files', async () => { const oldEncryptionKey = 'oldKeyThatILoved'; const oldEncryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, oldEncryptionKey); const encryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, 'newKeyThatILove'); - const fileName1 = 'file1.txt'; - const data1 = 'hello world'; - const fileName2 = 'file2.txt'; - const data2 = 'hello new world'; - //Store unecrypted files - await oldEncryptedAdapter.createFile(fileName1, data1); - const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(fileName1); - expect(oldEncryptedResult1.toString('utf8')).toBe(data1); - await oldEncryptedAdapter.createFile(fileName2, data2); - const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(fileName2); - expect(oldEncryptedResult2.toString('utf8')).toBe(data2); - //Check if encrypted adapter can read data and make sure it's not the same as unEncrypted adapter - const { rotated, notRotated } = await encryptedAdapter.rotateEncryptionKey({ - oldKey: oldEncryptionKey, - }); - expect(rotated.length).toEqual(2); - expect( - rotated.filter(function (value) { - return value === fileName1; - }).length - ).toEqual(1); - expect( - rotated.filter(function (value) { - return value === fileName2; - }).length - ).toEqual(1); - expect(notRotated.length).toEqual(0); - let result = await encryptedAdapter.getFileData(fileName1); - expect(result instanceof Buffer).toBe(true); - expect(result.toString('utf-8')).toEqual(data1); - let decryptionError1; - let encryptedData1; - try { - encryptedData1 = await oldEncryptedAdapter.getFileData(fileName1); - } catch (err) { - decryptionError1 = err; - } - expect(decryptionError1).toMatch('Error'); - expect(encryptedData1).toBeUndefined(); - result = await encryptedAdapter.getFileData(fileName2); - expect(result instanceof Buffer).toBe(true); - expect(result.toString('utf-8')).toEqual(data2); - let decryptionError2; - let encryptedData2; - try { - encryptedData2 = await oldEncryptedAdapter.getFileData(fileName2); - } catch (err) { - decryptionError2 = err; + + for (const type of TYPES) { + const rawData = [`hello 
world ${type}`, `hello new world ${type}`]; + const fileNames = ['file1.txt', 'file2.txt']; + + //Store unecrypted files + for (let i = 0; i < fileNames.length; i++) { + await oldEncryptedAdapter.createFile(fileNames[i], createData(type, rawData[i])); + const oldEncryptedResult = await oldEncryptedAdapter.getFileData(fileNames[i]); + expect(await getDataAsString(type, oldEncryptedResult)).toBe(rawData[i]); + } + + //Check if encrypted adapter can read data and make sure it's not the same as unEncrypted adapter + const { rotated, notRotated } = await encryptedAdapter.rotateEncryptionKey({ + oldKey: oldEncryptionKey, + }); + expect(rotated.length).toEqual(2); + expect( + rotated.filter(function (value) { + return value === fileNames[0]; + }).length + ).toEqual(1); + expect( + rotated.filter(function (value) { + return value === fileNames[1]; + }).length + ).toEqual(1); + expect(notRotated.length).toEqual(0); + + // make sure old encrypted files can't be decrypted + for (let i = 0; i < fileNames.length; i++) { + const result = await encryptedAdapter.getFileData(fileNames[i]); + expect(result instanceof Buffer).toBe(true); + expect(await getDataAsString(type, result)).toEqual(rawData[i]); + + let decryptionError; + let encryptedData; + try { + encryptedData = await oldEncryptedAdapter.getFileData(fileNames[i]); + } catch (err) { + decryptionError = err; + } + expect(decryptionError).toMatch('Error'); + expect(encryptedData).toBeUndefined(); + + // clear files for next iteration + await oldEncryptedAdapter.deleteFile(fileNames[i]); + expectMissingFile(oldEncryptedAdapter, fileNames[i]); + } } - expect(decryptionError2).toMatch('Error'); - expect(encryptedData2).toBeUndefined(); }); it('should rotate key of all old encrypted GridFS files to unencrypted files', async () => { const oldEncryptionKey = 'oldKeyThatILoved'; const oldEncryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, oldEncryptionKey); const unEncryptedAdapter = new GridFSBucketAdapter(databaseURI); - const fileName1 = 'file1.txt'; - const data1 = 'hello world'; - const fileName2 = 'file2.txt'; - const data2 = 'hello new world'; - //Store unecrypted files - await oldEncryptedAdapter.createFile(fileName1, data1); - const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(fileName1); - expect(oldEncryptedResult1.toString('utf8')).toBe(data1); - await oldEncryptedAdapter.createFile(fileName2, data2); - const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(fileName2); - expect(oldEncryptedResult2.toString('utf8')).toBe(data2); - //Check if unEncrypted adapter can read data and make sure it's not the same as oldEncrypted adapter - const { rotated, notRotated } = await unEncryptedAdapter.rotateEncryptionKey({ - oldKey: oldEncryptionKey, - }); - expect(rotated.length).toEqual(2); - expect( - rotated.filter(function (value) { - return value === fileName1; - }).length - ).toEqual(1); - expect( - rotated.filter(function (value) { - return value === fileName2; - }).length - ).toEqual(1); - expect(notRotated.length).toEqual(0); - let result = await unEncryptedAdapter.getFileData(fileName1); - expect(result instanceof Buffer).toBe(true); - expect(result.toString('utf-8')).toEqual(data1); - let decryptionError1; - let encryptedData1; - try { - encryptedData1 = await oldEncryptedAdapter.getFileData(fileName1); - } catch (err) { - decryptionError1 = err; - } - expect(decryptionError1).toMatch('Error'); - expect(encryptedData1).toBeUndefined(); - result = await unEncryptedAdapter.getFileData(fileName2); - expect(result 
instanceof Buffer).toBe(true); - expect(result.toString('utf-8')).toEqual(data2); - let decryptionError2; - let encryptedData2; - try { - encryptedData2 = await oldEncryptedAdapter.getFileData(fileName2); - } catch (err) { - decryptionError2 = err; + for (const type of TYPES) { + const rawData = [`hello world ${type}`, `hello new world ${type}`]; + const fileNames = ['file1.txt', 'file2.txt']; + + //Store unecrypted files + for (let i = 0; i < fileNames.length; i++) { + await oldEncryptedAdapter.createFile(fileNames[i], createData(type, rawData[i])); + const oldEncryptedResult = await oldEncryptedAdapter.getFileData(fileNames[i]); + expect(await getDataAsString(type, oldEncryptedResult)).toBe(rawData[i]); + } + + //Check if unEncrypted adapter can read data and make sure it's not the same as oldEncrypted adapter + const { rotated, notRotated } = await unEncryptedAdapter.rotateEncryptionKey({ + oldKey: oldEncryptionKey, + }); + expect(rotated.length).toEqual(2); + expect( + rotated.filter(function (value) { + return value === fileNames[0]; + }).length + ).toEqual(1); + expect( + rotated.filter(function (value) { + return value === fileNames[1]; + }).length + ).toEqual(1); + expect(notRotated.length).toEqual(0); + + // make sure the files can be decrypted by the new adapter + for (let i = 0; i < fileNames.length; i++) { + const result = await unEncryptedAdapter.getFileData(fileNames[i]); + expect(result instanceof Buffer).toBe(true); + expect(await getDataAsString(type, result)).toEqual(rawData[i]); + let decryptionError; + let encryptedData; + try { + encryptedData = await oldEncryptedAdapter.getFileData(fileNames[i]); + } catch (err) { + decryptionError = err; + } + expect(decryptionError).toMatch('Error'); + expect(encryptedData).toBeUndefined(); + + // clear files for next iteration + await oldEncryptedAdapter.deleteFile(fileNames[i]); + expectMissingFile(oldEncryptedAdapter, fileNames[i]); + } + } - expect(decryptionError2).toMatch('Error'); - expect(encryptedData2).toBeUndefined(); }); it('should only encrypt specified fileNames', async () => { @@ -215,67 +238,70 @@ describe_only_db('mongo')('GridFSBucket', () => { const oldEncryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, oldEncryptionKey); const encryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, 'newKeyThatILove'); const unEncryptedAdapter = new GridFSBucketAdapter(databaseURI); - const fileName1 = 'file1.txt'; - const data1 = 'hello world'; - const fileName2 = 'file2.txt'; - const data2 = 'hello new world'; - //Store unecrypted files - await oldEncryptedAdapter.createFile(fileName1, data1); - const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(fileName1); - expect(oldEncryptedResult1.toString('utf8')).toBe(data1); - await oldEncryptedAdapter.createFile(fileName2, data2); - const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(fileName2); - expect(oldEncryptedResult2.toString('utf8')).toBe(data2); - //Inject unecrypted file to see if causes an issue - const fileName3 = 'file3.txt'; - const data3 = 'hello past world'; - await unEncryptedAdapter.createFile(fileName3, data3, 'text/utf8'); - //Check if encrypted adapter can read data and make sure it's not the same as unEncrypted adapter - const { rotated, notRotated } = await encryptedAdapter.rotateEncryptionKey({ - oldKey: oldEncryptionKey, - fileNames: [fileName1, fileName2], - }); - expect(rotated.length).toEqual(2); - expect( - rotated.filter(function (value) { - return value === fileName1; - }).length - ).toEqual(1); - expect( - 
rotated.filter(function (value) { - return value === fileName2; - }).length - ).toEqual(1); - expect(notRotated.length).toEqual(0); - expect( - rotated.filter(function (value) { - return value === fileName3; - }).length - ).toEqual(0); - let result = await encryptedAdapter.getFileData(fileName1); - expect(result instanceof Buffer).toBe(true); - expect(result.toString('utf-8')).toEqual(data1); - let decryptionError1; - let encryptedData1; - try { - encryptedData1 = await oldEncryptedAdapter.getFileData(fileName1); - } catch (err) { - decryptionError1 = err; - } - expect(decryptionError1).toMatch('Error'); - expect(encryptedData1).toBeUndefined(); - result = await encryptedAdapter.getFileData(fileName2); - expect(result instanceof Buffer).toBe(true); - expect(result.toString('utf-8')).toEqual(data2); - let decryptionError2; - let encryptedData2; - try { - encryptedData2 = await oldEncryptedAdapter.getFileData(fileName2); - } catch (err) { - decryptionError2 = err; + + for (const type of TYPES) { + const rawData = [`hello world ${type}`, `hello new world ${type}`]; + const fileNames = ['file1.txt', 'file2.txt']; + + //Store unecrypted files + for (let i = 0; i < fileNames.length; i++) { + await oldEncryptedAdapter.createFile(fileNames[i], createData(type, rawData[i])); + const oldEncryptedResult = await oldEncryptedAdapter.getFileData(fileNames[i]); + expect(await getDataAsString(type, oldEncryptedResult)).toBe(rawData[i]); + } + + + //Inject unecrypted file to see if causes an issue + const fileName3 = 'file3.txt'; + const data3 = 'hello past world'; + await unEncryptedAdapter.createFile(fileName3, data3, 'text/utf8'); + + //Check if encrypted adapter can read data and make sure it's not the same as unEncrypted adapter + const { rotated, notRotated } = await encryptedAdapter.rotateEncryptionKey({ + oldKey: oldEncryptionKey, + fileNames, + }); + expect(rotated.length).toEqual(2); + expect( + rotated.filter(function (value) { + return value === fileNames[0]; + }).length + ).toEqual(1); + expect( + rotated.filter(function (value) { + return value === fileNames[1]; + }).length + ).toEqual(1); + expect(notRotated.length).toEqual(0); + expect( + rotated.filter(function (value) { + return value === fileName3; + }).length + ).toEqual(0); + + for (let i = 0; i < fileNames.length; i++) { + const result = await encryptedAdapter.getFileData(fileNames[i]); + expect(result instanceof Buffer).toBe(true); + expect(await getDataAsString(type, result)).toEqual(rawData[i]); + let decryptionError; + let encryptedData; + try { + encryptedData = await oldEncryptedAdapter.getFileData(fileNames[i]); + } catch (err) { + decryptionError = err; + } + expect(decryptionError).toMatch('Error'); + expect(encryptedData).toBeUndefined(); + + // clear files for next iteration + await oldEncryptedAdapter.deleteFile(fileNames[i]); + expectMissingFile(oldEncryptedAdapter, fileNames[i]); + } + + // clear file3 for next iteration + await unEncryptedAdapter.deleteFile(fileName3); + expectMissingFile(unEncryptedAdapter, fileName3); } - expect(decryptionError2).toMatch('Error'); - expect(encryptedData2).toBeUndefined(); }); it("should return fileNames of those it can't encrypt with the new key", async () => { @@ -283,66 +309,69 @@ describe_only_db('mongo')('GridFSBucket', () => { const oldEncryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, oldEncryptionKey); const encryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, 'newKeyThatILove'); const unEncryptedAdapter = new GridFSBucketAdapter(databaseURI); - const fileName1 
= 'file1.txt'; - const data1 = 'hello world'; - const fileName2 = 'file2.txt'; - const data2 = 'hello new world'; - //Store unecrypted files - await oldEncryptedAdapter.createFile(fileName1, data1); - const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(fileName1); - expect(oldEncryptedResult1.toString('utf8')).toBe(data1); - await oldEncryptedAdapter.createFile(fileName2, data2); - const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(fileName2); - expect(oldEncryptedResult2.toString('utf8')).toBe(data2); - //Inject unecrypted file to see if causes an issue - const fileName3 = 'file3.txt'; - const data3 = 'hello past world'; - await unEncryptedAdapter.createFile(fileName3, data3, 'text/utf8'); - //Check if encrypted adapter can read data and make sure it's not the same as unEncrypted adapter - const { rotated, notRotated } = await encryptedAdapter.rotateEncryptionKey({ - oldKey: oldEncryptionKey, - }); - expect(rotated.length).toEqual(2); - expect( - rotated.filter(function (value) { - return value === fileName1; - }).length - ).toEqual(1); - expect( - rotated.filter(function (value) { - return value === fileName2; - }).length - ).toEqual(1); - expect(notRotated.length).toEqual(1); - expect( - notRotated.filter(function (value) { - return value === fileName3; - }).length - ).toEqual(1); - let result = await encryptedAdapter.getFileData(fileName1); - expect(result instanceof Buffer).toBe(true); - expect(result.toString('utf-8')).toEqual(data1); - let decryptionError1; - let encryptedData1; - try { - encryptedData1 = await oldEncryptedAdapter.getFileData(fileName1); - } catch (err) { - decryptionError1 = err; - } - expect(decryptionError1).toMatch('Error'); - expect(encryptedData1).toBeUndefined(); - result = await encryptedAdapter.getFileData(fileName2); - expect(result instanceof Buffer).toBe(true); - expect(result.toString('utf-8')).toEqual(data2); - let decryptionError2; - let encryptedData2; - try { - encryptedData2 = await oldEncryptedAdapter.getFileData(fileName2); - } catch (err) { - decryptionError2 = err; + + for (const type of TYPES) { + const rawData = [`hello world ${type}`, `hello new world ${type}`]; + const fileNames = ['file1.txt', 'file2.txt']; + + //Store unecrypted files + for (let i = 0; i < fileNames.length; i++) { + await oldEncryptedAdapter.createFile(fileNames[i], createData(type, rawData[i])); + const oldEncryptedResult = await oldEncryptedAdapter.getFileData(fileNames[i]); + expect(await getDataAsString(type, oldEncryptedResult)).toBe(rawData[i]); + } + + //Inject unecrypted file to see if causes an issue + const fileName3 = 'file3.txt'; + const data3 = 'hello past world'; + await unEncryptedAdapter.createFile(fileName3, data3, 'text/utf8'); + + //Check if encrypted adapter can read data and make sure it's not the same as unEncrypted adapter + const { rotated, notRotated } = await encryptedAdapter.rotateEncryptionKey({ + oldKey: oldEncryptionKey, + }); + expect(rotated.length).toEqual(2); + expect( + rotated.filter(function (value) { + return value === fileNames[0]; + }).length + ).toEqual(1); + expect( + rotated.filter(function (value) { + return value === fileNames[1]; + }).length + ).toEqual(1); + expect(notRotated.length).toEqual(1); + expect( + notRotated.filter(function (value) { + return value === fileName3; + }).length + ).toEqual(1); + + // make sure the files can be decrypted by the new adapter + for (let i = 0; i < fileNames.length; i++) { + const result = await encryptedAdapter.getFileData(fileNames[i]); + expect(result 
instanceof Buffer).toBe(true); + expect(await getDataAsString(type, result)).toEqual(rawData[i]); + let decryptionError; + let encryptedData; + try { + encryptedData = await oldEncryptedAdapter.getFileData(fileNames[i]); + } catch (err) { + decryptionError = err; + } + expect(decryptionError).toMatch('Error'); + expect(encryptedData).toBeUndefined(); + + // clear files for next iteration + await oldEncryptedAdapter.deleteFile(fileNames[i]); + expectMissingFile(oldEncryptedAdapter, fileNames[i]); + + } + // clear file3 for next iteration + await unEncryptedAdapter.deleteFile(fileName3); + expectMissingFile(unEncryptedAdapter, fileName3); } - expect(decryptionError2).toMatch('Error'); - expect(encryptedData2).toBeUndefined(); }); it('should save metadata', async () => { @@ -360,6 +389,20 @@ describe_only_db('mongo')('GridFSBucket', () => { // Empty json for file not found gfsMetadata = await gfsAdapter.getMetadata('myUnknownFile'); expect(gfsMetadata).toEqual({}); + + // now do the same for blob + const originalBlob = new Blob([originalString]); + await gfsAdapter.createFile('myFileNameBlob', originalBlob, null, { + metadata, + }); + const gfsResultBlob = await gfsAdapter.getFileData('myFileNameBlob'); + expect(await getDataAsString('blob', gfsResultBlob)).toBe(originalString); + gfsMetadata = await gfsAdapter.getMetadata('myFileNameBlob'); + expect(gfsMetadata.metadata).toEqual(metadata); + + // Empty json for file not found + gfsMetadata = await gfsAdapter.getMetadata('myUnknownFileBlob'); + expect(gfsMetadata).toEqual({}); }); it('should save metadata with file', async () => { From f2968287430cdc1563b2770c52df5f7efd17837e Mon Sep 17 00:00:00 2001 From: Aidan Daly <74743624+dalyaidan1@users.noreply.github.com> Date: Mon, 19 Aug 2024 12:24:11 -0400 Subject: [PATCH 14/19] use pipes --- src/Adapters/Files/GridFSBucketAdapter.js | 72 ++++++++++++++--------- 1 file changed, 44 insertions(+), 28 deletions(-) diff --git a/src/Adapters/Files/GridFSBucketAdapter.js b/src/Adapters/Files/GridFSBucketAdapter.js index 31ce582429..963244cebd 100644 --- a/src/Adapters/Files/GridFSBucketAdapter.js +++ b/src/Adapters/Files/GridFSBucketAdapter.js @@ -11,6 +11,10 @@ import { MongoClient, GridFSBucket, Db } from 'mongodb'; import { FilesAdapter, validateFilename } from './FilesAdapter'; import defaults from '../../defaults'; const crypto = require('crypto'); +const { Transform } = require('stream'); +const { Readable } = require('stream'); +const { ReadableStream: WebReadable } = require('stream/web'); + export class GridFSBucketAdapter extends FilesAdapter { _databaseURI: string; @@ -68,41 +72,53 @@ export class GridFSBucketAdapter extends FilesAdapter { const stream = await bucket.openUploadStream(filename, { metadata: options.metadata, }); + + const iv = crypto.randomBytes(16); + const cipher = this._encryptionKey !== null + ? crypto.createCipheriv(this._algorithm, this._encryptionKey, iv) + : null; + try { // when working with a Blob, it could be over the max size of a buffer, so we need to stream it if (typeof Blob !== 'undefined' && data instanceof Blob) { - const reader = data.stream().getReader(); - const iv = crypto.randomBytes(16); - const cipher = this._encryptionKey !== null - ? 
From f2968287430cdc1563b2770c52df5f7efd17837e Mon Sep 17 00:00:00 2001
From: Aidan Daly <74743624+dalyaidan1@users.noreply.github.com>
Date: Mon, 19 Aug 2024 12:24:11 -0400
Subject: [PATCH 14/19] use pipes

---
 src/Adapters/Files/GridFSBucketAdapter.js | 72 ++++++++++++++---------
 1 file changed, 44 insertions(+), 28 deletions(-)

diff --git a/src/Adapters/Files/GridFSBucketAdapter.js b/src/Adapters/Files/GridFSBucketAdapter.js
index 31ce582429..963244cebd 100644
--- a/src/Adapters/Files/GridFSBucketAdapter.js
+++ b/src/Adapters/Files/GridFSBucketAdapter.js
@@ -11,6 +11,10 @@ import { MongoClient, GridFSBucket, Db } from 'mongodb';
 import { FilesAdapter, validateFilename } from './FilesAdapter';
 import defaults from '../../defaults';
 const crypto = require('crypto');
+const { Transform } = require('stream');
+const { Readable } = require('stream');
+const { ReadableStream: WebReadable } = require('stream/web');
+
 
 export class GridFSBucketAdapter extends FilesAdapter {
   _databaseURI: string;
@@ -68,41 +72,53 @@ export class GridFSBucketAdapter extends FilesAdapter {
     const stream = await bucket.openUploadStream(filename, {
       metadata: options.metadata,
     });
+
+    const iv = crypto.randomBytes(16);
+    const cipher = this._encryptionKey !== null
+      ? crypto.createCipheriv(this._algorithm, this._encryptionKey, iv)
+      : null;
+
     try {
       // when working with a Blob, it could be over the max size of a buffer, so we need to stream it
       if (typeof Blob !== 'undefined' && data instanceof Blob) {
-        const reader = data.stream().getReader();
-        const iv = crypto.randomBytes(16);
-        const cipher = this._encryptionKey !== null
-          ? crypto.createCipheriv(this._algorithm, this._encryptionKey, iv)
-          : null;
+        let readableStream = data.stream();
 
-        const processChunk = async ({ done, value }) => {
-          if (done) {
-            if (cipher) {
-              const finalChunk = Buffer.concat([cipher.final()]);
-              await stream.write(finalChunk);
-              await stream.write(iv);
-              await stream.write(cipher.getAuthTag());
-            }
-            stream.end();
-            return;
-          }
-
-          if (cipher) {
-            value = cipher.update(value);
-          }
+        // may come in as a web stream, so we need to convert it to a node strea,
+        if (readableStream instanceof WebReadable) {
+          readableStream = Readable.fromWeb(readableStream);
+        }
 
-          await stream.write(value);
-          reader.read().then(processChunk);
-        };
-
-        reader.read().then(processChunk);
-
+        const createCipherTransform = (cipher) => {
+          return new Transform({
+            transform(chunk, encoding, callback) {
+              try {
+                const encryptedChunk = cipher.update(chunk);
+                callback(null, encryptedChunk);
+              } catch (err) {
+                callback(err);
+              }
+            },
+            // at the end we need to push the final cipher text, iv, and auth tag
+            flush(callback) {
+              try {
+                this.push(cipher.final());
+                this.push(iv);
+                this.push(cipher.getAuthTag());
+                callback();
+              } catch (err) {
+                callback(err);
+              }
+            }
+          });
+        };
+        if (cipher) {
+          const cipherTransform = createCipherTransform(cipher);
+          await readableStream.pipe(cipherTransform).pipe(stream);
+        } else {
+          await readableStream.pipe(stream);
+        }
       } else {
-        if (this._encryptionKey !== null) {
-          const iv = crypto.randomBytes(16);
-          const cipher = crypto.createCipheriv(this._algorithm, this._encryptionKey, iv);
+        if (cipher) {
           const encryptedResult = Buffer.concat([
             cipher.update(data),
             cipher.final(),

From 73bbdb034d7240976940bb74e2fd84c9a79fb167 Mon Sep 17 00:00:00 2001
From: Aidan Daly <74743624+dalyaidan1@users.noreply.github.com>
Date: Mon, 19 Aug 2024 18:10:28 -0400
Subject: [PATCH 15/19] fix: remove web readable

---
 src/Adapters/Files/GridFSBucketAdapter.js | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/src/Adapters/Files/GridFSBucketAdapter.js b/src/Adapters/Files/GridFSBucketAdapter.js
index 963244cebd..bfb6252383 100644
--- a/src/Adapters/Files/GridFSBucketAdapter.js
+++ b/src/Adapters/Files/GridFSBucketAdapter.js
@@ -11,10 +11,7 @@ import { MongoClient, GridFSBucket, Db } from 'mongodb';
 import { FilesAdapter, validateFilename } from './FilesAdapter';
 import defaults from '../../defaults';
 const crypto = require('crypto');
-const { Transform } = require('stream');
-const { Readable } = require('stream');
-const { ReadableStream: WebReadable } = require('stream/web');
-
+const { Transform, Readable } = require('stream');
 
 export class GridFSBucketAdapter extends FilesAdapter {
   _databaseURI: string;
@@ -84,7 +81,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
       let readableStream = data.stream();
 
       // may come in as a web stream, so we need to convert it to a node strea,
-      if (readableStream instanceof WebReadable) {
+      if (readableStream instanceof ReadableStream) {
         readableStream = Readable.fromWeb(readableStream);
       }
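Two details of the patch above are worth spelling out. `Blob#stream()` returns a WHATWG `ReadableStream`, which has no `.pipe()` method, so the adapter bridges it into a Node.js stream with `Readable.fromWeb()` (available since Node.js 17); and once the `stream/web` import is dropped, the bare `instanceof ReadableStream` check still works because `ReadableStream` has been a global since Node.js 18. A small standalone illustration, assuming a Node.js 18+ runtime:

```js
const { Readable } = require('stream');

// Blob#stream() yields a web ReadableStream; Readable.fromWeb() wraps it in a
// Node.js Readable so the usual .pipe()/'data' APIs apply.
const blob = new Blob(['hello world']);
const nodeStream = Readable.fromWeb(blob.stream());

nodeStream.on('data', chunk => console.log('chunk bytes:', chunk.length));
nodeStream.on('end', () => console.log('done'));
```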
From d43a8ad213fe4d346f54b7cb60c94f339c5b252e Mon Sep 17 00:00:00 2001
From: Aidan Daly <74743624+dalyaidan1@users.noreply.github.com>
Date: Tue, 20 Aug 2024 12:32:32 -0400
Subject: [PATCH 16/19] fix: test errors and clean up pipes

---
 spec/GridFSBucketStorageAdapter.spec.js   |  78 ++++++++++++++
 src/Adapters/Files/GridFSBucketAdapter.js | 123 +++++++++++-----------
 2 files changed, 142 insertions(+), 59 deletions(-)

diff --git a/spec/GridFSBucketStorageAdapter.spec.js b/spec/GridFSBucketStorageAdapter.spec.js
index e863ad97f5..d17dec8681 100644
--- a/spec/GridFSBucketStorageAdapter.spec.js
+++ b/spec/GridFSBucketStorageAdapter.spec.js
@@ -484,6 +484,84 @@ describe_only_db('mongo')('GridFSBucket', () => {
     await expectMissingFile(gfsAdapter, 'myFileName');
   });
 
+  it('should reject if there is an error in cipher update', async () => {
+    const gfsAdapter = new GridFSBucketAdapter(databaseURI, {}, 'encryptionKey');
+    const error = new Error('Cipher error');
+    const crypto = require('crypto');
+
+    // Mock the createCipheriv method to return a mocked cipher object
+    spyOn(crypto, 'createCipheriv').and.returnValue({
+      // eslint-disable-next-line no-unused-vars
+      update: (_chunk) => {
+        throw error;
+      },
+      final: () => {
+        return Buffer.from('encryptedData');
+      },
+    });
+
+    for (const type of TYPES) {
+      try {
+        await gfsAdapter.createFile(`testfile-${type}.txt`, createData(type, 'testdata'));
+        fail('Expected error not thrown');
+      } catch (err) {
+        expect(err).toEqual(jasmine.any(Error));
+        expect(err.message).toBe(error.message);
+      }
+    }
+    // Restore the original method
+    crypto.createCipheriv.and.callThrough();
+  });
+
+
+  it('should reject if there is an error in cipher final', async () => {
+    const gfsAdapter = new GridFSBucketAdapter(databaseURI, {}, 'encryptionKey');
+    const error = new Error('Cipher error');
+    const crypto = require('crypto');
+
+    // Mock the createCipheriv method to return a mocked cipher object
+    spyOn(crypto, 'createCipheriv').and.returnValue({
+      // eslint-disable-next-line no-unused-vars
+      update: (_chunk) => {
+        return Buffer.from('encryptedData');
+      },
+      final: () => {
+        throw error;
+      },
+    });
+
+    for (const type of TYPES) {
+      try {
+        await gfsAdapter.createFile(`testfile-${type}.txt`, createData(type, 'testdata'));
+        fail('Expected error not thrown');
+      } catch (err) {
+        expect(err).toEqual(jasmine.any(Error));
+        expect(err.message).toBe(error.message);
+      }
+    }
+    // Restore the original method
+    crypto.createCipheriv.and.callThrough();
+  });
+
+  it('should handle error in createFile when _getBucket is called', async () => {
+    const error = new Error('Error in createFile');
+    const gfsAdapter = new GridFSBucketAdapter(databaseURI);
+    spyOn(gfsAdapter, '_getBucket').and.throwError(error);
+
+    for (const type of TYPES) {
+      try {
+        await gfsAdapter.createFile(`testfile-${type}.txt`, createData(type, 'testdata'));
+        fail('Expected error not thrown');
+      } catch (err) {
+        expect(err).toEqual(jasmine.any(Error));
+        expect(err.message).toBe(error.message);
+      }
+    }
+    // Restore the original method
+    gfsAdapter._getBucket.and.callThrough();
+  });
+
   it('handleShutdown, close connection', async () => {
     const databaseURI = 'mongodb://localhost:27017/parse';
     const gfsAdapter = new GridFSBucketAdapter(databaseURI);
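The two mocked-cipher tests above exercise both failure paths: for string and Buffer input the throw happens synchronously inside the adapter's try block, while for Blob input it surfaces through the cipher `Transform`. A minimal repro of the streaming case — an illustration, not patch code: when `transform()` hands its callback an error, the stream emits `'error'`, which the `.on('error', reject)` handlers in the adapter half of this patch (below) turn into a rejected promise.

```js
const { Transform } = require('stream');

// A transform that always fails, standing in for cipher.update() throwing.
const failing = new Transform({
  transform(chunk, encoding, callback) {
    callback(new Error('Cipher error')); // signalling an error here emits 'error'
  },
});

failing.on('error', err => console.error('propagated:', err.message));
failing.write('some chunk'); // triggers the failing transform
```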
diff --git a/src/Adapters/Files/GridFSBucketAdapter.js b/src/Adapters/Files/GridFSBucketAdapter.js
index bfb6252383..a60e2de849 100644
--- a/src/Adapters/Files/GridFSBucketAdapter.js
+++ b/src/Adapters/Files/GridFSBucketAdapter.js
@@ -11,6 +11,7 @@ import { MongoClient, GridFSBucket, Db } from 'mongodb';
 import { FilesAdapter, validateFilename } from './FilesAdapter';
 import defaults from '../../defaults';
 const crypto = require('crypto');
+const util = require('util');
 const { Transform, Readable } = require('stream');
 
 export class GridFSBucketAdapter extends FilesAdapter {
@@ -70,73 +71,77 @@ export class GridFSBucketAdapter extends FilesAdapter {
       metadata: options.metadata,
     });
 
-    const iv = crypto.randomBytes(16);
-    const cipher = this._encryptionKey !== null
-      ? crypto.createCipheriv(this._algorithm, this._encryptionKey, iv)
-      : null;
+    return new Promise((resolve, reject) => {
+      try {
+        const iv = crypto.randomBytes(16);
+        const cipher = this._encryptionKey !== null
+          ? crypto.createCipheriv(this._algorithm, this._encryptionKey, iv)
+          : null;
 
-    try {
-      // when working with a Blob, it could be over the max size of a buffer, so we need to stream it
-      if (typeof Blob !== 'undefined' && data instanceof Blob) {
-        let readableStream = data.stream();
+        // when working with a Blob, it could be over the max size of a buffer, so we need to stream it
+        if (typeof Blob !== 'undefined' && data instanceof Blob) {
+          let readableStream = data.stream();
 
-        // may come in as a web stream, so we need to convert it to a node strea,
-        if (readableStream instanceof ReadableStream) {
-          readableStream = Readable.fromWeb(readableStream);
-        }
+          // may come in as a web stream, so we need to convert it to a node strea,
+          if (readableStream instanceof ReadableStream) {
+            readableStream = Readable.fromWeb(readableStream);
+          }
 
-        const createCipherTransform = (cipher) => {
-          return new Transform({
-            transform(chunk, encoding, callback) {
-              try {
-                const encryptedChunk = cipher.update(chunk);
-                callback(null, encryptedChunk);
-              } catch (err) {
-                callback(err);
+          if (cipher) {
+            // we need to stream the data through the cipher
+            const cipherTransform = new Transform({
+              transform(chunk, encoding, callback) {
+                try {
+                  const encryptedChunk = cipher.update(chunk);
+                  callback(null, encryptedChunk);
+                } catch (err) {
+                  callback(err);
+                }
+              },
+              // at the end we need to push the final cipher text, iv, and auth tag
+              flush(callback) {
+                try {
+                  this.push(cipher.final());
+                  this.push(iv);
+                  this.push(cipher.getAuthTag());
+                  callback();
+                } catch (err) {
+                  callback(err);
+                }
               }
-            },
-            // at the end we need to push the final cipher text, iv, and auth tag
-            flush(callback) {
-              try {
-                this.push(cipher.final());
-                this.push(iv);
-                this.push(cipher.getAuthTag());
-                callback();
-              } catch (err) {
-                callback(err);
-              }
-            }
-          });
-        };
-        if (cipher) {
-          const cipherTransform = createCipherTransform(cipher);
-          await readableStream.pipe(cipherTransform).pipe(stream);
+            });
+            // pipe the stream through the cipher and then to the gridfs stream
+            readableStream
+              .pipe(cipherTransform)
+              .on('error', reject)
+              .pipe(stream)
+              .on('error', reject);
+          } else {
+            // if we don't have a cipher, we can just pipe the stream to the gridfs stream
+            readableStream.pipe(stream)
+              .on('error', reject);
+          }
         } else {
-          await readableStream.pipe(stream);
-        }
-      } else {
-        if (cipher) {
-          const encryptedResult = Buffer.concat([
-            cipher.update(data),
-            cipher.final(),
-            iv,
-            cipher.getAuthTag(),
-          ]);
-          await stream.write(encryptedResult);
+          if (cipher) {
+            const encryptedResult = Buffer.concat([
+              cipher.update(data),
+              cipher.final(),
+              iv,
+              cipher.getAuthTag(),
+            ]);
+            stream.write(encryptedResult);
 
-        } else {
-          await stream.write(data);
+          } else {
+            stream.write(data);
+          }
+          stream.end();
         }
-        stream.end();
+
+        stream.on('finish', resolve);
+        stream.on('error', reject);
+      } catch (e) {
+        reject(e);
       }
-
-      stream.on('finish', resolve);
-      stream.on('error', reject);
-    } catch (err) {
-      return new Promise((resolve, reject) => {
-        return reject(err);
-      });
-    }
-    return new Promise((resolve, reject) => {
-      stream.on('finish', resolve);
-      stream.on('error', reject);
     });
   }
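With this patch both encryption paths produce the same on-disk layout: ciphertext, then the 16-byte IV, then the 16-byte GCM auth tag. A hedged sketch of how a reader can undo that layout — the adapter's `getFileData()` is not shown in this excerpt, but it must do something along these lines:

```js
const crypto = require('crypto');

// Split `encrypted` into ciphertext || iv (16 bytes) || authTag (16 bytes),
// mirroring what createFile() appends above, then decrypt and verify the tag.
function decryptFileData(encrypted, algorithm, encryptionKey) {
  const authTag = encrypted.subarray(encrypted.length - 16);
  const iv = encrypted.subarray(encrypted.length - 32, encrypted.length - 16);
  const ciphertext = encrypted.subarray(0, encrypted.length - 32);
  const decipher = crypto.createDecipheriv(algorithm, encryptionKey, iv);
  decipher.setAuthTag(authTag);
  return Buffer.concat([decipher.update(ciphertext), decipher.final()]);
}
```

One design note on the piping: `.pipe()` does not forward errors between stages, which is why the patch attaches an `'error'` listener to every stage; `stream.pipeline()` would do that bookkeeping automatically, at the cost of restructuring the control flow here.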
From f817c20da60c814eaeb0599592d109a6c66157ea Mon Sep 17 00:00:00 2001
From: Aidan Daly <74743624+dalyaidan1@users.noreply.github.com>
Date: Tue, 20 Aug 2024 12:41:57 -0400
Subject: [PATCH 17/19] fix: remove unused dep

---
 src/Adapters/Files/GridFSBucketAdapter.js | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/Adapters/Files/GridFSBucketAdapter.js b/src/Adapters/Files/GridFSBucketAdapter.js
index a60e2de849..f1274456bb 100644
--- a/src/Adapters/Files/GridFSBucketAdapter.js
+++ b/src/Adapters/Files/GridFSBucketAdapter.js
@@ -11,7 +11,6 @@ import { MongoClient, GridFSBucket, Db } from 'mongodb';
 import { FilesAdapter, validateFilename } from './FilesAdapter';
 import defaults from '../../defaults';
 const crypto = require('crypto');
-const util = require('util');
 const { Transform, Readable } = require('stream');
 
 export class GridFSBucketAdapter extends FilesAdapter {

From aabae8b7866f15beb2b6ebe761f5a0c80bf4726d Mon Sep 17 00:00:00 2001
From: Aidan Daly <74743624+dalyaidan1@users.noreply.github.com>
Date: Tue, 20 Aug 2024 12:59:08 -0400
Subject: [PATCH 18/19] fix: comment typo

---
 src/Adapters/Files/GridFSBucketAdapter.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/Adapters/Files/GridFSBucketAdapter.js b/src/Adapters/Files/GridFSBucketAdapter.js
index f1274456bb..ff95e6f6ce 100644
--- a/src/Adapters/Files/GridFSBucketAdapter.js
+++ b/src/Adapters/Files/GridFSBucketAdapter.js
@@ -81,7 +81,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
         if (typeof Blob !== 'undefined' && data instanceof Blob) {
           let readableStream = data.stream();
 
-          // may come in as a web stream, so we need to convert it to a node strea,
+          // may come in as a web stream, so we need to convert it to a node stream
           if (readableStream instanceof ReadableStream) {
             readableStream = Readable.fromWeb(readableStream);
           }

From 07098f99c848e0bb2cf408070565c8d204d6f16c Mon Sep 17 00:00:00 2001
From: Aidan Daly <74743624+dalyaidan1@users.noreply.github.com>
Date: Tue, 20 Aug 2024 13:04:25 -0400
Subject: [PATCH 19/19] feat: ternary iv

---
 src/Adapters/Files/GridFSBucketAdapter.js | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/src/Adapters/Files/GridFSBucketAdapter.js b/src/Adapters/Files/GridFSBucketAdapter.js
index ff95e6f6ce..9d61480f71 100644
--- a/src/Adapters/Files/GridFSBucketAdapter.js
+++ b/src/Adapters/Files/GridFSBucketAdapter.js
@@ -72,8 +72,11 @@ export class GridFSBucketAdapter extends FilesAdapter {
 
     return new Promise((resolve, reject) => {
       try {
-        const iv = crypto.randomBytes(16);
-        const cipher = this._encryptionKey !== null
+        const iv = this._encryptionKey !== null
+          ? crypto.randomBytes(16)
+          : null;
+
+        const cipher = this._encryptionKey !== null && iv
           ? crypto.createCipheriv(this._algorithm, this._encryptionKey, iv)
           : null;
 
@@ -86,7 +89,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
           readableStream = Readable.fromWeb(readableStream);
         }
 
-        if (cipher) {
+        if (cipher && iv) {
           // we need to stream the data through the cipher
           const cipherTransform = new Transform({
@@ -121,7 +124,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
-        if (cipher) {
+        if (cipher && iv) {
           const encryptedResult = Buffer.concat([
             cipher.update(data),
            cipher.final(),