Skip to content
Merged
Show file tree
Hide file tree
Changes from 8 commits
Commits
Show all changes
20 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 25 additions & 0 deletions spec/GridFSBucketStorageAdapter.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -476,6 +476,31 @@ describe_only_db('mongo')('GridFSBucket', () => {
}
});

// The GridFS adapter natively accepts Readable streams in createFile().
it('reports supportsStreaming as true', () => {
  const adapter = new GridFSBucketAdapter(databaseURI);
  expect(adapter.supportsStreaming).toBe(true);
});

// A stream write must round-trip the same bytes as a Buffer write would.
it('creates file from Readable stream', async () => {
  const { Readable } = require('stream');
  const adapter = new GridFSBucketAdapter(databaseURI);
  const payload = Buffer.from('streamed file content');
  await adapter.createFile('streamFile.txt', Readable.from(payload));
  const stored = await adapter.getFileData('streamFile.txt');
  expect(stored.toString('utf8')).toBe('streamed file content');
});

// When an encryption key is configured the adapter buffers the stream
// internally; the decrypted read-back must still match the input bytes.
it('creates encrypted file from Readable stream (buffers for encryption)', async () => {
  const { Readable } = require('stream');
  const adapter = new GridFSBucketAdapter(databaseURI, {}, 'test-encryption-key');
  const payload = Buffer.from('encrypted streamed content');
  await adapter.createFile('encryptedStream.txt', Readable.from(payload));
  const stored = await adapter.getFileData('encryptedStream.txt');
  expect(stored.toString('utf8')).toBe('encrypted streamed content');
});

describe('MongoDB Client Metadata', () => {
it('should not pass metadata to MongoClient by default', async () => {
const gfsAdapter = new GridFSBucketAdapter(databaseURI);
Expand Down
261 changes: 261 additions & 0 deletions spec/ParseFile.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -1878,4 +1878,265 @@ describe('Parse.File testing', () => {
).toBeRejectedWith(jasmine.objectContaining({ status: 400 }));
});
});

describe('streaming binary uploads', () => {
describe('createSizeLimitedStream', () => {
const { createSizeLimitedStream } = require('../lib/Routers/FilesRouter');
const { Readable } = require('stream');

it('passes data through when under limit', async () => {
  const source = Readable.from(Buffer.from('hello'));
  const limited = createSizeLimitedStream(source, 100);
  const received = [];
  for await (const piece of limited) {
    received.push(piece);
  }
  expect(Buffer.concat(received).toString()).toBe('hello');
});

it('destroys stream when data exceeds limit', async () => {
  const source = Readable.from(Buffer.from('hello world, this is too long'));
  const limited = createSizeLimitedStream(source, 5);
  const received = [];
  try {
    // Iteration should abort with an error once the limit is crossed.
    for await (const piece of limited) {
      received.push(piece);
    }
    fail('should have thrown');
  } catch (err) {
    expect(err.message).toContain('exceeds');
  }
});

it('passes through when Content-Length is within limit', async () => {
  const source = Readable.from(Buffer.from('hi'));
  // Mimic an http.IncomingMessage by attaching a headers object to the stream.
  source.headers = { 'content-length': '2' };
  const limited = createSizeLimitedStream(source, 100);
  const received = [];
  for await (const piece of limited) {
    received.push(piece);
  }
  expect(Buffer.concat(received).toString()).toBe('hi');
});
Comment thread
coderabbitai[bot] marked this conversation as resolved.
Outdated
});

// End-to-end: a raw binary POST with X-Parse-Upload-Mode: stream is stored
// and readable back at the returned URL.
it('streams binary upload with X-Parse-Upload-Mode header', async () => {
  const headers = {
    'Content-Type': 'application/octet-stream',
    'X-Parse-Application-Id': 'test',
    'X-Parse-REST-API-Key': 'rest',
    'X-Parse-Upload-Mode': 'stream',
  };
  let response;
  try {
    response = await request({
      method: 'POST',
      headers: headers,
      url: 'http://localhost:8378/1/files/stream-test.txt',
      body: 'streaming file content',
    });
  } catch (e) {
    fail('Request failed: status=' + e.status + ' text=' + e.text + ' data=' + JSON.stringify(e.data));
    return;
  }
  const b = response.data;
  // Dot escaped so the regex cannot match an arbitrary character.
  expect(b.name).toMatch(/_stream-test\.txt$/);
  expect(b.url).toMatch(/stream-test\.txt$/);
  const getResponse = await request({ url: b.url });
  expect(getResponse.text).toEqual('streaming file content');
});

// Without the streaming header the legacy buffered path is used and still works.
it('uses buffered path without X-Parse-Upload-Mode header', async () => {
  const headers = {
    'Content-Type': 'application/octet-stream',
    'X-Parse-Application-Id': 'test',
    'X-Parse-REST-API-Key': 'rest',
  };
  const response = await request({
    method: 'POST',
    headers: headers,
    url: 'http://localhost:8378/1/files/buffered-test.txt',
    body: 'buffered file content',
  });
  const b = response.data;
  // Dot escaped so the regex cannot match an arbitrary character.
  expect(b.name).toMatch(/_buffered-test\.txt$/);
  const getResponse = await request({ url: b.url });
  expect(getResponse.text).toEqual('buffered file content');
});

it('rejects streaming upload exceeding size limit', async () => {
  await reconfigureServer({ maxUploadSize: '10b' });
  try {
    await request({
      method: 'POST',
      headers: {
        'Content-Type': 'application/octet-stream',
        'X-Parse-Application-Id': 'test',
        'X-Parse-REST-API-Key': 'rest',
        'X-Parse-Upload-Mode': 'stream',
      },
      url: 'http://localhost:8378/1/files/big-file.txt',
      body: 'this content is definitely longer than 10 bytes',
    });
    fail('should have thrown');
  } catch (response) {
    expect(response.data.code).toBe(Parse.Error.FILE_SAVE_ERROR);
    expect(response.data.error).toContain('exceeds');
  }
});

it('rejects streaming upload with Content-Length exceeding limit', async () => {
  await reconfigureServer({ maxUploadSize: '10b' });
  try {
    // The declared Content-Length alone should trigger rejection,
    // before any body bytes are counted.
    await request({
      method: 'POST',
      headers: {
        'Content-Type': 'application/octet-stream',
        'X-Parse-Application-Id': 'test',
        'X-Parse-REST-API-Key': 'rest',
        'X-Parse-Upload-Mode': 'stream',
        'Content-Length': '99999',
      },
      url: 'http://localhost:8378/1/files/big-file.txt',
      body: 'hi',
    });
    fail('should have thrown');
  } catch (response) {
    expect(response.data.code).toBe(Parse.Error.FILE_SAVE_ERROR);
    expect(response.data.error).toContain('exceeds');
  }
});

// beforeSave must see request.stream === true and no buffered file data,
// and metadata/tag mutations must not break the upload.
it('fires beforeSave trigger with request.stream = true on streaming upload', async () => {
  let receivedStream;
  let receivedData;
  Parse.Cloud.beforeSave(Parse.File, (request) => {
    receivedStream = request.stream;
    receivedData = request.file._data;
    request.file.addMetadata('source', 'stream');
    request.file.addTag('env', 'test');
  });
  const headers = {
    'Content-Type': 'application/octet-stream',
    'X-Parse-Application-Id': 'test',
    'X-Parse-REST-API-Key': 'rest',
    'X-Parse-Upload-Mode': 'stream',
  };
  const response = await request({
    method: 'POST',
    headers: headers,
    url: 'http://localhost:8378/1/files/trigger-test.txt',
    body: 'trigger test content',
  });
  // Dot escaped so the regex cannot match an arbitrary character.
  expect(response.data.name).toMatch(/_trigger-test\.txt$/);
  expect(receivedStream).toBe(true);
  expect(receivedData).toBeFalsy();
  const getResponse = await request({ url: response.data.url });
  expect(getResponse.text).toEqual('trigger test content');
});

it('rejects streaming upload when beforeSave trigger throws', async () => {
  Parse.Cloud.beforeSave(Parse.File, () => {
    throw new Parse.Error(Parse.Error.SCRIPT_FAILED, 'Upload rejected');
  });
  try {
    await request({
      method: 'POST',
      headers: {
        'Content-Type': 'application/octet-stream',
        'X-Parse-Application-Id': 'test',
        'X-Parse-REST-API-Key': 'rest',
        'X-Parse-Upload-Mode': 'stream',
      },
      url: 'http://localhost:8378/1/files/rejected.txt',
      body: 'rejected content',
    });
    fail('should have thrown');
  } catch (response) {
    expect(response.data.code).toBe(Parse.Error.SCRIPT_FAILED);
    expect(response.data.error).toBe('Upload rejected');
  }
});

// Returning an already-saved file (one that carries a URL) from beforeSave
// short-circuits the upload: the request body must not be persisted.
// (Also removes review-tool chrome that was pasted into the middle of this block.)
it('skips save when beforeSave trigger returns Parse.File with URL on streaming upload', async () => {
  Parse.Cloud.beforeSave(Parse.File, () => {
    const file = new Parse.File('existing.txt');
    file._url = 'http://example.com/existing.txt';
    return file;
  });
  const headers = {
    'Content-Type': 'application/octet-stream',
    'X-Parse-Application-Id': 'test',
    'X-Parse-REST-API-Key': 'rest',
    'X-Parse-Upload-Mode': 'stream',
  };
  const response = await request({
    method: 'POST',
    headers: headers,
    url: 'http://localhost:8378/1/files/skip-save.txt',
    body: 'should not be saved',
  });
  expect(response.data.url).toBe('http://example.com/existing.txt');
  expect(response.data.name).toBe('existing.txt');
});

// afterSave must see request.stream === true, no buffered data, and the
// final URL of the stored file.
it('fires afterSave trigger with request.stream = true on streaming upload', async () => {
  let afterSaveStream;
  let afterSaveData;
  let afterSaveUrl;
  Parse.Cloud.afterSave(Parse.File, (request) => {
    afterSaveStream = request.stream;
    afterSaveData = request.file._data;
    afterSaveUrl = request.file._url;
  });
  const headers = {
    'Content-Type': 'application/octet-stream',
    'X-Parse-Application-Id': 'test',
    'X-Parse-REST-API-Key': 'rest',
    'X-Parse-Upload-Mode': 'stream',
  };
  const response = await request({
    method: 'POST',
    headers: headers,
    url: 'http://localhost:8378/1/files/after-save.txt',
    body: 'after save content',
  });
  // Dot escaped so the regex cannot match an arbitrary character.
  expect(response.data.name).toMatch(/_after-save\.txt$/);
  expect(afterSaveStream).toBe(true);
  expect(afterSaveData).toBeFalsy();
  expect(afterSaveUrl).toBeTruthy();
});
Comment thread
coderabbitai[bot] marked this conversation as resolved.

// The base adapter class opts out of streaming unless a subclass overrides it.
it('verifies FilesAdapter default supportsStreaming is false', () => {
  const { FilesAdapter } = require('../lib/Adapters/Files/FilesAdapter');
  expect(new FilesAdapter().supportsStreaming).toBe(false);
});

// The pre-streaming upload format (credentials and base64 payload wrapped in
// a JSON body) must keep working unchanged.
it('legacy JSON-wrapped upload still works', async () => {
  await reconfigureServer({
    fileUpload: {
      enableForPublic: true,
      fileExtensions: ['*'],
    },
  });
  const response = await request({
    method: 'POST',
    url: 'http://localhost:8378/1/files/legacy.txt',
    body: JSON.stringify({
      _ApplicationId: 'test',
      _JavaScriptKey: 'test',
      _ContentType: 'text/plain',
      base64: Buffer.from('legacy content').toString('base64'),
    }),
  });
  const b = response.data;
  // Dot escaped so the regex cannot match an arbitrary character.
  expect(b.name).toMatch(/_legacy\.txt$/);
  const getResponse = await request({ url: b.url });
  expect(getResponse.text).toEqual('legacy content');
});
});
});
63 changes: 63 additions & 0 deletions spec/Utils.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -175,6 +175,69 @@ describe('Utils', () => {
});
});

describe('parseSizeToBytes', () => {
  // Named unit constants instead of repeating 1024 multiplications inline.
  const KB = 1024;
  const MB = 1024 * KB;
  const GB = 1024 * MB;

  it('parses megabyte string', () => {
    expect(Utils.parseSizeToBytes('20mb')).toBe(20 * MB);
  });

  it('parses Mb string (case-insensitive)', () => {
    expect(Utils.parseSizeToBytes('20Mb')).toBe(20 * MB);
  });

  it('parses kilobyte string', () => {
    expect(Utils.parseSizeToBytes('512kb')).toBe(512 * KB);
  });

  it('parses gigabyte string', () => {
    expect(Utils.parseSizeToBytes('1gb')).toBe(GB);
  });

  it('parses bytes suffix', () => {
    expect(Utils.parseSizeToBytes('100b')).toBe(100);
  });

  it('parses plain number as bytes', () => {
    expect(Utils.parseSizeToBytes(1048576)).toBe(1048576);
  });

  it('parses numeric string as bytes', () => {
    expect(Utils.parseSizeToBytes('1048576')).toBe(1048576);
  });

  it('parses decimal value and floors result', () => {
    expect(Utils.parseSizeToBytes('1.5mb')).toBe(Math.floor(1.5 * MB));
  });

  it('trims whitespace around value', () => {
    expect(Utils.parseSizeToBytes(' 20mb ')).toBe(20 * MB);
  });

  it('allows whitespace between number and unit', () => {
    expect(Utils.parseSizeToBytes('20 mb')).toBe(20 * MB);
  });

  it('parses zero', () => {
    expect(Utils.parseSizeToBytes('0')).toBe(0);
    expect(Utils.parseSizeToBytes(0)).toBe(0);
  });

  it('throws on invalid string', () => {
    expect(() => Utils.parseSizeToBytes('abc')).toThrow();
  });

  it('throws on negative value', () => {
    expect(() => Utils.parseSizeToBytes('-5mb')).toThrow();
  });

  it('throws on empty string', () => {
    expect(() => Utils.parseSizeToBytes('')).toThrow();
  });

  it('throws on unsupported unit', () => {
    expect(() => Utils.parseSizeToBytes('10tb')).toThrow();
  });
});

describe('createSanitizedError', () => {
it('should return "Permission denied" when enableSanitizedErrorResponse is true', () => {
const config = { enableSanitizedErrorResponse: true };
Expand Down
12 changes: 11 additions & 1 deletion src/Adapters/Files/FilesAdapter.js
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ export class FilesAdapter {
/** Responsible for storing the file in order to be retrieved later by its filename
*
* @param {string} filename - the filename to save
* @param {*} data - the buffer of data from the file
* @param {Buffer|import('stream').Readable} data - the file data as a Buffer, or a Readable stream if the adapter supports streaming (see supportsStreaming)
* @param {string} contentType - the supposed contentType
* @discussion the contentType can be undefined if the controller was not able to determine it
* @param {object} options - (Optional) options to be passed to file adapter (S3 File Adapter Only)
Expand All @@ -38,6 +38,16 @@ export class FilesAdapter {
*/
createFile(filename: string, data, contentType: string, options: Object): Promise {}

/** Whether this adapter supports receiving Readable streams in createFile().
 * If false (default), the server buffers incoming streams into a Buffer
 * before calling createFile(). Subclasses that can consume a Readable
 * stream directly should override this getter to return true.
 *
 * @return {boolean} true if createFile() accepts Readable streams
 */
get supportsStreaming() {
  return false;
}

/** Responsible for deleting the specified file
*
* @param {string} filename - the filename to delete
Expand Down
Loading
Loading