Skip to content

Commit bf7a132

Browse files
authored
docs(NODE-4775): improve documentation on gridfs start and end options (#3460)
1 parent 528449d commit bf7a132

File tree

2 files changed

+33
-43
lines changed

2 files changed

+33
-43
lines changed

src/gridfs/download.ts

+7-2
Original file line numberDiff line numberDiff line change
@@ -19,9 +19,14 @@ import type { GridFSChunk } from './upload';
1919
export interface GridFSBucketReadStreamOptions {
2020
sort?: Sort;
2121
skip?: number;
22-
/** 0-based offset in bytes to start streaming from */
22+
/**
23+
* 0-indexed non-negative byte offset from the beginning of the file
24+
*/
2325
start?: number;
24-
/** 0-based offset in bytes to stop streaming before */
26+
/**
27+
* 0-indexed non-negative byte offset to the end of the file contents
28+
* to be returned by the stream. `end` is non-inclusive
29+
*/
2530
end?: number;
2631
}
2732

test/integration/gridfs/gridfs_stream.test.js

+26-41
Original file line numberDiff line numberDiff line change
@@ -1002,57 +1002,42 @@ describe('GridFS Stream', function () {
10021002
});
10031003
});
10041004

1005-
/**
1006-
* Provide start and end parameters for file download to skip ahead x bytes and limit the total amount of bytes read to n
1007-
*
1008-
* @example-class GridFSBucket
1009-
* @example-method openDownloadStream
1010-
*/
1011-
it('NODE-829 start/end options for openDownloadStream where start-end is < size of chunk', {
1005+
it('should return only end - start bytes when the end is within a chunk', {
10121006
metadata: { requires: { topology: ['single'] } },
1013-
10141007
test(done) {
1015-
const configuration = this.configuration;
1016-
const client = configuration.newClient(configuration.writeConcernMax(), { maxPoolSize: 1 });
1017-
client.connect(function (err, client) {
1018-
const db = client.db(configuration.db);
1019-
const bucket = new GridFSBucket(db, {
1020-
bucketName: 'gridfsdownload',
1021-
chunkSizeBytes: 20
1022-
});
1008+
// Provide start and end parameters for file download to skip
1009+
// ahead x bytes and limit the total amount of bytes read to n
1010+
const db = client.db();
10231011

1024-
const readStream = fs.createReadStream('./LICENSE.md');
1025-
const uploadStream = bucket.openUploadStream('teststart.dat');
1012+
const start = 1;
1013+
const end = 6;
10261014

1027-
uploadStream.once('finish', function () {
1028-
const downloadStream = bucket
1029-
.openDownloadStreamByName('teststart.dat', { start: 1 })
1030-
.end(6);
1015+
const bucket = new GridFSBucket(db, {
1016+
bucketName: 'gridfsdownload',
1017+
chunkSizeBytes: 20
1018+
});
10311019

1032-
downloadStream.on('error', function (error) {
1033-
expect(error).to.not.exist;
1034-
});
1020+
const readStream = fs.createReadStream('./LICENSE.md');
1021+
const uploadStream = bucket.openUploadStream('teststart.dat');
10351022

1036-
let gotData = 0;
1037-
let str = '';
1038-
downloadStream.on('data', function (data) {
1039-
++gotData;
1040-
str += data.toString('utf8');
1041-
});
1023+
uploadStream.once('finish', function () {
1024+
const downloadStream = bucket.openDownloadStreamByName('teststart.dat', { start }).end(end);
10421025

1043-
downloadStream.on('end', function () {
1044-
// Depending on different versions of node, we may get
1045-
// different amounts of 'data' events. node 0.10 gives 2,
1046-
// node >= 0.12 gives 3. Either is correct, but we just
1047-
// care that we got between 1 and 3, and got the right result
1048-
expect(gotData >= 1 && gotData <= 3).to.equal(true);
1049-
expect(str).to.equal('pache');
1050-
client.close(done);
1051-
});
1026+
downloadStream.on('error', done);
1027+
1028+
let str = '';
1029+
downloadStream.on('data', function (data) {
1030+
str += data.toString('utf8');
10521031
});
10531032

1054-
readStream.pipe(uploadStream);
1033+
downloadStream.on('end', function () {
1034+
expect(str).to.equal('pache');
1035+
expect(str).to.have.lengthOf(end - start);
1036+
client.close(done);
1037+
});
10551038
});
1039+
1040+
readStream.pipe(uploadStream);
10561041
}
10571042
});
10581043

0 commit comments

Comments (0)