From f88ecc6b69a36e41ea29a40c7b0f3ee7aaa0456a Mon Sep 17 00:00:00 2001
From: Levko Kravets
Date: Thu, 21 Mar 2024 17:43:09 +0200
Subject: [PATCH] Update tests

Signed-off-by: Levko Kravets
---
 .../unit/result/ArrowResultConverter.test.js  | 40 ++++++++++++++++---
 tests/unit/result/ArrowResultHandler.test.js  | 38 +++++++++++-------
 .../result/CloudFetchResultHandler.test.js    | 31 ++++++++++++-------
 3 files changed, 78 insertions(+), 31 deletions(-)

diff --git a/tests/unit/result/ArrowResultConverter.test.js b/tests/unit/result/ArrowResultConverter.test.js
index 2cf4949f..f24c35a0 100644
--- a/tests/unit/result/ArrowResultConverter.test.js
+++ b/tests/unit/result/ArrowResultConverter.test.js
@@ -1,6 +1,7 @@
 const { expect } = require('chai');
 const fs = require('fs');
 const path = require('path');
+const Int64 = require('node-int64');
 const ArrowResultConverter = require('../../../dist/result/ArrowResultConverter').default;
 const ResultsProviderMock = require('./fixtures/ResultsProviderMock');
 
@@ -53,17 +54,30 @@ const arrowBatchAllNulls = [
   fs.readFileSync(path.join(__dirname, 'fixtures/dataAllNulls.arrow')),
 ];
 
-describe('ArrowResultHandler', () => {
+const emptyItem = {
+  batches: [],
+  rowCount: 0,
+};
+
+describe('ArrowResultConverter', () => {
   it('should convert data', async () => {
     const context = {};
-    const rowSetProvider = new ResultsProviderMock([sampleArrowBatch]);
+    const rowSetProvider = new ResultsProviderMock(
+      [
+        {
+          batches: sampleArrowBatch,
+          rowCount: 1,
+        },
+      ],
+      emptyItem,
+    );
     const result = new ArrowResultConverter(context, rowSetProvider, { schema: sampleThriftSchema });
     expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([{ 1: 1 }]);
   });
 
   it('should return empty array if no data to process', async () => {
     const context = {};
-    const rowSetProvider = new ResultsProviderMock([], []);
+    const rowSetProvider = new ResultsProviderMock([], emptyItem);
     const result = new ArrowResultConverter(context, rowSetProvider, { schema: sampleThriftSchema });
     expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
     expect(await result.hasMore()).to.be.false;
@@ -71,7 +85,15 @@ describe('ArrowResultHandler', () => {
 
   it('should return empty array if no schema available', async () => {
     const context = {};
-    const rowSetProvider = new ResultsProviderMock([sampleArrowBatch]);
+    const rowSetProvider = new ResultsProviderMock(
+      [
+        {
+          batches: sampleArrowBatch,
+          rowCount: 1,
+        },
+      ],
+      emptyItem,
+    );
     const result = new ArrowResultConverter(context, rowSetProvider, {});
     expect(await result.hasMore()).to.be.false;
     expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
@@ -79,7 +101,15 @@ describe('ArrowResultHandler', () => {
 
   it('should detect nulls', async () => {
     const context = {};
-    const rowSetProvider = new ResultsProviderMock([arrowBatchAllNulls]);
+    const rowSetProvider = new ResultsProviderMock(
+      [
+        {
+          batches: arrowBatchAllNulls,
+          rowCount: 1,
+        },
+      ],
+      emptyItem,
+    );
     const result = new ArrowResultConverter(context, rowSetProvider, { schema: thriftSchemaAllNulls });
     expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([
       {
diff --git a/tests/unit/result/ArrowResultHandler.test.js b/tests/unit/result/ArrowResultHandler.test.js
index 74bf37c3..72547c53 100644
--- a/tests/unit/result/ArrowResultHandler.test.js
+++ b/tests/unit/result/ArrowResultHandler.test.js
@@ -1,4 +1,5 @@
 const { expect } = require('chai');
+const Int64 = require('node-int64');
 const LZ4 = require('lz4');
 const ArrowResultHandler = require('../../../dist/result/ArrowResultHandler').default;
 const ResultsProviderMock = require('./fixtures/ResultsProviderMock');
@@ -21,16 +22,16 @@ const sampleArrowBatch = {
     0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0,
     0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
   ]),
-  rowCount: 1,
+  rowCount: new Int64(1),
 };
 
 const sampleRowSet1 = {
-  startRowOffset: 0,
+  startRowOffset: new Int64(0),
   arrowBatches: [sampleArrowBatch],
 };
 
 const sampleRowSet1LZ4Compressed = {
-  startRowOffset: 0,
+  startRowOffset: new Int64(0),
   arrowBatches: sampleRowSet1.arrowBatches.map((item) => ({
     ...item,
     batch: LZ4.encode(item.batch),
@@ -38,21 +39,21 @@ const sampleRowSet1LZ4Compressed = {
 };
 
 const sampleRowSet2 = {
-  startRowOffset: 0,
+  startRowOffset: new Int64(0),
   arrowBatches: undefined,
 };
 
 const sampleRowSet3 = {
-  startRowOffset: 0,
+  startRowOffset: new Int64(0),
   arrowBatches: [],
 };
 
 const sampleRowSet4 = {
-  startRowOffset: 0,
+  startRowOffset: new Int64(0),
   arrowBatches: [
     {
       batch: undefined,
-      rowCount: 0,
+      rowCount: new Int64(0),
     },
   ],
 };
@@ -63,7 +64,7 @@ describe('ArrowResultHandler', () => {
     const rowSetProvider = new ResultsProviderMock([sampleRowSet1]);
     const result = new ArrowResultHandler(context, rowSetProvider, { arrowSchema: sampleArrowSchema });
 
-    const batches = await result.fetchNext({ limit: 10000 });
+    const { batches } = await result.fetchNext({ limit: 10000 });
     expect(await rowSetProvider.hasMore()).to.be.false;
     expect(await result.hasMore()).to.be.false;
@@ -79,7 +80,7 @@
       lz4Compressed: true,
     });
 
-    const batches = await result.fetchNext({ limit: 10000 });
+    const { batches } = await result.fetchNext({ limit: 10000 });
     expect(await rowSetProvider.hasMore()).to.be.false;
     expect(await result.hasMore()).to.be.false;
@@ -101,28 +102,34 @@
   it('should return empty array if no data to process', async () => {
     const context = {};
+
+    const expectedResult = {
+      batches: [],
+      rowCount: 0,
+    };
+
     case1: {
       const rowSetProvider = new ResultsProviderMock();
       const result = new ArrowResultHandler(context, rowSetProvider, { arrowSchema: sampleArrowSchema });
-      expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
+      expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq(expectedResult);
       expect(await result.hasMore()).to.be.false;
     }
 
     case2: {
       const rowSetProvider = new ResultsProviderMock([sampleRowSet2]);
       const result = new ArrowResultHandler(context, rowSetProvider, { arrowSchema: sampleArrowSchema });
-      expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
+      expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq(expectedResult);
       expect(await result.hasMore()).to.be.false;
     }
 
     case3: {
       const rowSetProvider = new ResultsProviderMock([sampleRowSet3]);
       const result = new ArrowResultHandler(context, rowSetProvider, { arrowSchema: sampleArrowSchema });
-      expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
+      expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq(expectedResult);
      expect(await result.hasMore()).to.be.false;
     }
 
     case4: {
       const rowSetProvider = new ResultsProviderMock([sampleRowSet4]);
       const result = new ArrowResultHandler(context, rowSetProvider, { arrowSchema: sampleArrowSchema });
-      expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
+      expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq(expectedResult);
       expect(await result.hasMore()).to.be.false;
     }
   });
@@ -158,7 +165,10 @@ describe('ArrowResultHandler', () => {
     const context = {};
     const rowSetProvider = new ResultsProviderMock([sampleRowSet2]);
     const result = new ArrowResultHandler(context, rowSetProvider, {});
-    expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
+    expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq({
+      batches: [],
+      rowCount: 0,
+    });
     expect(await result.hasMore()).to.be.false;
   });
 });
diff --git a/tests/unit/result/CloudFetchResultHandler.test.js b/tests/unit/result/CloudFetchResultHandler.test.js
index 3d9c67ea..bca59b0d 100644
--- a/tests/unit/result/CloudFetchResultHandler.test.js
+++ b/tests/unit/result/CloudFetchResultHandler.test.js
@@ -32,47 +32,54 @@ const sampleRowSet1 = {
     {
       fileLink: 'http://example.com/result/1',
       expiryTime: new Int64(defaultLinkExpiryTime),
+      rowCount: new Int64(1),
     },
     {
       fileLink: 'http://example.com/result/2',
       expiryTime: new Int64(defaultLinkExpiryTime),
+      rowCount: new Int64(1),
     },
   ],
 };
 
 const sampleRowSet2 = {
-  startRowOffset: 0,
+  startRowOffset: new Int64(0),
   resultLinks: [
     {
       fileLink: 'http://example.com/result/3',
       expiryTime: new Int64(defaultLinkExpiryTime),
+      rowCount: new Int64(1),
     },
     {
       fileLink: 'http://example.com/result/4',
       expiryTime: new Int64(defaultLinkExpiryTime),
+      rowCount: new Int64(1),
     },
     {
       fileLink: 'http://example.com/result/5',
       expiryTime: new Int64(defaultLinkExpiryTime),
+      rowCount: new Int64(1),
     },
   ],
 };
 
 const sampleEmptyRowSet = {
-  startRowOffset: 0,
+  startRowOffset: new Int64(0),
   resultLinks: undefined,
 };
 
 const sampleExpiredRowSet = {
-  startRowOffset: 0,
+  startRowOffset: new Int64(0),
   resultLinks: [
     {
       fileLink: 'http://example.com/result/6',
       expiryTime: new Int64(defaultLinkExpiryTime),
+      rowCount: new Int64(1),
     },
     {
       fileLink: 'http://example.com/result/7',
       expiryTime: new Int64(Date.now() - 24 * 60 * 60 * 1000), // 24hr in past
+      rowCount: new Int64(1),
     },
   ],
 };
@@ -165,7 +172,7 @@ describe('CloudFetchResultHandler', () => {
     const clientConfig = context.getConfig();
 
     const rowSet = {
-      startRowOffset: 0,
+      startRowOffset: new Int64(0),
       resultLinks: [...sampleRowSet1.resultLinks, ...sampleRowSet2.resultLinks],
     };
     const expectedLinksCount = rowSet.resultLinks.length; // 5
@@ -187,8 +194,8 @@
     initialFetch: {
       // `cloudFetchConcurrentDownloads` out of `expectedLinksCount` links should be scheduled immediately
      // first one should be `await`-ed and returned from `fetchNext`
-      const items = await result.fetchNext({ limit: 10000 });
-      expect(items.length).to.be.gt(0);
+      const { batches } = await result.fetchNext({ limit: 10000 });
+      expect(batches.length).to.be.gt(0);
       expect(await rowSetProvider.hasMore()).to.be.false;
 
       // it should use retry policy for all requests
@@ -200,8 +207,8 @@
     secondFetch: {
       // It should return previously fetched batch, and schedule one more
-      const items = await result.fetchNext({ limit: 10000 });
-      expect(items.length).to.be.gt(0);
+      const { batches } = await result.fetchNext({ limit: 10000 });
+      expect(batches.length).to.be.gt(0);
       expect(await rowSetProvider.hasMore()).to.be.false;
 
       // it should use retry policy for all requests
@@ -215,8 +222,8 @@
     thirdFetch: {
       // Now buffer should be empty, and it should fetch next batches
-      const items = await result.fetchNext({ limit: 10000 });
-      expect(items.length).to.be.gt(0);
+      const { batches } = await result.fetchNext({ limit: 10000 });
+      expect(batches.length).to.be.gt(0);
       expect(await rowSetProvider.hasMore()).to.be.false;
 
       // it should use retry policy for all requests
@@ -249,13 +256,13 @@ describe('CloudFetchResultHandler', () => {
 
     expect(await rowSetProvider.hasMore()).to.be.true;
 
-    const items = await result.fetchNext({ limit: 10000 });
+    const { batches } = await result.fetchNext({ limit: 10000 });
     expect(await rowSetProvider.hasMore()).to.be.false;
 
     // it should use retry policy for all requests
     expect(context.connectionProvider.getRetryPolicy.called).to.be.true;
     expect(context.fetchHandler.called).to.be.true;
-    expect(items).to.deep.eq([expectedBatch]);
+    expect(batches).to.deep.eq([expectedBatch]);
   });
 
   it('should handle HTTP errors', async () => {
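
Note on the fixture used throughout these tests: `ResultsProviderMock` is constructed with a queue of result items plus an "empty" fallback item, and exposes the async `hasMore`/`fetchNext` interface the result handlers consume. The fixture itself (tests/unit/result/fixtures/ResultsProviderMock) is not part of this patch; the sketch below is an assumed minimal shape inferred only from the call sites above, for illustration rather than the repository's actual code:

    // Hypothetical sketch of ResultsProviderMock — inferred from usage in the
    // tests above, not the repository's actual fixture implementation.
    class ResultsProviderMock {
      constructor(items = [], emptyItem = undefined) {
        this.items = [...items]; // result items handed out in order
        this.emptyItem = emptyItem; // fallback returned once the queue is drained
      }

      async hasMore() {
        return this.items.length > 0;
      }

      async fetchNext() {
        // Drain the queue first; afterwards keep resolving to `emptyItem`,
        // e.g. `{ batches: [], rowCount: 0 }` in the converter tests above.
        return this.items.length > 0 ? this.items.shift() : this.emptyItem;
      }
    }

    module.exports = ResultsProviderMock;

Under this assumed shape, `new ResultsProviderMock([], emptyItem)` immediately reports `hasMore() === false` and yields `emptyItem` from `fetchNext`, which is what the "no data to process" assertions rely on.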