Commit f88ecc6: Update tests

Signed-off-by: Levko Kravets <[email protected]>
kravets-levko committed Mar 21, 2024 (1 parent: 85343e9)
Showing 3 changed files with 78 additions and 31 deletions.
40 changes: 35 additions & 5 deletions tests/unit/result/ArrowResultConverter.test.js
@@ -1,6 +1,7 @@
const { expect } = require('chai');
const fs = require('fs');
const path = require('path');
+const Int64 = require('node-int64');
const ArrowResultConverter = require('../../../dist/result/ArrowResultConverter').default;
const ResultsProviderMock = require('./fixtures/ResultsProviderMock');

@@ -53,33 +54,62 @@ const arrowBatchAllNulls = [
fs.readFileSync(path.join(__dirname, 'fixtures/dataAllNulls.arrow')),
];

-describe('ArrowResultHandler', () => {
+const emptyItem = {
+  batches: [],
+  rowCount: 0,
+};
+
+describe('ArrowResultConverter', () => {
it('should convert data', async () => {
const context = {};
-    const rowSetProvider = new ResultsProviderMock([sampleArrowBatch]);
+    const rowSetProvider = new ResultsProviderMock(
+      [
+        {
+          batches: sampleArrowBatch,
+          rowCount: 1,
+        },
+      ],
+      emptyItem,
+    );
const result = new ArrowResultConverter(context, rowSetProvider, { schema: sampleThriftSchema });
expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([{ 1: 1 }]);
});

it('should return empty array if no data to process', async () => {
const context = {};
-    const rowSetProvider = new ResultsProviderMock([], []);
+    const rowSetProvider = new ResultsProviderMock([], emptyItem);
const result = new ArrowResultConverter(context, rowSetProvider, { schema: sampleThriftSchema });
expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
expect(await result.hasMore()).to.be.false;
});

it('should return empty array if no schema available', async () => {
const context = {};
-    const rowSetProvider = new ResultsProviderMock([sampleArrowBatch]);
+    const rowSetProvider = new ResultsProviderMock(
+      [
+        {
+          batches: sampleArrowBatch,
+          rowCount: 1,
+        },
+      ],
+      emptyItem,
+    );
const result = new ArrowResultConverter(context, rowSetProvider, {});
expect(await result.hasMore()).to.be.false;
expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
});

it('should detect nulls', async () => {
const context = {};
-    const rowSetProvider = new ResultsProviderMock([arrowBatchAllNulls]);
+    const rowSetProvider = new ResultsProviderMock(
+      [
+        {
+          batches: arrowBatchAllNulls,
+          rowCount: 1,
+        },
+      ],
+      emptyItem,
+    );
const result = new ArrowResultConverter(context, rowSetProvider, { schema: thriftSchemaAllNulls });
expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([
{
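
Note: the converter tests now feed the provider a list of { batches, rowCount } items plus an explicit emptyItem terminator. Below is a minimal sketch of how the ResultsProviderMock fixture (required from ./fixtures/ResultsProviderMock) could behave to satisfy these tests; this is an assumption for illustration, not the actual fixture source:

// Hypothetical sketch of the ResultsProviderMock fixture used above.
// Assumption: it replays queued items in order, then keeps serving `emptyItem`.
class ResultsProviderMock {
  constructor(items = [], emptyItem) {
    this.items = [...items];
    this.emptyItem = emptyItem;
  }

  async hasMore() {
    return this.items.length > 0;
  }

  async fetchNext() {
    return this.items.length > 0 ? this.items.shift() : this.emptyItem;
  }
}

module.exports = ResultsProviderMock;
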
38 changes: 24 additions & 14 deletions tests/unit/result/ArrowResultHandler.test.js
@@ -1,4 +1,5 @@
const { expect } = require('chai');
+const Int64 = require('node-int64');
const LZ4 = require('lz4');
const ArrowResultHandler = require('../../../dist/result/ArrowResultHandler').default;
const ResultsProviderMock = require('./fixtures/ResultsProviderMock');
@@ -21,38 +22,38 @@ const sampleArrowBatch = {
0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
]),
-  rowCount: 1,
+  rowCount: new Int64(1),
};

const sampleRowSet1 = {
-  startRowOffset: 0,
+  startRowOffset: new Int64(0),
arrowBatches: [sampleArrowBatch],
};

const sampleRowSet1LZ4Compressed = {
-  startRowOffset: 0,
+  startRowOffset: new Int64(0),
arrowBatches: sampleRowSet1.arrowBatches.map((item) => ({
...item,
batch: LZ4.encode(item.batch),
})),
};

const sampleRowSet2 = {
-  startRowOffset: 0,
+  startRowOffset: new Int64(0),
arrowBatches: undefined,
};

const sampleRowSet3 = {
-  startRowOffset: 0,
+  startRowOffset: new Int64(0),
arrowBatches: [],
};

const sampleRowSet4 = {
-  startRowOffset: 0,
+  startRowOffset: new Int64(0),
arrowBatches: [
{
batch: undefined,
-      rowCount: 0,
+      rowCount: new Int64(0),
},
],
};
@@ -63,7 +64,7 @@ describe('ArrowResultHandler', () => {
const rowSetProvider = new ResultsProviderMock([sampleRowSet1]);
const result = new ArrowResultHandler(context, rowSetProvider, { arrowSchema: sampleArrowSchema });

-    const batches = await result.fetchNext({ limit: 10000 });
+    const { batches } = await result.fetchNext({ limit: 10000 });
expect(await rowSetProvider.hasMore()).to.be.false;
expect(await result.hasMore()).to.be.false;

@@ -79,7 +80,7 @@ describe('ArrowResultHandler', () => {
lz4Compressed: true,
});

-    const batches = await result.fetchNext({ limit: 10000 });
+    const { batches } = await result.fetchNext({ limit: 10000 });
expect(await rowSetProvider.hasMore()).to.be.false;
expect(await result.hasMore()).to.be.false;

@@ -101,28 +102,34 @@ describe('ArrowResultHandler', () => {

it('should return empty array if no data to process', async () => {
const context = {};

+    const expectedResult = {
+      batches: [],
+      rowCount: 0,
+    };

case1: {
const rowSetProvider = new ResultsProviderMock();
const result = new ArrowResultHandler(context, rowSetProvider, { arrowSchema: sampleArrowSchema });
-      expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
+      expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq(expectedResult);
expect(await result.hasMore()).to.be.false;
}
case2: {
const rowSetProvider = new ResultsProviderMock([sampleRowSet2]);
const result = new ArrowResultHandler(context, rowSetProvider, { arrowSchema: sampleArrowSchema });
-      expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
+      expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq(expectedResult);
expect(await result.hasMore()).to.be.false;
}
case3: {
const rowSetProvider = new ResultsProviderMock([sampleRowSet3]);
const result = new ArrowResultHandler(context, rowSetProvider, { arrowSchema: sampleArrowSchema });
-      expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
+      expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq(expectedResult);
expect(await result.hasMore()).to.be.false;
}
case4: {
const rowSetProvider = new ResultsProviderMock([sampleRowSet4]);
const result = new ArrowResultHandler(context, rowSetProvider, { arrowSchema: sampleArrowSchema });
-      expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
+      expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq(expectedResult);
expect(await result.hasMore()).to.be.false;
}
});
@@ -158,7 +165,10 @@ describe('ArrowResultHandler', () => {
const context = {};
const rowSetProvider = new ResultsProviderMock([sampleRowSet2]);
const result = new ArrowResultHandler(context, rowSetProvider, {});
-    expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
+    expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq({
+      batches: [],
+      rowCount: 0,
+    });
expect(await result.hasMore()).to.be.false;
});
});
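
Note: two contract changes run through the handler tests above. Thrift i64 fields (startRowOffset, rowCount) are now built as node-int64 instances rather than plain JavaScript numbers, and fetchNext() resolves to a { batches, rowCount } object rather than a bare array of Arrow buffers. A hedged sketch using only names that appear in this diff:

const Int64 = require('node-int64');

// Thrift models i64 fields as node-int64 values, so fixtures wrap them:
const rowSet = {
  startRowOffset: new Int64(0),
  arrowBatches: [{ batch: Buffer.alloc(0), rowCount: new Int64(1) }],
};

// fetchNext() now resolves to an object, not an array:
//   const { batches, rowCount } = await result.fetchNext({ limit: 10000 });
// An exhausted handler yields { batches: [], rowCount: 0 }.
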
31 changes: 19 additions & 12 deletions tests/unit/result/CloudFetchResultHandler.test.js
@@ -32,47 +32,54 @@ const sampleRowSet1 = {
{
fileLink: 'http://example.com/result/1',
expiryTime: new Int64(defaultLinkExpiryTime),
+      rowCount: new Int64(1),
},
{
fileLink: 'http://example.com/result/2',
expiryTime: new Int64(defaultLinkExpiryTime),
+      rowCount: new Int64(1),
},
],
};

const sampleRowSet2 = {
-  startRowOffset: 0,
+  startRowOffset: new Int64(0),
resultLinks: [
{
fileLink: 'http://example.com/result/3',
expiryTime: new Int64(defaultLinkExpiryTime),
+      rowCount: new Int64(1),
},
{
fileLink: 'http://example.com/result/4',
expiryTime: new Int64(defaultLinkExpiryTime),
+      rowCount: new Int64(1),
},
{
fileLink: 'http://example.com/result/5',
expiryTime: new Int64(defaultLinkExpiryTime),
+      rowCount: new Int64(1),
},
],
};

const sampleEmptyRowSet = {
-  startRowOffset: 0,
+  startRowOffset: new Int64(0),
resultLinks: undefined,
};

const sampleExpiredRowSet = {
-  startRowOffset: 0,
+  startRowOffset: new Int64(0),
resultLinks: [
{
fileLink: 'http://example.com/result/6',
expiryTime: new Int64(defaultLinkExpiryTime),
+      rowCount: new Int64(1),
},
{
fileLink: 'http://example.com/result/7',
expiryTime: new Int64(Date.now() - 24 * 60 * 60 * 1000), // 24hr in past
+      rowCount: new Int64(1),
},
],
};
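
Note: besides the Int64 wrapping, every CloudFetch result link in these fixtures now carries its own rowCount. A sketch of one link object as shaped above (field names come from the diff; the expiry arithmetic mirrors the sampleExpiredRowSet fixture):

const Int64 = require('node-int64');

// One CloudFetch result link, as modeled in the fixtures above:
const resultLink = {
  fileLink: 'http://example.com/result/1',
  expiryTime: new Int64(Date.now() + 24 * 60 * 60 * 1000), // 24h in the future
  rowCount: new Int64(1),
};
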
@@ -165,7 +172,7 @@ describe('CloudFetchResultHandler', () => {
const clientConfig = context.getConfig();

const rowSet = {
-      startRowOffset: 0,
+      startRowOffset: new Int64(0),
resultLinks: [...sampleRowSet1.resultLinks, ...sampleRowSet2.resultLinks],
};
const expectedLinksCount = rowSet.resultLinks.length; // 5
@@ -187,8 +194,8 @@
initialFetch: {
// `cloudFetchConcurrentDownloads` out of `expectedLinksCount` links should be scheduled immediately
// first one should be `await`-ed and returned from `fetchNext`
-      const items = await result.fetchNext({ limit: 10000 });
-      expect(items.length).to.be.gt(0);
+      const { batches } = await result.fetchNext({ limit: 10000 });
+      expect(batches.length).to.be.gt(0);
expect(await rowSetProvider.hasMore()).to.be.false;

// it should use retry policy for all requests
@@ -200,8 +207,8 @@

secondFetch: {
// It should return previously fetched batch, and schedule one more
-      const items = await result.fetchNext({ limit: 10000 });
-      expect(items.length).to.be.gt(0);
+      const { batches } = await result.fetchNext({ limit: 10000 });
+      expect(batches.length).to.be.gt(0);
expect(await rowSetProvider.hasMore()).to.be.false;

// it should use retry policy for all requests
@@ -215,8 +222,8 @@

thirdFetch: {
// Now buffer should be empty, and it should fetch next batches
-      const items = await result.fetchNext({ limit: 10000 });
-      expect(items.length).to.be.gt(0);
+      const { batches } = await result.fetchNext({ limit: 10000 });
+      expect(batches.length).to.be.gt(0);
expect(await rowSetProvider.hasMore()).to.be.false;

// it should use retry policy for all requests
@@ -249,13 +256,13 @@

expect(await rowSetProvider.hasMore()).to.be.true;

-    const items = await result.fetchNext({ limit: 10000 });
+    const { batches } = await result.fetchNext({ limit: 10000 });
expect(await rowSetProvider.hasMore()).to.be.false;

// it should use retry policy for all requests
expect(context.connectionProvider.getRetryPolicy.called).to.be.true;
expect(context.fetchHandler.called).to.be.true;
-    expect(items).to.deep.eq([expectedBatch]);
+    expect(batches).to.deep.eq([expectedBatch]);
});

it('should handle HTTP errors', async () => {
