Update existing tests
Signed-off-by: Levko Kravets <[email protected]>
kravets-levko committed Nov 2, 2023
1 parent ec96ec6 commit 44168c4
Showing 4 changed files with 50 additions and 35 deletions.
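For orientation, the pattern the updated tests assert is sketched below: getResultHandler() now resolves to a ResultSlicer that wraps the format-specific handler (reachable through its `source` property), and fetchChunk() accepts a `disableBuffering` option. This is a minimal illustrative sketch assembled from the diff that follows; the opened `session`, the query text, and the surrounding test scaffolding are assumptions, not part of this commit.

const { expect } = require('chai');
const ResultSlicer = require('../../dist/result/ResultSlicer').default;
const ArrowResultHandler = require('../../dist/result/ArrowResultHandler').default;

it('exposes the underlying handler through ResultSlicer.source', async () => {
  // `session` is assumed to be an already-opened DBSQLSession, as in the e2e tests below.
  const operation = await session.executeStatement('SELECT 1');

  // getResultHandler() resolves to a ResultSlicer; the format-specific handler
  // (JSON, Arrow, or CloudFetch) sits behind its `source` property.
  const resultHandler = await operation.getResultHandler();
  expect(resultHandler).to.be.instanceof(ResultSlicer);
  expect(resultHandler.source).to.be.instanceof(ArrowResultHandler);

  // Passing disableBuffering: true bypasses the slicer's buffering, so a single
  // fetchChunk() call maps to a single driver.fetchResults round trip.
  const chunk = await operation.fetchChunk({ maxRows: 10, disableBuffering: true });
  expect(chunk.length).to.be.at.most(10);

  await operation.close();
});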
13 changes: 9 additions & 4 deletions tests/e2e/arrow.test.js
@@ -4,6 +4,7 @@ const config = require('./utils/config');
const logger = require('./utils/logger')(config.logger);
const { DBSQLClient } = require('../..');
const ArrowResultHandler = require('../../dist/result/ArrowResultHandler').default;
+ const ResultSlicer = require('../../dist/result/ResultSlicer').default;
const globalConfig = require('../../dist/globalConfig').default;

const fixtures = require('../fixtures/compatibility');
@@ -77,7 +78,8 @@ describe('Arrow support', () => {
expect(result).to.deep.equal(expectedColumn);

const resultHandler = await operation.getResultHandler();
- expect(resultHandler).to.be.not.instanceof(ArrowResultHandler);
+ expect(resultHandler).to.be.instanceof(ResultSlicer);
+ expect(resultHandler.source).to.be.not.instanceof(ArrowResultHandler);

await operation.close();
}),
@@ -94,7 +96,8 @@ describe('Arrow support', () => {
expect(fixArrowResult(result)).to.deep.equal(expectedArrow);

const resultHandler = await operation.getResultHandler();
- expect(resultHandler).to.be.instanceof(ArrowResultHandler);
+ expect(resultHandler).to.be.instanceof(ResultSlicer);
+ expect(resultHandler.source).to.be.instanceof(ArrowResultHandler);

await operation.close();
}),
@@ -111,7 +114,8 @@ describe('Arrow support', () => {
expect(fixArrowResult(result)).to.deep.equal(expectedArrowNativeTypes);

const resultHandler = await operation.getResultHandler();
- expect(resultHandler).to.be.instanceof(ArrowResultHandler);
+ expect(resultHandler).to.be.instanceof(ResultSlicer);
+ expect(resultHandler.source).to.be.instanceof(ArrowResultHandler);

await operation.close();
}),
@@ -131,7 +135,8 @@ describe('Arrow support', () => {

// We use some internals here to check that server returned response with multiple batches
const resultHandler = await operation.getResultHandler();
- expect(resultHandler).to.be.instanceof(ArrowResultHandler);
+ expect(resultHandler).to.be.instanceof(ResultSlicer);
+ expect(resultHandler.source).to.be.instanceof(ArrowResultHandler);

sinon.spy(operation._data, 'fetchNext');

4 changes: 3 additions & 1 deletion tests/e2e/batched_fetch.test.js
@@ -41,7 +41,9 @@ describe('Data fetching', () => {
try {
// set `maxRows` to null to disable direct results so all the data are fetched through `driver.fetchResults`
const operation = await session.executeStatement(query, { maxRows: null });
- let chunkedOp = await operation.fetchChunk({ maxRows: 10 }).catch((error) => logger(error));
+ let chunkedOp = await operation
+ .fetchChunk({ maxRows: 10, disableBuffering: true })
+ .catch((error) => logger(error));
expect(chunkedOp.length).to.be.equal(10);
// we explicitly requested only one chunk
expect(session.context.driver.fetchResults.callCount).to.equal(1);
22 changes: 13 additions & 9 deletions tests/e2e/cloudfetch.test.js
@@ -4,6 +4,7 @@ const config = require('./utils/config');
const logger = require('./utils/logger')(config.logger);
const { DBSQLClient } = require('../..');
const CloudFetchResultHandler = require('../../dist/result/CloudFetchResultHandler').default;
+ const ResultSlicer = require('../../dist/result/ResultSlicer').default;
const globalConfig = require('../../dist/globalConfig').default;

const openSession = async () => {
@@ -57,33 +58,36 @@ describe('CloudFetch', () => {

// Check if we're actually getting data via CloudFetch
const resultHandler = await operation.getResultHandler();
- expect(resultHandler).to.be.instanceOf(CloudFetchResultHandler);
+ expect(resultHandler).to.be.instanceof(ResultSlicer);
+ expect(resultHandler.source).to.be.instanceOf(CloudFetchResultHandler);

+ const cfResultHandler = resultHandler.source;

// Fetch first chunk and check if result handler behaves properly.
// With the count of rows we queried, there should be at least one row set,
// containing 8 result links. After fetching the first chunk,
// result handler should download 5 of them and schedule the rest
- expect(await resultHandler.hasMore()).to.be.false;
- expect(resultHandler.pendingLinks.length).to.be.equal(0);
- expect(resultHandler.downloadedBatches.length).to.be.equal(0);
+ expect(await cfResultHandler.hasMore()).to.be.false;
+ expect(cfResultHandler.pendingLinks.length).to.be.equal(0);
+ expect(cfResultHandler.downloadedBatches.length).to.be.equal(0);

sinon.spy(operation._data, 'fetchNext');

- const chunk = await operation.fetchChunk({ maxRows: 100000 });
+ const chunk = await operation.fetchChunk({ maxRows: 100000, disableBuffering: true });
// Count links returned from server
const resultSet = await operation._data.fetchNext.firstCall.returnValue;
const resultLinksCount = resultSet?.resultLinks?.length ?? 0;

- expect(await resultHandler.hasMore()).to.be.true;
+ expect(await cfResultHandler.hasMore()).to.be.true;
// expected batches minus first 5 already fetched
- expect(resultHandler.pendingLinks.length).to.be.equal(
+ expect(cfResultHandler.pendingLinks.length).to.be.equal(
resultLinksCount - globalConfig.cloudFetchConcurrentDownloads,
);
- expect(resultHandler.downloadedBatches.length).to.be.equal(globalConfig.cloudFetchConcurrentDownloads - 1);
+ expect(cfResultHandler.downloadedBatches.length).to.be.equal(globalConfig.cloudFetchConcurrentDownloads - 1);

let fetchedRowCount = chunk.length;
while (await operation.hasMoreRows()) {
- const chunk = await operation.fetchChunk({ maxRows: 100000 });
+ const chunk = await operation.fetchChunk({ maxRows: 100000, disableBuffering: true });
fetchedRowCount += chunk.length;
}

46 changes: 25 additions & 21 deletions tests/unit/DBSQLOperation.test.js
@@ -9,6 +9,7 @@ const HiveDriverError = require('../../dist/errors/HiveDriverError').default;
const JsonResultHandler = require('../../dist/result/JsonResultHandler').default;
const ArrowResultHandler = require('../../dist/result/ArrowResultHandler').default;
const CloudFetchResultHandler = require('../../dist/result/CloudFetchResultHandler').default;
+ const ResultSlicer = require('../../dist/result/ResultSlicer').default;

class OperationHandleMock {
constructor(hasResultSet = true) {
@@ -407,7 +408,7 @@ describe('DBSQLOperation', () => {
expect(operation.cancelled).to.be.true;

await expectFailure(() => operation.fetchAll());
- await expectFailure(() => operation.fetchChunk());
+ await expectFailure(() => operation.fetchChunk({ disableBuffering: true }));
await expectFailure(() => operation.status());
await expectFailure(() => operation.finished());
await expectFailure(() => operation.getSchema());
@@ -533,7 +534,7 @@ describe('DBSQLOperation', () => {
expect(operation.closed).to.be.true;

await expectFailure(() => operation.fetchAll());
- await expectFailure(() => operation.fetchChunk());
+ await expectFailure(() => operation.fetchChunk({ disableBuffering: true }));
await expectFailure(() => operation.status());
await expectFailure(() => operation.finished());
await expectFailure(() => operation.getSchema());
@@ -885,7 +886,8 @@ describe('DBSQLOperation', () => {
const operation = new DBSQLOperation({ handle, context });
const resultHandler = await operation.getResultHandler();
expect(context.driver.getResultSetMetadata.called).to.be.true;
- expect(resultHandler).to.be.instanceOf(JsonResultHandler);
+ expect(resultHandler).to.be.instanceOf(ResultSlicer);
+ expect(resultHandler.source).to.be.instanceOf(JsonResultHandler);
}

arrowHandler: {
@@ -895,7 +897,8 @@ describe('DBSQLOperation', () => {
const operation = new DBSQLOperation({ handle, context });
const resultHandler = await operation.getResultHandler();
expect(context.driver.getResultSetMetadata.called).to.be.true;
- expect(resultHandler).to.be.instanceOf(ArrowResultHandler);
+ expect(resultHandler).to.be.instanceOf(ResultSlicer);
+ expect(resultHandler.source).to.be.instanceOf(ArrowResultHandler);
}

cloudFetchHandler: {
@@ -905,7 +908,8 @@ describe('DBSQLOperation', () => {
const operation = new DBSQLOperation({ handle, context });
const resultHandler = await operation.getResultHandler();
expect(context.driver.getResultSetMetadata.called).to.be.true;
- expect(resultHandler).to.be.instanceOf(CloudFetchResultHandler);
+ expect(resultHandler).to.be.instanceOf(ResultSlicer);
+ expect(resultHandler.source).to.be.instanceOf(CloudFetchResultHandler);
}
});
});
@@ -921,7 +925,7 @@ describe('DBSQLOperation', () => {
sinon.spy(context.driver, 'fetchResults');
const operation = new DBSQLOperation({ handle, context });

- const results = await operation.fetchChunk();
+ const results = await operation.fetchChunk({ disableBuffering: true });

expect(results).to.deep.equal([]);
expect(context.driver.getResultSetMetadata.called).to.be.false;
@@ -948,7 +952,7 @@

const operation = new DBSQLOperation({ handle, context });

- const results = await operation.fetchChunk();
+ const results = await operation.fetchChunk({ disableBuffering: true });

expect(context.driver.getOperationStatus.called).to.be.true;
expect(results).to.deep.equal([]);
@@ -974,7 +978,7 @@
context.driver.fetchResultsResp.results.columns = [];

const operation = new DBSQLOperation({ handle, context });
- await operation.fetchChunk({ progress: true });
+ await operation.fetchChunk({ progress: true, disableBuffering: true });

expect(context.driver.getOperationStatus.called).to.be.true;
const request = context.driver.getOperationStatus.getCall(0).args[0];
@@ -1005,7 +1009,7 @@

const callback = sinon.stub();

- await operation.fetchChunk({ callback });
+ await operation.fetchChunk({ callback, disableBuffering: true });

expect(context.driver.getOperationStatus.called).to.be.true;
expect(callback.callCount).to.be.equal(attemptsUntilFinished);
@@ -1023,7 +1027,7 @@

const operation = new DBSQLOperation({ handle, context });

- const results = await operation.fetchChunk();
+ const results = await operation.fetchChunk({ disableBuffering: true });

expect(results).to.deep.equal([{ test: 1 }, { test: 2 }, { test: 3 }]);
expect(context.driver.getResultSetMetadata.called).to.be.true;
@@ -1060,7 +1064,7 @@
},
});

- const results = await operation.fetchChunk();
+ const results = await operation.fetchChunk({ disableBuffering: true });

expect(results).to.deep.equal([{ test: 5 }, { test: 6 }]);
expect(context.driver.getResultSetMetadata.called).to.be.true;
@@ -1098,13 +1102,13 @@
},
});

- const results1 = await operation.fetchChunk();
+ const results1 = await operation.fetchChunk({ disableBuffering: true });

expect(results1).to.deep.equal([{ test: 5 }, { test: 6 }]);
expect(context.driver.getResultSetMetadata.callCount).to.be.eq(1);
expect(context.driver.fetchResults.callCount).to.be.eq(0);

- const results2 = await operation.fetchChunk();
+ const results2 = await operation.fetchChunk({ disableBuffering: true });

expect(results2).to.deep.equal([{ test: 1 }, { test: 2 }, { test: 3 }]);
expect(context.driver.getResultSetMetadata.callCount).to.be.eq(1);
@@ -1125,7 +1129,7 @@
const operation = new DBSQLOperation({ handle, context });

try {
- await operation.fetchChunk();
+ await operation.fetchChunk({ disableBuffering: true });
expect.fail('It should throw a HiveDriverError');
} catch (e) {
if (e instanceof AssertionError) {
@@ -1180,7 +1184,7 @@
const operation = new DBSQLOperation({ handle, context });

expect(await operation.hasMoreRows()).to.be.false;
- await operation.fetchChunk();
+ await operation.fetchChunk({ disableBuffering: true });
expect(await operation.hasMoreRows()).to.be.true;
});

@@ -1196,7 +1200,7 @@
const operation = new DBSQLOperation({ handle, context });

expect(await operation.hasMoreRows()).to.be.false;
- await operation.fetchChunk();
+ await operation.fetchChunk({ disableBuffering: true });
expect(await operation.hasMoreRows()).to.be.true;
await operation.close();
expect(await operation.hasMoreRows()).to.be.false;
@@ -1214,7 +1218,7 @@
const operation = new DBSQLOperation({ handle, context });

expect(await operation.hasMoreRows()).to.be.false;
- await operation.fetchChunk();
+ await operation.fetchChunk({ disableBuffering: true });
expect(await operation.hasMoreRows()).to.be.true;
await operation.cancel();
expect(await operation.hasMoreRows()).to.be.false;
@@ -1232,7 +1236,7 @@
const operation = new DBSQLOperation({ handle, context });

expect(await operation.hasMoreRows()).to.be.false;
- await operation.fetchChunk();
+ await operation.fetchChunk({ disableBuffering: true });
expect(await operation.hasMoreRows()).to.be.true;
});

@@ -1248,7 +1252,7 @@
const operation = new DBSQLOperation({ handle, context });

expect(await operation.hasMoreRows()).to.be.false;
- await operation.fetchChunk();
+ await operation.fetchChunk({ disableBuffering: true });
expect(await operation.hasMoreRows()).to.be.true;
});

@@ -1264,7 +1268,7 @@
const operation = new DBSQLOperation({ handle, context });

expect(await operation.hasMoreRows()).to.be.false;
- await operation.fetchChunk();
+ await operation.fetchChunk({ disableBuffering: true });
expect(await operation.hasMoreRows()).to.be.true;
});

@@ -1281,7 +1285,7 @@
const operation = new DBSQLOperation({ handle, context });

expect(await operation.hasMoreRows()).to.be.false;
- await operation.fetchChunk();
+ await operation.fetchChunk({ disableBuffering: true });
expect(await operation.hasMoreRows()).to.be.false;
});
});
