diff --git a/test/integration/testLargeResultSet.js b/test/integration/testLargeResultSet.js
index f17612ef3..843095fcc 100644
--- a/test/integration/testLargeResultSet.js
+++ b/test/integration/testLargeResultSet.js
@@ -4,10 +4,10 @@
 
 const assert = require('assert');
 const testUtil = require('./testUtil');
 
-const sourceRowCount = 10000;
-
 describe('Large result Set Tests', function () {
+  const sourceRowCount = 10000;
+
   let connection;
   const selectAllFromOrders = `select randstr(1000,random()) from table(generator(rowcount=>${sourceRowCount}))`;
 
@@ -106,3 +106,41 @@ describe('Large result Set Tests', function ()
     });
   });
 });
+
+describe('SNOW-743920: Large result set with ~35 chunks', function () {
+  let connection;
+  const tableName = 'test_table';
+  const sourceRowCount = 251002;
+  const createTable = `create or replace table ${tableName} (data string)`;
+  const populateData = `insert into ${tableName} select randstr(20, random()) from table (generator(rowcount =>${sourceRowCount}))`;
+  const selectAllFromOrders = `select * from ${tableName}`;
+
+  before(async () => {
+    connection = testUtil.createConnection();
+    await testUtil.connectAsync(connection);
+    await testUtil.executeCmdAsync(connection, 'alter session set ROWS_PER_RESULTSET = 1000000');
+    await testUtil.executeCmdAsync(connection, 'alter session set USE_CACHED_RESULT = false;');
+    await testUtil.executeCmdAsync(connection, createTable);
+    await testUtil.executeCmdAsync(connection, populateData);
+  });
+
+  after(async () => {
+    await testUtil.dropTablesIgnoringErrorsAsync(connection, [tableName]);
+    await testUtil.destroyConnectionAsync(connection);
+  });
+
+  it('fetch result set with many chunks', function (done) {
+    connection.execute({
+      sqlText: selectAllFromOrders,
+      complete: function (err, _, rows) {
+        try {
+          testUtil.checkError(err);
+          assert.strictEqual(rows.length, sourceRowCount);
+          done();
+        } catch (e) {
+          done(e);
+        }
+      }
+    });
+  });
+});
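
For a result set of this size, the new test buffers all ~251k rows into a single `rows` array before asserting on its length. The driver can also deliver rows as a stream, which keeps memory bounded while still exercising every chunk. A minimal sketch of that variant, assuming it sits inside the same describe block so it can reuse `connection`, `selectAllFromOrders`, and `sourceRowCount` (the test name is illustrative, not part of this change); `streamResult: true` and `statement.streamRows()` are the driver's documented streaming interface:

  it('fetch result set with many chunks as a stream', function (done) {
    connection.execute({
      sqlText: selectAllFromOrders,
      streamResult: true, // deliver rows via streamRows() instead of a buffered rows array
      complete: function (err, stmt) {
        if (err) {
          done(err);
          return;
        }
        let rowCount = 0;
        stmt.streamRows()
          .on('error', done)
          .on('data', () => rowCount++) // count rows without retaining them
          .on('end', () => {
            try {
              assert.strictEqual(rowCount, sourceRowCount);
              done();
            } catch (e) {
              done(e);
            }
          });
      }
    });
  });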