Skip to content

Commit

Permalink
Merge branch 'master' into test_pr_767
Browse files Browse the repository at this point in the history
  • Loading branch information
sfc-gh-pmotacki authored Mar 27, 2024
2 parents 2b1cfb6 + 42acd1d commit cf883fb
Show file tree
Hide file tree
Showing 30 changed files with 568 additions and 108 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/jira_issue.yml
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ jobs:
summary: '${{ github.event.issue.title }}'
description: |
${{ github.event.issue.body }} \\ \\ _Created from GitHub Action_ for ${{ github.event.issue.html_url }}
fields: '{ "customfield_11401": {"id": "14583"}, "assignee": {"id": "61027a219798100070592b20"} }'
fields: '{ "customfield_11401": {"id": "14723"}, "assignee": {"id": "712020:3c0352b5-63f7-4e26-9afe-38f6f9f0f4c5"}, "components":[{"id":"19290"}] }'

- name: Update GitHub Issue
uses: ./jira/gajira-issue-update
Expand Down
7 changes: 7 additions & 0 deletions ci/container/test_component.sh
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,13 @@ npm install
PACKAGE_NAME=$(cd $WORKSPACE && ls snowflake-sdk*.tgz)
npm install $WORKSPACE/${PACKAGE_NAME}

# Since the @azure lib has lost compatibility with Node 14,
# some dependencies have to be replaced by an older version
# Pin @azure/core-lro to the last Node 14-compatible release when running on Node 14.
nodeVersion=$(node -v)
if [[ "$nodeVersion" == 'v14.'* ]]; then
  # TODO(review): version reconstructed from ci/test_windows.bat (the scraped
  # page garbled this token as "[email protected]") — confirm it matches.
  npm install @azure/core-lro@2.6.0
fi

echo "[INFO] Setting test parameters"
if [[ "$LOCAL_USER_NAME" == "jenkins" ]]; then
echo "[INFO] Use the default test parameters.json"
Expand Down
9 changes: 9 additions & 0 deletions ci/test_windows.bat
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,15 @@ if %ERRORLEVEL% NEQ 0 (
echo [INFO] Installing Snowflake NodeJS Driver
copy %GITHUB_WORKSPACE%\artifacts\* .
for %%f in (snowflake-sdk*.tgz) do cmd /c npm install %%f

REM Since @azure lib has lost compatibility with node14
REM some dependencies have to be replaced by an older version
REM Capture the output of `node -v` (e.g. "v14.21.3") into nodeVersion.
FOR /F "tokens=* USEBACKQ" %%F IN (`node -v`) DO (
    SET nodeVersion=%%F
)
ECHO %nodeVersion%
REM Install the pinned @azure/core-lro only when the version contains "v14.":
REM stripping "v14." changes the string only if it is present. The original
REM compared against the undefined variable %str1%, which made the check
REM true for any non-empty version string.
if not x%nodeVersion:v14.=%==x%nodeVersion% cmd /c npm install @azure/core-lro@2.6.0

if %ERRORLEVEL% NEQ 0 (
echo [ERROR] failed to install the Snowflake NodeJS Driver
exit /b 1
Expand Down
26 changes: 19 additions & 7 deletions lib/authentication/auth_okta.js
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ function AuthOkta(connectionConfig, httpClient) {
ssoUrl = responseData['data']['ssoUrl'];
tokenUrl = responseData['data']['tokenUrl'];

validateURLs(authenticator, ssoUrl, tokenUrl);
this.validateURLs(authenticator, ssoUrl, tokenUrl);

const responseHtml = await getSAMLResponse( await createAccessToken(tokenUrl, username, password), ssoUrl);

Expand Down Expand Up @@ -164,13 +164,25 @@ function AuthOkta(connectionConfig, httpClient) {
*
* @returns {null}
*/
function validateURLs(authenticator, ssoUrl, tokenUrl) {
authenticator = authenticator.toLowerCase();
if (!(authenticator.startsWith(ssoUrl.substring(0, authenticator.length)) &&
authenticator.startsWith(tokenUrl.substring(0, authenticator.length)))) {
throw new Error('The prefix of the SSO/token URL and the specified authenticator do not match.');
this.validateURLs = function (authenticator, ssoUrl, tokenUrl) {
const compareUrlsByProtocolAndHost = (firstUrl, secondUrl) => firstUrl.protocol === secondUrl.protocol && firstUrl.host === secondUrl.host;

try {
const aUrl = new URL(authenticator);
const sUrl = new URL(ssoUrl);
const tUrl = new URL(tokenUrl);
if (!(compareUrlsByProtocolAndHost(aUrl, sUrl) && compareUrlsByProtocolAndHost(aUrl, tUrl))) {
throw new Error('The prefix of the SSO/token URL and the specified authenticator do not match.');
}
} catch (err) {
// we did not get a valid URL to test
if (err instanceof TypeError) {
throw new Error('Authenticator, SSO, or token URL is invalid.');
} else {
throw err;
}
}
}
};

/**
*
Expand Down
24 changes: 16 additions & 8 deletions lib/configuration/client_configuration.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
const os = require('os');
const path = require('path');
const fs = require('fs');
const { isString, exists, isFileNotWritableByGroupOrOthers } = require('../util');
const { isString, exists, isFileNotWritableByGroupOrOthers, getDriverDirectory } = require('../util');
const Logger = require('../logger');
const clientConfigFileName = 'sf_client_config.json';

Expand All @@ -20,12 +20,20 @@ const Levels = Object.freeze({
const defaultDirectories = getDefaultDirectories();

function getDefaultDirectories() {
const directories = [
{
dir: '.',
dirDescription: 'driver'
}
];
const directories = [];

const driverDirectory = getDriverDirectory();

if (driverDirectory) {
directories.push(
{
dir: driverDirectory,
dirDescription: 'driver'
}
);
} else {
Logger.getInstance().warn('Driver directory is not defined');
}

const homedir = os.homedir();

Expand Down Expand Up @@ -227,7 +235,7 @@ function ConfigurationUtil(fsPromisesModule, processModule) {
async function searchForConfigInDictionary(directory, directoryDescription) {
try {
const filePath = path.join(directory, clientConfigFileName);
return onlyIfFileExists(filePath);
return await onlyIfFileExists(filePath);
} catch (e) {
Logger.getInstance().error('Error while searching for the client config in %s directory: %s', directoryDescription, e);
return null;
Expand Down
8 changes: 8 additions & 0 deletions lib/connection/bind_uploader.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ const Logger = require('../logger');
const fs = require('fs');

const Statement = require('./statement');
const { isString } = require('util');

const STAGE_NAME = 'SYSTEM$BIND';
const CREATE_STAGE_STMT = 'CREATE OR REPLACE TEMPORARY STAGE '
Expand Down Expand Up @@ -113,6 +114,13 @@ function BindUploader(options, services, connectionConfig, requestId) {
if (data == null || data.toString() === '') {
return '""';
}
if (!isString(data)) {
if (data instanceof Date) {
data = data.toJSON();
} else {
data = JSON.stringify(data);
}
}
if (data.toString().indexOf('"') >= 0
|| data.toString().indexOf(',') >= 0
|| data.toString().indexOf('\\') >= 0
Expand Down
10 changes: 10 additions & 0 deletions lib/connection/connection_config.js
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@ const DEFAULT_PARAMS =
'includeRetryReason',
'disableQueryContextCache',
'retryTimeout',
'forceGCPUseDownscopedCredential'
];
const Logger = require('../logger');

Expand Down Expand Up @@ -481,6 +482,15 @@ function ConnectionConfig(options, validateCredentials, qaMode, clientInfo) {
disableConsoleLogin = options.disableConsoleLogin;
}

if (Util.exists(options.forceGCPUseDownscopedCredential)) {
Errors.checkArgumentValid(Util.isBoolean(options.forceGCPUseDownscopedCredential),
ErrorCodes.ERR_CONN_CREATE_INVALID_FORCE_GCP_USE_DOWNSCOPED_CREDENTIAL);

process.env.SNOWFLAKE_FORCE_GCP_USE_DOWNSCOPED_CREDENTIAL = options.forceGCPUseDownscopedCredential;
} else {
process.env.SNOWFLAKE_FORCE_GCP_USE_DOWNSCOPED_CREDENTIAL = false;
}

/**
* Returns an object that contains information about the proxy hostname, port,
* etc. for when http requests are made.
Expand Down
5 changes: 3 additions & 2 deletions lib/connection/result/sf_timestamp.js
Original file line number Diff line number Diff line change
Expand Up @@ -60,8 +60,9 @@ function SfTimestamp(epochSeconds, nanoSeconds, scale, timezone, format) {
this.timezone = timezone;
this.format = format;

// compute the epoch milliseconds and create a moment object from them
let moment = Moment((epochSeconds * 1000) + (nanoSeconds / 1000000));
// create a moment object that includes the epoch seconds and the incremental nano seconds
// X corresponds to UNIX epoch, SSSSSSSSS corresponds to nano seconds
let moment = Moment(`${epochSeconds}.${nanoSeconds}`, 'X.SSSSSSSSS');

// set the moment's timezone
if (Util.isString(timezone)) {
Expand Down
2 changes: 1 addition & 1 deletion lib/connection/statement.js
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ const Errors = require('../errors');
const ErrorCodes = Errors.codes;
const Logger = require('../logger');
const NativeTypes = require('./result/data_types').NativeTypes;
const FileTransferAgent = require('.././file_transfer_agent/file_transfer_agent');
const FileTransferAgent = require('../file_transfer_agent/file_transfer_agent');
const Bind = require('./bind_uploader');
const RowMode = require('./../constants/row_mode');

Expand Down
1 change: 1 addition & 0 deletions lib/constants/error_messages.js
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@ exports[404044] = 'Invalid retryTimeout value. The specified value must be a num
exports[404045] = 'Invalid account. The specified value must be a valid subdomain string.';
exports[404046] = 'Invalid region. The specified value must be a valid subdomain string.';
exports[404047] = 'Invalid disableConsoleLogin. The specified value must be a boolean';
exports[404048] = 'Invalid disableGCPTokenUpload. The specified value must be a boolean';

// 405001
exports[405001] = 'Invalid callback. The specified value must be a function.';
Expand Down
5 changes: 4 additions & 1 deletion lib/core.js
Original file line number Diff line number Diff line change
Expand Up @@ -152,11 +152,14 @@ function Core(options) {
configure: function (options) {
const logLevel = extractLogLevel(options);
const logFilePath = options.logFilePath;
const additionalLogToConsole = options.additionalLogToConsole;
if (logLevel != null || logFilePath) {
Logger.getInstance().debug(`Configuring logger with level: ${logLevel}, filePath: ${logFilePath}, additionalLogToConsole: ${additionalLogToConsole}`);
Logger.getInstance().configure(
{
level: logLevel,
filePath: logFilePath
filePath: logFilePath,
additionalLogToConsole: additionalLogToConsole
});
}

Expand Down
1 change: 1 addition & 0 deletions lib/errors.js
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@ codes.ERR_CONN_CREATE_INVALID_RETRY_TIMEOUT = 404044;
codes.ERR_CONN_CREATE_INVALID_ACCOUNT_REGEX = 404045;
codes.ERR_CONN_CREATE_INVALID_REGION_REGEX = 404046;
codes.ERR_CONN_CREATE_INVALID_DISABLE_CONSOLE_LOGIN = 404047;
codes.ERR_CONN_CREATE_INVALID_FORCE_GCP_USE_DOWNSCOPED_CREDENTIAL = 404048;

// 405001
codes.ERR_CONN_CONNECT_INVALID_CALLBACK = 405001;
Expand Down
11 changes: 6 additions & 5 deletions lib/file_transfer_agent/file_transfer_agent.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@

const binascii = require('binascii');
const crypto = require('crypto');
const glob = require('glob');
const fs = require('fs');
const os = require('os');
const mime = require('mime-types');
Expand All @@ -13,14 +12,15 @@ const path = require('path');
const statement = require('../connection/statement');
const fileCompressionType = require('./file_compression_type');
const expandTilde = require('expand-tilde');
const SnowflakeFileUtil = new (require('./file_util').FileUtil)();
const SnowflakeRemoteStorageUtil = require('./remote_storage_util').RemoteStorageUtil;
const LocalUtil = require('./local_util').LocalUtil;
const SnowflakeFileEncryptionMaterial = require('./remote_storage_util').SnowflakeFileEncryptionMaterial;
const SnowflakeS3Util = require('./s3_util');
const SnowflakeLocalUtil = new (require('./local_util').LocalUtil)();

const { FileUtil, getMatchingFilePaths } = require('./file_util');
const resultStatus = require('./file_util').resultStatus;

const SnowflakeFileUtil = new FileUtil();
const SnowflakeLocalUtil = new LocalUtil();
const S3_FS = 'S3';
const AZURE_FS = 'AZURE';
const GCS_FS = 'GCS';
Expand Down Expand Up @@ -380,6 +380,7 @@ function FileTransferAgent(context) {
meta['errorDetails'] = err.toString();
meta['errorDetails'] += ` file=${meta['srcFileName']}, real file=${meta['realSrcFilePath']}`;
} finally {

// Remove all files inside tmp folder if compression is set
if (meta['requireCompress']) {
const matchingFileNames = glob.sync(path.join(meta['tmpDir'], meta['srcFileName'] + '*'));

[GitHub Actions annotation — Run lint] Check failure on line 386 in lib/file_transfer_agent/file_transfer_agent.js: 'glob' is not defined
Expand Down Expand Up @@ -622,7 +623,7 @@ function FileTransferAgent(context) {
// If file name has a wildcard
if (fileName.includes('*')) {
// Get all file names that matches the wildcard
const matchingFileNames = glob.sync(path.join(root, fileName));
const matchingFileNames = getMatchingFilePaths(root, fileName);

for (const matchingFileName of matchingFileNames) {
initEncryptionMaterial();
Expand Down
13 changes: 12 additions & 1 deletion lib/file_transfer_agent/file_util.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@ const fs = require('fs');
const path = require('path');
const struct = require('python-struct');
const zlib = require('zlib');
const os = require('os');
const glob = require('glob');

const resultStatus = {
ERROR: 'ERROR',
Expand Down Expand Up @@ -132,5 +134,14 @@ function FileUtil() {
};
};
}

exports.FileUtil = FileUtil;

/**
 * Resolves a wildcard file name inside a directory to the list of matching paths.
 *
 * @param {String} dir - the directory to search in
 * @param {String} fileName - a file name, possibly containing glob wildcards
 *
 * @returns {Array} the paths of all matching files
 */
function getMatchingFilePaths(dir, fileName) {
  const pattern = path.join(dir, fileName);
  // glob only understands forward slashes, so on Windows normalize the
  // backslash separators produced by path.join before matching
  const normalizedPattern = os.platform() === 'win32' ? pattern.replace(/\\/g, '/') : pattern;
  return glob.sync(normalizedPattern);
}

exports.getMatchingFilePaths = getMatchingFilePaths;
9 changes: 5 additions & 4 deletions lib/file_transfer_agent/gcs_util.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

const EncryptionMetadata = require('./encrypt_util').EncryptionMetadata;
const FileHeader = require('./file_util').FileHeader;
const { shouldPerformGCPBucket } = require('../util');

const GCS_METADATA_PREFIX = 'x-goog-meta-';
const SFC_DIGEST = 'sfc-digest';
Expand Down Expand Up @@ -137,7 +138,7 @@ function GCSUtil(httpclient, filestream) {
let matDescKey;

try {
if (accessToken) {
if (shouldPerformGCPBucket(accessToken)) {
const gcsLocation = this.extractBucketNameAndPath(meta['stageInfo']['location']);

const metadata = await meta['client'].gcsClient
Expand Down Expand Up @@ -177,7 +178,7 @@ function GCSUtil(httpclient, filestream) {
encryptionMetadata
);
} catch (err) {
const errCode = err['code'] ? err['code'] : err.response.status;
const errCode = !isNaN(err['code']) && !isNaN(parseInt(err['code'])) ? err['code'] : err.response.status;

if ([403, 408, 429, 500, 503].includes(errCode)) {
meta['lastError'] = err;
Expand Down Expand Up @@ -277,7 +278,7 @@ function GCSUtil(httpclient, filestream) {
}

try {
if (accessToken) {
if (shouldPerformGCPBucket(accessToken)) {
const gcsLocation = this.extractBucketNameAndPath(meta['stageInfo']['location']);

await meta['client'].gcsClient
Expand Down Expand Up @@ -350,7 +351,7 @@ function GCSUtil(httpclient, filestream) {
let size;

try {
if (accessToken) {
if (shouldPerformGCPBucket(accessToken)) {
const gcsLocation = this.extractBucketNameAndPath(meta['stageInfo']['location']);

await meta['client'].gcsClient
Expand Down
4 changes: 3 additions & 1 deletion lib/logger/core.js
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,7 @@ exports.createLogger = function (options, logMessage, reconfigureOperation) {
let localMessageMaxLength;
let localLevel;
let localFilePath;
let localAdditionalLogToConsole;

// if an options argument is specified
if (Util.exists(options)) {
Expand All @@ -143,6 +144,7 @@ exports.createLogger = function (options, logMessage, reconfigureOperation) {
localMessageMaxLength = options.messageMaxLength;
localLevel = options.level;
localFilePath = options.filePath;
localAdditionalLogToConsole = options.additionalLogToConsole;
}

// if an includeTimestamp options is specified, convert it to a boolean
Expand Down Expand Up @@ -184,7 +186,7 @@ exports.createLogger = function (options, logMessage, reconfigureOperation) {
}

if (Util.isFunction(reconfigureOperation)) {
reconfigureOperation(localFilePath);
reconfigureOperation(localFilePath, localAdditionalLogToConsole);
}
},

Expand Down
13 changes: 7 additions & 6 deletions lib/logger/easy_logging_starter.js
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,8 @@ exports.init = async function (configFilePathFromConnectionString) {
logger.info('Initializing Easy Logging with logPath=%s and logLevel=%s from file: %s', logPath, config.loggingConfig.logLevel, config.configPath);
logger.configure({
level: logLevel,
filePath: path.join(logPath, 'snowflake.log')
filePath: path.join(logPath, 'snowflake.log'),
additionalLogToConsole: false
});
logger.easyLoggingConfigureCounter = (logger.easyLoggingConfigureCounter ?? 0) + 1;
initTrialParameters = {
Expand Down Expand Up @@ -96,16 +97,16 @@ async function getLogPath(config) {
}
const pathWithNodeJsSubdirectory = path.join(logPath, 'nodejs');
await fsPromises.access(pathWithNodeJsSubdirectory, fs.constants.F_OK)
.then(() => {
if (!isFileModeCorrect(pathWithNodeJsSubdirectory, 0o700, fsPromises)) {
.then(async () => {
if (!(await isFileModeCorrect(pathWithNodeJsSubdirectory, 0o700, fsPromises))) {
Logger.getInstance().warn('Log directory: %s could potentially be accessed by others', pathWithNodeJsSubdirectory);
}
})
.catch(() => {
.catch(async () => {
try {
return fsPromises.mkdir(pathWithNodeJsSubdirectory, { recursive: true, mode: 0o700 });
await fsPromises.mkdir(pathWithNodeJsSubdirectory, { recursive: true, mode: 0o700 });
} catch (err) {
throw new EasyLoggingError('Failed to create the directory for logs: %s', pathWithNodeJsSubdirectory);
throw new EasyLoggingError(`Failed to create the directory for logs: ${pathWithNodeJsSubdirectory}`);
}
});
return pathWithNodeJsSubdirectory;
Expand Down
Loading

0 comments on commit cf883fb

Please sign in to comment.