snowflake-sdk
Advanced tools
Comparing version 1.9.3 to 1.10.0
@@ -5,3 +5,2 @@ /* | ||
const Util = require('../util'); | ||
const HttpsAgent = require('https').Agent; | ||
@@ -19,3 +18,3 @@ const SocketUtil = require('./socket_util'); | ||
function HttpsOcspAgent(options) { | ||
const agent = HttpsAgent.apply(this, arguments); | ||
const agent = HttpsAgent.apply(this, [options]); | ||
agent.createConnection = function (port, host, options) { | ||
@@ -22,0 +21,0 @@ // make sure the 'options' variables references the argument that actually |
@@ -54,2 +54,3 @@ /* | ||
* @param {String} host | ||
* @param {Object} agent | ||
* @param {Object} mock | ||
@@ -56,0 +57,0 @@ * |
@@ -25,7 +25,5 @@ /* | ||
this.authenticate = async function (authenticator, serviceName, account, username) { | ||
return; | ||
}; | ||
this.authenticate = async function () {}; | ||
} | ||
module.exports = AuthDefault; |
@@ -25,7 +25,5 @@ /* | ||
this.authenticate = async function (authenticator, serviceName, account, username) { | ||
return; | ||
}; | ||
this.authenticate = async function () {}; | ||
} | ||
module.exports = AuthOauth; |
@@ -7,2 +7,3 @@ /* | ||
const rest = require('../global_config').rest; | ||
const Logger = require('../logger'); | ||
@@ -12,7 +13,3 @@ /** | ||
* | ||
* @param {String} password | ||
* @param {String} region | ||
* @param {String} account | ||
* @param {String} clientType | ||
* @param {String} clientVersion | ||
* @param {Object} connectionConfig | ||
* @param {HttpClient} httpClient | ||
@@ -23,9 +20,14 @@ * | ||
*/ | ||
function AuthOkta(password, region, account, clientType, clientVersion, httpClient) { | ||
function AuthOkta(connectionConfig, httpClient) { | ||
const password = connectionConfig.password; | ||
const region = connectionConfig.region; | ||
const account = connectionConfig.account; | ||
const clientAppId = connectionConfig.getClientType(); | ||
const clientAppVersion = connectionConfig.getClientVersion(); | ||
const host = util.constructHostname(region, account); | ||
const port = rest.HTTPS_PORT; | ||
const protocol = rest.HTTPS_PROTOCOL; | ||
const clientAppId = clientType; | ||
const clientAppVersion = clientVersion; | ||
let user; | ||
let ssoUrl; | ||
let tokenUrl; | ||
let samlResponse; | ||
@@ -55,45 +57,69 @@ | ||
this.authenticate = async function (authenticator, serviceName, account, username) { | ||
let ssoUrl; | ||
let tokenUrl; | ||
await step1(authenticator, serviceName, account, username).then((response) => { | ||
const responseData = response['data']; | ||
const success = responseData['success']; | ||
const errorCode = responseData['code']; | ||
const errorMessage = responseData['message']; | ||
const response = await getAuthURLs(authenticator, serviceName, account, username); | ||
const responseData = response['data']; | ||
const success = responseData['success']; | ||
const errorCode = responseData['code']; | ||
const errorMessage = responseData['message']; | ||
user = username; | ||
if (typeof success === 'undefined' || errorCode === 'undefined' || errorMessage === 'undefined') { | ||
throw new Error('Unable to use provided Okta address as an authenticator. Is the authenticator URL correct?'); | ||
} | ||
if (typeof success === 'undefined' || errorCode === 'undefined' || errorMessage === 'undefined') { | ||
throw new Error('Unable to use provided Okta address as an authenticator. Is the authenticator URL correct?'); | ||
} | ||
if (success !== true) { | ||
throw new Error(`Unable to use provided Okta address as an authenticator. Error code: ${errorCode}, error message: ${errorMessage}`); | ||
} | ||
if (success !== true) { | ||
throw new Error(`Unable to use provided Okta address as an authenticator. Error code: ${errorCode}, error message: ${errorMessage}`); | ||
} | ||
ssoUrl = responseData['data']['ssoUrl']; | ||
tokenUrl = responseData['data']['tokenUrl']; | ||
}); | ||
ssoUrl = responseData['data']['ssoUrl']; | ||
tokenUrl = responseData['data']['tokenUrl']; | ||
step2(authenticator, ssoUrl, tokenUrl); | ||
validateURLs(authenticator, ssoUrl, tokenUrl); | ||
const responseHtml = await getSAMLResponse( await createAccessToken(tokenUrl, username, password), ssoUrl); | ||
let oneTimeToken; | ||
await step3(tokenUrl, username, password).then((response) => { | ||
const data = response['data']; | ||
validateSAML(responseHtml); | ||
}; | ||
if (data['sessionToken']) { | ||
oneTimeToken = data['sessionToken']; | ||
} else { | ||
oneTimeToken = data['cookieToken']; | ||
} | ||
}); | ||
this.reauthenticate = async function (body, retryOption) { | ||
const maxRetryTimeout = connectionConfig.getRetryTimeout(); | ||
const maxRetryCount = connectionConfig.getRetrySfMaxLoginRetries(); | ||
const remainingTimeout = (maxRetryTimeout - retryOption.totalElapsedTime) * 1000; | ||
const startTime = Date.now(); | ||
const authRetryOption = { | ||
maxRetryCount, | ||
numRetries: retryOption.numRetries, | ||
startTime, | ||
remainingTimeout, | ||
maxRetryTimeout, | ||
}; | ||
let responseHtml; | ||
await step4(oneTimeToken, ssoUrl).then((response) => { | ||
responseHtml = response['data']; | ||
}); | ||
step5(responseHtml); | ||
}; | ||
while (util.shouldRetryOktaAuth(authRetryOption)) { | ||
try { | ||
responseHtml = await getSAMLResponse( await createAccessToken(tokenUrl, user, password), ssoUrl); | ||
break; | ||
} catch (err) { | ||
Logger.getInstance().debug('getSAMLResponse: refresh token for re-authentication'); | ||
authRetryOption.numRetries++; | ||
} | ||
} | ||
if (remainingTimeout !== 0 && startTime + remainingTimeout < Date.now()) { | ||
Logger.getInstance().warn(`getSAMLResponse: Fail to get SAML response, timeout reached: ${remainingTimeout} miliseconds`); | ||
throw new Error('Reached out to the Login Timeout'); | ||
} | ||
if (maxRetryCount < authRetryOption.numRetries){ | ||
Logger.getInstance().warn(`getSAMLResponse: Fail to get SAML response, max retry reached: ${maxRetryCount} time`); | ||
throw new Error('Reached out to the max retry count'); | ||
} | ||
retryOption.totalElapsedTime += ((Date.now() - startTime) / 1000); | ||
retryOption.numRetries = authRetryOption.numRetries; | ||
validateSAML(responseHtml); | ||
this.updateBody(body); | ||
}; | ||
/** | ||
* Obtain the SSO URL and token URL. | ||
* | ||
@@ -107,3 +133,3 @@ * @param {String} authenticator | ||
*/ | ||
function step1(authenticator, serviceName, account, username) { | ||
async function getAuthURLs(authenticator, serviceName, account, username) { | ||
// Create URL to send POST request to | ||
@@ -133,13 +159,8 @@ const url = protocol + '://' + host + '/session/authenticator-request'; | ||
// POST request to get SSO URL and token URL | ||
return httpClient | ||
.post(url, body, { | ||
headers: header | ||
}) | ||
.catch(requestErr => { | ||
throw requestErr; | ||
}); | ||
return await httpClient.post(url, body, { | ||
headers: header | ||
}); | ||
} | ||
/** | ||
* Check the URLs prefix are equal to the authenticator. | ||
* | ||
@@ -152,3 +173,3 @@ * @param {String} authenticator | ||
*/ | ||
function step2(authenticator, ssoUrl, tokenUrl) { | ||
function validateURLs(authenticator, ssoUrl, tokenUrl) { | ||
authenticator = authenticator.toLowerCase(); | ||
@@ -162,3 +183,2 @@ if (!(authenticator.startsWith(ssoUrl.substring(0, authenticator.length)) && | ||
/** | ||
* Retrieve the access token through the token url. | ||
* | ||
@@ -171,3 +191,3 @@ * @param {String} tokenUrl | ||
*/ | ||
function step3(tokenUrl, username, password) { | ||
async function createAccessToken(tokenUrl, username, password) { | ||
// JSON body to send with POST request | ||
@@ -180,11 +200,15 @@ const body = { | ||
// Query IDP token url to authenticate and retrieve access token | ||
return httpClient | ||
.post(tokenUrl, body) | ||
.catch(requestErr => { | ||
throw requestErr; | ||
}); | ||
const response = await httpClient.post(tokenUrl, body); | ||
const data = response['data']; | ||
let oneTimeToken; | ||
if (data['sessionToken']) { | ||
oneTimeToken = data['sessionToken']; | ||
} else { | ||
oneTimeToken = data['cookieToken']; | ||
} | ||
return oneTimeToken; | ||
} | ||
/** | ||
* Retrieve the SAML response through the SSO URL. | ||
* | ||
@@ -196,18 +220,15 @@ * @param {String} oneTimeToken | ||
*/ | ||
function step4(oneTimeToken, ssoUrl) { | ||
async function getSAMLResponse(oneTimeToken, ssoUrl) { | ||
// Query IDP URL to get SAML response | ||
return httpClient | ||
.get(ssoUrl, { | ||
params: { | ||
'RelayState': '/some/deep/link', | ||
'onetimetoken': oneTimeToken, | ||
} } | ||
) | ||
.catch(requestErr => { | ||
throw requestErr; | ||
}); | ||
const response = await httpClient.get(ssoUrl, { | ||
params: { | ||
'RelayState': '/some/deep/link', | ||
'onetimetoken': oneTimeToken, | ||
} } | ||
); | ||
return response['data']; | ||
} | ||
/** | ||
* Validate the postback URL inside the SAML response. | ||
* | ||
@@ -218,3 +239,3 @@ * @param {String} responseHtml | ||
*/ | ||
function step5(responseHtml) { | ||
function validateSAML(responseHtml) { | ||
const postBackUrl = getPostBackUrlFromHtml(responseHtml); | ||
@@ -230,3 +251,2 @@ const fullUrl = util.format('%s://%s:%s', protocol, host, port); | ||
} | ||
samlResponse = responseHtml; | ||
@@ -233,0 +253,0 @@ } |
@@ -18,3 +18,3 @@ /* | ||
* @param {Object} connectionConfig | ||
* @param {Object} ssoUrlProvider | ||
* @param {Object} httpClient | ||
* @param {module} webbrowser | ||
@@ -21,0 +21,0 @@ * |
@@ -11,2 +11,4 @@ /* | ||
let authenticator; | ||
const authenticationTypes = | ||
@@ -72,27 +74,24 @@ { | ||
exports.getAuthenticator = function getAuthenticator(connectionConfig, httpClient) { | ||
const auth = connectionConfig.getAuthenticator(); | ||
if (auth === authenticationTypes.DEFAULT_AUTHENTICATOR) { | ||
return new AuthDefault(connectionConfig.password); | ||
} else if (auth === authenticationTypes.EXTERNAL_BROWSER_AUTHENTICATOR) { | ||
return new AuthWeb(connectionConfig, httpClient); | ||
const authType = connectionConfig.getAuthenticator(); | ||
let auth; | ||
if (authType === authenticationTypes.DEFAULT_AUTHENTICATOR) { | ||
auth = new AuthDefault(connectionConfig.password); | ||
} else if (authType === authenticationTypes.EXTERNAL_BROWSER_AUTHENTICATOR) { | ||
auth = new AuthWeb(connectionConfig, httpClient); | ||
} | ||
if (auth === authenticationTypes.KEY_PAIR_AUTHENTICATOR) { | ||
return new AuthKeypair(connectionConfig.getPrivateKey(), | ||
if (authType === authenticationTypes.KEY_PAIR_AUTHENTICATOR) { | ||
auth = new AuthKeypair(connectionConfig.getPrivateKey(), | ||
connectionConfig.getPrivateKeyPath(), | ||
connectionConfig.getPrivateKeyPass()); | ||
} else if (auth === authenticationTypes.OAUTH_AUTHENTICATOR) { | ||
return new AuthOauth(connectionConfig.getToken()); | ||
} else if (this.isOktaAuth(auth)) { | ||
return new AuthOkta(connectionConfig.password, | ||
connectionConfig.region, | ||
connectionConfig.account, | ||
connectionConfig.getClientType(), | ||
connectionConfig.getClientVersion(), | ||
httpClient | ||
); | ||
} else if (authType === authenticationTypes.OAUTH_AUTHENTICATOR) { | ||
auth = new AuthOauth(connectionConfig.getToken()); | ||
} else if (this.isOktaAuth(authType)) { | ||
auth = new AuthOkta(connectionConfig, httpClient); | ||
} else { | ||
// Authenticator specified does not exist | ||
return new AuthDefault(connectionConfig.password); | ||
auth = new AuthDefault(connectionConfig.password); | ||
} | ||
authenticator = auth; | ||
return auth; | ||
}; | ||
@@ -109,1 +108,5 @@ | ||
}; | ||
exports.getCurrentAuth = function () { | ||
return authenticator; | ||
}; |
@@ -7,3 +7,3 @@ /* | ||
const fs = require('fs'); | ||
const { isString } = require('../util'); | ||
const { isString, exists, isFileNotWritableByGroupOrOthers, getDriverDirectory } = require('../util'); | ||
const Logger = require('../logger'); | ||
@@ -21,6 +21,42 @@ const clientConfigFileName = 'sf_client_config.json'; | ||
const defaultDirectories = getDefaultDirectories(); | ||
function getDefaultDirectories() { | ||
const directories = []; | ||
const driverDirectory = getDriverDirectory(); | ||
if (driverDirectory) { | ||
directories.push( | ||
{ | ||
dir: driverDirectory, | ||
dirDescription: 'driver' | ||
} | ||
); | ||
} else { | ||
Logger.getInstance().warn('Driver directory is not defined'); | ||
} | ||
const homedir = os.homedir(); | ||
if (exists(homedir)) { | ||
directories.push( | ||
{ | ||
dir: homedir, | ||
dirDescription: 'home' | ||
} | ||
); | ||
} else { | ||
Logger.getInstance().warn('Home directory of the user is not present'); | ||
} | ||
return directories; | ||
} | ||
const knownCommonEntries = ['log_level', 'log_path']; | ||
const allLevels = Object.values(Levels); | ||
class ClientConfig { | ||
constructor(loggingConfig) { | ||
constructor(filePath, loggingConfig) { | ||
this.configPath = filePath; | ||
this.loggingConfig = loggingConfig; | ||
@@ -79,7 +115,16 @@ } | ||
const path = await findConfig(configFilePath); | ||
if (path == null) { | ||
if (!exists(path) || path === '') { | ||
return null; | ||
} | ||
const isFileOk = await isFileNotWritableByGroupOrOthers(path, fsPromises).catch(err => { | ||
throw new ConfigurationError('Finding client configuration failed', err); | ||
}); | ||
if (!isFileOk) { | ||
throw new ConfigurationError(`Configuration file: ${path} can be modified by group or others`, 'IncorrectPerms'); | ||
} | ||
const configFileContents = await readFileConfig(path); | ||
return configFileContents == null ? null : parseConfigFile(configFileContents); | ||
return configFileContents == null ? null : parseConfigFile(path, configFileContents); | ||
}; | ||
@@ -97,7 +142,9 @@ | ||
function parseConfigFile(configurationJson) { | ||
function parseConfigFile(path, configurationJson) { | ||
try { | ||
const parsedConfiguration = JSON.parse(configurationJson); | ||
checkUnknownEntries(parsedConfiguration); | ||
validate(parsedConfiguration); | ||
return new ClientConfig( | ||
path, | ||
new ClientLoggingConfig( | ||
@@ -113,2 +160,10 @@ getLogLevel(parsedConfiguration), | ||
function checkUnknownEntries(config) { | ||
for (const key in config.common) { | ||
if (!knownCommonEntries.includes(key.toLowerCase())) { | ||
Logger.getInstance().warn('Unknown configuration entry: %s with value: %s', key, config.common[key]); | ||
} | ||
} | ||
} | ||
function validate(configuration) { | ||
@@ -148,8 +203,19 @@ validateLogLevel(configuration); | ||
function findConfig(filePathFromConnectionString) { | ||
return verifyNotEmpty(filePathFromConnectionString) | ||
.then((filePath) => filePath ?? getFilePathFromEnvironmentVariable()) | ||
.then((filePath) => filePath ?? searchForConfigInDictionary(() => '.', 'driver')) | ||
.then((filePath) => filePath ?? searchForConfigInDictionary(() => os.homedir(), 'home')) | ||
.then((filePath) => filePath ?? searchForConfigInDictionary(() => os.tmpdir(), 'temp')); | ||
async function findConfig(filePathFromConnectionString) { | ||
if (exists(filePathFromConnectionString)) { | ||
Logger.getInstance().info('Using client configuration path from a connection string: %s', filePathFromConnectionString); | ||
return filePathFromConnectionString; | ||
} | ||
const filePathFromEnvVariable = await getFilePathFromEnvironmentVariable(); | ||
if (exists(filePathFromEnvVariable)) { | ||
Logger.getInstance().info('Using client configuration path from an environment variable: %s', filePathFromEnvVariable); | ||
return filePathFromEnvVariable; | ||
} | ||
const fileFromDefDirs = await searchForConfigInDefaultDirectories(); | ||
if (exists(fileFromDefDirs)) { | ||
Logger.getInstance().info('Using client configuration path from %s directory: %s', fileFromDefDirs.dirDescription, fileFromDefDirs.configPath); | ||
return fileFromDefDirs.configPath; | ||
} | ||
Logger.getInstance().info('No client config file found in default directories'); | ||
return null; | ||
} | ||
@@ -165,7 +231,16 @@ | ||
async function searchForConfigInDictionary(directoryProvider, directoryDescription) { | ||
async function searchForConfigInDefaultDirectories() { | ||
for (const directory of defaultDirectories) { | ||
const configPath = await searchForConfigInDictionary(directory.dir, directory.dirDescription); | ||
if (exists(configPath)) { | ||
return { configPath: configPath, dirDescription: directory.dirDescription }; | ||
} | ||
} | ||
return null; | ||
} | ||
async function searchForConfigInDictionary(directory, directoryDescription) { | ||
try { | ||
const directory = directoryProvider(); | ||
const filePath = path.join(directory, clientConfigFileName); | ||
return onlyIfFileExists(filePath); | ||
return await onlyIfFileExists(filePath); | ||
} catch (e) { | ||
@@ -172,0 +247,0 @@ Logger.getInstance().error('Error while searching for the client config in %s directory: %s', directoryDescription, e); |
@@ -6,7 +6,5 @@ /* | ||
const Readable = require('stream').Readable; | ||
const fs = require('fs'); | ||
const Statement = require('./statement'); | ||
const fileCompressionType = require('.././file_transfer_agent/file_compression_type'); | ||
@@ -69,3 +67,3 @@ const STAGE_NAME = 'SYSTEM$BIND'; | ||
sqlText: putStmt, fileStream: fileData, | ||
complete: function (err, stmt, rows) { | ||
complete: function (err, stmt) { | ||
if (err) { | ||
@@ -72,0 +70,0 @@ Logger.getInstance().debug('err ' + err); |
@@ -55,2 +55,3 @@ /* | ||
'retryTimeout', | ||
'forceGCPUseDownscopedCredential' | ||
]; | ||
@@ -485,2 +486,11 @@ const Logger = require('../logger'); | ||
if (Util.exists(options.forceGCPUseDownscopedCredential)) { | ||
Errors.checkArgumentValid(Util.isBoolean(options.forceGCPUseDownscopedCredential), | ||
ErrorCodes.ERR_CONN_CREATE_INVALID_FORCE_GCP_USE_DOWNSCOPED_CREDENTIAL); | ||
process.env.SNOWFLAKE_FORCE_GCP_USE_DOWNSCOPED_CREDENTIAL = options.forceGCPUseDownscopedCredential; | ||
} else { | ||
process.env.SNOWFLAKE_FORCE_GCP_USE_DOWNSCOPED_CREDENTIAL = false; | ||
} | ||
/** | ||
@@ -487,0 +497,0 @@ * Returns an object that contains information about the proxy hostname, port, |
@@ -7,3 +7,2 @@ /* | ||
const QueryString = require('querystring'); | ||
const GSErrors = require('../constants/gs_errors'); | ||
const QueryStatus = require('../constants/query_status'); | ||
@@ -20,2 +19,3 @@ | ||
const { isOktaAuth } = require('../authentication/authentication'); | ||
const { init: initEasyLogging } = require('../logger/easy_logging_starter'); | ||
@@ -239,6 +239,15 @@ const PRIVATELINK_URL_SUFFIX = '.privatelink.snowflakecomputing.com'; | ||
services.sf.connect({ | ||
callback: connectCallback(self, callback), | ||
json: body | ||
}); | ||
initEasyLogging(connectionConfig.clientConfigFile) | ||
.then(() => { | ||
try { | ||
services.sf.connect({ | ||
callback: connectCallback(self, callback), | ||
json: body | ||
}); | ||
} catch (e) { | ||
// we don't expect an error here since callback method should be called | ||
Logger.getInstance().error('Unexpected error from calling callback function', e); | ||
} | ||
}) | ||
.catch(() => callback(Errors.createClientError(ErrorCodes.ERR_CONN_CONNECT_INVALID_CLIENT_CONFIG, true))); | ||
return this; | ||
@@ -274,2 +283,8 @@ }; | ||
try { | ||
await initEasyLogging(connectionConfig.clientConfigFile); | ||
} catch (err) { | ||
throw Errors.createClientError(ErrorCodes.ERR_CONN_CONNECT_INVALID_CLIENT_CONFIG, true); | ||
} | ||
try { | ||
await auth.authenticate(connectionConfig.getAuthenticator(), | ||
@@ -430,3 +445,3 @@ connectionConfig.getServiceName(), | ||
this.getQueryStatusThrowIfError = async function (queryId) { | ||
const status = this.getQueryStatus(queryId); | ||
const status = await this.getQueryStatus(queryId); | ||
@@ -433,0 +448,0 @@ let message, code, sqlState = null; |
@@ -236,8 +236,6 @@ /* | ||
* @param {String} rawColumnValue | ||
* @param {Object} column | ||
* @param {Object} context | ||
* | ||
* @returns {Object} | ||
*/ | ||
function convertRawNumber(rawColumnValue, column, context) { | ||
function convertRawNumber(rawColumnValue) { | ||
return { | ||
@@ -255,7 +253,5 @@ raw: rawColumnValue, | ||
* @param rawColumnValue | ||
* @param column | ||
* @param context | ||
* @returns {{processed: bigInt.BigInteger, raw: *}} | ||
*/ | ||
function convertRawBigInt(rawColumnValue, column, context) { | ||
function convertRawBigInt(rawColumnValue) { | ||
return { | ||
@@ -272,8 +268,6 @@ raw: rawColumnValue, | ||
* @param {String} rawColumnValue | ||
* @param {Object} column | ||
* @param {Object} context | ||
* | ||
* @returns {Boolean} | ||
*/ | ||
function convertRawBoolean(rawColumnValue, column, context) { | ||
function convertRawBoolean(rawColumnValue) { | ||
let ret; | ||
@@ -486,8 +480,6 @@ | ||
* @param {String} rawColumnValue | ||
* @param {Object} column | ||
* @param {Object} context | ||
* | ||
* @returns {Object | Array} | ||
*/ | ||
function convertRawVariant(rawColumnValue, column, context) { | ||
function convertRawVariant(rawColumnValue) { | ||
// if the input is a non-empty string, convert it to a json object | ||
@@ -494,0 +486,0 @@ if (Util.string.isNotNullOrEmpty(rawColumnValue)) { |
@@ -83,3 +83,3 @@ /* | ||
// Fire off requests to load all the chunks in the buffer that aren't already loading | ||
let chunk, index, length; | ||
let chunk, index; | ||
for (index = currChunk; index < chunks.length && index <= (currChunk + prefetchSize); index++) { | ||
@@ -86,0 +86,0 @@ chunk = chunks[index]; |
@@ -18,3 +18,3 @@ /* | ||
const NativeTypes = require('./result/data_types').NativeTypes; | ||
const FileTransferAgent = require('.././file_transfer_agent/file_transfer_agent'); | ||
const FileTransferAgent = require('../file_transfer_agent/file_transfer_agent'); | ||
const Bind = require('./bind_uploader'); | ||
@@ -62,3 +62,3 @@ const RowMode = require('./../constants/row_mode'); | ||
// call super | ||
BaseStatement.apply(this, arguments); | ||
BaseStatement.apply(this, [statementOptions, context, services, connectionConfig]); | ||
} | ||
@@ -620,6 +620,4 @@ | ||
* their own implementation. | ||
* | ||
* @param {Object} body | ||
*/ | ||
context.onStatementRequestSucc = function (body) { | ||
context.onStatementRequestSucc = function () { | ||
}; | ||
@@ -692,3 +690,3 @@ } | ||
// call super | ||
BaseStatement.apply(this, arguments); | ||
BaseStatement.apply(this, [statementOptions, context, services, connectionConfig]); | ||
@@ -795,3 +793,3 @@ // add the result request headers to the context | ||
// call super | ||
BaseStatement.apply(this, arguments); | ||
BaseStatement.apply(this, [statementOptions, context, services, connectionConfig]); | ||
@@ -874,6 +872,4 @@ // add the result request headers to the context | ||
* their own implementation. | ||
* | ||
* @param {Object} body | ||
*/ | ||
context.onStatementRequestSucc = function (body) { | ||
context.onStatementRequestSucc = function () { | ||
//do nothing | ||
@@ -942,3 +938,3 @@ }; | ||
// call super | ||
BaseStatement.apply(this, arguments); | ||
BaseStatement.apply(this, [statementOptions, context, services, connectionConfig]); | ||
@@ -1324,3 +1320,3 @@ // add the result request headers to the context | ||
return new Promise((resolve, reject) => { | ||
return new Promise((resolve) => { | ||
resolve(sf.postAsync(options)); | ||
@@ -1596,3 +1592,3 @@ }); | ||
function hasNextResult(statement, context) { | ||
return function (options) { | ||
return function () { | ||
return (context.multiResultIds != null && context.multiCurId + 1 < context.multiResultIds.length); | ||
@@ -1603,3 +1599,3 @@ }; | ||
function createNextReuslt(statement, context) { | ||
return function (options) { | ||
return function () { | ||
if (hasNextResult(statement, context)) { | ||
@@ -1606,0 +1602,0 @@ context.multiCurId++; |
@@ -75,2 +75,3 @@ /* | ||
exports[404047] = 'Invalid disableConsoleLogin. The specified value must be a boolean'; | ||
exports[404048] = 'Invalid disableGCPTokenUpload. The specified value must be a boolean'; | ||
@@ -77,0 +78,0 @@ // 405001 |
@@ -155,7 +155,10 @@ /* | ||
const logFilePath = options.logFilePath; | ||
const additionalLogToConsole = options.additionalLogToConsole; | ||
if (logLevel != null || logFilePath) { | ||
Logger.getInstance().debug(`Configuring logger with level: ${logLevel}, filePath: ${logFilePath}, additionalLogToConsole: ${additionalLogToConsole}`); | ||
Logger.getInstance().configure( | ||
{ | ||
level: logLevel, | ||
filePath: logFilePath | ||
filePath: logFilePath, | ||
additionalLogToConsole: additionalLogToConsole | ||
}); | ||
@@ -271,3 +274,3 @@ } | ||
return new Promise((resolve) => { | ||
connection.destroy(function (err, conn) { | ||
connection.destroy(function (err) { | ||
if (err) { | ||
@@ -274,0 +277,0 @@ Logger.getInstance().error('Unable to disconnect: ' + err.message); |
@@ -80,2 +80,3 @@ /* | ||
codes.ERR_CONN_CREATE_INVALID_DISABLE_CONSOLE_LOGIN = 404047; | ||
codes.ERR_CONN_CREATE_INVALID_FORCE_GCP_USE_DOWNSCOPED_CREDENTIAL = 404048; | ||
@@ -579,3 +580,3 @@ // 405001 | ||
if (!options.synchronous) { | ||
error.externalize = function (errorCode, errorMessageArgs, sqlState) { | ||
error.externalize = function () { | ||
const propNames = | ||
@@ -582,0 +583,0 @@ [ |
@@ -161,7 +161,6 @@ ///* | ||
* @param {Object} encryptionMetadata | ||
* @param {Number} maxConcurrency | ||
* | ||
* @returns {null} | ||
*/ | ||
this.uploadFileStream = async function (fileStream, meta, encryptionMetadata, maxConcurrency) { | ||
this.uploadFileStream = async function (fileStream, meta, encryptionMetadata) { | ||
const azureMetadata = { | ||
@@ -226,12 +225,10 @@ 'sfcdigest': meta['SHA256_DIGEST'] | ||
/** | ||
* Download the file blob then write the file. | ||
* | ||
* @param {String} dataFile | ||
* @param {Object} meta | ||
* @param {Object} encryptionMetadata | ||
* @param {Number} maxConcurrency | ||
* | ||
* @returns {null} | ||
*/ | ||
this.nativeDownloadFile = async function (meta, fullDstPath, maxConcurrency) { | ||
* Download the file blob then write the file. | ||
* | ||
* @param {Object} meta | ||
* @param fullDstPath | ||
* | ||
* @returns {null} | ||
*/ | ||
this.nativeDownloadFile = async function (meta, fullDstPath) { | ||
const stageInfo = meta['stageInfo']; | ||
@@ -238,0 +235,0 @@ const client = this.createClient(stageInfo); |
@@ -132,4 +132,3 @@ /* | ||
*/ | ||
this.encryptFileStream = async function (encryptionMaterial, fileStream, | ||
tmpDir = null, chunkSize = blockSize * 4 * 1024) { | ||
this.encryptFileStream = async function (encryptionMaterial, fileStream) { | ||
// Get decoded key from base64 encoded value | ||
@@ -202,3 +201,3 @@ const decodedKey = Buffer.from(encryptionMaterial[QUERY_STAGE_MASTER_KEY], BASE64); | ||
await new Promise(function (resolve, reject) { | ||
await new Promise(function (resolve) { | ||
const infile = fs.createReadStream(inFileName, { highWaterMark: chunkSize }); | ||
@@ -296,3 +295,3 @@ const outfile = fs.createWriteStream(tempOutputFileName); | ||
await new Promise(function (resolve, reject) { | ||
await new Promise(function (resolve) { | ||
const infile = fs.createReadStream(inFileName, { highWaterMark: chunkSize }); | ||
@@ -299,0 +298,0 @@ const outfile = fs.createWriteStream(tempOutputFileName); |
@@ -7,3 +7,2 @@ /* | ||
const crypto = require('crypto'); | ||
const glob = require('glob'); | ||
const fs = require('fs'); | ||
@@ -17,10 +16,11 @@ const os = require('os'); | ||
const expandTilde = require('expand-tilde'); | ||
const SnowflakeFileUtil = new (require('./file_util').FileUtil)(); | ||
const SnowflakeRemoteStorageUtil = new (require('./remote_storage_util').RemoteStorageUtil)(); | ||
const SnowflakeRemoteStorageUtil = require('./remote_storage_util').RemoteStorageUtil; | ||
const LocalUtil = require('./local_util').LocalUtil; | ||
const SnowflakeFileEncryptionMaterial = require('./remote_storage_util').SnowflakeFileEncryptionMaterial; | ||
const SnowflakeS3Util = new (require('./s3_util'))(); | ||
const SnowflakeLocalUtil = new (require('./local_util').LocalUtil)(); | ||
const SnowflakeS3Util = require('./s3_util'); | ||
const { FileUtil, getMatchingFilePaths } = require('./file_util'); | ||
const resultStatus = require('./file_util').resultStatus; | ||
const SnowflakeFileUtil = new FileUtil(); | ||
const SnowflakeLocalUtil = new LocalUtil(); | ||
const S3_FS = 'S3'; | ||
@@ -66,2 +66,3 @@ const AZURE_FS = 'AZURE'; | ||
function FileTransferAgent(context) { | ||
const remoteStorageUtil = new SnowflakeRemoteStorageUtil(context.connectionConfig); | ||
const response = context.fileMetadata; | ||
@@ -384,3 +385,3 @@ const command = context.sqlText; | ||
// Remove all files inside tmp folder | ||
const matchingFileNames = glob.sync(path.join(meta['tmpDir'], meta['srcFileName'] + '*')); | ||
const matchingFileNames = getMatchingFilePaths(meta['tmpDir'], meta['srcFileName'] + '*'); | ||
for (const matchingFileName of matchingFileNames) { | ||
@@ -472,6 +473,6 @@ await new Promise((resolve, reject) => { | ||
async function downloadOneFile(meta) { | ||
const tmpDir = await new Promise((resolve, reject) => { | ||
meta['tmpDir'] = await new Promise((resolve, reject) => { | ||
fs.mkdtemp(path.join(os.tmpdir(), 'tmp'), (err, dir) => { | ||
if (err) { | ||
reject(err); | ||
reject(err); | ||
} | ||
@@ -481,4 +482,2 @@ resolve(dir); | ||
}); | ||
meta['tmpDir'] = tmpDir; | ||
try { | ||
@@ -507,3 +506,3 @@ const storageClient = getStorageClient(meta['stageLocationType']); | ||
if (stageLocationType === S3_FS) { | ||
const client = SnowflakeRemoteStorageUtil.createClient(stageInfo, false); | ||
const client = remoteStorageUtil.createClient(stageInfo, false); | ||
const s3location = SnowflakeS3Util.extractBucketNameAndPath(stageInfo['location']); | ||
@@ -531,3 +530,3 @@ | ||
// presigned url only applies to remote storage | ||
if (storageClient === SnowflakeRemoteStorageUtil) { | ||
if (storageClient === remoteStorageUtil) { | ||
// presigned url only applies to GCS | ||
@@ -593,3 +592,3 @@ if (stageLocationType === GCS_FS) { | ||
stageLocationType === GCS_FS) { | ||
return SnowflakeRemoteStorageUtil; | ||
return remoteStorageUtil; | ||
} else { | ||
@@ -631,3 +630,3 @@ return null; | ||
// Get all file names that matches the wildcard | ||
const matchingFileNames = glob.sync(path.join(root, fileName)); | ||
const matchingFileNames = getMatchingFilePaths(root, fileName); | ||
@@ -634,0 +633,0 @@ for (const matchingFileName of matchingFileNames) { |
@@ -10,2 +10,4 @@ /* | ||
const zlib = require('zlib'); | ||
const os = require('os'); | ||
const glob = require('glob'); | ||
@@ -58,3 +60,3 @@ const resultStatus = { | ||
await new Promise(function (resolve, reject) { | ||
await new Promise(function (resolve) { | ||
// Create gzip object | ||
@@ -115,3 +117,3 @@ const gzip = zlib.createGzip(); | ||
let buffer = []; | ||
await new Promise(function (resolve, reject) { | ||
await new Promise(function (resolve) { | ||
// Create reader stream and set maximum chunk size | ||
@@ -138,3 +140,12 @@ const infile = fs.createReadStream(fileName, { highWaterMark: chunkSize }); | ||
} | ||
exports.FileUtil = FileUtil; | ||
exports.FileUtil = FileUtil; | ||
function getMatchingFilePaths(dir, fileName) { | ||
const pathWithWildcard = path.join(dir, fileName); | ||
const pathWithWildcardDependsOnPlatform = os.platform() === 'win32' | ||
? pathWithWildcard.replace(/\\/g, '/') | ||
: pathWithWildcard; | ||
return glob.sync(pathWithWildcardDependsOnPlatform); | ||
} | ||
exports.getMatchingFilePaths = getMatchingFilePaths; |
@@ -7,2 +7,3 @@ /* | ||
const FileHeader = require('./file_util').FileHeader; | ||
const { shouldPerformGCPBucket } = require('../util'); | ||
@@ -19,6 +20,4 @@ const GCS_METADATA_PREFIX = 'x-goog-meta-'; | ||
const GCS_FILE_HEADER_ENCRYPTION_METADATA = 'gcs-file-header-encryption-metadata'; | ||
const CONTENT_CHUNK_SIZE = 10 * 1024; | ||
const HTTP_HEADER_CONTENT_ENCODING = 'Content-Encoding'; | ||
const HTTP_HEADER_ACCEPT_ENCODING = 'Accept-Encoding'; | ||
const resultStatus = require('./file_util').resultStatus; | ||
@@ -144,3 +143,3 @@ | ||
try { | ||
if (accessToken) { | ||
if (shouldPerformGCPBucket(accessToken)) { | ||
const gcsLocation = this.extractBucketNameAndPath(meta['stageInfo']['location']); | ||
@@ -185,3 +184,3 @@ | ||
} catch (err) { | ||
const errCode = err['code'] ? err['code'] : err.response.status; | ||
const errCode = !isNaN(err['code']) && !isNaN(parseInt(err['code'])) ? err['code'] : err.response.status; | ||
@@ -230,7 +229,6 @@ if ([403, 408, 429, 500, 503].includes(errCode)) { | ||
* @param {Object} encryptionMetadata | ||
* @param {Number} maxConcurrency | ||
* | ||
* @returns {null} | ||
*/ | ||
this.uploadFileStream = async function (fileStream, meta, encryptionMetadata, maxConcurrency) { | ||
this.uploadFileStream = async function (fileStream, meta, encryptionMetadata) { | ||
let uploadUrl = meta['presignedUrl']; | ||
@@ -288,3 +286,3 @@ let accessToken = null; | ||
try { | ||
if (accessToken) { | ||
if (shouldPerformGCPBucket(accessToken)) { | ||
const gcsLocation = this.extractBucketNameAndPath(meta['stageInfo']['location']); | ||
@@ -338,10 +336,8 @@ | ||
* | ||
* @param {String} dataFile | ||
* @param {Object} meta | ||
* @param {Object} encryptionMetadata | ||
* @param {Number} maxConcurrency | ||
* @param fullDstPath | ||
* | ||
* @returns {null} | ||
*/ | ||
this.nativeDownloadFile = async function (meta, fullDstPath, maxConcurrency) { | ||
this.nativeDownloadFile = async function (meta, fullDstPath) { | ||
let downloadUrl = meta['presignedUrl']; | ||
@@ -365,3 +361,3 @@ let accessToken = null; | ||
try { | ||
if (accessToken) { | ||
if (shouldPerformGCPBucket(accessToken)) { | ||
const gcsLocation = this.extractBucketNameAndPath(meta['stageInfo']['location']); | ||
@@ -368,0 +364,0 @@ |
@@ -17,3 +17,3 @@ /* | ||
function LocalUtil() { | ||
this.createClient = function (stageInfo, useAccelerateEndpoint) { | ||
this.createClient = function () { | ||
return null; | ||
@@ -20,0 +20,0 @@ }; |
@@ -7,3 +7,3 @@ /* | ||
const path = require('path'); | ||
const SnowflakeS3Util = require('./s3_util'); | ||
const SnowflakeS3Util = require('./s3_util').S3Util; | ||
const SnowflakeAzureUtil = require('./azure_util'); | ||
@@ -30,6 +30,2 @@ const SnowflakeGCSUtil = require('./gcs_util'); | ||
function NeedRenewTokenError(Exception) { | ||
return; | ||
} | ||
/** | ||
@@ -41,3 +37,3 @@ * Creates a remote storage utility object. | ||
*/ | ||
function RemoteStorageUtil() { | ||
function RemoteStorageUtil(connectionConfig) { | ||
/** | ||
@@ -52,3 +48,3 @@ * Get storage type based on location type. | ||
if (type === 'S3') { | ||
return new SnowflakeS3Util(); | ||
return new SnowflakeS3Util(connectionConfig); | ||
} else if (type === 'AZURE') { | ||
@@ -65,7 +61,2 @@ return new SnowflakeAzureUtil(); | ||
* Create the client based on the location type. | ||
* | ||
* @param {Object} stageInfo | ||
* @param {Boolean} useAccelerateEndpoint | ||
* | ||
* @returns {Object} | ||
*/ | ||
@@ -72,0 +63,0 @@ this.createClient = function (stageInfo, useAccelerateEndpoint = false) { |
@@ -5,5 +5,7 @@ /* | ||
const { NodeHttpHandler } = require('@aws-sdk/node-http-handler'); | ||
const EncryptionMetadata = require('./encrypt_util').EncryptionMetadata; | ||
const FileHeader = require('./file_util').FileHeader; | ||
const expandTilde = require('expand-tilde'); | ||
const getProxyAgent = require('../http/node').getProxyAgent; | ||
@@ -17,4 +19,6 @@ const AMZ_IV = 'x-amz-iv'; | ||
const NO_SUCH_KEY = 'NoSuchKey'; | ||
const SNOWFLAKE_S3_DESTINATION = 's3.amazonaws.com'; | ||
const ERRORNO_WSAECONNABORTED = 10053; // network connection was aborted | ||
const DATA_SIZE_THRESHOLD = 67108864; // magic number, given from error message. | ||
@@ -36,21 +40,14 @@ const resultStatus = require('./file_util').resultStatus; | ||
* | ||
* @param {module} s3 | ||
* @param {module} filestream | ||
* @param connectionConfig | ||
* | ||
* @param s3 - used for tests, mock can be supplied | ||
* @param filestream - used for tests, mock can be supplied | ||
* @returns {Object} | ||
* @constructor | ||
*/ | ||
function S3Util(s3, filestream) { | ||
function S3Util(connectionConfig, s3, filestream) { | ||
const AWS = typeof s3 !== 'undefined' ? s3 : require('@aws-sdk/client-s3'); | ||
const fs = typeof filestream !== 'undefined' ? filestream : require('fs'); | ||
// magic number, given from error message. | ||
this.DATA_SIZE_THRESHOLD = 67108864; | ||
/** | ||
* Create an AWS S3 client using an AWS token. | ||
* | ||
* @param {Object} stageInfo | ||
* | ||
* @returns {AWS.S3} | ||
*/ | ||
@@ -60,3 +57,2 @@ this.createClient = function (stageInfo, useAccelerateEndpoint) { | ||
const securityToken = stageCredentials['AWS_TOKEN']; | ||
// if GS sends us an endpoint, it's likely for FIPS. Use it. | ||
@@ -77,34 +73,15 @@ let endPoint = null; | ||
endpoint: endPoint, | ||
useAccelerateEndpoint: useAccelerateEndpoint, | ||
useAccelerateEndpoint: useAccelerateEndpoint | ||
}; | ||
return new AWS.S3(config); | ||
}; | ||
/** | ||
* Extract the bucket name and path from the metadata's stage location. | ||
* | ||
* @param {String} stageLocation | ||
* | ||
* @returns {Object} | ||
*/ | ||
this.extractBucketNameAndPath = function (stageLocation) { | ||
// expand '~' and '~user' expressions | ||
if (process.platform !== 'win32') { | ||
stageLocation = expandTilde(stageLocation); | ||
const proxy = connectionConfig.getProxy(); | ||
if (proxy) { | ||
const proxyAgent = getProxyAgent(proxy, new URL(connectionConfig.accessUrl), SNOWFLAKE_S3_DESTINATION); | ||
config.requestHandler = new NodeHttpHandler({ | ||
httpAgent: proxyAgent, | ||
httpsAgent: proxyAgent | ||
}); | ||
} | ||
let bucketName = stageLocation; | ||
let s3path; | ||
// split stage location as bucket name and path | ||
if (stageLocation.includes('/')) { | ||
bucketName = stageLocation.substring(0, stageLocation.indexOf('/')); | ||
s3path = stageLocation.substring(stageLocation.indexOf('/') + 1, stageLocation.length); | ||
if (s3path && !s3path.endsWith('/')) { | ||
s3path += '/'; | ||
} | ||
} | ||
return S3Location(bucketName, s3path); | ||
return new AWS.S3(config); | ||
}; | ||
@@ -123,3 +100,3 @@ | ||
const client = this.createClient(stageInfo); | ||
const s3location = this.extractBucketNameAndPath(stageInfo['location']); | ||
const s3location = extractBucketNameAndPath(stageInfo['location']); | ||
@@ -206,3 +183,3 @@ const params = { | ||
const s3location = this.extractBucketNameAndPath(meta['stageInfo']['location']); | ||
const s3location = extractBucketNameAndPath(meta['stageInfo']['location']); | ||
@@ -248,3 +225,3 @@ const params = { | ||
const s3location = this.extractBucketNameAndPath(meta['stageInfo']['location']); | ||
const s3location = extractBucketNameAndPath(meta['stageInfo']['location']); | ||
@@ -287,2 +264,30 @@ const params = { | ||
module.exports = S3Util; | ||
/** | ||
* Extract the bucket name and path from the metadata's stage location. | ||
* | ||
* @param {String} stageLocation | ||
* | ||
* @returns {Object} | ||
*/ | ||
function extractBucketNameAndPath(stageLocation) { | ||
// expand '~' and '~user' expressions | ||
if (process.platform !== 'win32') { | ||
stageLocation = expandTilde(stageLocation); | ||
} | ||
let bucketName = stageLocation; | ||
let s3path; | ||
// split stage location as bucket name and path | ||
if (stageLocation.includes('/')) { | ||
bucketName = stageLocation.substring(0, stageLocation.indexOf('/')); | ||
s3path = stageLocation.substring(stageLocation.indexOf('/') + 1, stageLocation.length); | ||
if (s3path && !s3path.endsWith('/')) { | ||
s3path += '/'; | ||
} | ||
} | ||
return S3Location(bucketName, s3path); | ||
} | ||
module.exports = { S3Util, SNOWFLAKE_S3_DESTINATION, DATA_SIZE_THRESHOLD, extractBucketNameAndPath }; |
@@ -7,3 +7,2 @@ /* | ||
const Util = require('../util'); | ||
const Errors = require('../errors'); | ||
const Logger = require('../logger'); | ||
@@ -26,6 +25,2 @@ const axios = require('axios'); | ||
HttpClient.prototype.getConnectionConfig = function () { | ||
return this._connectionConfig; | ||
}; | ||
/** | ||
@@ -180,8 +175,5 @@ * Issues an HTTP request. | ||
* | ||
* @param {String} url | ||
* @param {Object} proxy | ||
* | ||
* @returns {*} | ||
*/ | ||
HttpClient.prototype.getAgent = function (url, proxy, mock) { | ||
HttpClient.prototype.getAgent = function () { | ||
return null; | ||
@@ -188,0 +180,0 @@ }; |
@@ -16,3 +16,3 @@ /* | ||
function BrowserHttpClient(connectionConfig) { | ||
Base.apply(this, arguments); | ||
Base.apply(this, [connectionConfig]); | ||
} | ||
@@ -19,0 +19,0 @@ |
@@ -8,7 +8,6 @@ /* | ||
const HttpsAgent = require('../agent/https_ocsp_agent'); | ||
const HttpsProxyAgent = require('../agent/https_proxy_ocsp_agent'); | ||
const HttpsProxyAgent = require('../agent/https_proxy_agent'); | ||
const HttpAgent = require('http').Agent; | ||
const GlobalConfig = require('../../lib/global_config'); | ||
const Logger = require('../logger'); | ||
const Url = require('url'); | ||
@@ -40,3 +39,3 @@ /** | ||
function NodeHttpClient(connectionConfig) { | ||
Base.apply(this, arguments); | ||
Base.apply(this, [connectionConfig]); | ||
} | ||
@@ -48,9 +47,5 @@ | ||
function getFromCacheOrCreate(agentClass, options, parsedUrl) { | ||
const protocol = parsedUrl.protocol; | ||
const port = parsedUrl.port || (protocol === 'http:' ? '80' : '443'); | ||
const agentId = `${protocol}//${parsedUrl.hostname}:${port}-${options.keepAlive ? 'keepAlive' : 'noKeepAlive'}`; | ||
function getFromCacheOrCreate(agentClass, options, agentId) { | ||
let agent = {}; | ||
function createAgent(agentClass, agentOptions) { | ||
function createAgent(agentClass, agentOptions, agentId) { | ||
const agent = agentClass(agentOptions); | ||
@@ -76,3 +71,3 @@ httpsAgentCache.set(agentId, agent); | ||
} else { | ||
agent = createAgent(agentClass, options); | ||
agent = createAgent(agentClass, options, agentId); | ||
} | ||
@@ -82,3 +77,3 @@ return agent; | ||
function prepareProxyAgentOptions(agentOptions, proxy) { | ||
function enrichAgentOptionsWithProxyConfig(agentOptions, proxy) { | ||
agentOptions.host = proxy.host; | ||
@@ -93,3 +88,3 @@ agentOptions.port = proxy.port; | ||
function isBypassProxy(proxy, url, bypassProxy) { | ||
function isBypassProxy(proxy, destination) { | ||
if (proxy && proxy.noProxy) { | ||
@@ -100,5 +95,5 @@ const bypassList = proxy.noProxy.split('|'); | ||
host = host.replace('*', '.*?'); | ||
const matches = url.match(host); | ||
const matches = destination.match(host); | ||
if (matches) { | ||
Logger.getInstance().debug('bypassing proxy for %s', url); | ||
Logger.getInstance().debug('bypassing proxy for %s', destination); | ||
return true; | ||
@@ -115,3 +110,12 @@ } | ||
NodeHttpClient.prototype.getAgent = function (parsedUrl, proxy, mock) { | ||
const agentOptions = { keepAlive: GlobalConfig.getKeepAlive() }; | ||
return getProxyAgent(proxy, parsedUrl, parsedUrl.href, mock); | ||
}; | ||
function getProxyAgent(proxyOptions, parsedUrl, destination, mock) { | ||
const agentOptions = { | ||
protocol: parsedUrl.protocol, | ||
hostname: parsedUrl.hostname, | ||
keepAlive: GlobalConfig.getKeepAlive() | ||
}; | ||
if (mock) { | ||
@@ -124,22 +128,27 @@ const mockAgent = mock.agentClass(agentOptions); | ||
const bypassProxy = isBypassProxy(proxy, parsedUrl.href); | ||
let agent = {}; | ||
const isHttps = parsedUrl.protocol === 'https:'; | ||
const agentId = createAgentId(agentOptions.protocol, agentOptions.hostname, agentOptions.keepAlive); | ||
const bypassProxy = isBypassProxy(proxyOptions, destination); | ||
let agent; | ||
const isHttps = agentOptions.protocol === 'https:'; | ||
if (isHttps) { | ||
if (proxy && !bypassProxy) { | ||
prepareProxyAgentOptions(agentOptions, proxy); | ||
agent = getFromCacheOrCreate(HttpsProxyAgent, agentOptions, parsedUrl); | ||
if (proxyOptions && !bypassProxy) { | ||
enrichAgentOptionsWithProxyConfig(agentOptions, proxyOptions); | ||
agent = getFromCacheOrCreate(HttpsProxyAgent, agentOptions, agentId); | ||
} else { | ||
agent = getFromCacheOrCreate(HttpsAgent, agentOptions, parsedUrl); | ||
agent = getFromCacheOrCreate(HttpsAgent, agentOptions, agentId); | ||
} | ||
} else if (proxy && !bypassProxy) { | ||
prepareProxyAgentOptions(agentOptions, proxy); | ||
agent = getFromCacheOrCreate(HttpAgent, agentOptions, parsedUrl); | ||
} else if (proxyOptions && !bypassProxy) { | ||
enrichAgentOptionsWithProxyConfig(agentOptions, proxyOptions); | ||
agent = getFromCacheOrCreate(HttpAgent, agentOptions, agentId); | ||
} else { | ||
agent = getFromCacheOrCreate(HttpAgent, agentOptions, parsedUrl); | ||
agent = getFromCacheOrCreate(HttpAgent, agentOptions, agentId); | ||
} | ||
return agent; | ||
}; | ||
} | ||
module.exports = NodeHttpClient; | ||
function createAgentId(protocol, hostname, keepAlive) { | ||
return `${protocol}//${hostname}-${keepAlive ? 'keepAlive' : 'noKeepAlive'}`; | ||
} | ||
module.exports = { NodeHttpClient, getProxyAgent }; |
@@ -70,5 +70,6 @@ /* | ||
* @param {String} message | ||
* @param params | ||
*/ | ||
this.error = function (message) { | ||
common.error.apply(common, arguments); | ||
this.error = function (message, ...params) { | ||
common.error.apply(common, [message, ...params]); | ||
}; | ||
@@ -80,5 +81,6 @@ | ||
* @param {String} message | ||
* @param params | ||
*/ | ||
this.warn = function (message) { | ||
common.warn.apply(common, arguments); | ||
this.warn = function (message, ...params) { | ||
common.warn.apply(common, [message, ...params]); | ||
}; | ||
@@ -90,5 +92,6 @@ | ||
* @param {String} message | ||
* @param params | ||
*/ | ||
this.info = function (message) { | ||
common.info.apply(common, arguments); | ||
this.info = function (message, ...params) { | ||
common.info.apply(common, [message, ...params]); | ||
}; | ||
@@ -100,5 +103,6 @@ | ||
* @param {String} message | ||
* @param params | ||
*/ | ||
this.debug = function (message) { | ||
common.debug.apply(common, arguments); | ||
this.debug = function (message, ...params) { | ||
common.debug.apply(common, [message, ...params]); | ||
}; | ||
@@ -110,5 +114,6 @@ | ||
* @param {String} message | ||
* @param params | ||
*/ | ||
this.trace = function (message) { | ||
common.trace.apply(common, arguments); | ||
this.trace = function (message, ...params ) { | ||
common.trace.apply(common, [message, ...params]); | ||
}; | ||
@@ -115,0 +120,0 @@ |
@@ -135,2 +135,3 @@ /* | ||
let localFilePath; | ||
let localAdditionalLogToConsole; | ||
@@ -147,2 +148,3 @@ // if an options argument is specified | ||
localFilePath = options.filePath; | ||
localAdditionalLogToConsole = options.additionalLogToConsole; | ||
} | ||
@@ -188,4 +190,4 @@ | ||
if (Util.exists(localFilePath) && Util.isFunction(reconfigureOperation)) { | ||
reconfigureOperation(localFilePath); | ||
if (Util.isFunction(reconfigureOperation)) { | ||
reconfigureOperation(localFilePath, localAdditionalLogToConsole); | ||
} | ||
@@ -192,0 +194,0 @@ }, |
@@ -11,2 +11,3 @@ /* | ||
const Logger = require('../logger'); | ||
const { isFileModeCorrect, exists } = require('../util'); | ||
const clientConfiguration = new ConfigurationUtil(); | ||
@@ -18,3 +19,2 @@ const getClientConfig = clientConfiguration.getClientConfig; | ||
/** | ||
* This is an experimental feature. Not ready to use yet. | ||
* @param {string} configFilePathFromConnectionString | ||
@@ -28,4 +28,6 @@ * @returns {Promise<void>} | ||
} | ||
Logger.getInstance().info('Trying to initialize Easy Logging'); | ||
const config = await getClientConfig(configFilePathFromConnectionString); | ||
if (!config) { | ||
Logger.getInstance().info('Easy Logging is disabled as no config has been found'); | ||
initTrialParameters = { | ||
@@ -39,5 +41,7 @@ configFilePathFromConnectionString: configFilePathFromConnectionString | ||
const logger = Logger.getInstance(); | ||
logger.info('Initializing Easy Logging with logPath=%s and logLevel=%s from file: %s', logPath, config.loggingConfig.logLevel, config.configPath); | ||
logger.configure({ | ||
level: logLevel, | ||
filePath: path.join(logPath, 'snowflake.log') | ||
filePath: path.join(logPath, 'snowflake.log'), | ||
additionalLogToConsole: false | ||
}); | ||
@@ -50,3 +54,3 @@ logger.easyLoggingConfigureCounter = (logger.easyLoggingConfigureCounter ?? 0) + 1; | ||
const error = new EasyLoggingError('Failed to initialize easy logging', err); | ||
Logger.getInstance().error(error.toString(), error); | ||
Logger.getInstance().error(error); | ||
throw error; | ||
@@ -56,5 +60,2 @@ } | ||
/** | ||
* This is an experimental feature. Not ready to use yet. | ||
*/ | ||
exports.reset = function () { | ||
@@ -71,3 +72,3 @@ initTrialParameters = undefined; | ||
if (!isAllowedToInitialize && initTrialParameters.configFilePathFromConnectionString !== configFilePathFromConnectionString) { | ||
Logger.getInstance().warn(`Easy logging will not be configured for CLIENT_CONFIG_FILE=${configFilePathFromConnectionString} because it was previously configured for a different client config`); | ||
Logger.getInstance().warn(`Easy logging will not be configured for CLIENT_CONFIG_FILE=${configFilePathFromConnectionString} because it was previously configured for a different client config`); | ||
} | ||
@@ -98,13 +99,20 @@ return isAllowedToInitialize; | ||
if (!logPath) { | ||
Logger.getInstance().warn('LogPath in client config not found. Using temporary directory as a default value'); | ||
logPath = os.tmpdir(); | ||
Logger.getInstance().warn('LogPath in client config not found. Using home directory as a default value'); | ||
logPath = os.homedir(); | ||
if (!exists(logPath)) { | ||
throw new EasyLoggingError('Home directory does not exist'); | ||
} | ||
} | ||
const pathWithNodeJsSubdirectory = path.join(logPath, 'nodejs'); | ||
await fsPromises.access(pathWithNodeJsSubdirectory, fs.constants.F_OK) | ||
.then(() => true) | ||
.catch(() => { | ||
.then(async () => { | ||
if (!(await isFileModeCorrect(pathWithNodeJsSubdirectory, 0o700, fsPromises))) { | ||
Logger.getInstance().warn('Log directory: %s could potentially be accessed by others', pathWithNodeJsSubdirectory); | ||
} | ||
}) | ||
.catch(async () => { | ||
try { | ||
return fsPromises.mkdir(pathWithNodeJsSubdirectory, { recursive: true }); | ||
await fsPromises.mkdir(pathWithNodeJsSubdirectory, { recursive: true, mode: 0o700 }); | ||
} catch (err) { | ||
throw new EasyLoggingError('Failed to create the directory for logs'); | ||
throw new EasyLoggingError(`Failed to create the directory for logs: ${pathWithNodeJsSubdirectory}`); | ||
} | ||
@@ -111,0 +119,0 @@ }); |
@@ -10,2 +10,4 @@ /* | ||
const DEFAULT_ADDITIONAL_LOG_TO_CONSOLE = true; | ||
/** | ||
@@ -22,2 +24,4 @@ * Creates a new Logger instance for when we're running in node. | ||
let filePath = getFilePath(options); | ||
let additionalLogToConsole = DEFAULT_ADDITIONAL_LOG_TO_CONSOLE; | ||
let transportLabels = []; | ||
@@ -50,3 +54,3 @@ this.setLogger = function (logger) { | ||
function closeTransport(transport, timeoutMillis) { | ||
function closeTransport(transport) { | ||
if (!transport.close) { | ||
@@ -58,5 +62,10 @@ return; | ||
function reconfigureWinstonLogger(filePathInput) { | ||
function reconfigureWinstonLogger(filePathInput, additionalLogToConsoleInput) { | ||
const currentWinstonLogger = winstonLogger; | ||
filePath = filePathInput ?? filePath; | ||
if (Util.isBoolean(additionalLogToConsoleInput)) { | ||
additionalLogToConsole = additionalLogToConsoleInput; | ||
} else { | ||
additionalLogToConsole = DEFAULT_ADDITIONAL_LOG_TO_CONSOLE; | ||
} | ||
winstonLogger = null; // it will be created for the first log operation | ||
@@ -68,2 +77,10 @@ if (currentWinstonLogger) { | ||
function setTransportLabels(transportLabelsInput) { | ||
transportLabels = transportLabelsInput; | ||
} | ||
this.getTransportLabels = function () { | ||
return transportLabels; | ||
}; | ||
/** | ||
@@ -75,14 +92,20 @@ * Logs a message at a given level. | ||
* @param {String} message the message to log. | ||
* @param {Number} bufferMaxLength the maximum size to which the message | ||
* buffer can grow. | ||
*/ | ||
const logMessage = function (levelTag, message, bufferMaxLength) { | ||
const logMessage = function (levelTag, message) { | ||
// initialize the winston logger if needed | ||
if (!winstonLogger) { | ||
const transports = 'STDOUT' === filePath.toUpperCase() | ||
? [new (winston.transports.Console)()] | ||
: [ | ||
new (winston.transports.Console)(), | ||
new (winston.transports.File)({ filename: filePath }) | ||
]; | ||
let transports; | ||
let transportLabels; | ||
if ('STDOUT' === filePath.toUpperCase()) { | ||
transports = [new (winston.transports.Console)()]; | ||
transportLabels = ['Console']; | ||
} else if (additionalLogToConsole === true) { | ||
transports = [new (winston.transports.Console)(), new (winston.transports.File)({ filename: filePath })]; | ||
transportLabels = ['Console', 'File']; | ||
} else { | ||
transports = [new (winston.transports.File)({ filename: filePath })]; | ||
transportLabels = ['File']; | ||
} | ||
winstonLogger = new winston.createLogger( | ||
@@ -94,2 +117,3 @@ { | ||
}); | ||
setTransportLabels(transportLabels); | ||
} | ||
@@ -127,2 +151,7 @@ | ||
this.configure = function (options) { | ||
if (Util.isBoolean(options.additionalLogToConsole)) { | ||
additionalLogToConsole = options.additionalLogToConsole; | ||
} else { | ||
additionalLogToConsole = DEFAULT_ADDITIONAL_LOG_TO_CONSOLE; | ||
} | ||
common.configure(options); | ||
@@ -144,5 +173,6 @@ }; | ||
* @param {String} message | ||
* @param params | ||
*/ | ||
this.error = function (message) { | ||
common.error.apply(common, arguments); | ||
this.error = function (message, ...params) { | ||
common.error.apply(common, [message, ...params]); | ||
}; | ||
@@ -154,5 +184,6 @@ | ||
* @param {String} message | ||
* @param params | ||
*/ | ||
this.warn = function (message) { | ||
common.warn.apply(common, arguments); | ||
this.warn = function (message, ...params) { | ||
common.warn.apply(common, [message, ...params]); | ||
}; | ||
@@ -164,5 +195,6 @@ | ||
* @param {String} message | ||
* @param params | ||
*/ | ||
this.info = function (message) { | ||
common.info.apply(common, arguments); | ||
this.info = function (message, ...params) { | ||
common.info.apply(common, [message, ...params]); | ||
}; | ||
@@ -174,5 +206,6 @@ | ||
* @param {String} message | ||
* @param params | ||
*/ | ||
this.debug = function (message) { | ||
common.debug.apply(common, arguments); | ||
this.debug = function (message, ...params) { | ||
common.debug.apply(common, [message, ...params]); | ||
}; | ||
@@ -184,5 +217,6 @@ | ||
* @param {String} message | ||
* @param params | ||
*/ | ||
this.trace = function (message) { | ||
common.trace.apply(common, arguments); | ||
this.trace = function (message, ...params) { | ||
common.trace.apply(common, [message, ...params]); | ||
}; | ||
@@ -189,0 +223,0 @@ |
@@ -47,3 +47,2 @@ /* | ||
const axios = require('axios'); | ||
const { v4: uuidv4 } = require('uuid'); | ||
@@ -60,2 +59,4 @@ const EventEmitter = require('events').EventEmitter; | ||
const Logger = require('../logger'); | ||
const { getCurrentAuth } = require('../authentication/authentication'); | ||
const AuthOkta = require('../authentication/auth_okta'); | ||
@@ -130,3 +131,2 @@ function isRetryableNetworkError(err) { | ||
let currentState; | ||
const isStageCreated = false; | ||
@@ -297,3 +297,3 @@ /** | ||
function OperationConnect(options) { | ||
OperationAbstract.apply(this, arguments); | ||
OperationAbstract.apply(this, [options]); | ||
} | ||
@@ -329,3 +329,3 @@ | ||
function OperationContinue(options) { | ||
OperationAbstract.apply(this, arguments); | ||
OperationAbstract.apply(this, [options]); | ||
} | ||
@@ -361,3 +361,3 @@ | ||
function OperationRequest(options) { | ||
OperationAbstract.apply(this, arguments); | ||
OperationAbstract.apply(this, [options]); | ||
} | ||
@@ -405,3 +405,3 @@ | ||
function OperationDestroy(options) { | ||
OperationAbstract.apply(this, arguments); | ||
OperationAbstract.apply(this, [options]); | ||
} | ||
@@ -638,3 +638,3 @@ | ||
} | ||
// if we have an error, clear the body | ||
@@ -870,6 +870,5 @@ if (err) { | ||
* Enters this state. | ||
* @param {Object} [context] | ||
* @abstract | ||
*/ | ||
StateAbstract.prototype.enter = function (context) { | ||
StateAbstract.prototype.enter = function () { | ||
}; | ||
@@ -887,6 +886,5 @@ | ||
* | ||
* @param {Object} options | ||
* @abstract | ||
*/ | ||
StateAbstract.prototype.connect = function (options) { | ||
StateAbstract.prototype.connect = function () { | ||
}; | ||
@@ -897,6 +895,5 @@ | ||
* | ||
* @param {Object} [options] | ||
* @abstract | ||
*/ | ||
StateAbstract.prototype.continue = function (options) { | ||
StateAbstract.prototype.continue = function () { | ||
}; | ||
@@ -907,6 +904,5 @@ | ||
* | ||
* @param {Object} options | ||
* @abstract | ||
*/ | ||
StateAbstract.prototype.request = function (options) { | ||
StateAbstract.prototype.request = function () { | ||
}; | ||
@@ -917,6 +913,5 @@ | ||
* | ||
* @param {Object} options | ||
* @abstract | ||
*/ | ||
StateAbstract.prototype.destroy = function (options) { | ||
StateAbstract.prototype.destroy = function () { | ||
}; | ||
@@ -1093,5 +1088,5 @@ | ||
Date.now() : 'FIXEDTIMESTAMP'; | ||
let numRetries = 1; | ||
let numRetries = 0; | ||
let sleep = connectionConfig.getRetrySfStartingSleepTime(); | ||
let totalTimeout = sleep; | ||
let totalElapsedTime = 0; | ||
Logger.getInstance().debug('Total retryTimeout is for the retries = ' + maxRetryTimeout === 0 ? | ||
@@ -1124,7 +1119,7 @@ 'unlimited' : maxRetryTimeout); | ||
isRetryableNetworkError(err) || isRetryableHttpError(err)) && | ||
(maxRetryTimeout === 0 || totalTimeout < maxRetryTimeout)) { | ||
(maxRetryTimeout === 0 || totalElapsedTime < maxRetryTimeout)) { | ||
numRetries++; | ||
const jitter = Util.getJitteredSleepTime(numRetries, sleep, totalTimeout, maxRetryTimeout); | ||
const jitter = Util.getJitteredSleepTime(numRetries, sleep, totalElapsedTime, maxRetryTimeout); | ||
sleep = jitter.sleep; | ||
totalTimeout = jitter.totalTimeout; | ||
totalElapsedTime = jitter.totalElapsedTime; | ||
@@ -1135,4 +1130,21 @@ if (sleep <= 0) { | ||
} | ||
setTimeout(sendRequest, sleep * 1000); | ||
return; | ||
const auth = getCurrentAuth(); | ||
if (auth instanceof AuthOkta) { | ||
Logger.getInstance().debug('OKTA authentication requires token refresh.'); | ||
const retryOption = { | ||
totalElapsedTime, | ||
numRetries, | ||
}; | ||
auth.reauthenticate(context.options.json, retryOption).then(() => { | ||
numRetries = retryOption.numRetries; | ||
totalElapsedTime = retryOption.totalElapsedTime; | ||
setTimeout(sendRequest, sleep * 1000); | ||
return; | ||
}); | ||
} else { | ||
setTimeout(sendRequest, sleep * 1000); | ||
return; | ||
} | ||
} else { | ||
@@ -1270,3 +1282,3 @@ Logger.getInstance().debug('Failed to all retries to SF.'); | ||
if (Util.isFunction(callbackOrig)) { | ||
await callbackOrig.apply(scopeOrig, arguments); | ||
await callbackOrig.apply(scopeOrig, [err, body]); | ||
} | ||
@@ -1305,3 +1317,3 @@ } else { | ||
if (Util.isFunction(callbackOrig)) { | ||
callbackOrig.apply(scopeOrig, arguments); | ||
callbackOrig.apply(scopeOrig, [err, body]); | ||
} | ||
@@ -1328,3 +1340,3 @@ } | ||
scope: this, | ||
callback: function (err, body) { | ||
callback: function (err) { | ||
// if the destroy request succeeded or the session already expired, we're disconnected | ||
@@ -1355,3 +1367,3 @@ if (!err || err.code === GSErrors.code.GONE_SESSION || err.code === GSErrors.code.SESSION_TOKEN_EXPIRED) { | ||
*/ | ||
StateRenewing.prototype.enter = function (context) { | ||
StateRenewing.prototype.enter = function () { | ||
// send out a master token request to renew the current session token | ||
@@ -1358,0 +1370,0 @@ this.createMasterTokenRequest( |
@@ -21,3 +21,3 @@ /* | ||
{ | ||
httpClientClass: require('./http/node'), | ||
httpClientClass: require('./http/node').NodeHttpClient, | ||
loggerClass: require('./logger/node'), | ||
@@ -24,0 +24,0 @@ client: |
@@ -8,2 +8,3 @@ /* | ||
const Url = require('url'); | ||
const os = require('os'); | ||
@@ -24,3 +25,3 @@ /** | ||
exports.inherits = function (constructor, superConstructor) { | ||
return util.inherits.apply(util, arguments); | ||
return util.inherits.apply(util, [constructor, superConstructor]); | ||
}; | ||
@@ -56,4 +57,4 @@ | ||
*/ | ||
exports.format = function (format) { | ||
return util.format.apply(util, arguments); | ||
exports.format = function (format, ...params) { | ||
return util.format.apply(util, [format, ...params]); | ||
}; | ||
@@ -332,3 +333,3 @@ | ||
} | ||
return retryUrl; | ||
@@ -399,13 +400,13 @@ } | ||
* | ||
* @param {Number} numofRetries | ||
* @param {Number} currentSleepTime | ||
* @param {Number} totalTimeout | ||
* @param {Number} numofRetries | ||
* @param {Number} currentSleepTime | ||
* @param {Number} totalElapsedTime | ||
* @param {Number} maxRetryTimeout | ||
* @returns {JSON} return next sleep Time and totalTime. | ||
*/ | ||
exports.getJitteredSleepTime = function (numofRetries, currentSleepTime, totalTimeout, maxRetryTimeout) { | ||
exports.getJitteredSleepTime = function (numofRetries, currentSleepTime, totalElapsedTime, maxRetryTimeout) { | ||
const nextsleep = getNextSleepTime(numofRetries, currentSleepTime); | ||
const sleep = maxRetryTimeout !== 0 ? Math.min((maxRetryTimeout - totalTimeout), nextsleep) : nextsleep; | ||
totalTimeout += sleep; | ||
return { sleep, totalTimeout }; | ||
const sleep = maxRetryTimeout !== 0 ? Math.min((maxRetryTimeout - totalElapsedTime), nextsleep) : nextsleep; | ||
totalElapsedTime += sleep; | ||
return { sleep, totalElapsedTime }; | ||
}; | ||
@@ -416,7 +417,7 @@ | ||
* | ||
* @param {Number} firstNumber | ||
* @param {Number} secondNumber | ||
* @param {Number} firstNumber | ||
* @param {Number} secondNumber | ||
* @returns {Number} return a random number between two numbers. | ||
*/ | ||
function chooseRandom(firstNumber, secondNumber) { | ||
function chooseRandom(firstNumber, secondNumber) { | ||
return Math.random() * (firstNumber - secondNumber) + secondNumber; | ||
@@ -429,4 +430,4 @@ } | ||
* return the next sleep Time. | ||
* @param {Number} numofRetries | ||
* @param {Number} currentSleepTime | ||
* @param {Number} numofRetries | ||
* @param {Number} currentSleepTime | ||
* @returns {Number} return jitter. | ||
@@ -443,3 +444,3 @@ */ | ||
* return the jitter value. | ||
* @param {Number} currentSleepTime | ||
* @param {Number} currentSleepTime | ||
* @returns {Number} return jitter. | ||
@@ -669,2 +670,47 @@ */ | ||
return input.toString().replace(/(^\w+:|^)\/\//, ''); | ||
}; | ||
}; | ||
exports.shouldPerformGCPBucket = function (accessToken) { | ||
return !!accessToken && process.env.SNOWFLAKE_FORCE_GCP_USE_DOWNSCOPED_CREDENTIAL !== 'true'; | ||
}; | ||
/** | ||
* Checks if the provided file or directory permissions are correct. | ||
* @param filePath | ||
* @param expectedMode | ||
* @param fsPromises | ||
* @returns {Promise<boolean>} resolves always to true for Windows | ||
*/ | ||
exports.isFileModeCorrect = async function (filePath, expectedMode, fsPromises) { | ||
if (os.platform() === 'win32') { | ||
return true; | ||
} | ||
return await fsPromises.stat(filePath).then((stats) => { | ||
// we have to limit the number of LSB bits to 9 with the mask, as the stats.mode starts with the file type, | ||
// e.g. the directory with permissions 755 will have stats.mask of 40755. | ||
const mask = (1 << 9) - 1; | ||
return (stats.mode & mask) === expectedMode; | ||
}); | ||
}; | ||
/** | ||
* Checks if the provided file or directory is writable only by the user. | ||
* @param configFilePath | ||
* @param fsPromises | ||
* @returns {Promise<boolean>} resolves always to true for Windows | ||
*/ | ||
exports.isFileNotWritableByGroupOrOthers = async function (configFilePath, fsPromises) { | ||
if (os.platform() === 'win32') { | ||
return true; | ||
} | ||
const stats = await fsPromises.stat(configFilePath); | ||
return (stats.mode & (1 << 4)) === 0 && (stats.mode & (1 << 1)) === 0; | ||
}; | ||
exports.shouldRetryOktaAuth = function ({ maxRetryTimeout, maxRetryCount, numRetries, startTime, remainingTimeout }) { | ||
return (maxRetryTimeout === 0 || Date.now() < startTime + remainingTimeout) && numRetries <= maxRetryCount; | ||
}; | ||
/**
 * Returns the directory this driver module resides in.
 * @returns {String} the module's directory path
 */
exports.getDriverDirectory = () => __dirname;
{ | ||
"name": "snowflake-sdk", | ||
"version": "1.9.3", | ||
"version": "1.10.0", | ||
"description": "Node.js driver for Snowflake", | ||
"dependencies": { | ||
"@aws-sdk/client-s3": "^3.388.0", | ||
"@aws-sdk/node-http-handler": "^3.374.0", | ||
"@azure/storage-blob": "^12.11.0", | ||
"@google-cloud/storage": "^6.9.3", | ||
"@google-cloud/storage": "^7.7.0", | ||
"@techteamer/ocsp": "1.0.1", | ||
@@ -10,0 +11,0 @@ "agent-base": "^6.0.2", |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Environment variable access
Supply chain risk: Package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
545080
15616
33
40
+ Added@aws-sdk/node-http-handler@3.374.0(transitive)
+ Added@google-cloud/paginator@5.0.2(transitive)
+ Added@google-cloud/projectify@4.0.0(transitive)
+ Added@google-cloud/promisify@4.0.0(transitive)
+ Added@google-cloud/storage@7.14.0(transitive)
+ Added@smithy/abort-controller@1.1.0(transitive)
+ Added@smithy/node-http-handler@1.1.0(transitive)
+ Added@smithy/protocol-http@1.2.0(transitive)
+ Added@smithy/querystring-builder@1.1.0(transitive)
+ Added@smithy/types@1.2.0(transitive)
+ Added@smithy/util-uri-escape@1.1.0(transitive)
+ Added@types/caseless@0.12.5(transitive)
+ Added@types/node@22.10.1(transitive)
+ Added@types/request@2.48.12(transitive)
+ Added@types/tough-cookie@4.0.5(transitive)
+ Addedform-data@2.5.2(transitive)
+ Addedgaxios@6.7.1(transitive)
+ Addedgcp-metadata@6.1.0(transitive)
+ Addedgoogle-auth-library@9.15.0(transitive)
+ Addedgtoken@7.1.0(transitive)
+ Addedhtml-entities@2.5.2(transitive)
+ Addedretry-request@7.0.2(transitive)
+ Addedteeny-request@9.0.0(transitive)
+ Addedundici-types@6.20.0(transitive)
- Removed@google-cloud/paginator@3.0.7(transitive)
- Removed@google-cloud/projectify@3.0.0(transitive)
- Removed@google-cloud/promisify@3.0.1(transitive)
- Removed@google-cloud/storage@6.12.0(transitive)
- Removedcompressible@2.0.18(transitive)
- Removedent@2.2.1(transitive)
- Removedfast-text-encoding@1.0.6(transitive)
- Removedgaxios@5.1.3(transitive)
- Removedgcp-metadata@5.3.0(transitive)
- Removedgoogle-auth-library@8.9.0(transitive)
- Removedgoogle-p12-pem@4.0.1(transitive)
- Removedgtoken@6.1.2(transitive)
- Removedlru-cache@6.0.0(transitive)
- Removedmime-db@1.53.0(transitive)
- Removednode-forge@1.3.1(transitive)
- Removedpunycode@1.4.1(transitive)
- Removedretry-request@5.0.2(transitive)
- Removedteeny-request@8.0.3(transitive)
- Removedyallist@4.0.0(transitive)
Updated @google-cloud/storage@^7.7.0