snowflake-sdk
Advanced tools
Comparing version 1.15.0 to 2.0.0
@@ -28,3 +28,3 @@ /* | ||
ERR_GLOBAL_CONFIGURE_INVALID_LOG_LEVEL = 403001, | ||
ERR_GLOBAL_CONFIGURE_INVALID_INSECURE_CONNECT = 403002, | ||
ERR_GLOBAL_CONFIGURE_INVALID_DISABLE_OCSP_CHECKS = 403002, | ||
ERR_GLOBAL_CONFIGURE_INVALID_OCSP_MODE = 403003, | ||
@@ -223,5 +223,5 @@ ERR_GLOBAL_CONFIGURE_INVALID_JSON_PARSER = 403004, | ||
/** | ||
* Check the ocsp checking is off. | ||
* The option to turn off the OCSP check. | ||
*/ | ||
insecureConnect?: boolean; | ||
disableOCSPChecks?: boolean; | ||
@@ -621,2 +621,10 @@ /** | ||
/** | ||
* The request GUID is a unique identifier of an HTTP request issued to Snowflake. | ||
* Unlike the requestId, it is regenerated even when the request is resend with the retry mechanism. | ||
* If not specified, request GUIDs are attached to all requests to Snowflake for better traceability. | ||
* In the majority of cases it should not be set or filled with false value. | ||
*/ | ||
excludeGuid?: string; | ||
/** | ||
* Use different rest endpoints based on whether the query id is available. | ||
@@ -623,0 +631,0 @@ */ |
@@ -118,3 +118,4 @@ /* | ||
let sync = true; | ||
const maxNumRetries = GlobalConfig.getOcspMode() === GlobalConfig.ocspModes.FAIL_CLOSED ? 5 : 1; | ||
const isFailClosed = GlobalConfig.getOcspMode() === GlobalConfig.ocspModes.FAIL_CLOSED; | ||
const maxNumRetries = isFailClosed ? 2 : 1; | ||
@@ -194,2 +195,11 @@ function done(err, data) { | ||
if (err) { | ||
//This error message is from @techteamer/ocsp (ocsp.utils.getAuthorityInfo) | ||
if (err.message === 'AuthorityInfoAccess not found in extensions') { | ||
if (!isFailClosed) { | ||
Logger.getInstance().debug('OCSP Responder URL is missing from the certificate.'); | ||
return done(null); | ||
} else { | ||
Logger.getInstance().error('OCSP Responder URL is missing from the certificate, so cannot verify with OCSP. Aborting connection attempt due to OCSP being set to FAIL_CLOSE https://docs.snowflake.com/en/user-guide/ocsp#fail-close'); | ||
} | ||
} | ||
return done(err); | ||
@@ -196,0 +206,0 @@ } |
@@ -16,7 +16,2 @@ /* | ||
const ocspFailOpenWarning = | ||
'WARNING!!! using fail-open to connect. Driver is connecting to an HTTPS endpoint ' + | ||
'without OCSP based Certificated Revocation checking as it could not obtain a valid OCSP Response to use from ' + | ||
'the CA OCSP responder. Details: '; | ||
const socketSecuredEvent = 'secureConnect'; | ||
@@ -124,3 +119,3 @@ | ||
// for non-snowflake endpoints and the host is a non-snowflake endpoint | ||
return GlobalConfig.isInsecureConnect() || | ||
return GlobalConfig.isOCSPChecksDisabled() || | ||
(Parameters.getValue(Parameters.names.JS_DRIVER_DISABLE_OCSP_FOR_NON_SF_ENDPOINTS) && | ||
@@ -163,3 +158,3 @@ !REGEX_SNOWFLAKE_ENDPOINT.test(host)); | ||
// any of the errors is NOT good/revoked/unknown | ||
Logger.getInstance().warn(ocspFailOpenWarning + err); | ||
Logger.getInstance().debug(`OCSP responder didn't respond correctly. Assuming certificate is not revoked. Details: ${err}`); | ||
return null; | ||
@@ -166,0 +161,0 @@ } else if (err && err.code === ErrorCodes.ERR_OCSP_REVOKED) { |
@@ -262,3 +262,3 @@ /* | ||
async function searchForConfigInDefaultDirectories() { | ||
Logger.getInstance().debug(`Searching for config in default directories: ${defaultDirectories}`); | ||
Logger.getInstance().debug(`Searching for config in default directories: ${JSON.stringify(defaultDirectories)}`); | ||
for (const directory of defaultDirectories) { | ||
@@ -265,0 +265,0 @@ const configPath = await searchForConfigInDictionary(directory.dir, directory.dirDescription); |
@@ -8,2 +8,3 @@ /* | ||
const Util = require('../util'); | ||
const ProxyUtil = require('../proxy_util'); | ||
const Errors = require('../errors'); | ||
@@ -20,2 +21,3 @@ const ConnectionConstants = require('../constants/connection_constants'); | ||
const Logger = require('../logger'); | ||
const LoggingUtil = require('../logger/logging_util'); | ||
const WAIT_FOR_BROWSER_ACTION_TIMEOUT = 120000; | ||
@@ -227,3 +229,3 @@ const DEFAULT_PARAMS = | ||
}; | ||
Util.validateProxy(proxy); | ||
ProxyUtil.validateProxy(proxy); | ||
} | ||
@@ -841,2 +843,29 @@ | ||
/** | ||
* Returns attributes of Connection Config object that can be used to identify | ||
* the connection, when ID is not available in the scope. This is not sufficient set, | ||
* since multiple connections can be instantiated for the same config, but can be treated as a hint. | ||
* | ||
* @returns {string} | ||
*/ | ||
this.describeIdentityAttributes = function () { | ||
return `host: ${this.host}, account: ${this.account}, accessUrl: ${this.accessUrl}, ` | ||
+ `user: ${this.username}, role: ${this.getRole()}, database: ${this.getDatabase()}, ` | ||
+ `schema: ${this.getSchema()}, warehouse: ${this.getWarehouse()}, ` + this.describeProxy(); | ||
}; | ||
/** | ||
* @returns {string} | ||
*/ | ||
this.describeProxy = function () { | ||
const proxy = this.getProxy(); | ||
if (Util.exists(proxy)) { | ||
return `proxyHost: ${proxy.host}, proxyPort: ${proxy.port}, proxyUser: ${proxy.user}, ` | ||
+ `proxyPassword is ${LoggingUtil.describePresence(proxy.password)}, ` | ||
+ `proxyProtocol: ${proxy.protocol}, noProxy: ${proxy.noProxy}`; | ||
} else { | ||
return 'proxy was not configured'; | ||
} | ||
}; | ||
// save config options | ||
@@ -843,0 +872,0 @@ this.username = options.username; |
@@ -10,3 +10,3 @@ /* | ||
const LoggingUtil = require('../logger/logging_utils'); | ||
const LoggingUtil = require('../logger/logging_util'); | ||
const Util = require('../util'); | ||
@@ -47,11 +47,7 @@ const Errors = require('../errors'); | ||
Logger.getInstance().info( | ||
'Creating Connection[id: %s] with host: %s, account: %s, accessUrl: %s, user: %s, ' | ||
+ 'password is %s, role: %s, database: %s, schema: %s, warehouse: %s, region: %s, ' | ||
'Creating Connection[id: %s] with %s, password is %s, region: %s, ' | ||
+ 'authenticator: %s, ocsp mode: %s, os: %s, os version: %s', | ||
id, | ||
connectionConfig.host, connectionConfig.account, | ||
connectionConfig.accessUrl, connectionConfig.username, | ||
connectionConfig.describeIdentityAttributes(), | ||
LoggingUtil.describePresence(connectionConfig.password), | ||
connectionConfig.getRole(), connectionConfig.getDatabase(), | ||
connectionConfig.getSchema(), connectionConfig.getWarehouse(), | ||
connectionConfig.region, connectionConfig.getAuthenticator(), | ||
@@ -158,3 +154,3 @@ connectionConfig.getClientEnvironment().OCSP_MODE, | ||
Logger.getInstance().trace('Issuing heartbeat call'); | ||
const requestID = uuidv4(); | ||
const requestId = uuidv4(); | ||
@@ -169,3 +165,3 @@ services.sf.request( | ||
{ | ||
requestId: requestID | ||
requestId: requestId | ||
}) | ||
@@ -204,3 +200,3 @@ }), | ||
} catch (e) { | ||
Logger.getInstance().debug('Connection[id: %s] - heartbeat failed: %s', this.getId(), JSON.stringify(e, Util.getCircularReplacer())); | ||
Logger.getInstance().debug('Connection[id: %s] - heartbeat failed: %s', this.getId(), JSON.stringify(e, Object.getOwnPropertyNames(e))); | ||
return false; | ||
@@ -207,0 +203,0 @@ } |
@@ -21,3 +21,3 @@ /* | ||
exports[403001] = 'Invalid logLevel. The specified value must be one of these five levels: error, warn, debug, info and trace.'; | ||
exports[403002] = 'Invalid insecureConnect option. The specified value must be a boolean.'; | ||
exports[403002] = 'Invalid disableOCSPChecks option. The specified value must be a boolean.'; | ||
exports[403003] = 'Invalid OCSP mode. The specified value must be FAIL_CLOSED, FAIL_OPEN, or INSECURE_MODE.'; | ||
@@ -24,0 +24,0 @@ exports[403004] = 'Invalid custom JSON parser. The specified value must be a function.'; |
@@ -190,3 +190,2 @@ /* | ||
if (logLevel != null || logFilePath) { | ||
Logger.getInstance().info('Configuring logger with level: %s, filePath: %s, additionalLogToConsole: %s', logLevel, logFilePath, additionalLogToConsole); | ||
Logger.getInstance().configure( | ||
@@ -198,12 +197,13 @@ { | ||
}); | ||
Logger.getInstance().info('Configuring logger with level: %s, filePath: %s, additionalLogToConsole: %s', logLevel, logFilePath, additionalLogToConsole); | ||
} | ||
const insecureConnect = options.insecureConnect; | ||
if (Util.exists(insecureConnect)) { | ||
const disableOCSPChecks = options.disableOCSPChecks; | ||
if (Util.exists(disableOCSPChecks)) { | ||
// check that the specified value is a boolean | ||
Errors.checkArgumentValid(Util.isBoolean(insecureConnect), | ||
ErrorCodes.ERR_GLOBAL_CONFIGURE_INVALID_INSECURE_CONNECT); | ||
Errors.checkArgumentValid(Util.isBoolean(disableOCSPChecks), | ||
ErrorCodes.ERR_GLOBAL_CONFIGURE_INVALID_DISABLE_OCSP_CHECKS); | ||
GlobalConfig.setInsecureConnect(insecureConnect); | ||
Logger.getInstance().debug('Setting insecureConnect to value from core options: %s', insecureConnect); | ||
GlobalConfig.setDisableOCSPChecks(disableOCSPChecks); | ||
Logger.getInstance().debug('Setting disableOCSPChecks to value from core options: %s', disableOCSPChecks); | ||
} | ||
@@ -210,0 +210,0 @@ |
@@ -26,3 +26,3 @@ /* | ||
codes.ERR_GLOBAL_CONFIGURE_INVALID_LOG_LEVEL = 403001; | ||
codes.ERR_GLOBAL_CONFIGURE_INVALID_INSECURE_CONNECT = 403002; | ||
codes.ERR_GLOBAL_CONFIGURE_INVALID_DISABLE_OCSP_CHECKS = 403002; | ||
codes.ERR_GLOBAL_CONFIGURE_INVALID_OCSP_MODE = 403003; | ||
@@ -346,3 +346,3 @@ codes.ERR_GLOBAL_CONFIGURE_INVALID_JSON_PARSER = 403004; | ||
code: errorCode, | ||
data: { ...data, queryId: null }, | ||
data: data, | ||
message: message, | ||
@@ -349,0 +349,0 @@ sqlState: sqlState |
@@ -9,2 +9,5 @@ /* | ||
const resultStatus = require('./file_util').resultStatus; | ||
const ProxyUtil = require('../proxy_util'); | ||
const { isBypassProxy } = require('../http/node'); | ||
const Logger = require('../logger'); | ||
@@ -30,3 +33,3 @@ const EXPIRED_TOKEN = 'ExpiredToken'; | ||
*/ | ||
function AzureUtil(azure, filestream) { | ||
function AzureUtil(connectionConfig, azure, filestream) { | ||
const AZURE = typeof azure !== 'undefined' ? azure : require('@azure/storage-blob'); | ||
@@ -47,7 +50,19 @@ const fs = typeof filestream !== 'undefined' ? filestream : require('fs'); | ||
const account = stageInfo['storageAccount']; | ||
const connectionString = `https://${account}.blob.core.windows.net${sasToken}`; | ||
let proxy = ProxyUtil.getProxy(connectionConfig.getProxy(), 'Azure Util'); | ||
if (proxy && !isBypassProxy(proxy, connectionString)) { | ||
Logger.getInstance().debug(`The destination host is: ${ProxyUtil.getHostFromURL(connectionString)} and the proxy host is: ${proxy.host}`); | ||
Logger.getInstance().trace(`Initializing the proxy information for the Azure Client: ${ProxyUtil.describeProxy(proxy)}`); | ||
proxy = ProxyUtil.getAzureProxy(proxy); | ||
Logger.getInstance().trace(connectionConfig.describe); | ||
} | ||
ProxyUtil.hideEnvironmentProxy(); | ||
const blobServiceClient = new AZURE.BlobServiceClient( | ||
`https://${account}.blob.core.windows.net${sasToken}` | ||
connectionString, null, | ||
{ | ||
proxyOptions: proxy, | ||
} | ||
); | ||
ProxyUtil.restoreEnvironmentProxy(); | ||
return blobServiceClient; | ||
@@ -209,3 +224,3 @@ }; | ||
} | ||
}); | ||
}); | ||
} catch (err) { | ||
@@ -222,3 +237,2 @@ if (err['statusCode'] === 403 && detectAzureTokenExpireError(err)) { | ||
} | ||
meta['dstFileSize'] = meta['uploadSize']; | ||
@@ -270,3 +284,2 @@ meta['resultStatus'] = resultStatus.UPLOADED; | ||
} | ||
meta['resultStatus'] = resultStatus.DOWNLOADED; | ||
@@ -291,3 +304,2 @@ }; | ||
} | ||
module.exports = AzureUtil; |
@@ -15,3 +15,25 @@ /* | ||
const BASE64 = 'base64'; | ||
const DEFAULT_AAD = Buffer.from(''); | ||
const AUTH_TAG_LENGTH_IN_BYTES = 16; | ||
const AES_CBC = { | ||
cipherName: function (keySizeInBytes) { | ||
return `aes-${keySizeInBytes * 8}-cbc`; | ||
}, | ||
ivSize: 16 | ||
}; | ||
const AES_ECB = { | ||
cipherName: function (keySizeInBytes) { | ||
return `aes-${keySizeInBytes * 8}-ecb`; | ||
} | ||
}; | ||
const AES_GCM = { | ||
cipherName: function (keySizeInBytes) { | ||
return `aes-${keySizeInBytes * 8}-gcm`; | ||
}, | ||
ivSize: 12 | ||
}; | ||
// Material Descriptor | ||
@@ -27,18 +49,13 @@ function MaterialDescriptor(smkId, queryId, keySize) { | ||
// Encryption Material | ||
function EncryptionMetadata(key, iv, matDesc) { | ||
function EncryptionMetadata(key, dataIv, matDesc, keyIv, dataAad, keyAad) { | ||
return { | ||
'key': key, | ||
'iv': iv, | ||
'matDesc': matDesc | ||
'iv': dataIv, | ||
'matDesc': matDesc, | ||
'keyIv': keyIv, | ||
'dataAad': dataAad, | ||
'keyAad': keyAad | ||
}; | ||
} | ||
function aesCbc(keySizeInBytes) { | ||
return `aes-${keySizeInBytes * 8}-cbc`; | ||
} | ||
function aesEcb(keySizeInBytes) { | ||
return `aes-${keySizeInBytes * 8}-ecb`; | ||
} | ||
exports.EncryptionMetadata = EncryptionMetadata; | ||
@@ -102,2 +119,3 @@ | ||
const crypto = typeof encrypt !== 'undefined' ? encrypt : require('crypto'); | ||
// TODO: SNOW-1814883: Replace 'fs' with 'fs/promises' | ||
const fs = typeof filestream !== 'undefined' ? filestream : require('fs'); | ||
@@ -131,123 +149,137 @@ const tmp = typeof temp !== 'undefined' ? temp : new TempFileGenerator(); | ||
function createEncryptionMetadata(encryptionMaterial, keySize, encryptedKey, dataIv, keyIv = null, dataAad = null, keyAad = null) { | ||
const matDesc = new MaterialDescriptor( | ||
encryptionMaterial.smkId, | ||
encryptionMaterial.queryId, | ||
keySize * 8 | ||
); | ||
return new EncryptionMetadata( | ||
encryptedKey.toString(BASE64), | ||
dataIv.toString(BASE64), | ||
matDescToUnicode(matDesc), | ||
keyIv ? keyIv.toString(BASE64) : null, | ||
dataAad ? dataAad.toString(BASE64) : null, | ||
keyAad ? keyAad.toString(BASE64) : null | ||
); | ||
} | ||
/** | ||
* Encrypt file stream using AES algorithm. | ||
* | ||
* @param {Object} encryptionMaterial | ||
* @param {String} fileStream | ||
* @param {String} tmpDir | ||
* @param {Number} chunkSize | ||
* | ||
* @returns {Object} | ||
*/ | ||
this.encryptFileStream = async function (encryptionMaterial, fileStream) { | ||
// Get decoded key from base64 encoded value | ||
const decodedKey = Buffer.from(encryptionMaterial[QUERY_STAGE_MASTER_KEY], BASE64); | ||
const keySize = decodedKey.length; | ||
* Encrypt content using AES-CBC algorithm. | ||
*/ | ||
this.encryptFileStream = async function (encryptionMaterial, content) { | ||
return this.encryptDataCBC(encryptionMaterial, content); | ||
}; | ||
// Get secure random bytes with block size | ||
const ivData = getSecureRandom(blockSize); | ||
this.encryptDataCBC = function (encryptionMaterial, data) { | ||
const decodedKek = Buffer.from(encryptionMaterial[QUERY_STAGE_MASTER_KEY], BASE64); | ||
const keySize = decodedKek.length; | ||
const dataIv = getSecureRandom(AES_CBC.ivSize); | ||
const fileKey = getSecureRandom(keySize); | ||
// Create cipher with file key, AES CBC, and iv data | ||
let cipher = crypto.createCipheriv(aesCbc(keySize), fileKey, ivData); | ||
const encrypted = cipher.update(fileStream); | ||
const final = cipher.final(); | ||
const encryptedData = Buffer.concat([encrypted, final]); | ||
const dataCipher = crypto.createCipheriv(AES_CBC.cipherName(keySize), fileKey, dataIv); | ||
const encryptedData = performCrypto(dataCipher, data); | ||
// Create key cipher with decoded key and AES ECB | ||
cipher = crypto.createCipheriv(aesEcb(keySize), decodedKey, null); | ||
const keyCipher = crypto.createCipheriv(AES_ECB.cipherName(keySize), decodedKek, null); | ||
const encryptedKey = performCrypto(keyCipher, fileKey); | ||
// Encrypt with file key | ||
const encKek = Buffer.concat([ | ||
cipher.update(fileKey), | ||
cipher.final() | ||
]); | ||
return { | ||
encryptionMetadata: createEncryptionMetadata(encryptionMaterial, keySize, encryptedKey, dataIv), | ||
dataStream: encryptedData | ||
}; | ||
}; | ||
const matDesc = MaterialDescriptor( | ||
encryptionMaterial.smkId, | ||
encryptionMaterial.queryId, | ||
keySize * 8 | ||
); | ||
//TODO: SNOW-940981: Add proper usage when feature is ready | ||
this.encryptDataGCM = function (encryptionMaterial, data) { | ||
const decodedKek = Buffer.from(encryptionMaterial[QUERY_STAGE_MASTER_KEY], BASE64); | ||
const keySize = decodedKek.length; | ||
const metadata = EncryptionMetadata( | ||
encKek.toString(BASE64), | ||
ivData.toString(BASE64), | ||
matDescToUnicode(matDesc) | ||
); | ||
const dataIv = getSecureRandom(AES_GCM.ivSize); | ||
const fileKey = getSecureRandom(keySize); | ||
const encryptedData = this.encryptGCM(data, fileKey, dataIv, DEFAULT_AAD); | ||
const keyIv = getSecureRandom(AES_GCM.ivSize); | ||
const encryptedKey = this.encryptGCM(fileKey, decodedKek, keyIv, DEFAULT_AAD); | ||
return { | ||
encryptionMetadata: metadata, | ||
encryptionMetadata: createEncryptionMetadata(encryptionMaterial, keySize, encryptedKey, dataIv, keyIv, DEFAULT_AAD, DEFAULT_AAD), | ||
dataStream: encryptedData | ||
}; | ||
}; | ||
this.encryptGCM = function (data, key, iv, aad) { | ||
const cipher = crypto.createCipheriv(AES_GCM.cipherName(key.length), key, iv, { authTagLength: AUTH_TAG_LENGTH_IN_BYTES }); | ||
if (aad) { | ||
cipher.setAAD(aad); | ||
} | ||
const encryptedData = performCrypto(cipher, data); | ||
return Buffer.concat([encryptedData, cipher.getAuthTag()]); | ||
}; | ||
this.decryptGCM = function (data, key, iv, aad) { | ||
const decipher = crypto.createDecipheriv(AES_GCM.cipherName(key.length), key, iv, { authTagLength: AUTH_TAG_LENGTH_IN_BYTES }); | ||
if (aad) { | ||
decipher.setAAD(aad); | ||
} | ||
// last 16 bytes of data is the authentication tag | ||
const authTag = data.slice(data.length - AUTH_TAG_LENGTH_IN_BYTES, data.length); | ||
const cipherText = data.slice(0, data.length - AUTH_TAG_LENGTH_IN_BYTES); | ||
decipher.setAuthTag(authTag); | ||
return performCrypto(decipher, cipherText); | ||
}; | ||
/** | ||
* Encrypt file using AES algorithm. | ||
* | ||
* @param {Object} encryptionMaterial | ||
* @param {String} inFileName | ||
* @param {String} tmpDir | ||
* @param {Number} chunkSize | ||
* | ||
* @returns {Object} | ||
*/ | ||
this.encryptFile = async function (encryptionMaterial, inFileName, | ||
* Encrypt file using AES algorithm. | ||
*/ | ||
this.encryptFile = async function (encryptionMaterial, inputFilePath, | ||
tmpDir = null, chunkSize = blockSize * 4 * 1024) { | ||
// Get decoded key from base64 encoded value | ||
const decodedKey = Buffer.from(encryptionMaterial[QUERY_STAGE_MASTER_KEY], BASE64); | ||
const keySize = decodedKey.length; | ||
return await this.encryptFileCBC(encryptionMaterial, inputFilePath, tmpDir, chunkSize); | ||
}; | ||
// Get secure random bytes with block size | ||
const ivData = getSecureRandom(blockSize); | ||
this.encryptFileCBC = async function (encryptionMaterial, inputFilePath, | ||
tmpDir = null, chunkSize = blockSize * 4 * 1024) { | ||
const decodedKek = Buffer.from(encryptionMaterial[QUERY_STAGE_MASTER_KEY], BASE64); | ||
const keySize = decodedKek.length; | ||
const dataIv = getSecureRandom(AES_CBC.ivSize); | ||
const fileKey = getSecureRandom(keySize); | ||
const dataCipher = crypto.createCipheriv(AES_CBC.cipherName(keySize), fileKey, dataIv); | ||
const encryptedFilePath = await performFileStreamCrypto(dataCipher, tmpDir, inputFilePath, chunkSize); | ||
// Create cipher with file key, AES CBC, and iv data | ||
let cipher = crypto.createCipheriv(aesCbc(keySize), fileKey, ivData); | ||
const keyCipher = crypto.createCipheriv(AES_ECB.cipherName(keySize), decodedKek, null); | ||
const encryptedKey = performCrypto(keyCipher, fileKey); | ||
// Create temp file | ||
const tmpobj = tmp.fileSync({ dir: tmpDir, prefix: path.basename(inFileName) + '#' }); | ||
const tempOutputFileName = tmpobj.name; | ||
const tempFd = tmpobj.fd; | ||
return { | ||
encryptionMetadata: createEncryptionMetadata(encryptionMaterial, keySize, encryptedKey, dataIv), | ||
dataFile: encryptedFilePath | ||
}; | ||
}; | ||
await new Promise(function (resolve) { | ||
const infile = fs.createReadStream(inFileName, { highWaterMark: chunkSize }); | ||
const outfile = fs.createWriteStream(tempOutputFileName); | ||
//TODO: SNOW-940981: Add proper usage when feature is ready | ||
this.encryptFileGCM = async function (encryptionMaterial, inputFilePath, tmpDir = null) { | ||
const decodedKek = Buffer.from(encryptionMaterial[QUERY_STAGE_MASTER_KEY], BASE64); | ||
infile.on('data', function (chunk) { | ||
// Encrypt chunk using cipher | ||
const encrypted = cipher.update(chunk); | ||
// Write to temp file | ||
outfile.write(encrypted); | ||
const dataIv = getSecureRandom(AES_GCM.ivSize); | ||
const fileKey = getSecureRandom(decodedKek.length); | ||
const fileContent = await new Promise((resolve, reject) => { | ||
fs.readFile(inputFilePath, (err, data) => { | ||
if (err) { | ||
reject(err); | ||
} else { | ||
resolve(data); | ||
} | ||
}); | ||
infile.on('close', function () { | ||
outfile.write(cipher.final()); | ||
outfile.close(resolve); | ||
}); | ||
}); | ||
// Create key cipher with decoded key and AES ECB | ||
cipher = crypto.createCipheriv(aesEcb(keySize), decodedKey, null); | ||
const encryptedData = this.encryptGCM(fileContent, fileKey, dataIv, DEFAULT_AAD); | ||
const encryptedFilePath = await writeContentToFile(tmpDir, path.basename(inputFilePath) + '#', encryptedData); | ||
// Encrypt with file key | ||
const encKek = Buffer.concat([ | ||
cipher.update(fileKey), | ||
cipher.final() | ||
]); | ||
const keyIv = getSecureRandom(AES_GCM.ivSize); | ||
const encryptedKey = this.encryptGCM(fileKey, decodedKek, keyIv, DEFAULT_AAD); | ||
const matDesc = MaterialDescriptor( | ||
encryptionMaterial.smkId, | ||
encryptionMaterial.queryId, | ||
keySize * 8 | ||
); | ||
const metadata = EncryptionMetadata( | ||
encKek.toString(BASE64), | ||
ivData.toString(BASE64), | ||
matDescToUnicode(matDesc) | ||
); | ||
// Close temp file | ||
fs.closeSync(tempFd); | ||
return { | ||
encryptionMetadata: metadata, | ||
dataFile: tempOutputFileName | ||
encryptionMetadata: createEncryptionMetadata(encryptionMaterial, fileKey.length, encryptedKey, dataIv, keyIv, DEFAULT_AAD, DEFAULT_AAD), | ||
dataFile: encryptedFilePath | ||
}; | ||
@@ -257,79 +289,120 @@ }; | ||
/** | ||
* Decrypt file using AES algorithm. | ||
* | ||
* @param {Object} encryptionMaterial | ||
* @param {String} inFileName | ||
* @param {String} tmpDir | ||
* @param {Number} chunkSize | ||
* | ||
* @returns {String} | ||
*/ | ||
this.decryptFile = async function (metadata, encryptionMaterial, inFileName, | ||
* Decrypt file using AES algorithm. | ||
*/ | ||
this.decryptFile = async function (metadata, encryptionMaterial, inputFilePath, | ||
tmpDir = null, chunkSize = blockSize * 4 * 1024) { | ||
// Get key and iv from metadata | ||
const keyBase64 = metadata.key; | ||
const ivBase64 = metadata.iv; | ||
return await this.decryptFileCBC(metadata, encryptionMaterial, inputFilePath, tmpDir, chunkSize); | ||
}; | ||
// Get decoded key from base64 encoded value | ||
const decodedKey = Buffer.from(encryptionMaterial[QUERY_STAGE_MASTER_KEY], BASE64); | ||
const keySize = decodedKey.length; | ||
this.decryptFileCBC = async function (metadata, encryptionMaterial, inputFilePath, | ||
tmpDir = null, chunkSize = blockSize * 4 * 1024) { | ||
const decodedKek = Buffer.from(encryptionMaterial[QUERY_STAGE_MASTER_KEY], BASE64); | ||
const keyBytes = new Buffer.from(metadata.key, BASE64); | ||
const ivBytes = new Buffer.from(metadata.iv, BASE64); | ||
const keyDecipher = crypto.createDecipheriv(AES_ECB.cipherName(decodedKek.length), decodedKek, null); | ||
const fileKey = performCrypto(keyDecipher, keyBytes); | ||
// Get key bytes and iv bytes from base64 encoded value | ||
const keyBytes = new Buffer.from(keyBase64, BASE64); | ||
const ivBytes = new Buffer.from(ivBase64, BASE64); | ||
const dataDecipher = crypto.createDecipheriv(AES_CBC.cipherName(fileKey.length), fileKey, ivBytes); | ||
return await performFileStreamCrypto(dataDecipher, tmpDir, inputFilePath, chunkSize); | ||
}; | ||
// Create temp file | ||
let tempOutputFileName; | ||
let tempFd; | ||
await new Promise((resolve, reject) => { | ||
tmp.file({ dir: tmpDir, prefix: path.basename(inFileName) + '#' }, (err, path, fd) => { | ||
//TODO: SNOW-940981: Add proper usage when feature is ready | ||
this.decryptFileGCM = async function (metadata, encryptionMaterial, inputFilePath, tmpDir = null) { | ||
const decodedKek = Buffer.from(encryptionMaterial[QUERY_STAGE_MASTER_KEY], BASE64); | ||
const keyBytes = new Buffer.from(metadata.key, BASE64); | ||
const keyIvBytes = new Buffer.from(metadata.keyIv, BASE64); | ||
const dataIvBytes = new Buffer.from(metadata.iv, BASE64); | ||
const dataAadBytes = new Buffer.from(metadata.dataAad, BASE64); | ||
const keyAadBytes = new Buffer.from(metadata.keyAad, BASE64); | ||
const fileKey = this.decryptGCM(keyBytes, decodedKek, keyIvBytes, keyAadBytes); | ||
const fileContent = await new Promise((resolve, reject) => { | ||
fs.readFile(inputFilePath, (err, data) => { | ||
if (err) { | ||
reject(err); | ||
} else { | ||
resolve(data); | ||
} | ||
tempOutputFileName = path; | ||
tempFd = fd; | ||
resolve(); | ||
}); | ||
}); | ||
// Create key decipher with decoded key and AES ECB | ||
let decipher = crypto.createDecipheriv(aesEcb(keySize), decodedKey, null); | ||
const fileKey = Buffer.concat([ | ||
decipher.update(keyBytes), | ||
decipher.final() | ||
]); | ||
const decryptedData = this.decryptGCM(fileContent, fileKey, dataIvBytes, dataAadBytes); | ||
return await writeContentToFile(tmpDir, path.basename(inputFilePath) + '#', decryptedData); | ||
}; | ||
function performCrypto(cipherOrDecipher, data) { | ||
const encryptedOrDecrypted = cipherOrDecipher.update(data); | ||
const final = cipherOrDecipher.final(); | ||
return Buffer.concat([encryptedOrDecrypted, final]); | ||
} | ||
// Create decipher with file key, iv bytes, and AES CBC | ||
decipher = crypto.createDecipheriv(aesCbc(keySize), fileKey, ivBytes); | ||
async function performFileStreamCrypto(cipherOrDecipher, tmpDir, inputFilePath, chunkSize) { | ||
const outputFile = await new Promise((resolve, reject) => { | ||
tmp.file({ dir: tmpDir, prefix: path.basename(inputFilePath) + '#' }, (err, path, fd) => { | ||
if (err) { | ||
reject(err); | ||
} else { | ||
resolve({ path, fd }); | ||
} | ||
}); | ||
}); | ||
await new Promise(function (resolve) { | ||
const infile = fs.createReadStream(inFileName, { highWaterMark: chunkSize }); | ||
const outfile = fs.createWriteStream(tempOutputFileName); | ||
const inputStream = fs.createReadStream(inputFilePath, { highWaterMark: chunkSize }); | ||
const outputStream = fs.createWriteStream(outputFile.path); | ||
infile.on('data', function (chunk) { | ||
// Dncrypt chunk using decipher | ||
const decrypted = decipher.update(chunk); | ||
// Write to temp file | ||
outfile.write(decrypted); | ||
inputStream.on('data', function (chunk) { | ||
const encrypted = cipherOrDecipher.update(chunk); | ||
outputStream.write(encrypted); | ||
}); | ||
infile.on('close', function () { | ||
outfile.write(decipher.final()); | ||
outfile.close(resolve); | ||
inputStream.on('close', function () { | ||
outputStream.write(cipherOrDecipher.final()); | ||
outputStream.close(resolve); | ||
}); | ||
}); | ||
// Close temp file | ||
await new Promise((resolve, reject) => { | ||
fs.close(tempFd, (err) => { | ||
fs.close(outputFile.fd, (err) => { | ||
if (err) { | ||
reject(err); | ||
reject(err); | ||
} else { | ||
resolve(); | ||
} | ||
resolve(); | ||
}); | ||
}); | ||
return outputFile.path; | ||
} | ||
return tempOutputFileName; | ||
}; | ||
async function writeContentToFile(tmpDir, prefix, content,) { | ||
const outputFile = await new Promise((resolve, reject) => { | ||
tmp.file({ dir: tmpDir, prefix: prefix }, (err, path, fd) => { | ||
if (err) { | ||
reject(err); | ||
} else { | ||
resolve({ path, fd }); | ||
} | ||
}); | ||
}); | ||
await new Promise((resolve, reject) => { | ||
fs.writeFile(outputFile.path, content, err => { | ||
if (err) { | ||
reject(err); | ||
} else { | ||
resolve(); | ||
} | ||
}); | ||
}); | ||
await new Promise((resolve, reject) => { | ||
fs.close(outputFile.fd, (err) => { | ||
if (err) { | ||
reject(err); | ||
} else { | ||
resolve(); | ||
} | ||
}); | ||
}); | ||
return outputFile.path; | ||
} | ||
} | ||
exports.EncryptUtil = EncryptUtil; |
@@ -72,4 +72,11 @@ /* | ||
const storage = new Storage({ interceptors_: interceptors }); | ||
//TODO: SNOW-1789759 hardcoded region will be replaced in the future | ||
const isRegionalUrlEnabled = (stageInfo.region).toLowerCase() === 'me-central2' || stageInfo.useRegionalUrl; | ||
let endPoint = null; | ||
if (stageInfo['endPoint']) { | ||
endPoint = stageInfo['endPoint']; | ||
} else if (isRegionalUrlEnabled) { | ||
endPoint = `storage.${stageInfo.region.toLowerCase()}.rep.googleapis.com`; | ||
} | ||
const storage = endPoint ? new Storage({ interceptors_: interceptors, apiEndpoint: endPoint }) : new Storage({ interceptors_: interceptors }); | ||
client = { gcsToken: gcsToken, gcsClient: storage }; | ||
@@ -76,0 +83,0 @@ } else { |
@@ -47,3 +47,3 @@ /* | ||
} else if (type === 'AZURE') { | ||
return new SnowflakeAzureUtil(); | ||
return new SnowflakeAzureUtil(connectionConfig); | ||
} else if (type === 'GCS') { | ||
@@ -50,0 +50,0 @@ return new SnowflakeGCSUtil(); |
@@ -5,3 +5,3 @@ /* | ||
const { NodeHttpHandler } = require('@aws-sdk/node-http-handler'); | ||
const { NodeHttpHandler } = require('@smithy/node-http-handler'); | ||
const EncryptionMetadata = require('./encrypt_util').EncryptionMetadata; | ||
@@ -11,5 +11,3 @@ const FileHeader = require('./file_util').FileHeader; | ||
const getProxyAgent = require('../http/node').getProxyAgent; | ||
const Util = require('../util'); | ||
const Logger = require('../logger'); | ||
const GlobalConfig = require('../global_config'); | ||
const ProxyUtil = require('../proxy_util'); | ||
@@ -59,8 +57,15 @@ const AMZ_IV = 'x-amz-iv'; | ||
const securityToken = stageCredentials['AWS_TOKEN']; | ||
const isRegionalUrlEnabled = stageInfo.useRegionalUrl || stageInfo.useS3RegionalUrl; | ||
// if GS sends us an endpoint, it's likely for FIPS. Use it. | ||
let endPoint = null; | ||
if (stageInfo['endPoint']) { | ||
endPoint = 'https://' + stageInfo['endPoint']; | ||
endPoint = `https://${stageInfo['endPoint']}`; | ||
} else { | ||
if (stageInfo.region && isRegionalUrlEnabled) { | ||
const domainSuffixForRegionalUrl = (stageInfo.region).toLowerCase().startsWith('cn-') ? 'amazonaws.com.cn' : 'amazonaws.com'; | ||
endPoint = `https://s3.${stageInfo.region}.${domainSuffixForRegionalUrl}`; | ||
} | ||
} | ||
const config = { | ||
@@ -78,11 +83,5 @@ apiVersion: '2006-03-01', | ||
let proxy = connectionConfig.getProxy(); | ||
if (!proxy && GlobalConfig.isEnvProxyActive()) { | ||
proxy = Util.getProxyFromEnv(); | ||
if (proxy) { | ||
Logger.getInstance().debug(`S3 Util loads the proxy info from the environment variable host: ${proxy.host}`); | ||
} | ||
} | ||
const proxy = ProxyUtil.getProxy(connectionConfig.getProxy(), 'S3 Util'); | ||
if (proxy) { | ||
const proxyAgent = getProxyAgent(proxy, new URL(connectionConfig.accessUrl), SNOWFLAKE_S3_DESTINATION); | ||
const proxyAgent = getProxyAgent(proxy, new URL(connectionConfig.accessUrl), endPoint || SNOWFLAKE_S3_DESTINATION); | ||
config.requestHandler = new NodeHttpHandler({ | ||
@@ -89,0 +88,0 @@ httpAgent: proxyAgent, |
@@ -14,23 +14,23 @@ /* | ||
let insecureConnect = false; | ||
let disableOCSPChecks = false; | ||
/** | ||
* Updates the value of the 'insecureConnect' parameter. | ||
* Updates the value of the 'disableOCSPChecks' parameter. | ||
* | ||
* @param {boolean} value | ||
*/ | ||
exports.setInsecureConnect = function (value) { | ||
exports.setDisableOCSPChecks = function (value) { | ||
// validate input | ||
Errors.assertInternal(Util.isBoolean(value)); | ||
insecureConnect = value; | ||
disableOCSPChecks = value; | ||
}; | ||
/** | ||
* Returns the value of the 'insecureConnect' parameter. | ||
* Returns the value of the 'disableOCSPChecks' parameter. | ||
* | ||
* @returns {boolean} | ||
*/ | ||
exports.isInsecureConnect = function () { | ||
return insecureConnect; | ||
exports.isOCSPChecksDisabled = function () { | ||
return disableOCSPChecks; | ||
}; | ||
@@ -75,3 +75,3 @@ | ||
exports.getOcspMode = function () { | ||
if (insecureConnect) { | ||
if (disableOCSPChecks) { | ||
return ocspModes.INSECURE; | ||
@@ -78,0 +78,0 @@ } else if (!ocspFailOpen) { |
@@ -8,4 +8,6 @@ /* | ||
const Logger = require('../logger'); | ||
const ExecutionTimer = require('../logger/execution_timer'); | ||
const axios = require('axios'); | ||
const URL = require('node:url').URL; | ||
const requestUtil = require('./request_util'); | ||
@@ -22,2 +24,4 @@ const DEFAULT_REQUEST_TIMEOUT = 360000; | ||
// save the connection config | ||
Logger.getInstance().trace('Initializing base HttpClient with Connection Config[%s]', | ||
connectionConfig.describeIdentityAttributes()); | ||
this._connectionConfig = connectionConfig; | ||
@@ -34,19 +38,30 @@ } | ||
HttpClient.prototype.request = function (options) { | ||
let request; | ||
Logger.getInstance().trace('Request%s - preparing for sending.', requestUtil.describeRequestFromOptions(options)); | ||
let requestPromise; | ||
const requestOptions = prepareRequestOptions.call(this, options); | ||
let sendRequest = async function sendRequest() { | ||
request = axios.request(requestOptions).then(response => { | ||
Logger.getInstance().trace('Request%s - sending.', requestUtil.describeRequestFromOptions(requestOptions)); | ||
const timer = new ExecutionTimer().start(); | ||
requestPromise = axios.request(requestOptions).then(response => { | ||
const httpResponseTime = timer.getDuration(); | ||
Logger.getInstance().debug('Request%s - response received after %s milliseconds with status %s.', requestUtil.describeRequestFromOptions(requestOptions), httpResponseTime, response.status); | ||
sanitizeAxiosResponse(response); | ||
if (Util.isFunction(options.callback)) { | ||
Logger.getInstance().trace('Request%s - calling callback function.', requestUtil.describeRequestFromOptions(requestOptions)); | ||
return options.callback(null, normalizeResponse(response), response.data); | ||
} else { | ||
Logger.getInstance().trace(`Callback function was not provided for the call to ${options.url}`); | ||
Logger.getInstance().trace('Request%s - callback function was not provided.', requestUtil.describeRequestFromOptions(requestOptions)); | ||
return null; | ||
} | ||
}).catch(err => { | ||
const httpResponseTime = timer.getDuration(); | ||
Logger.getInstance().debug('Request%s - failed after %s milliseconds.', requestUtil.describeRequestFromOptions(requestOptions), httpResponseTime); | ||
sanitizeAxiosError(err); | ||
if (Util.isFunction(options.callback)) { | ||
if (err.response) { // axios returns error for not 2xx responses - let's unwrap it | ||
Logger.getInstance().trace('Request%s - calling callback function for error from response. Received code: ', requestUtil.describeRequestFromOptions(requestOptions), err.response.status); | ||
options.callback(null, normalizeResponse(err.response), err.response.data); | ||
} else { | ||
Logger.getInstance().trace('Request%s - calling callback function for error without response.', requestUtil.describeRequestFromOptions(requestOptions)); | ||
options.callback(err, normalizeResponse(null), null); | ||
@@ -56,2 +71,3 @@ } | ||
} else { | ||
Logger.getInstance().warn('Request%s - callback function was not provided. Error will be re-raised.', requestUtil.describeRequestFromOptions(requestOptions)); | ||
throw err; | ||
@@ -63,3 +79,3 @@ } | ||
Logger.getInstance().trace(`CALL ${requestOptions.method} with timeout ${requestOptions.timeout}: ${requestOptions.url}`); | ||
Logger.getInstance().trace('Request%s - issued for the next tick.', requestUtil.describeRequestFromOptions(requestOptions)); | ||
process.nextTick(sendRequest); | ||
@@ -71,4 +87,6 @@ | ||
abort: function () { | ||
if (request) { | ||
request.abort(); | ||
if (requestPromise) { | ||
Logger.getInstance().trace('Request%s - aborting.', requestUtil.describeRequestFromOptions(requestOptions)); | ||
// TODO: This line won't work - promise has no method called abort | ||
requestPromise.abort(); | ||
} | ||
@@ -87,12 +105,17 @@ } | ||
HttpClient.prototype.requestAsync = async function (options) { | ||
Logger.getInstance().trace('Request%s - preparing for async sending.', requestUtil.describeRequestFromOptions(options)); | ||
const timer = new ExecutionTimer(); | ||
try { | ||
const requestOptions = prepareRequestOptions.call(this, options); | ||
timer.start(); | ||
const response = await axios.request(requestOptions); | ||
if (Util.isString(response['data']) && | ||
response['headers']['content-type'] === 'application/json') { | ||
response['data'] = JSON.parse(response['data']); | ||
} | ||
const httpResponseTime = timer.getDuration(); | ||
Logger.getInstance().debug('Request%s - response received after %s milliseconds with status %s.', requestUtil.describeRequestFromOptions(requestOptions), httpResponseTime, response.status); | ||
parseResponseData(response); | ||
sanitizeAxiosResponse(response); | ||
return response; | ||
} catch (err) { | ||
const httpResponseTime = timer.getDuration(); | ||
Logger.getInstance().debug('Request%s - failed after %s milliseconds. Error will be re-raised.', requestUtil.describeRequestFromOptions(options), httpResponseTime); | ||
sanitizeAxiosError(err); | ||
@@ -103,2 +126,14 @@ throw err; | ||
function parseResponseData(response) { | ||
Logger.getInstance().trace('Request%s - parsing response data.', requestUtil.describeRequestFromResponse(response)); | ||
parseIfJSONData(response); | ||
} | ||
function parseIfJSONData(response) { | ||
if (Util.isString(response['data']) && | ||
response['headers']['content-type'] === 'application/json') { | ||
response['data'] = JSON.parse(response['data']); | ||
} | ||
} | ||
/** | ||
@@ -195,2 +230,3 @@ * Issues an HTTP POST request. | ||
function sanitizeAxiosResponse(response) { | ||
Logger.getInstance().trace('Request%s - sanitizing response data.', requestUtil.describeRequestFromResponse(response)); | ||
response.request = undefined; | ||
@@ -207,2 +243,3 @@ if (response.config) { | ||
if (error.response) { | ||
Logger.getInstance().trace('Request%s - sanitizing response error data.', requestUtil.describeRequestFromResponse(error.response)); | ||
sanitizeAxiosResponse(error.response); | ||
@@ -213,2 +250,3 @@ } | ||
function prepareRequestOptions(options) { | ||
Logger.getInstance().trace('Request%s - constructing options.', requestUtil.describeRequestFromOptions(options)); | ||
const headers = normalizeHeaders(options.headers) || {}; | ||
@@ -229,4 +267,7 @@ | ||
headers['Content-Encoding'] = 'gzip'; | ||
Logger.getInstance().debug('Request%s - original buffer length: %d bytes. Compressed buffer length: %d bytes.', requestUtil.describeRequestFromOptions(options), bufferUncompressed.buffer.byteLength, bufferCompressed.buffer.byteLength); | ||
} else { | ||
Logger.getInstance().warn('Could not compress request data.'); | ||
// Logging 'err' variable value should not be done, since it may contain compressed customer's data. | ||
// It can be added only for debugging purposes. | ||
Logger.getInstance().warn('Request%s - could not compress request data.', requestUtil.describeRequestFromOptions(options)); | ||
} | ||
@@ -269,2 +310,3 @@ }); | ||
Logger.getInstance().debug('Request%s - options - timeout: %s, retryDelay: %s, responseType: %s', requestUtil.describeRequestFromOptions(options), requestOptions.timeout, requestOptions.retryDelay, requestOptions.responseType); | ||
return requestOptions; | ||
@@ -282,6 +324,5 @@ } | ||
function normalizeHeaders(headers) { | ||
let ret = headers; | ||
Logger.getInstance().trace('Normalizing headers'); | ||
if (Util.isObject(headers)) { | ||
ret = { | ||
const normalizedHeaders = { | ||
'user-agent': Util.userAgent | ||
@@ -305,11 +346,15 @@ }; | ||
(headerNameLowerCase === 'content-type')) { | ||
ret[headerNameLowerCase] = headers[headerName]; | ||
normalizedHeaders[headerNameLowerCase] = headers[headerName]; | ||
} else { | ||
ret[headerName] = headers[headerName]; | ||
normalizedHeaders[headerName] = headers[headerName]; | ||
} | ||
} | ||
} | ||
Logger.getInstance().trace('Headers were normalized'); | ||
return normalizedHeaders; | ||
} else { | ||
Logger.getInstance().trace('Headers were not an object. Original value will be returned.'); | ||
return headers; | ||
} | ||
return ret; | ||
} | ||
@@ -329,2 +374,3 @@ | ||
if (response && !response.getResponseHeader) { | ||
Logger.getInstance().trace('Request%s - normalizing.', requestUtil.describeRequestFromResponse(response)); | ||
response.getResponseHeader = function (header) { | ||
@@ -342,2 +388,2 @@ return response.headers && response.headers[ | ||
return response; | ||
} | ||
} |
@@ -8,2 +8,3 @@ /* | ||
const Base = require('./base'); | ||
const Logger = require('../logger'); | ||
@@ -17,2 +18,4 @@ /** | ||
function BrowserHttpClient(connectionConfig) { | ||
Logger.getInstance().trace('Initializing BrowserHttpClient with Connection Config[%s]', | ||
connectionConfig.describeIdentityAttributes()); | ||
Base.apply(this, [connectionConfig]); | ||
@@ -19,0 +22,0 @@ } |
@@ -6,2 +6,3 @@ /* | ||
const Util = require('../util'); | ||
const ProxyUtil = require('../proxy_util'); | ||
const Base = require('./base'); | ||
@@ -13,19 +14,22 @@ const HttpsAgent = require('../agent/https_ocsp_agent'); | ||
const Logger = require('../logger'); | ||
const RequestUtil = require('../http/request_util'); | ||
/** | ||
* Returns the delay time calculated by exponential backoff with | ||
* decorrelated jitter. | ||
* for more details, check out: | ||
* decorrelated jitter. For more details, check out: | ||
* http://www.awsarchitectureblog.com/2015/03/backoff.html | ||
* @param base minimum seconds | ||
* @param cap maximum seconds | ||
* @param previousSleep previous sleep time | ||
* @return {Number} number of milliseconds to wait before retrying again the request. | ||
*/ | ||
NodeHttpClient.prototype.constructExponentialBackoffStrategy = function () { | ||
let sleep = this._connectionConfig.getRetrySfStartingSleepTime(); | ||
Logger.getInstance().trace('Calculating exponential backoff strategy'); | ||
const previousSleepTime = this._connectionConfig.getRetrySfStartingSleepTime(); | ||
// maximum seconds | ||
const cap = this._connectionConfig.getRetrySfMaxSleepTime(); | ||
// minimum seconds | ||
const base = 1; | ||
sleep = Util.nextSleepTime(base, cap, sleep); | ||
return sleep * 1000; | ||
const nextSleepTime = Util.nextSleepTime(base, cap, previousSleepTime); | ||
const nextSleepTimeInMilliseconds = nextSleepTime * 1000; | ||
Logger.getInstance().trace('Calculated exponential backoff strategy sleep time: %d', nextSleepTimeInMilliseconds); | ||
return nextSleepTimeInMilliseconds; | ||
}; | ||
@@ -40,2 +44,4 @@ | ||
function NodeHttpClient(connectionConfig) { | ||
Logger.getInstance().trace('Initializing NodeHttpClient with Connection Config[%s]', | ||
connectionConfig.describeIdentityAttributes()); | ||
Base.apply(this, [connectionConfig]); | ||
@@ -49,15 +55,17 @@ } | ||
function getFromCacheOrCreate(agentClass, options, agentId) { | ||
Logger.getInstance().trace('Agent[id: %s] - trying to retrieve from cache or create.', agentId); | ||
let agent = {}; | ||
function createAgent(agentClass, agentOptions, agentId) { | ||
Logger.getInstance().trace('Agent[id: %s] - creating a new agent instance.', agentId); | ||
const agent = agentClass(agentOptions); | ||
httpsAgentCache.set(agentId, agent); | ||
Logger.getInstance().trace(`Create and add to cache new agent ${agentId}`); | ||
Logger.getInstance().trace('Agent[id: %s] - new instance stored in cache.', agentId); | ||
// detect and log PROXY envvar + agent proxy settings | ||
const compareAndLogEnvAndAgentProxies = Util.getCompareAndLogEnvAndAgentProxies(agentOptions); | ||
Logger.getInstance().debug(`Proxy settings used in requests:${compareAndLogEnvAndAgentProxies.messages}`); | ||
const compareAndLogEnvAndAgentProxies = ProxyUtil.getCompareAndLogEnvAndAgentProxies(agentOptions); | ||
Logger.getInstance().debug('Agent[id: %s] - proxy settings used in requests: %s', agentId, compareAndLogEnvAndAgentProxies.messages); | ||
// if there's anything to warn on (e.g. both envvar + agent proxy used, and they are different) | ||
// log warnings on them | ||
if (compareAndLogEnvAndAgentProxies.warnings) { | ||
Logger.getInstance().warn(`${compareAndLogEnvAndAgentProxies.warnings}`); | ||
Logger.getInstance().warn('Agent[id: %s] - %s', agentId, compareAndLogEnvAndAgentProxies.warnings); | ||
} | ||
@@ -69,3 +77,3 @@ | ||
if (httpsAgentCache.has(agentId)) { | ||
Logger.getInstance().trace(`Get agent with id: ${agentId} from cache`); | ||
Logger.getInstance().trace('Agent[id: %s] - retrieving an agent instance from cache.', agentId); | ||
agent = httpsAgentCache.get(agentId); | ||
@@ -88,3 +96,3 @@ } else { | ||
function isBypassProxy(proxy, destination) { | ||
function isBypassProxy(proxy, destination, agentId) { | ||
if (proxy && proxy.noProxy) { | ||
@@ -97,3 +105,3 @@ const bypassList = proxy.noProxy.split('|'); | ||
if (matches) { | ||
Logger.getInstance().debug('bypassing proxy for %s', destination); | ||
Logger.getInstance().debug('Agent[id: %s] - bypassing proxy allowed for destination: %s', agentId, destination); | ||
return true; | ||
@@ -110,7 +118,8 @@ } | ||
NodeHttpClient.prototype.getAgent = function (parsedUrl, proxy, mock) { | ||
Logger.getInstance().trace('Agent[url: %s] - getting an agent instance.', RequestUtil.describeURL(parsedUrl.href)); | ||
if (!proxy && GlobalConfig.isEnvProxyActive()) { | ||
const isHttps = parsedUrl.protocol === 'https:'; | ||
proxy = Util.getProxyFromEnv(isHttps); | ||
proxy = ProxyUtil.getProxyFromEnv(isHttps); | ||
if (proxy) { | ||
Logger.getInstance().debug(`Load the proxy info from the environment variable host: ${proxy.host} in getAgent`); | ||
Logger.getInstance().debug('Agent[url: %s] - proxy info loaded from the environment variable. Proxy host: %s', RequestUtil.describeURL(parsedUrl.href), proxy.host); | ||
} | ||
@@ -122,2 +131,3 @@ } | ||
function getProxyAgent(proxyOptions, parsedUrl, destination, mock) { | ||
Logger.getInstance().trace('Agent[url: %s] - getting a proxy agent instance.', RequestUtil.describeURL(parsedUrl.href)); | ||
const agentOptions = { | ||
@@ -132,2 +142,3 @@ protocol: parsedUrl.protocol, | ||
if (mockAgent.protocol === parsedUrl.protocol) { | ||
Logger.getInstance().debug('Agent[url: %s] - the mock agent will be used.', RequestUtil.describeURL(parsedUrl.href)); | ||
return mockAgent; | ||
@@ -137,7 +148,7 @@ } | ||
const destHost = Util.getHostFromURL(destination); | ||
Logger.getInstance().debug(`The destination host is: ${destHost}`); | ||
const destHost = ProxyUtil.getHostFromURL(destination); | ||
const agentId = createAgentId(agentOptions.protocol, agentOptions.hostname, destHost, agentOptions.keepAlive); | ||
Logger.getInstance().debug('Agent[id: %s] - the destination host is: %s.', agentId, destHost); | ||
const agentId = createAgentId(agentOptions.protocol, agentOptions.hostname, destHost, agentOptions.keepAlive); | ||
const bypassProxy = isBypassProxy(proxyOptions, destination); | ||
const bypassProxy = isBypassProxy(proxyOptions, destination, agentId); | ||
let agent; | ||
@@ -148,11 +159,15 @@ const isHttps = agentOptions.protocol === 'https:'; | ||
if (proxyOptions && !bypassProxy) { | ||
Logger.getInstance().trace('Agent[id: %s] - using HTTPS agent enriched with proxy options.', agentId); | ||
enrichAgentOptionsWithProxyConfig(agentOptions, proxyOptions); | ||
agent = getFromCacheOrCreate(HttpsProxyAgent, agentOptions, agentId); | ||
} else { | ||
Logger.getInstance().trace('Agent[id: %s] - using HTTPS agent without proxy.', agentId); | ||
agent = getFromCacheOrCreate(HttpsAgent, agentOptions, agentId); | ||
} | ||
} else if (proxyOptions && !bypassProxy) { | ||
Logger.getInstance().trace('Agent[id: %s] - using HTTP agent enriched with proxy options.', agentId); | ||
enrichAgentOptionsWithProxyConfig(agentOptions, proxyOptions); | ||
agent = getFromCacheOrCreate(HttpAgent, agentOptions, agentId); | ||
} else { | ||
Logger.getInstance().trace('Agent[id: %s] - using HTTP agent without proxy.', agentId); | ||
agent = getFromCacheOrCreate(HttpAgent, agentOptions, agentId); | ||
@@ -172,2 +187,2 @@ } | ||
module.exports = { NodeHttpClient, getProxyAgent, getAgentCacheSize }; | ||
module.exports = { NodeHttpClient, getProxyAgent, getAgentCacheSize, isBypassProxy }; |
@@ -67,3 +67,5 @@ /* | ||
'gim'); | ||
const CONNECTION_TOKEN_PATTERN = new RegExp(String.raw`(token|assertion content)([\'\"\s:=]+)([a-z0-9=/_\-\+]{8,})`, | ||
// Colon in the group ([a-z0-9=/:_%-+]{8,}) was added to detect tokens that contain additional details before the actual token, | ||
// such as a version or hint (token=ver:1-hint:1233-realToken...). | ||
const CONNECTION_TOKEN_PATTERN = new RegExp(String.raw`(token|assertion content)([\'\"\s:=]+)([a-z0-9=/:_\%\-\+]{8,})`, | ||
'gi'); | ||
@@ -70,0 +72,0 @@ const PASSWORD_PATTERN = new RegExp( |
@@ -68,4 +68,4 @@ /* | ||
// if we're running in DEBUG loglevel, probably we want to see the full error too | ||
const logErr = err ? JSON.stringify(err, Util.getCircularReplacer()) | ||
: `status: ${JSON.stringify(response.status)} ${JSON.stringify(response.statusText)}` | ||
const logErr = err ? JSON.stringify(err, Object.getOwnPropertyNames(err)) | ||
: `status: ${JSON.stringify(response.status)} ${JSON.stringify(response.statusText)}` | ||
+ ` headers: ${JSON.stringify(response.headers)}`; | ||
@@ -72,0 +72,0 @@ Logger.getInstance().debug('Encountered an error when getting data from cloud storage: ' + logErr); |
@@ -63,2 +63,3 @@ /* | ||
const Authenticator = require('../authentication/authentication'); | ||
const sfParams = require('../constants/sf_params'); | ||
@@ -582,6 +583,12 @@ function isRetryableNetworkError(err) { | ||
* @param {Object} httpClient | ||
* | ||
* @param {Object} auth | ||
* @returns {Object} the http request object. | ||
*/ | ||
function sendHttpRequest(requestOptions, httpClient, auth) { | ||
const params = requestOptions.params || {}; | ||
if (!requestOptions.excludeGuid) { | ||
addGuidToParams(params); | ||
} | ||
const realRequestOptions = | ||
@@ -594,2 +601,3 @@ { | ||
json: requestOptions.json, | ||
params: params, | ||
callback: async function (err, response, body) { | ||
@@ -600,3 +608,3 @@ // if we got an error, wrap it into a network error | ||
Logger.getInstance().debug('Encountered an error when sending the request. Details: ' | ||
+ JSON.stringify(err, Util.getCircularReplacer())); | ||
+ JSON.stringify(err, Object.getOwnPropertyNames(err))); | ||
@@ -686,4 +694,4 @@ err = Errors.createNetworkError( | ||
if (requestOptions.retry > 2) { | ||
const includeParam = requestOptions.url.includes('?'); | ||
realRequestOptions.url += (includeParam ? '&' : '?'); | ||
const includesParam = requestOptions.url.includes('?'); | ||
realRequestOptions.url += (includesParam ? '&' : '?'); | ||
realRequestOptions.url += | ||
@@ -711,3 +719,3 @@ ('clientStartTime=' + requestOptions.startTime | ||
/** | ||
* Creates a new Request. | ||
* Creates a new Request to Snowflake. | ||
* | ||
@@ -730,2 +738,6 @@ * @param {Object} requestOptions | ||
const params = this.requestOptions.params || {}; | ||
if (!this.requestOptions.excludeGuid) { | ||
addGuidToParams(params); | ||
} | ||
const options = | ||
@@ -736,9 +748,18 @@ { | ||
url: this.requestOptions.absoluteUrl, | ||
json: this.requestOptions.json | ||
json: this.requestOptions.json, | ||
params: params | ||
}; | ||
// issue the async http request | ||
//TODO: this should be wrapped with the same operations, as in the synchronous send method's callback. | ||
return await httpClient.requestAsync(options); | ||
}; | ||
function addGuidToParams(params) { | ||
// In case of repeated requests for the same request ID, | ||
// a request GUID (globally unique identifier) is added for better traceability. | ||
const guid = uuidv4(); | ||
params[sfParams.paramsNames.SF_REQUEST_GUID] = guid; | ||
} | ||
/** | ||
@@ -776,2 +797,4 @@ * Sends out the request. | ||
requestOptions.absoluteUrl = this.buildFullUrl(requestOptions.url); | ||
requestOptions.excludeGuid = !Util.exists(requestOptions.excludeGuid) ? false : requestOptions.excludeGuid; | ||
}; | ||
@@ -1268,3 +1291,3 @@ | ||
if (!connectionConfig.isQaMode()) { | ||
// no requestId is attached to login-request in test mode. | ||
// No requestId is attached to login-request in test mode. | ||
queryStringObject.requestId = uuidv4(); | ||
@@ -1271,0 +1294,0 @@ } |
167
lib/util.js
@@ -11,3 +11,2 @@ /* | ||
const Errors = require('./errors'); | ||
const ErrorCodes = Errors.codes; | ||
@@ -622,62 +621,2 @@ /** | ||
/** | ||
* Try to get the PROXY environmental variables | ||
* On Windows, envvar name is case-insensitive, but on *nix, it's case-sensitive | ||
* | ||
* Compare them with the proxy specified on the Connection, if any | ||
* Return with the log constructed from the components detection and comparison | ||
* If there's something to warn the user about, return that too | ||
* | ||
* @param the agentOptions object from agent creation | ||
* @returns {object} | ||
*/ | ||
exports.getCompareAndLogEnvAndAgentProxies = function (agentOptions) { | ||
const envProxy = {}; | ||
const logMessages = { 'messages': '', 'warnings': '' }; | ||
envProxy.httpProxy = process.env.HTTP_PROXY || process.env.http_proxy; | ||
envProxy.httpsProxy = process.env.HTTPS_PROXY || process.env.https_proxy; | ||
envProxy.noProxy = process.env.NO_PROXY || process.env.no_proxy; | ||
envProxy.logHttpProxy = envProxy.httpProxy ? | ||
'HTTP_PROXY: ' + envProxy.httpProxy : 'HTTP_PROXY: <unset>'; | ||
envProxy.logHttpsProxy = envProxy.httpsProxy ? | ||
'HTTPS_PROXY: ' + envProxy.httpsProxy : 'HTTPS_PROXY: <unset>'; | ||
envProxy.logNoProxy = envProxy.noProxy ? | ||
'NO_PROXY: ' + envProxy.noProxy : 'NO_PROXY: <unset>'; | ||
// log PROXY envvars | ||
if (envProxy.httpProxy || envProxy.httpsProxy) { | ||
logMessages.messages = logMessages.messages + ' // PROXY environment variables: ' | ||
+ `${envProxy.logHttpProxy} ${envProxy.logHttpsProxy} ${envProxy.logNoProxy}.`; | ||
} | ||
// log proxy config on Connection, if any set | ||
if (agentOptions.host) { | ||
const proxyHostAndPort = agentOptions.host + ':' + agentOptions.port; | ||
const proxyProtocolHostAndPort = agentOptions.protocol ? | ||
' protocol=' + agentOptions.protocol + ' proxy=' + proxyHostAndPort | ||
: ' proxy=' + proxyHostAndPort; | ||
const proxyUsername = agentOptions.user ? ' user=' + agentOptions.user : ''; | ||
logMessages.messages = logMessages.messages + ` // Proxy configured in Agent:${proxyProtocolHostAndPort}${proxyUsername}`; | ||
// check if both the PROXY envvars and Connection proxy config is set | ||
// generate warnings if they are, and are also different | ||
if (envProxy.httpProxy && | ||
this.removeScheme(envProxy.httpProxy).toLowerCase() !== this.removeScheme(proxyHostAndPort).toLowerCase()) { | ||
logMessages.warnings = logMessages.warnings + ` Using both the HTTP_PROXY (${envProxy.httpProxy})` | ||
+ ` and the proxyHost:proxyPort (${proxyHostAndPort}) settings to connect, but with different values.` | ||
+ ' If you experience connectivity issues, try unsetting one of them.'; | ||
} | ||
if (envProxy.httpsProxy && | ||
this.removeScheme(envProxy.httpsProxy).toLowerCase() !== this.removeScheme(proxyHostAndPort).toLowerCase()) { | ||
logMessages.warnings = logMessages.warnings + ` Using both the HTTPS_PROXY (${envProxy.httpsProxy})` | ||
+ ` and the proxyHost:proxyPort (${proxyHostAndPort}) settings to connect, but with different values.` | ||
+ ' If you experience connectivity issues, try unsetting one of them.'; | ||
} | ||
} | ||
logMessages.messages = logMessages.messages ? logMessages.messages : ' none.'; | ||
return logMessages; | ||
}; | ||
exports.buildCredentialCacheKey = function (host, username, credType) { | ||
@@ -715,11 +654,2 @@ if (!host || !username || !credType) { | ||
/** | ||
* remove http:// or https:// from the input, e.g. used with proxy URL | ||
* @param input | ||
* @returns {string} | ||
*/ | ||
exports.removeScheme = function (input) { | ||
return input.toString().replace(/(^\w+:|^)\/\//, ''); | ||
}; | ||
exports.buildCredentialCacheKey = function (host, username, credType) { | ||
@@ -816,49 +746,2 @@ if (!host || !username || !credType) { | ||
exports.validateProxy = function (proxy) { | ||
const { host, port, noProxy, user, password } = proxy; | ||
// check for missing proxyHost | ||
Errors.checkArgumentExists(this.exists(host), | ||
ErrorCodes.ERR_CONN_CREATE_MISSING_PROXY_HOST); | ||
// check for invalid proxyHost | ||
Errors.checkArgumentValid(this.isString(host), | ||
ErrorCodes.ERR_CONN_CREATE_INVALID_PROXY_HOST); | ||
// check for missing proxyPort | ||
Errors.checkArgumentExists(this.exists(port), | ||
ErrorCodes.ERR_CONN_CREATE_MISSING_PROXY_PORT); | ||
// check for invalid proxyPort | ||
Errors.checkArgumentValid(this.isNumber(port), | ||
ErrorCodes.ERR_CONN_CREATE_INVALID_PROXY_PORT); | ||
if (this.exists(noProxy)) { | ||
// check for invalid noProxy | ||
Errors.checkArgumentValid(this.isString(noProxy), | ||
ErrorCodes.ERR_CONN_CREATE_INVALID_NO_PROXY); | ||
} | ||
if (this.exists(user) || this.exists(password)) { | ||
// check for missing proxyUser | ||
Errors.checkArgumentExists(this.exists(user), | ||
ErrorCodes.ERR_CONN_CREATE_MISSING_PROXY_USER); | ||
// check for invalid proxyUser | ||
Errors.checkArgumentValid(this.isString(user), | ||
ErrorCodes.ERR_CONN_CREATE_INVALID_PROXY_USER); | ||
// check for missing proxyPassword | ||
Errors.checkArgumentExists(this.exists(password), | ||
ErrorCodes.ERR_CONN_CREATE_MISSING_PROXY_PASS); | ||
// check for invalid proxyPassword | ||
Errors.checkArgumentValid(this.isString(password), | ||
ErrorCodes.ERR_CONN_CREATE_INVALID_PROXY_PASS); | ||
} else { | ||
delete proxy.user; | ||
delete proxy.password; | ||
} | ||
}; | ||
exports.validateEmptyString = function (value) { | ||
@@ -868,48 +751,2 @@ return value !== '' ? value : undefined; | ||
exports.getProxyFromEnv = function (isHttps = true) { | ||
const protocol = isHttps ? 'https' : 'http'; | ||
let proxyFromEnv = this.getEnvVar(`${protocol}_proxy`); | ||
if (!proxyFromEnv){ | ||
return null; | ||
} | ||
Logger.getInstance().debug(`Util.getProxyEnv: Using ${protocol.toUpperCase()}_PROXY from the environment variable`); | ||
if (proxyFromEnv.indexOf('://') === -1) { | ||
Logger.getInstance().info('Util.getProxyEnv: the protocol was missing from the environment proxy. Use the HTTP protocol.'); | ||
proxyFromEnv = 'http' + '://' + proxyFromEnv; | ||
} | ||
proxyFromEnv = new URL(proxyFromEnv); | ||
const proxy = { | ||
host: this.validateEmptyString(proxyFromEnv.hostname), | ||
port: Number(this.validateEmptyString(proxyFromEnv.port)), | ||
user: this.validateEmptyString(proxyFromEnv.username), | ||
password: this.validateEmptyString(proxyFromEnv.password), | ||
protocol: this.validateEmptyString(proxyFromEnv.protocol), | ||
noProxy: this.getNoProxyEnv(), | ||
}; | ||
this.validateProxy(proxy); | ||
return proxy; | ||
}; | ||
exports.getNoProxyEnv = function () { | ||
const noProxy = this.getEnvVar('no_proxy'); | ||
if (noProxy) { | ||
return noProxy.split(',').join('|'); | ||
} | ||
return undefined; | ||
}; | ||
exports.getHostFromURL = function (destination) { | ||
if (destination.indexOf('://') === -1) { | ||
destination = 'https' + '://' + destination; | ||
} | ||
try { | ||
return new URL(destination).hostname; | ||
} catch (err) { | ||
Logger.getInstance().error(`Failed to parse the destination to URL with the error: ${err}. Return destination as the host: ${destination}`); | ||
return destination; | ||
} | ||
}; | ||
exports.isNotEmptyAsString = function (variable) { | ||
@@ -920,2 +757,6 @@ if (typeof variable === 'string') { | ||
return exports.exists(variable); | ||
}; | ||
exports.isWindows = function () { | ||
return os.platform() === 'win32'; | ||
}; |
{ | ||
"name": "snowflake-sdk", | ||
"version": "1.15.0", | ||
"version": "2.0.0", | ||
"description": "Node.js driver for Snowflake", | ||
"dependencies": { | ||
"@aws-sdk/client-s3": "^3.388.0", | ||
"@aws-sdk/node-http-handler": "^3.374.0", | ||
"@smithy/node-http-handler": "^3.2.5", | ||
"@azure/storage-blob": "12.18.x", | ||
@@ -13,3 +13,3 @@ "@google-cloud/storage": "^7.7.0", | ||
"asn1.js-rfc5280": "^3.0.0", | ||
"axios": "^1.6.8", | ||
"axios": "^1.7.7", | ||
"big-integer": "^1.6.43", | ||
@@ -65,2 +65,3 @@ "bignumber.js": "^9.1.2", | ||
"test": "mocha -timeout 180000 --recursive --full-trace test/unit/**/*.js test/unit/*.js", | ||
"test:authentication": "mocha --exit -timeout 180000 --recursive --full-trace test/authentication/**/*.js test/authentication/*.js", | ||
"test:integration": "mocha -timeout 180000 --recursive --full-trace test/integration/**/*.js test/integration/*.js", | ||
@@ -71,5 +72,5 @@ "test:single": "mocha -timeout 180000 --full-trace", | ||
"test:unit:coverage": "nyc npm run test:unit", | ||
"test:ci": "mocha -timeout 180000 --recursive --full-trace test/**/*.js", | ||
"test:ci": "mocha -timeout 180000 --recursive --full-trace 'test/{unit,integration}/**/*.js'", | ||
"test:ci:coverage": "nyc npm run test:ci", | ||
"test:ci:withSystemTests": "mocha -timeout 180000 --recursive --full-trace test/**/*.js system_test/*.js", | ||
"test:ci:withSystemTests": "mocha -timeout 180000 --recursive --full-trace 'test/{unit,integration}/**/*.js' system_test/*.js", | ||
"test:ci:withSystemTests:coverage": "nyc npm run test:ci:withSystemTests", | ||
@@ -76,0 +77,0 @@ "test:manual": "mocha -timeout 180000 --full-trace --full-trace test/integration/testManualConnection.js" |
@@ -29,5 +29,5 @@ ******************************************************************************** | ||
This driver currently does not support GCP regional endpoints. Please ensure that any workloads using through this driver do not require support for regional endpoints on GCP. If you have questions about this, please contact Snowflake Support. | ||
This driver supports the GCS regional endpoint starting from version 2.0.0. Please ensure that any workloads using | ||
versions of this driver below 2.0.0 do not require support for regional endpoints on GCP. If you have questions about this, please contact Snowflake Support. | ||
Test | ||
@@ -70,3 +70,3 @@ ====================================================================== | ||
To run single test file use `test:single` script, e.g. run tests in `test/unit/snowflake_test.js` only: | ||
To run a single test file use `test:single` script, e.g. run tests in `test/unit/snowflake_test.js` only: | ||
@@ -73,0 +73,0 @@ ``` |
Environment variable access
Supply chain riskPackage accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 2 instances in 1 package
677723
81
18386
52
+ Added@smithy/abort-controller@3.1.9(transitive)
+ Added@smithy/node-http-handler@3.3.3(transitive)
+ Added@smithy/protocol-http@4.1.8(transitive)
+ Added@smithy/querystring-builder@3.0.11(transitive)
+ Added@smithy/types@3.7.2(transitive)
+ Added@smithy/util-uri-escape@3.0.0(transitive)
- Removed@aws-sdk/node-http-handler@^3.374.0
- Removed@aws-sdk/node-http-handler@3.374.0(transitive)
- Removed@smithy/abort-controller@1.1.0(transitive)
- Removed@smithy/node-http-handler@1.1.0(transitive)
- Removed@smithy/protocol-http@1.2.0(transitive)
- Removed@smithy/querystring-builder@1.1.0(transitive)
- Removed@smithy/types@1.2.0(transitive)
- Removed@smithy/util-uri-escape@1.1.0(transitive)
Updatedaxios@^1.7.7