snowflake-sdk
Comparing version 1.13.1 to 1.14.0
@@ -189,3 +189,3 @@ /*
export type RowMode = "object" | "array" | "object_with_renamed_duplicated_columns";
-export type LogLevel = "ERROR" | "WARN" | "INFO" | "DEBUG" | "TRACE";
+export type LogLevel = "ERROR" | "WARN" | "INFO" | "DEBUG" | "TRACE" | "OFF";
export type DataType = "String" | "Boolean" | "Number" | "Date" | "JSON" | "Buffer";
@@ -301,3 +301,3 @@ export type QueryStatus = "RUNNING" | "ABORTING" | "SUCCESS" | "FAILED_WITH_ERROR" | "ABORTED" | "QUEUED" | "FAILED_WITH_INCIDENT" | "DISCONNECTED" | "RESUMING_WAREHOUSE" | "QUEUED_REPARING_WAREHOUSE" | "RESTARTED" | "BLOCKED" | "NO_DATA";
/**
- * Specifies the lists of hosts that the driver should connect to directly, bypassing the proxy server (e.g. *.amazonaws.com to bypass Amazon S3 access). For multiple hosts, separate the hostnames with a pipe symbol (|).
+ * Specifies the lists of hosts that the driver should connect to directly, bypassing the proxy server (e.g. *.amazonaws.com to bypass Amazon S3 access). For multiple hosts, separate the hostnames with a pipe symbol (|).
 * You can also use an asterisk as a wild card. For example: noProxy: "*.amazonaws.com|*.my_company.com"
@@ -479,3 +479,3 @@ */
/**
- * Turn on the validation function which checks whether all the connection configuration from users are valid or not.
+ * Turn on the validation function which checks whether all the connection configuration from users are valid or not.
 */
@@ -491,2 +491,12 @@ validateDefaultParameters?: boolean;
+/**
+ * The option to enable the MFA token. The default value is false.
+ */
+clientRequestMFAToken?: boolean;
+/**
+ * The option to enable the SSO token. The default value is false.
+ */
+clientStoreTemporaryCredential?: boolean;
/**
 * The option to include the passcode from DUO into the password.
@@ -499,3 +509,3 @@ */
 */
-passcode?: string
+passcode?: string;
}
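
The two new cache flags sit alongside the existing authentication options. A minimal usage sketch; account and credential values below are placeholders, not taken from the diff:

// Hypothetical configuration exercising the new 1.14.0 options;
// all credential values are placeholders.
import * as snowflake from 'snowflake-sdk';

const connection = snowflake.createConnection({
  account: 'myorg-myaccount',            // placeholder
  username: 'alice',                     // placeholder
  password: 'secret',                    // placeholder
  authenticator: 'USERNAME_PASSWORD_MFA',
  clientRequestMFAToken: true,           // cache the MFA token (default false)
  clientStoreTemporaryCredential: true,  // cache the SSO token (default false)
  passcode: '123456',                    // DUO passcode, per the doc comment above
});
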
@@ -569,2 +579,7 @@
+/**
+ * Returns the value of the SERVICE_NAME parameter
+ */
+getServiceName(): string;
/**
 * Checks whether the given status is currently running.
@@ -824,2 +839,7 @@ */
+/**
+ * Returns true if this column is type MAP.
+ */
+isMap(): boolean;
/**
 * Returns the value of this column in a row.
@@ -826,0 +846,0 @@ */
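
Downstream, these declarations surface as the "OFF" log level plus two new accessors. A sketch of how they might be used, assuming an open connection; the table and column names are invented:

// Sketch only: assumes `connection` is already connected; identifiers are invented.
import * as snowflake from 'snowflake-sdk';

snowflake.configure({ logLevel: 'OFF' });  // "OFF" is newly accepted in 1.14.0

connection.execute({
  sqlText: 'SELECT map_col FROM demo_table',
  complete: (err, stmt, rows) => {
    if (err) { return; }
    console.log(stmt.getServiceName());        // value of the SERVICE_NAME parameter
    for (const col of stmt.getColumns()) {
      console.log(col.getName(), col.isMap()); // true for MAP-typed columns
    }
  },
});
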
@@ -27,2 +27,3 @@ /*
const driverDirectory = getDriverDirectory();
+Logger.getInstance().debug(`Detected driver directory: ${driverDirectory}`);
@@ -41,2 +42,3 @@ if (driverDirectory) {
const homedir = os.homedir();
+Logger.getInstance().debug(`Detected home directory: ${homedir}`);
@@ -51,5 +53,6 @@ if (exists(homedir)) {
} else {
-Logger.getInstance().warn('Home directory of the user is not present');
+Logger.getInstance().warn('Home directory of the user is not defined');
}
+Logger.getInstance().debug(`Detected default directories: ${driverDirectory}`);
return directories;
@@ -97,2 +100,4 @@ }
if (!allLevels.includes(level)) {
+Logger.getInstance().error(`Tried to create unsupported log level from string: ${value}`);
throw new Error('Unknown log level: ' + value);
@@ -117,4 +122,7 @@ }
this.getClientConfig = async function (configFilePath) {
+Logger.getInstance().debug('Retrieving client config');
const path = await findConfig(configFilePath);
if (!exists(path) || path === '') {
+Logger.getInstance().info('No config file path found. Client config will not be used.');
return null;
@@ -124,2 +132,3 @@ }
const isFileOk = await isFileNotWritableByGroupOrOthers(path, fsPromises).catch(err => {
+Logger.getInstance().warn('Failed to inspect config file path permissions. Client config will not be used.');
throw new ConfigurationError('Finding client configuration failed', err);
@@ -129,6 +138,10 @@ });
if (!isFileOk) {
+Logger.getInstance().warn(`Config file path permissions are invalid. File: ${path} can be modified by group or others. Client config will not be used.`);
throw new ConfigurationError(`Configuration file: ${path} can be modified by group or others`, 'IncorrectPerms');
}
+Logger.getInstance().debug(`Config file path permissions are valid. Path: ${path}`);
const configFileContents = await readFileConfig(path);
+Logger.getInstance().info('Using client configuration from path: %s', path);
return configFileContents == null ? null : parseConfigFile(path, configFileContents);
@@ -138,3 +151,6 @@ };
function readFileConfig(filePath) {
+Logger.getInstance().debug(`Reading config file. Path: ${filePath}`);
if (!filePath) {
+Logger.getInstance().trace(`Path of config file is not specified. Nothing to read. Path: ${filePath}`);
return Promise.resolve(null);
@@ -144,2 +160,3 @@ }
.catch(err => {
+Logger.getInstance().debug(`Reading configuration from the file failed. Path: ${filePath}`);
throw new ConfigurationError('Finding client configuration failed', err);
@@ -150,7 +167,13 @@ });
function parseConfigFile(path, configurationJson) {
+Logger.getInstance().debug('Parsing config file: %s', path);
try {
const parsedConfiguration = JSON.parse(configurationJson);
+Logger.getInstance().trace('Config file contains correct JSON structure. Validating the input.');
checkUnknownEntries(parsedConfiguration);
validate(parsedConfiguration);
-return new ClientConfig(
+Logger.getInstance().debug('Config file contains valid configuration input.');
+const clientConfig = new ClientConfig(
path,
@@ -162,3 +185,8 @@ new ClientLoggingConfig(
);
+Logger.getInstance().info('Client Configuration created with Log Level: %s and Log Path: %s', clientConfig.loggingConfig.logLevel, clientConfig.loggingConfig.logPath);
+return clientConfig;
} catch (err) {
+Logger.getInstance().error('Parsing client configuration failed. Used config file from path: %s', path);
throw new ConfigurationError('Parsing client configuration failed', err);
@@ -184,6 +212,9 @@ }
if (logLevel == null) {
+Logger.getInstance().debug('Log level is not specified.');
return;
}
if (!isString(logLevel)) {
-throw new Error('Log level is not a string');
+const errorMessage = 'Log level is not a string.';
+Logger.getInstance().error(errorMessage);
+throw new Error(errorMessage);
}
@@ -196,6 +227,9 @@ levelFromString(logLevel);
if (logPath == null) {
+Logger.getInstance().debug('Log path is not specified');
return;
}
if (!isString(logPath)) {
-throw new Error('Log path is not a string');
+const errorMessage = 'Log path is not a string.';
+Logger.getInstance().error(errorMessage);
+throw new Error(errorMessage);
}
@@ -213,4 +247,5 @@ }
async function findConfig(filePathFromConnectionString) {
+Logger.getInstance().trace(`findConfig() called with param: ${filePathFromConnectionString}`);
if (exists(filePathFromConnectionString)) {
-Logger.getInstance().info('Using client configuration path from a connection string: %s', filePathFromConnectionString);
+Logger.getInstance().info('Found client configuration path in a connection string. Path: %s', filePathFromConnectionString);
return filePathFromConnectionString;
@@ -220,3 +255,3 @@ }
if (exists(filePathFromEnvVariable)) {
-Logger.getInstance().info('Using client configuration path from an environment variable: %s', filePathFromEnvVariable);
+Logger.getInstance().info('Found client configuration path in an environment variable. Path: %s', filePathFromEnvVariable);
return filePathFromEnvVariable;
@@ -226,6 +261,6 @@ }
if (exists(fileFromDefDirs)) {
-Logger.getInstance().info('Using client configuration path from %s directory: %s', fileFromDefDirs.dirDescription, fileFromDefDirs.configPath);
+Logger.getInstance().info('Found client configuration path in %s directory. Path: %s', fileFromDefDirs.dirDescription, fileFromDefDirs.configPath);
return fileFromDefDirs.configPath;
}
-Logger.getInstance().info('No client config file found in default directories');
+Logger.getInstance().info('No client config detected.');
return null;
@@ -243,8 +278,11 @@ }
async function searchForConfigInDefaultDirectories() {
+Logger.getInstance().debug(`Searching for config in default directories: ${defaultDirectories}`);
for (const directory of defaultDirectories) {
const configPath = await searchForConfigInDictionary(directory.dir, directory.dirDescription);
if (exists(configPath)) {
+Logger.getInstance().debug(`Config found in the default directory: ${directory.dir}. Path: ${configPath}`);
return { configPath: configPath, dirDescription: directory.dirDescription };
}
}
+Logger.getInstance().debug('Unable to find config in any default directory.');
return null;
@@ -251,0 +289,0 @@ }
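
For reference, getClientConfig parses a JSON file shaped like the one below. The key names follow the driver's easy-logging client config format, so treat this as an assumed example rather than part of the diff:

{
  "common": {
    "log_level": "INFO",
    "log_path": "/some/writable/directory"
  }
}

Per the permission check above, the file must not be writable by group or others, otherwise the driver refuses to use it.
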
@@ -35,2 +35,3 @@ /*
if (!fixedConfiguration.token) {
+Logger.getInstance().error('The token does not exist or has empty value.');
throw new Error('The token does not exist or has empty value');
@@ -43,10 +44,16 @@ }
function loadConnectionConfiguration() {
+Logger.getInstance().trace('Loading connection configuration from the local files...');
const snowflakeConfigDir = defaultIfNotSet(process.env.SNOWFLAKE_HOME, path.join(os.homedir(), '.snowflake'));
+Logger.getInstance().trace('Looking for connection file in directory %s', snowflakeConfigDir);
const filePath = path.join(snowflakeConfigDir, 'connections.toml');
const resolvedPath = fs.realpathSync(filePath);
+Logger.getInstance().trace('Connection configuration file found under the path %s. Validating file access.', resolvedPath);
validateOnlyUserReadWritePermission(resolvedPath);
const str = fs.readFileSync(resolvedPath, { encoding: 'utf8' });
const configurationChecksum = generateChecksum(str);
-Logger.getInstance().info('Connection configuration file is read from file: %s. Checksum: %s', resolvedPath, configurationChecksum);
+Logger.getInstance().info('Connection configuration file is read from path: %s. Checksum: %s', resolvedPath, configurationChecksum);
+Logger.getInstance().trace('Trying to parse the config file');
const parsingResult = toml.parse(str);
const configurationName = defaultIfNotSet(process.env.SNOWFLAKE_DEFAULT_CONNECTION_NAME, 'default');
@@ -57,2 +64,3 @@
if (shouldReadTokenFromFile(fixedConfiguration)) {
+Logger.getInstance().info('Trying to read token from config file.');
readTokenFromFile(fixedConfiguration);
@@ -62,2 +70,3 @@ }
} else {
+Logger.getInstance().error('Connection configuration with name %s does not exist in the file %s', configurationName, resolvedPath);
throw new Error(`Connection configuration with name ${configurationName} does not exist`);
@@ -68,2 +77,3 @@ }
function fixUserKey(parsingResult) {
+Logger.getInstance().trace('Empty Username field will be filled with \'User\' field value.');
if (parsingResult['username'] === undefined && parsingResult['user'] !== undefined){
@@ -70,0 +80,0 @@ parsingResult['username'] = parsingResult['user'];
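
The loader resolves $SNOWFLAKE_HOME/connections.toml (defaulting to ~/.snowflake) and selects the section named by SNOWFLAKE_DEFAULT_CONNECTION_NAME, falling back to [default]. A sketch of such a file; the key names are assumed from common Snowflake connections.toml usage, not confirmed by this diff:

[default]
account = "myorg-myaccount"           # placeholder
user = "alice"                        # fixUserKey copies this into username
authenticator = "oauth"
token_file_path = "/path/to/token"    # assumed key; would trigger the read-token-from-file path
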
@@ -179,3 +179,3 @@ /*
if (Parameters.getValue(Parameters.names.CLIENT_SESSION_KEEP_ALIVE)) {
-self.keepalive = setInterval(self.heartbeat, Parameters.getValue(Parameters.names.CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY) * 1000, self);
+self.keepalive = setInterval(self.heartbeat, Parameters.getValue(Parameters.names.CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY) * 1000, callback);
}
@@ -309,3 +309,4 @@ if (Util.isFunction(callback)) {
// Get authenticator to use
-const auth = services.sf.getAuthenticator();
+const auth = Authenticator.getAuthenticator(connectionConfig, context.getHttpClient());
+services.sf.authenticator = auth;
@@ -312,0 +313,0 @@ try {
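
The keepalive fix only takes effect when the session keep-alive parameters are enabled. A sketch of the connection options that drive them; the option names come from the driver's documented configuration, values are illustrative:

// Illustrative values; these options map to the CLIENT_SESSION_KEEP_ALIVE and
// CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY parameters used above.
const connection = snowflake.createConnection({
  /* ...credentials... */
  clientSessionKeepAlive: true,
  clientSessionKeepAliveHeartbeatFrequency: 3600, // seconds between heartbeats
});
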
@@ -13,3 +13,7 @@ /*
const SqlTypes = require('./data_types').SqlTypes;
+const dateTimeFormatConverter = require('./datetime_format_converter');
const bigInt = require('big-integer');
+const moment = require('moment');
+const momentTimezone = require('moment-timezone');
+const util = require('../../util');
@@ -32,2 +36,3 @@ /**
const precision = options.precision;
+const fieldsMetadata = options.fields;
@@ -109,5 +114,6 @@ /**
this.isTimestampTz = createFnIsColumnOfType(type, SqlTypes.isTimestampTz, SqlTypes);
-this.isVariant = createFnIsColumnOfType(type, SqlTypes.isVariant, SqlTypes);
-this.isObject = createFnIsColumnOfType(type, SqlTypes.isObject, SqlTypes);
-this.isArray = createFnIsColumnOfType(type, SqlTypes.isArray, SqlTypes);
+this.isVariant = createFnIsColumnOfType(type, (type) => SqlTypes.isVariant(type, fieldsMetadata), SqlTypes);
+this.isObject = createFnIsColumnOfType(type, (type) => SqlTypes.isObject(type, fieldsMetadata), SqlTypes);
+this.isArray = createFnIsColumnOfType(type, (type) => SqlTypes.isArray(type, fieldsMetadata), SqlTypes);
+this.isMap = createFnIsColumnOfType(type, (type) => SqlTypes.isMap(type, fieldsMetadata), SqlTypes);
@@ -173,3 +179,12 @@ let convert;
convert = convertRawVariant;
-toString = toStringFromVariant;
+toString = toStringFromRawValue;
+} else if (this.isObject()) {
+convert = convertRawStructuredType(convertJsonObject);
+toString = toStringFromRawValue;
+} else if (this.isArray()) {
+convert = convertRawStructuredType(convertJsonArray);
+toString = toStringFromRawValue;
+} else if (this.isMap()) {
+convert = convertRawStructuredType(convertJsonMap);
+toString = toStringFromRawValue;
} else {
@@ -190,3 +205,4 @@ // column is of type string, so leave value as is
resultVersion: resultVersion,
-statementParameters: statementParameters
+statementParameters: statementParameters,
+fieldsMetadata: fieldsMetadata
};
@@ -276,8 +292,9 @@
-if ((rawColumnValue === '1') || (rawColumnValue === 'TRUE')) {
+if (rawColumnValue === true || (rawColumnValue === '1') || (rawColumnValue.toUpperCase() === 'TRUE')) {
ret = true;
-} else if ((rawColumnValue === '0') || (rawColumnValue === 'FALSE')) {
+} else if (rawColumnValue === false || (rawColumnValue === '0') || (rawColumnValue.toUpperCase() === 'FALSE')) {
ret = false;
} else {
throw new Error(`Value could not be converted to boolean: ${rawColumnValue}`);
}
return ret;
@@ -287,2 +304,216 @@ }
+/**
+ * Converts a raw column value of structured type object to javascript Object
+ *
+ * @param {Object} json
+ * @param {Object} context
+ *
+ * @returns {Object}
+ */
+function convertJsonObject(json, context) {
+if (context.fieldsMetadata){
+context.fieldsMetadata = context.fieldsMetadata.reduce(function (map, obj) {
+map[obj.name] = obj;
+return map;
+}, {});
+const result = {};
+Object.keys(json).forEach(function (key) {
+const fieldMetadata = context.fieldsMetadata[key];
+result[key] = mapStructuredTypeValue(json[key], context, fieldMetadata);
+});
+return result;
+} else {
+return json;
+}
+}
+/**
+ * Converts a raw column value of structured type array to javascript Object
+ *
+ * @param {Object} json
+ * @param {Object} context
+ *
+ * @returns {Object}
+ */
+function convertJsonArray(json, context) {
+if (context.fieldsMetadata) {
+const result = [];
+json.forEach(function (value) {
+result.push(mapStructuredTypeValue(value, context, context.fieldsMetadata[0]));
+});
+return result;
+} else {
+return json;
+}
+}
+/**
+ * Converts a raw column value of structured type map to javascript Object
+ *
+ * @param {Object} json
+ * @param {Object} context
+ *
+ * @returns {Object}
+ */
+function convertJsonMap(json, context) {
+if (Array.isArray(context.fieldsMetadata) && context.fieldsMetadata.length === 2) {
+const result = new Map;
+const keyMetadata = context.fieldsMetadata[0];
+const valueMetadata = context.fieldsMetadata[1];
+Object.keys(json).forEach(function (key) {
+const convertedKey = mapStructuredTypeValue(key, context, keyMetadata);
+const convertedValue = mapStructuredTypeValue(json[key], context, valueMetadata);
+result.set(convertedKey, convertedValue);
+});
+return result;
+} else {
+return json;
+}
+}
+/**
+ * Converts a raw column value of structured type OBJECT to javascript Object
+ *
+ * @param {String} rawColumnValue
+ * @param {Object} column
+ * @param {Object} context
+ *
+ * @returns {Object}
+ */
+const convertRawStructuredType = (convertJsonFn) => (rawColumnValue, column, context) => {
+if (Util.string.isNotNullOrEmpty(rawColumnValue)) {
+try {
+const json = JSON.parse(rawColumnValue);
+return convertJsonFn(json, context);
+} catch (jsonParseError) {
+Logger.getInstance().debug('Column %s raw value cannot be parsed as JSON: %s ', column.name, jsonParseError.message);
+throw new Error(util.format('Column [%s] raw value cannot be parsed as JSON: %s ', column.name, jsonParseError.message));
+}
+} else {
+throw new Error(util.format('Column %s raw value is null or empty ', column.name));
+}
+};
+function mapStructuredTypeValue(columnValue, context, metadataField) {
+const formatLtz = context.statementParameters['TIMESTAMP_LTZ_OUTPUT_FORMAT'] ?? context.statementParameters['TIMESTAMP_OUTPUT_FORMAT'];
+const formatTz = context.statementParameters['TIMESTAMP_TZ_OUTPUT_FORMAT'] ?? context.statementParameters['TIMESTAMP_OUTPUT_FORMAT'];
+const formatNtz = context.statementParameters['TIMESTAMP_NTZ_OUTPUT_FORMAT'];
+let value;
+switch (metadataField.type) {
+case 'text':
+value = columnValue;
+break;
+case 'real':
+value = toValueFromNumber(convertRawNumber(columnValue));
+break;
+case 'fixed':
+value = toValueFromNumber(convertRawNumber(columnValue));
+break;
+case 'boolean':
+value = convertRawBoolean(columnValue);
+break;
+case 'timestamp_ltz':
+value = convertTimestampTzString(columnValue, formatLtz, context.statementParameters['TIMEZONE'], metadataField.scale).toSfDate();
+break;
+case 'timestamp_ntz':
+value = convertTimestampNtzString(columnValue, formatNtz, moment.tz.zone('UTC'), metadataField.scale).toSfDate();
+break;
+case 'timestamp_tz':
+value = convertTimestampTzString(columnValue, formatTz, context.statementParameters['TIMEZONE'], metadataField.scale).toSfDate();
+break;
+case 'date': {
+context.format = context.statementParameters['DATE_OUTPUT_FORMAT'];
+value = convertDateString(columnValue, context.format );
+break;
+}
+case 'time':
+context.format = context.statementParameters['TIME_OUTPUT_FORMAT'];
+value = convertTimeString(columnValue, context.format, moment.tz.zone('UTC'), metadataField.scale).toSfTime();
+break;
+case 'binary':
+context.format = context.statementParameters['BINARY_OUTPUT_FORMAT'];
+value = convertRawBinary(columnValue, this, context).toJSON().data;
+break;
+case 'object': {
+const internalContext = {
+convert: convertRawStructuredType(convertJsonObject),
+toValue: noop,
+toString: toString,
+format: toStringFromRawValue,
+resultVersion: context.resultVersion,
+statementParameters: context.statementParameters,
+fieldsMetadata: metadataField.fields
+};
+value = convertJsonObject(columnValue, internalContext);
+break;
+}
+case 'array': {
+const internalArrayContext = {
+convert: convertRawStructuredType(convertJsonArray),
+toValue: noop,
+toString: toString,
+format: toStringFromRawValue,
+resultVersion: context.resultVersion,
+statementParameters: context.statementParameters,
+fieldsMetadata: metadataField.fields
+};
+value = convertJsonArray(columnValue, internalArrayContext);
+break;
+}
+case 'map': {
+const internalMapContext = {
+convert: convertRawStructuredType(convertJsonMap),
+toValue: noop,
+toString: toString,
+format: toStringFromRawValue,
+resultVersion: context.resultVersion,
+statementParameters: context.statementParameters,
+fieldsMetadata: metadataField.fields
+};
+value = convertJsonMap(columnValue, internalMapContext);
+break;
+}
+default:
+Logger.getInstance().info(`Column type not supported: ${context.fieldsMetadata.type}`);
+throw new Error(`Column type not supported: ${context.fieldsMetadata.type}`);
+}
+return value;
+}
+const convertTimestampTzString = function (stringValue, formatSql, timezone, scale) {
+const formatMoment = dateTimeFormatConverter.convertSnowflakeFormatToMomentFormat(formatSql, scale);
+const epochSeconds = momentTimezone(stringValue, formatMoment).unix();
+return new SfTimestamp(epochSeconds, 0, scale, timezone, formatSql);
+};
+const convertTimestampNtzString = function (stringValue, formatSql, timezone, scale) {
+const formatMoment = dateTimeFormatConverter.convertSnowflakeFormatToMomentFormat(formatSql, scale);
+const epochSeconds = momentTimezone.utc(stringValue, formatMoment).unix();
+return new SfTimestamp(epochSeconds, 0, scale, timezone, formatSql);
+};
+const convertDateString = function (stringValue, formatSql) {
+const formatMoment = dateTimeFormatConverter.convertSnowflakeFormatToMomentFormat(formatSql, 0);
+const epochSeconds = momentTimezone.utc(stringValue, formatMoment).unix();
+const date = new SfTimestamp(
+epochSeconds, // convert to seconds
+0, // no nano seconds
+0, // no scale required
+'UTC', // use utc as the timezone
+context.format);
+date._valueAsString = stringValue;
+return date.toSfDate();
+};
+const convertTimeString = function (stringValue, formatSql, timezone, scale) {
+const formatMoment = dateTimeFormatConverter.convertSnowflakeFormatToMomentFormat(formatSql, scale);
+const moment = momentTimezone(stringValue, formatMoment);
+const epochSeconds = moment.hours() * 3600 + moment.minutes() * 60 + moment.seconds();
+const time = new SfTimestamp(epochSeconds, 0, scale, timezone, formatSql);
+time._valueAsString = stringValue;
+return time;
+};
/**
 * Converts a raw column value of type Date to a Snowflake Date.
@@ -637,3 +868,3 @@ *
 */
-function toStringFromVariant(columnValue) {
+function toStringFromRawValue(columnValue) {
return (columnValue !== null) ? JSON.stringify(columnValue) : DataTypes.getNullValue();
@@ -640,0 +871,0 @@ }
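
Taken together, these converters mean a structured column that carries field metadata now materializes as a native JavaScript value (Object, Array, or Map) instead of the generic JSON handling that VARIANT still receives. A consumption sketch, assuming an open connection; the SQL literal and cast are assumed syntax, not taken from the diff:

// Sketch: reading a structured MAP column with 1.14.0.
connection.execute({
  sqlText: "SELECT {'a': 1, 'b': 2}::MAP(VARCHAR, NUMBER) AS m",
  complete: (err, stmt, rows) => {
    if (err) { return; }
    const m = rows[0].M;                       // a JS Map, keyed by the VARCHAR keys
    console.log(m instanceof Map, m.get('a')); // true 1
  },
});
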
@@ -24,3 +24,4 @@ /*
OBJECT: 'object',
-ARRAY: 'array'
+ARRAY: 'array',
+MAP: 'map'
},
@@ -147,6 +148,7 @@
 */
-isVariant: function (sqlType) {
+isVariant: function (sqlType, fieldsMetadata) {
return (sqlType === this.values.VARIANT) ||
-(sqlType === this.values.OBJECT) ||
-(sqlType === this.values.ARRAY);
+(sqlType === this.values.OBJECT && fieldsMetadata == null) ||
+(sqlType === this.values.ARRAY && fieldsMetadata == null) ||
+(sqlType === this.values.MAP && fieldsMetadata == null);
},
@@ -161,4 +163,4 @@
 */
-isObject: function (sqlType) {
-return (sqlType === this.values.OBJECT);
+isObject: function (sqlType, fieldsMetadata) {
+return (sqlType === this.values.OBJECT && fieldsMetadata != null);
},
@@ -173,4 +175,15 @@
 */
-isArray: function (sqlType) {
-return (sqlType === this.values.ARRAY);
+isArray: function (sqlType, fieldsMetadata) {
+return (sqlType === this.values.ARRAY && fieldsMetadata != null);
},
+/**
+ * Determines if a column's SQL type is Map.
+ *
+ * @param {Object} sqlType
+ *
+ * @returns {Boolean}
+ */
+isMap: function (sqlType, fieldsMetadata) {
+return (sqlType === this.values.MAP && fieldsMetadata != null);
+}
@@ -188,3 +201,6 @@ };
JSON: 'JSON',
-BUFFER: 'BUFFER'
+BUFFER: 'BUFFER',
+OBJECT: 'OBJECT',
+ARRAY: 'ARRAY',
+MAP: 'MAP'
},
@@ -254,4 +270,5 @@
MAP_SQL_TO_NATIVE[sqlTypeValues.VARIANT] = nativeTypeValues.JSON;
-MAP_SQL_TO_NATIVE[sqlTypeValues.OBJECT] = nativeTypeValues.JSON;
-MAP_SQL_TO_NATIVE[sqlTypeValues.ARRAY] = nativeTypeValues.JSON;
+MAP_SQL_TO_NATIVE[sqlTypeValues.OBJECT] = nativeTypeValues.OBJECT;
+MAP_SQL_TO_NATIVE[sqlTypeValues.ARRAY] = nativeTypeValues.ARRAY;
+MAP_SQL_TO_NATIVE[sqlTypeValues.MAP] = nativeTypeValues.MAP;
@@ -258,0 +275,0 @@ exports.SqlTypes = sqlTypes;
@@ -7,33 +7,4 @@ /*
const Util = require('../../util');
+const datetimeFormatConverter = require('./datetime_format_converter');
-/**
- * An array of tag mappings to convert a sql format to a moment.js format. If
- * the 2nd element is empty, special code is needed.
- */
-const CONST_TAGS =
-[
-// proper mappings
-['YYYY', 'YYYY'],
-['YY', 'YY'],
-['MM', 'MM'],
-['MON', 'MMM'],
-['DD', 'DD'],
-['DY', 'ddd'],
-['HH24', 'HH'],
-['HH12', 'hh'],
-['HH', 'HH'],
-['AM', 'A'],
-['PM', 'A'],
-['MI', 'mm'],
-['SS', 'ss'],
-['TZH:TZM', 'Z'],
-['TZHTZM', 'ZZ'],
-// special code needed
-['TZH', ''],
-['TZM', ''],
-['FF', '']
-];
/**
 * Creates a new SfTimestamp instance.
@@ -98,3 +69,3 @@ *
-const tags = CONST_TAGS;
+const tags = datetimeFormatConverter.formatTagsMap();
@@ -165,6 +136,13 @@ // iterate over the format string
}
// format the moment and cache the result
-this._valueAsString = moment.format(formatMoment);
+const timezone = this.timezone.name || this.timezone;
+if (timezone) {
+if (typeof timezone === 'number') {
+this._valueAsString = moment.utcOffset(timezone).format(formatMoment);
+} else {
+this._valueAsString = moment.tz(timezone).format(formatMoment);
+}
+} else {
+this._valueAsString = moment.format(formatMoment);
+}
return this._valueAsString;
@@ -171,0 +149,0 @@ };
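
The new branch distinguishes numeric UTC offsets from named zones when formatting. A minimal moment-timezone sketch of the two paths; the timestamp and zone values are arbitrary:

// moment treats utcOffset() arguments with magnitude under 16 as hours and
// larger magnitudes as minutes; the driver passes whatever this.timezone holds.
const momentTimezone = require('moment-timezone');

const m = momentTimezone.unix(1700000000);
console.log(m.clone().tz('Europe/Warsaw').format('YYYY-MM-DD HH:mm Z')); // named-zone path
console.log(m.clone().utcOffset(-300).format('YYYY-MM-DD HH:mm Z'));     // numeric-offset path (minutes)
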
@@ -250,2 +250,5 @@ /*
DATE: { value: nativeTypeValues.DATE },
+OBJECT: { value: nativeTypeValues.OBJECT },
+ARRAY: { value: nativeTypeValues.ARRAY },
+MAP: { value: nativeTypeValues.MAP },
JSON: { value: nativeTypeValues.JSON }
@@ -252,0 +255,0 @@ });
@@ -1188,8 +1188,7 @@ /*
-auth.reauthenticate(context.options.json, retryOption).then(() => {
-numRetries = retryOption.numRetries;
-totalElapsedTime = retryOption.totalElapsedTime;
-setTimeout(sendRequest, sleep * 1000);
-return;
-});
+await auth.reauthenticate(context.options.json, retryOption);
+numRetries = retryOption.numRetries;
+totalElapsedTime = retryOption.totalElapsedTime;
+setTimeout(sendRequest, sleep * 1000);
+return;
} else {
@@ -1196,0 +1195,0 @@ if (auth instanceof AuthKeypair) {
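
This change awaits reauthentication before scheduling the retry; previously the .then() callback updated the retry counters after the surrounding function had already returned. A generic sketch of the ordering guarantee; the function names are invented for illustration:

// Invented names; shows why the await matters: the retry is only scheduled
// once credentials have actually been refreshed.
async function retryAfterReauth(reauthenticate, sendRequest, sleepSeconds) {
  await reauthenticate();                        // completes before anything below runs
  setTimeout(sendRequest, sleepSeconds * 1000);  // retry with fresh credentials
}
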
@@ -11,3 +11,2 @@ /*
const Errors = require('./errors');
-const ErrorCodes = Errors.codes;
@@ -543,3 +542,3 @@ /**
exports.isPrivateLink = function (host) {
-Errors.checkArgumentExists(this.exists(host), ErrorCodes.ERR_CONN_CREATE_MISSING_HOST);
+Errors.checkArgumentExists(this.exists(host), Errors.codes.ERR_CONN_CREATE_MISSING_HOST);
return host.toLowerCase().includes('privatelink.snowflakecomputing.');
@@ -546,0 +545,0 @@ };
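
The predicate is a case-insensitive substring check, so, with illustrative hostnames:

// Hostnames are illustrative; behavior follows the substring check above.
Util.isPrivateLink('myaccount.us-east-1.privatelink.snowflakecomputing.com'); // true
Util.isPrivateLink('MYACCOUNT.PRIVATELINK.SNOWFLAKECOMPUTING.CN');            // true (host is lowercased first)
Util.isPrivateLink('myaccount.snowflakecomputing.com');                       // false
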
{
"name": "snowflake-sdk",
-"version": "1.13.1",
+"version": "1.14.0",
"description": "Node.js driver for Snowflake",
@@ -40,2 +40,3 @@ "dependencies": {
"async": "^3.2.3",
+"check-dts": "^0.8.2",
"eslint": "^8.41.0",
@@ -59,4 +60,4 @@ "mocha": "^10.2.0",
"scripts": {
-"lint:check": "eslint",
-"lint:check:all": "eslint lib samples system_test test",
+"lint:check": "eslint && check-dts index.d.ts",
+"lint:check:all": "eslint lib samples system_test test && check-dts index.d.ts",
"lint:check:all:errorsOnly": "npm run lint:check:all -- --quiet",
@@ -82,2 +83,2 @@ "lint:fix": "eslint --fix",
"license": "Apache-2.0"
}