@salesforce/core - npm Package Compare versions

Comparing version 0.20.0 to 0.21.0

docs/@salesforce/core/0.21.0/Aliases.html


lib/authInfo.d.ts

@@ -18,4 +18,5 @@ /**

*/
import { Optional } from '@salesforce/ts-types';
import { Nullable, Optional } from '@salesforce/ts-types';
import { OAuth2Options } from 'jsforce';
import { Connection } from './connection';
export interface AuthFields {

@@ -44,2 +45,7 @@ accessToken?: string;

}
export declare type RefreshFn = (conn: Connection, callback: (err: Nullable<Error>, accessToken?: string, res?: object) => Promise<void>) => Promise<void>;
export declare type ConnectionOptions = AuthFields & {
oauth2?: Partial<OAuth2Options>;
refreshFn?: RefreshFn;
};
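For reference, a minimal sketch of how the new `ConnectionOptions` and `RefreshFn` types added above might be populated by a consumer; only the type shapes come from this diff, while the import path and concrete values are assumptions:

```typescript
// Illustrative only: type shapes are from lib/authInfo.d.ts above; the import
// path and values are placeholders, not part of this release.
import { Connection, ConnectionOptions, RefreshFn } from '@salesforce/core/lib/authInfo';

const onRefresh: RefreshFn = async (conn: Connection, callback) => {
  // e.g. persist the refreshed token somewhere, then hand it back to jsforce
  await callback(null, 'REFRESHED_ACCESS_TOKEN');
};

const opts: ConnectionOptions = {
  accessToken: '00Dxx0000000000!AQEAQ_PLACEHOLDER',     // AuthFields member
  instanceUrl: 'https://example.my.salesforce.com',     // AuthFields member
  oauth2: { loginUrl: 'https://login.salesforce.com' }, // Partial<OAuth2Options>
  refreshFn: onRefresh
};
```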
export declare enum SFDC_URLS {

@@ -175,3 +181,3 @@ sandbox = "https://test.salesforce.com",

*/
getConnectionOptions(): AuthFields;
getConnectionOptions(): ConnectionOptions;
/**

@@ -178,0 +184,0 @@ * Get the authorization fields.


lib/authInfo.js

@@ -56,5 +56,5 @@ "use strict";

const jsforce_1 = require("jsforce");
// @ts-ignore No typings directly available for jsforce/lib/transport
const Transport = require("jsforce/lib/transport");
const jwt = require("jsonwebtoken");
const _ = require("lodash");
const url_1 = require("url");

@@ -75,3 +75,4 @@ const authInfoConfig_1 = require("./config/authInfoConfig");

async jwtAuthorize(innerToken, callback) {
return super['_postParams']({
// @ts-ignore TODO: need better typings for jsforce
return super._postParams({
grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer',

@@ -99,3 +100,3 @@ assertion: innerToken

const codeChallenge = base64UrlEscape(crypto_1.createHash('sha256').update(this.codeVerifier).digest('base64'));
_.set(params, 'code_challenge', codeChallenge);
kit_1.set(params, 'code_challenge', codeChallenge);
return super.getAuthorizationUrl(params);

@@ -115,4 +116,5 @@ }

async _postParams(params, callback) {
_.set(params, 'code_verifier', this.codeVerifier);
return super['_postParams'](params, callback);
kit_1.set(params, 'code_verifier', this.codeVerifier);
// @ts-ignore TODO: need better typings for jsforce
return super._postParams(params, callback);
}

@@ -127,3 +129,3 @@ }

function isInternalUrl(loginUrl = '') {
return loginUrl.startsWith('https://gs1.') || _.some(INTERNAL_URL_PARTS, part => loginUrl.includes(part));
return loginUrl.startsWith('https://gs1.') || INTERNAL_URL_PARTS.some(part => loginUrl.includes(part));
}

@@ -133,4 +135,4 @@ function getJwtAudienceUrl(options) {

let audienceUrl = SFDC_URLS.production;
const loginUrl = _.get(options, 'loginUrl', '');
const createdOrgInstance = _.get(options, 'createdOrgInstance', '').trim().toLowerCase();
const loginUrl = kit_1.get(options, 'loginUrl', '');
const createdOrgInstance = kit_1.get(options, 'createdOrgInstance', '').trim().toLowerCase();
if (process.env.SFDX_AUDIENCE_URL) {

@@ -179,3 +181,15 @@ audienceUrl = process.env.SFDX_AUDIENCE_URL;

_crypt(fields, method) {
return _.mapValues(fields, (val, key) => AuthInfoCrypto.encryptedFields.includes(key) ? this[method](val) : val);
const copy = {};
for (const key of ts_types_1.keysOf(fields)) {
const rawValue = fields[key];
if (rawValue !== undefined) {
if (ts_types_1.isString(rawValue) && AuthInfoCrypto.encryptedFields.includes(key)) {
copy[key] = this[method](ts_types_1.asString(rawValue));
}
else {
copy[key] = rawValue;
}
}
}
return copy;
}

@@ -189,3 +203,3 @@ }

// See https://toolsn.ietf.org/html/rfc4648#section-5
return _.replace(base64Encoded, /\+/g, '-').replace(/\//g, '_').replace(/=/g, '');
return base64Encoded.replace(/\+/g, '-').replace(/\//g, '_').replace(/=/g, '');
}
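As a standalone equivalent of the lodash-free helper above (RFC 4648 §5 base64url escaping); the Buffer call is only an illustrative way to produce input containing `+` and `=`:

```typescript
// Same character substitutions as the replace chain shown above.
function base64UrlEscape(base64Encoded: string): string {
  return base64Encoded.replace(/\+/g, '-').replace(/\//g, '_').replace(/=/g, '');
}

const encoded = Buffer.from([0xfb, 0xef]).toString('base64'); // '++8='
console.log(base64UrlEscape(encoded));                        // '--8'
```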

@@ -227,2 +241,4 @@ /**

this.fields = {};
// Possibly overridden in create
this.usingAccessToken = false;
this.fields.username = username;

@@ -249,3 +265,3 @@ }

// If the username is an access token, use that for auth and don't persist
const accessTokenMatch = kit_1.isString(username) && username.match(/^(00D\w{12,15})![\.\w]*$/);
const accessTokenMatch = ts_types_1.isString(username) && username.match(/^(00D\w{12,15})![\.\w]*$/);
if (accessTokenMatch) {

@@ -436,3 +452,3 @@ // Need to setup the logger and authInfoCrypto since we don't call init()

AuthInfo.cache.set(username, this.fields);
const dataToSave = _.clone(this.fields);
const dataToSave = kit_1.cloneJson(this.fields);
// Do not persist the default client ID and secret

@@ -458,3 +474,3 @@ if (dataToSave.clientId === DEFAULT_CONNECTED_APP_INFO.clientId) {

update(authData, encrypt = true) {
if (authData && kit_1.isPlainObject(authData)) {
if (authData && ts_types_1.isPlainObject(authData)) {
if (encrypt) {

@@ -474,3 +490,3 @@ authData = authInfoCrypto.encryptFields(authData);

getConnectionOptions() {
let json;
let opts;
const { accessToken, instanceUrl } = this.fields;

@@ -480,7 +496,7 @@ if (this.isAccessTokenFlow()) {

// Just auth with the accessToken
json = { accessToken, instanceUrl };
opts = { accessToken, instanceUrl };
}
else if (this.isJwt()) {
this.logger.info('Returning fields for a connection using JWT config.');
json = {
opts = {
accessToken,

@@ -498,3 +514,3 @@ instanceUrl,

// Decrypt a user provided client secret or use the default.
json = {
opts = {
oauth2: {

@@ -511,3 +527,3 @@ loginUrl: instanceUrl || 'https://login.salesforce.com',

// decrypt the fields
return authInfoCrypto.decryptFields(json);
return authInfoCrypto.decryptFields(opts);
}

@@ -550,3 +566,3 @@ /**

await this.save();
return await callback(null, authInfoCrypto.decrypt(this.fields.accessToken));
return await callback(null, authInfoCrypto.decrypt(ts_types_1.asString(this.fields.accessToken)));
}

@@ -559,5 +575,5 @@ catch (err) {

}
return callback(sfdxError_1.SfdxError.create(errConfig));
return await callback(sfdxError_1.SfdxError.create(errConfig));
}
return callback(err);
return await callback(err);
}

@@ -587,3 +603,3 @@ }

accessToken: ts_types_1.asString(_authFields.access_token),
orgId: _parseIdUrl(_authFields.id).orgId,
orgId: _parseIdUrl(ts_types_1.ensureString(_authFields.id)).orgId,
loginUrl: options.loginUrl,

@@ -620,4 +636,7 @@ privateKey: options.privateKey

accessToken: _authFields.access_token,
// @ts-ignore TODO: need better typings for jsforce
instanceUrl: _authFields.instance_url,
// @ts-ignore TODO: need better typings for jsforce
orgId: _parseIdUrl(_authFields.id).orgId,
// @ts-ignore TODO: need better typings for jsforce
loginUrl: options.loginUrl || _authFields.instance_url,

@@ -641,2 +660,3 @@ refreshToken: options.refreshToken,

}
// @ts-ignore TODO: need better typings for jsforce
const { userId, orgId } = _parseIdUrl(_authFields.id);

@@ -647,2 +667,3 @@ // Make a REST call for the username directly. Normally this is done via a connection

const apiVersion = 'v42.0'; // hardcoding to v42.0 just for this call is okay.
// @ts-ignore TODO: need better typings
const url = `${_authFields.instance_url}/services/data/${apiVersion}/sobjects/User/${userId}`;

@@ -654,3 +675,3 @@ const headers = Object.assign({ Authorization: `Bearer ${_authFields.access_token}` }, connection_1.SFDX_HTTP_HEADERS);

const response = await new Transport().httpRequest({ url, headers });
username = _.get(JSON.parse(response.body), 'Username');
username = kit_1.get(kit_1.parseJsonMap(response.body), 'Username');
}

@@ -662,5 +683,7 @@ catch (err) {

accessToken: _authFields.access_token,
// @ts-ignore TODO: need better typings for jsforce
instanceUrl: _authFields.instance_url,
orgId,
username,
// @ts-ignore TODO: need better typings for jsforce
loginUrl: options.loginUrl || _authFields.instance_url,

@@ -667,0 +690,0 @@ refreshToken: _authFields.refresh_token

@@ -54,3 +54,3 @@ /**

*/
validator: (value: any) => {};
validator: (value: ConfigValue) => boolean;
/**

@@ -140,3 +140,3 @@ * The message to return in the error if the validation fails.

private static propertyConfigMap;
private crypto;
private crypto?;
/**

@@ -143,0 +143,0 @@ * @returns {Promise<object>} Read, assign, and return the config contents.

@@ -30,4 +30,4 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
const kit_1 = require("@salesforce/kit");
const ts_types_1 = require("@salesforce/ts-types");
const _ = require("lodash");
const crypto_1 = require("../crypto");

@@ -76,3 +76,3 @@ const messages_1 = require("../messages");

// If a value is provided validate it otherwise no value is unset.
validator: value => value == null || sfdc_1.isSalesforceDomain(value),
validator: value => value == null || (ts_types_1.isString(value) && sfdc_1.isSalesforceDomain(value)),
failedMessage: Config.messages.getMessage('InvalidInstanceUrl')

@@ -86,3 +86,3 @@ }

// If a value is provided validate it otherwise no value is unset.
validator: sfdc_1.validateApiVersion,
validator: value => ts_types_1.isString(value) && sfdc_1.validateApiVersion(value),
failedMessage: Config.messages.getMessage('InvalidApiVersion')

@@ -100,3 +100,3 @@ }

input: {
validator: value => value.toString() === 'true' || value.toString() === 'false',
validator: value => value != null && ['true', 'false'].includes(value.toString()),
failedMessage: Config.messages.getMessage('InvalidBooleanConfigValue')

@@ -114,3 +114,3 @@ }

}
Config.propertyConfigMap = _.keyBy(Config.allowedProperties, 'key');
Config.propertyConfigMap = kit_1.keyBy(Config.allowedProperties, 'key');
return await super.create(options);

@@ -249,8 +249,11 @@ }

async cryptProperties(encrypt) {
const hasEncryptedProperties = _.some(this.entries(), ([key, val]) => !!Config.propertyConfigMap[key].encrypted);
const hasEncryptedProperties = this.entries().some(([key]) => {
return !!ts_types_1.ensure(Config.propertyConfigMap[key]).encrypted;
});
if (hasEncryptedProperties) {
await this.initCrypto();
const crypto = ts_types_1.ensure(this.crypto);
this.forEach((key, value) => {
if (this.getPropertyConfig(key).encrypted) {
this.set(key, ts_types_1.ensure(encrypt ? this.crypto.encrypt(value) : this.crypto.decrypt(value)));
if (this.getPropertyConfig(key).encrypted && ts_types_1.isString(value)) {
this.set(key, ts_types_1.ensure(encrypt ? crypto.encrypt(value) : crypto.decrypt(value)));
}

@@ -257,0 +260,0 @@ });

@@ -212,3 +212,3 @@ /**

* Sets the env variables.
* @param {object} envVars The env variables to set.
* @param {Dictionary<string>} envVars The env variables to set.
* @private

@@ -215,0 +215,0 @@ */

@@ -28,6 +28,6 @@ "use strict";

const kit_1 = require("@salesforce/kit");
const _ = require("lodash");
const ts_types_1 = require("@salesforce/ts-types");
const sfdxError_1 = require("../sfdxError");
const config_1 = require("./config");
const propertyToEnvName = property => `SFDX_${_.snakeCase(property).toUpperCase()}`;
const propertyToEnvName = (property) => `SFDX_${kit_1.snakeCase(property).toUpperCase()}`;
/**

@@ -85,2 +85,3 @@ * Aggregate global and local project config files, as well as environment variables for

if (this.getAllowedProperties().some(element => key === element.key)) {
// @ts-ignore TODO: Need to sort out object types on config stuff
return this.getConfig()[key];

@@ -151,6 +152,6 @@ }

}
if (_.get(this.getLocalConfig(), `contents[${key}]`) != null) {
if (kit_1.get(this.getLocalConfig(), `contents[${key}]`) != null) {
return this.getLocalConfig().getPath();
}
if (_.get(this.getGlobalConfig(), `contents[${key}]`) != null) {
if (kit_1.get(this.getGlobalConfig(), `contents[${key}]`) != null) {
return this.getGlobalConfig().getPath();

@@ -175,3 +176,3 @@ }

.filter((info) => !!info);
return _.sortBy(infos, 'key');
return kit_1.sortBy(infos, 'key');
}

@@ -206,3 +207,3 @@ /**

getEnvVars() {
return new Map(_.entries(this.envVars));
return new Map(ts_types_1.definiteEntries(this.envVars));
}

@@ -235,2 +236,3 @@ /**

this.setAllowedProperties(config_1.Config.getAllowedProperties());
const accumulator = {};
this.setEnvVars(this.getAllowedProperties().reduce((obj, property) => {

@@ -242,3 +244,3 @@ const val = process.env[propertyToEnvName(property.key)];

return obj;
}, {}));
}, accumulator));
// Global config must be read first so it is on the left hand of the

@@ -254,3 +256,5 @@ // object assign and is overwritten by the local config.

configs.push(this.envVars);
this.setConfig(_.reduce(configs.filter(kit_1.isObject), (result, configElement) => _.merge(result, configElement), {}));
const reduced = configs.filter(ts_types_1.isObject)
.reduce((result, configElement) => kit_1.merge(result, configElement), {});
this.setConfig(reduced);
}

@@ -299,3 +303,3 @@ /**

* Sets the env variables.
* @param {object} envVars The env variables to set.
* @param {Dictionary<string>} envVars The env variables to set.
* @private

@@ -302,0 +306,0 @@ */

@@ -18,3 +18,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
const kit_1 = require("@salesforce/kit");
const ts_types_1 = require("@salesforce/ts-types");
const fs_1 = require("fs");

@@ -75,3 +75,3 @@ const os_1 = require("os");

static async resolveRootFolder(isGlobal) {
if (!kit_1.isBoolean(isGlobal)) {
if (!ts_types_1.isBoolean(isGlobal)) {
throw new sfdxError_1.SfdxError('isGlobal must be a boolean', 'InvalidTypeForIsGlobal');

@@ -97,4 +97,4 @@ }

}
const _isGlobal = kit_1.isBoolean(config.options.isGlobal) && config.options.isGlobal;
const _isState = kit_1.isBoolean(config.options.isState) && config.options.isState;
const _isGlobal = ts_types_1.isBoolean(config.options.isGlobal) && config.options.isGlobal;
const _isState = ts_types_1.isBoolean(config.options.isState) && config.options.isState;
// Don't let users store config files in homedir without being in the

@@ -101,0 +101,0 @@ // state folder.

@@ -135,3 +135,3 @@ /**

* @param {string} key The key.
* @param {string} [group ='default'] The group. Defaults to the default group.
* @param {string} [group = 'default'] The group. Defaults to the default group.
* @returns {Optional<ConfigValue>}

@@ -147,6 +147,6 @@ */

/**
* Convert a JSON object to a {@link ConfigContents} and set it as the config contents.
* Convert an object to a {@link ConfigContents} and set it as the config contents.
* @param {object} obj The object.
*/
setContentsFromObject(obj: object): void;
setContentsFromObject<T extends object>(obj: T): void;
/**

@@ -153,0 +153,0 @@ * Sets the value for the key and group in the config object.

@@ -15,3 +15,2 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
const _ = require("lodash");
const sfdxError_1 = require("../sfdxError");

@@ -87,3 +86,3 @@ const configFile_1 = require("./configFile");

await this.read();
_.forEach(newEntries, (val, key) => this.setInGroup(key, val, group || this.defaultGroup));
Object.entries(newEntries).forEach(([key, val]) => this.setInGroup(key, val, group || this.defaultGroup));
await this.write();

@@ -198,3 +197,3 @@ return newEntries;

* @param {string} key The key.
* @param {string} [group ='default'] The group. Defaults to the default group.
* @param {string} [group = 'default'] The group. Defaults to the default group.
* @returns {Optional<ConfigValue>}

@@ -214,6 +213,7 @@ */

toObject() {
return _.entries(this.getContents()).reduce((obj, entry) => {
obj[entry[0]] = _.entries(entry[1]).reduce((subobj, subentry) => {
subobj[subentry[0]] = subentry[1];
return subobj;
return Array.from(this.getContents().entries()).reduce((obj, entry) => {
obj[entry[0]] = Array.from(entry[1].entries()).reduce((sub, subentry) => {
// @ts-ignore TODO: refactor config to not intermingle js maps and json maps
sub[subentry[0]] = subentry[1];
return sub;
}, {});

@@ -224,9 +224,11 @@ return obj;

/**
* Convert a JSON object to a {@link ConfigContents} and set it as the config contents.
* Convert an object to a {@link ConfigContents} and set it as the config contents.
* @param {object} obj The object.
*/
setContentsFromObject(obj) {
const contents = new Map(_.entries(obj));
_.entries(contents).forEach(([key, value]) => {
contents.set(key, new Map(_.entries(value)));
const contents = new Map(Object.entries(obj));
Array.from(contents.entries()).forEach(([key, value]) => {
if (value) {
contents.set(key, new Map(Object.entries(value)));
}
});

@@ -249,3 +251,3 @@ this.setContents(contents);

content = content.get(group);
if (_.isUndefined(value)) {
if (value === undefined) {
content.delete(key);

@@ -252,0 +254,0 @@ }

@@ -42,5 +42,5 @@ /**

entries(): ConfigEntry[];
get(key: any): Optional<ConfigValue>;
get(key: string): Optional<ConfigValue>;
getKeysByValue(value: ConfigValue): string[];
has(key: any): boolean;
has(key: string): boolean;
keys(): string[];

@@ -149,6 +149,6 @@ set(key: string, value: ConfigValue): ConfigContents;

/**
* Convert a JSON object to a {@link ConfigContents} and set it as the config contents.
* @param {JsonMap} obj The object.
* Convert an object to a {@link ConfigContents} and set it as the config contents.
* @param {object} obj The object.
*/
setContentsFromObject(obj: JsonMap): void;
setContentsFromObject<T extends object>(obj: T): void;
}

@@ -25,3 +25,2 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
const _ = require("lodash");
/**

@@ -43,3 +42,3 @@ * An abstract class that implements all the config management functions but

entries() {
return _.entries(this.contents);
return Array.from(this.contents.entries());
}

@@ -60,3 +59,3 @@ /**

getKeysByValue(value) {
const matchedEntries = _.filter(this.entries(), (entry) => entry[1] === value);
const matchedEntries = this.entries().filter((entry) => entry[1] === value);
// Only return the keys

@@ -77,3 +76,3 @@ return matchedEntries.map((entry) => entry[0]);

keys() {
return _.keys(this.contents);
return Array.from(this.contents.keys());
}

@@ -116,3 +115,3 @@ /**

values() {
return _.values(this.contents);
return Array.from(this.contents.values());
}

@@ -162,3 +161,4 @@ /**

toObject() {
return _.entries(this.contents).reduce((obj, entry) => {
return Array.from(this.contents.entries()).reduce((obj, entry) => {
// @ts-ignore TODO: refactor config to not intermingle js maps and json maps
obj[entry[0]] = entry[1];

@@ -169,7 +169,12 @@ return obj;

/**
* Convert a JSON object to a {@link ConfigContents} and set it as the config contents.
* @param {JsonMap} obj The object.
* Convert an object to a {@link ConfigContents} and set it as the config contents.
* @param {object} obj The object.
*/
setContentsFromObject(obj) {
this.contents = new Map(_.entries(obj));
if (obj instanceof Map) {
this.setContents(obj);
}
else {
this.contents = new Map(Object.entries(obj));
}
}
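A hedged usage sketch of the widened `setContentsFromObject` shown above: any object is accepted via the new generic signature, and (per the js implementation) a `Map` is now passed straight through to `setContents`. The `ConfigFile` import and keys are assumptions for illustration:

```typescript
import { ConfigFile } from '@salesforce/core'; // any BaseConfigStore subclass works

async function seedContents(store: ConfigFile): Promise<void> {
  // Plain objects are converted entry by entry into the internal Map...
  store.setContentsFromObject({ defaultusername: 'me@example.com', apiVersion: '42.0' });
  // ...while a Map instance is handed directly to setContents.
  store.setContentsFromObject(new Map([['defaultusername', 'me@example.com']]));
  await store.write();
}
```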

@@ -176,0 +181,0 @@ }

@@ -9,3 +9,3 @@ import { ConfigFile, ConfigOptions } from './configFile';

static getFileName(): string;
static getDefaultOptions(isGlobal?: boolean, filename?: any): ConfigOptions;
static getDefaultOptions(isGlobal?: boolean, filename?: string): ConfigOptions;
/**

@@ -12,0 +12,0 @@ * Write the config file with new contents. If no new contents are passed in

@@ -67,3 +67,3 @@ import { JsonMap, Optional } from '@salesforce/ts-types';

*/
request(request: RequestInfo | string, options?: any): Promise<object>;
request(request: RequestInfo | string, options?: object): Promise<object>;
/**

@@ -70,0 +70,0 @@ * Send REST API request with given HTTP request info, with connected session information

@@ -13,4 +13,2 @@ "use strict";

const jsforce_2 = require("jsforce");
const _ = require("lodash");
const lodash_1 = require("lodash");
const configAggregator_1 = require("./config/configAggregator");

@@ -24,3 +22,4 @@ const logger_1 = require("./logger");

*/
Promise.prototype['thenCall'] = jsforce_2.Promise.prototype.thenCall;
// @ts-ignore
Promise.prototype.thenCall = jsforce_2.Promise.prototype.thenCall;
const clientId = `sfdx toolbelt:${process.env.SFDX_SET_CLIENT_IDS || ''}`;

@@ -94,3 +93,3 @@ exports.SFDX_HTTP_HEADERS = {

async request(request, options) {
const _request = kit_1.isString(request) ? { method: 'GET', url: request } : request;
const _request = ts_types_1.isString(request) ? { method: 'GET', url: request } : request;
_request.headers = Object.assign({}, exports.SFDX_HTTP_HEADERS, _request.headers);

@@ -111,3 +110,3 @@ this.logger.debug(`request: ${JSON.stringify(_request)}`);

const _headers = this.accessToken ? { Authorization: `Bearer ${this.accessToken}` } : {};
_.merge(_headers, exports.SFDX_HTTP_HEADERS, request.headers);
kit_1.merge(_headers, exports.SFDX_HTTP_HEADERS, request.headers);
return this._transport.httpRequest({

@@ -134,3 +133,3 @@ method: request.method,

this.logger.debug(`response for org versions: ${versions}`);
const max = ts_types_1.ensure(lodash_1.maxBy(versions, version => version.version));
const max = ts_types_1.ensure(kit_1.maxBy(versions, version => version.version));
return max.version;

@@ -137,0 +136,0 @@ }

@@ -5,6 +5,6 @@ import { Optional } from '@salesforce/ts-types';

static create(): Promise<Crypto>;
private _key;
private messages;
private noResetOnClose;
private _key;
constructor(keyChain?: any);
constructor(keyChain?: import("./keyChainImpl").KeychainAccess | import("./keyChainImpl").GenericUnixKeychainAccess | import("./keyChainImpl").GenericWindowsKeychainAccess | undefined);
/**

@@ -20,15 +20,15 @@ * Initialize any crypto dependencies. In this case we need to generate an encryption key.

*
* @param {string} text The text to encrypt.
* @param {string} [text] The text to encrypt.
* @returns {Optional<string>} The encrypted string or undefined if no string was passed.
*/
encrypt(text: any): Optional<string>;
encrypt(text?: string): Optional<string>;
/**
* Decrypts text.
* @param text The text to decrypt.
* @param {string} [text] The text to decrypt.
* @returns {Optional<string>} If enableTokenEncryption is set to false or not defined in package.json then the text
* is simply returned. The text is then assumed to be unencrypted.
*/
decrypt(text: any): Optional<string>;
decrypt(text?: string): Optional<string>;
close(): void;
private getKeyChain;
}
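Given the tightened `encrypt`/`decrypt` signatures above (optional `string` in, `Optional<string>` out), a small round-trip sketch; the import path is an assumption and actual behaviour depends on the host keychain:

```typescript
import { Crypto } from '@salesforce/core/lib/crypto'; // hypothetical path

async function roundTrip(secret?: string): Promise<void> {
  const crypto = await Crypto.create();
  // undefined input now simply yields undefined rather than a type error
  const encrypted = crypto.encrypt(secret);
  const decrypted = crypto.decrypt(encrypted);
  console.log(decrypted === secret); // expect the original value (or both undefined)
  crypto.close();
}
```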

@@ -9,2 +9,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
const ts_types_1 = require("@salesforce/ts-types");
const crypto = require("crypto");

@@ -37,6 +38,5 @@ const os = require("os");

return new Promise((resolve, reject) => _keychain.getPassword({ service, account }, (err, password) => {
if (err) {
if (err)
return reject(err);
}
return resolve({ username: account, password });
return resolve({ username: account, password: ts_types_1.ensure(password) });
}));

@@ -51,6 +51,5 @@ },

setPassword(_keychain, service, account, password) {
return new Promise((resolve, reject) => _keychain.setPassword({ service, account, password }, err => {
if (err) {
return new Promise((resolve, reject) => _keychain.setPassword({ service, account, password }, (err) => {
if (err)
return reject(err);
}
return resolve({ username: account, password });

@@ -99,3 +98,3 @@ }));

// Create a new password in the KeyChain.
await keychainPromises.setPassword(this.keyChain, KEY_NAME, ACCOUNT, key);
await keychainPromises.setPassword(ts_types_1.ensure(this.keyChain), KEY_NAME, ACCOUNT, key);
return this.init('KEY_SET', platform);

@@ -111,3 +110,3 @@ }

*
* @param {string} text The text to encrypt.
* @param {string} [text] The text to encrypt.
* @returns {Optional<string>} The encrypted string or undefined if no string was passed.

@@ -117,3 +116,3 @@ */

if (text == null) {
return undefined;
return;
}

@@ -135,3 +134,3 @@ if (this._key == null) {

* Decrypts text.
* @param text The text to decrypt.
* @param {string} [text] The text to decrypt.
* @returns {Optional<string>} If enableTokenEncryption is set to false or not defined in package.json then the text

@@ -142,3 +141,3 @@ * is simply returned. The text is then assumed to be unencrypted.

if (text == null) {
return undefined;
return;
}

@@ -145,0 +144,0 @@ const tokens = text.split(TAG_DELIMITER);

@@ -21,3 +21,3 @@ export { Aliases, AliasGroup } from './config/aliases';

export { PollingOptions, DefaultPollingOptions, PollingClient } from './status/pollingClient';
export { CometClient, CometSubscription, DefaultStreamingOptions, StreamingClient, StreamingConnectionState, StreamingTimeoutError, StreamingOptions } from './status/streamingClient';
export { CometClient, CometSubscription, DefaultStreamingOptions, StreamingClient, StreamingConnectionState, StreamingTimeoutErrorType, StreamingOptions } from './status/streamingClient';
export { Time, TIME_UNIT } from './util/time';

@@ -24,0 +24,0 @@ export { DefaultUserFields, REQUIRED_FIELDS, User, UserFields } from './user';

@@ -64,3 +64,3 @@ "use strict";

exports.StreamingConnectionState = streamingClient_1.StreamingConnectionState;
exports.StreamingTimeoutError = streamingClient_1.StreamingTimeoutError;
exports.StreamingTimeoutErrorType = streamingClient_1.StreamingTimeoutErrorType;
var time_1 = require("./util/time");

@@ -67,0 +67,0 @@ exports.Time = time_1.Time;

@@ -9,2 +9,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
const kit_1 = require("@salesforce/kit");
const os = require("os");

@@ -44,3 +45,3 @@ const path = require("path");

static getEnvironmentMode() {
return Mode[(process.env.SFDX_ENV || Mode.PRODUCTION).toUpperCase()];
return Mode[kit_1.env.getKeyOf('SFDX_ENV', Mode, Mode.PRODUCTION, value => value.toUpperCase())];
}

@@ -47,0 +48,0 @@ /**

@@ -8,2 +8,2 @@ import { KeyChain } from './keyChainImpl';

*/
export declare const retrieveKeychain: (platform: any) => Promise<KeyChain>;
export declare const retrieveKeychain: (platform: string) => Promise<KeyChain>;
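A minimal usage sketch for the now string-typed `platform` parameter; the module path is assumed, only the signature comes from the declaration above:

```typescript
import { retrieveKeychain } from '@salesforce/core/lib/keyChain'; // hypothetical path

async function loadKeychain() {
  // Node's process.platform ('darwin', 'linux', 'win32', ...) satisfies the new string type
  return retrieveKeychain(process.platform);
}
```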

@@ -1,6 +0,14 @@

import { Optional } from '@salesforce/ts-types';
/// <reference types="node" />
import { Nullable } from '@salesforce/ts-types';
import * as childProcess from 'child_process';
import * as nodeFs from 'fs';
export declare type FsIfc = Pick<typeof nodeFs, 'statSync'>;
export interface PasswordStore {
getPassword(opts: ProgramOpts, fn: (error: Nullable<Error>, password?: string) => void, retryCount?: number): Promise<void>;
setPassword(opts: ProgramOpts, fn: (error: Nullable<Error>, password?: string) => void): Promise<void>;
}
/**
* @private
*/
export declare class KeychainAccess {
export declare class KeychainAccess implements PasswordStore {
private osImpl;

@@ -14,3 +22,3 @@ private fsIfc;

*/
constructor(osImpl: any, fsIfc: any);
constructor(osImpl: OsImpl, fsIfc: FsIfc);
validateProgram(): Promise<void>;

@@ -22,5 +30,4 @@ /**

* @param retryCount Used internally to track the number of retries for getting a password out of the keychain.
* @returns {Promise<Optional<string>>}
*/
getPassword(opts: any, fn: any, retryCount?: number): Promise<Optional<string>>;
getPassword(opts: ProgramOpts, fn: (error: Nullable<Error>, password?: string) => void, retryCount?: number): Promise<void>;
/**

@@ -31,11 +38,25 @@ * Sets a password using the native program for credential management.

*/
setPassword(opts: any, fn: any): Promise<void>;
setPassword(opts: ProgramOpts, fn: (error: Nullable<Error>, password?: string) => void): Promise<void>;
}
interface ProgramOpts {
account: string;
service: string;
password?: string;
}
interface OsImpl {
getProgram(): string;
getProgramOptions(opts: ProgramOpts): string[];
getCommandFunc(opts: ProgramOpts, fn: (program: string, opts: string[]) => childProcess.ChildProcess): childProcess.ChildProcess;
onGetCommandClose(code: number, stdout: string, stderr: string, opts: ProgramOpts, fn: (err: Nullable<Error>, result?: string) => void): Promise<void>;
setProgramOptions(opts: ProgramOpts): string[];
setCommandFunc(opts: ProgramOpts, fn: (program: string, opts: string[]) => childProcess.ChildProcess): childProcess.ChildProcess;
onSetCommandClose(code: number, stdout: string, stderr: string, opts: ProgramOpts, fn: (err: Nullable<Error>) => void): Promise<void>;
}
/**
* @private
*/
export declare class GenericKeychainAccess {
getPassword(opts: any, fn: any): Promise<any>;
setPassword(opts: any, fn: any): Promise<any>;
protected isValidFileAccess(cb: (val?: any) => Promise<void>): Promise<void>;
export declare class GenericKeychainAccess implements PasswordStore {
getPassword(opts: ProgramOpts, fn: (error: Nullable<Error>, password?: string) => void): Promise<void>;
setPassword(opts: ProgramOpts, fn: (error: Nullable<Error>, password?: string) => void): Promise<void>;
protected isValidFileAccess(cb: (error: Nullable<NodeJS.ErrnoException>) => Promise<void>): Promise<void>;
}

@@ -46,3 +67,3 @@ /**

export declare class GenericUnixKeychainAccess extends GenericKeychainAccess {
protected isValidFileAccess(cb: (val?: any) => Promise<void>): Promise<void>;
protected isValidFileAccess(cb: (error: Nullable<Error>) => Promise<void>): Promise<void>;
}

@@ -62,4 +83,5 @@ /**

linux: KeychainAccess;
validateProgram: (programPath: any, fsIfc: any, isExeIfc: any) => Promise<void>;
validateProgram: (programPath: string, fsIfc: Pick<typeof nodeFs, "statSync">, isExeIfc: (mode: number, gid: number, uid: number) => boolean) => Promise<void>;
};
export declare type KeyChain = GenericUnixKeychainAccess | GenericWindowsKeychainAccess | KeychainAccess;
export {};
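The new `PasswordStore` interface above makes it possible to write alternative credential stores against the same callback contract. A purely illustrative in-memory implementation, using only the shapes declared in this diff:

```typescript
import { Nullable } from '@salesforce/ts-types';

interface ProgramOpts { account: string; service: string; password?: string; }
interface PasswordStore {
  getPassword(opts: ProgramOpts, fn: (error: Nullable<Error>, password?: string) => void, retryCount?: number): Promise<void>;
  setPassword(opts: ProgramOpts, fn: (error: Nullable<Error>, password?: string) => void): Promise<void>;
}

class InMemoryPasswordStore implements PasswordStore {
  private secrets = new Map<string, string>();

  public async getPassword(opts: ProgramOpts, fn: (error: Nullable<Error>, password?: string) => void): Promise<void> {
    const password = this.secrets.get(`${opts.service}:${opts.account}`);
    if (password) {
      fn(null, password);
    } else {
      fn(new Error('PasswordNotFound'));
    }
  }

  public async setPassword(opts: ProgramOpts, fn: (error: Nullable<Error>, password?: string) => void): Promise<void> {
    this.secrets.set(`${opts.service}:${opts.account}`, opts.password || '');
    fn(null, opts.password);
  }
}
```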

@@ -87,3 +87,2 @@ "use strict";

* @param retryCount Used internally to track the number of retries for getting a password out of the keychain.
* @returns {Promise<Optional<string>>}
*/

@@ -146,4 +145,4 @@ async getPassword(opts, fn, retryCount = 0) {

let stderr = '';
credManager.stdout.on('data', data => { stdout += data; });
credManager.stderr.on('data', data => { stderr += data; });
credManager.stdout.on('data', (data) => { stdout += data; });
credManager.stderr.on('data', (data) => { stderr += data; });
credManager.on('close', async (code) => await this.osImpl.onSetCommandClose(code, stdout, stderr, opts, fn));

@@ -180,3 +179,4 @@ credManager.stdin.end();

stderr.includes('invalid or unencryptable secret')) {
error['retry'] = true;
// @ts-ignore TODO: make an error subclass with this field
error.retry = true;
// Throwing here allows us to perform a retry in KeychainAccess

@@ -240,3 +240,3 @@ throw error;

}
fn(err, null);
fn(err);
return;

@@ -247,4 +247,10 @@ }

if (/password/.test(stderr)) {
const password = stderr.match(/"(.*)"/, '')[1];
fn(null, password);
const match = stderr.match(/"(.*)"/);
if (!match || !match[1]) {
const errorConfig = new sfdxError_1.SfdxErrorConfig('@salesforce/core', 'encryption', 'PasswordNotFoundError', [`\n${stdout} - ${stderr}`], 'PasswordNotFoundErrorAction');
fn(sfdxError_1.SfdxError.create(errorConfig));
}
else {
fn(null, match[1]);
}
}

@@ -258,4 +264,8 @@ else {

setProgramOptions(opts) {
return ['add-generic-password', '-a', opts.account, '-s',
opts.service, '-w', opts.password];
const result = ['add-generic-password', '-a', opts.account, '-s',
opts.service];
if (opts.password) {
result.push('-w', opts.password);
}
return result;
},

@@ -280,3 +290,3 @@ setCommandFunc(opts, fn) {

config.set(SecretFields.ACCOUNT, opts.account);
config.set(SecretFields.KEY, opts.password);
config.set(SecretFields.KEY, opts.password || '');
config.set(SecretFields.SERVICE, opts.service);

@@ -310,3 +320,5 @@ await config.write();

if ((opts.service === config.get(SecretFields.SERVICE)) && (opts.account === config.get(SecretFields.ACCOUNT))) {
fn(null, config.get(SecretFields.KEY));
const key = config.get(SecretFields.KEY);
// @ts-ignore TODO: Remove this ignore if we ever factor out `object` from `ConfigValue`
fn(null, ts_types_1.asString(key));
}

@@ -383,3 +395,3 @@ else {

if (octalModeStr === EXPECTED_OCTAL_PERM_VALUE) {
await cb();
await cb(null);
}

@@ -386,0 +398,0 @@ else {

@@ -201,3 +201,3 @@ /// <reference types="node" />

*
* @param {function} filter A function with signature `(...args) => any[]` that transforms log message arguments.
* @param {function} filter A function with signature `(...args: any[]) => any[]` that transforms log message arguments.
*/

@@ -204,0 +204,0 @@ addFilter(filter: (...args: any[]) => any[]): void;

@@ -80,7 +80,9 @@ "use strict";

*/
// tslint:disable-next-line:ordered-imports
const kit_1 = require("@salesforce/kit");
const ts_types_1 = require("@salesforce/ts-types");
// @ts-ignore No typings available for our copy of bunyan
const Bunyan = require("bunyan-sfdx-no-dtrace");
const createDebugUtil = require("debug");
const Debug = require("debug");
const EventEmitter = require("events");
const _ = require("lodash");
const os = require("os");

@@ -183,3 +185,3 @@ const path = require("path");

const debuggers = {};
debuggers['core'] = createDebugUtil(`${rootLogger.getName()}:core`);
debuggers.core = Debug(`${rootLogger.getName()}:core`);
rootLogger.addStream({

@@ -189,11 +191,12 @@ name: 'debug',

write: (chunk, encoding, next) => {
const json = JSON.parse(chunk.toString());
const json = kit_1.parseJsonMap(chunk.toString());
let debuggerName = 'core';
if (json['log']) {
debuggerName = json['log'];
if (ts_types_1.isString(json.log)) {
debuggerName = json.log;
if (!debuggers[debuggerName]) {
debuggers[debuggerName] = createDebugUtil(`${rootLogger.getName()}:${debuggerName}`);
debuggers[debuggerName] = Debug(`${rootLogger.getName()}:${debuggerName}`);
}
}
debuggers[debuggerName](`${LoggerLevel[json.level]} ${json.msg}`);
const level = LoggerLevel[ts_types_1.ensureNumber(json.level)];
ts_types_1.ensure(debuggers[debuggerName])(`${level} ${json.msg}`);
next();

@@ -238,7 +241,7 @@ }

static getLevelByName(levelName) {
const level = LoggerLevel[levelName && levelName.toUpperCase()];
if (level == null) {
levelName = levelName.toUpperCase();
if (!ts_types_1.isKeyOf(levelName, LoggerLevel)) {
throw new sfdxError_1.SfdxError('UnrecognizedLoggerLevelName');
}
return level;
return LoggerLevel[levelName];
}
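A quick usage note for the rewritten lookup: the name is upper-cased and checked with `isKeyOf` before indexing the enum, so behaviour for valid names is unchanged. The exports below are assumptions:

```typescript
import { Logger, LoggerLevel } from '@salesforce/core'; // exports assumed

const level = Logger.getLevelByName('warn');
console.log(level === LoggerLevel.WARN); // true
// Unrecognized names still throw SfdxError('UnrecognizedLoggerLevelName').
```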

@@ -280,3 +283,4 @@ /**

// avoid multiple streams to same log file
if (!this.bunyan.streams.find(stream => stream.type === 'file' && stream.path === logFile)) {
// tslint:disable-next-line:no-any No bunyan typings
if (!this.bunyan.streams.find((stream) => stream.type === 'file' && stream.path === logFile)) {
// TODO: rotating-file

@@ -391,2 +395,3 @@ // https://github.com/trentm/node-bunyan#stream-type-rotating-file

let content = '';
// tslint:disable-next-line:no-any No bunyan typings
this.bunyan.streams.forEach(async (stream) => {

@@ -403,3 +408,3 @@ if (stream.type === 'file') {

*
* @param {function} filter A function with signature `(...args) => any[]` that transforms log message arguments.
* @param {function} filter A function with signature `(...args: any[]) => any[]` that transforms log message arguments.
*/

@@ -421,3 +426,4 @@ addFilter(filter) {

try {
this.bunyan.streams.forEach(entry => {
// tslint:disable-next-line:no-any No bunyan typings
this.bunyan.streams.forEach((entry) => {
if (fn) {

@@ -533,3 +539,4 @@ fn(entry);

if (this.shouldLog(logLevel)) {
this.bunyan.filters.forEach(filter => args = filter(...args));
// tslint:disable-next-line:no-any No bunyan typings
this.bunyan.filters.forEach((filter) => args = filter(...args));
}

@@ -559,3 +566,3 @@ return args && args.length === 1 ? args[0] : args;

Logger.LEVEL_NAMES = Object.values(LoggerLevel)
.filter(v => kit_1.isString(v))
.filter(v => ts_types_1.isString(v))
.map(v => v.toLowerCase());

@@ -583,47 +590,49 @@ // Rollup all instance-specific process event listeners together to prevent global `MaxListenersExceededWarning`s.

// SFDX code and plugins should never show tokens or connect app information in the logs
const _filter = (...args) => args.map(arg => {
if (_.isArray(arg)) {
return _filter(...arg);
}
if (arg) {
let _arg = arg;
// Normalize all objects into a string. This include errors.
if (kit_1.isObject(arg)) {
_arg = JSON.stringify(arg);
const _filter = (...args) => {
return args.map(arg => {
if (ts_types_1.isArray(arg)) {
return _filter(...arg);
}
const HIDDEN = 'HIDDEN';
FILTERED_KEYS.forEach((key) => {
let expElement = key;
let expName = key;
// Filtered keys can be strings or objects containing regular expression components.
if (kit_1.isPlainObject(key)) {
expElement = key['regex'];
expName = key['name'];
if (arg) {
let _arg = arg;
// Normalize all objects into a string. This include errors.
if (ts_types_1.isObject(arg)) {
_arg = JSON.stringify(arg);
}
const hiddenAttrMessage = `"<${expName} - ${HIDDEN}>"`;
// Match all json attribute values case insensitive: ex. {" Access*^&(*()^* Token " : " 45143075913458901348905 \n\t" ...}
const regexTokens = new RegExp(`(['"][^'"]*${expElement}[^'"]*['"]\\s*:\\s*)['"][^'"]*['"]`, 'gi');
_arg = _arg.replace(regexTokens, `$1${hiddenAttrMessage}`);
// Match all key value attribute case insensitive: ex. {" key\t" : ' access_token ' , " value " : " dsafgasr431 " ....}
const keyRegex = new RegExp(`(['"]\\s*key\\s*['"]\\s*:)\\s*['"]\\s*${expElement}\\s*['"]\\s*.\\s*['"]\\s*value\\s*['"]\\s*:\\s*['"]\\s*[^'"]*['"]`, 'gi');
_arg = _arg.replace(keyRegex, `$1${hiddenAttrMessage}`);
});
// This is a jsforce message we are masking. This can be removed after the following pull request is committed
// and pushed to a jsforce release.
//
// Looking For: "Refreshed access token = ..."
// Related Jsforce pull requests:
// https://github.com/jsforce/jsforce/pull/598
// https://github.com/jsforce/jsforce/pull/608
// https://github.com/jsforce/jsforce/pull/609
const jsForceTokenRefreshRegEx = new RegExp('Refreshed(.*)access(.*)token(.*)=\\s*[^\'"\\s*]*');
_arg = _arg.replace(jsForceTokenRefreshRegEx, `<refresh_token - ${HIDDEN}>`);
_arg = _arg.replace(/sid=(.*)/, `sid=<${HIDDEN}>`);
// return an object if an object was logged; otherwise return the filtered string.
return kit_1.isObject(arg) ? JSON.parse(_arg) : _arg;
}
else {
return arg;
}
});
const HIDDEN = 'HIDDEN';
FILTERED_KEYS.forEach((key) => {
let expElement = key;
let expName = key;
// Filtered keys can be strings or objects containing regular expression components.
if (ts_types_1.isPlainObject(key)) {
expElement = key.regex;
expName = key.name;
}
const hiddenAttrMessage = `"<${expName} - ${HIDDEN}>"`;
// Match all json attribute values case insensitive: ex. {" Access*^&(*()^* Token " : " 45143075913458901348905 \n\t" ...}
const regexTokens = new RegExp(`(['"][^'"]*${expElement}[^'"]*['"]\\s*:\\s*)['"][^'"]*['"]`, 'gi');
_arg = _arg.replace(regexTokens, `$1${hiddenAttrMessage}`);
// Match all key value attribute case insensitive: ex. {" key\t" : ' access_token ' , " value " : " dsafgasr431 " ....}
const keyRegex = new RegExp(`(['"]\\s*key\\s*['"]\\s*:)\\s*['"]\\s*${expElement}\\s*['"]\\s*.\\s*['"]\\s*value\\s*['"]\\s*:\\s*['"]\\s*[^'"]*['"]`, 'gi');
_arg = _arg.replace(keyRegex, `$1${hiddenAttrMessage}`);
});
// This is a jsforce message we are masking. This can be removed after the following pull request is committed
// and pushed to a jsforce release.
//
// Looking For: "Refreshed access token = ..."
// Related Jsforce pull requests:
// https://github.com/jsforce/jsforce/pull/598
// https://github.com/jsforce/jsforce/pull/608
// https://github.com/jsforce/jsforce/pull/609
const jsForceTokenRefreshRegEx = new RegExp('Refreshed(.*)access(.*)token(.*)=\\s*[^\'"\\s*]*');
_arg = _arg.replace(jsForceTokenRefreshRegEx, `<refresh_token - ${HIDDEN}>`);
_arg = _arg.replace(/sid=(.*)/, `sid=<${HIDDEN}>`);
// return an object if an object was logged; otherwise return the filtered string.
return ts_types_1.isObject(arg) ? kit_1.parseJson(_arg) : _arg;
}
else {
return arg;
}
});
};
//# sourceMappingURL=logger.js.map

@@ -17,3 +17,2 @@ "use strict";

const fs = require("fs");
const _ = require("lodash");
const path = require("path");

@@ -132,3 +131,3 @@ const util = require("util");

json = JSON.parse(fileContents);
if (!_.isObject(json)) {
if (!ts_types_1.isObject(json)) {
// Bubble up

@@ -135,0 +134,0 @@ throw new Error(`Unexpected token. Found returned content type '${typeof json}'.`);

@@ -62,6 +62,6 @@ import { AnyJson, Dictionary, Optional } from '@salesforce/ts-types';

static create(connection?: Connection, aggregator?: ConfigAggregator, isDevHub?: boolean): Promise<Org>;
private status;
private configAggregator;
private logger;
private connection;
private status;
private configAggregator;
/**

@@ -88,3 +88,3 @@ * **Do not directly construct instances of this class -- use {@link Org.create} instead.**

*/
remove(throwWhenRemoveFails?: false): Promise<void>;
remove(throwWhenRemoveFails?: boolean): Promise<void>;
/**

@@ -91,0 +91,0 @@ * Check that this org is a scratch org by asking the dev hub if it knows about it.

@@ -32,3 +32,2 @@ "use strict";

const ts_types_1 = require("@salesforce/ts-types");
const lodash_1 = require("lodash");
const path_1 = require("path");

@@ -130,4 +129,4 @@ const authInfo_1 = require("./authInfo");

connection = isDevHub ?
ts_types_1.asString(lodash_1.get(_aggregator.getInfo(config_1.Config.DEFAULT_DEV_HUB_USERNAME), 'value')) :
ts_types_1.asString(lodash_1.get(_aggregator.getInfo(config_1.Config.DEFAULT_USERNAME), 'value'));
ts_types_1.asString(kit_1.get(_aggregator.getInfo(config_1.Config.DEFAULT_DEV_HUB_USERNAME), 'value')) :
ts_types_1.asString(kit_1.get(_aggregator.getInfo(config_1.Config.DEFAULT_USERNAME), 'value'));
if (!connection) {

@@ -137,3 +136,3 @@ throw new sfdxError_1.SfdxError(`No ${isDevHub ? 'default Devhub' : 'default'} username or Connection found.`, 'NoUsername');

}
if (kit_1.isString(connection)) {
if (ts_types_1.isString(connection)) {
org.logger.debug('connection type is string');

@@ -183,3 +182,3 @@ const aliasValue = await aliases_1.Aliases.fetch(connection);

*/
async remove(throwWhenRemoveFails) {
async remove(throwWhenRemoveFails = false) {
// If deleting via the access token there shouldn't be any auth config files

@@ -213,3 +212,3 @@ // so just return;

const orgUsers = await this.retrieveOrgUsersConfig();
this.manageDelete(async () => await orgUsers.unlink(), orgUsers.getPath(), throwWhenRemoveFails);
await this.manageDelete(async () => await orgUsers.unlink(), orgUsers.getPath(), throwWhenRemoveFails);
}

@@ -244,3 +243,3 @@ await aliases.write();

}
if (lodash_1.get(results, 'records.length') !== 1) {
if (kit_1.get(results, 'records.length') !== 1) {
throw new sfdxError_1.SfdxError('No results', 'NoResults');

@@ -313,3 +312,3 @@ }

}
const _auth = kit_1.isString(auth) ? await authInfo_1.AuthInfo.create(auth) : auth;
const _auth = ts_types_1.isString(auth) ? await authInfo_1.AuthInfo.create(auth) : auth;
this.logger.debug(`adding username ${_auth.getFields().username}`);

@@ -321,3 +320,3 @@ const orgConfig = await this.retrieveOrgUsersConfig();

const usernames = ts_types_1.asJsonArray(ts_types_1.asAnyJson(contents.get('usernames'))) || [];
if (!Array.isArray(usernames)) {
if (!ts_types_1.isArray(usernames)) {
throw new sfdxError_1.SfdxError('Usernames is not an array', 'UnexpectedDataFormat');

@@ -352,3 +351,3 @@ }

}
const _auth = kit_1.isString(auth) ? await authInfo_1.AuthInfo.create(auth) : auth;
const _auth = ts_types_1.isString(auth) ? await authInfo_1.AuthInfo.create(auth) : auth;
this.logger.debug(`removing username ${_auth.getFields().username}`);

@@ -358,3 +357,4 @@ const orgConfig = await this.retrieveOrgUsersConfig();

const targetUser = _auth.getFields().username;
contents.set('usernames', lodash_1.filter(contents.get('usernames'), username => username !== targetUser));
const usernames = (contents.get('usernames') || []);
contents.set('usernames', usernames.filter(username => username !== targetUser));
await orgConfig.write();

@@ -398,2 +398,3 @@ return this;

getField(key) {
// @ts-ignore TODO: Need to refactor storage of these values on both Org and AuthFields
return this[key] || this.getConnection().getAuthInfoFields()[key];

@@ -406,3 +407,6 @@ }

getFields(keys) {
return keys.reduce((map, key) => { map[key] = this.getField(key); return map; }, {});
return keys.reduce((map, key) => {
map[key] = this.getField(key);
return map;
}, {});
}

@@ -409,0 +413,0 @@ /**

@@ -10,3 +10,2 @@ "use strict";

const kit_1 = require("@salesforce/kit");
const _ = require("lodash");
const os_1 = require("os");

@@ -57,3 +56,3 @@ const logger_1 = require("./logger");

const result = await this.org.getConnection().query(query);
const permissionSetId = _.get(result, 'records[0].Id');
const permissionSetId = kit_1.get(result, 'records[0].Id');
if (!permissionSetId) {

@@ -73,3 +72,3 @@ if (nsPrefix) {

createResponse = await this.org.getConnection().sobject('PermissionSetAssignment')
.create(_.mapKeys(assignment, (value, key) => kit_1.upperFirst(key)));
.create(kit_1.mapKeys(assignment, (value, key) => kit_1.upperFirst(key)));
if (createResponse.length) {

@@ -88,3 +87,3 @@ throw sfdxError_1.SfdxError.create('@salesforce/core', 'permissionSetAssignment', 'unexpectedType');

message = `${message}:${os_1.EOL}`;
_.each(createResponse.errors, _message => {
errors.forEach(_message => {
message = `${message}${_message}${os_1.EOL}`;

@@ -91,0 +90,0 @@ });

@@ -10,3 +10,3 @@ import { AnyJson, JsonMap } from '@salesforce/ts-types';

private readonly logger;
private schema;
private schema?;
/**

@@ -13,0 +13,0 @@ * Creates a new `SchemaValidator` instance given a logger and path to a schema file.

@@ -10,2 +10,3 @@ "use strict";

const kit_1 = require("@salesforce/kit");
const ts_types_1 = require("@salesforce/ts-types");
const validator = require("jsen");

@@ -87,3 +88,3 @@ const path = require("path");

(await Promise.all(promises)).forEach(externalSchema => {
if (kit_1.isString(externalSchema.id)) {
if (ts_types_1.isString(externalSchema.id)) {
externalSchemas[externalSchema.id] = externalSchema;

@@ -128,5 +129,11 @@ }

const property = error.path.match(/^([a-zA-Z0-9\.]+)\.([a-zA-Z0-9]+)$/);
const getPropValue = prop => {
const getPropValue = (prop) => {
const reducer = (obj, name) => {
return (obj.properties && obj.properties[name]) || (name === '0' && obj.items) || obj[name] || obj[prop];
if (!ts_types_1.isJsonMap(obj))
return;
if (ts_types_1.isJsonMap(obj.properties))
return obj.properties[name];
if (name === '0')
return ts_types_1.asJsonArray(obj.items);
return obj[name] || obj[prop];
};

@@ -136,4 +143,4 @@ return error.path.split('.').reduce(reducer, schema);

const getEnumValues = () => {
const enumSchema = getPropValue('enum');
return enumSchema && enumSchema.enum ? enumSchema.enum.join(', ') : '';
const enumSchema = ts_types_1.asJsonMap(getPropValue('enum'));
return enumSchema && ts_types_1.getAsJsonArray(enumSchema, 'enum', []).join(', ') || '';
};

@@ -140,0 +147,0 @@ switch (error.keyword) {

@@ -18,5 +18,5 @@ /// <reference types="node" />

export declare class SecureBuffer<T> {
private secret;
private key;
private iv;
private secret?;
/**

@@ -23,0 +23,0 @@ * Invokes a callback with a decrypted version of the buffer.

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const ts_types_1 = require("@salesforce/ts-types");
const crypto = require("crypto");

@@ -35,3 +36,3 @@ const cipherName = 'aes256';

const cipher = crypto.createDecipheriv(cipherName, this.key, this.iv);
const a = cipher.update(this.secret);
const a = cipher.update(ts_types_1.ensure(this.secret));
const b = cipher.final();

@@ -38,0 +39,0 @@ const c = Buffer.concat([a, b]);

@@ -24,12 +24,12 @@ import { NamedError } from '@salesforce/kit';

private errorTokens;
private messages;
private messages?;
private actions;
/**
* Create a new SfdxErrorConfig.
* @param packageName {string} The name of the package.
* @param bundleName {string} The message bundle.
* @param errorKey {string} The error message key.
* @param errorTokens {Tokens} The tokens to use when getting the error message.
* @param [actionKey] {string} The action message key.
* @param [actionTokens] {Tokens} The tokens to use when getting the action message(s).
* @param {string} packageName The name of the package.
* @param {string} bundleName The message bundle.
* @param {string} errorKey The error message key.
* @param {Tokens} errorTokens The tokens to use when getting the error message.
* @param {string} [actionKey] The action message key.
* @param {Tokens} [actionTokens] The tokens to use when getting the action message(s).
*/

@@ -121,6 +121,2 @@ constructor(packageName: string, bundleName: string, errorKey: string, errorTokens?: Tokens, actionKey?: string, actionTokens?: Tokens);

/**
* The error name
*/
name: string;
/**
* The message string. Error.message

@@ -140,3 +136,3 @@ */

*/
commandName: string;
commandName?: string;
data: any;

@@ -143,0 +139,0 @@ /**

@@ -21,8 +21,8 @@ "use strict";

* Create a new SfdxErrorConfig.
* @param packageName {string} The name of the package.
* @param bundleName {string} The message bundle.
* @param errorKey {string} The error message key.
* @param errorTokens {Tokens} The tokens to use when getting the error message.
* @param [actionKey] {string} The action message key.
* @param [actionTokens] {Tokens} The tokens to use when getting the action message(s).
* @param {string} packageName The name of the package.
* @param {string} bundleName The message bundle.
* @param {string} errorKey The error message key.
* @param {Tokens} errorTokens The tokens to use when getting the error message.
* @param {string} [actionKey] The action message key.
* @param {Tokens} [actionTokens] The tokens to use when getting the action message(s).
*/

@@ -98,3 +98,6 @@ constructor(packageName, bundleName, errorKey, errorTokens = [], actionKey, actionTokens) {

this.actions.forEach((tokens, key) => {
actions.push(this.messages.getMessage(key, tokens));
const messages = this.messages;
if (messages) {
actions.push(messages.getMessage(key, tokens));
}
});

@@ -138,3 +141,3 @@ return actions;

let errorConfig;
if (kit_1.isString(nameOrConfig)) {
if (ts_types_1.isString(nameOrConfig)) {
errorConfig = new SfdxErrorConfig(nameOrConfig, ts_types_1.ensure(bundleName), ts_types_1.ensure(key), tokens);

@@ -141,0 +144,0 @@ }

@@ -9,3 +9,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
const _ = require("lodash");
const kit_1 = require("@salesforce/kit");
const configAggregator_1 = require("./config/configAggregator");

@@ -139,8 +139,8 @@ const configFile_1 = require("./config/configFile");

await local.read();
const defaults = {
const defaultValues = {
sfdcLoginUrl: 'https://login.salesforce.com'
};
this.projectConfig = _.defaults(local.toObject(), global.toObject(), defaults);
this.projectConfig = kit_1.defaults(local.toObject(), global.toObject(), defaultValues);
// Add fields in sfdx-config.json
_.assign(this.projectConfig, (await configAggregator_1.ConfigAggregator.create()).getConfig());
Object.assign(this.projectConfig, (await configAggregator_1.ConfigAggregator.create()).getConfig());
// LEGACY - Allow override of sfdcLoginUrl via env var FORCE_SFDC_LOGIN_URL

@@ -147,0 +147,0 @@ if (process.env.FORCE_SFDC_LOGIN_URL) {

@@ -63,4 +63,4 @@ import { Logger } from '../logger';

private options;
private timeout;
private interval;
private timeout?;
private interval?;
/**

@@ -67,0 +67,0 @@ * Constructor

@@ -40,3 +40,3 @@ /// <reference types="node" />

/**
* Subscribes to Comet topics. Subscribe should perform a handshake if one hasn't benn performed yet.
* Subscribes to Comet topics. Subscribe should perform a handshake if one hasn't been performed yet.
* @param {string} channel The topic to subscribe to.

@@ -126,5 +126,5 @@ * @param {function(message)} callback The callback to execute once a message has been received.

*/
export declare enum StreamingTimeoutError {
HANDSHAKE = "handshake",
SUBSCRIBE = "subscribe"
export declare enum StreamingTimeoutErrorType {
HANDSHAKE = "genericHandshakeTimeoutMessage",
SUBSCRIBE = "genericTimeoutMessage"
}
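The rename from `StreamingTimeoutError` to `StreamingTimeoutErrorType`, together with the switch of its values from `'handshake'`/`'subscribe'` to message keys, is a breaking change for consumers that compared against the old strings. A hedged migration sketch, assuming the timeout error's `name` still ends up carrying the enum value through `SfdxError.create`:

```typescript
import { SfdxError, StreamingTimeoutErrorType } from '@salesforce/core';

function describeStreamingTimeout(err: SfdxError): string {
  // Was: err.name === StreamingTimeoutError.HANDSHAKE ('handshake')
  if (err.name === StreamingTimeoutErrorType.HANDSHAKE) {
    return 'Streaming handshake timed out';
  }
  if (err.name === StreamingTimeoutErrorType.SUBSCRIBE) {
    return 'Streaming subscribe timed out';
  }
  return err.message;
}
```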

@@ -233,3 +233,3 @@ /**

* Simple inner log wrapper
* @param {string} message The message to log
* @param {any} message The message to log
* @private

@@ -236,0 +236,0 @@ */

@@ -9,6 +9,7 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
const kit_1 = require("@salesforce/kit");
const ts_types_1 = require("@salesforce/ts-types");
const events_1 = require("events");
// @ts-ignore No typings are available for faye
const Faye = require("faye");
const _ = require("lodash");
const logger_1 = require("../logger");

@@ -49,3 +50,3 @@ const sfdxError_1 = require("../sfdxError");

this.apiVersion = org.getConnection().getApiVersion();
if (_.startsWith(channel, '/system')) {
if (channel.startsWith('/system')) {
this.apiVersion = '36.0';

@@ -63,4 +64,4 @@ }

Faye.logger = {};
_.each(['info', 'error', 'fatal', 'warn', 'debug'], element => {
_.set(Faye.logger, element, logLine);
['info', 'error', 'fatal', 'warn', 'debug'].forEach(element => {
kit_1.set(Faye.logger, element, logLine);
});

@@ -109,7 +110,7 @@ }

*/
var StreamingTimeoutError;
(function (StreamingTimeoutError) {
StreamingTimeoutError["HANDSHAKE"] = "handshake";
StreamingTimeoutError["SUBSCRIBE"] = "subscribe";
})(StreamingTimeoutError = exports.StreamingTimeoutError || (exports.StreamingTimeoutError = {}));
var StreamingTimeoutErrorType;
(function (StreamingTimeoutErrorType) {
StreamingTimeoutErrorType["HANDSHAKE"] = "genericHandshakeTimeoutMessage";
StreamingTimeoutErrorType["SUBSCRIBE"] = "genericTimeoutMessage";
})(StreamingTimeoutErrorType = exports.StreamingTimeoutErrorType || (exports.StreamingTimeoutErrorType = {}));
/**

@@ -234,4 +235,3 @@ * Api wrapper to support Salesforce streaming. The client contains an internal implementation of a cometd specification.

timeout = setTimeout(() => {
const timeoutError = sfdxError_1.SfdxError.create('@salesforce/core', 'streaming', 'genericHandshakeTimeoutMessage', [this.targetUrl]);
timeoutError.name = StreamingTimeoutError.HANDSHAKE;
const timeoutError = sfdxError_1.SfdxError.create('@salesforce/core', 'streaming', StreamingTimeoutErrorType.HANDSHAKE, [this.targetUrl]);
this.doTimeout(timeout, timeoutError);

@@ -264,4 +264,3 @@ reject(timeoutError);

timeout = setTimeout(() => {
const timeoutError = sfdxError_1.SfdxError.create('@salesforce/core', 'streaming', 'genericTimeoutMessage');
timeoutError.name = StreamingTimeoutError.SUBSCRIBE;
const timeoutError = sfdxError_1.SfdxError.create('@salesforce/core', 'streaming', StreamingTimeoutErrorType.SUBSCRIBE);
this.doTimeout(timeout, timeoutError);

@@ -333,5 +332,7 @@ subscribeReject(timeoutError);

// has no clientId.
if (this.cometClient['_dispatcher']) {
// @ts-ignore
if (this.cometClient._dispatcher) {
this.log('Closing the faye dispatcher');
const dispatcher = this.cometClient['_dispatcher'];
// @ts-ignore
const dispatcher = this.cometClient._dispatcher;
this.log(`dispatcher.clientId: ${dispatcher.clientId}`);

@@ -348,3 +349,3 @@ if (!dispatcher.clientId) {

* Simple inner log wrapper
* @param {string} message The message to log
* @param {any} message The message to log
* @private

@@ -351,0 +352,0 @@ */

@@ -199,5 +199,5 @@ /// <reference types="node" />

testId: string;
alias: string;
alias?: string;
username: string;
devHubUsername: string;
devHubUsername?: string;
orgId: string;

@@ -204,0 +204,0 @@ loginUrl: string;

@@ -9,5 +9,5 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
const kit_1 = require("@salesforce/kit");
const crypto_1 = require("crypto");
const events_1 = require("events");
const lodash_1 = require("lodash");
const os_1 = require("os");

@@ -107,3 +107,3 @@ const path_1 = require("path");

*/
exports.testSetup = lodash_1.once((sinon) => {
exports.testSetup = kit_1.once((sinon) => {
if (!sinon) {

@@ -142,3 +142,3 @@ try {

testContext.SANDBOX.stub(logger_1.Logger, 'child').returns(Promise.resolve(testContext.TEST_LOGGER));
testContext.SANDBOXES.CONFIG.stub(configFile_1.ConfigFile, 'resolveRootFolder').callsFake(isGlobal => testContext.rootPathRetriever(isGlobal, testContext.id));
testContext.SANDBOXES.CONFIG.stub(configFile_1.ConfigFile, 'resolveRootFolder').callsFake((isGlobal) => testContext.rootPathRetriever(isGlobal, testContext.id));
// Mock out all config file IO for all tests. They can restore individually if they need original functionality.

@@ -187,3 +187,3 @@ testContext.SANDBOXES.CONFIG.stub(configFile_1.ConfigFile.prototype, 'read').callsFake(async function () {

testContext.SANDBOX.restore();
lodash_1.forEach(testContext.SANDBOXES, theSandbox => theSandbox.restore());
Object.values(testContext.SANDBOXES).forEach(theSandbox => theSandbox.restore());
testContext.configStubs = {};

@@ -291,3 +291,5 @@ });

subscription.on('subscriptionComplete', () => {
lodash_1.forEach(this.options.messagePlaylist, message => {
if (!this.options.messagePlaylist)
return;
Object.values(this.options.messagePlaylist).forEach(message => {
setTimeout(() => {

@@ -326,3 +328,3 @@ callback(message);

makeDevHub() {
lodash_1.set(this, 'isDevHub', true);
kit_1.set(this, 'isDevHub', true);
}

@@ -377,3 +379,3 @@ createUser(user) {

}
const isDevHub = lodash_1.get(this, 'isDevHub');
const isDevHub = kit_1.get(this, 'isDevHub');
if (isDevHub) {

@@ -380,0 +382,0 @@ config.set('isDevHub', isDevHub);

@@ -42,11 +42,11 @@ import { AuthInfo } from './authInfo';

id: string;
alias: string;
emailEncodingKey: string;
languageLocaleKey: string;
lastName: string;
localeSidKey: string;
profileId: string;
timeZoneSidKey: string;
username: string;
email: string;
alias?: string;
emailEncodingKey?: string;
languageLocaleKey?: string;
lastName?: string;
localeSidKey?: string;
profileId?: string;
timeZoneSidKey?: string;
email?: string;
/**

@@ -53,0 +53,0 @@ * Constructor

@@ -11,3 +11,2 @@ "use strict";

const ts_types_1 = require("@salesforce/ts-types");
const _ = require("lodash");
const os_1 = require("os");

@@ -26,6 +25,5 @@ const authInfo_1 = require("./authInfo");

const NUMBERS = '1234567890';
// eslint-disable-next-line no-useless-escape
const SYMBOLS = ['!', '@', '#', '$', '%', '^', '&', '*', '(', ')', '_', '[', ']', '|', '-'];
const ALL = [LOWER, UPPER, NUMBERS, SYMBOLS.join('')];
const rand = len => Math.floor(Math.random() * (len.length || len));
const rand = (len) => Math.floor(Math.random() * len.length);
const scimEndpoint = '/services/scim/v1/Users';

@@ -54,3 +52,3 @@ const scimHeaders = { 'auto-approve-user': 'true' };

const connection = await connection_1.Connection.create(await authInfo_1.AuthInfo.create(username));
const fromFields = _.keys(exports.REQUIRED_FIELDS).map(value => ts_types_1.ensure(kit_1.upperFirst(value)));
const fromFields = Object.keys(exports.REQUIRED_FIELDS).map(value => ts_types_1.ensure(kit_1.upperFirst(value)));
const requiredFieldsFromAdminQuery = `SELECT ${fromFields} FROM User WHERE Username='${username}'`;

@@ -60,14 +58,14 @@ const result = await connection.query(requiredFieldsFromAdminQuery);

if (result.totalSize === 1) {
const results = _.mapKeys(result.records[0], (value, key) => kit_1.lowerFirst(key));
const results = kit_1.mapKeys(result.records[0], (value, key) => kit_1.lowerFirst(key));
const fields = {
id: _.get(results, exports.REQUIRED_FIELDS.id),
id: kit_1.get(results, exports.REQUIRED_FIELDS.id),
username,
alias: _.get(results, exports.REQUIRED_FIELDS.alias),
email: _.get(results, exports.REQUIRED_FIELDS.email),
emailEncodingKey: _.get(results, exports.REQUIRED_FIELDS.emailEncodingKey),
languageLocaleKey: _.get(results, exports.REQUIRED_FIELDS.languageLocaleKey),
localeSidKey: _.get(results, exports.REQUIRED_FIELDS.localeSidKey),
profileId: _.get(results, exports.REQUIRED_FIELDS.profileId),
lastName: _.get(results, exports.REQUIRED_FIELDS.lastName),
timeZoneSidKey: _.get(results, exports.REQUIRED_FIELDS.timeZoneSidKey)
alias: kit_1.get(results, exports.REQUIRED_FIELDS.alias),
email: kit_1.get(results, exports.REQUIRED_FIELDS.email),
emailEncodingKey: kit_1.get(results, exports.REQUIRED_FIELDS.emailEncodingKey),
languageLocaleKey: kit_1.get(results, exports.REQUIRED_FIELDS.languageLocaleKey),
localeSidKey: kit_1.get(results, exports.REQUIRED_FIELDS.localeSidKey),
profileId: kit_1.get(results, exports.REQUIRED_FIELDS.profileId),
lastName: kit_1.get(results, exports.REQUIRED_FIELDS.lastName),
timeZoneSidKey: kit_1.get(results, exports.REQUIRED_FIELDS.timeZoneSidKey)
};
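The lodash `mapKeys`/`get` calls above now resolve from `@salesforce/kit`. A sketch of the key-normalization step, assuming kit's helpers mirror their lodash namesakes (the record values are illustrative):

import { get, lowerFirst, mapKeys } from '@salesforce/kit';

// SOQL returns PascalCase field names; lower-case the first letter so the
// result lines up with the camelCase REQUIRED_FIELDS keys.
const record = { Id: '005xx000001234AAA', Email: 'user@example.com', LastName: 'User' };
const normalized = mapKeys(record, (_value, key) => lowerFirst(String(key)));

const id = get(normalized, 'id');       // '005xx000001234AAA'
const email = get(normalized, 'email'); // 'user@example.com'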

@@ -90,3 +88,3 @@ return fields;

if (result.records.length > 0) {
return result.records[0]['Id'];
return result.records[0].Id;
}

@@ -105,2 +103,10 @@ }

/**
* Constructor
* @param {string} [username] The login username for User
*/
constructor(username) {
this.id = '';
this.username = `${Date.now()}_${username}`;
}
/**
* Used to initialize default values for fields based on a templateUser user. This user will be part of the

@@ -115,5 +121,4 @@ * Standard User profile.

const userFields = await _retrieveUserFields.call({ logger: initLogger }, templateUser);
_.merge(fields, userFields);
kit_1.merge(fields, userFields);
fields.profileId = await _retrieveProfileId('Standard User', await connection_1.Connection.create(await authInfo_1.AuthInfo.create(templateUser)));
fields.id = '';
initLogger.debug(`Standard User profileId: ${fields.profileId}`);

@@ -128,9 +133,2 @@ if (newUserName) {

}
/**
* Constructor
* @param {string} [username] The login username for User
*/
constructor(username) {
this.username = `${Date.now()}_${username}`;
}
}

@@ -187,3 +185,4 @@ exports.DefaultUserFields = DefaultUserFields;

try {
const soap = userConnection['soap'];
// @ts-ignore TODO: expose `soap` on Connection however appropriate
const soap = userConnection.soap;
await soap.setPassword(info.getFields().userId, buffer.toString('utf8'));

@@ -322,3 +321,3 @@ this.logger.debug(`Set password for userId: ${info.getFields().userId}`);

const response = await this.org.getConnection().requestRaw(info);
const responseBody = JSON.parse(ts_types_1.ensureString(response['body']));
const responseBody = kit_1.parseJsonMap(ts_types_1.ensureString(response['body']));
const statusCode = ts_types_1.asNumber(response.statusCode);

@@ -330,6 +329,8 @@ this.logger.debug(`user create response.statusCode: ${response.statusCode}`);

if (responseBody) {
const errors = _.get(responseBody, 'Errors');
const errors = ts_types_1.asJsonArray(responseBody.Errors);
if (errors && errors.length > 0) {
message = `${message} causes:${os_1.EOL}`;
_.each(_.get(responseBody, 'Errors'), singleMessage => {
errors.forEach(singleMessage => {
if (!ts_types_1.isJsonMap(singleMessage))
return;
message = `${message}${os_1.EOL}${singleMessage.description}`;
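The error handling above replaces `lodash.get`/`each` with explicit JSON narrowing from `@salesforce/ts-types`. A minimal sketch of that narrowing; the `Errors`/`description` shape comes from the hunk, everything else is illustrative:

import { AnyJson, asJsonArray, ensureString, isJsonMap } from '@salesforce/ts-types';

// Collect human-readable messages from an untyped response body.
function collectErrorDescriptions(body: AnyJson): string[] {
  const descriptions: string[] = [];
  const errors = isJsonMap(body) ? asJsonArray(body.Errors) : undefined;
  if (errors) {
    errors.forEach(entry => {
      if (isJsonMap(entry)) {
        // ensureString throws if `description` is missing or not a string.
        descriptions.push(ensureString(entry.description));
      }
    });
  }
  return descriptions;
}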

@@ -342,3 +343,3 @@ });

}
fields.id = responseBody['id'];
fields.id = ts_types_1.ensureString(responseBody.id);
await this.updateRequiredUserFields(fields);

@@ -351,3 +352,3 @@ const buffer = new secureBuffer_1.SecureBuffer();

buffer,
userId: responseBody.id
userId: fields.id
};

@@ -360,6 +361,6 @@ }

async updateRequiredUserFields(fields) {
const leftOverRequiredFields = _.omit(fields, [
const leftOverRequiredFields = kit_1.omit(fields, [
exports.REQUIRED_FIELDS.username, exports.REQUIRED_FIELDS.email, exports.REQUIRED_FIELDS.lastName, exports.REQUIRED_FIELDS.profileId
]);
const object = _.mapKeys(leftOverRequiredFields, (value, key) => kit_1.upperFirst(key));
const object = kit_1.mapKeys(leftOverRequiredFields, (value, key) => kit_1.upperFirst(key));
await this.org.getConnection().sobject('User').update(object);

@@ -366,0 +367,0 @@ this.logger.debug(`Successfully Updated additional properties for user: ${fields.username}`);

@@ -116,14 +116,4 @@ "use strict";

const stats = await Promise.all(files.map(file => exports.stat(path.join(dirPath, file))));
const metas = stats.map((value, index) => {
value['path'] = path.join(dirPath, files[index]);
return value;
});
await Promise.all(metas.map(meta => {
if (meta.isDirectory()) {
return remove(meta['path']);
}
else {
return exports.unlink(meta['path']);
}
}));
const metas = stats.map((value, index) => Object.assign(value, { path: path.join(dirPath, files[index]) }));
await Promise.all(metas.map(meta => meta.isDirectory() ? remove(meta.path) : exports.unlink(meta.path)));
await exports.rmdir(dirPath);
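The rewritten `remove` above folds the stat/path bookkeeping into a single `Object.assign` and a ternary. A standalone sketch of the same recursive delete using Node's `fs.promises` directly (the library routes these calls through its own promisified exports):

import { promises as fs } from 'fs';
import * as path from 'path';

// Recursively delete a directory: stat every entry, recurse into
// subdirectories, unlink files, then remove the now-empty directory.
async function remove(dirPath: string): Promise<void> {
  const files = await fs.readdir(dirPath);
  const metas = await Promise.all(
    files.map(async file => {
      const full = path.join(dirPath, file);
      return { full, stat: await fs.stat(full) };
    })
  );
  await Promise.all(
    metas.map(meta => (meta.stat.isDirectory() ? remove(meta.full) : fs.unlink(meta.full)))
  );
  await fs.rmdir(dirPath);
}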

@@ -130,0 +120,0 @@ }

@@ -12,4 +12,4 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
const kit_1 = require("@salesforce/kit");
const ts_types_1 = require("@salesforce/ts-types");
const lodash_1 = require("lodash");
const url_1 = require("url");

@@ -42,4 +42,5 @@ /**

];
return whitelistOfSalesforceDomainPatterns.some(pattern => lodash_1.endsWith(url.hostname, pattern)) ||
lodash_1.includes(whitelistOfSalesforceHosts, url.hostname);
return whitelistOfSalesforceDomainPatterns.some(pattern => {
return url.hostname.endsWith(pattern) || whitelistOfSalesforceHosts.includes(url.hostname);
});
}
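For reference, the whitelist check above boils down to an `endsWith`/`includes` test over the hostname. A sketch with illustrative pattern and host lists; the real lists are defined earlier in sfdc.ts:

import { URL } from 'url';

const whitelistOfSalesforceDomainPatterns = ['.salesforce.com', '.force.com']; // illustrative
const whitelistOfSalesforceHosts = ['developer.salesforce.com'];               // illustrative

function isSalesforceDomain(candidate: string): boolean {
  const url = new URL(candidate);
  return whitelistOfSalesforceDomainPatterns.some(pattern => {
    return url.hostname.endsWith(pattern) || whitelistOfSalesforceHosts.includes(url.hostname);
  });
}

console.log(isSalesforceDomain('https://login.salesforce.com')); // true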

@@ -106,3 +107,3 @@ exports.isSalesforceDomain = isSalesforceDomain;

let key;
lodash_1.findKey(data, (val, k) => {
kit_1.findKey(data, (val, k) => {
if (k[0] === k[0].toUpperCase()) {

@@ -109,0 +110,0 @@ key = k;

{
"name": "@salesforce/core",
"version": "0.20.0",
"version": "0.21.0",
"description": "Core libraries to interact with SFDX projects, orgs, and APIs.",

@@ -8,13 +8,12 @@ "main": "lib/exported",

"scripts": {
"typings": "node_modules/.bin/tsc -p typings/tsconfig.json",
"build": "npm run clean && npm run compile && npm run lint && npm run docgen",
"clean": "scripts/clean.js",
"compile": "node_modules/.bin/tsc -p ./tsconfig.json",
"docgen": "npm run compile && node_modules/.bin/jsdoc lib --readme EXPORTED.md -r -c jsdoc.json --package package.json",
"lint": "node_modules/.bin/tslint -p .",
"lint-report": "npm run lint -- -t checkstyle -o checkstyle.xml",
"clean": "scripts/clean.js",
"compile": "node_modules/.bin/tsc -p ./tsconfig.json",
"build": "npm run clean && npm run lint && npm run compile && npm run docgen",
"watch": "tsc -w -p tsconfig.json",
"prepare": "npm run build",
"test": "npm run lint && mocha \"test/**/*Test.ts\"",
"test-with-coverage": "npm run lint-report && scripts/unit-test-with-coverage.js",
"prepare": "npm run build",
"docgen": "npm run compile && node_modules/.bin/jsdoc lib --readme EXPORTED.md -r -c jsdoc.json --package package.json"
"watch": "tsc -w -p tsconfig.json"
},

@@ -31,8 +30,7 @@ "keywords": [

"messages",
"typings",
"!lib/**/*.map"
],
"dependencies": {
"@salesforce/kit": "0.0.7",
"@salesforce/ts-types": "0.4.0",
"@salesforce/kit": "0.4.0",
"@salesforce/ts-types": "0.11.0",
"@types/jsforce": "1.8.12",

@@ -44,3 +42,2 @@ "bunyan-sfdx-no-dtrace": "1.8.2",

"jsonwebtoken": "7.0.0",
"lodash": "^4.17.4",
"mkdirp": "0.5.1"

@@ -51,4 +48,5 @@ },

"@types/chai": "4.0.4",
"@types/debug": "0.0.30",
"@types/jsen": "0.0.19",
"@types/lodash": "4.14.88",
"@types/jsonwebtoken": "7.2.8",
"@types/mkdirp": "0.5.2",

@@ -55,0 +53,0 @@ "@types/mocha": "2.2.42",
