@salesforce/acu-pack

@salesforce/acu-pack - npm Package Compare versions

Comparing version 2.0.2 to 2.0.3
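
At a glance, most of the changes in this release are in the compiled output rather than in command behavior: TypeScript's downleveled helpers (the tslib_1.__asyncValues / __asyncGenerator loops and the expanded forms of optional chaining and nullish coalescing) give way to native for await...of, async generators, ?. and ??; calls to imported functions pick up the (0, fn)() indirect-call emit; and os.EOL is replaced by a new Constants.EOL. That is the emit you would expect after raising the TypeScript compile target to ES2018/ES2020 or later, though the tsconfig change itself is not part of this diff. Beyond the emit changes, the SfdxClient typings appear to gain multipart-upload members (doMultiPart, postObjectMultipart), and the Constants class picks up EOL, CR, LF, CONENTVERSION_MAX_SIZE, MIME_JSON and DEFAULT_CSV_TEXT_WRAPPERS.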

compiled/commands/acu-pack/api/file/post.d.ts


compiled/commands/acu-pack/admin/user/access.d.ts

@@ -0,0 +0,0 @@ import { flags } from '@salesforce/command';


compiled/commands/acu-pack/admin/user/access.js

@@ -35,3 +35,2 @@ "use strict";

async runInternal() {
var _a, _b, _c, _d;
let apps = null;

@@ -49,3 +48,3 @@ if (this.flags.applist) {

let query = 'SELECT Id, ApplicationId, Name, Label FROM AppMenuItem';
if ((apps === null || apps === void 0 ? void 0 : apps.length) > 0) {
if (apps?.length > 0) {
const appsFilter = `'${apps.join("','")}'`;

@@ -86,7 +85,7 @@ query += ` WHERE Label IN (${appsFilter})`;

sheet.push([
(_a = permissionSetAssignment.Assignee) === null || _a === void 0 ? void 0 : _a.Username,
permissionSetAssignment.Assignee?.Username,
permissionSetAssignment.AssigneeId,
(_b = permissionSetAssignment.PermissionSet) === null || _b === void 0 ? void 0 : _b.Label,
permissionSetAssignment.PermissionSet?.Label,
permissionSetAssignment.PermissionSetId,
(_d = (_c = permissionSetAssignment.PermissionSet) === null || _c === void 0 ? void 0 : _c.Profile) === null || _d === void 0 ? void 0 : _d.Name,
permissionSetAssignment.PermissionSet?.Profile?.Name,
permissionSetAssignment.ExpirationDate,

@@ -93,0 +92,0 @@ ]);
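
For readers unfamiliar with the pattern: (_a = x.y) === null || _a === void 0 ? void 0 : _a.z is what the TypeScript compiler emits for x.y?.z when targeting ES2019 or earlier; with an ES2020+ target the optional chaining passes through untouched, which is all the hunk above reflects. A minimal sketch, assuming a hypothetical assignment shape (not the package's actual types):

interface Assignee { Username?: string }
interface Assignment { Assignee?: Assignee }

function usernameOf(assignment: Assignment): string | undefined {
  // ES2020+ targets keep this line as written; older targets emit
  //   (_a = assignment.Assignee) === null || _a === void 0 ? void 0 : _a.Username
  return assignment.Assignee?.Username;
}

console.log(usernameOf({ Assignee: { Username: 'jdoe' } })); // jdoe
console.log(usernameOf({}));                                 // undefined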

@@ -0,0 +0,0 @@ import { flags } from '@salesforce/command';

@@ -0,0 +0,0 @@ import { flags } from '@salesforce/command';

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tslib_1 = require("tslib");
const command_1 = require("@salesforce/command");

@@ -12,3 +11,2 @@ const command_base_1 = require("../../../../lib/command-base");

async runInternal() {
var e_1, _a;
const usernames = [];

@@ -39,15 +37,5 @@ if (this.flags.userlist) {

try {
try {
for (var _b = (e_1 = void 0, tslib_1.__asyncValues(sfdxClient.do(utils_1.RestAction.DELETE, 'IDEWorkspace', workspaceRecords, 'Id', sfdx_client_1.ApiKind.TOOLING, [sfdx_client_1.NO_CONTENT_CODE]))), _c; _c = await _b.next(), !_c.done;) {
const result = _c.value;
this.ux.log(`Deleted Workspace(${result.getContent()}) for user: '${username}'.`);
}
for await (const result of sfdxClient.do(utils_1.RestAction.DELETE, 'IDEWorkspace', workspaceRecords, 'Id', sfdx_client_1.ApiKind.TOOLING, [sfdx_client_1.NO_CONTENT_CODE])) {
this.ux.log(`Deleted Workspace(${result.getContent()}) for user: '${username}'.`);
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_c && !_c.done && (_a = _b.return)) await _a.call(_b);
}
finally { if (e_1) throw e_1.error; }
}
}

@@ -54,0 +42,0 @@ catch (err) {
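
The hunk above replaces TypeScript's downleveled async-iteration output - the tslib_1.__asyncValues(...) loop plus the try/catch/finally block that calls the iterator's return() on early exit - with a native for await...of loop; the runtime behavior is equivalent. A minimal sketch with a hypothetical async generator (not the package's SfdxClient):

async function* deleteIds(ids: string[]): AsyncGenerator<string> {
  for (const id of ids) {
    await Promise.resolve(); // stand-in for an awaited REST call
    yield id;
  }
}

async function main(): Promise<void> {
  // Native ES2018 syntax; older targets expand this into a
  // tslib_1.__asyncValues(...) loop with try/catch/finally cleanup.
  for await (const id of deleteIds(['001', '002'])) {
    console.log(`Deleted id: ${id}`);
  }
}

void main();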

@@ -0,0 +0,0 @@ import { flags } from '@salesforce/command';

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tslib_1 = require("tslib");
const command_1 = require("@salesforce/command");

@@ -11,20 +10,9 @@ const command_base_1 = require("../../../../lib/command-base");

async runInternal() {
var e_1, _a, e_2, _b;
this.ux.log('Checking for pending tests...');
let recordCount = 0;
try {
for (var _c = tslib_1.__asyncValues(sfdx_query_1.SfdxQuery.waitForApexTests(this.orgAlias)), _d; _d = await _c.next(), !_d.done;) {
recordCount = _d.value;
if (recordCount === 0) {
break;
}
for await (recordCount of sfdx_query_1.SfdxQuery.waitForApexTests(this.orgAlias)) {
if (recordCount === 0) {
break;
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_d && !_d.done && (_a = _c.return)) await _a.call(_c);
}
finally { if (e_1) throw e_1.error; }
}
if (recordCount !== 0) {

@@ -48,17 +36,7 @@ this.raiseError(`${recordCount} Apex Test(s) are still executing - please try again later.`);

const sfdxClient = new sfdx_client_1.SfdxClient(this.orgAlias);
try {
for (var _e = (e_2 = void 0, tslib_1.__asyncValues(sfdxClient.do(utils_1.RestAction.DELETE, metaDataType, records, 'Id', sfdx_client_1.ApiKind.TOOLING, [
sfdx_client_1.NO_CONTENT_CODE,
]))), _f; _f = await _e.next(), !_f.done;) {
const result = _f.value;
this.ux.log(`(${++counter}/${records.length}) Deleted id: ${result.getContent()}`);
}
for await (const result of sfdxClient.do(utils_1.RestAction.DELETE, metaDataType, records, 'Id', sfdx_client_1.ApiKind.TOOLING, [
sfdx_client_1.NO_CONTENT_CODE,
])) {
this.ux.log(`(${++counter}/${records.length}) Deleted id: ${result.getContent()}`);
}
catch (e_2_1) { e_2 = { error: e_2_1 }; }
finally {
try {
if (_f && !_f.done && (_b = _e.return)) await _b.call(_e);
}
finally { if (e_2) throw e_2.error; }
}
this.ux.log('Cleared.');

@@ -65,0 +43,0 @@ }

@@ -0,0 +0,0 @@ import { flags } from '@salesforce/command';

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tslib_1 = require("tslib");
const command_1 = require("@salesforce/command");

@@ -10,20 +9,9 @@ const command_base_1 = require("../../../../lib/command-base");

async runInternal() {
var e_1, _a, e_2, _b, e_3, _c;
this.ux.log('Checking for pending tests...');
let recordCount = 0;
try {
for (var _d = tslib_1.__asyncValues(sfdx_query_1.SfdxQuery.waitForApexTests(this.orgAlias)), _e; _e = await _d.next(), !_e.done;) {
recordCount = _e.value;
if (recordCount === 0) {
break;
}
for await (recordCount of sfdx_query_1.SfdxQuery.waitForApexTests(this.orgAlias)) {
if (recordCount === 0) {
break;
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_e && !_e.done && (_a = _d.return)) await _a.call(_d);
}
finally { if (e_1) throw e_1.error; }
}
if (recordCount !== 0) {

@@ -58,15 +46,5 @@ this.raiseError(`${recordCount} Apex Test(s) are still executing - please try again later.`);

}
try {
for (var _f = tslib_1.__asyncValues(sfdx_tasks_1.SfdxTasks.waitForJob(this.orgAlias, jobInfo, waitCountMaxSeconds)), _g; _g = await _f.next(), !_g.done;) {
jobInfo = _g.value;
this.ux.log(`${new Date().toJSON()} state: ${jobInfo.state} id: ${jobInfo.id} batch: ${jobInfo.batchId} isDone: ${jobInfo.isDone()}`);
}
for await (jobInfo of sfdx_tasks_1.SfdxTasks.waitForJob(this.orgAlias, jobInfo, waitCountMaxSeconds)) {
this.ux.log(`${new Date().toJSON()} state: ${jobInfo.state} id: ${jobInfo.id} batch: ${jobInfo.batchId} isDone: ${jobInfo.isDone()}`);
}
catch (e_2_1) { e_2 = { error: e_2_1 }; }
finally {
try {
if (_g && !_g.done && (_b = _f.return)) await _b.call(_f);
}
finally { if (e_2) throw e_2.error; }
}
if (!jobInfo.isDone()) {

@@ -79,18 +57,8 @@ this.raiseError(`Timeout while waiting for Apex Test Job to Complete:${JSON.stringify(jobInfo)}`);

const createdDate = jobInfo.createdDate || new Date().toJSON();
try {
for (var _h = tslib_1.__asyncValues(sfdx_query_1.SfdxQuery.waitForApexTests(this.orgAlias, waitCountMaxSeconds, createdDate)), _j; _j = await _h.next(), !_j.done;) {
recordCount = _j.value;
if (recordCount === 0) {
break;
}
this.ux.log(`${recordCount} Apex Test(s) remaining.`);
for await (recordCount of sfdx_query_1.SfdxQuery.waitForApexTests(this.orgAlias, waitCountMaxSeconds, createdDate)) {
if (recordCount === 0) {
break;
}
this.ux.log(`${recordCount} Apex Test(s) remaining.`);
}
catch (e_3_1) { e_3 = { error: e_3_1 }; }
finally {
try {
if (_j && !_j.done && (_c = _h.return)) await _c.call(_h);
}
finally { if (e_3) throw e_3.error; }
}
if (recordCount !== 0) {

@@ -97,0 +65,0 @@ this.raiseError(`${recordCount} Apex Test(s) are still executing - please try again later.`);

@@ -0,0 +0,0 @@ import { flags } from '@salesforce/command';

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tslib_1 = require("tslib");
const command_1 = require("@salesforce/command");

@@ -10,22 +9,10 @@ const command_base_1 = require("../../../../lib/command-base");

async runInternal() {
var e_1, _a;
var _b;
this.ux.log('Checking for pending tests...');
const waitCountMaxSeconds = (this.flags.wait || Report.defaultJobStatusWaitMax) * 60;
let recordCount = 0;
try {
for (var _c = tslib_1.__asyncValues(sfdx_query_1.SfdxQuery.waitForApexTests(this.orgAlias, waitCountMaxSeconds)), _d; _d = await _c.next(), !_d.done;) {
recordCount = _d.value;
if (recordCount === 0) {
break;
}
for await (recordCount of sfdx_query_1.SfdxQuery.waitForApexTests(this.orgAlias, waitCountMaxSeconds)) {
if (recordCount === 0) {
break;
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_d && !_d.done && (_a = _c.return)) await _a.call(_c);
}
finally { if (e_1) throw e_1.error; }
}
if (recordCount !== 0) {

@@ -78,3 +65,3 @@ this.raiseError(`${recordCount} Apex Test(s) are still executing - please try again later.`);

sheetData.push([
(_b = record.ApexClass) === null || _b === void 0 ? void 0 : _b.Name,
record.ApexClass?.Name,
record.MethodName,

@@ -81,0 +68,0 @@ record.Message,

@@ -0,0 +0,0 @@ import { flags } from '@salesforce/command';

@@ -127,3 +127,2 @@ "use strict";

const getDec = (fld, maxLength) => {
var _a;
if (!fld) {

@@ -137,3 +136,3 @@ this.raiseError('The fld argument cannot be null.');

}
const scale = (_a = fld.scale) !== null && _a !== void 0 ? _a : 0;
const scale = fld.scale ?? 0;
for (let index = 1; index <= numLen - scale; index++) {
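
Similarly, (_a = fld.scale) !== null && _a !== void 0 ? _a : 0 is the downleveled form of nullish coalescing; with a newer target the source expression fld.scale ?? 0 is emitted as-is. Unlike ||, the ?? operator only falls back on null or undefined, so a legitimate scale of 0 is preserved. A small sketch, with an illustrative field shape:

interface NumericField { scale?: number | null }

function scaleOf(fld: NumericField): number {
  // Downleveled emit: (_a = fld.scale) !== null && _a !== void 0 ? _a : 0
  return fld.scale ?? 0;
}

console.log(scaleOf({ scale: 2 })); // 2
console.log(scaleOf({ scale: 0 })); // 0 - ?? keeps defined falsy values
console.log(scaleOf({}));           // 0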

@@ -171,3 +170,2 @@ num += get1Rand();

const getValue = (fld) => {
var _a, _b;
if (!fld) {

@@ -222,3 +220,3 @@ this.raiseError('The fld argument cannot be null.');

case 'multipicklist': {
if (((_a = fld.picklistValues) === null || _a === void 0 ? void 0 : _a.length) === 0) {
if (fld.picklistValues?.length === 0) {
this.ux.log(`Skipping: ${fld.name} (${fld.type}) - no picklist values.`);

@@ -231,3 +229,3 @@ }

case 'picklist': {
if (((_b = fld.picklistValues) === null || _b === void 0 ? void 0 : _b.length) === 0) {
if (fld.picklistValues?.length === 0) {
this.ux.log(`Skipping: ${fld.name} (${fld.type}) - no picklist values.`);

@@ -234,0 +232,0 @@ }

@@ -0,0 +0,0 @@ import { flags } from '@salesforce/command';

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tslib_1 = require("tslib");
const command_1 = require("@salesforce/command");

@@ -10,25 +9,14 @@ const command_base_1 = require("../../../lib/command-base");

async runInternal() {
var e_1, _a;
const apiKind = this.flags.tooling ? sfdx_client_1.ApiKind.TOOLING : sfdx_client_1.ApiKind.DEFAULT;
const sfdxClient = new sfdx_client_1.SfdxClient(this.orgAlias);
const ids = this.flags.ids.split(',');
try {
for (var _b = tslib_1.__asyncValues(sfdxClient.getByIds(this.flags.metadata, ids, apiKind)), _c; _c = await _b.next(), !_c.done;) {
const response = _c.value;
const outFilePath = this.flags.output || '{Id}.json';
const content = response.getContent();
if (response.isBinary) {
await utils_1.default.writeFile(outFilePath.replace('{Id}', response.id), content);
}
else {
await utils_1.default.writeFile(outFilePath.replace('{Id}', response.id), JSON.stringify(content));
}
for await (const response of sfdxClient.getByIds(this.flags.metadata, ids, apiKind)) {
const outFilePath = this.flags.output || '{Id}.json';
const content = response.getContent();
if (response.isBinary) {
await utils_1.default.writeFile(outFilePath.replace('{Id}', response.id), content);
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_c && !_c.done && (_a = _b.return)) await _a.call(_b);
else {
await utils_1.default.writeFile(outFilePath.replace('{Id}', response.id), JSON.stringify(content));
}
finally { if (e_1) throw e_1.error; }
}

@@ -35,0 +23,0 @@ }

@@ -0,0 +0,0 @@ import { OutputFlags } from '@oclif/parser';

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tslib_1 = require("tslib");
const path = require("path");

@@ -18,4 +17,2 @@ const command_1 = require("@salesforce/command");

static async getMetadataMapFromOrg(orgAlias, ux, options, cmdFlags) {
var e_1, _a;
var _b;
const metadataMap = new Map();

@@ -56,3 +53,3 @@ const excluded = new Set(options.excludeMetadataTypes);

}
if (!((_b = results.map) === null || _b === void 0 ? void 0 : _b.size)) {
if (!results.map?.size) {
ux.log('No Deployable Source Tracking changes found.');

@@ -80,17 +77,7 @@ return;

const namespaces = cmdFlags.namespaces ? new Set(cmdFlags.namespaces.split()) : new Set();
try {
for (var _c = tslib_1.__asyncValues(sfdx_tasks_1.SfdxTasks.getTypesForPackage(orgAlias, describeMetadatas, namespaces)), _d; _d = await _c.next(), !_d.done;) {
const entry = _d.value;
// If specific members were defined previously - just use them
metadataMap.set(entry.name, entry.members);
ux.log(`Processed (${++counter}/${describeMetadatas.size}): ${entry.name}`);
}
for await (const entry of sfdx_tasks_1.SfdxTasks.getTypesForPackage(orgAlias, describeMetadatas, namespaces)) {
// If specific members were defined previously - just use them
metadataMap.set(entry.name, entry.members);
ux.log(`Processed (${++counter}/${describeMetadatas.size}): ${entry.name}`);
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_d && !_d.done && (_a = _c.return)) await _a.call(_c);
}
finally { if (e_1) throw e_1.error; }
}
}

@@ -100,3 +87,2 @@ return metadataMap;

static async getMetadataMapFromFolder(folder, ux, options) {
var e_2, _a, e_3, _b;
const metadataMap = new Map();

@@ -110,88 +96,45 @@ const excluded = new Set(options.excludeMetadataTypes);

}
try {
// Get all the folders from the root of the MDAPI folder
for (var _c = tslib_1.__asyncValues(utils_1.default.getFolders(folder, false)), _d; _d = await _c.next(), !_d.done;) {
const folderPath = _d.value;
const packageType = options.mdapiMap.get(path.basename(folderPath));
if (!packageType) {
continue;
}
const members = [];
try {
for (var _e = (e_3 = void 0, tslib_1.__asyncValues(Build.getMDAPIFiles(packageType, folderPath, false))), _f; _f = await _e.next(), !_f.done;) {
const memberFile = _f.value;
members.push(memberFile.replace(folderPath + path.sep, ''));
}
}
catch (e_3_1) { e_3 = { error: e_3_1 }; }
finally {
try {
if (_f && !_f.done && (_b = _e.return)) await _b.call(_e);
}
finally { if (e_3) throw e_3.error; }
}
metadataMap.set(packageType, members);
// Get all the folders from the root of the MDAPI folder
for await (const folderPath of utils_1.default.getFolders(folder, false)) {
const packageType = options.mdapiMap.get(path.basename(folderPath));
if (!packageType) {
continue;
}
}
catch (e_2_1) { e_2 = { error: e_2_1 }; }
finally {
try {
if (_d && !_d.done && (_a = _c.return)) await _a.call(_c);
const members = [];
for await (const memberFile of Build.getMDAPIFiles(packageType, folderPath, false)) {
members.push(memberFile.replace(folderPath + path.sep, ''));
}
finally { if (e_2) throw e_2.error; }
metadataMap.set(packageType, members);
}
return metadataMap;
}
static getMDAPIFiles(xmlName, folder, isDocument = false) {
return tslib_1.__asyncGenerator(this, arguments, function* getMDAPIFiles_1() {
var e_4, _a, e_5, _b;
try {
for (var _c = tslib_1.__asyncValues(utils_1.default.getItems(folder, utils_1.IOItem.Both, false)), _d; _d = yield tslib_1.__await(_c.next()), !_d.done;) {
const filePath = _d.value;
if (filePath.endsWith(constants_1.default.METADATA_FILE_SUFFIX)) {
continue;
}
const itemName = path.basename(filePath);
const isDir = yield tslib_1.__await(utils_1.default.isDirectory(filePath));
if (itemName !== 'unfiled$public') {
if (isDocument) {
yield yield tslib_1.__await(itemName);
}
else if (!isDir) {
yield yield tslib_1.__await(schema_utils_1.default.getMetadataBaseName(itemName));
}
}
// if not os.path.isdir(filePath) and xmlName in INST_PKG_REF_METADATA:
// Common.removeInstPkgReference(filePath, Common.canRemoveAllPackageReferences(xmlName))
if (isDir) {
const fullCopyPath = delta_provider_1.DeltaProvider.getFullCopyPath(filePath, delta_command_1.DeltaCommandBase.defaultCopyDirList);
if (fullCopyPath) {
yield yield tslib_1.__await(itemName);
}
else {
try {
for (var _e = (e_5 = void 0, tslib_1.__asyncValues(Build.getMDAPIFiles(xmlName, filePath, xmlName === 'Document'))), _f; _f = yield tslib_1.__await(_e.next()), !_f.done;) {
const subFilePath = _f.value;
yield yield tslib_1.__await(path.join(filePath, subFilePath));
}
}
catch (e_5_1) { e_5 = { error: e_5_1 }; }
finally {
try {
if (_f && !_f.done && (_b = _e.return)) yield tslib_1.__await(_b.call(_e));
}
finally { if (e_5) throw e_5.error; }
}
}
}
static async *getMDAPIFiles(xmlName, folder, isDocument = false) {
for await (const filePath of utils_1.default.getItems(folder, utils_1.IOItem.Both, false)) {
if (filePath.endsWith(constants_1.default.METADATA_FILE_SUFFIX)) {
continue;
}
const itemName = path.basename(filePath);
const isDir = await utils_1.default.isDirectory(filePath);
if (itemName !== 'unfiled$public') {
if (isDocument) {
yield itemName;
}
else if (!isDir) {
yield schema_utils_1.default.getMetadataBaseName(itemName);
}
}
catch (e_4_1) { e_4 = { error: e_4_1 }; }
finally {
try {
if (_d && !_d.done && (_a = _c.return)) yield tslib_1.__await(_a.call(_c));
// if not os.path.isdir(filePath) and xmlName in INST_PKG_REF_METADATA:
// Common.removeInstPkgReference(filePath, Common.canRemoveAllPackageReferences(xmlName))
if (isDir) {
const fullCopyPath = delta_provider_1.DeltaProvider.getFullCopyPath(filePath, delta_command_1.DeltaCommandBase.defaultCopyDirList);
if (fullCopyPath) {
yield itemName;
}
finally { if (e_4) throw e_4.error; }
else {
for await (const subFilePath of Build.getMDAPIFiles(xmlName, filePath, xmlName === 'Document')) {
yield path.join(filePath, subFilePath);
}
}
}
});
}
}
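
The getMDAPIFiles rewrite above is the same target change applied to an async generator: the tslib_1.__asyncGenerator(...) wrapper with yield yield tslib_1.__await(...) collapses into a plain static async * method that awaits and yields directly. A minimal sketch of a recursive async generator over a directory tree (hypothetical helper, not the package's utils):

import { promises as fs } from 'fs';
import * as path from 'path';

// Native async generator; older targets wrap this body in tslib_1.__asyncGenerator.
async function* walk(dir: string): AsyncGenerator<string> {
  for (const entry of await fs.readdir(dir, { withFileTypes: true })) {
    const full = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      yield* walk(full); // delegate to the nested generator
    } else {
      yield full;
    }
  }
}

async function main(): Promise<void> {
  for await (const file of walk('.')) {
    console.log(file);
  }
}

void main();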

@@ -198,0 +141,0 @@ async runInternal() {

@@ -0,0 +0,0 @@ import { flags } from '@salesforce/command';

@@ -0,0 +0,0 @@ "use strict";

@@ -0,0 +0,0 @@ import { flags } from '@salesforce/command';

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tslib_1 = require("tslib");
const path = require("path");

@@ -13,3 +12,2 @@ const command_1 = require("@salesforce/command");

async runInternal() {
var e_1, _a;
// Gather metadata names to include

@@ -46,16 +44,6 @@ const metaNames = utils_1.default.sortArray(this.flags.metadata ? this.flags.metadata.split() : sfdx_permission_1.SfdxPermission.defaultPermissionMetaTypes);

let counter = 0;
try {
for (var _b = tslib_1.__asyncValues(sfdx_tasks_1.SfdxTasks.getTypesForPackage(this.orgAlias, describeMetadatas, this.namespaces)), _c; _c = await _b.next(), !_c.done;) {
const entry = _c.value;
metadataMap.set(entry.name, entry.members);
this.ux.log(`Processed (${++counter}/${describeMetadatas.size}): ${entry.name}`);
}
for await (const entry of sfdx_tasks_1.SfdxTasks.getTypesForPackage(this.orgAlias, describeMetadatas, this.namespaces)) {
metadataMap.set(entry.name, entry.members);
this.ux.log(`Processed (${++counter}/${describeMetadatas.size}): ${entry.name}`);
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_c && !_c.done && (_a = _b.return)) await _a.call(_b);
}
finally { if (e_1) throw e_1.error; }
}
// Write the final package

@@ -62,0 +50,0 @@ await sfdx_core_1.SfdxCore.writePackageFile(metadataMap, this.packageFileName);

@@ -0,0 +0,0 @@ import { flags } from '@salesforce/command';

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tslib_1 = require("tslib");
const path = require("path");
const os = require("os");
const fs_1 = require("fs");

@@ -16,5 +14,5 @@ const command_1 = require("@salesforce/command");

const sfdx_query_1 = require("../../../lib/sfdx-query");
const constants_1 = require("../../../lib/constants");
class Dictionary extends command_base_1.CommandBase {
async runInternal() {
var e_1, _a, e_2, _b;
// Read/Write the options file if it does not exist already

@@ -26,3 +24,3 @@ this.options = await options_factory_1.OptionsFactory.get(schema_options_1.default, this.flags.options);

// Create for writing - truncates if exists
const fileStream = fs_1.createWriteStream(schemaTmpFile, { flags: 'w' });
const fileStream = (0, fs_1.createWriteStream)(schemaTmpFile, { flags: 'w' });
let counter = 0;
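
The (0, fs_1.createWriteStream)(...) form above is how newer TypeScript emits a call to an imported function: the comma expression detaches the call from the fs_1 namespace object so no implicit this receiver is passed, matching ES-module call semantics. It is purely an emit detail; the source is still a plain createWriteStream(...) call. A sketch of what the indirect call changes, using an illustrative object:

const ns = {
  whoAmI(this: unknown): string {
    // Reports whether the call received the surrounding object as this.
    return this === ns ? 'called as a method' : 'called as a plain function';
  },
};

console.log(ns.whoAmI());      // called as a method
console.log((0, ns.whoAmI)()); // called as a plain function - the comma expression drops the receiver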

@@ -39,3 +37,3 @@ const schemas = new Set();

for (const name of this.options.outputDefMap.keys()) {
fileStream.write(`*${name}${os.EOL}`);
fileStream.write(`*${name}${constants_1.default.EOL}`);
const collection = schema[name];

@@ -63,30 +61,20 @@ if (!collection) {

const dynamicCode = this.options.getDynamicCode(name);
try {
for (var _c = (e_1 = void 0, tslib_1.__asyncValues(schema_utils_1.default.getDynamicSchemaData(schema, dynamicCode, collection))), _d; _d = await _c.next(), !_d.done;) {
const row = _d.value;
if (row.length === 0) {
continue;
}
const nameFieldValue = row[nameFieldIndex];
const fieldDefinitionRecord = fieldDefinitionMap.get(nameFieldValue);
if (fieldDefinitionRecord != null) {
for (let index = 0; index < outputDefs.length; index++) {
const outputDef = outputDefs[index];
for (const entityDefinitionField of entityDefinitionFields) {
if (outputDef.includes(`|${schema_utils_1.default.ENTITY_DEFINITION}.${entityDefinitionField}`)) {
row[index] = fieldDefinitionRecord[entityDefinitionField];
}
for await (const row of schema_utils_1.default.getDynamicSchemaData(schema, dynamicCode, collection)) {
if (row.length === 0) {
continue;
}
const nameFieldValue = row[nameFieldIndex];
const fieldDefinitionRecord = fieldDefinitionMap.get(nameFieldValue);
if (fieldDefinitionRecord != null) {
for (let index = 0; index < outputDefs.length; index++) {
const outputDef = outputDefs[index];
for (const entityDefinitionField of entityDefinitionFields) {
if (outputDef.includes(`|${schema_utils_1.default.ENTITY_DEFINITION}.${entityDefinitionField}`)) {
row[index] = fieldDefinitionRecord[entityDefinitionField];
}
}
}
fileStream.write(`${JSON.stringify(row)}${os.EOL}`);
}
fileStream.write(`${JSON.stringify(row)}${constants_1.default.EOL}`);
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_d && !_d.done && (_a = _c.return)) await _a.call(_c);
}
finally { if (e_1) throw e_1.error; }
}
}

@@ -108,26 +96,16 @@ schemas.add(schema.name);

let sheet = null;
try {
for (var _e = tslib_1.__asyncValues(utils_1.default.readFileLines(schemaTmpFile)), _f; _f = await _e.next(), !_f.done;) {
const line = _f.value;
if (line.startsWith('*')) {
sheetName = line.substring(1);
const outputDefs = this.options.outputDefMap.get(sheetName);
const headers = this.getColumnRow(outputDefs);
sheet = workbookMap.get(sheetName);
if (!sheet) {
sheet = [[...headers]];
workbookMap.set(sheetName, sheet);
}
continue;
for await (const line of utils_1.default.readFileLines(schemaTmpFile)) {
if (line.startsWith('*')) {
sheetName = line.substring(1);
const outputDefs = this.options.outputDefMap.get(sheetName);
const headers = this.getColumnRow(outputDefs);
sheet = workbookMap.get(sheetName);
if (!sheet) {
sheet = [[...headers]];
workbookMap.set(sheetName, sheet);
}
sheet.push(JSON.parse(line));
continue;
}
sheet.push(JSON.parse(line));
}
catch (e_2_1) { e_2 = { error: e_2_1 }; }
finally {
try {
if (_f && !_f.done && (_b = _e.return)) await _b.call(_e);
}
finally { if (e_2) throw e_2.error; }
}
office_1.Office.writeXlxsWorkbook(workbookMap, reportPath);

@@ -134,0 +112,0 @@ }

@@ -0,0 +0,0 @@ import { flags } from '@salesforce/command';

@@ -0,0 +0,0 @@ import { CommandBase } from '../../../../lib/command-base';

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tslib_1 = require("tslib");
const command_1 = require("@salesforce/command");

@@ -45,15 +44,13 @@ const command_base_1 = require("../../../../lib/command-base");

}
diff(source) {
return tslib_1.__asyncGenerator(this, arguments, function* diff_1() {
// git has already done all of the hashing/diffing for us
source = source ? utils_1.default.normalizePath(source) : this.deltaOptions.source;
for (const [deltaFile, deltaKind] of this.deltas) {
// Did we exclude the filepath?
if (!deltaFile.startsWith(source)) {
yield tslib_1.__await(this.logMessage(`Skipping delta file line: '${deltaFile}' not in source path: '${source}'.`, true));
continue;
}
yield yield tslib_1.__await(new delta_provider_1.Delta(deltaKind, deltaFile));
async *diff(source) {
// git has already done all of the hashing/diffing for us
source = source ? utils_1.default.normalizePath(source) : this.deltaOptions.source;
for (const [deltaFile, deltaKind] of this.deltas) {
// Did we exclude the filepath?
if (!deltaFile.startsWith(source)) {
await this.logMessage(`Skipping delta file line: '${deltaFile}' not in source path: '${source}'.`, true);
continue;
}
});
yield new delta_provider_1.Delta(deltaKind, deltaFile);
}
}

@@ -60,0 +57,0 @@ async validateDeltaOptions(deltaOptions) {

@@ -0,0 +0,0 @@ import { CommandBase } from '../../../../lib/command-base';

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tslib_1 = require("tslib");
const fs_1 = require("fs");
const path = require("path");
const os = require("os");
const command_1 = require("@salesforce/command");

@@ -13,2 +11,3 @@ const md5File = require("md5-file");

const delta_provider_1 = require("../../../../lib/delta-provider");
const constants_1 = require("../../../../lib/constants");
class Md5 extends command_base_1.CommandBase {

@@ -50,77 +49,64 @@ constructor() {

}
diff(source) {
return tslib_1.__asyncGenerator(this, arguments, function* diff_1() {
var e_1, _a;
let hasUpdates = false;
source = source ? utils_1.default.normalizePath(source) : this.deltaOptions.source;
try {
for (var _b = tslib_1.__asyncValues(utils_1.default.getFiles(source)), _c; _c = yield tslib_1.__await(_b.next()), !_c.done;) {
const deltaFile = _c.value;
if (source && !deltaFile.startsWith(source)) {
yield tslib_1.__await(this.logMessage(`Skipping delta file line: '${deltaFile}' not in source path: '${source}'.`, true));
continue;
}
const hash = md5File.sync(deltaFile);
const entry = this.deltas.get(deltaFile);
let deltaKind;
// Is this the same?
if (!entry) {
deltaKind = delta_provider_1.DeltaProvider.deltaTypeKind.A;
this.deltas.set(deltaFile, { hash, isFound: true });
hasUpdates = true;
}
else if (hash !== entry.hash) {
deltaKind = delta_provider_1.DeltaProvider.deltaTypeKind.M;
this.deltas.set(deltaFile, { hash, isFound: true });
hasUpdates = true;
}
else {
deltaKind = delta_provider_1.DeltaProvider.deltaTypeKind.NONE;
this.deltas.set(deltaFile, { hash, isFound: true });
}
// return the delta
yield yield tslib_1.__await(new delta_provider_1.Delta(deltaKind, deltaFile));
}
async *diff(source) {
let hasUpdates = false;
source = source ? utils_1.default.normalizePath(source) : this.deltaOptions.source;
for await (const deltaFile of utils_1.default.getFiles(source)) {
if (source && !deltaFile.startsWith(source)) {
await this.logMessage(`Skipping delta file line: '${deltaFile}' not in source path: '${source}'.`, true);
continue;
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_c && !_c.done && (_a = _b.return)) yield tslib_1.__await(_a.call(_b));
}
finally { if (e_1) throw e_1.error; }
const hash = md5File.sync(deltaFile);
const entry = this.deltas.get(deltaFile);
let deltaKind;
// Is this the same?
if (!entry) {
deltaKind = delta_provider_1.DeltaProvider.deltaTypeKind.A;
this.deltas.set(deltaFile, { hash, isFound: true });
hasUpdates = true;
}
// Check for deletes
const deleted = [];
for (const [fp, data] of this.deltas) {
if (!data.isFound) {
// note deleted files
deleted.push({ deltaKind: delta_provider_1.DeltaProvider.deltaTypeKind.D, deltaFile: fp });
hasUpdates = true;
}
else if (hash !== entry.hash) {
deltaKind = delta_provider_1.DeltaProvider.deltaTypeKind.M;
this.deltas.set(deltaFile, { hash, isFound: true });
hasUpdates = true;
}
// Return deleted entries
for (const del of deleted) {
yield yield tslib_1.__await(del);
// Remove the delete entry from the deltas
this.deltas.delete(del.deltaFile);
else {
deltaKind = delta_provider_1.DeltaProvider.deltaTypeKind.NONE;
this.deltas.set(deltaFile, { hash, isFound: true });
}
// Update hash file?
if (hasUpdates) {
const md5FilePath = this.deltaOptions.deltaFilePath;
yield tslib_1.__await(this.logMessage('Updating hash file...', true));
if (!(yield tslib_1.__await(utils_1.default.pathExists(md5FilePath)))) {
const folder = path.dirname(md5FilePath);
if (folder && !(yield tslib_1.__await(utils_1.default.pathExists(folder)))) {
yield tslib_1.__await(utils_1.default.mkDirPath(folder));
}
// return the delta
yield new delta_provider_1.Delta(deltaKind, deltaFile);
}
// Check for deletes
const deleted = [];
for (const [fp, data] of this.deltas) {
if (!data.isFound) {
// note deleted files
deleted.push({ deltaKind: delta_provider_1.DeltaProvider.deltaTypeKind.D, deltaFile: fp });
hasUpdates = true;
}
}
// Return deleted entries
for (const del of deleted) {
yield del;
// Remove the delete entry from the deltas
this.deltas.delete(del.deltaFile);
}
// Update hash file?
if (hasUpdates) {
const md5FilePath = this.deltaOptions.deltaFilePath;
await this.logMessage('Updating hash file...', true);
if (!(await utils_1.default.pathExists(md5FilePath))) {
const folder = path.dirname(md5FilePath);
if (folder && !(await utils_1.default.pathExists(folder))) {
await utils_1.default.mkDirPath(folder);
}
else {
yield tslib_1.__await(fs_1.promises.unlink(md5FilePath));
}
for (const [fp, data] of this.deltas) {
yield tslib_1.__await(fs_1.promises.appendFile(md5FilePath, `${fp}${this.deltaLineToken}${data.hash}${os.EOL}`));
}
yield tslib_1.__await(this.logMessage(`Updated hash file: ${md5FilePath} with ${this.deltas.size} entries.`, true));
}
});
else {
await fs_1.promises.unlink(md5FilePath);
}
for (const [fp, data] of this.deltas) {
await fs_1.promises.appendFile(md5FilePath, `${fp}${this.deltaLineToken}${data.hash}${constants_1.default.EOL}`);
}
await this.logMessage(`Updated hash file: ${md5FilePath} with ${this.deltas.size} entries.`, true);
}
}

@@ -127,0 +113,0 @@ };

@@ -0,0 +0,0 @@ import { flags } from '@salesforce/command';

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tslib_1 = require("tslib");
const path = require("path");

@@ -17,3 +16,2 @@ const command_1 = require("@salesforce/command");

async runInternal() {
var e_1, _a;
if (!this.flags.source) {

@@ -32,23 +30,13 @@ this.flags.source = (await sfdx_project_1.default.default()).getDefaultDirectory();

this.ux.log(`Scanning metadata in: ${folder}`);
try {
for (var _b = (e_1 = void 0, tslib_1.__asyncValues(utils_1.default.getFiles(folder))), _c; _c = await _b.next(), !_c.done;) {
const filePath = _c.value;
const json = await utils_1.default.readObjectFromXmlFile(filePath);
if (json.CustomObject) {
this.processObjectMeta(filePath, json);
}
if (json.CustomField) {
this.processFieldMeta(filePath, json);
}
if (json.PermissionSet || json.Profile) {
this.processPermissionSetMeta(filePath, json);
}
for await (const filePath of utils_1.default.getFiles(folder)) {
const json = await utils_1.default.readObjectFromXmlFile(filePath);
if (json.CustomObject) {
this.processObjectMeta(filePath, json);
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_c && !_c.done && (_a = _b.return)) await _a.call(_b);
if (json.CustomField) {
this.processFieldMeta(filePath, json);
}
finally { if (e_1) throw e_1.error; }
if (json.PermissionSet || json.Profile) {
this.processPermissionSetMeta(filePath, json);
}
}

@@ -55,0 +43,0 @@ }

@@ -0,0 +0,0 @@ import { flags } from '@salesforce/command';

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tslib_1 = require("tslib");
const command_1 = require("@salesforce/command");

@@ -11,3 +10,2 @@ const command_base_1 = require("../../../lib/command-base");

async runInternal() {
var e_1, _a, e_2, _b;
const sourceFolders = !this.flags.source ? Profile.defaultPermissionsGlobs : this.flags.source.split(',');

@@ -23,25 +21,15 @@ this.permissions = new Map();

this.ux.log(`Reading metadata in: ${sourceFolder}`);
try {
for (var _c = (e_1 = void 0, tslib_1.__asyncValues(utils_1.default.getFiles(sourceFolder.trim()))), _d; _d = await _c.next(), !_d.done;) {
const filePath = _d.value;
this.ux.log(`\tProcessing: ${filePath}`);
const json = await utils_1.default.readObjectFromXmlFile(filePath);
if (!json.PermissionSet && !json.Profile) {
this.ux.log(`\tUnable to process file: ${filePath}`);
continue;
}
// Read all the CustomObject typenames PermissionSet from and add to the customObjects Set
const permSet = sfdx_permission_1.PermissionSet.fromXml(filePath, json);
custObjs.push(...Array.from(permSet.getPermissionCollection(sfdx_permission_1.SfdxPermission.customObject).keys()));
// Add to collection for update later
sourceFilePaths.add(filePath);
for await (const filePath of utils_1.default.getFiles(sourceFolder.trim())) {
this.ux.log(`\tProcessing: ${filePath}`);
const json = await utils_1.default.readObjectFromXmlFile(filePath);
if (!json.PermissionSet && !json.Profile) {
this.ux.log(`\tUnable to process file: ${filePath}`);
continue;
}
// Read all the CustomObject typenames PermissionSet from and add to the customObjects Set
const permSet = sfdx_permission_1.PermissionSet.fromXml(filePath, json);
custObjs.push(...Array.from(permSet.getPermissionCollection(sfdx_permission_1.SfdxPermission.customObject).keys()));
// Add to collection for update later
sourceFilePaths.add(filePath);
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_d && !_d.done && (_a = _c.return)) await _a.call(_c);
}
finally { if (e_1) throw e_1.error; }
}
}

@@ -84,19 +72,9 @@ // Debug

const nameSet = new Set();
try {
for (var _e = (e_2 = void 0, tslib_1.__asyncValues(sfdx_tasks_1.SfdxTasks.listMetadata(this.orgAlias, permissionMetaDataType))), _f; _f = await _e.next(), !_f.done;) {
const metaData = _f.value;
if (!metaData.fullName) {
this.ux.log(`Error No fullName field on type ${permissionMetaDataType}`);
continue;
}
nameSet.add(metaData.fullName);
for await (const metaData of sfdx_tasks_1.SfdxTasks.listMetadata(this.orgAlias, permissionMetaDataType)) {
if (!metaData.fullName) {
this.ux.log(`Error No fullName field on type ${permissionMetaDataType}`);
continue;
}
nameSet.add(metaData.fullName);
}
catch (e_2_1) { e_2 = { error: e_2_1 }; }
finally {
try {
if (_f && !_f.done && (_b = _e.return)) await _b.call(_e);
}
finally { if (e_2) throw e_2.error; }
}
orgMetaDataMap.set(permissionMetaDataType, nameSet);

@@ -103,0 +81,0 @@ }

@@ -0,0 +0,0 @@ import { flags } from '@salesforce/command';

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tslib_1 = require("tslib");
const command_1 = require("@salesforce/command");

@@ -11,6 +10,4 @@ const command_base_1 = require("../../../lib/command-base");

async runInternal() {
var e_1, _a, e_2, _b;
var _c;
// Read/Write the options file if it does not exist already
const options = await options_factory_1.OptionsFactory.get(xpath_options_1.XPathOptions, (_c = this.flags.options) !== null && _c !== void 0 ? _c : XPath.defaultOptionsFileName);
const options = await options_factory_1.OptionsFactory.get(xpath_options_1.XPathOptions, this.flags.options ?? XPath.defaultOptionsFileName);
for (const [sourceFolder, rules] of options.rules) {

@@ -20,36 +17,23 @@ if (!sourceFolder) {

}
try {
for (var _d = (e_1 = void 0, tslib_1.__asyncValues(utils_1.default.getFiles(sourceFolder))), _e; _e = await _d.next(), !_e.done;) {
const filePath = _e.value;
this.ux.log(`Processing file: '${filePath}`);
let xml = null;
try {
for (var _f = (e_2 = void 0, tslib_1.__asyncValues(utils_1.default.readFileLines(filePath))), _g; _g = await _f.next(), !_g.done;) {
const line = _g.value;
xml += line;
}
}
catch (e_2_1) { e_2 = { error: e_2_1 }; }
finally {
try {
if (_g && !_g.done && (_b = _f.return)) await _b.call(_f);
}
finally { if (e_2) throw e_2.error; }
}
const xPaths = [];
for await (const filePath of utils_1.default.getFiles(sourceFolder)) {
this.ux.log(`Processing file: '${filePath}`);
let xml = null;
for await (const line of utils_1.default.readFileLines(filePath)) {
xml += line;
}
const xPaths = [];
for (const rule of rules) {
xPaths.push(rule.xPath);
}
for (const [xPath, values] of utils_1.default.selectXPath(xml, xPaths)) {
for (const rule of rules) {
xPaths.push(rule.xPath);
}
for (const [xPath, values] of utils_1.default.selectXPath(xml, xPaths)) {
for (const rule of rules) {
if (rule.xPath === xPath) {
for (const ruleValue of rule.values) {
for (const xmlValue of values) {
if (ruleValue.trim() === xmlValue.trim()) {
// Set the proper exit code to indicate violation/failure
this.gotError = true;
this.ux.log(`${rule.name} - Violation!`);
this.ux.log(`\txpath: ${xPath}`);
this.ux.log(`\tvalue: ${xmlValue}`);
}
if (rule.xPath === xPath) {
for (const ruleValue of rule.values) {
for (const xmlValue of values) {
if (ruleValue.trim() === xmlValue.trim()) {
// Set the proper exit code to indicate violation/failure
this.gotError = true;
this.ux.log(`${rule.name} - Violation!`);
this.ux.log(`\txpath: ${xPath}`);
this.ux.log(`\tvalue: ${xmlValue}`);
}

@@ -62,9 +46,2 @@ }

}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_e && !_e.done && (_a = _d.return)) await _a.call(_d);
}
finally { if (e_1) throw e_1.error; }
}
}

@@ -71,0 +48,0 @@ return;

declare const _default: {};
export default _default;

@@ -0,0 +0,0 @@ "use strict";

@@ -0,0 +0,0 @@ import { SfdxCommand } from '@salesforce/command';

@@ -0,0 +0,0 @@ "use strict";

@@ -40,2 +40,8 @@ export default class Constants {

static readonly HTTP_STATUS_REDIRECT: number[];
static readonly EOL: string;
static readonly CR = "\r";
static readonly LF = "\n";
static readonly CONENTVERSION_MAX_SIZE = 37000000;
static readonly MIME_JSON = "application/json";
static readonly DEFAULT_CSV_TEXT_WRAPPERS: string[];
}
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const os = require("os");
class Constants {

@@ -45,2 +46,8 @@ }

Constants.HTTP_STATUS_REDIRECT = [301];
Constants.EOL = os.EOL;
Constants.CR = '\r';
Constants.LF = '\n';
Constants.CONENTVERSION_MAX_SIZE = 37000000;
Constants.MIME_JSON = 'application/json';
Constants.DEFAULT_CSV_TEXT_WRAPPERS = ['"'];
//# sourceMappingURL=constants.js.map
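
The new Constants members above explain the os.EOL -> constants_1.default.EOL substitutions scattered through the other hunks: the line ending is still os.EOL, it is just read from one place. A minimal sketch of the pattern:

import * as os from 'os';

class Constants {
  public static readonly EOL: string = os.EOL; // platform default: '\n' or '\r\n'
  public static readonly CR = '\r';
  public static readonly LF = '\n';
}

// Callers write Constants.EOL instead of importing os everywhere.
process.stdout.write(`done${Constants.EOL}`);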

@@ -0,0 +0,0 @@ import { FlagsConfig } from '@salesforce/command';

@@ -64,3 +64,2 @@ "use strict";

static async getDeltaOptions(commandFlags) {
var _a, _b, _c, _d, _e;
let deltaOptions = new delta_options_1.DeltaOptions();

@@ -75,7 +74,7 @@ if (!commandFlags) {

else {
deltaOptions.deltaFilePath = (_a = commandFlags.deltaFilePath) !== null && _a !== void 0 ? _a : null;
deltaOptions.source = (_b = commandFlags.source) !== null && _b !== void 0 ? _b : null;
deltaOptions.destination = (_c = commandFlags.destination) !== null && _c !== void 0 ? _c : null;
deltaOptions.forceFile = (_d = commandFlags.force) !== null && _d !== void 0 ? _d : null;
deltaOptions.ignoreFile = (_e = commandFlags.ignore) !== null && _e !== void 0 ? _e : null;
deltaOptions.deltaFilePath = commandFlags.deltaFilePath ?? null;
deltaOptions.source = commandFlags.source ?? null;
deltaOptions.destination = commandFlags.destination ?? null;
deltaOptions.forceFile = commandFlags.force ?? null;
deltaOptions.ignoreFile = commandFlags.ignore ?? null;
if (commandFlags.copyfulldir) {

@@ -82,0 +81,0 @@ deltaOptions.fullCopyDirNames = commandFlags.copyfulldir.split(',');

@@ -0,0 +0,0 @@ import { OptionsBase } from './options';

@@ -0,0 +0,0 @@ "use strict";

@@ -0,0 +0,0 @@ import { DeltaOptions } from './delta-options';

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.DeltaProvider = exports.Delta = void 0;
const tslib_1 = require("tslib");
const os = require("os");
const path = require("path");

@@ -51,3 +49,2 @@ const fs_1 = require("fs");

async run(deltaOptions) {
var e_1, _a, e_2, _b, e_3, _c, e_4, _d, e_5, _e, e_6, _f, e_7, _g;
if (!deltaOptions) {

@@ -104,28 +101,8 @@ throw new Error('No DeltaOptions specified.');

await this.logMessage('Ignore Set:');
try {
for (var _h = tslib_1.__asyncValues(utils_1.default.readFileLines(ignoreFile)), _j; _j = await _h.next(), !_j.done;) {
const line = _j.value;
try {
for (var _k = (e_2 = void 0, tslib_1.__asyncValues(utils_1.default.getFiles(line))), _l; _l = await _k.next(), !_l.done;) {
const filePath = _l.value;
ignoreSet.add(utils_1.default.normalizePath(filePath));
await this.logMessage(`\t${filePath}`);
}
}
catch (e_2_1) { e_2 = { error: e_2_1 }; }
finally {
try {
if (_l && !_l.done && (_b = _k.return)) await _b.call(_k);
}
finally { if (e_2) throw e_2.error; }
}
for await (const line of utils_1.default.readFileLines(ignoreFile)) {
for await (const filePath of utils_1.default.getFiles(line)) {
ignoreSet.add(utils_1.default.normalizePath(filePath));
await this.logMessage(`\t${filePath}`);
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_j && !_j.done && (_a = _h.return)) await _a.call(_h);
}
finally { if (e_1) throw e_1.error; }
}
}

@@ -149,147 +126,97 @@ if (!this.diff) {

await this.logMessage('Puring force file entries from deltas.', true);
try {
for (var _m = tslib_1.__asyncValues(utils_1.default.readFileLines(forceFile)), _o; _o = await _m.next(), !_o.done;) {
const line = _o.value;
try {
for (var _p = (e_4 = void 0, tslib_1.__asyncValues(utils_1.default.getFiles(line))), _q; _q = await _p.next(), !_q.done;) {
const filePath = _q.value;
if (this.deltas.delete(filePath)) {
await this.logMessage(`Purged: ${filePath}`, true);
}
}
for await (const line of utils_1.default.readFileLines(forceFile)) {
for await (const filePath of utils_1.default.getFiles(line)) {
if (this.deltas.delete(filePath)) {
await this.logMessage(`Purged: ${filePath}`, true);
}
catch (e_4_1) { e_4 = { error: e_4_1 }; }
finally {
try {
if (_q && !_q.done && (_d = _p.return)) await _d.call(_p);
}
finally { if (e_4) throw e_4.error; }
}
}
}
catch (e_3_1) { e_3 = { error: e_3_1 }; }
finally {
try {
if (_o && !_o.done && (_c = _m.return)) await _c.call(_m);
}
finally { if (e_3) throw e_3.error; }
}
}
}
await this.logMessage(`Scanning folder: ${source}.`, true);
try {
for (var _r = tslib_1.__asyncValues(this.diff(source)), _s; _s = await _r.next(), !_s.done;) {
const delta = _s.value;
const deltaKind = delta.deltaKind;
const deltaFile = delta.deltaFile;
if (ignoreSet.has(deltaFile)) {
await this.logMessage(`Delta (${deltaKind}) ignored: ${deltaFile}`, true);
metrics.Ign++;
continue;
}
// Determine the action
switch (deltaKind) {
// [D]eleted files
case DeltaProvider.deltaTypeKind.D:
await this.logMessage(`DELETED File: ${deltaFile}`);
if (deleteReportFile) {
await fs_1.promises.appendFile(deleteReportFile, deltaFile + os.EOL);
for await (const delta of this.diff(source)) {
const deltaKind = delta.deltaKind;
const deltaFile = delta.deltaFile;
if (ignoreSet.has(deltaFile)) {
await this.logMessage(`Delta (${deltaKind}) ignored: ${deltaFile}`, true);
metrics.Ign++;
continue;
}
// Determine the action
switch (deltaKind) {
// [D]eleted files
case DeltaProvider.deltaTypeKind.D:
await this.logMessage(`DELETED File: ${deltaFile}`);
if (deleteReportFile) {
await fs_1.promises.appendFile(deleteReportFile, deltaFile + constants_1.default.EOL);
}
metrics.Del++;
break;
// [A]dded & [M]odified files
case DeltaProvider.deltaTypeKind.A:
case DeltaProvider.deltaTypeKind.M: {
// check the source folder for associated files.
const fullCopyPath = DeltaProvider.getFullCopyPath(deltaFile, deltaOptions.fullCopyDirNames);
const dirName = fullCopyPath ?? path.dirname(deltaFile);
// const deltaFileBaseName = `${path.basename(deltaFile).split('.')[0]}.`;
const deltaFileBaseName = schema_utils_1.default.getMetadataBaseName(deltaFile);
for await (const filePath of utils_1.default.getFiles(dirName, fullCopyPath != null)) {
// have we already processed this file?
if (copiedSet.has(filePath)) {
await this.logMessage(`Already Copied ${filePath} - skipping`);
continue;
}
metrics.Del++;
break;
// [A]dded & [M]odified files
case DeltaProvider.deltaTypeKind.A:
case DeltaProvider.deltaTypeKind.M: {
// check the source folder for associated files.
const fullCopyPath = DeltaProvider.getFullCopyPath(deltaFile, deltaOptions.fullCopyDirNames);
const dirName = fullCopyPath !== null && fullCopyPath !== void 0 ? fullCopyPath : path.dirname(deltaFile);
// const deltaFileBaseName = `${path.basename(deltaFile).split('.')[0]}.`;
const deltaFileBaseName = schema_utils_1.default.getMetadataBaseName(deltaFile);
try {
for (var _t = (e_6 = void 0, tslib_1.__asyncValues(utils_1.default.getFiles(dirName, fullCopyPath != null))), _u; _u = await _t.next(), !_u.done;) {
const filePath = _u.value;
// have we already processed this file?
if (copiedSet.has(filePath)) {
await this.logMessage(`Already Copied ${filePath} - skipping`);
continue;
if (filePath.startsWith(fullCopyPath) || path.basename(filePath).startsWith(deltaFileBaseName)) {
// are we ignoring this file?
if (ignoreSet.has(filePath)) {
await this.logMessage(`Delta (${deltaKind}) ignored: ${filePath}`, true);
metrics.Ign++;
}
else {
const destinationPath = filePath.replace(source, destination);
if (!isDryRun) {
await utils_1.default.copyFile(filePath, destinationPath);
}
if (filePath.startsWith(fullCopyPath) || path.basename(filePath).startsWith(deltaFileBaseName)) {
// are we ignoring this file?
if (ignoreSet.has(filePath)) {
await this.logMessage(`Delta (${deltaKind}) ignored: ${filePath}`, true);
metrics.Ign++;
}
else {
const destinationPath = filePath.replace(source, destination);
if (!isDryRun) {
await utils_1.default.copyFile(filePath, destinationPath);
}
await this.logMessage(`Delta (${deltaKind}) found: ${destinationPath}`);
metrics.Copy++;
copiedSet.add(filePath);
}
}
await this.logMessage(`Delta (${deltaKind}) found: ${destinationPath}`);
metrics.Copy++;
copiedSet.add(filePath);
}
}
catch (e_6_1) { e_6 = { error: e_6_1 }; }
finally {
try {
if (_u && !_u.done && (_f = _t.return)) await _f.call(_t);
}
finally { if (e_6) throw e_6.error; }
}
// Sometimes the meta-data files can be located in the parent dir (staticresources, documents, experiences)
// so let's check there
const parentDirName = path.dirname(dirName);
const deltaParentBaseName = `${path.basename(dirName)}.`;
for await (const parentFilePath of utils_1.default.getFiles(parentDirName, false)) {
// have we already processed this file?
if (copiedSet.has(parentFilePath)) {
await this.logMessage(`Already Copied ${parentFilePath} - skipping`);
continue;
}
// Sometimes the meta-data files can be located in the parent dir (staticresources, documents, experiences)
// so let's check there
const parentDirName = path.dirname(dirName);
const deltaParentBaseName = `${path.basename(dirName)}.`;
try {
for (var _v = (e_7 = void 0, tslib_1.__asyncValues(utils_1.default.getFiles(parentDirName, false))), _w; _w = await _v.next(), !_w.done;) {
const parentFilePath = _w.value;
// have we already processed this file?
if (copiedSet.has(parentFilePath)) {
await this.logMessage(`Already Copied ${parentFilePath} - skipping`);
continue;
}
// are we ignoring this file?
if (ignoreSet.has(parentFilePath)) {
await this.logMessage(`Delta (${deltaKind}) ignored: ${parentFilePath}`, true);
metrics.Ign++;
continue;
}
if (path.basename(parentFilePath).startsWith(deltaParentBaseName) && parentFilePath.endsWith(constants_1.default.METADATA_FILE_SUFFIX)) {
const destinationPath = parentFilePath.replace(source, destination);
if (!isDryRun) {
await utils_1.default.copyFile(parentFilePath, destinationPath);
}
await this.logMessage(`Delta (${deltaKind}) found: ${destinationPath}`);
metrics.Copy++;
copiedSet.add(parentFilePath);
}
}
// are we ignoring this file?
if (ignoreSet.has(parentFilePath)) {
await this.logMessage(`Delta (${deltaKind}) ignored: ${parentFilePath}`, true);
metrics.Ign++;
continue;
}
catch (e_7_1) { e_7 = { error: e_7_1 }; }
finally {
try {
if (_w && !_w.done && (_g = _v.return)) await _g.call(_v);
if (path.basename(parentFilePath).startsWith(deltaParentBaseName) && parentFilePath.endsWith(constants_1.default.METADATA_FILE_SUFFIX)) {
const destinationPath = parentFilePath.replace(source, destination);
if (!isDryRun) {
await utils_1.default.copyFile(parentFilePath, destinationPath);
}
finally { if (e_7) throw e_7.error; }
await this.logMessage(`Delta (${deltaKind}) found: ${destinationPath}`);
metrics.Copy++;
copiedSet.add(parentFilePath);
}
break;
}
case DeltaProvider.deltaTypeKind.NONE:
await this.logMessage(`Delta (${deltaKind}): ${deltaFile}`);
metrics.None++;
break;
default:
await this.logMessage(`WARNING: Unknown Delta (${deltaKind}): ${deltaFile}`);
break;
}
case DeltaProvider.deltaTypeKind.NONE:
await this.logMessage(`Delta (${deltaKind}): ${deltaFile}`);
metrics.None++;
break;
default:
await this.logMessage(`WARNING: Unknown Delta (${deltaKind}): ${deltaFile}`);
}
}
catch (e_5_1) { e_5 = { error: e_5_1 }; }
finally {
try {
if (_s && !_s.done && (_e = _r.return)) await _e.call(_r);
}
finally { if (e_5) throw e_5.error; }
}
}

@@ -305,3 +232,2 @@ catch (err) {

async loadDeltaFile(deltaFilePath) {
var e_8, _a;
// only load the hash once

@@ -311,21 +237,11 @@ deltaFilePath = deltaFilePath ? utils_1.default.normalizePath(deltaFilePath) : this.deltaOptions.deltaFilePath;

await this.logMessage(`Loading delta file: ${deltaFilePath}`);
try {
for (var _b = tslib_1.__asyncValues(utils_1.default.readFileLines(deltaFilePath)), _c; _c = await _b.next(), !_c.done;) {
const line = _c.value;
if (!line || !line.trim()) {
continue;
}
if (line.indexOf(this.deltaLineToken) === -1) {
await this.logMessage(`Skipping invalid line: ${line}`, true);
continue;
}
this.processDeltaLine(line);
for await (const line of utils_1.default.readFileLines(deltaFilePath)) {
if (!line || !line.trim()) {
continue;
}
}
catch (e_8_1) { e_8 = { error: e_8_1 }; }
finally {
try {
if (_c && !_c.done && (_a = _b.return)) await _a.call(_b);
if (line.indexOf(this.deltaLineToken) === -1) {
await this.logMessage(`Skipping invalid line: ${line}`, true);
continue;
}
finally { if (e_8) throw e_8.error; }
this.processDeltaLine(line);
}

@@ -343,6 +259,6 @@ const isEmpty = this.deltas.size === 0;

if (typeof message === 'string') {
await fs_1.promises.appendFile(this.logFile, `${message}${os.EOL}`);
await fs_1.promises.appendFile(this.logFile, `${message}${constants_1.default.EOL}`);
}
else {
await fs_1.promises.appendFile(this.logFile, `${JSON.stringify(message)}${os.EOL}`);
await fs_1.promises.appendFile(this.logFile, `${JSON.stringify(message)}${constants_1.default.EOL}`);
}

@@ -349,0 +265,0 @@ if (includeConsole || this.deltaOptions.logAllMessagesToConsole) {

export declare class Office {
static writeXlxsWorkbook(workbookMap: Map<string, string[][]>, xlxsFilePath: string): void;
}

@@ -29,3 +29,3 @@ "use strict";

}
xlsx_1.writeFile(workbook, xlxsFilePath);
(0, xlsx_1.writeFile)(workbook, xlxsFilePath);
}

@@ -32,0 +32,0 @@ }

@@ -0,0 +0,0 @@ import { OptionsBase, OptionsSettings } from './options';

@@ -0,0 +0,0 @@ "use strict";

@@ -0,0 +0,0 @@ export declare class OptionsSettings {

@@ -0,0 +0,0 @@ "use strict";

@@ -0,0 +0,0 @@ import { OptionsBase } from './options';

@@ -0,0 +0,0 @@ "use strict";

@@ -0,0 +0,0 @@ import { UX } from '@salesforce/command';

@@ -0,0 +0,0 @@ "use strict";

@@ -0,0 +0,0 @@ import { OptionsBase } from './options';

@@ -0,0 +0,0 @@ "use strict";

@@ -0,0 +0,0 @@ import { OptionsBase } from './options';

@@ -0,0 +0,0 @@ "use strict";

@@ -0,0 +0,0 @@ export default class SchemaUtils {

@@ -0,0 +0,0 @@ "use strict";

@@ -9,2 +9,19 @@ import { RestAction, RestResult } from './utils';

export declare class SfdxClient {
static readonly metaDataInfo: {
ContentVersion: {
MetaName: string;
DataName: string;
Filename: string;
};
Document: {
MetaName: string;
DataName: string;
Filename: string;
};
Attachment: {
MetaName: string;
DataName: string;
Filename: string;
};
};
private static defailtIdField;

@@ -25,10 +42,12 @@ private headers;

updateByRecords(metaDataType: string, records: any[], recordIdField?: string, apiKind?: ApiKind): AsyncGenerator<RestResult, void, void>;
doMultiPart(action: RestAction, record: any, filePath: string, validStatusCodes?: number[]): Promise<RestResult>;
postObjectMultipart(objectName: string, objectRecord: any, fileName: string, filePath: string): Promise<any>;
do(action: RestAction, metaDataType: string, records?: any[], recordIdField?: string, apiKind?: ApiKind, validStatusCodes?: number[]): AsyncGenerator<RestResult, void, void>;
doComposite(action: RestAction, record: any, validStatusCodes?: number[]): Promise<RestResult>;
getMaxApiVersion(): Promise<string>;
getUri(metaDataType?: string, id?: string, apiKind?: ApiKind): Promise<string>;
private doInternal;
private doInternalByIds;
private doInternalById;
private getUri;
private handleResponse;
}
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SfdxClient = exports.ApiKind = exports.NO_CONTENT_CODE = void 0;
const tslib_1 = require("tslib");
const fs = require("fs");
const FormData = require("form-data");
const sfdx_tasks_1 = require("./sfdx-tasks");
const utils_1 = require("./utils");
const utils_2 = require("./utils");
const constants_1 = require(".//constants");
exports.NO_CONTENT_CODE = 204;

@@ -37,23 +39,10 @@ var ApiKind;

}
getMetadataSchemas(apiKind = ApiKind.DEFAULT) {
return tslib_1.__asyncGenerator(this, arguments, function* getMetadataSchemas_1() {
var e_1, _a;
const result = yield tslib_1.__await(this.doInternal(utils_2.RestAction.GET, null, apiKind));
if (result.isError) {
result.throw();
}
try {
for (var _b = tslib_1.__asyncValues(result.body.sobjects), _c; _c = yield tslib_1.__await(_b.next()), !_c.done;) {
const metaDataType = _c.value;
yield yield tslib_1.__await(metaDataType);
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_c && !_c.done && (_a = _b.return)) yield tslib_1.__await(_a.call(_b));
}
finally { if (e_1) throw e_1.error; }
}
});
async *getMetadataSchemas(apiKind = ApiKind.DEFAULT) {
const result = await this.doInternal(utils_2.RestAction.GET, null, apiKind);
if (result.isError) {
result.throw();
}
for await (const metaDataType of result.body.sobjects) {
yield metaDataType;
}
}

@@ -83,55 +72,29 @@ async getMetadataSchema(metaDataType, apiKind = ApiKind.DEFAULT) {

}
getByIds(metaDataType, ids, apiKind = ApiKind.DEFAULT) {
return tslib_1.__asyncGenerator(this, arguments, function* getByIds_1() {
var e_2, _a;
if (!metaDataType) {
throw new Error('metadataType parameter is required.');
async *getByIds(metaDataType, ids, apiKind = ApiKind.DEFAULT) {
if (!metaDataType) {
throw new Error('metadataType parameter is required.');
}
if (!ids) {
throw new Error('id parameter is required.');
}
for await (const result of this.doInternalByIds(utils_2.RestAction.GET, metaDataType, ids, null, apiKind)) {
if (result.isError) {
result.throw();
}
if (!ids) {
throw new Error('id parameter is required.');
}
try {
for (var _b = tslib_1.__asyncValues(this.doInternalByIds(utils_2.RestAction.GET, metaDataType, ids, null, apiKind)), _c; _c = yield tslib_1.__await(_b.next()), !_c.done;) {
const result = _c.value;
if (result.isError) {
result.throw();
}
yield yield tslib_1.__await(result);
}
}
catch (e_2_1) { e_2 = { error: e_2_1 }; }
finally {
try {
if (_c && !_c.done && (_a = _b.return)) yield tslib_1.__await(_a.call(_b));
}
finally { if (e_2) throw e_2.error; }
}
});
yield result;
}
}
getByRecords(metaDataType, records, recordIdField = SfdxClient.defailtIdField, apiKind = ApiKind.DEFAULT) {
return tslib_1.__asyncGenerator(this, arguments, function* getByRecords_1() {
var e_3, _a;
if (!metaDataType) {
throw new Error('metadataType parameter is required.');
async *getByRecords(metaDataType, records, recordIdField = SfdxClient.defailtIdField, apiKind = ApiKind.DEFAULT) {
if (!metaDataType) {
throw new Error('metadataType parameter is required.');
}
if (!records) {
throw new Error('records parameter is required.');
}
for await (const result of this.doInternalByIds(utils_2.RestAction.GET, metaDataType, records, recordIdField, apiKind)) {
if (result.isError) {
result.throw();
}
if (!records) {
throw new Error('records parameter is required.');
}
try {
for (var _b = tslib_1.__asyncValues(this.doInternalByIds(utils_2.RestAction.GET, metaDataType, records, recordIdField, apiKind)), _c; _c = yield tslib_1.__await(_b.next()), !_c.done;) {
const result = _c.value;
if (result.isError) {
result.throw();
}
yield yield tslib_1.__await(result);
}
}
catch (e_3_1) { e_3 = { error: e_3_1 }; }
finally {
try {
if (_c && !_c.done && (_a = _b.return)) yield tslib_1.__await(_a.call(_b));
}
finally { if (e_3) throw e_3.error; }
}
});
yield result;
}
}

@@ -152,58 +115,72 @@ /* eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types */

}
updateByRecords(metaDataType, records, recordIdField = SfdxClient.defailtIdField, apiKind = ApiKind.DEFAULT) {
return tslib_1.__asyncGenerator(this, arguments, function* updateByRecords_1() {
var e_4, _a;
if (!metaDataType) {
throw new Error('metadataType parameter is required.');
async *updateByRecords(metaDataType, records, recordIdField = SfdxClient.defailtIdField, apiKind = ApiKind.DEFAULT) {
if (!metaDataType) {
throw new Error('metadataType parameter is required.');
}
if (!records) {
throw new Error('records parameter is required.');
}
// Salesforce uses PATCH for updates
for await (const result of this.doInternalByIds(utils_2.RestAction.PATCH, metaDataType, records, recordIdField, apiKind, [exports.NO_CONTENT_CODE])) {
if (result.isError) {
result.throw();
}
if (!records) {
throw new Error('records parameter is required.');
}
try {
// Salesforce uses PATCH for updates
for (var _b = tslib_1.__asyncValues(this.doInternalByIds(utils_2.RestAction.PATCH, metaDataType, records, recordIdField, apiKind, [exports.NO_CONTENT_CODE])), _c; _c = yield tslib_1.__await(_b.next()), !_c.done;) {
const result = _c.value;
if (result.isError) {
result.throw();
}
yield yield tslib_1.__await(result);
}
}
catch (e_4_1) { e_4 = { error: e_4_1 }; }
finally {
try {
if (_c && !_c.done && (_a = _b.return)) yield tslib_1.__await(_a.call(_b));
}
finally { if (e_4) throw e_4.error; }
}
yield result;
}
}
/* eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types */
async doMultiPart(action, record, filePath, validStatusCodes = [200]) {
if (!record) {
throw new Error('record parameter is required.');
}
if (!filePath) {
throw new Error('filePath parameter is required.');
}
const id = utils_1.default.getFieldValue(record, SfdxClient.defailtIdField, true);
// Delete the id field as SFDC API returns BAD_REQUEST if the object has an ID
if (id) {
delete record[SfdxClient.defailtIdField];
}
const uri = await this.getUri('ContentVersion');
const result = await this.handleResponse(utils_2.RestAction.POST, uri, record, validStatusCodes);
result.id = id;
return result;
}
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
async postObjectMultipart(objectName, objectRecord, fileName, filePath) {
const form = new FormData();
const formContent = JSON.stringify(objectRecord);
const metaName = SfdxClient.metaDataInfo[objectName].MetaName;
form.append(metaName, formContent, {
contentType: constants_1.default.MIME_JSON,
});
const dataName = SfdxClient.metaDataInfo[objectName].DataName;
const data = fs.createReadStream(filePath);
form.append(dataName, data, {
filename: fileName,
contentType: utils_1.default.getMIMEType(fileName), // 'application/octet-stream',
});
const uri = await this.getUri(objectName);
const result = await utils_1.default.getRestResult(utils_2.RestAction.POST, uri, form, form.getHeaders({ Authorization: `Bearer ${this.orgInfo.accessToken}` }), [200, 201]);
// Log the form data if an error occurs
if (!result.isError) {
result.id = result.body.id;
}
return result;
}
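
A hedged sketch of the multipart upload path above, using the ContentVersion mapping from metaDataInfo; the require path, constructor shape, and example field values are assumptions:

// Illustrative sketch: upload a local file as a ContentVersion.
const { SfdxClient } = require('./compiled/lib/sfdx-client'); // path assumed
async function uploadContentVersion(orgAliasOrUsername, localFilePath) {
    const client = new SfdxClient(orgAliasOrUsername); // constructor shape assumed
    const record = {
        Title: 'Example upload',
        PathOnClient: localFilePath, // matches metaDataInfo.ContentVersion.Filename
    };
    const result = await client.postObjectMultipart('ContentVersion', record, 'example.pdf', localFilePath);
    if (result.isError) {
        result.throw();
    }
    console.log(`Created ContentVersion ${result.id}`);
}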
do(action, metaDataType, records = null, recordIdField = SfdxClient.defailtIdField, apiKind = ApiKind.DEFAULT, validStatusCodes = [200]) {
return tslib_1.__asyncGenerator(this, arguments, function* do_1() {
var e_5, _a;
if (!metaDataType) {
throw new Error('metadataType parameter is required.');
}
if (records) {
try {
for (var _b = tslib_1.__asyncValues(this.doInternalByIds(action, metaDataType, records, recordIdField, apiKind, validStatusCodes)), _c; _c = yield tslib_1.__await(_b.next()), !_c.done;) {
const result = _c.value;
if (result.isError) {
result.throw();
}
yield yield tslib_1.__await(result);
}
async *do(action, metaDataType, records = null, recordIdField = SfdxClient.defailtIdField, apiKind = ApiKind.DEFAULT, validStatusCodes = [200]) {
if (!metaDataType) {
throw new Error('metadataType parameter is required.');
}
if (records) {
for await (const result of this.doInternalByIds(action, metaDataType, records, recordIdField, apiKind, validStatusCodes)) {
if (result.isError) {
result.throw();
}
catch (e_5_1) { e_5 = { error: e_5_1 }; }
finally {
try {
if (_c && !_c.done && (_a = _b.return)) yield tslib_1.__await(_a.call(_b));
}
finally { if (e_5) throw e_5.error; }
}
yield result;
}
else {
yield yield tslib_1.__await(yield tslib_1.__await(this.doInternal(action, metaDataType, apiKind, null, validStatusCodes)));
}
});
}
else {
yield await this.doInternal(action, metaDataType, apiKind, null, validStatusCodes);
}
}
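
A hedged sketch of the generic do() generator, here issuing DELETEs where NO_CONTENT_CODE (HTTP 204) counts as success; require paths, constructor shape, and the object name are assumptions:

// Illustrative sketch: delete records through the generic do() generator.
const { SfdxClient, ApiKind, NO_CONTENT_CODE } = require('./compiled/lib/sfdx-client'); // path assumed
const { RestAction } = require('./compiled/lib/utils'); // path assumed
async function deleteRecords(orgAliasOrUsername, records) {
    const client = new SfdxClient(orgAliasOrUsername); // constructor shape assumed
    // records: objects carrying an 'Id' field (see the recordIdField handling above)
    for await (const result of client.do(RestAction.DELETE, 'Account', records, 'Id', ApiKind.DEFAULT, [NO_CONTENT_CODE])) {
        console.log(`Deleted ${result.id}`);
    }
}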

@@ -227,25 +204,2 @@ /* eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types */

}
async doInternal(action = utils_2.RestAction.GET, metaDataType = null, record = null, apiKind = ApiKind.DEFAULT, validStatusCodes = null) {
const uri = await this.getUri(metaDataType, null, apiKind);
return await this.handleResponse(action, uri, record, validStatusCodes);
}
doInternalByIds(action = utils_2.RestAction.GET, metaDataType = null, records, recordIdField = SfdxClient.defailtIdField, apiKind = ApiKind.DEFAULT, validStatusCodes = null) {
return tslib_1.__asyncGenerator(this, arguments, function* doInternalByIds_1() {
for (const record of records) {
yield yield tslib_1.__await(yield tslib_1.__await(this.doInternalById(action, metaDataType, record, recordIdField, apiKind, validStatusCodes)));
}
});
}
async doInternalById(action = utils_2.RestAction.GET, metaDataType = null, record, recordIdField = SfdxClient.defailtIdField, apiKind = ApiKind.DEFAULT, validStatusCodes = null) {
let id = null;
if (apiKind !== ApiKind.COMPOSITE && record) {
id = utils_1.default.getFieldValue(record, recordIdField, true);
// Delete the id field as SFDC API restuen BAD_REQUEST if the object has an ID
delete record[recordIdField];
}
const uri = await this.getUri(metaDataType, id, apiKind);
const result = await this.handleResponse(action, uri, record, validStatusCodes);
result.id = id;
return result;
}
async getUri(metaDataType = null, id = null, apiKind = ApiKind.DEFAULT) {

@@ -280,2 +234,23 @@ await this.initialize(false);

}
async doInternal(action = utils_2.RestAction.GET, metaDataType = null, record = null, apiKind = ApiKind.DEFAULT, validStatusCodes = null) {
const uri = await this.getUri(metaDataType, null, apiKind);
return await this.handleResponse(action, uri, record, validStatusCodes);
}
async *doInternalByIds(action = utils_2.RestAction.GET, metaDataType = null, records, recordIdField = SfdxClient.defailtIdField, apiKind = ApiKind.DEFAULT, validStatusCodes = null) {
for (const record of records) {
yield await this.doInternalById(action, metaDataType, record, recordIdField, apiKind, validStatusCodes);
}
}
async doInternalById(action = utils_2.RestAction.GET, metaDataType = null, record, recordIdField = SfdxClient.defailtIdField, apiKind = ApiKind.DEFAULT, validStatusCodes = null) {
let id = null;
if (apiKind !== ApiKind.COMPOSITE && record) {
id = utils_1.default.getFieldValue(record, recordIdField, true);
// Delete the id field as SFDC API returns BAD_REQUEST if the object has an ID
delete record[recordIdField];
}
const uri = await this.getUri(metaDataType, id, apiKind);
const result = await this.handleResponse(action, uri, record, validStatusCodes);
result.id = id;
return result;
}
async handleResponse(action = utils_2.RestAction.GET, uri, record = null, validStatusCodes = null) {

@@ -286,3 +261,20 @@ return await utils_1.default.getRestResult(action, uri, record, this.headers, validStatusCodes);

exports.SfdxClient = SfdxClient;
SfdxClient.metaDataInfo = {
ContentVersion: {
MetaName: 'entity_content',
DataName: 'VersionData',
Filename: 'PathOnClient'
},
Document: {
MetaName: 'entity_document',
DataName: 'Body',
Filename: 'Name'
},
Attachment: {
MetaName: 'entity_document',
DataName: 'Body',
Filename: 'Name'
},
};
SfdxClient.defailtIdField = 'id';
//# sourceMappingURL=sfdx-client.js.map

@@ -0,0 +0,0 @@ /// <reference types="node" />

@@ -12,3 +12,3 @@ "use strict";

return new Promise((resolve, reject) => {
child_process_1.exec(cmd, SfdxCore.bufferOptions, (error, stdout) => {
(0, child_process_1.exec)(cmd, SfdxCore.bufferOptions, (error, stdout) => {
let response;

@@ -77,3 +77,3 @@ try {

for (const sType of packageObj.Package.types) {
if (sType === null || sType === void 0 ? void 0 : sType.members) {
if (sType?.members) {
const memPopIndexes = [];

@@ -91,3 +91,3 @@ let memIndex = 0;

}
if (!(sType === null || sType === void 0 ? void 0 : sType.members) || sType.members.length === 0) {
if (!sType?.members || sType.members.length === 0) {
popIndexes.push(typeIndex);

@@ -94,0 +94,0 @@ }

@@ -0,0 +0,0 @@ export declare abstract class XmlPermission {

@@ -0,0 +0,0 @@ "use strict";

@@ -0,0 +0,0 @@ export declare class PackageDirectory {

@@ -0,0 +0,0 @@ "use strict";

@@ -0,0 +0,0 @@ export declare class SfdxEntity {

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SfdxQuery = exports.SfdxCodeCoverageItem = exports.SfdxCodeCoverage = exports.SfdxObjectPermission = exports.SfdxFieldPermission = exports.SfdxPermission = exports.SfdxPermissionSet = exports.SfdxFolder = exports.SfdxSeupEntityAccess = exports.SfdxEntity = void 0;
const tslib_1 = require("tslib");
const constants_1 = require("./constants");

@@ -121,3 +120,2 @@ const sfdx_core_1 = require("./sfdx-core");

static async getPermissions(usernameOrAlias) {
var _a;
const query = 'SELECT Id,Name,Profile.Name,IsOwnedByProfile FROM PermissionSet ORDER BY Profile.Name, Name';

@@ -130,3 +128,3 @@ const records = await SfdxQuery.doSoqlQuery(usernameOrAlias, query);

profile.name = record.Name;
profile.profileName = (_a = record.Profile) === null || _a === void 0 ? void 0 : _a.Name;
profile.profileName = record.Profile?.Name;
profile.isOwnedByProfile = record.IsOwnedByProfile;

@@ -268,3 +266,2 @@ profileMap.set(profile.id, profile);

static async getCodeCoverage(usernameOrAlias) {
var _a;
if (!usernameOrAlias) {

@@ -280,3 +277,3 @@ return null;

coverageItem.id = record.ApexClassOrTriggerId;
coverageItem.name = (_a = record.ApexClassOrTrigger) === null || _a === void 0 ? void 0 : _a.Name;
coverageItem.name = record.ApexClassOrTrigger?.Name;
coverageItem.uncoveredLines = record.Coverage.uncoveredLines || [];

@@ -288,44 +285,29 @@ coverageItem.coveredLines = record.Coverage.coveredLines || [];

}
static waitForRecordCount(usernameOrAlias, query, recordCount = 0, maxWaitSeconds = 60, sleepMiliseconds = 5000) {
return tslib_1.__asyncGenerator(this, arguments, function* waitForRecordCount_1() {
const maxCounter = (maxWaitSeconds * 1000) / sleepMiliseconds;
let counter = 0;
let records = [];
while (maxCounter <= 0 || counter <= maxCounter) {
yield tslib_1.__await(utils_1.default.sleep(sleepMiliseconds));
records = yield tslib_1.__await(SfdxQuery.doSoqlQuery(usernameOrAlias, query));
yield yield tslib_1.__await(records.length);
counter++;
if (records.length === recordCount) {
break;
}
static async *waitForRecordCount(usernameOrAlias, query, recordCount = 0, maxWaitSeconds = 60, sleepMiliseconds = 5000) {
const maxCounter = (maxWaitSeconds * 1000) / sleepMiliseconds;
let counter = 0;
let records = [];
while (maxCounter <= 0 || counter <= maxCounter) {
await utils_1.default.sleep(sleepMiliseconds);
records = await SfdxQuery.doSoqlQuery(usernameOrAlias, query);
yield records.length;
counter++;
if (records.length === recordCount) {
break;
}
});
}
}
static waitForApexTests(username, waitCountMaxSeconds = 0, createdDate = new Date().toJSON()) {
return tslib_1.__asyncGenerator(this, arguments, function* waitForApexTests_1() {
var e_1, _a;
const query = `SELECT ApexClassId, ShouldSkipCodeCoverage, Status, CreatedDate FROM ApexTestQueueItem WHERE CreatedDate > ${createdDate} AND Status NOT IN ('Completed', 'Failed', 'Aborted')`;
const targetCount = 0;
let recordCount = 0;
// Check every 30 seconds or waitCountMaxSeconds so we don't waste a bunch of queries
const interval = waitCountMaxSeconds >= 30 ? 30000 : waitCountMaxSeconds;
try {
for (var _b = tslib_1.__asyncValues(SfdxQuery.waitForRecordCount(username, query, targetCount, waitCountMaxSeconds, interval)), _c; _c = yield tslib_1.__await(_b.next()), !_c.done;) {
recordCount = _c.value;
yield yield tslib_1.__await(recordCount);
if (recordCount === targetCount) {
break;
}
}
static async *waitForApexTests(username, waitCountMaxSeconds = 0, createdDate = new Date().toJSON()) {
const query = `SELECT ApexClassId, ShouldSkipCodeCoverage, Status, CreatedDate FROM ApexTestQueueItem WHERE CreatedDate > ${createdDate} AND Status NOT IN ('Completed', 'Failed', 'Aborted')`;
const targetCount = 0;
let recordCount = 0;
// Check every 30 seconds or waitCountMaxSeconds so we don't waste a bunch of queries
const interval = waitCountMaxSeconds >= 30 ? 30000 : waitCountMaxSeconds;
for await (recordCount of SfdxQuery.waitForRecordCount(username, query, targetCount, waitCountMaxSeconds, interval)) {
yield recordCount;
if (recordCount === targetCount) {
break;
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_c && !_c.done && (_a = _b.return)) yield tslib_1.__await(_a.call(_b));
}
finally { if (e_1) throw e_1.error; }
}
return yield tslib_1.__await(recordCount);
});
}
return recordCount;
}

@@ -332,0 +314,0 @@ // Gets the SfdxSetupEntityAccess inforamtion for the specified SetupEntityTypes

@@ -0,0 +0,0 @@ import { SfdxEntity } from './sfdx-query';

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SfdxTasks = exports.SfdxOrgInfo = exports.SfdxJobInfo = void 0;
const tslib_1 = require("tslib");
const os = require("os");
const path = require("path");

@@ -46,3 +44,3 @@ const fs_1 = require("fs");

? []
: ts_types_1.ensureArray(response.metadataObjects);
: (0, ts_types_1.ensureArray)(response.metadataObjects);
}

@@ -62,70 +60,37 @@ static async executeAnonymousBlock(usernameOrAlias, apexFilePath, logLevel = 'debug') {

}
static getTypesForPackage(usernameOrAlias, describeMetadatas, namespaces = null) {
return tslib_1.__asyncGenerator(this, arguments, function* getTypesForPackage_1() {
var e_1, _a, e_2, _b, e_3, _c;
let folderPathMap;
for (const describeMetadata of describeMetadatas) {
const members = [];
if (!describeMetadata.inFolder) {
try {
for (var _d = (e_1 = void 0, tslib_1.__asyncValues(this.listMetadata(usernameOrAlias, describeMetadata.xmlName, namespaces))), _e; _e = yield tslib_1.__await(_d.next()), !_e.done;) {
const result = _e.value;
members.push(result.fullName);
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_e && !_e.done && (_a = _d.return)) yield tslib_1.__await(_a.call(_d));
}
finally { if (e_1) throw e_1.error; }
}
static async *getTypesForPackage(usernameOrAlias, describeMetadatas, namespaces = null) {
let folderPathMap;
for (const describeMetadata of describeMetadatas) {
const members = [];
if (!describeMetadata.inFolder) {
for await (const result of this.listMetadata(usernameOrAlias, describeMetadata.xmlName, namespaces)) {
members.push(result.fullName);
}
else {
const folderMetaName = describeMetadata.xmlName === sfdx_core_1.SfdxCore.EMAIL_TEMPLATE_XML_NAME
? sfdx_core_1.SfdxCore.EMAIL_TEMPLATE_XML_NAME
: `${describeMetadata.xmlName}Folder`;
// Get SOQL folder data (ONCE!)
if (!folderPathMap) {
folderPathMap = yield tslib_1.__await(this.getFolderSOQLData(usernameOrAlias));
}
else {
const folderMetaName = describeMetadata.xmlName === sfdx_core_1.SfdxCore.EMAIL_TEMPLATE_XML_NAME
? sfdx_core_1.SfdxCore.EMAIL_TEMPLATE_XML_NAME
: `${describeMetadata.xmlName}Folder`;
// Get SOQL folder data (ONCE!)
if (!folderPathMap) {
folderPathMap = await this.getFolderSOQLData(usernameOrAlias);
}
// Iterate all the folder metas
for await (const folderMeta of this.listMetadata(usernameOrAlias, folderMetaName, namespaces)) {
// Set the parent Id (used for nested folders)
// Salesforce does not return the full path in the metadada
//
const folderPath = folderPathMap.has(folderMeta.id)
? folderPathMap.get(folderMeta.id)
: folderMeta.fullName;
// Add the meta for just the folder
members.push(folderPath);
for await (const inFolderMetadata of this.listMetadataInFolder(usernameOrAlias, describeMetadata.xmlName, folderMeta.fullName)) {
// Add the meta for the item in the folder
members.push([folderPath, path.basename(inFolderMetadata.fullName)].join('/'));
}
try {
// Iterate all the folder metas
for (var _f = (e_2 = void 0, tslib_1.__asyncValues(this.listMetadata(usernameOrAlias, folderMetaName, namespaces))), _g; _g = yield tslib_1.__await(_f.next()), !_g.done;) {
const folderMeta = _g.value;
// Set the parent Id (used for nested folders)
// Salesforce does not return the full path in the metadada
//
const folderPath = folderPathMap.has(folderMeta.id)
? folderPathMap.get(folderMeta.id)
: folderMeta.fullName;
// Add the meta for just the folder
members.push(folderPath);
try {
for (var _h = (e_3 = void 0, tslib_1.__asyncValues(this.listMetadataInFolder(usernameOrAlias, describeMetadata.xmlName, folderMeta.fullName))), _j; _j = yield tslib_1.__await(_h.next()), !_j.done;) {
const inFolderMetadata = _j.value;
// Add the meta for the item in the folder
members.push([folderPath, path.basename(inFolderMetadata.fullName)].join('/'));
}
}
catch (e_3_1) { e_3 = { error: e_3_1 }; }
finally {
try {
if (_j && !_j.done && (_c = _h.return)) yield tslib_1.__await(_c.call(_h));
}
finally { if (e_3) throw e_3.error; }
}
}
}
catch (e_2_1) { e_2 = { error: e_2_1 }; }
finally {
try {
if (_g && !_g.done && (_b = _f.return)) yield tslib_1.__await(_b.call(_f));
}
finally { if (e_2) throw e_2.error; }
}
}
yield yield tslib_1.__await({ name: describeMetadata.xmlName, members });
}
});
yield { name: describeMetadata.xmlName, members };
}
}
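
A hedged sketch of consuming getTypesForPackage; the require path is assumed, and describeMetadatas is expected to be describe-metadata entries carrying xmlName and inFolder:

// Illustrative sketch: build a { name, members } list for a package manifest.
const { SfdxTasks } = require('./compiled/lib/sfdx-tasks'); // path assumed
async function collectPackageTypes(orgAliasOrUsername, describeMetadatas) {
    const types = [];
    // Passing null for namespaces skips any metadata that has a namespacePrefix.
    for await (const sType of SfdxTasks.getTypesForPackage(orgAliasOrUsername, describeMetadatas, null)) {
        types.push(sType);
    }
    return types;
}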

@@ -154,51 +119,47 @@ static async listMetadatas(usernameOrAlias, metadataTypes, namespaces = null) {

}
static listMetadata(usernameOrAlias, metadataType, namespaces = null) {
return tslib_1.__asyncGenerator(this, arguments, function* listMetadata_1() {
const results = yield tslib_1.__await(sfdx_core_1.SfdxCore.command(`${constants_1.default.SFDX_MDAPI_LISTMETADATA} --json -m ${metadataType} -u ${usernameOrAlias}`));
// If there are no instances of the metadatatype SFDX just returns {status:0}
if (results) {
let resultsArray;
try {
resultsArray = ts_types_1.ensureArray(results);
static async *listMetadata(usernameOrAlias, metadataType, namespaces = null) {
const results = await sfdx_core_1.SfdxCore.command(`${constants_1.default.SFDX_MDAPI_LISTMETADATA} --json -m ${metadataType} -u ${usernameOrAlias}`);
// If there are no instances of the metadatatype SFDX just returns {status:0}
if (results) {
let resultsArray;
try {
resultsArray = (0, ts_types_1.ensureArray)(results);
}
catch {
resultsArray = [results];
}
for (const result of resultsArray) {
// If we have a metadata namespace AND
// We are excluding namespaces OR
// The list of allowed namespaces does not include the metdata namespace
// Continue.
if (result.namespacePrefix && (!namespaces || !namespaces.has(result.namespacePrefix))) {
continue;
}
catch (_a) {
resultsArray = [results];
}
for (const result of resultsArray) {
// If we have a metadata namespace AND
// We are excluding namespaces OR
// The list of allowed namespaces does not include the metdata namespace
// Continue.
if (result.namespacePrefix && (!namespaces || !namespaces.has(result.namespacePrefix))) {
continue;
}
yield yield tslib_1.__await(result);
}
yield result;
}
});
}
}
static listMetadataInFolder(usernameOrAlias, metadataType, folderName, namespaces = null) {
return tslib_1.__asyncGenerator(this, arguments, function* listMetadataInFolder_1() {
const results = yield tslib_1.__await(sfdx_core_1.SfdxCore.command(`${constants_1.default.SFDX_MDAPI_LISTMETADATA} --json -m ${metadataType} --folder ${folderName} -u ${usernameOrAlias}`));
// If there are no instances of the metadatatype SFDX just returns {status:0}
if (results) {
let resultsArray;
try {
resultsArray = ts_types_1.ensureArray(results);
static async *listMetadataInFolder(usernameOrAlias, metadataType, folderName, namespaces = null) {
const results = await sfdx_core_1.SfdxCore.command(`${constants_1.default.SFDX_MDAPI_LISTMETADATA} --json -m ${metadataType} --folder ${folderName} -u ${usernameOrAlias}`);
// If there are no instances of the metadatatype SFDX just returns {status:0}
if (results) {
let resultsArray;
try {
resultsArray = (0, ts_types_1.ensureArray)(results);
}
catch {
resultsArray = [results];
}
for (const result of resultsArray) {
// If we have a metadata namespace AND
// We are excluding namespaces OR
// The list of allowed namespaces does not include the metdata namespace
// Continue.
if (result.namespacePrefix && (!namespaces || !namespaces.has(result.namespacePrefix))) {
continue;
}
catch (_a) {
resultsArray = [results];
}
for (const result of resultsArray) {
// If we have a metadata namespace AND
// We are excluding namespaces OR
// The list of allowed namespaces does not include the metdata namespace
// Continue.
if (result.namespacePrefix && (!namespaces || !namespaces.has(result.namespacePrefix))) {
continue;
}
yield yield tslib_1.__await(result);
}
yield result;
}
});
}
}

@@ -216,7 +177,7 @@ static async describeObject(usernameOrAlias, objectName) {

// Create for writing - truncates if exists
const stream = fs_1.openSync(tempFileName, 'w');
const stream = (0, fs_1.openSync)(tempFileName, 'w');
// NOTE: Do NOT include spaces between fields...results in an error
fs_1.writeSync(stream, `ApexClassId,ShouldSkipCodeCoverage${os.EOL}`);
(0, fs_1.writeSync)(stream, `ApexClassId,ShouldSkipCodeCoverage${constants_1.default.EOL}`);
for (const sfdxEntity of sfdxEntities) {
fs_1.writeSync(stream, `${sfdxEntity.id},${shouldSkipCodeCoverage}${os.EOL}`);
(0, fs_1.writeSync)(stream, `${sfdxEntity.id},${shouldSkipCodeCoverage}${constants_1.default.EOL}`);
}

@@ -240,15 +201,13 @@ const command = `${constants_1.default.SFDX_DATA_UPSERT} --json -s ApexTestQueueItem -i Id -f "${tempFileName}" -u ${usernameOrAlias}`;

}
static waitForJob(usernameOrAlias, jobInfo, maxWaitSeconds = -1, sleepMiliseconds = 5000) {
return tslib_1.__asyncGenerator(this, arguments, function* waitForJob_1() {
const maxCounter = (maxWaitSeconds * 1000) / sleepMiliseconds;
jobInfo.statusCount = 0;
while ((maxCounter <= 0 || jobInfo.statusCount <= maxCounter) && !jobInfo.isDone()) {
yield tslib_1.__await(utils_1.default.sleep(sleepMiliseconds));
jobInfo = yield tslib_1.__await(SfdxTasks.getBulkJobStatus(usernameOrAlias, jobInfo));
jobInfo.maxStatusCount = maxCounter;
jobInfo.statusCount++;
yield yield tslib_1.__await(jobInfo);
}
return yield tslib_1.__await(jobInfo);
});
static async *waitForJob(usernameOrAlias, jobInfo, maxWaitSeconds = -1, sleepMiliseconds = 5000) {
const maxCounter = (maxWaitSeconds * 1000) / sleepMiliseconds;
jobInfo.statusCount = 0;
while ((maxCounter <= 0 || jobInfo.statusCount <= maxCounter) && !jobInfo.isDone()) {
await utils_1.default.sleep(sleepMiliseconds);
jobInfo = await SfdxTasks.getBulkJobStatus(usernameOrAlias, jobInfo);
jobInfo.maxStatusCount = maxCounter;
jobInfo.statusCount++;
yield jobInfo;
}
return jobInfo;
}
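
A hedged sketch of polling a bulk job with waitForJob; the require path is assumed, and jobInfo is whatever the package's bulk helpers return (it must implement isDone()):

// Illustrative sketch: poll a bulk job until it reports completion.
const { SfdxTasks } = require('./compiled/lib/sfdx-tasks'); // path assumed
async function pollBulkJob(orgAliasOrUsername, jobInfo) {
    // maxWaitSeconds = -1 polls until done; refreshed job info is yielded every 5 seconds.
    for await (const status of SfdxTasks.waitForJob(orgAliasOrUsername, jobInfo, -1, 5000)) {
        console.log(`Bulk job poll #${status.statusCount}`);
    }
}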

@@ -339,5 +298,5 @@ static async getOrgInfo(orgAliasOrUsername) {

try {
resultsArray = ts_types_1.ensureArray(results);
resultsArray = (0, ts_types_1.ensureArray)(results);
}
catch (_a) {
catch {
resultsArray = [results];

@@ -344,0 +303,0 @@ }

@@ -0,0 +0,0 @@ import { OptionsBase } from './options';

@@ -0,0 +0,0 @@ "use strict";

@@ -40,2 +40,5 @@ import { Logger } from '@salesforce/core';

static isJsonEnabled: boolean;
static ReadFileBase64EncodingOption: {
encoding: string;
};
static TempFilesPath: string;

@@ -84,5 +87,8 @@ static defaultXmlOptions: {

static chunkRecords(recordsToChunk: any[], chunkSize: number): any[];
static getRestResult(action: RestAction, url: string, parameter?: any, headers?: any, validStatusCodes?: [], isFollowRedirects?: boolean): Promise<RestResult>;
static getRestResult(action: RestAction, url: string, parameter?: any, headers?: any, validStatusCodes?: number[], isFollowRedirects?: boolean): Promise<RestResult>;
static isDirectory(filePath: string): Promise<boolean>;
static normalizePath(filePath: string): string;
static parseDelimitedLine(delimitedLine: string, delimiter?: string, wrapperChars?: string[], skipChars?: string[]): string[];
static parseCSVFile(csvFilePath: string, delimiter?: string, wrapperChars?: string[]): AsyncGenerator<any, void, void>;
static getMIMEType(filename: string): string;
}
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.RestResult = exports.IOItem = exports.RestAction = exports.LoggerLevel = exports.NO_CONTENT_CODE = void 0;
const tslib_1 = require("tslib");
const path = require("path");

@@ -9,2 +8,3 @@ const fs_1 = require("fs");

const readline_1 = require("readline");
const mime = require("mime-types");
const xpath = require("xpath");

@@ -56,5 +56,4 @@ const xmldom_1 = require("@xmldom/xmldom");

get redirectUrl() {
var _a;
return this.isRedirect
? (_a = this.headers) === null || _a === void 0 ? void 0 : _a.location
? this.headers?.location
: null;

@@ -102,137 +101,85 @@ }

}
static getFiles(folderPath, isRecursive = true) {
return tslib_1.__asyncGenerator(this, arguments, function* getFiles_1() {
var e_1, _a;
try {
for (var _b = tslib_1.__asyncValues(Utils.getItems(folderPath, IOItem.File, isRecursive)), _c; _c = yield tslib_1.__await(_b.next()), !_c.done;) {
const item = _c.value;
yield yield tslib_1.__await(item);
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_c && !_c.done && (_a = _b.return)) yield tslib_1.__await(_a.call(_b));
}
finally { if (e_1) throw e_1.error; }
}
});
static async *getFiles(folderPath, isRecursive = true) {
for await (const item of Utils.getItems(folderPath, IOItem.File, isRecursive)) {
yield item;
}
}
static getFolders(folderPath, isRecursive = true) {
return tslib_1.__asyncGenerator(this, arguments, function* getFolders_1() {
var e_2, _a;
try {
for (var _b = tslib_1.__asyncValues(Utils.getItems(folderPath, IOItem.Folder, isRecursive)), _c; _c = yield tslib_1.__await(_b.next()), !_c.done;) {
const item = _c.value;
yield yield tslib_1.__await(item);
}
}
catch (e_2_1) { e_2 = { error: e_2_1 }; }
finally {
try {
if (_c && !_c.done && (_a = _b.return)) yield tslib_1.__await(_a.call(_b));
}
finally { if (e_2) throw e_2.error; }
}
});
static async *getFolders(folderPath, isRecursive = true) {
for await (const item of Utils.getItems(folderPath, IOItem.Folder, isRecursive)) {
yield item;
}
}
static getItems(rootPath, itemKind, isRecursive = true, depth = 0) {
return tslib_1.__asyncGenerator(this, arguments, function* getItems_1() {
var e_3, _a;
let fileItems;
// If we have a wildcarded path - lets use glob
const isGlob = yield tslib_1.__await(this.glob.hasMagic(rootPath));
if (isGlob) {
// Globs should be specific so just return
fileItems = yield tslib_1.__await(this.glob(rootPath));
for (const filePath of fileItems) {
yield yield tslib_1.__await(Utils.normalizePath(filePath));
}
return yield tslib_1.__await(void 0);
static async *getItems(rootPath, itemKind, isRecursive = true, depth = 0) {
let fileItems;
// If we have a wildcarded path - lets use glob
const isGlob = await this.glob.hasMagic(rootPath);
if (isGlob) {
// Globs should be specific so just return
fileItems = await this.glob(rootPath);
for (const filePath of fileItems) {
yield Utils.normalizePath(filePath);
}
const stats = yield tslib_1.__await(Utils.getPathStat(rootPath));
if (!stats) {
/* eslint-disable-next-line no-console */
console.log(`WARNING: ${rootPath} not found.`);
return yield tslib_1.__await(void 0);
return;
}
const stats = await Utils.getPathStat(rootPath);
if (!stats) {
/* eslint-disable-next-line no-console */
console.log(`WARNING: ${rootPath} not found.`);
return;
}
if (stats.isFile()) {
if (itemKind !== IOItem.Folder && depth !== 0) {
yield rootPath;
}
if (stats.isFile()) {
if (itemKind !== IOItem.Folder && depth !== 0) {
yield yield tslib_1.__await(rootPath);
// Nothing else to do
return;
}
// We are on a folder
if (itemKind !== IOItem.File && depth !== 0) {
yield rootPath;
}
// Are we recursive or just starting at the root folder
if (isRecursive || depth === 0) {
depth++;
const subItems = await fs_1.promises.readdir(rootPath);
for (const subItem of subItems) {
const subItemPath = path.join(rootPath, subItem);
const subStats = await Utils.getPathStat(subItemPath);
if (!subStats) {
throw new Error('Invalid Path - NO STATS');
}
// Nothing else to do
return yield tslib_1.__await(void 0);
}
// We are on a folder
if (itemKind !== IOItem.File && depth !== 0) {
yield yield tslib_1.__await(rootPath);
}
// Are we recursive or just starting at the root folder
if (isRecursive || depth === 0) {
depth++;
const subItems = yield tslib_1.__await(fs_1.promises.readdir(rootPath));
for (const subItem of subItems) {
const subItemPath = path.join(rootPath, subItem);
const subStats = yield tslib_1.__await(Utils.getPathStat(subItemPath));
if (!subStats) {
throw new Error('Invalid Path - NO STATS');
if (subStats.isFile()) {
if (itemKind !== IOItem.Folder) {
yield Utils.normalizePath(subItemPath);
}
if (subStats.isFile()) {
if (itemKind !== IOItem.Folder) {
yield yield tslib_1.__await(Utils.normalizePath(subItemPath));
}
continue;
continue;
}
// We are on a folder again
if (itemKind !== IOItem.File) {
yield Utils.normalizePath(subItemPath);
}
if (isRecursive) {
for await (const subFilePath of Utils.getItems(subItemPath, itemKind, isRecursive, depth)) {
yield subFilePath;
}
// We are on a folder again
if (itemKind !== IOItem.File) {
yield yield tslib_1.__await(Utils.normalizePath(subItemPath));
}
if (isRecursive) {
try {
for (var _b = (e_3 = void 0, tslib_1.__asyncValues(Utils.getItems(subItemPath, itemKind, isRecursive, depth))), _c; _c = yield tslib_1.__await(_b.next()), !_c.done;) {
const subFilePath = _c.value;
yield yield tslib_1.__await(subFilePath);
}
}
catch (e_3_1) { e_3 = { error: e_3_1 }; }
finally {
try {
if (_c && !_c.done && (_a = _b.return)) yield tslib_1.__await(_a.call(_b));
}
finally { if (e_3) throw e_3.error; }
}
}
}
}
});
}
}
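
A hedged sketch of the file enumeration helpers above; the require path is assumed:

// Illustrative sketch: recursively collect .js files (a glob path is also accepted).
const Utils = require('./compiled/lib/utils').default; // path assumed
async function findJsFiles(rootFolder) {
    const files = [];
    for await (const filePath of Utils.getFiles(rootFolder, true)) {
        if (filePath.endsWith('.js')) {
            files.push(filePath);
        }
    }
    return files;
}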
static readFileLines(filePath) {
return tslib_1.__asyncGenerator(this, arguments, function* readFileLines_1() {
var e_4, _a;
if (!(yield tslib_1.__await(Utils.pathExists(filePath)))) {
return yield tslib_1.__await(void 0);
}
const rl = readline_1.createInterface({
input: fs_2.createReadStream(filePath),
// Note: we use the crlfDelay option to recognize all instances of CR LF
// ('\r\n') in input.txt as a single line break.
crlfDelay: Infinity,
});
try {
// Walk the file
/* eslint-disable @typescript-eslint/ban-ts-comment */
// @ts-ignore
for (var rl_1 = tslib_1.__asyncValues(rl), rl_1_1; rl_1_1 = yield tslib_1.__await(rl_1.next()), !rl_1_1.done;) {
const line = rl_1_1.value;
yield yield tslib_1.__await(line);
}
}
catch (e_4_1) { e_4 = { error: e_4_1 }; }
finally {
try {
if (rl_1_1 && !rl_1_1.done && (_a = rl_1.return)) yield tslib_1.__await(_a.call(rl_1));
}
finally { if (e_4) throw e_4.error; }
}
static async *readFileLines(filePath) {
if (!(await Utils.pathExists(filePath))) {
return;
}
const rl = (0, readline_1.createInterface)({
input: (0, fs_2.createReadStream)(filePath),
// Note: we use the crlfDelay option to recognize all instances of CR LF
// ('\r\n') in input.txt as a single line break.
crlfDelay: Infinity,
});
// Walk the file
/* eslint-disable @typescript-eslint/ban-ts-comment */
// @ts-ignore
for await (const line of rl) {
yield line;
}
}

@@ -366,3 +313,3 @@ /* eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types */

}
const options = xmlOptions !== null && xmlOptions !== void 0 ? xmlOptions : Utils.defaultXmlOptions;
const options = xmlOptions ?? Utils.defaultXmlOptions;
let xml = new xml2js.Builder(options).buildObject(metadata);

@@ -389,3 +336,3 @@ if (options.eofChar) {

}
const options = xmlOptions !== null && xmlOptions !== void 0 ? xmlOptions : Utils.defaultXmlOptions;
const options = xmlOptions ?? Utils.defaultXmlOptions;
const xmlString = await fs_1.promises.readFile(filePath, options.encoding);

@@ -480,5 +427,75 @@ /* eslint-disable-next-line @typescript-eslint/no-unsafe-return */

}
static parseDelimitedLine(delimitedLine, delimiter = ',', wrapperChars = constants_1.default.DEFAULT_CSV_TEXT_WRAPPERS, skipChars = [constants_1.default.EOL, constants_1.default.CR, constants_1.default.LF]) {
if (delimitedLine === null) {
return null;
}
const parts = [];
let part = null;
let inWrapper = false;
const addPart = function (ch) {
part = part ? part + ch : ch;
return part;
};
let lastChar = null;
for (const ch of delimitedLine) {
lastChar = ch;
if (skipChars.includes(lastChar)) {
continue;
}
if (lastChar === delimiter) {
if (inWrapper) {
addPart(lastChar);
}
else {
// insert a blank string if part is null
parts.push(part);
part = null;
}
continue;
}
// is this part wrapped? (i.e. "this is wrapped, becuase it has the delimiter")
if (wrapperChars.includes(lastChar)) {
inWrapper = !inWrapper;
if (part === null) {
part = '';
}
continue;
}
addPart(lastChar);
}
// do we have a trailing part?
if (part || lastChar === delimiter) {
parts.push(part);
}
return parts;
}
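
A short worked example of the parser above; a wrapped field keeps its embedded delimiter:

// Illustrative sketch: quoting lets a field contain the delimiter.
const Utils = require('./compiled/lib/utils').default; // path assumed
const parts = Utils.parseDelimitedLine('Id,"Last, First",42', ',', ['"']);
console.log(parts); // ['Id', 'Last, First', '42']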
static async *parseCSVFile(csvFilePath, delimiter = ',', wrapperChars = constants_1.default.DEFAULT_CSV_TEXT_WRAPPERS) {
if (csvFilePath === null) {
return null;
}
let headers = null;
for await (const line of this.readFileLines(csvFilePath)) {
const parts = this.parseDelimitedLine(line, delimiter, wrapperChars);
if (!parts) {
continue;
}
if (!headers) {
headers = parts;
continue;
}
const csvObj = {};
for (let index = 0; index < headers.length; index++) {
const header = headers[index];
csvObj[header] = index < parts.length ? parts[index] : null;
}
yield csvObj;
}
}
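
A hedged usage sketch for parseCSVFile; the require path and the example headers are assumptions:

// Illustrative sketch: stream a CSV file as header-keyed objects.
const Utils = require('./compiled/lib/utils').default; // path assumed
async function dumpCsv(csvFilePath) {
    for await (const row of Utils.parseCSVFile(csvFilePath)) {
        console.log(row); // e.g. { Id: '001...', Name: 'Acme' } for headers Id,Name
    }
}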
static getMIMEType(filename) {
return mime.lookup(filename);
}
}
exports.default = Utils;
Utils.isJsonEnabled = false;
Utils.ReadFileBase64EncodingOption = { encoding: 'base64' };
Utils.TempFilesPath = 'Processing_AcuPack_Temp_DoNotUse';

@@ -485,0 +502,0 @@ Utils.defaultXmlOptions = {

@@ -0,0 +0,0 @@ declare class MergeResult {

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const os = require("os");
const path = require("path");

@@ -8,2 +7,3 @@ const fs_1 = require("fs");

const sfdx_core_1 = require("./sfdx-core");
const constants_1 = require("./constants");
class MergeResult {

@@ -80,6 +80,6 @@ }

if (typeof message === 'string') {
await fs_1.promises.appendFile(logFile, `${message}${os.EOL}`);
await fs_1.promises.appendFile(logFile, `${message}${constants_1.default.EOL}`);
}
else {
await fs_1.promises.appendFile(logFile, `${JSON.stringify(message)}${os.EOL}`);
await fs_1.promises.appendFile(logFile, `${JSON.stringify(message)}${constants_1.default.EOL}`);
}

@@ -94,3 +94,3 @@ if (ux) {

result.source = source;
result.destination = destination !== null && destination !== void 0 ? destination : new Object(destination);
result.destination = destination ?? new Object(destination);
if (!result.source.Package) {

@@ -97,0 +97,0 @@ result.source['Package'] = {};

@@ -0,0 +0,0 @@ import { OptionsBase } from './options';

@@ -0,0 +0,0 @@ "use strict";

@@ -118,2 +118,11 @@ {

"api": {
"file": {
"post": {
"metadataFlagDescription": "The MetaData Type name to upload",
"commandDescription": "\r\nUploads ContentVersion files using a multi-part message when necessary.",
"recordsFlagDescription": "The Path to the file (CSV) containing the ContentVersion record data to upload",
"columnsFlagDescription": "A comma seperated list of the columns to use from the CSV file. If not specififed, all the columns are used.",
"allOrNothingFlagDescription": "Set this flag to stop the upload process on the first error"
}
},
"get": {

@@ -120,0 +129,0 @@ "commandDescription": "\r\nPerforms the GET REST action against the specified URL/URI.",
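
These locale strings back the new acu-pack:api:file:post command added in this release. A hypothetical invocation, with flag names inferred from the descriptions above and not confirmed by this diff, could look like: sfdx acu-pack:api:file:post -u myOrg --metadata ContentVersion --records ./content-versions.csv --columns Title,PathOnClient,VersionData --allornothing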

{
"name": "@salesforce/acu-pack",
"description": "SFDX CLI Extensions",
"version": "2.0.2",
"version": "2.0.3",
"author": "Salesforce Customer Success Group (CSG)",

@@ -21,5 +21,7 @@ "bugs": {

"bent": "^7.3.12",
"form-data": "^4.0.0",
"glob": "^7.2.3",
"jsforce": "^1.11.0",
"md5-file": "^5.0.0",
"mime-types": "^2.1.35",
"tslib": "^2",

@@ -26,0 +28,0 @@ "xlsx": "^0.17.5",
