@xliic/cicd-core-node
Comparing version 3.0.0 to 4.0.0
@@ -77,3 +77,3 @@ "use strict"; | ||
return __awaiter(this, void 0, void 0, function* () { | ||
const { body } = yield got_1.default(`api/v1/collections/${collectionId}/apis`, gotOptions("GET", options)); | ||
const { body } = yield (0, got_1.default)(`api/v1/collections/${collectionId}/apis`, gotOptions("GET", options)); | ||
return body; | ||
@@ -85,3 +85,3 @@ }); | ||
return __awaiter(this, void 0, void 0, function* () { | ||
const { body } = yield got_1.default(`api/v1/apis/${apiId}`, gotOptions("DELETE", options)); | ||
const { body } = yield (0, got_1.default)(`api/v1/apis/${apiId}`, gotOptions("DELETE", options)); | ||
return body; | ||
@@ -101,3 +101,3 @@ }); | ||
try { | ||
const { body } = yield got_1.default("api/v1/apis", Object.assign(Object.assign({}, gotOptions("POST", options)), { body: form })); | ||
const { body } = yield (0, got_1.default)("api/v1/apis", Object.assign(Object.assign({}, gotOptions("POST", options)), { body: form })); | ||
return { | ||
@@ -122,3 +122,3 @@ id: body.desc.id, | ||
try { | ||
const { body } = yield got_1.default("api/v2/apis", Object.assign(Object.assign({}, gotOptions("POST", options)), { json: { | ||
const { body } = yield (0, got_1.default)("api/v2/apis", Object.assign(Object.assign({}, gotOptions("POST", options)), { json: { | ||
cid: collectionId, | ||
@@ -148,3 +148,3 @@ name, | ||
return __awaiter(this, void 0, void 0, function* () { | ||
const { body } = (yield got_1.default(`api/v1/apis/${apiId}`, gotOptions("GET", options))); | ||
const { body } = (yield (0, got_1.default)(`api/v1/apis/${apiId}`, gotOptions("GET", options))); | ||
const lastAssessment = ((_a = body === null || body === void 0 ? void 0 : body.assessment) === null || _a === void 0 ? void 0 : _a.last) | ||
@@ -169,3 +169,3 @@ ? new Date(body.assessment.last) | ||
const previousStatus = yield readApiStatus(apiId, options); | ||
const { body } = yield got_1.default(`api/v1/apis/${apiId}`, Object.assign(Object.assign({}, gotOptions("PUT", options)), { json: { specfile: contents.toString("base64") } })); | ||
const { body } = yield (0, got_1.default)(`api/v1/apis/${apiId}`, Object.assign(Object.assign({}, gotOptions("PUT", options)), { json: { specfile: contents.toString("base64") } })); | ||
return { | ||
@@ -184,3 +184,3 @@ id: body.desc.id, | ||
return __awaiter(this, void 0, void 0, function* () { | ||
const response = yield got_1.default(`api/v1/collections/${id}`, Object.assign({}, gotOptions("GET", options))); | ||
const response = yield (0, got_1.default)(`api/v1/collections/${id}`, Object.assign({}, gotOptions("GET", options))); | ||
return response.body; | ||
@@ -194,3 +194,3 @@ }); | ||
try { | ||
const response = yield got_1.default(`api/v1/collections/technicalName`, Object.assign(Object.assign({}, gotOptions("POST", options)), { json: { technicalName } })); | ||
const response = yield (0, got_1.default)(`api/v1/collections/technicalName`, Object.assign(Object.assign({}, gotOptions("POST", options)), { json: { technicalName } })); | ||
const body = response.body; | ||
@@ -210,3 +210,3 @@ return body.id; | ||
return __awaiter(this, void 0, void 0, function* () { | ||
const { body } = yield got_1.default(`api/v1/collections/${id}`, Object.assign({}, gotOptions("DELETE", options))); | ||
const { body } = yield (0, got_1.default)(`api/v1/collections/${id}`, Object.assign({}, gotOptions("DELETE", options))); | ||
return body.id; | ||
@@ -223,3 +223,3 @@ }); | ||
}; | ||
const { body } = yield got_1.default("api/v1/collections", Object.assign(Object.assign({}, gotOptions("POST", options)), { json: Object.assign({ technicalName: technicalName, name: name, source: options.cicdName }, sharing) })); | ||
const { body } = yield (0, got_1.default)("api/v1/collections", Object.assign(Object.assign({}, gotOptions("POST", options)), { json: Object.assign({ technicalName: technicalName, name: name, source: options.cicdName }, sharing) })); | ||
return body.desc.id; | ||
@@ -241,3 +241,3 @@ }); | ||
if (ready) { | ||
const { body } = (yield got_1.default(`api/v1/apis/${api.id}/assessmentreport`, gotOptions("GET", options))); | ||
const { body } = (yield (0, got_1.default)(`api/v1/apis/${api.id}/assessmentreport`, gotOptions("GET", options))); | ||
const report = JSON.parse(Buffer.from(body.data, "base64").toString("utf8")); | ||
@@ -261,3 +261,3 @@ return report; | ||
// TODO check if we can get scan report with no body, just the date? | ||
const { body } = (yield got_1.default(`api/v1/apis/${apiId}/scanreport?medium=2`, gotOptions("GET", options))); | ||
const { body } = (yield (0, got_1.default)(`api/v1/apis/${apiId}/scanreport?medium=2`, gotOptions("GET", options))); | ||
const report = JSON.parse(Buffer.from(body.data, "base64").toString("utf8")); | ||
@@ -298,3 +298,3 @@ return [new Date(body.date), report]; | ||
const base64 = Buffer.from(JSON.stringify(config), "utf-8").toString("base64"); | ||
const response = yield got_1.default(`api/v1/apis/${apiId}/scan`, Object.assign(Object.assign({}, gotOptions("POST", options)), { json: { config: base64 } })); | ||
const response = yield (0, got_1.default)(`api/v1/apis/${apiId}/scan`, Object.assign(Object.assign({}, gotOptions("POST", options)), { json: { config: base64 } })); | ||
const body = response.body; | ||
@@ -313,3 +313,3 @@ return body.id; | ||
try { | ||
const { body } = (yield got_1.default(`api/v1/apis/${apiId}/scanConfigurations`, gotOptions("GET", options))); | ||
const { body } = (yield (0, got_1.default)(`api/v1/apis/${apiId}/scanConfigurations`, gotOptions("GET", options))); | ||
return body; | ||
@@ -316,0 +316,0 @@ } |
@@ -29,5 +29,5 @@ "use strict"; | ||
else { | ||
const report = yield api_1.readAssessment(api, options); | ||
const issues = yield issues_1.getIssues(path.resolve(options.rootDir, filename), report, api.mapping, options.logger, options.lineNumbers); | ||
const failures = checks_1.checkReport(report, failureConditions, options.logger); | ||
const report = yield (0, api_1.readAssessment)(api, options); | ||
const issues = yield (0, issues_1.getIssues)(path.resolve(options.rootDir, filename), report, api.mapping, options.logger, options.lineNumbers); | ||
const failures = (0, checks_1.checkReport)(report, failureConditions, options.logger); | ||
files.set(filename, Object.assign(Object.assign({}, api), { score: report.score, issues, | ||
@@ -34,0 +34,0 @@ failures })); |
@@ -32,3 +32,3 @@ "use strict"; | ||
catch (err) { | ||
error_1.handleTaskError(err, options); | ||
(0, error_1.handleTaskError)(err, options); | ||
} | ||
@@ -39,2 +39,3 @@ }); | ||
function verifyAuditInputs(inputs) { | ||
var _a; | ||
if (!inputs.rootDir || inputs.rootDir === "") { | ||
@@ -46,4 +47,6 @@ throw new error_1.TaskError("The root directory must be specified"); | ||
} | ||
if (!inputs.branchName || inputs.branchName === "") { | ||
throw new error_1.TaskError("The branch name must be specified"); | ||
if (!inputs.reference.branch && | ||
!inputs.reference.tag && | ||
!inputs.reference.pr) { | ||
throw new error_1.TaskError("The branch, tag or pull request/merge request must be specified"); | ||
} | ||
@@ -64,11 +67,14 @@ let minScore = undefined; | ||
} | ||
const config = config_1.readAuditConfig(inputs.rootDir, inputs.branchName, minScore, inputs.logger); | ||
return Object.assign(Object.assign({}, inputs), { platformUrl: url.origin, config }); | ||
const config = (0, config_1.readAuditConfig)(inputs.rootDir, inputs.reference, minScore, inputs.logger); | ||
const lineNumbers = { | ||
lineNumbers: (_a = inputs.lineNumbers) !== null && _a !== void 0 ? _a : false, | ||
}; | ||
return Object.assign(Object.assign(Object.assign({}, inputs), lineNumbers), { platformUrl: url.origin, config }); | ||
} | ||
function runAudit(options) { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
const [openapiFilenames, errors] = yield discovery_1.discoverOpenApiFiles(options); | ||
const discovered = yield discovery_1.auditDiscoveredFiles(openapiFilenames, options); | ||
const [openapiFilenames, errors] = yield (0, discovery_1.discoverOpenApiFiles)(options); | ||
const discovered = yield (0, discovery_1.auditDiscoveredFiles)(openapiFilenames, options); | ||
if (options.config.mappedFiles) { | ||
const mapped = yield mapping_1.auditMappedFiles(options); | ||
const mapped = yield (0, mapping_1.auditMappedFiles)(options); | ||
return new Map([...errors, ...discovered, ...mapped]); | ||
@@ -94,3 +100,3 @@ } | ||
logger.error(`Failed to audit ${filename}`); | ||
logger.error(` ${error_1.formatApiErrors(result)}`); | ||
logger.error(` ${(0, error_1.formatApiErrors)(result)}`); | ||
} | ||
@@ -97,0 +103,0 @@ else { |
@@ -1,6 +0,6 @@ | ||
import { Logger, AuditConfig, ScanConfig } from "./types"; | ||
import { Logger, AuditConfig, ScanConfig, Reference } from "./types"; | ||
export declare class ConfigError extends Error { | ||
constructor(message: string); | ||
} | ||
export declare function readAuditConfig(rootDir: string, branchName: string, minScore: number | undefined, logger: Logger): AuditConfig; | ||
export declare function readScanConfig(rootDir: string, branchName: string, logger: Logger): ScanConfig; | ||
export declare function readAuditConfig(rootDir: string, reference: Reference, minScore: number | undefined, logger: Logger): AuditConfig; | ||
export declare function readScanConfig(rootDir: string, reference: Reference, logger: Logger): ScanConfig; |
@@ -16,2 +16,3 @@ "use strict"; | ||
const CONF_FILE = "42c-conf.yaml"; | ||
const CONF_FILE_ALTERNATE = "42c-conf.yml"; | ||
class ConfigError extends Error { | ||
@@ -23,3 +24,3 @@ constructor(message) { | ||
exports.ConfigError = ConfigError; | ||
function readAuditConfig(rootDir, branchName, minScore, logger) { | ||
function readAuditConfig(rootDir, reference, minScore, logger) { | ||
const config = readConfig(rootDir, logger); | ||
@@ -34,3 +35,3 @@ if (!config || !config.audit) { | ||
} | ||
const audit = matchBranchConfig(branchName, config.audit, logger); | ||
const audit = matchTaskConfig(reference, config.audit, logger); | ||
return { | ||
@@ -43,3 +44,3 @@ discovery: readDiscovery(audit.discovery), | ||
exports.readAuditConfig = readAuditConfig; | ||
function readScanConfig(rootDir, branchName, logger) { | ||
function readScanConfig(rootDir, reference, logger) { | ||
const config = readConfig(rootDir, logger); | ||
@@ -53,3 +54,3 @@ if (!config || !config.scan) { | ||
} | ||
const scan = matchBranchConfig(branchName, config.scan, logger); | ||
const scan = matchTaskConfig(reference, config.scan, logger); | ||
return { | ||
@@ -83,3 +84,3 @@ discovery: readDiscovery(scan.discovery), | ||
const defaultDiscovery = { | ||
name: "${repository} ${branch}", | ||
name: "${repo_short_path} ${branch}${tag}${pr}", | ||
search: [ | ||
@@ -107,6 +108,20 @@ "**/*.json", | ||
const configFilePath = path.join(rootDir, CONF_FILE); | ||
if (!fs.existsSync(configFilePath)) { | ||
const configFilePathAlternate = path.join(rootDir, CONF_FILE_ALTERNATE); | ||
let confFile; | ||
if (fs.existsSync(configFilePath)) { | ||
confFile = CONF_FILE; | ||
} | ||
else if (fs.existsSync(configFilePathAlternate)) { | ||
confFile = CONF_FILE_ALTERNATE; | ||
} | ||
else { | ||
return null; | ||
} | ||
const config = parse_1.parseYaml(rootDir, CONF_FILE); | ||
let config = null; | ||
try { | ||
config = (0, parse_1.parseYaml)(rootDir, confFile); | ||
} | ||
catch (ex) { | ||
throw new ConfigError("Failed to parse configuration file"); | ||
} | ||
const ajv = new ajv_1.default(); | ||
@@ -121,5 +136,17 @@ const validate = ajv.compile(schema); | ||
} | ||
function matchBranchConfig(branchName, config, logger) { | ||
if (config === null || config === void 0 ? void 0 : config.branches) { | ||
const patterns = Object.keys(config.branches); | ||
function matchTaskConfig(reference, config, logger) { | ||
var _a; | ||
if (reference.branch) { | ||
return findTaskConfig(reference.branch, config === null || config === void 0 ? void 0 : config.branches, "branch", logger); | ||
} | ||
else if (reference.tag) { | ||
return findTaskConfig(reference.tag, config === null || config === void 0 ? void 0 : config.tags, "tag", logger); | ||
} | ||
else { | ||
return (_a = config === null || config === void 0 ? void 0 : config.prs) !== null && _a !== void 0 ? _a : {}; | ||
} | ||
} | ||
function findTaskConfig(name, container, containerName, logger) { | ||
if (container) { | ||
const patterns = Object.keys(container); | ||
// sort patterns longest first | ||
@@ -129,12 +156,12 @@ patterns.sort((a, b) => b.length - a.length); | ||
const isMatch = picomatch(pattern); | ||
if (isMatch(branchName)) { | ||
logger.debug(`Matched branch name '${branchName}' to pattern '${pattern}'`); | ||
return config.branches[pattern]; | ||
if (isMatch(name)) { | ||
logger.debug(`Matched ${containerName} name '${name}' to pattern '${pattern}'`); | ||
return container[pattern]; | ||
} | ||
else { | ||
logger.debug(`No match for branch name '${branchName}' and pattern '${pattern}'`); | ||
logger.debug(`No match for ${containerName} name '${name}' and pattern '${pattern}'`); | ||
} | ||
} | ||
} | ||
logger.debug(`No configuration found for branch name '${branchName}', using default config`); | ||
logger.debug(`No configuration found for ${containerName} name '${name}', using default config`); | ||
// return default config | ||
@@ -141,0 +168,0 @@ return {}; |
@@ -19,3 +19,5 @@ "use strict"; | ||
const scan_results_1 = require("./scan-results"); | ||
const preserving_json_yaml_parser_1 = require("@xliic/preserving-json-yaml-parser"); | ||
function auditDiscoveredFiles(openapiFilenames, options) { | ||
var _a, _b; | ||
return __awaiter(this, void 0, void 0, function* () { | ||
@@ -25,5 +27,5 @@ if (openapiFilenames.length === 0) { | ||
} | ||
const collectionId = yield createOrFindCollectionId(util_1.makeTechnicalCollectionName(options.repoName, options.branchName), util_1.makeCollectionName(options.repoName, options.branchName, options.config.discovery.name), options); | ||
const collectionId = yield createOrFindCollectionId((0, util_1.makeTechnicalCollectionName)(options.repoName, options.reference), (0, util_1.makeCollectionName)(options.repoName, options.reference, (_b = (_a = options.config) === null || _a === void 0 ? void 0 : _a.discovery) === null || _b === void 0 ? void 0 : _b.name, options.logger), options); | ||
const fileMap = yield uploadFilesToCollection(openapiFilenames, collectionId, options); | ||
return audit_results_1.readAuditResults(fileMap, options.config.failureConditions, options); | ||
return (0, audit_results_1.readAuditResults)(fileMap, options.config.failureConditions, options); | ||
}); | ||
@@ -33,2 +35,3 @@ } | ||
function scanDiscoveredFiles(openapiFilenames, options) { | ||
var _a, _b; | ||
return __awaiter(this, void 0, void 0, function* () { | ||
@@ -38,6 +41,6 @@ if (openapiFilenames.length === 0) { | ||
} | ||
const collectionId = yield createOrFindCollectionId(util_1.makeTechnicalCollectionName(options.repoName, options.branchName), util_1.makeCollectionName(options.repoName, options.branchName, options.config.discovery.name), options); | ||
const collectionId = yield createOrFindCollectionId((0, util_1.makeTechnicalCollectionName)(options.repoName, options.reference), (0, util_1.makeCollectionName)(options.repoName, options.reference, (_b = (_a = options.config) === null || _a === void 0 ? void 0 : _a.discovery) === null || _b === void 0 ? void 0 : _b.name, options.logger), options); | ||
const files = yield uploadFilesToCollection(openapiFilenames, collectionId, options); | ||
const scanned = yield scan_results_1.startScan(files, options); | ||
const results = yield scan_results_1.readScanResults(scanned, options); | ||
const scanned = yield (0, scan_results_1.startScan)(files, options); | ||
const results = yield (0, scan_results_1.readScanResults)(scanned, options); | ||
return results; | ||
@@ -54,3 +57,3 @@ }); | ||
} | ||
const discoveredFilenames = yield findOpenapiFiles(options); | ||
const discoveredFilenames = yield findOpenapiFiles(options.rootDir, options.config.discovery, options.logger); | ||
const filteredFilenames = discoveredFilenames.filter((filename) => { | ||
@@ -67,3 +70,3 @@ if (options.config.mappedFiles[filename]) { | ||
try { | ||
if (parse_1.isOpenAPI(options.rootDir, filename)) { | ||
if ((0, parse_1.isOpenAPI)(options.rootDir, filename)) { | ||
openapiFilenames.push(filename); | ||
@@ -85,9 +88,7 @@ } | ||
exports.discoverOpenApiFiles = discoverOpenApiFiles; | ||
function findOpenapiFiles(options) { | ||
function findOpenapiFiles(rootDir, discovery, log) { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
const log = options.logger; | ||
const patterns = options.config.discovery.search; | ||
log.debug(`Looking for OpenAPI files in: ${patterns}`); | ||
const paths = yield globby(patterns, { | ||
cwd: options.rootDir, | ||
log.debug(`Looking for OpenAPI files in: ${discovery.search}`); | ||
const paths = yield globby(discovery.search, { | ||
cwd: rootDir, | ||
}); | ||
@@ -100,3 +101,3 @@ return paths; | ||
const result = new Map(); | ||
const apis = yield api_1.listApis(collectionId, options); | ||
const apis = yield (0, api_1.listApis)(collectionId, options); | ||
const actions = createApiActions(apis.list, filenames); | ||
@@ -107,3 +108,3 @@ for (const action of actions) { | ||
// remove it from the collection | ||
yield api_1.deleteApi(action.apiId, options); | ||
yield (0, api_1.deleteApi)(action.apiId, options); | ||
} | ||
@@ -162,8 +163,9 @@ else if (action.action === "create") { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
const [parsed, mapping] = yield parse_1.bundle(options.rootDir, filename); | ||
if ("error" in parsed) { | ||
return { errors: { bundling: parsed.error } }; | ||
const [bundled, err] = yield (0, parse_1.bundle)(options.rootDir, filename); | ||
if (err) { | ||
return { errors: { bundling: err.message } }; | ||
} | ||
const apiData = Buffer.from(JSON.stringify(parsed), "utf8"); | ||
const api = yield api_1.updateApi(apiId, apiData, options); | ||
const [parsed, mapping] = bundled; | ||
const apiData = Buffer.from((0, preserving_json_yaml_parser_1.stringify)(parsed), "utf8"); | ||
const api = yield (0, api_1.updateApi)(apiId, apiData, options); | ||
if ("errors" in api) { | ||
@@ -180,9 +182,10 @@ return api; | ||
return __awaiter(this, void 0, void 0, function* () { | ||
const [parsed, mapping] = yield parse_1.bundle(options.rootDir, filename); | ||
if ("error" in parsed) { | ||
return { errors: { bundling: parsed.error } }; | ||
const [bundled, err] = yield (0, parse_1.bundle)(options.rootDir, filename); | ||
if (err) { | ||
return { errors: { bundling: err.message } }; | ||
} | ||
const name = util_1.makeSafeName(((_a = parsed === null || parsed === void 0 ? void 0 : parsed.info) === null || _a === void 0 ? void 0 : _a.title) || "No title"); | ||
const apiData = Buffer.from(JSON.stringify(parsed), "utf8"); | ||
const api = yield api_1.createTechnicalApi(collectionId, filename, name, apiData, options); | ||
const [parsed, mapping] = bundled; | ||
const name = (0, util_1.makeSafeName)(((_a = parsed === null || parsed === void 0 ? void 0 : parsed.info) === null || _a === void 0 ? void 0 : _a.title) || "No title"); | ||
const apiData = Buffer.from((0, preserving_json_yaml_parser_1.stringify)(parsed), "utf8"); | ||
const api = yield (0, api_1.createTechnicalApi)(collectionId, filename, name, apiData, options); | ||
if ("errors" in api) { | ||
@@ -201,5 +204,5 @@ return api; | ||
log.debug(`Checking for the technical collection name: ${technicalName}`); | ||
const existingId = yield api_1.readTechnicalCollection(technicalName, options); | ||
const existingId = yield (0, api_1.readTechnicalCollection)(technicalName, options); | ||
if (!existingId) { | ||
const newId = yield api_1.createTechnicalCollection(technicalName, name, options); | ||
const newId = yield (0, api_1.createTechnicalCollection)(technicalName, name, options); | ||
log.debug(`Created a new collection name "${name}" ID: ${newId}`); | ||
@@ -206,0 +209,0 @@ return newId; |
@@ -30,2 +30,3 @@ "use strict"; | ||
} | ||
return "Unknown error"; | ||
} | ||
@@ -32,0 +33,0 @@ exports.formatApiErrors = formatApiErrors; |
@@ -18,4 +18,5 @@ "use strict"; | ||
const fs = require("fs"); | ||
const openapi_ast_node_1 = require("@xliic/openapi-ast-node"); | ||
const preserving_json_yaml_parser_1 = require("@xliic/preserving-json-yaml-parser"); | ||
const parse_1 = require("./parse"); | ||
// @ts-ignore | ||
const line_chomper_1 = require("line-chomper"); | ||
@@ -70,3 +71,3 @@ const stream_1 = require("stream"); | ||
else { | ||
const mapping = parse_1.findMapping(mappings, pointer); | ||
const mapping = (0, parse_1.findMapping)(mappings, pointer); | ||
if (mapping) { | ||
@@ -86,3 +87,3 @@ const [root, lines] = yield getParsed(mapping.file); | ||
return new Promise((resolve, reject) => { | ||
line_chomper_1.mapLineOffsets(stream_1.Readable.from([text]), 1, (err, offsets) => { | ||
(0, line_chomper_1.mapLineOffsets)(stream_1.Readable.from([text]), 1, (err, offsets) => { | ||
if (err) { | ||
@@ -104,8 +105,18 @@ reject(err); | ||
filename.toLowerCase().endsWith(".yml")) { | ||
const [parsed, errors] = openapi_ast_node_1.parse(text, "yaml", null); | ||
parsedFiles[filename] = parsed; | ||
const [parsed, errors] = (0, preserving_json_yaml_parser_1.parse)(text, "yaml", {}); | ||
if (parsed) { | ||
parsedFiles[filename] = parsed; | ||
} | ||
else { | ||
throw new Error("Unable to parse YAML file: " + filename); | ||
} | ||
} | ||
else { | ||
const [parsed, errors] = openapi_ast_node_1.parse(text, "json", null); | ||
parsedFiles[filename] = parsed; | ||
const [parsed, errors] = (0, preserving_json_yaml_parser_1.parse)(text, "json", {}); | ||
if (parsed) { | ||
parsedFiles[filename] = parsed; | ||
} | ||
else { | ||
throw new Error("Unable to parse YAML file: " + filename); | ||
} | ||
} | ||
@@ -147,2 +158,3 @@ linezFiles[filename] = yield getOffsets(text); | ||
criticality, | ||
// @ts-ignore | ||
severity: criticalityToSeverity[criticality], | ||
@@ -149,0 +161,0 @@ }); |
@@ -13,2 +13,3 @@ "use strict"; | ||
exports.scanMappedFiles = exports.auditMappedFiles = void 0; | ||
const preserving_json_yaml_parser_1 = require("@xliic/preserving-json-yaml-parser"); | ||
const api_1 = require("./api"); | ||
@@ -21,3 +22,3 @@ const audit_results_1 = require("./audit-results"); | ||
const updatedMappedFiles = yield uploadMappedFiles(options); | ||
return audit_results_1.readAuditResults(updatedMappedFiles, options.config.failureConditions, options); | ||
return (0, audit_results_1.readAuditResults)(updatedMappedFiles, options.config.failureConditions, options); | ||
}); | ||
@@ -29,4 +30,4 @@ } | ||
const files = yield uploadMappedFiles(options); | ||
const scanned = yield scan_results_1.startScan(files, options); | ||
const results = yield scan_results_1.readScanResults(scanned, options); | ||
const scanned = yield (0, scan_results_1.startScan)(files, options); | ||
const results = yield (0, scan_results_1.readScanResults)(scanned, options); | ||
return results; | ||
@@ -38,15 +39,16 @@ }); | ||
return __awaiter(this, void 0, void 0, function* () { | ||
const result = new Map(); | ||
const fileMap = new Map(); | ||
for (const [filename, apiId] of Object.entries(options.config.mappedFiles)) { | ||
const [parsed, mapping] = yield parse_1.bundle(options.rootDir, filename); | ||
if ("error" in parsed) { | ||
result.set(filename, { errors: { bundling: parsed.error } }); | ||
const [bundled, err] = yield (0, parse_1.bundle)(options.rootDir, filename); | ||
if (err) { | ||
fileMap.set(filename, { errors: { bundling: err.message } }); | ||
continue; | ||
} | ||
const apiData = Buffer.from(JSON.stringify(parsed), "utf8"); | ||
const api = yield api_1.updateApi(apiId, apiData, options); | ||
result.set(filename, Object.assign(Object.assign({}, api), { mapping })); | ||
const [parsed, mapping] = bundled; | ||
const apiData = Buffer.from((0, preserving_json_yaml_parser_1.stringify)(parsed), "utf8"); | ||
const api = yield (0, api_1.updateApi)(apiId, apiData, options); | ||
fileMap.set(filename, Object.assign(Object.assign({}, api), { mapping })); | ||
} | ||
return result; | ||
return fileMap; | ||
}); | ||
} |
@@ -1,7 +0,7 @@ | ||
import { JsonMapping, MappingTreeNode } from "./types"; | ||
export declare function getOpenApiVersion(parsed: any): string; | ||
export declare function findMapping(root: MappingTreeNode, pointer: string): JsonMapping; | ||
export declare function bundle(rootDir: string, filename: string): Promise<[any, MappingTreeNode]>; | ||
import { JsonMapping, MappingTreeNode, Result } from "./types"; | ||
export declare function getOpenApiVersion(parsed: any): "v2" | "v3"; | ||
export declare function findMapping(root: MappingTreeNode, pointer: string): JsonMapping | null; | ||
export declare function bundle(rootDir: string, filename: string): Promise<Result<[any, MappingTreeNode], Error>>; | ||
export declare function isOpenAPI(rootDir: string, filename: string): boolean; | ||
export declare function parseYaml(rootDir: string, filename: string): any; | ||
export declare function parseJson(rootDir: string, filename: string): any; | ||
export declare function parseYaml(rootDir: string, filename: string): import("@xliic/preserving-json-yaml-parser").Parsed | undefined; | ||
export declare function parseJson(rootDir: string, filename: string): import("@xliic/preserving-json-yaml-parser").Parsed | undefined; |
@@ -19,9 +19,13 @@ "use strict"; | ||
const path_1 = require("path"); | ||
const yaml = require("js-yaml"); | ||
// @ts-ignore | ||
const parser = require("@xliic/json-schema-ref-parser"); | ||
// @ts-ignore | ||
const url = require("@xliic/json-schema-ref-parser/lib/util/url"); | ||
// @ts-ignore | ||
const Pointer = require("@xliic/json-schema-ref-parser/lib/pointer"); | ||
// @ts-ignore | ||
const $Ref = require("@xliic/json-schema-ref-parser/lib/ref"); | ||
// @ts-ignore | ||
const errors_1 = require("@xliic/json-schema-ref-parser/lib/util/errors"); | ||
const openapi_ast_node_1 = require("@xliic/openapi-ast-node"); | ||
const preserving_json_yaml_parser_1 = require("@xliic/preserving-json-yaml-parser"); | ||
const destinationMap = { | ||
@@ -46,5 +50,5 @@ v2: { | ||
if (filename.toLowerCase().endsWith(".json")) { | ||
return JSON.parse(fs.readFileSync(path_1.resolve(rootDir, filename), { encoding: "utf-8" })); | ||
return parseJson(rootDir, filename); | ||
} | ||
return yaml.load(path_1.resolve(rootDir, filename)); | ||
return parseYaml(rootDir, filename); | ||
} | ||
@@ -58,3 +62,3 @@ function resolver(rootDir) { | ||
read: (file) => __awaiter(this, void 0, void 0, function* () { | ||
const filename = path_1.resolve(rootDir, url.toFileSystemPath(file.url)); | ||
const filename = (0, path_1.resolve)(rootDir, url.toFileSystemPath(file.url)); | ||
try { | ||
@@ -82,3 +86,3 @@ return fs.readFileSync(filename, { | ||
} | ||
return null; | ||
throw new Error("Unsupported OpenAPI version"); | ||
} | ||
@@ -117,3 +121,3 @@ exports.getOpenApiVersion = getOpenApiVersion; | ||
function findMapping(root, pointer) { | ||
const path = openapi_ast_node_1.parseJsonPointer(pointer); | ||
const path = (0, preserving_json_yaml_parser_1.parseJsonPointer)(pointer); | ||
let current = root; | ||
@@ -130,3 +134,3 @@ let i = 0; | ||
const remaining = path.slice(i, path.length); | ||
return { file, hash: hash + openapi_ast_node_1.joinJsonPointer(remaining) }; | ||
return { file, hash: hash + (0, preserving_json_yaml_parser_1.joinJsonPointer)(remaining) }; | ||
} | ||
@@ -139,5 +143,4 @@ return { file, hash }; | ||
const parsed = parseDocument(rootDir, filename); | ||
const cwd = path_1.dirname(path_1.resolve(rootDir, filename)) + "/"; | ||
const cwd = (0, path_1.dirname)((0, path_1.resolve)(rootDir, filename)) + "/"; | ||
const state = { | ||
version: null, | ||
parsed: null, | ||
@@ -161,3 +164,3 @@ mapping: { value: null, children: {} }, | ||
// TODO check that hashPath == 'schemas' or 'parameters', etc. | ||
const targetFileName = path_1.relative(cwd, filename); | ||
const targetFileName = (0, path_1.relative)(cwd, filename); | ||
let path = [ | ||
@@ -198,7 +201,11 @@ "components", | ||
const destinations = destinationMap[state.version]; | ||
// @ts-ignore | ||
const destination = destinations[parentKey] | ||
? destinations[parentKey] | ||
: destinations[grandparentKey] | ||
? destinations[grandparentKey] | ||
: null; | ||
? // @ts-ignore | ||
destinations[parentKey] | ||
: // @ts-ignore | ||
destinations[grandparentKey] | ||
? // @ts-ignore | ||
destinations[grandparentKey] | ||
: null; | ||
if (destination) { | ||
@@ -226,3 +233,3 @@ const ref = entry.$ref.$ref; | ||
const bundled = yield parser.bundle(parsed, options); | ||
return [bundled, state.mapping]; | ||
return [[bundled, state.mapping], undefined]; | ||
} | ||
@@ -242,7 +249,7 @@ catch (errors) { | ||
.map((error) => { | ||
const source = error.source == "" ? path_1.resolve(rootDir, filename) : error.source; | ||
return ` Failed to resolve reference: in ${source} at ${openapi_ast_node_1.joinJsonPointer(error.path)}: ${error.message}`; | ||
const source = error.source == "" ? (0, path_1.resolve)(rootDir, filename) : error.source; | ||
return ` Failed to resolve reference: in ${source} at ${(0, preserving_json_yaml_parser_1.joinJsonPointer)(error.path)}: ${error.message}`; | ||
}) | ||
.join("\n"); | ||
return [{ error: `Error bundling OpenAPI file:\n${messages}` }, null]; | ||
return [undefined, new Error(`Error bundling OpenAPI file:\n${messages}`)]; | ||
} | ||
@@ -271,5 +278,13 @@ }); | ||
try { | ||
return yaml.load(fs.readFileSync(path_1.resolve(rootDir, filename), { | ||
const text = fs.readFileSync((0, path_1.resolve)(rootDir, filename), { | ||
encoding: "utf-8", | ||
})); | ||
}); | ||
const [parsed, errors] = (0, preserving_json_yaml_parser_1.parse)(text, "yaml", {}); | ||
if (errors.length > 0) { | ||
const message = errors | ||
.map((error) => `${error.message} at ${error.offset}`) | ||
.join(", "); | ||
throw new Error(message); | ||
} | ||
return parsed; | ||
} | ||
@@ -283,3 +298,13 @@ catch (e) { | ||
try { | ||
return JSON.parse(fs.readFileSync(path_1.resolve(rootDir, filename), { encoding: "utf-8" })); | ||
const text = fs.readFileSync((0, path_1.resolve)(rootDir, filename), { | ||
encoding: "utf-8", | ||
}); | ||
const [parsed, errors] = (0, preserving_json_yaml_parser_1.parse)(text, "json", {}); | ||
if (errors.length > 0) { | ||
const message = errors | ||
.map((error) => `${error.message} at ${error.offset}`) | ||
.join(", "); | ||
throw new Error(message); | ||
} | ||
return parsed; | ||
} | ||
@@ -286,0 +311,0 @@ catch (e) { |
@@ -33,3 +33,3 @@ "use strict"; | ||
return __awaiter(this, void 0, void 0, function* () { | ||
const { body } = yield got_1.default("api/job", Object.assign(Object.assign({}, gotOptions("POST", options)), { json: { | ||
const { body } = yield (0, got_1.default)("api/job", Object.assign(Object.assign({}, gotOptions("POST", options)), { json: { | ||
token, | ||
@@ -36,0 +36,0 @@ } })); |
@@ -28,4 +28,4 @@ "use strict"; | ||
else { | ||
const [reportDate, report] = yield api_1.readScanReport(remote.id, options); | ||
const scanConfig = yield api_1.readScanConfiguration(remote.id, options); | ||
const [reportDate, report] = yield (0, api_1.readScanReport)(remote.id, options); | ||
const scanConfig = yield (0, api_1.readScanConfiguration)(remote.id, options); | ||
if (scanConfig) { | ||
@@ -52,3 +52,3 @@ const job = yield scanManager.startScan(scanConfig.tokenId, options); | ||
log.debug(`Reading scan report for ${filename}`); | ||
const report = yield api_1.readScan(remote.id, remote.lastOnPremScan, options); | ||
const report = yield (0, api_1.readScan)(remote.id, remote.lastOnPremScan, options); | ||
result.set(filename, Object.assign(Object.assign({}, remote), { report })); | ||
@@ -55,0 +55,0 @@ } |
@@ -32,3 +32,3 @@ "use strict"; | ||
catch (err) { | ||
error_1.handleTaskError(err, options); | ||
(0, error_1.handleTaskError)(err, options); | ||
} | ||
@@ -45,4 +45,6 @@ }); | ||
} | ||
if (!inputs.branchName || inputs.branchName === "") { | ||
throw new error_1.TaskError("The branch name must be specified"); | ||
if (!inputs.reference.branch && | ||
!inputs.reference.tag && | ||
!inputs.reference.pr) { | ||
throw new error_1.TaskError("The branch, tag or pull request/merge request must be specified"); | ||
} | ||
@@ -60,3 +62,3 @@ if (!inputs.platformUrl || inputs.platformUrl === "") { | ||
const scanManagerUrl = new url_1.URL(inputs.scanManagerUrl); | ||
const config = config_1.readScanConfig(inputs.rootDir, inputs.branchName, inputs.logger); | ||
const config = (0, config_1.readScanConfig)(inputs.rootDir, inputs.reference, inputs.logger); | ||
return Object.assign(Object.assign({}, inputs), { platformUrl: platformUrl.origin, scanManagerUrl: scanManagerUrl.origin, config }); | ||
@@ -66,6 +68,6 @@ } | ||
return __awaiter(this, void 0, void 0, function* () { | ||
const [openapiFilenames, errors] = yield discovery_1.discoverOpenApiFiles(options); | ||
const discovered = yield discovery_1.scanDiscoveredFiles(openapiFilenames, options); | ||
const [openapiFilenames, errors] = yield (0, discovery_1.discoverOpenApiFiles)(options); | ||
const discovered = yield (0, discovery_1.scanDiscoveredFiles)(openapiFilenames, options); | ||
if (options.config.mappedFiles) { | ||
const mapped = yield mapping_1.scanMappedFiles(options); | ||
const mapped = yield (0, mapping_1.scanMappedFiles)(options); | ||
return new Map([...errors, ...discovered, ...mapped]); | ||
@@ -96,3 +98,3 @@ } | ||
if ("errors" in result) { | ||
logger.error(` ${error_1.formatApiErrors(result)}`); | ||
logger.error(` ${(0, error_1.formatApiErrors)(result)}`); | ||
} | ||
@@ -99,0 +101,0 @@ else { |
@@ -13,2 +13,11 @@ { | ||
} | ||
}, | ||
"tags": { | ||
"type": "object", | ||
"additionalProperties": { | ||
"$ref": "#/definitions/auditConfig" | ||
} | ||
}, | ||
"prs": { | ||
"$ref": "#/definitions/auditConfig" | ||
} | ||
@@ -15,0 +24,0 @@ }, |
@@ -0,1 +1,2 @@ | ||
export declare type Result<R, E> = [R, undefined] | [undefined, E]; | ||
export interface ApiStatus { | ||
@@ -21,3 +22,3 @@ isAssessmentProcessed: boolean; | ||
export interface MappingTreeNode { | ||
value: JsonMapping; | ||
value: JsonMapping | null; | ||
children: { | ||
@@ -84,3 +85,3 @@ [key: string]: MappingTreeNode; | ||
export interface TaskConfig { | ||
discovery: Discovery; | ||
discovery?: Discovery; | ||
mappedFiles: FileApiIdMap; | ||
@@ -93,3 +94,3 @@ } | ||
} | ||
export interface YamlBranchConfig { | ||
export interface YamlTaskConfig { | ||
fail_on?: YamlFailureConditions; | ||
@@ -99,6 +100,2 @@ mapping?: Mapping; | ||
} | ||
export interface BranchConfig { | ||
fail_on: FailureConditions; | ||
mapping?: Mapping; | ||
} | ||
export interface YamlFailureConditions { | ||
@@ -144,8 +141,7 @@ invalid_contract?: boolean; | ||
export interface YamlDiscovery { | ||
name?: string; | ||
search?: string[]; | ||
"collection-name"?: string; | ||
search: string[]; | ||
"collection-name": string; | ||
} | ||
export interface Discovery { | ||
name: string; | ||
name?: string; | ||
search: string[]; | ||
@@ -164,3 +160,3 @@ } | ||
logger: Logger; | ||
branchName: string; | ||
reference: Reference; | ||
repoName: string; | ||
@@ -180,2 +176,7 @@ cicdName: string; | ||
} | ||
export declare type Reference = { | ||
branch?: string; | ||
tag?: string; | ||
pr?: string; | ||
}; | ||
export interface TaskOptions { | ||
@@ -188,3 +189,3 @@ referer: string; | ||
logger: Logger; | ||
branchName: string; | ||
reference: Reference; | ||
repoName: string; | ||
@@ -199,3 +200,3 @@ cicdName: string; | ||
export interface AuditOptions extends TaskOptions { | ||
lineNumbers?: boolean; | ||
lineNumbers: boolean; | ||
config: AuditConfig; | ||
@@ -202,0 +203,0 @@ } |
@@ -0,3 +1,4 @@ | ||
import { Logger, Reference } from "./types"; | ||
export declare function makeSafeName(name: string): string; | ||
export declare function makeCollectionName(repo: string, branch: string, template: string): string; | ||
export declare function makeTechnicalCollectionName(repoName: string, branchName: string): string; | ||
export declare function makeCollectionName(repo: string, reference: Reference, nameTemplate: string | undefined, log: Logger): string; | ||
export declare function makeTechnicalCollectionName(repo: string, reference: Reference): string; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.makeTechnicalCollectionName = exports.makeCollectionName = exports.makeSafeName = void 0; | ||
const url_1 = require("url"); | ||
const constants_1 = require("./constants"); | ||
@@ -11,13 +12,57 @@ function makeSafeName(name) { | ||
exports.makeSafeName = makeSafeName; | ||
function makeCollectionName(repo, branch, template) { | ||
function makeCollectionName(repo, reference, nameTemplate, log) { | ||
const repoShortPath = makeRepoShortPath(repo, log); | ||
const repoHost = makeRepoHost(repo, log); | ||
const template = nameTemplate !== null && nameTemplate !== void 0 ? nameTemplate : "${repo_short_path} ${branch}${tag}${pr}"; | ||
const branch = reference.branch ? `Branch:${reference.branch}` : ""; | ||
const tag = reference.tag ? `Tag:${reference.tag}` : ""; | ||
const pr = reference.pr ? `PR:${reference.pr}` : ""; | ||
const replaced = template | ||
.replace("${repo_hostname}", repoHost) | ||
.replace("${repo_short_path}", repoShortPath) | ||
.replace("${repository}", repo) | ||
.replace("${branch}", branch); | ||
.replace("${branch}", branch) | ||
.replace("${tag}", tag) | ||
.replace("${pr}", pr); | ||
return makeSafeName(replaced); | ||
} | ||
exports.makeCollectionName = makeCollectionName; | ||
function makeTechnicalCollectionName(repoName, branchName) { | ||
function makeTechnicalCollectionName(repo, reference) { | ||
if (reference.branch) { | ||
return `${repo}@@${reference.branch}`; | ||
} | ||
else if (reference.tag) { | ||
return `${repo}@@Tag:${reference.tag}`; | ||
} | ||
else { | ||
return `${repo}@@PR:${reference.pr}`; | ||
} | ||
// TODO check max name len | ||
return `${repoName}@@${branchName}`; | ||
} | ||
exports.makeTechnicalCollectionName = makeTechnicalCollectionName; | ||
function makeRepoShortPath(repo, log) { | ||
try { | ||
let { pathname } = new url_1.URL(repo); | ||
if (pathname.startsWith("/")) { | ||
pathname = pathname.slice(1); | ||
} | ||
if (pathname.endsWith(".git")) { | ||
pathname = pathname.slice(0, pathname.lastIndexOf(".git")); | ||
} | ||
return pathname; | ||
} | ||
catch (ex) { | ||
log.debug("Failed to parse repository for ${repo_short_path} using full repository name instead"); | ||
return repo; | ||
} | ||
} | ||
function makeRepoHost(repo, log) { | ||
try { | ||
const { hostname } = new url_1.URL(repo); | ||
return hostname; | ||
} | ||
catch (ex) { | ||
log.warning("Failed to parse repository for ${repo_hostname} using 'unknown' instead"); | ||
return "unknown"; | ||
} | ||
} |
{ | ||
"name": "@xliic/cicd-core-node", | ||
"version": "3.0.0", | ||
"version": "4.0.0", | ||
"description": "Performs API contract security audit to get a detailed analysis of the possible vulnerabilities and other issues in the API contract.", | ||
@@ -22,9 +22,8 @@ "main": "lib/index.js", | ||
"dependencies": { | ||
"@xliic/json-schema-ref-parser": "^9.1.6", | ||
"@xliic/openapi-ast-node": "1.0.0", | ||
"@xliic/json-schema-ref-parser": "^9.2.1", | ||
"@xliic/preserving-json-yaml-parser": "^1.7.2", | ||
"ajv": "^7.0.3", | ||
"form-data": "^3.0.0", | ||
"globby": "^11.0.2", | ||
"got": "^11.8.1", | ||
"js-yaml": "^4.1.0", | ||
"got": "^11.8.3", | ||
"line-chomper": "^0.5.0", | ||
@@ -34,11 +33,11 @@ "picomatch": "^2.2.3" | ||
"devDependencies": { | ||
"@types/got": "^9.6.11", | ||
"@types/jest": "^26.0.23", | ||
"@types/got": "^9.6.12", | ||
"@types/jest": "^27.0.3", | ||
"@types/node": "^14.14.21", | ||
"@types/picomatch": "^2.2.2", | ||
"jest": "^27.0.4", | ||
"json-schema": "^0.3.0", | ||
"ts-jest": "^27.0.3", | ||
"typescript": "^4.1.3" | ||
"jest": "^27.4.3", | ||
"json-schema": "^0.4.0", | ||
"ts-jest": "^27.1.0", | ||
"typescript": "^4.5.2" | ||
} | ||
} |
+ Added @xliic/preserving-json-yaml-parser@1.11.0 (transitive)
+ Added jsonc-parser@3.3.1 (transitive)
+ Added yaml-language-server-parser@0.1.3 (transitive)
- Removed @xliic/openapi-ast-node@1.0.0
- Removed js-yaml@^4.1.0
- Removed @xliic/openapi-ast-node@1.0.0 (transitive)
- Removed jsonc-parser@2.3.1 (transitive)
- Removed yaml-ast-parser-custom-tags@0.0.43 (transitive)
Updated got@^11.8.3