@snyk/code-client
Comparing version 4.0.0-alpha.0 to 4.0.1-rc
import { AnalyzeFoldersOptions } from './files';
import { RemoteBundle, AnalysisOptions, ConnectionOptions, GetAnalysisOptions } from './http';
import { AnalysisOptions, ConnectionOptions, GetAnalysisOptions } from './http';
import { FileBundle } from './bundles';
import { AnalysisResult } from './interfaces/analysis-result.interface';
export declare function analyzeBundle(options: GetAnalysisOptions): Promise<AnalysisResult>;
interface FileAnalysisOptions extends ConnectionOptions, AnalysisOptions, AnalyzeFoldersOptions {
interface FileAnalysisOptions {
connection: ConnectionOptions;
analysisOptions: AnalysisOptions;
fileOptions: AnalyzeFoldersOptions;
}
export declare function analyzeFolders(options: FileAnalysisOptions): Promise<AnalysisResult | null>;
interface CreateBundleFromFoldersOptions extends ConnectionOptions, AnalyzeFoldersOptions {
interface FileAnalysis extends FileAnalysisOptions {
fileBundle: FileBundle;
analysisResults: AnalysisResult;
}
/**
* Creates a remote bundle and returns response from the bundle API
*
* @param {CreateBundleFromFoldersOptions} options
* @returns {Promise<RemoteBundle | null>}
*/
export declare function createBundleFromFolders(options: CreateBundleFromFoldersOptions): Promise<RemoteBundle | null>;
export declare function analyzeFolders(options: FileAnalysisOptions): Promise<FileAnalysis | null>;
interface ExtendAnalysisOptions extends FileAnalysis {
files: string[];
}
export declare function extendAnalysis(options: ExtendAnalysisOptions): Promise<FileAnalysis | null>;
export {};
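Taken together, these declarations replace the flat 3.x-style options with three nested groups and make `analyzeFolders` resolve to a `FileAnalysis` that can be fed back into `extendAnalysis`. A minimal sketch of the new call shape, assuming `baseURL`, `sessionToken` and `source` are already defined and that both functions are re-exported from the package entry point:

```javascript
const { analyzeFolders, extendAnalysis } = require('@snyk/code-client');

async function run() {
  // Connection, analysis and file options are now passed as separate nested
  // objects (FileAnalysisOptions above).
  const analysis = await analyzeFolders({
    connection: { baseURL, sessionToken, source },
    analysisOptions: { severity: 1 },
    fileOptions: { paths: ['/home/user/repo'], symlinksEnabled: false },
  });
  if (!analysis) return null; // bundle could not be created (e.g. no supported files)

  // The returned FileAnalysis keeps the original options plus fileBundle and
  // analysisResults, so it can be spread straight into extendAnalysis along
  // with the files that changed since this run (hypothetical path below).
  return extendAnalysis({
    ...analysis,
    files: ['/home/user/repo/main.js'],
  });
}
```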
@@ -6,10 +6,8 @@ "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createBundleFromFolders = exports.analyzeFolders = exports.analyzeBundle = void 0;
/* eslint-disable no-await-in-loop */
const lodash_pick_1 = __importDefault(require("lodash.pick"));
exports.extendAnalysis = exports.analyzeFolders = exports.analyzeBundle = void 0;
const files_1 = require("./files");
const http_1 = require("./http");
const bundles_1 = require("./bundles");
const emitter_1 = __importDefault(require("./emitter"));
// import { MAX_PAYLOAD } from './constants';
const bundles_1 = require("./bundles");
// import { fromEntries } from './lib/utils';
const sleep = (duration) => new Promise(resolve => setTimeout(resolve, duration));
@@ -21,5 +19,3 @@ // const ANALYSIS_OPTIONS_DEFAULTS = {
// symlinksEnabled: false,
// maxPayload: MAX_PAYLOAD,
// defaultFileIgnores: IGNORES_DEFAULT,
// source: '',
// };
@@ -73,71 +69,118 @@ async function pollAnalysis(options) {
async function analyzeFolders(options) {
const remoteBundle = await createBundleFromFolders({
...lodash_pick_1.default(options, ['baseURL', 'sessionToken', 'source']),
...lodash_pick_1.default(options, ['paths', 'symlinksEnabled', 'maxPayload', 'defaultFileIgnores']),
const fileBundle = await bundles_1.createBundleFromFolders({
...options.connection,
...options.fileOptions,
});
if (fileBundle === null)
return null;
// Analyze bundle
if (remoteBundle === null) {
return null;
}
const analysisResults = await analyzeBundle({
...lodash_pick_1.default(options, ['baseURL', 'sessionToken', 'source', 'severity', 'limitToFiles']),
bundleHash: remoteBundle.bundleHash,
bundleHash: fileBundle.bundleHash,
...options.connection,
...options.analysisOptions,
});
// TODO: expand relative file names to absolute ones
// analysisResults.files = normalizeResultFiles(analysisData.analysisResults.files, baseDir);
return analysisResults;
return { fileBundle, analysisResults, ...options };
}
exports.analyzeFolders = analyzeFolders;
/**
* Creates a remote bundle and returns response from the bundle API
*
* @param {CreateBundleFromFoldersOptions} options
* @returns {Promise<RemoteBundle | null>}
*/
async function createBundleFromFolders(options) {
const baseDir = files_1.determineBaseDir(options.paths);
// Fetch supported files to save network traffic
const supportedFiles = await getSupportedFiles(options.baseURL, options.source);
// Scan for custom ignore rules
const fileIgnores = await files_1.collectIgnoreRules(options.paths, options.symlinksEnabled, options.defaultFileIgnores);
emitter_1.default.scanFilesProgress(0);
const bundleFiles = [];
let totalFiles = 0;
const bundleFileCollector = files_1.collectBundleFiles({
...lodash_pick_1.default(options, ['paths', 'symlinksEnabled', 'maxPayload']),
baseDir,
fileIgnores,
supportedFiles,
function mergeBundleResults(oldAnalysisResults, newAnalysisResults, limitToFiles, removedFiles = []) {
// Start from the new analysis results
// For each finding of the old analysis,
// if its location is not part of limitToFiles or removedFiles (removedFiles should also be checked against codeFlows),
// append the finding to the new analysis and check if the rule must be added as well
const changedFiles = [...limitToFiles, ...removedFiles];
const sarifResults = (newAnalysisResults.sarif.runs[0].results || []).filter(res => {
var _a, _b, _c;
// TODO: This should not be necessary in theory but, in case of two identical files,
// Bundle Server returns the finding in both files even if limitToFiles only reports one
const loc = (_c = (_b = (_a = res.locations) === null || _a === void 0 ? void 0 : _a[0].physicalLocation) === null || _b === void 0 ? void 0 : _b.artifactLocation) === null || _c === void 0 ? void 0 : _c.uri;
return loc && changedFiles.includes(loc);
});
for await (const f of bundleFileCollector) {
bundleFiles.push(f);
totalFiles += 1;
emitter_1.default.scanFilesProgress(totalFiles);
const sarifRules = newAnalysisResults.sarif.runs[0].tool.driver.rules || [];
const oldResults = oldAnalysisResults.sarif.runs[0].results || [];
for (let res of oldResults) {
// NOTE: Node 10 doesn't support the more readable .flatMap, so we need to use .reduce, but the behaviour would be the following:
// const locations: string[] = (res.locations || []).flatMap(
// loc => !!loc.physicalLocation?.artifactLocation?.uri ? [loc.physicalLocation.artifactLocation.uri] : []
// );
// const codeFlowLocations: string[] = (res.codeFlows || []).flatMap(
// cf => (cf.threadFlows || []).flatMap(
// tf => (tf.locations || []).flatMap(
// loc => !!loc.location?.physicalLocation?.artifactLocation?.uri ? [loc.location.physicalLocation.artifactLocation.uri] : []
// )
// )
// );
const locations = (res.locations || []).reduce((acc, loc) => {
var _a, _b;
if ((_b = (_a = loc.physicalLocation) === null || _a === void 0 ? void 0 : _a.artifactLocation) === null || _b === void 0 ? void 0 : _b.uri) {
acc.push(loc.physicalLocation.artifactLocation.uri);
}
;
return acc;
}, []);
const codeFlowLocations = (res.codeFlows || []).reduce((acc1, cf) => {
acc1.push(...(cf.threadFlows || []).reduce((acc2, tf) => {
acc2.push(...(tf.locations || []).reduce((acc3, loc) => {
var _a, _b, _c;
if ((_c = (_b = (_a = loc.location) === null || _a === void 0 ? void 0 : _a.physicalLocation) === null || _b === void 0 ? void 0 : _b.artifactLocation) === null || _c === void 0 ? void 0 : _c.uri) {
acc3.push(loc.location.physicalLocation.artifactLocation.uri);
}
;
return acc3;
}, []));
return acc2;
}, []));
return acc1;
}, []);
if (locations.some(loc => changedFiles.includes(loc)) ||
codeFlowLocations.some(loc => removedFiles.includes(loc)))
continue;
let ruleIndex = sarifRules.findIndex((rule) => rule.id === res.ruleId);
if (ruleIndex === -1 && res.ruleIndex &&
oldAnalysisResults.sarif.runs[0].tool.driver.rules &&
oldAnalysisResults.sarif.runs[0].tool.driver.rules[res.ruleIndex]) {
const newLength = sarifRules.push(oldAnalysisResults.sarif.runs[0].tool.driver.rules[res.ruleIndex]);
ruleIndex = newLength - 1;
}
res.ruleIndex = ruleIndex;
sarifResults.push(res);
}
const bundleOptions = {
...lodash_pick_1.default(options, ['baseURL', 'sessionToken', 'source']),
baseDir,
files: bundleFiles,
};
// Create remote bundle
return bundleFiles.length ? bundles_1.remoteBundleFactory(bundleOptions) : null;
newAnalysisResults.sarif.runs[0].results = sarifResults;
newAnalysisResults.sarif.runs[0].tool.driver.rules = sarifRules;
return newAnalysisResults;
}
exports.createBundleFromFolders = createBundleFromFolders;
/**
* Get supported filters and test baseURL for correctness and availability
*
* @param baseURL
* @param source
* @returns
*/
async function getSupportedFiles(baseURL, source) {
emitter_1.default.supportedFilesLoaded(null);
const resp = await http_1.getFilters(baseURL, source);
if (resp.type === 'error') {
throw resp.error;
async function extendAnalysis(options) {
const { files, removedFiles } = await files_1.prepareExtendingBundle(options.fileBundle.baseDir, options.fileBundle.supportedFiles, options.fileBundle.fileIgnores, options.files, options.fileOptions.maxPayload, options.fileOptions.symlinksEnabled);
if (!files.length && !removedFiles.length) {
return null; // nothing to extend, just return null
}
const supportedFiles = resp.value;
emitter_1.default.supportedFilesLoaded(supportedFiles);
return supportedFiles;
// Extend remote bundle
const remoteBundle = await bundles_1.remoteBundleFactory({
...options.connection,
bundleHash: options.fileBundle.bundleHash,
baseDir: options.fileBundle.baseDir,
maxPayload: options.fileOptions.maxPayload,
files,
removedFiles,
});
if (remoteBundle === null)
return null;
const fileBundle = {
...options.fileBundle,
...remoteBundle,
};
const limitToFiles = files.map(f => f.bundlePath);
let analysisResults = await analyzeBundle({
bundleHash: remoteBundle.bundleHash,
...options.connection,
...options.analysisOptions,
limitToFiles,
});
// TODO: Transform relative paths into absolute
// analysisData.analysisResults.files = normalizeResultFiles(analysisData.analysisResults.files, bundle.baseDir);
analysisResults = mergeBundleResults(options.analysisResults, analysisResults, limitToFiles, removedFiles);
return { ...options, fileBundle, analysisResults };
}
exports.extendAnalysis = extendAnalysis;
//# sourceMappingURL=analysis.js.map
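The new `mergeBundleResults` helper above is what lets `extendAnalysis` return a full result set after re-analysing only the changed files: it keeps the fresh findings and carries over untouched old findings, re-mapping their `ruleIndex` into the new rule table. The dist code collects locations with `reduce` because, as its inline note says, Node 10 has no `Array.prototype.flatMap`. A hedged sketch of just the keep/drop decision, in modern-Node syntax with illustrative names:

```javascript
// Illustrative only — mirrors the condition mergeBundleResults applies to each
// finding from the previous SARIF run; not part of the module's public API.
function shouldCarryOver(oldResult, limitToFiles, removedFiles = []) {
  const changedFiles = [...limitToFiles, ...removedFiles];
  const locations = (oldResult.locations || [])
    .map(loc => loc.physicalLocation?.artifactLocation?.uri)
    .filter(Boolean);
  const codeFlowLocations = (oldResult.codeFlows || [])
    .flatMap(cf => cf.threadFlows || [])
    .flatMap(tf => tf.locations || [])
    .map(loc => loc.location?.physicalLocation?.artifactLocation?.uri)
    .filter(Boolean);
  // Drop the old finding if it points at a re-analysed file, or if its code
  // flow passes through a removed file; otherwise it is appended to the new
  // run (with its rule re-indexed).
  return !locations.some(uri => changedFiles.includes(uri)) &&
    !codeFlowLocations.some(uri => removedFiles.includes(uri));
}
```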
@@ -1,2 +0,3 @@
import { FileInfo } from './interfaces/files.interface';
import { FileInfo, SupportedFiles } from './interfaces/files.interface';
import { AnalyzeFoldersOptions } from './files';
import { RemoteBundle, ConnectionOptions } from './http';
@@ -25,2 +26,16 @@ interface PrepareRemoteBundleOptions extends ConnectionOptions {
export declare function remoteBundleFactory(options: RemoteBundleFactoryOptions): Promise<RemoteBundle | null>;
interface CreateBundleFromFoldersOptions extends ConnectionOptions, AnalyzeFoldersOptions {
}
export interface FileBundle extends RemoteBundle {
baseDir: string;
supportedFiles: SupportedFiles;
fileIgnores: string[];
}
/**
* Creates a remote bundle and returns response from the bundle API
*
* @param {CreateBundleFromFoldersOptions} options
* @returns {Promise<FileBundle | null>}
*/
export declare function createBundleFromFolders(options: CreateBundleFromFoldersOptions): Promise<FileBundle | null>;
export {};
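`createBundleFromFolders` now lives in this module and resolves to the richer `FileBundle`, i.e. the remote bundle plus the local context (`baseDir`, `supportedFiles`, `fileIgnores`) needed to extend it later. A rough sketch of calling it on its own, reusing the same placeholder connection values as above:

```javascript
const { createBundleFromFolders } = require('@snyk/code-client');

async function uploadOnly() {
  const fileBundle = await createBundleFromFolders({
    baseURL,
    sessionToken,
    source,
    paths: ['/home/user/repo'],
    symlinksEnabled: false,
  });
  if (!fileBundle) return null; // nothing matched the supported-file filters
  // FileBundle = RemoteBundle fields plus the local scan context.
  console.log(fileBundle.bundleHash, fileBundle.baseDir, fileBundle.fileIgnores.length);
  return fileBundle;
}
```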
@@ -6,3 +6,3 @@ "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.remoteBundleFactory = exports.uploadRemoteBundle = void 0;
exports.createBundleFromFolders = exports.remoteBundleFactory = exports.uploadRemoteBundle = void 0;
/* eslint-disable no-await-in-loop */
@@ -118,2 +118,64 @@ const lodash_chunk_1 = __importDefault(require("lodash.chunk"));
exports.remoteBundleFactory = remoteBundleFactory;
/**
* Get supported filters and test baseURL for correctness and availability
*
* @param baseURL
* @param source
* @returns
*/
async function getSupportedFiles(baseURL, source) {
emitter_1.default.supportedFilesLoaded(null);
const resp = await http_1.getFilters(baseURL, source);
if (resp.type === 'error') {
throw resp.error;
}
const supportedFiles = resp.value;
emitter_1.default.supportedFilesLoaded(supportedFiles);
return supportedFiles;
}
/**
* Creates a remote bundle and returns response from the bundle API
*
* @param {CreateBundleFromFoldersOptions} options
* @returns {Promise<FileBundle | null>}
*/
async function createBundleFromFolders(options) {
const baseDir = files_1.determineBaseDir(options.paths);
// Fetch supported files to save network traffic
const supportedFiles = await getSupportedFiles(options.baseURL, options.source);
// Scan for custom ignore rules
const fileIgnores = await files_1.collectIgnoreRules(options.paths, options.symlinksEnabled, options.defaultFileIgnores);
emitter_1.default.scanFilesProgress(0);
const bundleFiles = [];
let totalFiles = 0;
const bundleFileCollector = files_1.collectBundleFiles({
...lodash_pick_1.default(options, ['paths', 'symlinksEnabled', 'maxPayload']),
baseDir,
fileIgnores,
supportedFiles,
});
for await (const f of bundleFileCollector) {
bundleFiles.push(f);
totalFiles += 1;
emitter_1.default.scanFilesProgress(totalFiles);
}
const bundleOptions = {
...lodash_pick_1.default(options, ['baseURL', 'sessionToken', 'source']),
baseDir,
files: bundleFiles,
};
// Create remote bundle
if (!bundleFiles.length)
return null;
const remoteBundle = await remoteBundleFactory(bundleOptions);
if (remoteBundle === null)
return null;
return {
...remoteBundle,
baseDir,
supportedFiles,
fileIgnores,
};
}
exports.createBundleFromFolders = createBundleFromFolders;
//# sourceMappingURL=bundles.js.map
@@ -234,2 +234,3 @@ "use strict";
const fileInfo = await getFileInfo(filePath.toString(), options.baseDir, false, cache);
// dc ignore AttrAccessOnNull: false positive, there is a precondition with &&
if (fileInfo && fileInfo.size <= maxPayload) {
@@ -245,2 +246,3 @@ yield fileInfo;
const fileInfo = await getFileInfo(filePath.toString(), options.baseDir, false, cache);
// dc ignore AttrAccessOnNull: false positive, there is a precondition with &&
if (fileInfo && fileInfo.size <= maxPayload) {
@@ -261,3 +263,5 @@ yield fileInfo;
// Exclude files to be ignored based on ignore rules. We assume here that ignore rules have not been changed.
processingFiles = processingFiles.filter(f => !isMatch(f, fileIgnores));
processingFiles = processingFiles
.map(f => resolveBundleFilePath(baseDir, f))
.filter(f => !isMatch(f, fileIgnores));
if (processingFiles.length) {
@@ -264,0 +268,0 @@ // Determine existing files (minus removed)
@@ -1,2 +0,3 @@
import { analyzeFolders, createBundleFromFolders } from './analysis';
import { analyzeFolders } from './analysis';
import { createBundleFromFolders } from './bundles';
import emitter from './emitter';
@@ -3,0 +4,0 @@ import { startSession, checkSession } from './http';
@@ -28,3 +28,4 @@ "use strict";
Object.defineProperty(exports, "analyzeFolders", { enumerable: true, get: function () { return analysis_1.analyzeFolders; } });
Object.defineProperty(exports, "createBundleFromFolders", { enumerable: true, get: function () { return analysis_1.createBundleFromFolders; } });
const bundles_1 = require("./bundles");
Object.defineProperty(exports, "createBundleFromFolders", { enumerable: true, get: function () { return bundles_1.createBundleFromFolders; } });
const emitter_1 = __importDefault(require("./emitter"));
@@ -31,0 +32,0 @@ exports.emitter = emitter_1.default;
{
"name": "@snyk/code-client",
"version": "4.0.1-rc",
"description": "Typescript consumer of SnykCode public API",
@@ -36,2 +37,3 @@ "main": "dist/index.js",
"testTimeout": 50000,
"verbose": true,
@@ -51,2 +53,3 @@ "clearMocks": true,
"@typescript-eslint/parser": "^4.0.1",
"@types/needle": "^2.5.2",
"eslint": "^7.8.1",
@@ -75,3 +78,2 @@ "eslint-config-airbnb-base": "^14.2.0",
"@types/uuid": "^8.3.0",
"axios": "^0.21.1",
"ignore": "^5.1.8",
@@ -83,6 +85,6 @@ "lodash.chunk": "^4.2.0",
"multimatch": "^5.0.0",
"needle": "^2.8.0",
"queue": "^6.0.1",
"uuid": "^8.3.2"
},
"version": "4.0.0-alpha.0"
}
}
@@ -8,2 +8,5 @@ # code-client
This package is published using:
[![semantic-release](https://img.shields.io/badge/%20%20%F0%9F%93%A6%F0%9F%9A%80-semantic--release-e10079.svg)](https://github.com/semantic-release/semantic-release)
# Installation
@@ -90,20 +93,13 @@
```javascript
const bundle = await codeClient.analyzeFolders({
baseURL,
sessionToken,
severity: 1,
paths: ['/home/user/repo'],
sarif,
source,
const results = await codeClient.analyzeFolders({
connection: { baseURL, sessionToken, source },
analysisOptions: {
severity: 1,
},
fileOptions: {
paths: ['/home/user/repo'],
symlinksEnabled: false,
},
});
// bundle implements interface FileBundle:
// readonly baseURL: string;
// readonly sessionToken: string;
// readonly severity: AnalysisSeverity;
// readonly bundleHash: string;
// readonly analysisResults: IAnalysisResult;
// readonly baseDir: string;
// readonly paths: string[];
// readonly supportedFiles: SupportedFiles;
```
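The removed comment block used to document the flat result object; under the new typings the resolved value is a `FileAnalysis`, so the interesting pieces now sit one level down. A hedged illustration, with field names taken from the declaration files shown earlier:

```javascript
if (results) {
  // results.fileBundle      -> FileBundle: bundleHash, baseDir, supportedFiles, fileIgnores, ...
  // results.analysisResults -> AnalysisResult, including the SARIF document
  const { fileBundle, analysisResults } = results;
  console.log(fileBundle.bundleHash);
  console.log((analysisResults.sarif.runs[0].results || []).length);
}
```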
@@ -114,12 +110,10 @@
```javascript
const result = await codeClient.extendBundle({
sessionToken,
bundleHash,
const results = await codeClient.extendAnalysis({
...previousAnalysisResults,
files: {
'/home/user/repo/main.js': '3e297985...',
'/home/user/repo/app.js': 'c8bc6452...',
'/home/user/repo/main.js',
'/home/user/repo/app.js',
},
removedFiles: [],
});
const { bundleHash, missingFiles, uploadURL } = result;
```
@@ -126,0 +120,0 @@
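Because the diff view interleaves the old `extendBundle` snippet with the new one, the intended new example is easier to read in one piece. Per the `ExtendAnalysisOptions` declaration, `files` is now a plain list of changed paths rather than a path-to-hash map; a sketch, assuming `previousAnalysisResults` is the value returned by `analyzeFolders`:

```javascript
const results = await codeClient.extendAnalysis({
  ...previousAnalysisResults,
  files: ['/home/user/repo/main.js', '/home/user/repo/app.js'],
});
// `results` is null when nothing changed; otherwise it is an updated
// FileAnalysis with the previous findings merged into the new ones.
```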
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Network access
Supply chain risk: This module accesses the network.
Found 2 instances in 1 package
+ Added needle@^2.8.0
+ Added debug@3.2.7 (transitive)
+ Added iconv-lite@0.4.24 (transitive)
+ Added ms@2.1.3 (transitive)
+ Added needle@2.9.1 (transitive)
+ Added safer-buffer@2.1.2 (transitive)
+ Added sax@1.4.1 (transitive)
- Removed axios@^0.21.1
- Removed axios@0.21.4 (transitive)
- Removed follow-redirects@1.15.9 (transitive)