@snyk/code-client
Advanced tools
Comparing version 4.3.0 to 4.3.1
@@ -190,3 +190,3 @@ "use strict"; | ||
async function extendAnalysis(options) { | ||
const { files, removedFiles } = await (0, files_1.prepareExtendingBundle)(options.fileBundle.baseDir, options.fileBundle.supportedFiles, options.fileBundle.fileIgnores, options.files, options.fileOptions.maxPayload, options.fileOptions.symlinksEnabled); | ||
const { files, removedFiles } = await (0, files_1.prepareExtendingBundle)(options.fileBundle.baseDir, options.fileBundle.supportedFiles, options.fileBundle.fileIgnores, options.files, options.fileOptions.symlinksEnabled); | ||
if (!files.length && !removedFiles.length) { | ||
@@ -200,3 +200,2 @@ return null; // nothing to extend, just return null | ||
baseDir: options.fileBundle.baseDir, | ||
maxPayload: options.fileOptions.maxPayload, | ||
files, | ||
@@ -203,0 +202,0 @@ removedFiles, |
@@ -8,3 +8,2 @@ import { FileInfo, SupportedFiles } from './interfaces/files.interface'; | ||
removedFiles?: string[]; | ||
maxPayload?: number; | ||
} | ||
@@ -14,3 +13,2 @@ interface UpdateRemoteBundleOptions extends ConnectionOptions { | ||
files: FileInfo[]; | ||
maxPayload?: number; | ||
} | ||
@@ -23,3 +21,3 @@ /** | ||
*/ | ||
export declare function uploadRemoteBundle({ maxPayload, ...options }: UpdateRemoteBundleOptions): Promise<boolean>; | ||
export declare function uploadRemoteBundle(options: UpdateRemoteBundleOptions): Promise<void>; | ||
interface RemoteBundleFactoryOptions extends PrepareRemoteBundleOptions { | ||
@@ -26,0 +24,0 @@ baseDir: string; |
@@ -15,3 +15,3 @@ "use strict"; | ||
const emitter_1 = require("./emitter"); | ||
async function* prepareRemoteBundle({ maxPayload = constants_1.MAX_PAYLOAD, ...options }) { | ||
async function* prepareRemoteBundle(options) { | ||
let response; | ||
@@ -21,6 +21,10 @@ let { bundleHash } = options; | ||
emitter_1.emitter.createBundleProgress(cumulativeProgress, options.files.length); | ||
for (const chunkedFiles of (0, files_1.composeFilePayloads)(options.files, maxPayload)) { | ||
for (const chunkedFiles of (0, files_1.composeFilePayloads)(options.files, constants_1.MAX_PAYLOAD)) { | ||
const apiParams = { | ||
...(0, lodash_pick_1.default)(options, ['baseURL', 'sessionToken', 'source', 'removedFiles']), | ||
files: chunkedFiles.reduce((d, f) => ({ ...d, [f.bundlePath]: f.hash }), {}), | ||
files: chunkedFiles.reduce((d, f) => { | ||
// deepcode ignore PrototypePollution: FP this is an internal code | ||
d[f.bundlePath] = f.hash; | ||
return d; | ||
}, {}), | ||
}; | ||
@@ -52,3 +56,3 @@ if (!bundleHash) { | ||
*/ | ||
async function uploadRemoteBundle({ maxPayload = constants_1.MAX_PAYLOAD, ...options }) { | ||
async function uploadRemoteBundle(options) { | ||
let uploadedFiles = 0; | ||
@@ -58,23 +62,22 @@ emitter_1.emitter.uploadBundleProgress(0, options.files.length); | ||
const uploadFileChunks = async (bucketFiles) => { | ||
const resp = await (0, http_1.extendBundle)({ | ||
// Note: we specifically create __new__ isolated bundles here to faster files upload | ||
const resp = await (0, http_1.createBundle)({ | ||
...apiParams, | ||
files: bucketFiles.reduce((d, f) => ({ ...d, [f.bundlePath]: (0, lodash_pick_1.default)(f, ['hash', 'content']) }), {}), | ||
files: bucketFiles.reduce((d, f) => { | ||
d[f.bundlePath] = (0, lodash_pick_1.default)(f, ['hash', 'content']); | ||
return d; | ||
}, {}), | ||
}); | ||
// During upload process, we expect the bundleHash not to change (same file map) | ||
if (resp.type !== 'error' && resp.value.bundleHash === apiParams.bundleHash) { | ||
if (resp.type !== 'error') { | ||
uploadedFiles += bucketFiles.length; | ||
emitter_1.emitter.uploadBundleProgress(uploadedFiles, options.files.length); | ||
return true; | ||
} | ||
return false; | ||
}; | ||
const tasks = []; | ||
for (const bucketFiles of (0, files_1.composeFilePayloads)(options.files, maxPayload)) { | ||
tasks.push(bucketFiles); | ||
const files = []; | ||
for (const bucketFiles of (0, files_1.composeFilePayloads)(options.files, constants_1.MAX_PAYLOAD)) { | ||
files.push(bucketFiles); | ||
} | ||
const results = await (0, p_map_1.default)(tasks, async (task) => await uploadFileChunks(task), { | ||
await (0, p_map_1.default)(files, async (task) => await uploadFileChunks(task), { | ||
concurrency: constants_1.UPLOAD_CONCURRENCY, | ||
}); | ||
// Returning false if at least one result is false | ||
return results.every(r => !!r); | ||
} | ||
@@ -90,3 +93,3 @@ exports.uploadRemoteBundle = uploadRemoteBundle; | ||
const missingFiles = await (0, files_1.resolveBundleFiles)(options.baseDir, remoteBundle.missingFiles); | ||
const isUploaded = await uploadRemoteBundle({ | ||
await uploadRemoteBundle({ | ||
...connectionOptions, | ||
@@ -96,5 +99,2 @@ bundleHash: remoteBundle.bundleHash, | ||
}); | ||
if (!isUploaded) { | ||
throw new Error('Failed to upload some files'); | ||
} | ||
const bundleResponse = await (0, http_1.checkBundle)({ ...connectionOptions, bundleHash: remoteBundle.bundleHash }); | ||
@@ -161,3 +161,3 @@ if (bundleResponse.type === 'error') { | ||
const bundleFileCollector = (0, files_1.collectBundleFiles)({ | ||
...(0, lodash_pick_1.default)(options, ['paths', 'symlinksEnabled', 'maxPayload']), | ||
...(0, lodash_pick_1.default)(options, ['paths', 'symlinksEnabled']), | ||
baseDir, | ||
@@ -164,0 +164,0 @@ fileIgnores, |
export declare const MAX_PAYLOAD: number; | ||
export declare const MAX_FILE_SIZE: number; | ||
export declare const defaultBaseURL = "https://api.snyk.deepcode.ai"; | ||
@@ -11,6 +12,6 @@ export declare const HASH_ALGORITHM = "sha256"; | ||
export declare const CACHE_KEY = ".dccache"; | ||
export declare const MAX_UPLOAD_ATTEMPTS = 5; | ||
export declare const UPLOAD_CONCURRENCY = 5; | ||
export declare const MAX_UPLOAD_ATTEMPTS = 10; | ||
export declare const UPLOAD_CONCURRENCY = 2; | ||
export declare const POLLING_INTERVAL = 500; | ||
export declare const MAX_RETRY_ATTEMPTS = 5; | ||
export declare const MAX_RETRY_ATTEMPTS = 10; | ||
export declare const REQUEST_RETRY_DELAY: number; | ||
@@ -17,0 +18,0 @@ export declare const IGNORES_DEFAULT: string[]; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.DEFAULT_ERROR_MESSAGES = exports.ErrorCodes = exports.DCIGNORE_DRAFTS = exports.IGNORE_FILES_NAMES = exports.IGNORES_DEFAULT = exports.REQUEST_RETRY_DELAY = exports.MAX_RETRY_ATTEMPTS = exports.POLLING_INTERVAL = exports.UPLOAD_CONCURRENCY = exports.MAX_UPLOAD_ATTEMPTS = exports.CACHE_KEY = exports.EXCLUDED_NAMES = exports.DOTSNYK_FILENAME = exports.DCIGNORE_FILENAME = exports.GITIGNORE_FILENAME = exports.GIT_FILENAME = exports.ENCODE_TYPE = exports.HASH_ALGORITHM = exports.defaultBaseURL = exports.MAX_PAYLOAD = void 0; | ||
exports.DEFAULT_ERROR_MESSAGES = exports.ErrorCodes = exports.DCIGNORE_DRAFTS = exports.IGNORE_FILES_NAMES = exports.IGNORES_DEFAULT = exports.REQUEST_RETRY_DELAY = exports.MAX_RETRY_ATTEMPTS = exports.POLLING_INTERVAL = exports.UPLOAD_CONCURRENCY = exports.MAX_UPLOAD_ATTEMPTS = exports.CACHE_KEY = exports.EXCLUDED_NAMES = exports.DOTSNYK_FILENAME = exports.DCIGNORE_FILENAME = exports.GITIGNORE_FILENAME = exports.GIT_FILENAME = exports.ENCODE_TYPE = exports.HASH_ALGORITHM = exports.defaultBaseURL = exports.MAX_FILE_SIZE = exports.MAX_PAYLOAD = void 0; | ||
const dcignore_1 = require("@deepcode/dcignore"); | ||
exports.MAX_PAYLOAD = 4 * 1024 * 1024; | ||
exports.MAX_FILE_SIZE = 128 * 1024; | ||
exports.defaultBaseURL = 'https://api.snyk.deepcode.ai'; | ||
@@ -15,7 +16,7 @@ exports.HASH_ALGORITHM = 'sha256'; | ||
exports.CACHE_KEY = '.dccache'; | ||
exports.MAX_UPLOAD_ATTEMPTS = 5; | ||
exports.UPLOAD_CONCURRENCY = 5; | ||
exports.MAX_UPLOAD_ATTEMPTS = 10; | ||
exports.UPLOAD_CONCURRENCY = 2; | ||
exports.POLLING_INTERVAL = 500; | ||
exports.MAX_RETRY_ATTEMPTS = 5; // Request retries on network errors | ||
exports.REQUEST_RETRY_DELAY = 30 * 1000; // 30 seconds delay between retries | ||
exports.MAX_RETRY_ATTEMPTS = 10; // Request retries on network errors | ||
exports.REQUEST_RETRY_DELAY = 5 * 1000; // delay between retries in milliseconds | ||
exports.IGNORES_DEFAULT = [`**/${exports.GIT_FILENAME}/**`]; | ||
@@ -22,0 +23,0 @@ exports.IGNORE_FILES_NAMES = [exports.GITIGNORE_FILENAME, exports.DCIGNORE_FILENAME, exports.DOTSNYK_FILENAME]; |
@@ -11,3 +11,2 @@ import { Cache } from './cache'; | ||
symlinksEnabled?: boolean; | ||
maxPayload?: number; | ||
defaultFileIgnores?: string[]; | ||
@@ -23,4 +22,4 @@ } | ||
* */ | ||
export declare function collectBundleFiles({ maxPayload, symlinksEnabled, baseDir, fileIgnores, paths, supportedFiles, }: CollectBundleFilesOptions): AsyncGenerator<FileInfo>; | ||
export declare function prepareExtendingBundle(baseDir: string, supportedFiles: SupportedFiles, fileIgnores: string[] | undefined, files: string[], maxFileSize?: number, symlinksEnabled?: boolean): Promise<{ | ||
export declare function collectBundleFiles({ symlinksEnabled, baseDir, fileIgnores, paths, supportedFiles, }: CollectBundleFilesOptions): AsyncGenerator<FileInfo>; | ||
export declare function prepareExtendingBundle(baseDir: string, supportedFiles: SupportedFiles, fileIgnores: string[] | undefined, files: string[], symlinksEnabled?: boolean): Promise<{ | ||
files: FileInfo[]; | ||
@@ -27,0 +26,0 @@ removedFiles: string[]; |
@@ -114,3 +114,2 @@ "use strict"; | ||
function parseFileIgnores(path) { | ||
var _a, _b; | ||
let rules = []; | ||
@@ -123,11 +122,12 @@ const dirname = nodePath.dirname(path); | ||
const parsed = (0, yaml_1.parse)(f); | ||
const concatIgnorePath = (p) => `${nodePath.dirname(path)}/${p}`; | ||
const codeIgnoredPaths = ((_a = parsed.exclude.code) === null || _a === void 0 ? void 0 : _a.map(concatIgnorePath)) || []; | ||
const globalIgnoredPaths = ((_b = parsed.exclude.global) === null || _b === void 0 ? void 0 : _b.map(concatIgnorePath)) || []; | ||
return [...codeIgnoredPaths, ...globalIgnoredPaths]; | ||
const codeIgnoredPaths = parsed.exclude.code || []; | ||
const globalIgnoredPaths = parsed.exclude.global || []; | ||
rules = [...codeIgnoredPaths, ...globalIgnoredPaths]; | ||
} | ||
rules = f | ||
.split('\n') | ||
.map(l => l.trim()) | ||
.filter(l => !!l && !l.startsWith('#')); | ||
else { | ||
rules = f | ||
.split('\n') | ||
.map(l => l.trim()) | ||
.filter(l => !!l && !l.startsWith('#')); | ||
} | ||
} | ||
@@ -221,3 +221,3 @@ catch (err) { | ||
* */ | ||
async function* collectBundleFiles({ maxPayload = constants_1.MAX_PAYLOAD, symlinksEnabled = false, baseDir, fileIgnores, paths, supportedFiles, }) { | ||
async function* collectBundleFiles({ symlinksEnabled = false, baseDir, fileIgnores, paths, supportedFiles, }) { | ||
const cache = new cache_1.Cache(constants_1.CACHE_KEY, baseDir); | ||
@@ -233,3 +233,3 @@ const files = []; | ||
continue; | ||
if (fileStats.isFile() && fileStats.size <= maxPayload) { | ||
if (fileStats.isFile() && fileStats.size <= constants_1.MAX_FILE_SIZE) { | ||
files.push(path); | ||
@@ -249,3 +249,3 @@ } | ||
// dc ignore AttrAccessOnNull: false positive, there is a precondition with && | ||
if (fileInfo && fileInfo.size <= maxPayload) { | ||
if (fileInfo && fileInfo.size <= constants_1.MAX_FILE_SIZE) { | ||
yield fileInfo; | ||
@@ -261,3 +261,3 @@ } | ||
// dc ignore AttrAccessOnNull: false positive, there is a precondition with && | ||
if (fileInfo && fileInfo.size <= maxPayload) { | ||
if (fileInfo && fileInfo.size <= constants_1.MAX_FILE_SIZE) { | ||
yield fileInfo; | ||
@@ -270,3 +270,3 @@ } | ||
exports.collectBundleFiles = collectBundleFiles; | ||
async function prepareExtendingBundle(baseDir, supportedFiles, fileIgnores = constants_1.IGNORES_DEFAULT, files, maxFileSize = constants_1.MAX_PAYLOAD, symlinksEnabled = false) { | ||
async function prepareExtendingBundle(baseDir, supportedFiles, fileIgnores = constants_1.IGNORES_DEFAULT, files, symlinksEnabled = false) { | ||
let removedFiles = []; | ||
@@ -290,3 +290,3 @@ let bundleFiles = []; | ||
foundFiles = entries.reduce((s, e) => { | ||
if (e.stats && e.stats.size <= maxFileSize) { | ||
if (e.stats && e.stats.size <= constants_1.MAX_FILE_SIZE) { | ||
s.add(e.path); | ||
@@ -293,0 +293,0 @@ } |
@@ -83,3 +83,3 @@ { | ||
}, | ||
"version": "4.3.0" | ||
"version": "4.3.1" | ||
} |
@@ -95,3 +95,20 @@ # code-client | ||
analysisOptions: { | ||
severity: 1 | ||
}, | ||
fileOptions: { | ||
paths: ['/home/user/repo'], | ||
symlinksEnabled: false, | ||
}, | ||
}); | ||
``` | ||
### Run analysis only for specific file, the one just changed for example | ||
```javascript | ||
const results = await codeClient.analyzeFolders({ | ||
connection: { baseURL, sessionToken, source }, | ||
analysisOptions: { | ||
severity: 1, | ||
limitToFiles: ['recently-changed-file.js'] | ||
}, | ||
@@ -98,0 +115,0 @@ fileOptions: { |
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
142
130090
1822