Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

fabric8-analytics-lsp-server

Package Overview
Dependencies
Maintainers
3
Versions
103
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

fabric8-analytics-lsp-server - npm Package Compare versions

Comparing version 0.2.5 to 0.2.6

73

collector.js

@@ -6,11 +6,2 @@ /* --------------------------------------------------------------------------------------------

'use strict';
// TypeScript-emitted helper (the standard `__awaiter` shim for pre-ES2017
// targets): drives a generator-based async function by resolving each
// yielded value as a Promise before resuming the generator.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
// Wrap a plain value in the target Promise implementation P when needed.
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
// Resume the generator with a resolved value; reject on a synchronous throw.
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
// Propagate a rejection back into the generator via generator.throw.
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
// Either settle the outer promise (generator done) or await the next yield.
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });

@@ -43,20 +34,18 @@ exports.ReqDependencyCollector = exports.PomXmlDependencyCollector = exports.DependencyCollector = void 0;

}
collect(contents) {
return __awaiter(this, void 0, void 0, function* () {
const file = utils_1.stream_from_string(contents);
let parser = new json_1.StreamingParser(file);
let dependencies = [];
let tree = yield parser.parse();
let top_level = tree.children[0];
/* Iterate over all keys, select those in which we're interested as defined
by `classes`, and map each item to a new `Dependency` object */
for (const p of top_level.properties) {
if (this.classes.indexOf(p.key) > -1) {
for (const dependency of p.value.object) {
dependencies.push(new Dependency(dependency));
}
async collect(contents) {
const file = utils_1.stream_from_string(contents);
let parser = new json_1.StreamingParser(file);
let dependencies = [];
let tree = await parser.parse();
let top_level = tree.children[0];
/* Iterate over all keys, select those in which we're interested as defined
by `classes`, and map each item to a new `Dependency` object */
for (const p of top_level.properties) {
if (this.classes.indexOf(p.key) > -1) {
for (const dependency of p.value.object) {
dependencies.push(new Dependency(dependency));
}
}
return dependencies;
});
}
return dependencies;
}

@@ -99,7 +88,5 @@ }

}
collect(contents) {
return __awaiter(this, void 0, void 0, function* () {
let parser = new NaivePyParser(contents);
return parser.parse();
});
async collect(contents) {
let parser = new NaivePyParser(contents);
return parser.parse();
}

@@ -159,8 +146,6 @@ }

}
parse() {
return __awaiter(this, void 0, void 0, function* () {
return new Promise(resolve => {
this.stream.pipe(this.parser.saxStream).on('end', (data) => {
resolve(this.dependencies);
});
async parse() {
return new Promise(resolve => {
this.stream.pipe(this.parser.saxStream).on('end', (data) => {
resolve(this.dependencies);
});

@@ -174,12 +159,10 @@ });

}
collect(contents) {
return __awaiter(this, void 0, void 0, function* () {
const file = utils_1.stream_from_string(contents);
let parser = new NaivePomXmlSaxParser(file);
let dependencies;
yield parser.parse().then(data => {
dependencies = data;
});
return dependencies || [];
async collect(contents) {
const file = utils_1.stream_from_string(contents);
let parser = new NaivePomXmlSaxParser(file);
let dependencies;
await parser.parse().then(data => {
dependencies = data;
});
return dependencies || [];
}

@@ -186,0 +169,0 @@ }

@@ -112,3 +112,3 @@ /* --------------------------------------------------------------------------------------------

this.context = context;
this.binding = { path: ['component_analyses', 'vulnerability'] };
this.binding = { path: ['vulnerability'] };
/* recommendation to use a different version */

@@ -115,0 +115,0 @@ this.changeToBinding = { path: ['recommended_versions'] };

@@ -6,11 +6,2 @@ /* --------------------------------------------------------------------------------------------

'use strict';
// TypeScript-emitted helper (the standard `__awaiter` shim for pre-ES2017
// targets): drives a generator-based async function by resolving each
// yielded value as a Promise before resuming the generator.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
// Wrap a plain value in the target Promise implementation P when needed.
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
// Resume the generator with a resolved value; reject on a synchronous throw.
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
// Propagate a rejection back into the generator via generator.throw.
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
// Either settle the outer promise (generator done) or await the next yield.
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });

@@ -112,37 +103,35 @@ exports.ValueType = exports.Variant = exports.KeyValueEntry = exports.StreamingParser = void 0;

}
parse() {
return __awaiter(this, void 0, void 0, function* () {
let scope = new Scope(), parser = new Parser(), stream = new Streamer(), emitter = new Emitter(), packer = new Packer({ packKeys: true, packStrings: true, packNumbers: true });
let root = scope;
/* In the following code we observe two event streams, one defined by parser
and the other one by emitter. The difference here is that parser produces raw tokens
with positional information as to where in the file the token is declared, but since
this stream is very low level and contains tokens like ", [, ] etc. we need to correlate
events from this stream with the events produced by the emitter stream which gives
us much finer granularity in handling the underlying JSON structure.
The correlation of the events itself is handled by the `Scope` which in essence
implements a finite state machine to make sense of the two event streams. */
parser.on("data", function (x) {
if (scope.marker != TokenMarker.Invalid) {
scope.consume(x);
}
});
parser.on("error", function (e) {
// the JSON document doesn't have to be well-formed, that's fine
});
emitter.on("startKey", function ( /*e*/) { scope.marker = TokenMarker.Key; });
/* We don't care about numbers, nulls, arrays and booleans thus far */
emitter.on("startString", function () { scope.marker = TokenMarker.Value; });
emitter.on("startObject", function () { scope = scope.add_scope(); });
emitter.on("endObject", function () { scope = scope.leave(); });
this.file
.pipe(parser)
.pipe(stream)
.pipe(packer)
.pipe(emitter);
return new Promise(resolve => {
emitter.on("finish", () => resolve(root));
});
/* Stream-parse the JSON document into a tree of Scope objects and resolve
   with the root scope once the emitter pipeline has finished. */
async parse() {
let scope = new Scope(), parser = new Parser(), stream = new Streamer(), emitter = new Emitter(), packer = new Packer({ packKeys: true, packStrings: true, packNumbers: true });
// Keep a handle on the outermost scope; `scope` itself is reassigned as
// objects are entered and left below.
let root = scope;
/* In the following code we observe two event streams, one defined by parser
and the other one by emitter. The difference here is that parser produces raw tokens
with positional information as to where in the file the token is declared, but since
this stream is very low level and contains tokens like ", [, ] etc. we need to correlate
events from this stream with the events produced by the emitter stream which gives
us much finer granularity in handling the underlying JSON structure.
The correlation of the events itself is handled by the `Scope` which in essence
implements a finite state machine to make sense of the two event streams. */
// Only feed raw tokens to the scope while it is expecting a key or value
// (marker set by the emitter handlers below).
parser.on("data", function (x) {
if (scope.marker != TokenMarker.Invalid) {
scope.consume(x);
}
});
parser.on("error", function (e) {
// the JSON document doesn't have to be well-formed, that's fine
});
emitter.on("startKey", function ( /*e*/) { scope.marker = TokenMarker.Key; });
/* We don't care about numbers, nulls, arrays and booleans thus far */
emitter.on("startString", function () { scope.marker = TokenMarker.Value; });
// Object boundaries drive the scope stack: descend on '{', ascend on '}'.
emitter.on("startObject", function () { scope = scope.add_scope(); });
emitter.on("endObject", function () { scope = scope.leave(); });
// Wire the pipeline: file bytes -> raw tokens -> streamed values ->
// packed values -> high-level emitter events.
this.file
.pipe(parser)
.pipe(stream)
.pipe(packer)
.pipe(emitter);
// Resolve with the root scope once the emitter has consumed everything.
return new Promise(resolve => {
emitter.on("finish", () => resolve(root));
});
}

@@ -149,0 +138,0 @@ }

{
"name": "fabric8-analytics-lsp-server",
"description": "LSP Server for Dependency Analytics",
"version": "0.2.5",
"version": "0.2.6",
"author": "Pavel Odvody",

@@ -33,7 +33,7 @@ "contributors": [

"dependencies": {
"request": "^2.79.0",
"node-fetch": "^2.6.0",
"stream-json": "0.6.1",
"vscode-languageserver": "^5.3.0-next.9",
"winston": "3.2.1",
"xml2object": "0.1.2",
"vscode-languageserver": "^5.3.0-next.9"
"xml2object": "0.1.2"
},

@@ -40,0 +40,0 @@ "devDependencies": {

@@ -14,3 +14,3 @@ /* --------------------------------------------------------------------------------------------

const https = require('https');
const request = require('request');
const fetch = require('node-fetch');
const winston = require('winston');

@@ -105,26 +105,2 @@ let transport;

;
;
/* Tracks a set of pending items and invokes `callback` whenever every
   tracked item has been marked complete via `aggregate`. */
class Aggregator {
    constructor(items, callback) {
        this.callback = callback;
        // Each item maps to a completion flag; all start out pending.
        this.mapping = new Map(Array.from(items, (item) => [item, false]));
    }
    // True when no tracked item remains pending (vacuously true when empty).
    is_ready() {
        return [...this.mapping.values()].every((done) => done);
    }
    // Mark one item complete and fire the callback if nothing is pending.
    aggregate(dep) {
        this.mapping.set(dep, true);
        if (this.is_ready()) {
            this.callback();
        }
    }
}
;
class AnalysisConfig {

@@ -139,2 +115,3 @@ constructor() {

this.home_dir = process.env[(process.platform == 'win32') ? 'USERPROFILE' : 'HOME'];
this.uuid = process.env.UUID || "";
}

@@ -154,2 +131,3 @@ }

let DiagnosticsEngines = [consumers_1.SecurityEngine];
/* Generate summarized notification message for vulnerability analysis */
const getCAmsg = (deps, diagnostics, totalCount) => {

@@ -172,47 +150,33 @@ let msg = `Scanned ${deps.length} ${deps.length == 1 ? 'dependency' : 'dependencies'}, `;

const caDefaultMsg = 'Checking for security vulnerabilities ...';
const metadataCache = new Map();
const get_metadata = (ecosystem, name, version) => {
return new Promise((resolve, reject) => {
const cacheKey = ecosystem + " " + name + " " + version;
const metadata = metadataCache[cacheKey];
if (metadata != null) {
logger.info('cache hit for ' + cacheKey);
connection.console.log('cache hit for ' + cacheKey);
resolve(metadata);
/* Fetch Vulnerabilities by component-analysis batch api-call */
const fetchVulnerabilities = async (reqData) => {
let url = config.server_url;
if (config.three_scale_user_token) {
url += `/component-analyses/?user_key=${config.three_scale_user_token}`;
}
else {
url += `/component-analyses`;
}
const headers = {
'Content-Type': 'application/json',
'Authorization': 'Bearer ' + config.api_token,
'uuid': config.uuid,
};
try {
const response = await fetch(url, {
method: 'post',
body: JSON.stringify(reqData),
headers: headers,
});
if (response.ok) {
const respData = await response.json();
return respData;
}
else {
const part = [ecosystem, name, version].map(v => encodeURIComponent(v)).join('/');
const options = {};
options['url'] = config.server_url;
if (config.three_scale_user_token) {
options['url'] += `/component-analyses/${part}?user_key=${config.three_scale_user_token}`;
}
else {
options['url'] += `/component-analyses/${part}/`;
}
options['headers'] = {
'Authorization': 'Bearer ' + config.api_token,
};
logger.debug('get ' + options['url']);
connection.console.log('Scanning ' + part);
if (process.env.RECOMMENDER_API_URL) {
request.get(options, (err, httpResponse, body) => {
if (err) {
reject(err);
}
else {
if ((httpResponse.statusCode === 200 || httpResponse.statusCode === 202)) {
let response = JSON.parse(body);
logger.debug('response ' + response);
metadataCache[cacheKey] = response;
resolve(response);
}
else {
reject(httpResponse.statusCode);
}
}
});
}
return response.status;
}
});
}
catch (err) {
alert(err);
}
};

@@ -227,36 +191,43 @@ /* Total Counts of #Known Security Vulnerability and #Security Advisory */

;
/* Runs DiagnosticPileline to consume response and generate Diagnostic[] */
/* Run each analysis result through a DiagnosticsPipeline, accumulate the
   vulnerability/advisory totals, and publish diagnostics after each entry. */
function runPipeline(response, diagnostics, diagnosticFilePath, dependencyMap, totalCount) {
    for (const entry of response) {
        // Map the result back to the dependency it was requested for.
        const dependency = dependencyMap.get(entry.package + entry.version);
        const pipeline = new consumers_1.DiagnosticsPipeline(DiagnosticsEngines, dependency, config, diagnostics, diagnosticFilePath);
        pipeline.run(entry);
        for (const secEng of pipeline.items) {
            totalCount.vulnerabilityCount += secEng.vulnerabilityCount;
            totalCount.advisoryCount += secEng.advisoryCount;
        }
        connection.sendDiagnostics({ uri: diagnosticFilePath, diagnostics: diagnostics });
    }
}
/* Slice payload in each chunk size of @batchSize */
/* Split `payload` into request objects holding at most `batchSize`
   package versions each, tagged with the target ecosystem. */
function slicePayload(payload, batchSize, ecosystem) {
    const requests = [];
    let offset = 0;
    while (offset < payload.length) {
        requests.push({
            "ecosystem": ecosystem,
            "package_versions": payload.slice(offset, offset + batchSize)
        });
        offset += batchSize;
    }
    return requests;
}
const regexVersion = new RegExp(/^([a-zA-Z0-9]+\.)?([a-zA-Z0-9]+\.)?([a-zA-Z0-9]+\.)?([a-zA-Z0-9]+)$/);
const sendDiagnostics = (ecosystem, uri, contents, collector) => {
const sendDiagnostics = async (ecosystem, diagnosticFilePath, contents, collector) => {
connection.sendNotification('caNotification', { 'data': caDefaultMsg });
collector.collect(contents).then((deps) => {
let diagnostics = [];
/* Aggregate asynchronous requests and send the diagnostics at once */
let aggregator = new Aggregator(deps, () => {
connection.sendNotification('caNotification', { 'data': getCAmsg(deps, diagnostics, totalCount), 'diagCount': diagnostics.length > 0 ? diagnostics.length : 0 });
connection.sendDiagnostics({ uri: uri, diagnostics: diagnostics });
});
let totalCount = new TotalCount();
for (let dependency of deps) {
if (dependency.name.value && dependency.version.value && regexVersion.test(dependency.version.value.trim())) {
get_metadata(ecosystem, dependency.name.value, dependency.version.value).then((response) => {
if (response != null) {
let pipeline = new consumers_1.DiagnosticsPipeline(DiagnosticsEngines, dependency, config, diagnostics, uri);
pipeline.run(response);
for (const item of pipeline.items) {
let secEng = item;
totalCount.vulnerabilityCount += secEng.vulnerabilityCount;
totalCount.advisoryCount += secEng.advisoryCount;
}
}
aggregator.aggregate(dependency);
}).catch((err) => {
aggregator.aggregate(dependency);
connection.console.log(`Error ${err} while ${dependency.name.value}:${dependency.version.value}`);
});
}
else {
aggregator.aggregate(dependency);
}
}
});
const deps = await collector.collect(contents);
const validPackages = deps.filter(d => regexVersion.test(d.version.value.trim()));
const requestPayload = validPackages.map(d => ({ "package": d.name.value, "version": d.version.value }));
const requestMapper = new Map(validPackages.map(d => [d.name.value + d.version.value, d]));
const batchSize = 10;
let diagnostics = [];
let totalCount = new TotalCount();
const start = new Date().getTime();
const allRequests = slicePayload(requestPayload, batchSize, ecosystem).map(request => fetchVulnerabilities(request).then(response => runPipeline(response, diagnostics, diagnosticFilePath, requestMapper, totalCount)));
await Promise.allSettled(allRequests);
const end = new Date().getTime();
console.log("Time taken to fetch vulnerabilities: " + ((end - start) / 1000).toFixed(1) + " sec.");
connection.sendNotification('caNotification', { 'data': getCAmsg(deps, diagnostics, totalCount), 'diagCount': diagnostics.length > 0 ? diagnostics.length : 0 });
};

@@ -263,0 +234,0 @@ files.on(EventStream.Diagnostics, "^package\\.json$", (uri, name, contents) => {

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc