project-health-tool - npm package version comparison

Comparing version 0.1.2 to 0.1.3

dist/ui/assets/index.7ab971cd.css


dist/server/analyse-repos/aggregate-builds-by-repo.js

@@ -11,3 +11,3 @@ "use strict";

const ratings_1 = require("./ratings");
const buildId = (build) => { var _a, _b; return (_b = (_a = build.repository) === null || _a === void 0 ? void 0 : _a.id) !== null && _b !== void 0 ? _b : '<unknown>'; };
const repoId = (build) => { var _a, _b; return (_b = (_a = build.repository) === null || _a === void 0 ? void 0 : _a.id) !== null && _b !== void 0 ? _b : '<unknown>'; };
const defaultBuildStats = { count: 0, success: 0, duration: [] };

@@ -50,13 +50,19 @@ const [timeRange, averageTime] = utils_1.statsStrings('-', utils_1.minutes);

exports.default = (builds) => {
const buildStats = builds
const { buildsById, buildStats } = builds
.reduce((acc, build) => ({
...acc,
[buildId(build)]: combineStats({
count: 1,
success: build.result === BuildInterfaces_1.BuildResult.Succeeded ? 1 : 0,
duration: [(new Date(build.finishTime)).getTime() - (new Date(build.startTime).getTime())]
}, acc[buildId(build)])
}), {});
buildsById: {
...acc.buildsById,
[build.id]: build
},
buildStats: {
...acc.buildStats,
[repoId(build)]: combineStats({
count: 1,
success: build.result === BuildInterfaces_1.BuildResult.Succeeded ? 1 : 0,
duration: [(new Date(build.finishTime)).getTime() - (new Date(build.startTime).getTime())]
}, acc.buildStats[repoId(build)])
}
}), { buildsById: {}, buildStats: {} });
return {
buildByBuildId: (id) => builds.find(b => b.id === id),
buildByBuildId: (id) => (id ? buildsById[id] : undefined),
buildByRepoId: (id) => {

@@ -63,0 +69,0 @@ if (!id)
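
The change to aggregate-builds-by-repo.js above makes the reducer return both a per-build lookup table (buildsById) and per-repository stats (buildStats) in a single pass, so buildByBuildId becomes a map read instead of a linear builds.find. A minimal TypeScript sketch of that shape, with simplified types and a stats merge that stands in for combineStats (everything except the buildsById / buildStats / buildByBuildId names is illustrative):

// Illustrative sketch of the 0.1.3 aggregation shape: one pass over the
// builds produces a per-build map plus per-repo stats buckets.
type Build = { id: number; repository?: { id?: string } };
type Stats = { count: number; success: number; duration: number[] };

const repoId = (b: Build) => b.repository?.id ?? '<unknown>';

const aggregate = (builds: Build[]) => {
  const { buildsById, buildStats } = builds.reduce(
    (acc, build) => ({
      // one map entry per build id, used for O(1) lookups later
      buildsById: { ...acc.buildsById, [build.id]: build },
      // one stats bucket per repository id (simplified merge: count only)
      buildStats: {
        ...acc.buildStats,
        [repoId(build)]: {
          count: (acc.buildStats[repoId(build)]?.count ?? 0) + 1,
          success: acc.buildStats[repoId(build)]?.success ?? 0,
          duration: acc.buildStats[repoId(build)]?.duration ?? []
        }
      }
    }),
    { buildsById: {} as Record<number, Build>, buildStats: {} as Record<string, Stats> }
  );
  return {
    // 0.1.2 scanned the array with builds.find; 0.1.3 reads the prebuilt map
    buildByBuildId: (id?: number) => (id ? buildsById[id] : undefined),
    buildStats
  };
};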

@@ -74,3 +74,3 @@ "use strict";

name: 'Code quality',
count: 0,
count: 'Unknown',
rating: 0,

@@ -98,5 +98,6 @@ indicators: metrics.map(metric => ({

};
const toTitleCase = (str) => str.replace(/\w\S*/g, txt => txt.charAt(0).toUpperCase() + txt.substr(1).toLowerCase());
exports.default = (measures) => {
var _a, _b;
if (!measures)
if (!measures.length)
return { codeQuality: unknownCodeQuality };

@@ -107,3 +108,3 @@ return {

name: 'Code quality',
count: Number(((_b = measures.find(m => m.metric === 'code_smells')) === null || _b === void 0 ? void 0 : _b.value) || 0),
count: toTitleCase(((_b = measures.find(m => m.metric === 'alert_status')) === null || _b === void 0 ? void 0 : _b.value) || 'Unknown'),
indicators: metrics.map(metric => {

@@ -110,0 +111,0 @@ var _a;
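
The code-quality change above swaps the numeric code_smells count for SonarQube's alert_status string, run through the new toTitleCase helper. A quick sketch of what that produces (the sample status values are assumptions, not taken from the package):

// 0.1.2: count was the numeric code_smells value.
// 0.1.3: count is the quality-gate status string, title-cased for display.
const toTitleCase = (str: string) =>
  str.replace(/\w\S*/g, txt => txt.charAt(0).toUpperCase() + txt.substr(1).toLowerCase());

console.log(toTitleCase('OK'));      // 'Ok'
console.log(toTitleCase('ERROR'));   // 'Error'
console.log(toTitleCase('Unknown')); // 'Unknown' (fallback when no measure is present)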

@@ -14,3 +14,3 @@ "use strict";

name: 'Tests',
count: stat.coverage === 0 ? 0 : Math.round(stat.coverage),
count: stat.success + stat.failure,
indicators: [

@@ -17,0 +17,0 @@ {

@@ -20,4 +20,4 @@ "use strict";

return ratings_1.withOverallRating({
name: 'PR',
count: activePrCount,
name: 'Pull requests',
count: activePrCount + completedPrCount,
indicators: [

@@ -24,0 +24,0 @@ {

@@ -44,9 +44,10 @@ "use strict";

const topLevelIndicator = (releaseStats) => {
var _a, _b, _c;
var _a;
return ratings_1.withOverallRating({
name: 'Releases',
count: (((_a = releaseStats === null || releaseStats === void 0 ? void 0 : releaseStats.sit) === null || _a === void 0 ? void 0 : _a.count) || 0) + (((_b = releaseStats === null || releaseStats === void 0 ? void 0 : releaseStats.pp) === null || _b === void 0 ? void 0 : _b.count) || 0) + (((_c = releaseStats === null || releaseStats === void 0 ? void 0 : releaseStats.prod) === null || _c === void 0 ? void 0 : _c.count) || 0),
count: (((_a = releaseStats === null || releaseStats === void 0 ? void 0 : releaseStats.prod) === null || _a === void 0 ? void 0 : _a.count) || 0),
indicators: [
...indicatorsForEnvironment('EAT', releaseStats === null || releaseStats === void 0 ? void 0 : releaseStats.eat),
...indicatorsForEnvironment('SIT', releaseStats === null || releaseStats === void 0 ? void 0 : releaseStats.sit),
...indicatorsForEnvironment('PP', releaseStats === null || releaseStats === void 0 ? void 0 : releaseStats.pp),
...indicatorsForEnvironment('REPLICA', releaseStats === null || releaseStats === void 0 ? void 0 : releaseStats.replica),
...indicatorsForEnvironment('PROD', releaseStats === null || releaseStats === void 0 ? void 0 : releaseStats.prod)

@@ -57,8 +58,10 @@ ]

const environmentName = (environment) => {
var _a, _b, _c;
var _a, _b, _c, _d;
if (((_a = environment.name) === null || _a === void 0 ? void 0 : _a.toLowerCase()) === 'prod')
return 'prod';
if (['pp', 'replica', 'preprod', 'pre-prod'].includes(((_b = environment.name) !== null && _b !== void 0 ? _b : '').toLowerCase()))
return 'pp';
if (((_c = environment.name) === null || _c === void 0 ? void 0 : _c.toLowerCase()) === 'sit')
if (((_b = environment.name) === null || _b === void 0 ? void 0 : _b.toLowerCase()) === 'eat')
return 'eat';
if (['pp', 'replica', 'preprod', 'pre-prod'].includes(((_c = environment.name) !== null && _c !== void 0 ? _c : '').toLowerCase()))
return 'replica';
if (((_d = environment.name) === null || _d === void 0 ? void 0 : _d.toLowerCase()) === 'sit')
return 'sit';

@@ -65,0 +68,0 @@ return 'other';
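
Restating the environmentName change above: an environment named 'eat' now gets its own bucket, the pre-prod family ('pp', 'replica', 'preprod', 'pre-prod') maps to 'replica' instead of 'pp', and the top-level Releases count now only counts prod releases. A condensed sketch of the new mapping (same input names as the diff, simplified signature):

// New mapping in 0.1.3 (0.1.2 had no 'eat' case and mapped the
// pre-prod family to 'pp' instead of 'replica').
const environmentName = (name?: string) => {
  const n = (name ?? '').toLowerCase();
  if (n === 'prod') return 'prod';
  if (n === 'eat') return 'eat';
  if (['pp', 'replica', 'preprod', 'pre-prod'].includes(n)) return 'replica';
  if (n === 'sit') return 'sit';
  return 'other';
};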

@@ -51,35 +51,37 @@ "use strict";

});
exports.default = async (config, collectionName, projectName) => {
exports.default = (config) => {
const { getRepositories, getBuilds, getBranches, getPRs, getTestRuns, getTestCoverage, getReleases } = azure_1.default(config);
const initialiseSonar = sonar_1.default(config);
const [repos, { buildByRepoId, buildByBuildId }, testRuns, releaseByRepoId, codeQualityByRepoName] = await Promise.all([
getRepositories(collectionName, projectName),
getBuilds(collectionName, projectName).then(aggregate_builds_by_repo_1.default),
getTestRuns(collectionName, projectName),
getReleases(collectionName, projectName).then(aggregate_releases_1.default),
initialiseSonar(projectName)
]);
const getCoverageByRepoId = aggregate_coverage_by_repo_1.default(testRuns, buildByBuildId, (buildId) => getTestCoverage(collectionName, projectName, buildId));
return Promise.all(repos.map(async (r) => {
const [branches, prs, coverage, { languages, codeQuality }] = await Promise.all([
getBranches(collectionName, r.id).then(aggregate_branches_1.default),
getPRs(collectionName, r.id).then(aggregate_prs_1.default),
getCoverageByRepoId(r.id),
codeQualityByRepoName(r.name).then(aggregate_code_quality_1.default)
// getCommits(collectionName, r.id!)
return async (collectionName, projectName) => {
const [repos, { buildByRepoId, buildByBuildId }, testRuns, releaseByRepoId, codeQualityByRepoName] = await Promise.all([
getRepositories(collectionName, projectName),
getBuilds(collectionName, projectName).then(aggregate_builds_by_repo_1.default),
getTestRuns(collectionName, projectName),
getReleases(collectionName, projectName).then(aggregate_releases_1.default),
initialiseSonar(projectName)
]);
return withOverallRating({
name: r.name,
id: r.id,
languages,
indicators: [
buildByRepoId(r.id),
branches,
prs,
coverage,
releaseByRepoId(r.id),
codeQuality
]
});
}));
const getCoverageByRepoId = aggregate_coverage_by_repo_1.default(testRuns, buildByBuildId, (buildId) => getTestCoverage(collectionName, projectName, buildId));
return Promise.all(repos.map(async (r) => {
const [branches, prs, coverage, { languages, codeQuality }] = await Promise.all([
getBranches(collectionName, r.id).then(aggregate_branches_1.default),
getPRs(collectionName, r.id).then(aggregate_prs_1.default),
getCoverageByRepoId(r.id),
codeQualityByRepoName(r.name).then(aggregate_code_quality_1.default)
// getCommits(collectionName, r.id!)
]);
return withOverallRating({
name: r.name,
id: r.id,
languages,
indicators: [
buildByRepoId(r.id),
branches,
prs,
coverage,
releaseByRepoId(r.id),
codeQuality
]
});
}));
};
};

@@ -22,2 +22,3 @@ "use strict";

await createDataFolder;
const analyseRepos = analyse_repos_1.default(config);
const overallResults = await Promise.all(config.projects

@@ -27,3 +28,3 @@ .map(async (projectSpec) => {

console.log('Starting analysis for', projectSpec.join('/'));
const repos = await analyse_repos_1.default(config, ...projectSpec);
const repos = await analyseRepos(...projectSpec);
const now = utils_1.shortDateFormat(new Date());

@@ -30,0 +31,0 @@ console.log(`Took ${Date.now() - start}ms to analyse`, projectSpec.join('/'));
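
The two hunks above are one refactor: analyse-repos now takes the config once and returns an async function over (collectionName, projectName), and index.js builds that function once before looping over config.projects. A small sketch of the curried shape (makeAnalyseRepos and the Config type are illustrative stand-ins for the real module):

// The expensive client setup runs once per config, not once per project.
type Config = { projects: [collection: string, project: string][] };

const makeAnalyseRepos = (config: Config) => {
  // in the real module, the azure and sonar clients are initialised here
  return async (collectionName: string, projectName: string) =>
    `analysed ${collectionName}/${projectName}`;
};

// caller, mirroring the index.js hunk above:
// 0.1.2: analyse_repos_1.default(config, collectionName, projectName)
// 0.1.3: configure once, then call per project
const analyseRepos = makeAnalyseRepos({ projects: [['collection', 'project']] });
analyseRepos('collection', 'project').then(console.log);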

@@ -8,33 +8,45 @@ "use strict";

const qs_1 = __importDefault(require("qs"));
const ramda_1 = require("ramda");
const aggregate_code_quality_1 = require("./analyse-repos/aggregate-code-quality");
const using_disk_cache_1 = __importDefault(require("./using-disk-cache"));
const utils_1 = require("./utils");
const sortByLastAnalysedDate = (a, b) => (new Date(b.lastAnalysisDate).getTime() - new Date(a.lastAnalysisDate).getTime());
const getCurrentRepo = (repoName) => ramda_1.pipe(utils_1.filter(repo => repo.name === repoName && Boolean(repo.lastAnalysisDate)), ramda_1.sort(sortByLastAnalysedDate), utils_1.getFirst);
const reposAtSonarServer = (pageIndex = 1) => async (sonarServer) => {
const { url, token } = sonarServer;
const sonarProjectsResponse = await node_fetch_1.default(`${url}/api/projects/search?p=${pageIndex}&ps=500`, {
method: 'GET',
headers: {
Authorization: `Basic ${Buffer.from(`${token}:`).toString('base64')}`
}
});
const responseText = await sonarProjectsResponse.text();
try {
const parsed = JSON.parse(responseText);
return [
...parsed.components.map(component => ({ ...component, url })),
...(parsed.paging.pageSize === parsed.components.length ? await reposAtSonarServer(parsed.paging.pageIndex + 1)(sonarServer) : [])
];
}
catch (e) {
console.error({ sonarServer, responseText, status: sonarProjectsResponse.status });
throw e;
}
};
exports.default = (config) => {
const withDiskCache = using_disk_cache_1.default(config);
return async (project) => {
const projectConfig = config.sonar[project];
if (!projectConfig)
return async () => undefined;
const sonarRepos = await withDiskCache(['sonar', project], async () => {
const sonarProjectsResponse = await node_fetch_1.default(`${projectConfig.url}/api/projects/search`, {
method: 'GET',
headers: {
Authorization: `Basic ${projectConfig.token}`
}
});
return (await sonarProjectsResponse.json()).components;
const sonarRepos = withDiskCache(['sonar'], () => Promise.all(config.sonar.map(reposAtSonarServer())).then(list => list.flat()));
return async (project) => async (repoName) => {
const currentSonarRepo = getCurrentRepo(repoName)(await sonarRepos);
if (!currentSonarRepo)
return [];
return withDiskCache(['sonar', project, repoName], async () => {
var _a;
const response = await node_fetch_1.default(`${currentSonarRepo.url}/api/measures/component?${qs_1.default.stringify({
component: currentSonarRepo.key,
metricKeys: aggregate_code_quality_1.requiredMetrics.join(',')
})}`);
return ((_a = (await response.json()).component) === null || _a === void 0 ? void 0 : _a.measures) || [];
});
return async (repoName) => {
const currentSonarRepo = sonarRepos.find(({ name }) => name === repoName);
if (!currentSonarRepo)
return undefined;
const codeQuality = await withDiskCache(['sonar', project, repoName], async () => {
const response = await node_fetch_1.default(`${projectConfig.url}/api/measures/component?${qs_1.default.stringify({
component: currentSonarRepo.key,
metricKeys: aggregate_code_quality_1.requiredMetrics.join(',')
})}`);
return (await response.json()).component;
});
return codeQuality.measures;
};
};
};
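
Two behavioural changes sit in the sonar.js hunk above: the Authorization header is now a proper Basic credential (base64 of "<token>:" rather than the raw token), and the project list is fetched 500 at a time, recursing until a page comes back short. A hedged sketch of both pieces (the response types are assumptions; the URL shape and query parameters are as in the diff):

import fetch from 'node-fetch';

type SonarServer = { url: string; token: string };
type SearchPage = {
  paging: { pageIndex: number; pageSize: number };
  components: { key: string; name: string; lastAnalysisDate?: string }[];
};

// Basic auth header as built in 0.1.3: base64 of "<token>:"
const authHeader = (token: string) =>
  `Basic ${Buffer.from(`${token}:`).toString('base64')}`;

// Fetch pages of 500, recursing while each page comes back full.
const reposAtSonarServer = (pageIndex = 1) =>
  async ({ url, token }: SonarServer): Promise<SearchPage['components']> => {
    const res = await fetch(`${url}/api/projects/search?p=${pageIndex}&ps=500`, {
      headers: { Authorization: authHeader(token) }
    });
    const page = (await res.json()) as SearchPage;
    return [
      ...page.components,
      ...(page.paging.pageSize === page.components.length
        ? await reposAtSonarServer(page.paging.pageIndex + 1)({ url, token })
        : [])
    ];
  };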

@@ -13,2 +13,3 @@ "use strict";

const logNetworkIO = debug_1.default('network-io');
const logRecoveredError = debug_1.default('recovered-error');
const cachePath = path_1.join(process.cwd(), 'cache');

@@ -42,20 +43,29 @@ const createCachePath = fs_1.promises.mkdir(cachePath, { recursive: true });

};
exports.default = (config) => async (pathParts, fn) => {
await createCachePath;
const fileName = path_1.join(cachePath, `${pathParts.join('-')}.json`);
const canUseCache = ramda_1.and(...await Promise.all([
isCacheValid(config), fileExists(fileName)
]));
if (canUseCache) {
logDiskIO(fileName);
const contents = await fs_1.promises.readFile(fileName, 'utf8');
return JSON.parse(contents, parseDate);
}
logNetworkIO(pathParts.join(' '));
const result = await fn();
await Promise.all([
fs_1.promises.writeFile(fileName, JSON.stringify(result), 'utf8'),
fs_1.promises.writeFile(path_1.join(cachePath, 'last-fetch-date.txt'), new Date().toISOString(), 'utf8')
]);
return result;
exports.default = (config) => {
const isCacheValidPromise = isCacheValid(config);
return async (pathParts, fn) => {
await createCachePath;
const fileName = path_1.join(cachePath, `${pathParts.join('-')}.json`);
const canUseCache = ramda_1.and(...await Promise.all([
isCacheValidPromise, fileExists(fileName)
]));
if (canUseCache) {
logDiskIO(fileName);
const contents = await fs_1.promises.readFile(fileName, 'utf8');
try {
return JSON.parse(contents, parseDate);
}
catch (e) {
logRecoveredError(`Error parsing ${fileName}. Deleting and going to the network instead.`);
await fs_1.promises.unlink(fileName);
}
}
logNetworkIO(pathParts.join(' '));
const result = await fn();
await Promise.all([
fs_1.promises.writeFile(fileName, JSON.stringify(result), 'utf8'),
fs_1.promises.writeFile(path_1.join(cachePath, 'last-fetch-date.txt'), new Date().toISOString(), 'utf8')
]);
return result;
};
};
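
The disk-cache hunk above adds recovery for corrupt cache entries: if JSON.parse throws, the file is deleted and the call falls through to the network fetch, writing a fresh copy afterwards. A minimal read-through sketch of that flow (readThroughCache, fileName and fetchFresh are illustrative; the real module also checks cache age and file existence separately):

import { promises as fs } from 'fs';

// Read-through cache that tolerates corrupt entries, mirroring the
// behaviour introduced above.
const readThroughCache = async <T>(fileName: string, fetchFresh: () => Promise<T>): Promise<T> => {
  try {
    const contents = await fs.readFile(fileName, 'utf8');
    return JSON.parse(contents) as T;
  } catch {
    // missing file or corrupt JSON: drop it and go to the network instead
    await fs.unlink(fileName).catch(() => undefined);
  }
  const result = await fetchFresh();
  await fs.writeFile(fileName, JSON.stringify(result), 'utf8');
  return result;
};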

@@ -6,3 +6,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.doesFileExist = exports.divideBy = exports.assertDefined = exports.isMaster = exports.shortDateFormat = exports.hours = exports.minutes = exports.statsStrings = exports.isWithinFortnight = exports.pastDate = void 0;
exports.doesFileExist = exports.filter = exports.getFirst = exports.divideBy = exports.assertDefined = exports.isMaster = exports.shortDateFormat = exports.hours = exports.minutes = exports.statsStrings = exports.isWithinFortnight = exports.pastDate = void 0;
const ms_1 = __importDefault(require("ms"));

@@ -68,2 +68,6 @@ const fs_1 = require("fs");

exports.divideBy = divideBy;
const getFirst = (list) => list[0];
exports.getFirst = getFirst;
const filter = (fn) => (xs) => xs.filter(fn);
exports.filter = filter;
const doesFileExist = async (filePath) => {

@@ -70,0 +74,0 @@ try {

{
"name": "project-health-tool",
"version": "0.1.2",
"version": "0.1.3",
"scripts": {

@@ -18,3 +18,4 @@ "watch:tailwind": "tailwind -i ui/tailwind.css -o ui/tailwind.output.css --watch",

"test": "jest server --testPathIgnorePatterns=./dist --coverage",
"scrape": "ts-node --project server/tsconfig.json ./server/dev-scrape.ts",
"scrape": "node -r ts-node/register ./server/dev-scrape.ts",
"scrape:profiler": "node -r ts-node/register --prof ./server/dev-scrape.ts",
"tsc": "tsc --build server ui",

@@ -21,0 +22,0 @@ "lint": "eslint \"./**/*.ts\" --cache",

