@semantic-release/github - npm Package Compare versions

Comparing version 4.1.3 to 4.2.1
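
In this release the GitHub client factory switches from positional arguments to a single options object, wraps the Octokit instance in a `Proxy` that throttles calls per endpoint group (via Bottleneck) and retries transient failures (via p-retry), and, with rate limiting now handled centrally in the client, replaces the serial `pReduce` loops in the asset globbing, publish, and success steps with parallel `Promise.all` calls.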


lib/fail.js

@@ -15,5 +15,5 @@ const {template} = require('lodash');

const {name: repo, owner} = parseGithubUrl(repositoryUrl);
const github = getClient(githubToken, githubUrl, githubApiPathPrefix);
const github = getClient({githubToken, githubUrl, githubApiPathPrefix});
const body = failComment ? template(failComment)({branch, errors}) : getFailComment(branch, errors);
const srIssue = (await findSRIssues(github, failTitle, owner, repo))[0];
const [srIssue] = await findSRIssues(github, failTitle, owner, repo);

@@ -20,0 +20,0 @@ if (srIssue) {

lib/get-client.js

const url = require('url');
const GitHubApi = require('@octokit/rest');
const {memoize} = require('lodash');
const Octokit = require('@octokit/rest');
const pRetry = require('p-retry');
const Bottleneck = require('bottleneck');
module.exports = (githubToken, githubUrl, githubApiPathPrefix) => {
/**
* Default exponential backoff configuration for retries.
*/
const DEFAULT_RETRY = {retries: 3, factor: 2, minTimeout: 1000};
/**
* Rate limits per API endpoint.
*
* See {@link https://developer.github.com/v3/search/#rate-limit|Search API rate limit}.
* See {@link https://developer.github.com/v3/#rate-limiting|Rate limiting}.
*/
const RATE_LIMITS = {
search: 60 * 1000 / 30, // 30 calls per minute => 1 call every 2s
core: 60 * 60 * 1000 / 5000, // 5000 calls per hour => 1 call every 720ms
};
/**
* Global rate limit to prevent abuse.
*
* See {@link https://developer.github.com/v3/guides/best-practices-for-integrators/#dealing-with-abuse-rate-limits|Dealing with abuse rate limits}
*/
const GLOBAL_RATE_LIMIT = 1000;
/**
* HTTP error codes for which not to retry.
*/
const SKIP_RETRY_CODES = [400, 401, 403];
/**
* Create or retrieve the throttler function for a given rate limit group.
*
* @param {String} rate The name of the rate limit group (`search` or `core`).
* @param {Object} limit The rate limits per API endpoint.
* @param {Bottleneck} globalThrottler The global throttler.
* @return {Bottleneck} The throttler for the given rate limit group.
*/
const getThrottler = memoize((rate, limit, globalThrottler) =>
new Bottleneck({minTime: limit[rate]}).chain(globalThrottler)
);
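// NB: lodash's `memoize` caches on its first argument only, so exactly one
// Bottleneck throttler is created per rate group ('search' or 'core'),
// each chained to the shared global throttler.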
/**
* Create a `handler` for a `Proxy` wrapping an Octokit instance to:
* - Recursively wrap the child objects of the Octokit instance in a `Proxy`
* - Throttle and retry the Octokit instance functions
*
* @param {Object} retry The configuration to pass to `p-retry`.
* @param {Object} limit The rate limits per API endpoint.
* @param {Bottleneck} globalThrottler The throttler for the global rate limit.
* @param {String} endpoint The API endpoint to handle.
* @return {Function} The `handler` for a `Proxy` wrapping an Octokit instance.
*/
const handler = (retry, limit, globalThrottler, endpoint) => ({
/**
* If the target has the property as its own, determine the rate limit based on the property name and recursively wrap the value in a `Proxy`. Otherwise return the property value.
*
* @param {Object} target The target object.
* @param {String} name The name of the property to get.
* @param {Any} receiver The `Proxy` object.
* @return {Any} The property value or a `Proxy` of the property value.
*/
get: (target, name, receiver) =>
Object.prototype.hasOwnProperty.call(target, name)
? new Proxy(target[name], handler(retry, limit, globalThrottler, endpoint || name))
: Reflect.get(target, name, receiver),
/**
* Create a throttled version of the called function, then call it, retrying if the call fails with a retriable error code.
*
* @param {Function} func The target function.
* @param {Any} that The this argument for the call.
* @param {Array} args The list of arguments for the call.
* @return {Promise<Any>} The result of the function called.
*/
apply: (func, that, args) => {
const throttler = getThrottler(limit[endpoint] ? endpoint : 'core', limit, globalThrottler);
return pRetry(async () => {
try {
return await throttler.wrap(func)(...args);
} catch (err) {
if (SKIP_RETRY_CODES.includes(err.code)) {
throw new pRetry.AbortError(err);
}
throw err;
}
}, retry);
},
});
module.exports = ({
githubToken,
githubUrl,
githubApiPathPrefix,
retry = DEFAULT_RETRY,
limit = RATE_LIMITS,
globalLimit = GLOBAL_RATE_LIMIT,
}) => {
const {port, protocol, hostname} = githubUrl ? url.parse(githubUrl) : {};
const github = new GitHubApi({
const github = new Octokit({
port,

@@ -13,3 +112,3 @@ protocol: (protocol || '').split(':')[0] || null,

github.authenticate({type: 'token', token: githubToken});
return github;
return new Proxy(github, handler(retry, limit, new Bottleneck({minTime: globalLimit})));
};
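
A minimal usage sketch for the new client factory (hypothetical caller, not part of this diff), assuming a valid token in the GITHUB_TOKEN environment variable:

const getClient = require('./get-client');

(async () => {
  // The factory now takes a single options object; `retry`, `limit` and
  // `globalLimit` fall back to the defaults defined above when omitted.
  const github = getClient({githubToken: process.env.GITHUB_TOKEN});

  // Routed through the `search` throttler (one call every 2s) and retried up
  // to 3 times with exponential backoff, unless it fails with 400/401/403.
  const {data} = await github.search.issues({q: 'repo:octocat/Hello-World+type:pr'});
  console.log(data.total_count);
})();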

lib/glob-assets.js

const {basename} = require('path');
const {isPlainObject, castArray, uniqWith} = require('lodash');
const pReduce = require('p-reduce');
const globby = require('globby');

@@ -9,42 +8,44 @@ const debug = require('debug')('semantic-release:github');

uniqWith(
(await pReduce(
assets,
async (result, asset) => {
// Wrap single glob definition in Array
const glob = castArray(isPlainObject(asset) ? asset.path : asset);
// Skip a solo negated pattern (to avoid including every non-JS file with `!**/*.js`)
if (glob.length <= 1 && glob[0].startsWith('!')) {
debug(
'skipping the negated glob %o as it is alone in its group and would match a large number of files',
glob[0]
);
return result;
}
const globbed = await globby(glob, {expandDirectories: true, gitignore: false, dot: true});
if (isPlainObject(asset)) {
if (globbed.length > 1) {
// If asset is an Object whose `path` property is a glob that resolves to multiple files,
// Output an Object definition for each file matched and set each one with:
// - `path` of the matched file
// - `name` based on the actual file name (to avoid assets with duplicate `name`)
// - other properties of the original asset definition
return [...result, ...globbed.map(file => Object.assign({}, asset, {path: file, name: basename(file)}))];
}
// If asset is an Object, output an Object definition with:
// - `path` of the matched file if there is one, or the original `path` definition (will be considered as a missing file)
// - other properties of the original asset definition
return [...result, Object.assign({}, asset, {path: globbed[0] || asset.path})];
}
if (globbed.length > 0) {
// If asset is a String definition, output each file matched
return [...result, ...globbed];
}
// If asset is a String definition but no match is found, output the elements of the original glob (each one will be considered as a missing file)
return [...result, ...glob];
},
[]
// Sort with Object definitions first, to prioritize them over Strings in the dedupe
)).sort(asset => !isPlainObject(asset)),
[]
.concat(
...(await Promise.all(
assets.map(async asset => {
// Wrap single glob definition in Array
const glob = castArray(isPlainObject(asset) ? asset.path : asset);
// Skip a solo negated pattern (to avoid including every non-JS file with `!**/*.js`)
if (glob.length <= 1 && glob[0].startsWith('!')) {
debug(
'skipping the negated glob %o as it is alone in its group and would match a large number of files',
glob[0]
);
return [];
}
const globbed = await globby(glob, {expandDirectories: true, gitignore: false, dot: true});
if (isPlainObject(asset)) {
if (globbed.length > 1) {
// If asset is an Object whose `path` property is a glob that resolves to multiple files,
// Output an Object definition for each file matched and set each one with:
// - `path` of the matched file
// - `name` based on the actual file name (to avoid assets with duplicate `name`)
// - other properties of the original asset definition
return globbed.map(file => Object.assign({}, asset, {path: file, name: basename(file)}));
}
// If asset is an Object, output an Object definition with:
// - `path` of the matched file if there is one, or the original `path` definition (will be considered as a missing file)
// - other properties of the original asset definition
return Object.assign({}, asset, {path: globbed[0] || asset.path});
}
if (globbed.length > 0) {
// If asset is a String definition, output each file matched
return globbed;
}
// If asset is a String definition but no match is found, output the elements of the original glob (each one will be considered as a missing file)
return glob;
})
// Sort with Object definitions first, to prioritize them over Strings in the dedupe
))
)
.sort(asset => !isPlainObject(asset)),
// Compare `path` property if Object definition, value itself if String
(a, b) => (isPlainObject(a) ? a.path : a) === (isPlainObject(b) ? b.path : b)
);
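
The rewrite above replaces the serial `pReduce` accumulator with a parallel map-and-flatten. A minimal sketch of the pattern (hypothetical helper name, assuming a pre-`Array.prototype.flat` environment):

// Run the async mapper over all items in parallel, then flatten the
// resulting array-of-arrays with [].concat(...).
const flatMapAsync = async (items, mapper) =>
  [].concat(...(await Promise.all(items.map(mapper))));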

lib/publish.js

@@ -5,3 +5,2 @@ const {basename, extname} = require('path');

const parseGithubUrl = require('parse-github-url');
const pReduce = require('p-reduce');
const mime = require('mime');

@@ -16,3 +15,3 @@ const debug = require('debug')('semantic-release:github');

const {name: repo, owner} = parseGithubUrl(repositoryUrl);
const github = getClient(githubToken, githubUrl, githubApiPathPrefix);
const github = getClient({githubToken, githubUrl, githubApiPathPrefix});
const release = {owner, repo, tag_name: gitTag, name: gitTag, target_commitish: branch, body: notes}; // eslint-disable-line camelcase

@@ -31,39 +30,39 @@

debug('globbed assets: %o', globbedAssets);
// Make requests serially to avoid hitting the rate limit (https://developer.github.com/v3/guides/best-practices-for-integrators/#dealing-with-abuse-rate-limits)
await pReduce(globbedAssets, async (_, asset) => {
const filePath = isPlainObject(asset) ? asset.path : asset;
let file;
try {
file = await stat(filePath);
} catch (err) {
logger.error('The asset %s cannot be read, and will be ignored.', filePath);
return;
}
if (!file || !file.isFile()) {
logger.error('The asset %s is not a file, and will be ignored.', filePath);
return;
}
await Promise.all(
globbedAssets.map(async asset => {
const filePath = isPlainObject(asset) ? asset.path : asset;
let file;
const fileName = asset.name || basename(filePath);
const upload = {
owner,
repo,
url: uploadUrl,
file: await readFile(filePath),
contentType: mime.getType(extname(fileName)) || 'text/plain',
contentLength: file.size,
name: fileName,
};
try {
file = await stat(filePath);
} catch (err) {
logger.error('The asset %s cannot be read, and will be ignored.', filePath);
return;
}
if (!file || !file.isFile()) {
logger.error('The asset %s is not a file, and will be ignored.', filePath);
return;
}
debug('file path: %o', filePath);
debug('file name: %o', fileName);
const fileName = asset.name || basename(filePath);
const upload = {
url: uploadUrl,
file: await readFile(filePath),
contentType: mime.getType(extname(fileName)) || 'text/plain',
contentLength: file.size,
name: fileName,
};
if (isPlainObject(asset) && asset.label) {
upload.label = asset.label;
}
debug('file path: %o', filePath);
debug('file name: %o', fileName);
const {data: {browser_download_url: downloadUrl}} = await github.repos.uploadAsset(upload);
logger.log('Published file %s', downloadUrl);
});
if (isPlainObject(asset) && asset.label) {
upload.label = asset.label;
}
const {data: {browser_download_url: downloadUrl}} = await github.repos.uploadAsset(upload);
logger.log('Published file %s', downloadUrl);
})
);
}
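
As in lib/glob-assets.js, the serial `pReduce` upload loop gives way to `Promise.all`: uploads no longer need to be serialized here because the throttled client from lib/get-client.js now spaces out the underlying API calls. The upload payload also drops `owner` and `repo`, since the asset is posted directly to the release's `uploadUrl`.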

@@ -70,0 +69,0 @@

lib/success.js

const {uniqBy, template} = require('lodash');
const parseGithubUrl = require('parse-github-url');
const pReduce = require('p-reduce');
const AggregateError = require('aggregate-error');

@@ -19,12 +18,12 @@ const issueParser = require('issue-parser')('github');

const {name: repo, owner} = parseGithubUrl(repositoryUrl);
const github = getClient(githubToken, githubUrl, githubApiPathPrefix);
const github = getClient({githubToken, githubUrl, githubApiPathPrefix});
const releaseInfos = releases.filter(release => Boolean(release.name));
const prs = await pReduce(
getSearchQueries(`repo:${owner}/${repo}+type:pr`, commits.map(commit => commit.hash)),
async (prs, q) => {
const {data: {items}} = await github.search.issues({q});
return [...prs, ...items];
},
[]
const prs = [].concat(
...(await Promise.all(
getSearchQueries(`repo:${owner}/${repo}+type:pr`, commits.map(commit => commit.hash)).map(async q => {
const {data: {items}} = await github.search.issues({q});
return items;
})
))
);

@@ -50,18 +49,19 @@

// Make requests serially to avoid hitting the rate limit (https://developer.github.com/v3/guides/best-practices-for-integrators/#dealing-with-abuse-rate-limits)
await pReduce([...prs, ...issues], async (_, issue) => {
const body = successComment
? template(successComment)({branch, lastRelease, commits, nextRelease, releases, issue})
: getSuccessComment(issue, releaseInfos, nextRelease);
try {
const comment = {owner, repo, number: issue.number, body};
debug('create comment: %O', comment);
const {data: {html_url: url}} = await github.issues.createComment(comment);
logger.log('Added comment to issue #%d: %s', issue.number, url);
} catch (err) {
errors.push(err);
logger.error('Failed to add a comment to the issue #%d.', issue.number);
// Don't throw right away and continue to update other issues
}
});
await Promise.all(
[...prs, ...issues].map(async issue => {
const body = successComment
? template(successComment)({branch, lastRelease, commits, nextRelease, releases, issue})
: getSuccessComment(issue, releaseInfos, nextRelease);
try {
const comment = {owner, repo, number: issue.number, body};
debug('create comment: %O', comment);
const {data: {html_url: url}} = await github.issues.createComment(comment);
logger.log('Added comment to issue #%d: %s', issue.number, url);
} catch (err) {
errors.push(err);
logger.error('Failed to add a comment to the issue #%d.', issue.number);
// Don't throw right away and continue to update other issues
}
})
);

@@ -72,15 +72,17 @@ const srIssues = await findSRIssues(github, failTitle, owner, repo);

await pReduce(srIssues, async (_, issue) => {
debug('close issue: %O', issue);
try {
const updateIssue = {owner, repo, number: issue.number, state: 'closed'};
debug('closing issue: %O', updateIssue);
const {data: {html_url: url}} = await github.issues.edit(updateIssue);
logger.log('Closed issue #%d: %s.', issue.number, url);
} catch (err) {
errors.push(err);
logger.error('Failed to close the issue #%d.', issue.number);
// Don't throw right away and continue to close other issues
}
});
await Promise.all(
srIssues.map(async issue => {
debug('close issue: %O', issue);
try {
const updateIssue = {owner, repo, number: issue.number, state: 'closed'};
debug('closing issue: %O', updateIssue);
const {data: {html_url: url}} = await github.issues.edit(updateIssue);
logger.log('Closed issue #%d: %s.', issue.number, url);
} catch (err) {
errors.push(err);
logger.error('Failed to close the issue #%d.', issue.number);
// Don't throw right away and continue to close other issues
}
})
);
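
The success step applies the same pattern throughout: the pull-request searches, the issue comments, and the issue closing all run through `Promise.all` instead of serial `pReduce`, while failures are still accumulated in `errors` so that every issue is attempted before the step reports an aggregate error.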

@@ -87,0 +89,0 @@ if (errors.length > 0) {

lib/verify.js

@@ -77,3 +77,3 @@ const {isString, isPlainObject, isUndefined, isArray} = require('lodash');

if (githubToken) {
const github = getClient(githubToken, githubUrl, githubApiPathPrefix);
const github = getClient({githubToken, githubUrl, githubApiPathPrefix});

@@ -80,0 +80,0 @@ try {

package.json

{
"name": "@semantic-release/github",
"description": "Set of semantic-release plugins for publishing a GitHub release",
"version": "4.1.3",
"version": "4.2.1",
"author": "Pierre Vanduynslager (https://twitter.com/@pvdlg_)",

@@ -22,2 +22,3 @@ "bugs": {

"aggregate-error": "^1.0.0",
"bottleneck": "^2.0.1",
"debug": "^3.1.0",

@@ -29,3 +30,3 @@ "fs-extra": "^5.0.0",

"mime": "^2.0.3",
"p-reduce": "^1.0.0",
"p-retry": "^1.0.0",
"parse-github-url": "^1.0.1",

@@ -40,11 +41,9 @@ "url-join": "^4.0.0"

"cz-conventional-changelog": "^2.0.0",
"eslint-config-prettier": "^2.8.0",
"eslint-plugin-prettier": "^2.3.0",
"nock": "^9.1.0",
"nyc": "^11.2.1",
"prettier": "~1.10.0",
"semantic-release": "^12.2.2",
"proxyquire": "^1.8.0",
"semantic-release": "^14.0.0",
"sinon": "^4.0.0",
"tempy": "^0.2.1",
"xo": "^0.18.2"
"xo": "^0.20.0"
},

@@ -85,6 +84,3 @@ "engines": {

"prettier": {
"printWidth": 120,
"singleQuote": true,
"bracketSpacing": false,
"trailingComma": "es5"
"printWidth": 120
},

@@ -108,12 +104,5 @@ "publishConfig": {

"xo": {
"extends": [
"prettier"
],
"plugins": [
"prettier"
],
"rules": {
"prettier/prettier": 2
}
"prettier": true,
"space": true
}
}
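
The manifest changes mirror the refactor: `bottleneck` and `p-retry` join `dependencies` while `p-reduce` is dropped, `proxyquire` is added to `devDependencies` (with `semantic-release` bumped to ^14.0.0 and `xo` to ^0.20.0), and the standalone Prettier/ESLint integration gives way to XO's built-in `prettier: true` option.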