Skip to content

Commit 9ea2730

Browse files
pvdlggr2m
authored and committed
feat: add global throttler to comply with GitHub recommendation
1 parent c0773b6 commit 9ea2730

13 files changed

+272
-195
lines changed

lib/get-client.js

Lines changed: 31 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,13 @@ const RATE_LIMITS = {
2020
core: [5000, 60 * 60 * 1000],
2121
};
2222

23+
/**
24+
* Global rate limit to prevent abuse.
25+
*
26+
* See {@link https://developer.github.com/v3/guides/best-practices-for-integrators/#dealing-with-abuse-rate-limits|Dealing with abuse rate limits}
27+
*/
28+
const GLOBAL_RATE_LIMIT = [1, 1000];
29+
2330
/**
2431
* Http error codes for which to not retry.
2532
*/
@@ -32,14 +39,22 @@ const SKIP_RETRY_CODES = [400, 401, 403];
3239
*/
3340

3441
/**
35-
* Create or retrieve the throller function for a given rate limit group.
42+
* Create or retrieve the throttler function for a given rate limit group.
3643
*
3744
* @param {Array} rate The rate limit group.
3845
* @param {String} limit The rate limits per API endpoints.
39-
* @type {Throttler} The throller function for the given rate limit group.
46+
* @return {Throttler} The throttler function for the given rate limit group.
4047
*/
4148
const getThrottler = memoize((rate, limit) => pThrottle((func, ...args) => func(...args), ...limit[rate]));
4249

50+
/**
51+
* Create the global throttler function to comply with GitHub abuse prevention recommendations.
52+
*
53+
* @param {Array} globalLimit The global rate limit.
54+
* @return {Throttler} The throttler function for the global rate limit.
55+
*/
56+
const getGlobalThrottler = globalLimit => pThrottle((func, ...args) => func(...args), ...globalLimit);
57+
4358
/**
4459
* Create a `handler` for a `Proxy` wrapping an Octokit instance to:
4560
* - Recursively wrap the child objects of the Octokit instance in a `Proxy`
@@ -48,9 +63,10 @@ const getThrottler = memoize((rate, limit) => pThrottle((func, ...args) => func(
4863
* @param {Object} retry The configuration to pass to `p-retry`.
4964
* @param {Array} limit The rate limits per API endpoints.
5065
* @param {String} endpoint The API endpoint to handle.
66+
* @param {Throttler} globalThrottler The throttler function for the global rate limit.
5167
* @return {Function} The `handler` for a `Proxy` wrapping an Octokit instance.
5268
*/
53-
const handler = (retry, limit, endpoint) => ({
69+
const handler = (retry, limit, globalThrottler, endpoint) => ({
5470
/**
5571
* If the target has the property as own, determine the rate limit based on the property name and recursively wrap the value in a `Proxy`. Otherwise returns the property value.
5672
*
@@ -61,7 +77,7 @@ const handler = (retry, limit, endpoint) => ({
6177
*/
6278
get: (target, name, receiver) =>
6379
Object.prototype.hasOwnProperty.call(target, name)
64-
? new Proxy(target[name], handler(retry, limit, endpoint || name))
80+
? new Proxy(target[name], handler(retry, limit, globalThrottler, endpoint || name))
6581
: Reflect.get(target, name, receiver),
6682

6783
/**
@@ -74,9 +90,10 @@ const handler = (retry, limit, endpoint) => ({
7490
*/
7591
apply: (func, that, args) => {
7692
const throttler = getThrottler(limit[endpoint] ? endpoint : 'core', limit);
93+
7794
return pRetry(async () => {
7895
try {
79-
return await throttler(func, ...args);
96+
return await globalThrottler((func, ...args) => throttler(func, ...args), func, ...args);
8097
} catch (err) {
8198
if (SKIP_RETRY_CODES.includes(err.code)) {
8299
throw new pRetry.AbortError(err);
@@ -87,7 +104,14 @@ const handler = (retry, limit, endpoint) => ({
87104
},
88105
});
89106

90-
module.exports = ({githubToken, githubUrl, githubApiPathPrefix, retry = DEFAULT_RETRY, limit = RATE_LIMITS}) => {
107+
module.exports = ({
108+
githubToken,
109+
githubUrl,
110+
githubApiPathPrefix,
111+
retry = DEFAULT_RETRY,
112+
limit = RATE_LIMITS,
113+
globalLimit = GLOBAL_RATE_LIMIT,
114+
}) => {
91115
const {port, protocol, hostname} = githubUrl ? url.parse(githubUrl) : {};
92116
const github = new Octokit({
93117
port,
@@ -96,5 +120,5 @@ module.exports = ({githubToken, githubUrl, githubApiPathPrefix, retry = DEFAULT_
96120
pathPrefix: githubApiPathPrefix,
97121
});
98122
github.authenticate({type: 'token', token: githubToken});
99-
return new Proxy(github, handler(retry, limit));
123+
return new Proxy(github, handler(retry, limit, getGlobalThrottler(globalLimit)));
100124
};

lib/glob-assets.js

Lines changed: 40 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -1,49 +1,50 @@
11
const {basename} = require('path');
22
const {isPlainObject, castArray, uniqWith} = require('lodash');
3-
const pReduce = require('p-reduce');
43
const globby = require('globby');
54
const debug = require('debug')('semantic-release:github');
65

76
module.exports = async assets =>
87
uniqWith(
9-
(await pReduce(
10-
assets,
11-
async (result, asset) => {
12-
// Wrap single glob definition in Array
13-
const glob = castArray(isPlainObject(asset) ? asset.path : asset);
14-
// Skip solo negated pattern (avoid to include every non js file with `!**/*.js`)
15-
if (glob.length <= 1 && glob[0].startsWith('!')) {
16-
debug(
17-
'skipping the negated glob %o as its alone in its group and would retrieve a large amount of files ',
18-
glob[0]
19-
);
20-
return result;
21-
}
22-
const globbed = await globby(glob, {expandDirectories: true, gitignore: false, dot: true});
23-
if (isPlainObject(asset)) {
24-
if (globbed.length > 1) {
25-
// If asset is an Object with a glob in the `path` property that resolves to multiple files,
26-
// Output an Object definition for each file matched and set each one with:
27-
// - `path` of the matched file
28-
// - `name` based on the actual file name (to avoid assets with duplicate `name`)
29-
// - other properties of the original asset definition
30-
return [...result, ...globbed.map(file => Object.assign({}, asset, {path: file, name: basename(file)}))];
31-
}
32-
// If asset is an Object, output an Object definition with:
33-
// - `path` of the matched file if there is one, or the original `path` definition (will be considered as a missing file)
34-
// - other properties of the original asset definition
35-
return [...result, Object.assign({}, asset, {path: globbed[0] || asset.path})];
36-
}
37-
if (globbed.length > 0) {
38-
// If asset is a String definition, output each file matched
39-
return [...result, ...globbed];
40-
}
41-
// If asset is a String definition but no match is found, output the elements of the original glob (each one will be considered as a missing file)
42-
return [...result, ...glob];
43-
},
44-
[]
45-
// Sort with Object first, to prioritize Object definition over Strings in dedup
46-
)).sort(asset => !isPlainObject(asset)),
8+
[]
9+
.concat(
10+
...(await Promise.all(
11+
assets.map(async asset => {
12+
// Wrap single glob definition in Array
13+
const glob = castArray(isPlainObject(asset) ? asset.path : asset);
14+
// Skip solo negated pattern (avoid to include every non js file with `!**/*.js`)
15+
if (glob.length <= 1 && glob[0].startsWith('!')) {
16+
debug(
17+
'skipping the negated glob %o as its alone in its group and would retrieve a large amount of files ',
18+
glob[0]
19+
);
20+
return [];
21+
}
22+
const globbed = await globby(glob, {expandDirectories: true, gitignore: false, dot: true});
23+
if (isPlainObject(asset)) {
24+
if (globbed.length > 1) {
25+
// If asset is an Object with a glob in the `path` property that resolves to multiple files,
26+
// Output an Object definition for each file matched and set each one with:
27+
// - `path` of the matched file
28+
// - `name` based on the actual file name (to avoid assets with duplicate `name`)
29+
// - other properties of the original asset definition
30+
return globbed.map(file => Object.assign({}, asset, {path: file, name: basename(file)}));
31+
}
32+
// If asset is an Object, output an Object definition with:
33+
// - `path` of the matched file if there is one, or the original `path` definition (will be considered as a missing file)
34+
// - other properties of the original asset definition
35+
return Object.assign({}, asset, {path: globbed[0] || asset.path});
36+
}
37+
if (globbed.length > 0) {
38+
// If asset is a String definition, output each file matched
39+
return globbed;
40+
}
41+
// If asset is a String definition but no match is found, output the elements of the original glob (each one will be considered as a missing file)
42+
return glob;
43+
})
44+
// Sort with Object first, to prioritize Object definition over Strings in dedup
45+
))
46+
)
47+
.sort(asset => !isPlainObject(asset)),
4748
// Compare `path` property if Object definition, value itself if String
4849
(a, b) => (isPlainObject(a) ? a.path : a) === (isPlainObject(b) ? b.path : b)
4950
);

lib/publish.js

Lines changed: 34 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@ const {basename, extname} = require('path');
22
const {stat, readFile} = require('fs-extra');
33
const {isPlainObject} = require('lodash');
44
const parseGithubUrl = require('parse-github-url');
5-
const pReduce = require('p-reduce');
65
const mime = require('mime');
76
const debug = require('debug')('semantic-release:github');
87
const globAssets = require('./glob-assets.js');
@@ -26,43 +25,45 @@ module.exports = async (pluginConfig, {options: {branch, repositoryUrl}, nextRel
2625
if (assets && assets.length > 0) {
2726
const globbedAssets = await globAssets(assets);
2827
debug('globed assets: %o', globbedAssets);
29-
// Make requests serially to avoid hitting the rate limit (https://developer.github.com/v3/guides/best-practices-for-integrators/#dealing-with-abuse-rate-limits)
30-
await pReduce(globbedAssets, async (_, asset) => {
31-
const filePath = isPlainObject(asset) ? asset.path : asset;
32-
let file;
3328

34-
try {
35-
file = await stat(filePath);
36-
} catch (err) {
37-
logger.error('The asset %s cannot be read, and will be ignored.', filePath);
38-
return;
39-
}
40-
if (!file || !file.isFile()) {
41-
logger.error('The asset %s is not a file, and will be ignored.', filePath);
42-
return;
43-
}
29+
await Promise.all(
30+
globbedAssets.map(async asset => {
31+
const filePath = isPlainObject(asset) ? asset.path : asset;
32+
let file;
4433

45-
const fileName = asset.name || basename(filePath);
46-
const upload = {
47-
owner,
48-
repo,
49-
url: uploadUrl,
50-
file: await readFile(filePath),
51-
contentType: mime.getType(extname(fileName)) || 'text/plain',
52-
contentLength: file.size,
53-
name: fileName,
54-
};
34+
try {
35+
file = await stat(filePath);
36+
} catch (err) {
37+
logger.error('The asset %s cannot be read, and will be ignored.', filePath);
38+
return;
39+
}
40+
if (!file || !file.isFile()) {
41+
logger.error('The asset %s is not a file, and will be ignored.', filePath);
42+
return;
43+
}
5544

56-
debug('file path: %o', filePath);
57-
debug('file name: %o', fileName);
45+
const fileName = asset.name || basename(filePath);
46+
const upload = {
47+
owner,
48+
repo,
49+
url: uploadUrl,
50+
file: await readFile(filePath),
51+
contentType: mime.getType(extname(fileName)) || 'text/plain',
52+
contentLength: file.size,
53+
name: fileName,
54+
};
5855

59-
if (isPlainObject(asset) && asset.label) {
60-
upload.label = asset.label;
61-
}
56+
debug('file path: %o', filePath);
57+
debug('file name: %o', fileName);
6258

63-
const {data: {browser_download_url: downloadUrl}} = await github.repos.uploadAsset(upload);
64-
logger.log('Published file %s', downloadUrl);
65-
});
59+
if (isPlainObject(asset) && asset.label) {
60+
upload.label = asset.label;
61+
}
62+
63+
const {data: {browser_download_url: downloadUrl}} = await github.repos.uploadAsset(upload);
64+
logger.log('Published file %s', downloadUrl);
65+
})
66+
);
6667
}
6768

6869
return {url, name: 'GitHub release'};

lib/success.js

Lines changed: 39 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
const {uniqBy, template} = require('lodash');
22
const parseGithubUrl = require('parse-github-url');
3-
const pReduce = require('p-reduce');
43
const AggregateError = require('aggregate-error');
54
const issueParser = require('issue-parser')('github');
65
const debug = require('debug')('semantic-release:github');
@@ -19,13 +18,13 @@ module.exports = async (
1918
const github = getClient({githubToken, githubUrl, githubApiPathPrefix});
2019
const releaseInfos = releases.filter(release => Boolean(release.name));
2120

22-
const prs = await pReduce(
23-
getSearchQueries(`repo:${owner}/${repo}+type:pr`, commits.map(commit => commit.hash)),
24-
async (prs, q) => {
25-
const {data: {items}} = await github.search.issues({q});
26-
return [...prs, ...items];
27-
},
28-
[]
21+
const prs = [].concat(
22+
...(await Promise.all(
23+
getSearchQueries(`repo:${owner}/${repo}+type:pr`, commits.map(commit => commit.hash)).map(async q => {
24+
const {data: {items}} = await github.search.issues({q});
25+
return items;
26+
})
27+
))
2928
);
3029

3130
debug('found pull requests: %O', prs.map(pr => pr.number));
@@ -46,40 +45,43 @@ module.exports = async (
4645

4746
const errors = [];
4847

49-
// Make requests serially to avoid hitting the rate limit (https://developer.github.com/v3/guides/best-practices-for-integrators/#dealing-with-abuse-rate-limits)
50-
await pReduce([...prs, ...issues], async (_, issue) => {
51-
const body = successComment
52-
? template(successComment)({branch, lastRelease, commits, nextRelease, releases, issue})
53-
: getSuccessComment(issue, releaseInfos, nextRelease);
54-
try {
55-
const comment = {owner, repo, number: issue.number, body};
56-
debug('create comment: %O', comment);
57-
const {data: {html_url: url}} = await github.issues.createComment(comment);
58-
logger.log('Added comment to issue #%d: %s', issue.number, url);
59-
} catch (err) {
60-
errors.push(err);
61-
logger.error('Failed to add a comment to the issue #%d.', issue.number);
62-
// Don't throw right away and continue to update other issues
63-
}
64-
});
48+
await Promise.all(
49+
[...prs, ...issues].map(async issue => {
50+
const body = successComment
51+
? template(successComment)({branch, lastRelease, commits, nextRelease, releases, issue})
52+
: getSuccessComment(issue, releaseInfos, nextRelease);
53+
try {
54+
const comment = {owner, repo, number: issue.number, body};
55+
debug('create comment: %O', comment);
56+
const {data: {html_url: url}} = await github.issues.createComment(comment);
57+
logger.log('Added comment to issue #%d: %s', issue.number, url);
58+
} catch (err) {
59+
errors.push(err);
60+
logger.error('Failed to add a comment to the issue #%d.', issue.number);
61+
// Don't throw right away and continue to update other issues
62+
}
63+
})
64+
);
6565

6666
const srIssues = await findSRIssues(github, failTitle, owner, repo);
6767

6868
debug('found semantic-release issues: %O', srIssues);
6969

70-
await pReduce(srIssues, async (_, issue) => {
71-
debug('close issue: %O', issue);
72-
try {
73-
const updateIssue = {owner, repo, number: issue.number, state: 'closed'};
74-
debug('closing issue: %O', updateIssue);
75-
const {data: {html_url: url}} = await github.issues.edit(updateIssue);
76-
logger.log('Closed issue #%d: %s.', issue.number, url);
77-
} catch (err) {
78-
errors.push(err);
79-
logger.error('Failed to close the issue #%d.', issue.number);
80-
// Don't throw right away and continue to close other issues
81-
}
82-
});
70+
await Promise.all(
71+
srIssues.map(async issue => {
72+
debug('close issue: %O', issue);
73+
try {
74+
const updateIssue = {owner, repo, number: issue.number, state: 'closed'};
75+
debug('closing issue: %O', updateIssue);
76+
const {data: {html_url: url}} = await github.issues.edit(updateIssue);
77+
logger.log('Closed issue #%d: %s.', issue.number, url);
78+
} catch (err) {
79+
errors.push(err);
80+
logger.error('Failed to close the issue #%d.', issue.number);
81+
// Don't throw right away and continue to close other issues
82+
}
83+
})
84+
);
8385

8486
if (errors.length > 0) {
8587
throw new AggregateError(errors);

package.json

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,6 @@
2525
"issue-parser": "^1.0.1",
2626
"lodash": "^4.17.4",
2727
"mime": "^2.0.3",
28-
"p-reduce": "^1.0.0",
2928
"p-retry": "^1.0.0",
3029
"p-throttle": "^1.1.0",
3130
"parse-github-url": "^1.0.1",

0 commit comments

Comments
 (0)