This repository has been archived by the owner on Jan 23, 2025. It is now read-only.

feat: use cache when listing PRs and issues (#616)
* feat: use cache when listing PRs and issues

* fix(deps): pin typescript to fix an unrelated issue

* test: add tests

* fix: use async-mutex
alexander-fenster authored Aug 29, 2022
1 parent a046b29 commit 6572a87
Showing 5 changed files with 246 additions and 25 deletions.
package.json (3 changes: 2 additions & 1 deletion)
@@ -18,6 +18,7 @@
"dependencies": {
"@types/command-line-usage": "^5.0.2",
"@types/tmp": "^0.2.3",
"async-mutex": "^0.3.2",
"chalk": "^5.0.1",
"command-line-usage": "^6.1.3",
"extend": "^3.0.2",
@@ -47,7 +48,7 @@
"nock": "^13.2.9",
"proxyquire": "^2.1.3",
"sinon": "^14.0.0",
"typescript": "^4.7.4"
"typescript": "~4.7.4"
},
"scripts": {
"lint": "gts check",
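The only new runtime dependency is async-mutex, which the new src/lib/cache.ts (below) uses to serialize concurrent cache reads and writes; the typescript entry changes from ^4.7.4 to ~4.7.4, which allows only patch upgrades instead of new minor versions. A minimal sketch of the acquire/release pattern the library provides; the withLock helper is hypothetical and only for illustration:

import {Mutex} from 'async-mutex';

const mutex = new Mutex();

// Hypothetical helper: runs `work` while holding the mutex, so only one
// caller at a time touches the shared resource (here, the cache files).
async function withLock<T>(work: () => Promise<T>): Promise<T> {
  const release = await mutex.acquire();
  try {
    return await work();
  } finally {
    release();
  }
}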
src/cli.ts (1 change: 1 addition & 0 deletions)
@@ -97,6 +97,7 @@ const cli = meow(
concurrency: {type: 'string'},
author: {type: 'string'},
yespleasedoit: {type: 'boolean'},
nocache: {type: 'boolean'},
},
}
);
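The nocache flag is the user-facing switch for the new cache: passing --nocache makes the iterator below skip readFromCache and always hit the GitHub API. A small hedged sketch of how the flag might be read off the meow result (the Result<AnyFlags> typing mirrors the signatures used in asyncItemIterator.ts; the helper name isCacheEnabled is hypothetical):

import meow from 'meow';

// Hypothetical helper: caching stays on unless the user passed --nocache.
function isCacheEnabled(cli: meow.Result<meow.AnyFlags>): boolean {
  return !cli.flags.nocache;
}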
src/lib/asyncItemIterator.ts (71 changes: 47 additions & 24 deletions)
@@ -20,6 +20,7 @@ const debug = debuglog('repo');

import * as configLib from './config.js';
import {GitHub, GitHubRepository, PullRequest, Issue} from './github.js';
import {CacheType, readFromCache, saveToCache} from './cache.js';

/**
* Retry the promise returned by a function if the promise throws
@@ -127,7 +128,7 @@ export interface IteratorOptions {
async function process(
cli: meow.Result<meow.AnyFlags>,
options: PRIteratorOptions | IssueIteratorOptions,
processIssues = false
type: CacheType
) {
if (
!cli.flags.title &&
@@ -175,44 +176,64 @@

const orb1 = ora(
`[${scanned}/${repos.length}] Scanning repos for ${
processIssues ? 'issues' : 'PR'
type === 'issues' ? 'issues' : 'PR'
}s`
).start();

// Concurrently find all PRs or issues in all relevant repositories
let cached = 0;
const q = new Q({concurrency});
q.addAll(
repos.map(repo => {
return async () => {
try {
let localItems;
if (processIssues) {
localItems = await retryException<Issue[]>(async () => {
if (delay) await delayMs(nextDelay(delay));
return await repo.listIssues();
}, retryStrategy);
} else {
localItems = await retryException<PullRequest[]>(async () => {
if (delay) await delayMs(nextDelay(delay));
return await repo.listPullRequests();
}, retryStrategy);
if (!cli.flags.nocache) {
const cachedData = await readFromCache(repo, type);
if (cachedData !== null) {
localItems =
(type === 'issues' ? cachedData.issues : cachedData.prs) ?? [];
++cached;
}
}
if (!localItems) {
if (type === 'issues') {
localItems = await retryException<Issue[]>(async () => {
if (delay) await delayMs(nextDelay(delay));
return await repo.listIssues();
}, retryStrategy);
await saveToCache(repo, type, {prs: [], issues: localItems});
} else {
localItems = await retryException<PullRequest[]>(async () => {
if (delay) await delayMs(nextDelay(delay));
return await repo.listPullRequests();
}, retryStrategy);
await saveToCache(repo, type, {prs: localItems, issues: []});
}
}
items.push(
...localItems.map(item => {
return {repo, item};
})
);
scanned++;
orb1.text = `[${scanned}/${repos.length}] Scanning repos for PRs`;
orb1.text = `[${scanned}/${repos.length}] Scanning repos for ${
type === 'issues' ? 'issue' : 'PR'
}`;
} catch (err) {
error = `cannot list open ${processIssues ? 'issue' : 'PR'}s: ${(
error = `cannot list open ${type === 'issues' ? 'issue' : 'PR'}s: ${(
err as Error
).toString()}`;
}
};
})
);
await q.onIdle();
if (cached > 0) {
console.log(
`\nData for ${cached} repositories was taken from cache. Use --nocache to override.`
);
}

// Filter the list of PRs or Issues to ones who match the PR title and/or the branch name
items = items.filter(itemSet => itemSet.item.title.match(regex));
@@ -246,7 +267,7 @@ async function process(
orb1.succeed(
`[${scanned}/${repos.length}] repositories scanned, ${
items.length
} matching ${processIssues ? 'issue' : 'PR'}s found`
} matching ${type === 'issues' ? 'issue' : 'PR'}s found`
);

// Concurrently process each relevant PR or Issue
@@ -260,11 +281,11 @@ async function process(
if (title.match(regex)) {
orb2.text = `[${processed}/${items.length}] ${
options.commandActive
} ${processIssues ? 'issue' : 'PR'}s`;
} ${type === 'issues' ? 'issue' : 'PR'}s`;
let result;
// When type is 'issues', the iterator processes a list of issues
// rather than PRs:
if (processIssues) {
if (type === 'issues') {
const opts = options as IssueIteratorOptions;
result = await retryBoolean(async () => {
if (delay) await delayMs(nextDelay(delay));
@@ -293,15 +314,15 @@ async function process(
processed++;
orb2.text = `[${processed}/${items.length}] ${
options.commandActive
} ${processIssues ? 'issue' : 'PR'}s`;
} ${type === 'issues' ? 'issue' : 'PR'}s`;
}
};
})
);
await q.onIdle();

orb2.succeed(
`[${processed}/${items.length}] ${processIssues ? 'issue' : 'PR'}s ${
`[${processed}/${items.length}] ${type === 'issues' ? 'issue' : 'PR'}s ${
options.commandNamePastTense
}`
);
@@ -317,7 +338,7 @@

console.log(
`Successfully processed: ${successful.length} ${
processIssues ? 'issue' : 'PR'
type === 'issues' ? 'issue' : 'PR'
}s`
);
for (const item of successful) {
@@ -326,7 +347,9 @@

if (failed.length > 0) {
console.log(
`Unable to process: ${failed.length} ${processIssues ? 'issue' : 'PR'}(s)`
`Unable to process: ${failed.length} ${
type === 'issues' ? 'issue' : 'PR'
}(s)`
);
for (const item of failed) {
console.log(` ${item.html_url.padEnd(maxUrlLength, ' ')} ${item.title}`);
@@ -335,7 +358,7 @@

if (error) {
console.log(
`Error when processing ${processIssues ? 'issue' : 'PR'}s: ${error}`
`Error when processing ${type === 'issues' ? 'issue' : 'PR'}s: ${error}`
);
}
}
Expand All @@ -345,13 +368,13 @@ export async function processPRs(
cli: meow.Result<meow.AnyFlags>,
options: PRIteratorOptions | IssueIteratorOptions
) {
return process(cli, options, false);
return process(cli, options, 'prs');
}

// Shorthand for processing list of issues:
export async function processIssues(
cli: meow.Result<meow.AnyFlags>,
options: PRIteratorOptions | IssueIteratorOptions
) {
return process(cli, options, true);
return process(cli, options, 'issues');
}
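Taken together, the listing step is now cache-first: read the cache unless --nocache was passed, fall back to the GitHub API (still wrapped in the existing retry and delay logic) on a miss, and write the fresh result back. A condensed, hedged sketch of that flow with the retry, delay, and queue plumbing omitted; listItems is an illustrative name, while readFromCache and saveToCache are the helpers added in src/lib/cache.ts below:

import {CacheType, readFromCache, saveToCache} from './cache.js';
import {GitHubRepository, Issue, PullRequest} from './github.js';

// Condensed sketch of the cache-first listing (retry, delay, and p-queue omitted).
async function listItems(
  repo: GitHubRepository,
  type: CacheType,
  useCache: boolean
): Promise<Issue[] | PullRequest[]> {
  if (useCache) {
    const cached = await readFromCache(repo, type);
    if (cached !== null) {
      // Cache hit: return whichever list was asked for.
      return (type === 'issues' ? cached.issues : cached.prs) ?? [];
    }
  }
  // Cache miss (or --nocache): ask GitHub and refresh the cache on the way out.
  if (type === 'issues') {
    const issues = await repo.listIssues();
    await saveToCache(repo, type, {prs: [], issues});
    return issues;
  }
  const prs = await repo.listPullRequests();
  await saveToCache(repo, type, {prs, issues: []});
  return prs;
}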
src/lib/cache.ts (102 changes: 102 additions & 0 deletions)
@@ -0,0 +1,102 @@
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import {existsSync} from 'fs';
import {mkdir, readFile, stat, unlink, writeFile} from 'fs/promises';
import {tmpdir} from 'os';
import {join} from 'path';
import {Mutex} from 'async-mutex';
import {GitHubRepository, Issue, PullRequest} from './github';

const cacheDirectory = join(tmpdir(), 'google-repo-cache');
const cacheMaxAge = 60 * 60 * 1000; // 1 hour

export type CachedData = {issues?: Issue[]; prs?: PullRequest[]};
export type CacheType = 'prs' | 'issues';

const mutex = new Mutex();

async function initCache() {
  if (!existsSync(cacheDirectory)) {
    await mkdir(cacheDirectory);
  }
}

function cacheFilename(repo: GitHubRepository, type: CacheType) {
  const owner = repo.repository.owner.login;
  const name = repo.repository.name;
  return join(
    cacheDirectory,
    `${owner}-${name}`.replace(/\W/g, '-') + `-${type}`
  );
}

export async function readFromCache(repo: GitHubRepository, type: CacheType) {
  const release = await mutex.acquire();
  try {
    await initCache();
    const cacheFile = cacheFilename(repo, type);
    if (!existsSync(cacheFile)) {
      return null;
    }
    const cacheStat = await stat(cacheFile);
    const mtime = cacheStat.mtimeMs ?? cacheStat.ctimeMs;
    const now = Date.now();
    if (now - mtime >= cacheMaxAge) {
      await unlink(cacheFile);
      return null;
    }

    const content = await readFile(cacheFile);
    const json = JSON.parse(content.toString()) as CachedData;
    return json;
  } finally {
    release();
  }
}

export async function saveToCache(
  repo: GitHubRepository,
  type: CacheType,
  data: CachedData
) {
  const release = await mutex.acquire();
  try {
    await initCache();
    const cacheFile = cacheFilename(repo, type);
    if (!data.issues) {
      data.issues = [];
    }
    if (!data.prs) {
      data.prs = [];
    }
    const content = JSON.stringify(data, null, ' ');
    await writeFile(cacheFile, content);
  } finally {
    release();
  }
}

export async function deleteCache(repo: GitHubRepository, type: CacheType) {
  const release = await mutex.acquire();
  try {
    await initCache();
    const cacheFile = cacheFilename(repo, type);
    if (existsSync(cacheFile)) {
      await unlink(cacheFile);
    }
  } finally {
    release();
  }
}
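A short, hedged usage sketch of the module above. The repo instance and the exampleUsage function are assumed for illustration; the one-hour TTL and the file location under the OS temp directory come straight from the cacheMaxAge and cacheDirectory constants at the top of the file:

import {deleteCache, readFromCache, saveToCache} from './cache.js';
import {GitHubRepository} from './github.js';

async function exampleUsage(repo: GitHubRepository) {
  // Cold start (or entry older than one hour): readFromCache returns null.
  let cached = await readFromCache(repo, 'prs');
  if (cached === null) {
    const prs = await repo.listPullRequests();
    await saveToCache(repo, 'prs', {prs, issues: []});
  }

  // Within the hour, later calls reuse the JSON file in
  // <tmpdir>/google-repo-cache/<owner>-<name>-prs.
  cached = await readFromCache(repo, 'prs');

  // Force a refetch next time, e.g. after modifying PRs in this repo.
  await deleteCache(repo, 'prs');
}

The mutex around every operation is presumably there because the iterator scans repositories concurrently, so two workers could otherwise try to create the cache directory or write the same cache file at the same time.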