New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in · Demo · Install
Socket

@nerdwallet/shepherd

Package Overview
Dependencies
Maintainers
6
Versions
50
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@nerdwallet/shepherd - npm Package Compare versions

Comparing version 1.16.0 to 2.3.1

4

lib/adapters/base.d.ts

@@ -7,3 +7,3 @@ export interface IRepo {

}
export declare type RetryMethod = (opts: number) => any;
export type RetryMethod = (opts: number) => any;
interface IRepoAdapter {

@@ -20,3 +20,3 @@ getCandidateRepos(onRetry: RetryMethod): Promise<IRepo[]>;

pushRepo(repo: IRepo, force: boolean): Promise<void>;
createPullRequest(repo: IRepo, message: string): Promise<void>;
createPullRequest(repo: IRepo, message: string, upstreamOwner: string): Promise<void>;
getPullRequestStatus(repo: IRepo): Promise<string[]>;

@@ -23,0 +23,0 @@ getRepoDir(repo: IRepo): string;

@@ -1,2 +0,1 @@

import { SimpleGit } from 'simple-git/promise';
import { IMigrationContext } from '../migration-context';

@@ -20,3 +19,3 @@ import IRepoAdapter, { IEnvironmentVariables, IRepo, RetryMethod } from './base';

pushRepo(repo: IRepo, force: boolean): Promise<void>;
abstract createPullRequest(repo: IRepo, message: string): Promise<void>;
abstract createPullRequest(repo: IRepo, message: string, upstreamOwner: string): Promise<void>;
abstract getPullRequestStatus(repo: IRepo): Promise<string[]>;

@@ -26,5 +25,5 @@ abstract getBaseBranch(repo: IRepo): string;

protected abstract getRepositoryUrl(repo: IRepo): string;
protected git(repo: IRepo): SimpleGit;
protected git(repo: IRepo): any;
protected isShepherdCommitMessage(message: string): boolean;
}
export default GitAdapter;
"use strict";
// TypeScript-emitted down-level helper: drives a generator-based coroutine,
// adapting each yielded value into the promise type P and settling the
// returned promise with the generator's final value (standard tsc __awaiter).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
// Wrap a plain value in P (usually Promise) unless it is already an instance of P.
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
// Advance the generator: resolve when done, otherwise chain the next step off the awaited value.
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -17,3 +8,3 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

const fs_extra_1 = __importDefault(require("fs-extra"));
const promise_1 = __importDefault(require("simple-git/promise"));
const simple_git_1 = require("simple-git");
class GitAdapter {

@@ -24,59 +15,49 @@ constructor(migrationContext) {

}
checkoutRepo(repo) {
return __awaiter(this, void 0, void 0, function* () {
const repoPath = this.getRepositoryUrl(repo);
const localPath = this.getRepoDir(repo);
if ((yield fs_extra_1.default.pathExists(localPath)) && (yield this.git(repo).checkIsRepo())) {
// Repo already exists; just fetch
yield this.git(repo).fetch('origin');
}
else {
const git = promise_1.default();
yield git.clone(repoPath, localPath, ['--depth', '1']);
}
// We'll immediately create and switch to a new branch
async checkoutRepo(repo) {
const repoPath = this.getRepositoryUrl(repo);
const localPath = this.getRepoDir(repo);
if ((await fs_extra_1.default.pathExists(localPath)) && (await this.git(repo).checkIsRepo())) {
// Repo already exists; just fetch
await this.git(repo).fetch('origin');
}
else {
const git = (0, simple_git_1.simpleGit)();
await git.clone(repoPath, localPath, ['--depth', '1']);
}
// We'll immediately create and switch to a new branch
try {
await this.git(repo).checkout(['-b', this.branchName, `origin/${this.branchName}`]);
}
catch (e) {
try {
yield this.git(repo).checkout(['-b', this.branchName, `origin/${this.branchName}`]);
await this.git(repo).checkoutLocalBranch(this.branchName);
}
catch (e) {
try {
yield this.git(repo).checkoutLocalBranch(this.branchName);
}
catch (e) {
// This branch probably already exists; we'll just switch to it
// to make sure we're on the right branch for the commit phase
yield this.git(repo).checkout(this.branchName);
}
// This branch probably already exists; we'll just switch to it
// to make sure we're on the right branch for the commit phase
await this.git(repo).checkout(this.branchName);
}
});
}
}
commitRepo(repo) {
return __awaiter(this, void 0, void 0, function* () {
const { migration: { spec } } = this.migrationContext;
yield this.git(repo).add('.');
yield this.git(repo).commit(`${spec.title} [shepherd]`);
});
// Stage every working-tree change and create a single commit whose message
// carries the "[shepherd]" marker (used elsewhere to recognize Shepherd's own commits).
async commitRepo(repo) {
const { migration: { spec }, } = this.migrationContext;
await this.git(repo).add('.');
await this.git(repo).commit(`${spec.title} [shepherd]`);
}
resetChangedFiles(repo) {
return __awaiter(this, void 0, void 0, function* () {
yield this.git(repo).reset(['--hard']);
yield this.git(repo).clean('f', ['-d']);
});
// Discard all local modifications: hard-reset tracked files, then remove
// untracked files and directories (equivalent to `git clean -f -d`).
async resetChangedFiles(repo) {
await this.git(repo).reset(['--hard']);
await this.git(repo).clean('f', ['-d']);
}
pushRepo(repo, force) {
return __awaiter(this, void 0, void 0, function* () {
const options = force ? ['--force'] : undefined;
yield this.git(repo).push('origin', 'HEAD', options);
});
// Push HEAD to origin; adds --force only when the caller explicitly requests it.
async pushRepo(repo, force) {
const options = force ? ['--force'] : undefined;
await this.git(repo).push('origin', 'HEAD', options);
}
getEnvironmentVariables(repo) {
return __awaiter(this, void 0, void 0, function* () {
const revision = yield this.git(repo).revparse(['HEAD']);
return {
SHEPHERD_GIT_REVISION: revision,
};
});
// Expose the checked-out revision (`git rev-parse HEAD`) to migration hooks
// via the SHEPHERD_GIT_REVISION environment variable.
async getEnvironmentVariables(repo) {
const revision = await this.git(repo).revparse(['HEAD']);
return {
SHEPHERD_GIT_REVISION: revision,
};
}
git(repo) {
// NOTE(review): diff-view artifact — both the old (simple-git/promise) and the
// new (simple-git) return statements are present here; the second return is
// unreachable. In the real 2.3.1 source only the simple_git_1 line exists.
return promise_1.default(this.getRepoDir(repo));
return (0, simple_git_1.simpleGit)(this.getRepoDir(repo));
}

@@ -83,0 +64,0 @@ isShepherdCommitMessage(message) {

@@ -15,6 +15,7 @@ import { IMigrationContext } from '../migration-context';

pushRepo(repo: IRepo, force: boolean): Promise<void>;
createPullRequest(repo: IRepo, message: string): Promise<void>;
createPullRequest(repo: IRepo, message: string, upstreamOwner: string): Promise<void>;
getPullRequestStatus(repo: IRepo): Promise<string[]>;
getRepoDir(repo: IRepo): string;
getDataDir(repo: IRepo): string;
getOwnerName(owner: string): string;
getBaseBranch(repo: IRepo): string;

@@ -21,0 +22,0 @@ getEnvironmentVariables(repo: IRepo): Promise<IEnvironmentVariables>;

"use strict";
// TypeScript-emitted down-level helper (duplicate copy per emitted file):
// drives a generator-based coroutine and settles the returned promise with
// the generator's final value (standard tsc __awaiter).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
// Wrap a plain value in P (usually Promise) unless it already is one.
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
// Advance the generator; chain the next step off each awaited value.
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -20,2 +11,4 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

const git_1 = __importDefault(require("./git"));
const { SHEPHERD_GITHUB_ENTERPRISE_URL } = process.env;
const shepherdGitHubEnterpriseUrl = SHEPHERD_GITHUB_ENTERPRISE_URL || 'api.github.com';
var SafetyStatus;

@@ -33,31 +26,30 @@ (function (SafetyStatus) {

}
getCandidateRepos() {
return __awaiter(this, void 0, void 0, function* () {
const { org, search_type = 'code', search_query } = this.migrationContext.migration.spec.adapter;
let repoNames;
// list all of an orgs active repos
if (org) {
if (search_query) {
throw new Error('Cannot use both "org" and "search_query" in GitHub adapter. Pick one.');
}
repoNames = yield this.githubService.getActiveReposForOrg({ org });
async getCandidateRepos() {
const { org, search_type = 'code', search_query, } = this.migrationContext.migration.spec.adapter;
let repoNames;
// list all of an orgs active repos
if (org) {
if (search_query) {
throw new Error('Cannot use both "org" and "search_query" in GitHub adapter. Pick one.');
}
else {
repoNames = yield this.githubService.getActiveReposForSearchTypeAndQuery({
search_type,
search_query
});
}
return lodash_1.default.uniq(repoNames).map((r) => this.parseRepo(r));
});
repoNames = await this.githubService.getActiveReposForOrg({ org });
}
else {
repoNames = await this.githubService.getActiveReposForSearchTypeAndQuery({
search_type,
search_query,
});
}
return lodash_1.default.uniq(repoNames).map((r) => this.parseRepo(r));
}
mapRepoAfterCheckout(repo) {
return __awaiter(this, void 0, void 0, function* () {
const { owner, name } = repo;
const defaultBranch = yield this.githubService.getDefaultBranchForRepo({
owner,
repo: name,
});
return Object.assign(Object.assign({}, repo), { defaultBranch });
async mapRepoAfterCheckout(repo) {
const { owner, name } = repo;
const defaultBranch = await this.githubService.getDefaultBranchForRepo({
owner,
repo: name,
});
return {
...repo,
defaultBranch,
};
}

@@ -81,79 +73,58 @@ parseRepo(repo) {

}
resetRepoBeforeApply(repo, force) {
return __awaiter(this, void 0, void 0, function* () {
const { defaultBranch } = repo;
// First, get any changes from the remote
// --prune will ensure that any remote branch deletes are reflected here
yield this.git(repo).fetch(['origin', '--prune']);
if (!force) {
const safetyStatus = yield this.checkActionSafety(repo);
if (safetyStatus === SafetyStatus.PullRequestExisted) {
throw new Error('Remote branch did not exist, but a pull request does or did; try with --force-reset-branch?');
}
else if (safetyStatus === SafetyStatus.NonShepherdCommits) {
throw new Error('Found non-Shepherd commits on remote branch; try with --force-reset-branch?');
}
async resetRepoBeforeApply(repo, force) {
const { defaultBranch } = repo;
// First, get any changes from the remote
// --prune will ensure that any remote branch deletes are reflected here
await this.git(repo).fetch(['origin', '--prune']);
if (!force) {
const safetyStatus = await this.checkActionSafety(repo);
if (safetyStatus === SafetyStatus.PullRequestExisted) {
throw new Error('Remote branch did not exist, but a pull request does or did; try with --force-reset-branch?');
}
// If we got this far, we can go ahead and reset to the default branch
yield this.git(repo).reset(['--hard', `origin/${defaultBranch}`]);
});
else if (safetyStatus === SafetyStatus.NonShepherdCommits) {
throw new Error('Found non-Shepherd commits on remote branch; try with --force-reset-branch?');
}
}
// If we got this far, we can go ahead and reset to the default branch
await this.git(repo).reset(['--hard', `origin/${defaultBranch}`]);
}
pushRepo(repo, force) {
const _super = Object.create(null, {
pushRepo: { get: () => super.pushRepo }
});
return __awaiter(this, void 0, void 0, function* () {
let shouldForce = false;
// First, get any changes from the remote
// --prune will ensure that any remote branch deletes are reflected here
yield this.git(repo).fetch(['origin', '--prune']);
if (!force) {
const safetyStatus = yield this.checkActionSafety(repo);
if (safetyStatus === SafetyStatus.PullRequestExisted) {
throw new Error('Remote branch did not exist, but a pull request does or did; try with --force?');
}
else if (safetyStatus === SafetyStatus.NonShepherdCommits) {
throw new Error('Found non-Shepherd commits on remote branch; try with --force?');
}
// If we get to here, it's safe to force-push to this branch
shouldForce = true;
async pushRepo(repo, force) {
let shouldForce = false;
// First, get any changes from the remote
// --prune will ensure that any remote branch deletes are reflected here
await this.git(repo).fetch(['origin', '--prune']);
if (!force) {
const safetyStatus = await this.checkActionSafety(repo);
if (safetyStatus === SafetyStatus.PullRequestExisted) {
throw new Error('Remote branch did not exist, but a pull request does or did; try with --force?');
}
yield _super.pushRepo.call(this, repo, force || shouldForce);
else if (safetyStatus === SafetyStatus.NonShepherdCommits) {
throw new Error('Found non-Shepherd commits on remote branch; try with --force?');
}
// If we get to here, it's safe to force-push to this branch
shouldForce = true;
}
await super.pushRepo(repo, force || shouldForce);
}
async createPullRequest(repo, message, upstreamOwner) {
const { migration: { spec }, } = this.migrationContext;
const { owner, name, defaultBranch } = repo;
let baseOwner = owner;
if (upstreamOwner) {
baseOwner = upstreamOwner;
}
// Let's check if a PR already exists
const pullRequests = await this.githubService.listPullRequests({
owner,
repo: name,
head: `${owner}:${this.branchName}`,
});
}
createPullRequest(repo, message) {
return __awaiter(this, void 0, void 0, function* () {
const { migration: { spec } } = this.migrationContext;
const { owner, name, defaultBranch } = repo;
// Let's check if a PR already exists
const pullRequests = yield this.githubService.listPullRequests({
owner,
repo: name,
head: `${owner}:${this.branchName}`,
});
if (pullRequests && pullRequests.length) {
const pullRequest = pullRequests[0];
if (pullRequest.state === 'open') {
// A pull request exists and is open, let's update it
yield this.githubService.updatePullRequest({
owner,
repo: name,
pull_number: pullRequest.number,
title: spec.title,
body: message,
});
}
else {
// A pull request exists but it was already closed - don't update it
// TODO proper status reporting without errors
throw new Error('Could not update pull request; it was already closed');
}
}
else {
// No PR yet - we have to create it
yield this.githubService.createPullRequest({
owner,
if (pullRequests && pullRequests.length) {
const pullRequest = pullRequests[0];
if (pullRequest.state === 'open') {
// A pull request exists and is open, let's update it
await this.githubService.updatePullRequest({
owner: baseOwner,
repo: name,
head: this.branchName,
base: defaultBranch,
pull_number: pullRequest.number,
title: spec.title,

@@ -163,84 +134,98 @@ body: message,

}
else {
// A pull request exists but it was already closed - don't update it
// TODO proper status reporting without errors
throw new Error('Could not update pull request; it was already closed');
}
}
else {
// No PR yet - we have to create it
await this.githubService.createPullRequest({
owner: baseOwner,
repo: name,
head: `${owner}:${this.branchName}`,
base: defaultBranch,
title: spec.title,
body: message,
});
}
}
async getPullRequestStatus(repo) {
const { owner, name } = repo;
const status = [];
// First, check for a pull request
const pullRequests = await this.githubService.listPullRequests({
owner,
repo: name,
head: `${owner}:${this.branchName}`,
state: 'all',
});
}
getPullRequestStatus(repo) {
return __awaiter(this, void 0, void 0, function* () {
const { owner, name } = repo;
const status = [];
// First, check for a pull request
const pullRequests = yield this.githubService.listPullRequests({
if (pullRequests && pullRequests.length) {
// GitHub's API is weird - you need a second query to get information about mergeability
const { data: pullRequest } = await this.githubService.getPullRequest({
owner,
repo: name,
head: `${owner}:${this.branchName}`,
state: 'all',
pull_number: pullRequests[0].number,
});
if (pullRequests && pullRequests.length) {
// GitHub's API is weird - you need a second query to get information about mergeability
const { data: pullRequest } = yield this.githubService.getPullRequest({
status.push(`PR #${pullRequest.number} [${pullRequest.html_url}]`);
if (pullRequest.merged_at) {
status.push(chalk_1.default.magenta(`PR was merged at ${pullRequest.merged_at}`));
}
else if (pullRequest.mergeable && pullRequest.mergeable_state === 'clean') {
status.push(chalk_1.default.green('PR is mergeable!'));
}
else {
status.push(chalk_1.default.red('PR is not mergeable'));
// Let's see what's blocking us
// Sadly, we can only get information about failing status checks, not being blocked
// by things like required reviews
const combinedStatus = await this.githubService.getCombinedRefStatus({
owner,
repo: name,
pull_number: pullRequests[0].number,
ref: this.branchName,
});
status.push(`PR #${pullRequest.number} [${pullRequest.html_url}]`);
if (pullRequest.merged_at) {
status.push(chalk_1.default.magenta(`PR was merged at ${pullRequest.merged_at}`));
const { statuses } = combinedStatus.data;
const anyPending = statuses.some((s) => s.state === 'pending');
const anyFailing = statuses.some((s) => s.state === 'error' || s.state === 'failure');
const recordStatus = (s) => status.push(`${s.context} ${chalk_1.default.dim(`- ${s.description}`)}`);
if (anyPending) {
status.push(chalk_1.default.underline.yellow('Pending status checks'));
statuses.forEach((s) => {
if (s.state !== 'pending') {
return;
}
recordStatus(s);
});
}
else if (pullRequest.mergeable && pullRequest.mergeable_state === 'clean') {
status.push(chalk_1.default.green('PR is mergeable!'));
}
else {
status.push(chalk_1.default.red('PR is not mergeable'));
// Let's see what's blocking us
// Sadly, we can only get information about failing status checks, not being blocked
// by things like required reviews
const combinedStatus = yield this.githubService.getCombinedRefStatus({
owner,
repo: name,
ref: this.branchName,
if (anyFailing) {
status.push(chalk_1.default.underline.red('Failing status checks'));
statuses.forEach((s) => {
if (!(s.state === 'error' || s.state === 'failure')) {
return;
}
recordStatus(s);
});
const { statuses } = combinedStatus.data;
const anyPending = statuses.some((s) => s.state === 'pending');
const anyFailing = statuses.some((s) => s.state === 'error' || s.state === 'failure');
const recordStatus = (s) => status.push(`${s.context} ${chalk_1.default.dim(`- ${s.description}`)}`);
if (anyPending) {
status.push(chalk_1.default.underline.yellow('Pending status checks'));
statuses.forEach((s) => {
if (s.state !== 'pending') {
return;
}
recordStatus(s);
});
}
if (anyFailing) {
status.push(chalk_1.default.underline.red('Failing status checks'));
statuses.forEach((s) => {
if (!(s.state === 'error' || s.state === 'failure')) {
return;
}
recordStatus(s);
});
}
}
}
else {
try {
// This will throw an exception if the branch does not exist
yield this.githubService.getBranch({
owner,
repo: name,
branch: this.branchName,
});
status.push('No PR exists');
}
else {
try {
// This will throw an exception if the branch does not exist
await this.githubService.getBranch({
owner,
repo: name,
branch: this.branchName,
});
status.push('No PR exists');
}
catch (e) {
if (e.code === 404) {
status.push('No branch or PR exists');
}
catch (e) {
if (e.code === 404) {
status.push('No branch or PR exists');
}
else {
throw e;
}
else {
throw e;
}
}
return status;
});
}
return status;
}

@@ -253,55 +238,55 @@ getRepoDir(repo) {

}
// Identity mapping: GitHub owner names need no transformation.
getOwnerName(owner) {
return owner;
}
// The PR base branch is the repo's default branch (captured during checkout).
getBaseBranch(repo) {
return repo.defaultBranch;
}
getEnvironmentVariables(repo) {
const _super = Object.create(null, {
getEnvironmentVariables: { get: () => super.getEnvironmentVariables }
});
return __awaiter(this, void 0, void 0, function* () {
const superEnvVars = yield _super.getEnvironmentVariables.call(this, repo);
return Object.assign(Object.assign({}, superEnvVars), { SHEPHERD_GITHUB_REPO_OWNER: repo.owner, SHEPHERD_GITHUB_REPO_NAME: repo.name });
});
// Extend the base adapter's environment variables with GitHub-specific
// repo metadata (owner and name) for migration hooks.
async getEnvironmentVariables(repo) {
const superEnvVars = await super.getEnvironmentVariables(repo);
return {
...superEnvVars,
SHEPHERD_GITHUB_REPO_OWNER: repo.owner,
SHEPHERD_GITHUB_REPO_NAME: repo.name,
};
}
getRepositoryUrl(repo) {
// NOTE(review): diff-view artifact — the old (hard-coded github.com) and new
// (SHEPHERD_GITHUB_ENTERPRISE_URL-aware) return lines are both present; the
// second return is unreachable. Also note the new default host is
// 'api.github.com', which looks wrong for an SSH clone URL — TODO confirm
// against the real 2.3.1 source.
return `git@github.com:${repo.owner}/${repo.name}.git`;
return `git@${shepherdGitHubEnterpriseUrl}:${repo.owner}/${repo.name}.git`;
}
checkActionSafety(repo) {
return __awaiter(this, void 0, void 0, function* () {
const { owner, name } = repo;
// Get all branches and look for the remote branch
// @ts-ignore (typings are broken)
const { branches } = yield this.git(repo).branch();
if (branches[`remotes/origin/${this.branchName}`] === undefined) {
// This remote branch does not exist
// We need to figure out if that's because a PR was open and
// subsequently closed, or if it's because we just haven't pushed
// a branch yet
const pullRequests = yield this.githubService.listPullRequests({
owner,
repo: name,
head: `${owner}:${this.branchName}`,
state: 'all',
});
if (pullRequests && pullRequests.length) {
// We'll assume that if a remote branch does not exist but a PR
// does/did, we don't want to apply to this branch
return SafetyStatus.PullRequestExisted;
}
async checkActionSafety(repo) {
const { owner, name } = repo;
// Get all branches and look for the remote branch
// @ts-ignore (typings are broken)
const { branches } = await this.git(repo).branch();
if (branches[`remotes/origin/${this.branchName}`] === undefined) {
// This remote branch does not exist
// We need to figure out if that's because a PR was open and
// subsequently closed, or if it's because we just haven't pushed
// a branch yet
const pullRequests = await this.githubService.listPullRequests({
owner,
repo: name,
head: `${owner}:${this.branchName}`,
state: 'all',
});
if (pullRequests && pullRequests.length) {
// We'll assume that if a remote branch does not exist but a PR
// does/did, we don't want to apply to this branch
return SafetyStatus.PullRequestExisted;
}
else {
// The remote branch exists!
// We'll get the list of all commits not on master and check if they're
// all from Shepherd. If they are, it's safe to reset the branch to
// master.
const upstreamBranch = `remotes/origin/${this.branchName}`;
const commits = yield this.git(repo).log([`HEAD..${upstreamBranch}`]);
const allShepherd = commits.all.every(({ message }) => this.isShepherdCommitMessage(message));
if (!allShepherd) {
// RIP.
return SafetyStatus.NonShepherdCommits;
}
}
else {
// The remote branch exists!
// We'll get the list of all commits not on master and check if they're
// all from Shepherd. If they are, it's safe to reset the branch to
// master.
const upstreamBranch = `remotes/origin/${this.branchName}`;
const commits = await this.git(repo).log([`HEAD..${upstreamBranch}`]);
const allShepherd = commits.all.every(({ message }) => this.isShepherdCommitMessage(message));
if (!allShepherd) {
// RIP.
return SafetyStatus.NonShepherdCommits;
}
return SafetyStatus.Success;
});
}
return SafetyStatus.Success;
}

@@ -308,0 +293,0 @@ }

"use strict";
// TypeScript-emitted down-level helper (duplicate copy per emitted file):
// runs a generator-based coroutine, resolving the returned promise with the
// generator's final value (standard tsc __awaiter).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
// Promote a plain value to the promise type P unless it already is one.
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
// Step the generator forward, chaining each awaited value.
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -22,3 +13,3 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

id: 'test-migration',
title: 'Test migration'
title: 'Test migration',
},

@@ -55,11 +46,9 @@ },

type: 'github',
search_query: 'topics:test'
search_query: 'topics:test',
};
const service = new github_1.default(context, mocktokit);
const adapter = new github_2.default(context, service);
return expect(adapter.getCandidateRepos())
.rejects
.toThrow('Cannot use both \"org\" and \"search_query\" in GitHub adapter. Pick one.');
return expect(adapter.getCandidateRepos()).rejects.toThrow('Cannot use both "org" and "search_query" in GitHub adapter. Pick one.');
});
it('performs org search if specified and returns expected result', () => __awaiter(void 0, void 0, void 0, function* () {
it('performs org search if specified and returns expected result', async () => {
const mocktokit = {};

@@ -69,3 +58,3 @@ const context = mockMigrationContext();

type: 'github',
org: 'testOrg'
org: 'testOrg',
};

@@ -75,7 +64,7 @@ const service = new github_1.default(context, mocktokit);

const adapter = new github_2.default(context, service);
const result = yield adapter.getCandidateRepos();
const result = await adapter.getCandidateRepos();
expect(service.getActiveReposForOrg).toBeCalledWith({ org: 'testOrg' });
expect(result).toStrictEqual([{ owner: 'testOrg', name: 'test-repo' }]);
}));
it(`performs repository search and returns expected result if 'respositories' is specified for search_type`, () => __awaiter(void 0, void 0, void 0, function* () {
});
it(`performs repository search and returns expected result if 'respositories' is specified for search_type`, async () => {
const mocktokit = {};

@@ -86,3 +75,3 @@ const context = mockMigrationContext();

search_type: 'repositories',
search_query: 'topics:test'
search_query: 'topics:test',
};

@@ -92,10 +81,10 @@ const service = new github_1.default(context, mocktokit);

const adapter = new github_2.default(context, service);
const result = yield adapter.getCandidateRepos();
const result = await adapter.getCandidateRepos();
expect(service.getActiveReposForSearchTypeAndQuery).toBeCalledWith({
search_type: 'repositories',
search_query: 'topics:test'
search_query: 'topics:test',
});
expect(result).toStrictEqual([{ owner: 'repoownername', name: 'test-repo' }]);
}));
it(`performs code search and returns expected result if search_type is 'code'`, () => __awaiter(void 0, void 0, void 0, function* () {
});
it(`performs code search and returns expected result if search_type is 'code'`, async () => {
const mocktokit = {};

@@ -106,3 +95,3 @@ const context = mockMigrationContext();

search_type: 'code',
search_query: 'path:/ filename:package.json in:path'
search_query: 'path:/ filename:package.json in:path',
};

@@ -112,11 +101,11 @@ const service = new github_1.default(context, mocktokit);

const adapter = new github_2.default(context, service);
const result = yield adapter.getCandidateRepos();
const result = await adapter.getCandidateRepos();
expect(service.getActiveReposForSearchTypeAndQuery).toBeCalledTimes(1);
expect(service.getActiveReposForSearchTypeAndQuery).toBeCalledWith({
search_type: 'code',
search_query: 'path:/ filename:package.json in:path'
search_query: 'path:/ filename:package.json in:path',
});
expect(result).toStrictEqual([{ owner: 'repoownername', name: 'test-repo' }]);
}));
it(`performs code search and returns expected result if search_type is not provided`, () => __awaiter(void 0, void 0, void 0, function* () {
});
it(`performs code search and returns expected result if search_type is not provided`, async () => {
const mocktokit = {};

@@ -126,3 +115,3 @@ const context = mockMigrationContext();

type: 'github',
search_query: 'path:/ filename:package.json in:path'
search_query: 'path:/ filename:package.json in:path',
};

@@ -132,10 +121,10 @@ const service = new github_1.default(context, mocktokit);

const adapter = new github_2.default(context, service);
const result = yield adapter.getCandidateRepos();
const result = await adapter.getCandidateRepos();
expect(service.getActiveReposForSearchTypeAndQuery).toBeCalledTimes(1);
expect(service.getActiveReposForSearchTypeAndQuery).toBeCalledWith({
search_type: 'code',
search_query: 'path:/ filename:package.json in:path'
search_query: 'path:/ filename:package.json in:path',
});
expect(result).toStrictEqual([{ owner: 'repoownername', name: 'test-repo' }]);
}));
});
});

@@ -157,3 +146,3 @@ describe('parseRepo', () => {

describe('mapRepoAfterCheckout', () => {
it('saves the default branch', () => __awaiter(void 0, void 0, void 0, function* () {
it('saves the default branch', async () => {
const context = mockMigrationContext();

@@ -168,7 +157,13 @@ const mocktokit = {};

const adapter = new github_2.default(context, service);
const mappedRepo = yield adapter.mapRepoAfterCheckout(repo);
const mappedRepo = await adapter.mapRepoAfterCheckout(repo);
expect(service.getDefaultBranchForRepo).toBeCalledTimes(1);
expect(service.getDefaultBranchForRepo).toBeCalledWith({ owner: repo.owner, repo: repo.name });
expect(mappedRepo).toEqual(Object.assign(Object.assign({}, repo), { defaultBranch: 'develop' }));
}));
expect(service.getDefaultBranchForRepo).toBeCalledWith({
owner: repo.owner,
repo: repo.name,
});
expect(mappedRepo).toEqual({
...repo,
defaultBranch: 'develop',
});
});
});

@@ -181,3 +176,3 @@ describe('prRepo', () => {

};
it('creates a new PR if one does not exist', () => __awaiter(void 0, void 0, void 0, function* () {
it('creates a new PR if one does not exist', async () => {
const context = mockMigrationContext();

@@ -188,3 +183,3 @@ const octokit = {};

const adapter = new github_2.default(context, service);
yield adapter.createPullRequest(REPO, 'Test PR message');
await adapter.createPullRequest(REPO, 'Test PR message', 'NerdWallet');
expect(service.listPullRequests).toBeCalledWith({

@@ -198,3 +193,3 @@ owner: 'NerdWallet',

repo: 'shepherd',
head: 'test-migration',
head: 'NerdWallet:test-migration',
base: 'master',

@@ -204,13 +199,15 @@ title: 'Test migration',

});
}));
it('updates a PR if one exists and is open', () => __awaiter(void 0, void 0, void 0, function* () {
});
it('updates a PR if one exists and is open', async () => {
const context = mockMigrationContext();
const octokit = {};
const service = new github_1.default(context, octokit);
service.listPullRequests.mockResolvedValue([{
service.listPullRequests.mockResolvedValue([
{
number: 1234,
state: 'open',
}]);
},
]);
const adapter = new github_2.default(context, service);
yield adapter.createPullRequest(REPO, 'Test PR message, part 2');
await adapter.createPullRequest(REPO, 'Test PR message, part 2', 'NerdWallet');
expect(service.updatePullRequest).toBeCalledWith({

@@ -221,19 +218,21 @@ owner: 'NerdWallet',

body: 'Test PR message, part 2',
pull_number: 1234
pull_number: 1234,
});
}));
it('does not update a closed PR', () => __awaiter(void 0, void 0, void 0, function* () {
});
it('does not update a closed PR', async () => {
const context = mockMigrationContext();
const octokit = {};
const service = new github_1.default(context, octokit);
service.listPullRequests.mockResolvedValue([{
service.listPullRequests.mockResolvedValue([
{
number: 1234,
state: 'closed',
}]);
},
]);
const adapter = new github_2.default(context, service);
yield expect(adapter.createPullRequest(REPO, 'Test PR message, part 2')).rejects.toThrow();
await expect(adapter.createPullRequest(REPO, 'Test PR message, part 2', 'NerdWallet')).rejects.toThrow();
expect(service.updatePullRequest).not.toBeCalled();
}));
});
});
});
//# sourceMappingURL=github.test.js.map
#!/usr/bin/env node
"use strict";
// TypeScript-emitted down-level helper (duplicate copy per emitted file):
// runs a generator-based coroutine, resolving the returned promise with the
// generator's final value (standard tsc __awaiter).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
// Promote a plain value to the promise type P unless it already is one.
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
// Step the generator forward, chaining each awaited value.
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -16,3 +7,3 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

Object.defineProperty(exports, "__esModule", { value: true });
const commander_1 = __importDefault(require("commander"));
const commander_1 = require("commander");
const fs_extra_1 = __importDefault(require("fs-extra"));

@@ -37,4 +28,5 @@ const os_1 = require("os");

const logger_1 = __importDefault(require("./logger"));
const program = new commander_1.Command();
const { SHEPHERD_DOT_DIRECTORY } = process.env;
const shepherdDir = SHEPHERD_DOT_DIRECTORY || path_1.default.join(os_1.homedir(), '.shepherd');
const shepherdDir = SHEPHERD_DOT_DIRECTORY || path_1.default.join((0, os_1.homedir)(), '.shepherd');
const prefs = new preferences_1.default('com.nerdwallet.shepherd', {

@@ -48,7 +40,7 @@ workingDirectory: shepherdDir,

const logger = new logger_1.default();
const handleCommand = (handler) => (migration, options) => __awaiter(void 0, void 0, void 0, function* () {
const handleCommand = (handler) => async (migration, options) => {
try {
const spec = migration_spec_1.loadSpec(migration);
const spec = (0, migration_spec_1.loadSpec)(migration);
const migrationWorkingDirectory = path_1.default.join(prefs.workingDirectory, spec.id);
yield fs_extra_1.default.ensureDir(migrationWorkingDirectory);
await fs_extra_1.default.ensureDir(migrationWorkingDirectory);
// We can't use type-checking on this context just yet since we have to dynamically

@@ -67,3 +59,3 @@ // assign some properties

};
const adapter = adapters_1.adapterForName(spec.adapter.type, migrationContext);
const adapter = (0, adapters_1.adapterForName)(spec.adapter.type, migrationContext);
migrationContext.adapter = adapter;

@@ -73,4 +65,5 @@ const selectedRepos = options.repos && options.repos.map(adapter.parseRepo);

// The list of repos will be null if migration hasn't started yet
migrationContext.migration.repos = yield persisted_data_1.loadRepoList(migrationContext);
yield handler(migrationContext, options);
migrationContext.migration.repos = await (0, persisted_data_1.loadRepoList)(migrationContext);
migrationContext.migration.upstreamOwner = options.upstreamOwner;
await handler(migrationContext, options);
}

@@ -81,5 +74,5 @@ catch (e) {

}
});
};
const buildCommand = (name, description) => {
return commander_1.default.command(`${name} <migration>`).description(description);
return program.command(`${name} <migration>`).description(description);
};

@@ -89,2 +82,5 @@ const addReposOption = (command) => {

};
const addUpstreamOwnerOption = (command) => {
return command.option('--upstreamOwner <upstreamOwner>', 'Upstream Owner can be passed incase of trying to raise PR from fork to upstream');
};
const addCommand = (name, description, repos, handler) => {

@@ -94,2 +90,3 @@ const subprogram = buildCommand(name, description);

addReposOption(subprogram);
addUpstreamOwnerOption(subprogram);
}

@@ -101,3 +98,3 @@ subprogram.action(handleCommand(handler));

addReposOption(applyCommand);
applyCommand.option('--skip-reset-branch', 'Don\'t reset branch before applying', false);
applyCommand.option('--skip-reset-branch', "Don't reset branch before applying", false);
applyCommand.option('--force-reset-branch', 'Force a reset of the branch before applying', true);

@@ -117,10 +114,13 @@ applyCommand.option('--skip-reset-on-error', 'Keep changes in the working tree even if the migration fails', false);

addCommand('list', 'List all checked out repositories for the given migration', false, list_1.default);
commander_1.default.command('version').description('Print Shepherd version').action(() => __awaiter(void 0, void 0, void 0, function* () {
logger.info(yield version_1.default());
}));
commander_1.default.on('command:*', () => {
logger.error(`Error: no such command "${commander_1.default.args[0]}"`);
program
.command('version')
.description('Print Shepherd version')
.action(async () => {
logger.info(await (0, version_1.default)());
});
program.on('command:*', () => {
logger.error(`Error: no such command "${program.args[0]}"`);
process.exit(1);
});
commander_1.default.parse(process.argv);
program.parse(process.argv);
//# sourceMappingURL=cli.js.map
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -17,8 +8,8 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

const for_each_repo_1 = __importDefault(require("../util/for-each-repo"));
exports.default = (context, options) => __awaiter(void 0, void 0, void 0, function* () {
exports.default = async (context, options) => {
const { adapter, logger } = context;
yield for_each_repo_1.default(context, (repo) => __awaiter(void 0, void 0, void 0, function* () {
await (0, for_each_repo_1.default)(context, async (repo) => {
const resetSpinner = logger.spinner('Removing uncommitted changes');
try {
yield adapter.resetChangedFiles(repo);
await adapter.resetChangedFiles(repo);
resetSpinner.succeed('Successfully reset repo');

@@ -37,3 +28,3 @@ }

try {
yield adapter.resetRepoBeforeApply(repo, options.forceResetBranch);
await adapter.resetRepoBeforeApply(repo, options.forceResetBranch);
resetBranchSpinner.succeed('Successfully reset branch');

@@ -48,3 +39,3 @@ }

logger.infoIcon('Running apply steps');
const stepsResults = yield execute_steps_1.default(context, repo, 'apply');
const stepsResults = await (0, execute_steps_1.default)(context, repo, 'apply');
if (stepsResults.succeeded) {

@@ -61,3 +52,3 @@ logger.succeedIcon('Completed all apply steps successfully');

try {
yield adapter.resetChangedFiles(repo);
await adapter.resetChangedFiles(repo);
spinner.succeed('Successfully reset repo');

@@ -70,4 +61,4 @@ }

}
}));
});
});
};
//# sourceMappingURL=apply.js.map
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -19,7 +10,7 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

const persisted_data_1 = require("../util/persisted-data");
const removeRepoDirectories = (adapter, repo) => __awaiter(void 0, void 0, void 0, function* () {
yield fs_extra_1.default.remove(adapter.getRepoDir(repo));
yield fs_extra_1.default.remove(adapter.getDataDir(repo));
});
exports.default = (context) => __awaiter(void 0, void 0, void 0, function* () {
const removeRepoDirectories = async (adapter, repo) => {
await fs_extra_1.default.remove(adapter.getRepoDir(repo));
await fs_extra_1.default.remove(adapter.getDataDir(repo));
};
exports.default = async (context) => {
const { migration: { selectedRepos }, adapter, logger, } = context;

@@ -36,3 +27,3 @@ function onRetry(numSeconds) {

const spinner = logger.spinner('Loading candidate repos');
repos = yield adapter.getCandidateRepos(onRetry);
repos = await adapter.getCandidateRepos(onRetry);
spinner.succeed(`Loaded ${repos.length} repos`);

@@ -44,6 +35,6 @@ }

const options = { warnMissingDirectory: false };
yield for_each_repo_1.default(context, options, (repo) => __awaiter(void 0, void 0, void 0, function* () {
await (0, for_each_repo_1.default)(context, options, async (repo) => {
const spinner = logger.spinner('Checking out repo');
try {
yield adapter.checkoutRepo(repo);
await adapter.checkoutRepo(repo);
spinner.succeed('Checked out repo');

@@ -57,8 +48,8 @@ }

// We need to create the data directory before running should_migrate
yield fs_extra_1.default.mkdirs(adapter.getDataDir(repo));
await fs_extra_1.default.mkdirs(adapter.getDataDir(repo));
logger.info('> Running should_migrate steps');
const stepsResults = yield execute_steps_1.default(context, repo, 'should_migrate');
const stepsResults = await (0, execute_steps_1.default)(context, repo, 'should_migrate');
if (!stepsResults.succeeded) {
discardedRepos.push(repo);
yield removeRepoDirectories(adapter, repo);
await removeRepoDirectories(adapter, repo);
logger.failIcon('Error running should_migrate steps; skipping');

@@ -69,6 +60,6 @@ }

logger.info('> Running post_checkout steps');
const postCheckoutStepsResults = yield execute_steps_1.default(context, repo, 'post_checkout');
const postCheckoutStepsResults = await (0, execute_steps_1.default)(context, repo, 'post_checkout');
if (!postCheckoutStepsResults.succeeded) {
discardedRepos.push(repo);
yield removeRepoDirectories(adapter, repo);
await removeRepoDirectories(adapter, repo);
logger.failIcon('Error running post_checkout steps; skipping');

@@ -81,3 +72,3 @@ }

}
}));
});
logger.info('');

@@ -87,7 +78,7 @@ logger.info(`Checked out ${checkedOutRepos.length} out of ${repos.length} repos`);

for (const repo of checkedOutRepos) {
mappedCheckedOutRepos.push(yield adapter.mapRepoAfterCheckout(repo));
mappedCheckedOutRepos.push(await adapter.mapRepoAfterCheckout(repo));
}
// We'll persist this list of repos for use in future steps
yield persisted_data_1.updateRepoList(context, mappedCheckedOutRepos, discardedRepos);
});
await (0, persisted_data_1.updateRepoList)(context, mappedCheckedOutRepos, discardedRepos);
};
//# sourceMappingURL=checkout.js.map
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -16,8 +7,8 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

const for_each_repo_1 = __importDefault(require("../util/for-each-repo"));
exports.default = (context) => __awaiter(void 0, void 0, void 0, function* () {
const { adapter, logger, } = context;
yield for_each_repo_1.default(context, (repo) => __awaiter(void 0, void 0, void 0, function* () {
exports.default = async (context) => {
const { adapter, logger } = context;
await (0, for_each_repo_1.default)(context, async (repo) => {
const spinner = logger.spinner('Committing changes');
try {
yield adapter.commitRepo(repo);
await adapter.commitRepo(repo);
spinner.succeed('Changes committed');

@@ -29,4 +20,4 @@ }

}
}));
});
});
};
//# sourceMappingURL=commit.js.map
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = (context) => __awaiter(void 0, void 0, void 0, function* () {
exports.default = async (context) => {
const { migration: { repos }, logger, adapter, } = context;
for (const repo of (repos || [])) {
for (const repo of repos || []) {
logger.info(adapter.stringifyRepo(repo));
}
});
};
//# sourceMappingURL=list.js.map
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -18,3 +9,3 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

const generate_pr_message_1 = require("../util/generate-pr-message");
exports.default = (context) => __awaiter(void 0, void 0, void 0, function* () {
exports.default = async (context) => {
const { migration: { spec }, logger, } = context;

@@ -25,5 +16,5 @@ if (!spec.hooks.pr_message || spec.hooks.pr_message.length === 0) {

}
yield for_each_repo_1.default(context, (repo) => __awaiter(void 0, void 0, void 0, function* () {
await (0, for_each_repo_1.default)(context, async (repo) => {
const spinner = logger.spinner('Generating PR message');
const stepResults = yield execute_steps_1.default(context, repo, 'pr_message', false);
const stepResults = await (0, execute_steps_1.default)(context, repo, 'pr_message', false);
if (!stepResults.succeeded) {

@@ -34,3 +25,3 @@ spinner.fail('Failed to generate PR message');

spinner.succeed('Generated PR message');
const message = generate_pr_message_1.generatePrMessageWithFooter(stepResults);
const message = (0, generate_pr_message_1.generatePrMessageWithFooter)(stepResults);
logger.info('=========');

@@ -45,4 +36,4 @@ if (message) {

}
}));
});
});
};
//# sourceMappingURL=pr-preview.js.map
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -16,8 +7,8 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

const for_each_repo_1 = __importDefault(require("../util/for-each-repo"));
exports.default = (context) => __awaiter(void 0, void 0, void 0, function* () {
exports.default = async (context) => {
const { logger, adapter } = context;
yield for_each_repo_1.default(context, (repo) => __awaiter(void 0, void 0, void 0, function* () {
await (0, for_each_repo_1.default)(context, async (repo) => {
const spinner = logger.spinner('Determining repo PR status');
try {
const status = yield adapter.getPullRequestStatus(repo);
const status = await adapter.getPullRequestStatus(repo);
spinner.destroy();

@@ -30,4 +21,4 @@ status.forEach((s) => logger.info(s));

}
}));
});
});
};
//# sourceMappingURL=pr-status.js.map
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -18,4 +9,4 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

const generate_pr_message_1 = require("../util/generate-pr-message");
exports.default = (context) => __awaiter(void 0, void 0, void 0, function* () {
const { migration: { spec }, logger, } = context;
exports.default = async (context) => {
const { migration: { spec, upstreamOwner }, logger, } = context;
if (!spec.hooks.pr_message || spec.hooks.pr_message.length === 0) {

@@ -25,5 +16,5 @@ logger.error('No pr_message hook specified in the migration spec');

}
yield for_each_repo_1.default(context, (repo) => __awaiter(void 0, void 0, void 0, function* () {
await (0, for_each_repo_1.default)(context, async (repo) => {
const spinner = logger.spinner('Generating PR message');
const stepResults = yield execute_steps_1.default(context, repo, 'pr_message', false);
const stepResults = await (0, execute_steps_1.default)(context, repo, 'pr_message', false);
if (!stepResults.succeeded) {

@@ -33,3 +24,3 @@ spinner.fail('Failed to generate PR message');

}
const message = generate_pr_message_1.generatePrMessageWithFooter(stepResults);
const message = (0, generate_pr_message_1.generatePrMessageWithFooter)(stepResults);
if (!message) {

@@ -42,3 +33,3 @@ spinner.warn('Generated PR message was empty');

try {
yield context.adapter.createPullRequest(repo, message);
await context.adapter.createPullRequest(repo, message, upstreamOwner);
prSpinner.succeed('Pull request created');

@@ -50,4 +41,4 @@ }

}
}));
});
});
};
//# sourceMappingURL=pr.js.map
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -16,8 +7,8 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

const for_each_repo_1 = __importDefault(require("../util/for-each-repo"));
exports.default = (context, options) => __awaiter(void 0, void 0, void 0, function* () {
exports.default = async (context, options) => {
const { adapter, logger } = context;
yield for_each_repo_1.default(context, (repo) => __awaiter(void 0, void 0, void 0, function* () {
await (0, for_each_repo_1.default)(context, async (repo) => {
const spinner = logger.spinner('Pushing changes');
try {
yield adapter.pushRepo(repo, options.force);
await adapter.pushRepo(repo, options.force);
spinner.succeed('Changes pushed');

@@ -29,4 +20,4 @@ }

}
}));
});
});
};
//# sourceMappingURL=push.js.map
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -16,8 +7,8 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

const for_each_repo_1 = __importDefault(require("../util/for-each-repo"));
exports.default = (context) => __awaiter(void 0, void 0, void 0, function* () {
exports.default = async (context) => {
const { adapter, logger } = context;
yield for_each_repo_1.default(context, (repo) => __awaiter(void 0, void 0, void 0, function* () {
await (0, for_each_repo_1.default)(context, async (repo) => {
const spinner = logger.spinner('Resetting changes');
try {
yield adapter.resetChangedFiles(repo);
await adapter.resetChangedFiles(repo);
spinner.succeed('Reset changes');

@@ -29,4 +20,4 @@ }

}
}));
});
});
};
//# sourceMappingURL=reset.js.map
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -17,7 +8,7 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

const path_1 = __importDefault(require("path"));
exports.default = () => __awaiter(void 0, void 0, void 0, function* () {
const pack = yield fs_extra_1.default.readFile(path_1.default.resolve(__dirname, '../../package.json'), 'utf8');
exports.default = async () => {
const pack = await fs_extra_1.default.readFile(path_1.default.resolve(__dirname, '../../package.json'), 'utf8');
const { version } = JSON.parse(pack);
return version;
});
};
//# sourceMappingURL=version.js.map
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -19,8 +10,8 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

describe('reposEqual', () => {
it('recognizes two repos as equal', () => __awaiter(void 0, void 0, void 0, function* () {
const pack = yield fs_extra_1.default.readFile(path_1.default.resolve(__dirname, '../../package.json'), 'utf8');
it('recognizes two repos as equal', async () => {
const pack = await fs_extra_1.default.readFile(path_1.default.resolve(__dirname, '../../package.json'), 'utf8');
const { version: vers } = JSON.parse(pack);
expect(yield version_1.default()).toBe(vers);
}));
expect(await (0, version_1.default)()).toBe(vers);
});
});
//# sourceMappingURL=version.spec.js.map

@@ -45,3 +45,3 @@ "use strict";

}
this.oraInstance = ora_1.default(message).start();
this.oraInstance = (0, ora_1.default)(message).start();
this.spinnerActive = true;

@@ -104,4 +104,4 @@ this.oraInstance.start();

}
output === null || output === void 0 ? void 0 : output.write(color(util_1.format(message) + '\n'));
errOutput === null || errOutput === void 0 ? void 0 : errOutput.write(color(util_1.format(message) + '\n'));
output === null || output === void 0 ? void 0 : output.write(color((0, util_1.format)(message) + '\n'));
errOutput === null || errOutput === void 0 ? void 0 : errOutput.write(color((0, util_1.format)(message) + '\n'));
if (this.spinnerActive) {

@@ -108,0 +108,0 @@ // Resume the spinner!

@@ -12,2 +12,3 @@ import IRepoAdapter, { IRepo } from './adapters/base';

repos: IRepo[] | null;
upstreamOwner: string;
selectedRepos?: IRepo[];

@@ -14,0 +15,0 @@ }

@@ -24,4 +24,4 @@ import { Octokit } from '@octokit/rest';

getBranch(criteria: RestEndpointMethodTypes['repos']['getBranch']['parameters']): Promise<RestEndpointMethodTypes['repos']['getBranch']['response']>;
getActiveReposForSearchTypeAndQuery({ search_type, search_query }: SearchTypeAndQueryParams): Promise<string[]>;
getActiveReposForSearchTypeAndQuery({ search_type, search_query, }: SearchTypeAndQueryParams): Promise<string[]>;
}
export {};
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -21,2 +12,4 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

const netrc_1 = __importDefault(require("netrc"));
const { SHEPHERD_GITHUB_ENTERPRISE_URL } = process.env;
const shepherdGitHubEnterpriseUrl = SHEPHERD_GITHUB_ENTERPRISE_URL || 'api.github.com';
const RetryableThrottledOctokit = rest_1.Octokit.plugin(plugin_throttling_1.throttling, plugin_retry_1.retry);

@@ -29,3 +22,3 @@ class GithubService {

else {
const netrcAuth = netrc_1.default();
const netrcAuth = (0, netrc_1.default)();
const token = process.env.GITHUB_TOKEN || lodash_1.default.get(netrcAuth['api.github.com'], 'password', undefined);

@@ -38,3 +31,5 @@ if (!token) {

auth: token,
baseUrl: `https://${shepherdGitHubEnterpriseUrl}/api/v3`,
throttle: {
enabled: false,
onRateLimit: (retryAfter, options) => {

@@ -45,7 +40,2 @@ context.logger.warn(`Hit rate limit for ${options.method} ${options.url}`);

},
onAbuseLimit: (retryAfter, options) => {
context.logger.warn(`Hit abuse limit for ${options.method} ${options.url}`);
context.logger.warn(`Retrying in ${retryAfter} second(s)`);
return options.request.retryCount < 5;
},
},

@@ -58,12 +48,8 @@ });

}
findReposByMetadata(criteria) {
return __awaiter(this, void 0, void 0, function* () {
const searchResults = yield this.paginateRest(this.octokit.search.repos, criteria);
return searchResults.map((r) => lodash_1.default.get(r, 'full_name')).sort();
});
async findReposByMetadata(criteria) {
const searchResults = await this.paginateRest(this.octokit.search.repos, criteria);
return searchResults.map((r) => lodash_1.default.get(r, 'full_name')).sort();
}
findReposByCode(criteria) {
return __awaiter(this, void 0, void 0, function* () {
return this.paginateRest(this.octokit.search.code, criteria);
});
async findReposByCode(criteria) {
return this.paginateRest(this.octokit.search.code, criteria);
}

@@ -73,19 +59,15 @@ getRepo(criteria) {

}
listOrgRepos({ org }) {
listOrgRepos({ org, }) {
return this.paginateRest(this.octokit.repos.listForOrg, { org });
}
getDefaultBranchForRepo(criteria) {
return __awaiter(this, void 0, void 0, function* () {
const { data } = yield this.getRepo(criteria);
return data.default_branch;
});
async getDefaultBranchForRepo(criteria) {
const { data } = await this.getRepo(criteria);
return data.default_branch;
}
getActiveReposForOrg(criteria) {
return __awaiter(this, void 0, void 0, function* () {
const allOrgRepos = yield this.listOrgRepos({ org: criteria.org });
return allOrgRepos
.filter((r) => !r.archived)
.map((r) => r.full_name)
.sort();
});
async getActiveReposForOrg(criteria) {
const allOrgRepos = await this.listOrgRepos({ org: criteria.org });
return allOrgRepos
.filter((r) => !r.archived)
.map((r) => r.full_name)
.sort();
}

@@ -110,22 +92,20 @@ getPullRequest(criteria) {

}
getActiveReposForSearchTypeAndQuery({ search_type, search_query }) {
return __awaiter(this, void 0, void 0, function* () {
switch (search_type) {
case 'repositories': {
return this.findReposByMetadata({ q: search_query });
}
case 'code': {
const repos = yield this.findReposByCode({ q: search_query });
const archived = yield Promise.all(repos.map((r) => __awaiter(this, void 0, void 0, function* () {
const { owner: { login: owner }, name } = r.repository;
const { data } = yield this.getRepo({ owner, repo: name });
return data.archived;
})));
return repos.filter((_r, i) => !archived[i]).map((r) => r.repository.full_name);
}
default: {
throw new Error(`Invalid search_type: ${search_type}`);
}
async getActiveReposForSearchTypeAndQuery({ search_type, search_query, }) {
switch (search_type) {
case 'repositories': {
return this.findReposByMetadata({ q: search_query });
}
});
case 'code': {
const repos = await this.findReposByCode({ q: search_query });
const archived = await Promise.all(repos.map(async (r) => {
const { owner: { login: owner }, name, } = r.repository;
const { data } = await this.getRepo({ owner, repo: name });
return data.archived;
}));
return repos.filter((_r, i) => !archived[i]).map((r) => r.repository.full_name);
}
default: {
throw new Error(`Invalid search_type: ${search_type}`);
}
}
}

@@ -132,0 +112,0 @@ }

"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -19,3 +10,3 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

describe('getDefaultBranchForRepo', () => {
it('calls repos.get with provided criteria & returns default branch', () => __awaiter(void 0, void 0, void 0, function* () {
it('calls repos.get with provided criteria & returns default branch', async () => {
const mocktokit = {

@@ -35,22 +26,22 @@ repos: {

};
const result = yield service.getDefaultBranchForRepo(searchCriteria);
const result = await service.getDefaultBranchForRepo(searchCriteria);
expect(mocktokit.repos.get).toBeCalledWith(searchCriteria);
expect(result).toEqual('master');
}));
});
});
describe('getActiveReposForOrg', () => {
it('calls octokit.paginate with criteria & returns sorted list of active repos', () => __awaiter(void 0, void 0, void 0, function* () {
it('calls octokit.paginate with criteria & returns sorted list of active repos', async () => {
const orgRepos = [
{
archived: true,
full_name: 'testOrg/archived-repo'
full_name: 'testOrg/archived-repo',
},
{
archived: false,
full_name: 'testOrg/very-active-repo'
full_name: 'testOrg/very-active-repo',
},
{
archived: false,
full_name: 'testOrg/active-repo'
}
full_name: 'testOrg/active-repo',
},
];

@@ -60,3 +51,5 @@ const mocktokit = {

repos: {
listForOrg: () => { return null; },
listForOrg: () => {
return null;
},
get: jest.fn().mockResolvedValue({

@@ -71,9 +64,9 @@ data: {

const searchCriteria = { org: 'testOrg' };
const result = yield service.getActiveReposForOrg(searchCriteria);
const result = await service.getActiveReposForOrg(searchCriteria);
expect(mocktokit.paginate).toBeCalledWith(mocktokit.repos.listForOrg, searchCriteria);
expect(result).toEqual(['testOrg/active-repo', 'testOrg/very-active-repo']);
}));
});
});
describe('getPullRequest', () => {
it('calls pulls.get with provided criteria & returns results', () => __awaiter(void 0, void 0, void 0, function* () {
it('calls pulls.get with provided criteria & returns results', async () => {
const samplePRResponse = {

@@ -85,4 +78,4 @@ data: {

mergable: true,
mergable_state: 'clean'
}
mergable_state: 'clean',
},
};

@@ -100,9 +93,9 @@ const mocktokit = {

};
const result = yield service.getPullRequest(searchCriteria);
const result = await service.getPullRequest(searchCriteria);
expect(mocktokit.pulls.get).toBeCalledWith(searchCriteria);
expect(result).toEqual(samplePRResponse);
}));
});
});
describe('listPullRequests', () => {
it('calls octokit.paginate with provided criteria & returns results', () => __awaiter(void 0, void 0, void 0, function* () {
it('calls octokit.paginate with provided criteria & returns results', async () => {
const samplePRsResponse = [

@@ -113,4 +106,4 @@ {

{
number: 2
}
number: 2,
},
];

@@ -130,13 +123,13 @@ const mocktokit = {

};
const result = yield service.listPullRequests(searchCriteria);
const result = await service.listPullRequests(searchCriteria);
expect(mocktokit.paginate).toBeCalledWith(mocktokit.pulls.list, searchCriteria);
expect(result).toEqual(samplePRsResponse);
}));
});
});
describe('createPullRequest', () => {
it('calls pulls.create with provided criteria & returns results', () => __awaiter(void 0, void 0, void 0, function* () {
it('calls pulls.create with provided criteria & returns results', async () => {
const prCreateResponse = {
url: 'https://api.github.com/repos/testOrg/test-repo/pulls/1',
id: 1,
html_url: 'https://github.com/testOrg/test-repo/pull/1'
html_url: 'https://github.com/testOrg/test-repo/pull/1',
};

@@ -157,13 +150,13 @@ const mocktokit = {

};
const result = yield service.createPullRequest(prCreateParams);
const result = await service.createPullRequest(prCreateParams);
expect(mocktokit.pulls.create).toBeCalledWith(prCreateParams);
expect(result).toEqual(prCreateResponse);
}));
});
});
describe('updatePullRequest', () => {
it('calls pulls.update with provided criteria & returns results', () => __awaiter(void 0, void 0, void 0, function* () {
it('calls pulls.update with provided criteria & returns results', async () => {
const prUpdateResponse = {
url: 'https://api.github.com/repos/testOrg/test-repo/pulls/1',
id: 1,
html_url: 'https://github.com/testOrg/test-repo/pull/1'
html_url: 'https://github.com/testOrg/test-repo/pull/1',
};

@@ -183,9 +176,9 @@ const mocktokit = {

};
const result = yield service.updatePullRequest(prUpdateParams);
const result = await service.updatePullRequest(prUpdateParams);
expect(mocktokit.pulls.update).toBeCalledWith(prUpdateParams);
expect(result).toEqual(prUpdateResponse);
}));
});
});
describe('getCombinedRefStatus', () => {
it('calls repos.getCombinedStatusForRef with provided criteria & returns results', () => __awaiter(void 0, void 0, void 0, function* () {
it('calls repos.getCombinedStatusForRef with provided criteria & returns results', async () => {
const combinedRefStatusResponse = {

@@ -195,10 +188,10 @@ data: {

statuses: {
state: 'pending'
}
}
state: 'pending',
},
},
};
const mocktokit = {
repos: {
getCombinedStatusForRef: jest.fn().mockResolvedValue(combinedRefStatusResponse)
}
getCombinedStatusForRef: jest.fn().mockResolvedValue(combinedRefStatusResponse),
},
};

@@ -211,19 +204,19 @@ const service = new github_1.default(mockMigrationContext(), mocktokit);

};
const result = yield service.getCombinedRefStatus(criteria);
const result = await service.getCombinedRefStatus(criteria);
expect(mocktokit.repos.getCombinedStatusForRef).toBeCalledWith(criteria);
expect(result).toEqual(combinedRefStatusResponse);
}));
});
});
describe('getBranch', () => {
it('calls repos.getBranch with provided criteria & returns results', () => __awaiter(void 0, void 0, void 0, function* () {
it('calls repos.getBranch with provided criteria & returns results', async () => {
const branchResponse = {
name: 'mass-update',
commit: {
url: 'https://github.com/testOrg/test-repo/tree/mass-update'
}
url: 'https://github.com/testOrg/test-repo/tree/mass-update',
},
};
const mocktokit = {
repos: {
getBranch: jest.fn().mockResolvedValue(branchResponse)
}
getBranch: jest.fn().mockResolvedValue(branchResponse),
},
};

@@ -236,9 +229,9 @@ const service = new github_1.default(mockMigrationContext(), mocktokit);

};
const result = yield service.getBranch(criteria);
const result = await service.getBranch(criteria);
expect(mocktokit.repos.getBranch).toBeCalledWith(criteria);
expect(result).toEqual(branchResponse);
}));
});
});
describe('getActiveReposForSearchTypeAndQuery', () => {
it('validates search_type is valid & throws if not', () => __awaiter(void 0, void 0, void 0, function* () {
it('validates search_type is valid & throws if not', async () => {
const mocktokit = {};

@@ -248,8 +241,8 @@ const service = new github_1.default(mockMigrationContext(), mocktokit);

search_type: 'invalid_search_type',
search_query: 'any'
search_query: 'any',
};
// @ts-expect-error -- Testing invalid `search_type`
yield expect(service.getActiveReposForSearchTypeAndQuery(criteria)).rejects.toThrow('Invalid search_type: invalid_search_type');
}));
it('finds repos by metadata if repository search is specified', () => __awaiter(void 0, void 0, void 0, function* () {
await expect(service.getActiveReposForSearchTypeAndQuery(criteria)).rejects.toThrow('Invalid search_type: invalid_search_type');
});
it('finds repos by metadata if repository search is specified', async () => {
const repoSearchResponse = [

@@ -269,3 +262,3 @@ {

},
}
},
];

@@ -275,15 +268,15 @@ const mocktokit = {

search: {
repos: jest.fn()
}
repos: jest.fn(),
},
};
const service = new github_1.default(mockMigrationContext(), mocktokit);
const SEARCH_QUERY = 'topics:test';
const result = yield service.getActiveReposForSearchTypeAndQuery({
const result = await service.getActiveReposForSearchTypeAndQuery({
search_type: 'repositories',
search_query: SEARCH_QUERY
search_query: SEARCH_QUERY,
});
expect(mocktokit.paginate).toBeCalledWith(mocktokit.search.repos, { q: SEARCH_QUERY });
expect(result).toEqual(repoSearchResponse.map((o) => o.full_name));
}));
it('finds repos by code if code search specified', () => __awaiter(void 0, void 0, void 0, function* () {
});
it('finds repos by code if code search specified', async () => {
const codeSearchResponse = [

@@ -309,3 +302,3 @@ {

},
}
},
];

@@ -322,8 +315,8 @@ const mocktokit = {

search: {
code: jest.fn()
}
code: jest.fn(),
},
};
const service = new github_1.default(mockMigrationContext(), mocktokit);
const SEARCH_QUERY = 'org:testOrg path:/ filename:package.json in:path';
const result = yield service.getActiveReposForSearchTypeAndQuery({
const result = await service.getActiveReposForSearchTypeAndQuery({
search_type: 'code',

@@ -334,4 +327,4 @@ search_query: SEARCH_QUERY,

expect(result).toEqual(['testOrg/repo1', 'testOrg/repo2']);
}));
it('filters out archived repos when using code search', () => __awaiter(void 0, void 0, void 0, function* () {
});
it('filters out archived repos when using code search', async () => {
const codeSearchResponse = [

@@ -357,3 +350,3 @@ {

},
}
},
];

@@ -379,8 +372,8 @@ const mocktokit = {

search: {
code: jest.fn()
}
code: jest.fn(),
},
};
const service = new github_1.default(mockMigrationContext(), mocktokit);
const SEARCH_QUERY = 'org:testOrg path:/ filename:package.json in:path';
const result = yield service.getActiveReposForSearchTypeAndQuery({
const result = await service.getActiveReposForSearchTypeAndQuery({
search_type: 'code',

@@ -391,5 +384,5 @@ search_query: SEARCH_QUERY,

expect(result).toEqual(['testOrg/repo1']);
}));
});
});
});
//# sourceMappingURL=github.test.js.map

@@ -0,1 +1,2 @@

/// <reference path="../../src/@types/child-process-promise/index.d.ts" />
/// <reference types="node" />

@@ -2,0 +3,0 @@ import { ChildProcessPromise } from 'child-process-promise';

"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
const child_process_promise_1 = require("child-process-promise");
exports.default = (context, repo, command) => __awaiter(void 0, void 0, void 0, function* () {
exports.default = async (context, repo, command) => {
const repoDir = context.adapter.getRepoDir(repo);

@@ -18,10 +9,17 @@ const dataDir = context.adapter.getDataDir(repo);

const migrationDir = context.migration.migrationDirectory;
const adapterEnvironmentVars = yield context.adapter.getEnvironmentVariables(repo);
const adapterEnvironmentVars = await context.adapter.getEnvironmentVariables(repo);
const execOptions = {
cwd: repoDir,
env: Object.assign(Object.assign(Object.assign({}, process.env), { SHEPHERD_REPO_DIR: repoDir, SHEPHERD_DATA_DIR: dataDir, SHEPHERD_MIGRATION_DIR: migrationDir, SHEPHERD_BASE_BRANCH: baseBranch }), adapterEnvironmentVars),
env: {
...process.env,
SHEPHERD_REPO_DIR: repoDir,
SHEPHERD_DATA_DIR: dataDir,
SHEPHERD_MIGRATION_DIR: migrationDir,
SHEPHERD_BASE_BRANCH: baseBranch,
...adapterEnvironmentVars,
},
shell: true,
capture: ['stdout', 'stderr'],
};
const promise = child_process_promise_1.spawn(command, [], execOptions);
const promise = (0, child_process_promise_1.spawn)(command, [], execOptions);
return {

@@ -31,3 +29,3 @@ promise,

};
});
};
//# sourceMappingURL=exec-in-repo.js.map
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -17,5 +8,5 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

const exec_in_repo_1 = __importDefault(require("../util/exec-in-repo"));
exports.default = (context, repo, phase, showOutput = true) => __awaiter(void 0, void 0, void 0, function* () {
exports.default = async (context, repo, phase, showOutput = true) => {
var _a, _b;
const { migration: { spec: { hooks, }, }, logger, } = context;
const { migration: { spec: { hooks }, }, logger, } = context;
const results = {

@@ -29,3 +20,3 @@ succeeded: false,

try {
const { promise, childProcess } = yield exec_in_repo_1.default(context, repo, step);
const { promise, childProcess } = await (0, exec_in_repo_1.default)(context, repo, step);
if (showOutput) {

@@ -35,3 +26,3 @@ (_a = childProcess.stdout) === null || _a === void 0 ? void 0 : _a.on('data', (out) => logger.info(out.toString().trim()));

(_b = childProcess.stderr) === null || _b === void 0 ? void 0 : _b.on('data', (out) => logger.info(out.toString().trim()));
const childProcessResult = yield promise;
const childProcessResult = await promise;
logger.info(chalk_1.default.green(`Step "${step}" exited with 0`));

@@ -65,3 +56,3 @@ results.stepResults.push({

return results;
});
};
//# sourceMappingURL=execute-steps.js.map
import { IRepo } from '../adapters/base';
import { IMigrationContext } from '../migration-context';
declare type RepoHandler = (repo: IRepo) => Promise<void>;
type RepoHandler = (repo: IRepo) => Promise<void>;
interface IOptions {
warnMissingDirectory?: boolean;
}
declare const _default: (context: IMigrationContext, param1: (RepoHandler | IOptions), param2?: RepoHandler | undefined) => Promise<void>;
declare const _default: (context: IMigrationContext, param1: RepoHandler | IOptions, param2?: RepoHandler) => Promise<void>;
export default _default;
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -17,4 +8,4 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

const fs_extra_1 = __importDefault(require("fs-extra"));
exports.default = (context, param1, param2) => __awaiter(void 0, void 0, void 0, function* () {
const { migration: { repos: migrationRepos, selectedRepos, }, logger, adapter, } = context;
exports.default = async (context, param1, param2) => {
const { migration: { repos: migrationRepos, selectedRepos }, logger, adapter, } = context;
let handler;

@@ -55,8 +46,8 @@ let options;

const repoDir = adapter.getRepoDir(repo);
if (warnMissingDirectory && !(yield fs_extra_1.default.pathExists(repoDir))) {
if (warnMissingDirectory && !(await fs_extra_1.default.pathExists(repoDir))) {
logger.error(`Directory ${repoDir} does not exist`);
}
yield handler(repo);
await handler(repo);
}
});
};
//# sourceMappingURL=for-each-repo.js.map

@@ -5,12 +5,18 @@ "use strict";

const generate = (results) => {
return results.stepResults.map((r) => r.stdout).filter((r) => r).join('').trim();
return results.stepResults
.map((r) => r.stdout)
.filter((r) => r)
.join('')
.trim();
};
exports.default = generate;
exports.generatePrMessageWithFooter = (results) => {
const generatePrMessageWithFooter = (results) => {
let msg = generate(results);
// We'll add a friendly footer too
msg += '\n\n---\n\n';
msg += '*This change was executed automatically with [Shepherd](https://github.com/NerdWalletOSS/shepherd).* 💚🤖';
msg +=
'*This change was executed automatically with [Shepherd](https://github.com/NerdWalletOSS/shepherd).* 💚🤖';
return msg;
};
exports.generatePrMessageWithFooter = generatePrMessageWithFooter;
//# sourceMappingURL=generate-pr-message.js.map
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -16,41 +7,37 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

const generate_pr_message_1 = __importDefault(require("./generate-pr-message"));
const specs = [{
const specs = [
{
name: 'handles empty results',
stepOutput: [],
expected: '',
}, {
},
{
name: 'handles a single step',
stepOutput: ['hello, world!'],
expected: 'hello, world!',
}, {
},
{
name: 'strips trailing newlines from single step',
stepOutput: ['hello, world!\n\n'],
expected: 'hello, world!',
}, {
},
{
name: 'handles multiple steps',
stepOutput: [
'hello, world!\n',
'goodbye, world.',
],
stepOutput: ['hello, world!\n', 'goodbye, world.'],
expected: 'hello, world!\ngoodbye, world.',
}, {
},
{
name: 'maintains newlines between steps',
stepOutput: [
'hello, world!\n\n\n',
'goodbye, world.',
],
stepOutput: ['hello, world!\n\n\n', 'goodbye, world.'],
expected: 'hello, world!\n\n\ngoodbye, world.',
}, {
},
{
name: 'excludes empty steps',
stepOutput: [
'hello, world!\n',
'',
undefined,
'goodbye, world.',
],
stepOutput: ['hello, world!\n', '', undefined, 'goodbye, world.'],
expected: 'hello, world!\ngoodbye, world.',
}];
},
];
describe('generate-pr-message', () => {
specs.forEach((spec) => {
it(spec.name, () => __awaiter(void 0, void 0, void 0, function* () {
it(spec.name, async () => {
const results = {

@@ -64,6 +51,6 @@ succeeded: true,

};
expect(generate_pr_message_1.default(results)).toEqual(spec.expected);
}));
expect((0, generate_pr_message_1.default)(results)).toEqual(spec.expected);
});
});
});
//# sourceMappingURL=generate-pr-message.test.js.map

@@ -9,3 +9,3 @@ import Joi from 'joi';

}
export declare type MigrationPhase = [keyof IMigrationHooks];
export type MigrationPhase = [keyof IMigrationHooks];
export interface IMigrationSpec {

@@ -22,2 +22,2 @@ id: string;

export declare function normalizeSpec(originalSpec: any): IMigrationSpec;
export declare function validateSpec(spec: any): Joi.ValidationResult;
export declare function validateSpec(spec: any): Joi.ValidationResult<any>;
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -9,3 +32,3 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

const fs_1 = __importDefault(require("fs"));
const js_yaml_1 = __importDefault(require("js-yaml"));
const yaml = __importStar(require("js-yaml"));
const lodash_1 = require("lodash");

@@ -15,3 +38,3 @@ const path_1 = __importDefault(require("path"));

const docPath = path_1.default.join(directory, 'shepherd.yml');
const spec = js_yaml_1.default.safeLoad(fs_1.default.readFileSync(docPath, 'utf8'));
const spec = yaml.load(fs_1.default.readFileSync(docPath, 'utf8'));
const normalizedSpec = normalizeSpec(spec);

@@ -26,5 +49,5 @@ const validationResult = validateSpec(normalizedSpec);

function normalizeSpec(originalSpec) {
const spec = lodash_1.cloneDeep(originalSpec);
const spec = (0, lodash_1.cloneDeep)(originalSpec);
if (spec.hooks) {
spec.hooks = lodash_1.mapValues(spec.hooks, (steps, phase) => {
spec.hooks = (0, lodash_1.mapValues)(spec.hooks, (steps, phase) => {
if (typeof steps === 'string') {

@@ -54,3 +77,5 @@ return [steps];

type: joi_1.default.string().valid('github').required(),
}).unknown(true).required(),
})
.unknown(true)
.required(),
hooks: joi_1.default.object({

@@ -57,0 +82,0 @@ should_migrate: hookSchema,

@@ -15,9 +15,6 @@ "use strict";

hooks: {
apply: [
'echo hi',
'echo bye',
],
apply: ['echo hi', 'echo bye'],
},
};
expect(migration_spec_1.normalizeSpec(spec)).toEqual(spec);
expect((0, migration_spec_1.normalizeSpec)(spec)).toEqual(spec);
});

@@ -35,4 +32,4 @@ it('creates a deep copy of the spec and does not modify the original', () => {

};
const originalSpec = lodash_1.cloneDeep(spec);
expect(migration_spec_1.normalizeSpec(spec)).not.toBe(spec);
const originalSpec = (0, lodash_1.cloneDeep)(spec);
expect((0, migration_spec_1.normalizeSpec)(spec)).not.toBe(spec);
expect(spec).toEqual(originalSpec);

@@ -54,3 +51,3 @@ });

};
expect(migration_spec_1.normalizeSpec(spec)).toEqual({
expect((0, migration_spec_1.normalizeSpec)(spec)).toEqual({
name: 'testspec',

@@ -79,25 +76,22 @@ adapter: {

hooks: {
apply: [
'echo hi',
'echo bye',
],
apply: ['echo hi', 'echo bye'],
},
};
it('accepts a valid spec', () => {
const spec = lodash_1.cloneDeep(baseSpec);
expect(migration_spec_1.validateSpec(spec).error).toBe(undefined);
const spec = (0, lodash_1.cloneDeep)(baseSpec);
expect((0, migration_spec_1.validateSpec)(spec).error).toBe(undefined);
});
['id', 'title', 'adapter'].forEach((prop) => {
it(`rejects a spec with a missing ${prop}`, () => {
const spec = lodash_1.cloneDeep(baseSpec);
const spec = (0, lodash_1.cloneDeep)(baseSpec);
delete spec[prop];
expect(migration_spec_1.validateSpec(spec).error).not.toBe(undefined);
expect((0, migration_spec_1.validateSpec)(spec).error).not.toBe(undefined);
});
});
it('rejects a spec with a missing adapter type', () => {
const spec = lodash_1.cloneDeep(baseSpec);
const spec = (0, lodash_1.cloneDeep)(baseSpec);
delete spec.adapter.type;
expect(migration_spec_1.validateSpec(spec).error).not.toBe(undefined);
expect((0, migration_spec_1.validateSpec)(spec).error).not.toBe(undefined);
});
});
//# sourceMappingURL=migration-spec.test.js.map

@@ -5,2 +5,2 @@ import { IRepo } from '../adapters/base';

declare const updateRepoList: (migrationContext: IMigrationContext, checkedOutRepos: IRepo[], discardedRepos: IRepo[]) => Promise<IRepo[]>;
export { updateRepoList, loadRepoList, };
export { updateRepoList, loadRepoList };
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -26,10 +17,10 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

*/
const migrateToJsonIfNeeded = (migrationContext) => __awaiter(void 0, void 0, void 0, function* () {
const migrateToJsonIfNeeded = async (migrationContext) => {
const legacyFile = getLegacyRepoListFile(migrationContext);
if (yield fs_extra_1.default.pathExists(legacyFile)) {
const data = js_yaml_1.default.safeLoad(yield fs_extra_1.default.readFile(legacyFile, 'utf8'));
yield fs_extra_1.default.outputFile(getRepoListFile(migrationContext), jsonStringify(data));
yield fs_extra_1.default.remove(legacyFile);
if (await fs_extra_1.default.pathExists(legacyFile)) {
const data = js_yaml_1.default.load(await fs_extra_1.default.readFile(legacyFile, 'utf8'));
await fs_extra_1.default.outputFile(getRepoListFile(migrationContext), jsonStringify(data));
await fs_extra_1.default.remove(legacyFile);
}
});
};
const getRepoListFile = (migrationContext) => {

@@ -41,12 +32,12 @@ return path_1.default.join(migrationContext.migration.workingDirectory, 'repos.json');

};
const loadRepoList = (migrationContext) => __awaiter(void 0, void 0, void 0, function* () {
yield migrateToJsonIfNeeded(migrationContext);
const loadRepoList = async (migrationContext) => {
await migrateToJsonIfNeeded(migrationContext);
const repoListFile = getRepoListFile(migrationContext);
if (!(yield fs_extra_1.default.pathExists(repoListFile))) {
if (!(await fs_extra_1.default.pathExists(repoListFile))) {
return null;
}
return JSON.parse(yield fs_extra_1.default.readFile(repoListFile, 'utf8'));
});
return JSON.parse(await fs_extra_1.default.readFile(repoListFile, 'utf8'));
};
exports.loadRepoList = loadRepoList;
const updateRepoList = (migrationContext, checkedOutRepos, discardedRepos) => __awaiter(void 0, void 0, void 0, function* () {
const updateRepoList = async (migrationContext, checkedOutRepos, discardedRepos) => {
// We need to keep the list of repos in sync with what's actually on disk

@@ -56,14 +47,14 @@ // To do this, we'll load the existing list, delete any repos that were not

// and add any repos that were newly checked out, removing duplicates as appropriate
const existingRepos = yield loadRepoList(migrationContext);
const existingRepos = await loadRepoList(migrationContext);
if (!existingRepos) {
// No repos stored yet, we can update this list directly
yield fs_extra_1.default.outputFile(getRepoListFile(migrationContext), JSON.stringify(checkedOutRepos));
await fs_extra_1.default.outputFile(getRepoListFile(migrationContext), JSON.stringify(checkedOutRepos));
return checkedOutRepos;
}
const { reposEqual } = migrationContext.adapter;
const repos = lodash_1.unionWith(lodash_1.differenceWith(existingRepos, discardedRepos, reposEqual), checkedOutRepos, reposEqual);
yield fs_extra_1.default.outputFile(getRepoListFile(migrationContext), JSON.stringify(repos));
const repos = (0, lodash_1.unionWith)((0, lodash_1.differenceWith)(existingRepos, discardedRepos, reposEqual), checkedOutRepos, reposEqual);
await fs_extra_1.default.outputFile(getRepoListFile(migrationContext), JSON.stringify(repos));
return repos;
});
};
exports.updateRepoList = updateRepoList;
//# sourceMappingURL=persisted-data.js.map
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -25,17 +16,17 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

adapter: {
reposEqual: (r1, r2) => lodash_1.isEqual(r1, r2),
reposEqual: (r1, r2) => (0, lodash_1.isEqual)(r1, r2),
},
});
describe('persisted-data', () => {
it('loads repo list from a file', () => __awaiter(void 0, void 0, void 0, function* () {
it('loads repo list from a file', async () => {
const workingDirectory = path_1.default.join(__dirname, '../../fixtures/artifacts/.shepherd/load-repo-from-file/');
const repos = yield persisted_data_1.loadRepoList(makeContext(workingDirectory));
expect(repos).toEqual([{ 'defaultBranch': 'master', 'name': 'shepherd', 'owner': 'NerdWalletOSS' }]);
}));
it('returns null if the file does not exist', () => __awaiter(void 0, void 0, void 0, function* () {
const repos = await (0, persisted_data_1.loadRepoList)(makeContext(workingDirectory));
expect(repos).toEqual([{ defaultBranch: 'master', name: 'shepherd', owner: 'NerdWalletOSS' }]);
});
it('returns null if the file does not exist', async () => {
const workingDirectory = path_1.default.join(__dirname, '../../fixtures/artifacts/.shepherd/no-repos-json/');
const repos = yield persisted_data_1.loadRepoList(makeContext(workingDirectory));
const repos = await (0, persisted_data_1.loadRepoList)(makeContext(workingDirectory));
expect(repos).toEqual(null);
}));
it('migrates from a YAML file to a JSON file', () => __awaiter(void 0, void 0, void 0, function* () {
});
it('migrates from a YAML file to a JSON file', async () => {
const workingDirectory = path_1.default.join(__dirname, '../../fixtures/artifacts/.shepherd/yaml-to-json/');

@@ -45,37 +36,43 @@ const source = path_1.default.join(workingDirectory, 'repos-artifact.yml');

fs_extra_1.default.copyFileSync(source, destination);
const repos = yield persisted_data_1.loadRepoList(makeContext(workingDirectory));
const repos = await (0, persisted_data_1.loadRepoList)(makeContext(workingDirectory));
fs_extra_1.default.unlinkSync(path_1.default.join(workingDirectory, 'repos.json'));
expect(repos).toEqual([{ 'defaultBranch': 'master', 'name': 'shepherd', 'owner': 'NerdWalletOSS' }]);
}));
it('creates a new repos file if one does not exist', () => __awaiter(void 0, void 0, void 0, function* () {
expect(repos).toEqual([{ defaultBranch: 'master', name: 'shepherd', owner: 'NerdWalletOSS' }]);
});
it('creates a new repos file if one does not exist', async () => {
const workingDirectory = path_1.default.join(__dirname, '../../fixtures/artifacts/.shepherd/creates-new-repos-file/');
const checkedOutRepos = [{
const checkedOutRepos = [
{
name: 'test2',
owner: 'NerdWallet',
}];
const repos = yield persisted_data_1.updateRepoList(makeContext(workingDirectory), checkedOutRepos, []);
const expected = [{
},
];
const repos = await (0, persisted_data_1.updateRepoList)(makeContext(workingDirectory), checkedOutRepos, []);
const expected = [
{
name: 'test2',
owner: 'NerdWallet',
}];
},
];
expect(repos).toEqual(expected);
const filePath = path_1.default.join(workingDirectory, 'repos.json');
const result = JSON.parse((yield fs_extra_1.default.readFile(filePath)).toString());
const result = JSON.parse((await fs_extra_1.default.readFile(filePath)).toString());
fs_extra_1.default.unlinkSync(filePath);
expect(result).toEqual(expected);
}));
it('removes repo that was discarded', () => __awaiter(void 0, void 0, void 0, function* () {
});
it('removes repo that was discarded', async () => {
const workingDirectory = path_1.default.join(__dirname, '../../fixtures/artifacts/.shepherd/removes-repo/');
const discardedRepos = [{
const discardedRepos = [
{
owner: 'NerdWallet',
name: 'test',
}];
const repos = yield persisted_data_1.updateRepoList(makeContext(workingDirectory), [], discardedRepos);
},
];
const repos = await (0, persisted_data_1.updateRepoList)(makeContext(workingDirectory), [], discardedRepos);
const filePath = path_1.default.join(workingDirectory, 'repos.json');
const result = JSON.parse((yield fs_extra_1.default.readFile(filePath)).toString());
const result = JSON.parse((await fs_extra_1.default.readFile(filePath)).toString());
fs_extra_1.default.unlinkSync(filePath);
expect(repos).toEqual([]);
expect(result).toEqual([]);
}));
it('adds repo that was checked out', () => __awaiter(void 0, void 0, void 0, function* () {
});
it('adds repo that was checked out', async () => {
const workingDirectory = path_1.default.join(__dirname, '../../fixtures/artifacts/.shepherd/adds-checked-out-repo/');

@@ -89,39 +86,49 @@ const source = path_1.default.join(workingDirectory, 'repos-org.json');

owner: 'NerdWallet',
}
},
];
yield persisted_data_1.updateRepoList(makeContext(workingDirectory), checkedOutRepos, [{
await (0, persisted_data_1.updateRepoList)(makeContext(workingDirectory), checkedOutRepos, [
{
name: 'test',
owner: 'NerdWallet',
}]);
const expected = [{
},
]);
const expected = [
{
name: 'test2',
owner: 'NerdWallet',
}];
},
];
const filePath = path_1.default.join(workingDirectory, 'repos.json');
const result = JSON.parse((yield fs_extra_1.default.readFile(filePath)).toString());
const result = JSON.parse((await fs_extra_1.default.readFile(filePath)).toString());
fs_extra_1.default.unlinkSync(filePath);
expect(result).toEqual(expected);
}));
it('removes and adds repos at the same time', () => __awaiter(void 0, void 0, void 0, function* () {
});
it('removes and adds repos at the same time', async () => {
const workingDirectory = path_1.default.join(__dirname, '../../fixtures/artifacts/.shepherd/removes-adds-repos/');
const checkedOutRepos = [{
const checkedOutRepos = [
{
name: 'test2',
owner: 'NerdWallet',
}];
const discardedRepos = [{
},
];
const discardedRepos = [
{
name: 'test',
owner: 'NerdWallet',
}];
const repos = yield persisted_data_1.updateRepoList(makeContext(workingDirectory), checkedOutRepos, discardedRepos);
const expected = [{
},
];
const repos = await (0, persisted_data_1.updateRepoList)(makeContext(workingDirectory), checkedOutRepos, discardedRepos);
const expected = [
{
owner: 'NerdWallet',
name: 'test2',
}];
},
];
expect(repos).toEqual(expected);
const filePath = path_1.default.join(workingDirectory, 'repos.json');
const result = JSON.parse((yield fs_extra_1.default.readFile(filePath)).toString());
const result = JSON.parse((await fs_extra_1.default.readFile(filePath)).toString());
fs_extra_1.default.unlinkSync(filePath);
expect(result).toEqual(expected);
}));
});
});
//# sourceMappingURL=persisted-data.test.js.map
{
"name": "@nerdwallet/shepherd",
"version": "1.16.0",
"version": "2.3.1",
"description": "A utility for applying code changes across many repositories",

@@ -20,2 +20,3 @@ "keywords": [

"author": "Nathan Walters",
"main": "./lib/cli.js",
"bin": {

@@ -25,6 +26,6 @@ "shepherd": "./lib/cli.js"

"scripts": {
"build": "tsc",
"build": "tsc -p tsconfig.cjs.json",
"build:watch": "yarn build --watch",
"fix-lint": "yarn lint --fix",
"lint": "eslint src/**/*.ts",
"fix-lint": "eslint src/**/*.ts --fix && prettier --write .",
"lint": "eslint src/**/*.ts && prettier --check .",
"prepublishOnly": "yarn test && yarn build",

@@ -44,3 +45,8 @@ "test": "jest --coverage src/"

"transform": {
"^.+\\.tsx?$": "ts-jest"
"^.+\\.tsx?$": [
"ts-jest",
{
"tsconfig": "tsconfig.cjs.json"
}
]
}

@@ -52,38 +58,41 @@ },

"dependencies": {
"@octokit/plugin-retry": "^3.0.9",
"@octokit/plugin-throttling": "^3.5.2",
"@octokit/rest": "^18.12.0",
"@types/js-yaml": "^3.12.6",
"chalk": "^4.1.0",
"@octokit/core": "^5.0.2",
"@octokit/plugin-retry": "^6.0.1",
"@octokit/plugin-throttling": "^8.1.3",
"@octokit/rest": "^20.0.2",
"@types/js-yaml": "^4.0.9",
"chalk": "^4.1.2",
"child-process-promise": "^2.2.1",
"commander": "^6.2.1",
"fs-extra": "^9.1.0",
"joi": "^17.4.0",
"js-yaml": "^3.14.1",
"lodash": "^4.17.19",
"log-symbols": "^4.0.0",
"commander": "^11.1.0",
"fs-extra": "^11.2.0",
"joi": "^17.11.0",
"js-yaml": "^4.1.0",
"lodash": "^4.17.21",
"log-symbols": "^4.1.0",
"netrc": "^0.1.4",
"ora": "^5.3.0",
"ora": "^5.4.1",
"preferences": "^2.0.2",
"simple-git": "^2.36.1"
"simple-git": "^3.22.0"
},
"devDependencies": {
"@octokit/plugin-rest-endpoint-methods": "^5.0.0",
"@octokit/types": "^6.8.3",
"@semantic-release/changelog": "^5.0.1",
"@semantic-release/git": "^9.0.0",
"@types/fs-extra": "^9.0.6",
"@types/jest": "^27.0.0",
"@types/lodash": "^4.14.167",
"@types/log-symbols": "^2.0.0",
"@types/node": "^16.0.0",
"@typescript-eslint/eslint-plugin": "^4.15.0",
"@typescript-eslint/parser": "^4.15.0",
"eslint": "^7.19.0",
"eslint-plugin-import": "^2.22.0",
"@octokit/plugin-rest-endpoint-methods": "^10.2.0",
"@octokit/types": "^12.4.0",
"@semantic-release/changelog": "^6.0.3",
"@semantic-release/git": "^10.0.1",
"@types/fs-extra": "^11.0.4",
"@types/jest": "^29.5.11",
"@types/lodash": "^4.14.202",
"@types/node": "^20.10.8",
"@typescript-eslint/eslint-plugin": "^6.18.1",
"@typescript-eslint/parser": "^6.18.1",
"conventional-changelog-conventionalcommits": "^7.0.2",
"eslint": "^8.56.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-import": "^2.29.1",
"eslint-plugin-prefer-arrow": "^1.2.3",
"jest": "^27.0.0",
"semantic-release": "^17.3.8",
"ts-jest": "^27.0.0",
"typescript": "^3.9.9"
"jest": "^29.7.0",
"prettier": "^3.1.1",
"semantic-release": "^22.0.12",
"ts-jest": "^29.1.1",
"typescript": "^5.3.3"
},

@@ -90,0 +99,0 @@ "publishConfig": {

@@ -5,3 +5,4 @@ # Shepherd

![Travis status](https://img.shields.io/travis/NerdWalletOSS/shepherd/master.svg?style=flat-square)
[![GitHub Workflow Status](https://img.shields.io/github/actions/workflow/status/NerdWalletOSS/shepherd/release.yml?style=flat-square)](https://github.com/NerdWalletOSS/shepherd/actions)
[![semantic-release: conventionalcommits](https://img.shields.io/badge/semantic--release-conventionalcommits-e10079?logo=semantic-release)](https://github.com/semantic-release/semantic-release)
![npm version](https://img.shields.io/npm/v/@nerdwallet/shepherd.svg?style=flat-square)

@@ -12,5 +13,5 @@ ![GitHub issues](https://img.shields.io/github/issues/NerdWalletOSS/shepherd.svg?style=flat-square)

* **Powerful**: You can write migration scripts using your favorite Unix commands, tools like [`jscodeshift`](https://github.com/facebook/jscodeshift), or scripts in your preferred programming language.
* **Easy**: With just a few commands, you can checkout dozens of repositories, apply changes, commit those changes, and open pull requests with detailed messages.
* **Flexible**: Ships with support for Git/GitHub, but can easily be extended to work with other version control products like Bitbucket, GitLab, or SVN.
- **Powerful**: You can write migration scripts using your favorite Unix commands, tools like [`jscodeshift`](https://github.com/facebook/jscodeshift), or scripts in your preferred programming language.
- **Easy**: With just a few commands, you can checkout dozens of repositories, apply changes, commit those changes, and open pull requests with detailed messages.
- **Flexible**: Ships with support for Git/GitHub, but can easily be extended to work with other version control products like Bitbucket, GitLab, or SVN.

@@ -27,2 +28,14 @@ For more high level context, this [blog post](https://www.nerdwallet.com/blog/engineering/shepherd-automating-code-changes/) covers the basics.

If using GitHub Enterprise, ensure the following environment variable is exported:
```
export SHEPHERD_GITHUB_ENTERPRISE_URL={company_github_enterprise_url} # e.g., github.test.com
```
If using SSH, ensure that your GITHUB_TOKEN is exported:
```
export GITHUB_TOKEN=<PAT>
```
Shepherd will now be available as the `shepherd` command in your shell:

@@ -47,6 +60,6 @@

* The person updating that library must communicate the change to consumers of the library
* The consumer must understand the change and how they have to update their own code
* The consumer must make the necessary changes in their own code
* The consumer must test, merge, and deploy those changes
- The person updating that library must communicate the change to consumers of the library
- The consumer must understand the change and how they have to update their own code
- The consumer must make the necessary changes in their own code
- The consumer must test, merge, and deploy those changes

@@ -77,13 +90,13 @@ Shepherd aims to help shift responsibility for the first three steps to the person actually making the change to the library. Since they have the best understanding of their change, they can write a code migration to automate that change and then use Shepherd to automate the process of applying that change to all relevant repos. Then the owners of the affected repos (who have the best understanding of their own code) can review and merge the changes. This process is especially efficient for teams who rely on continuous integration: automated tests can help repository owners have confidence that the code changes are working as expected.

* `id` specifies a unique identifier for this migration. It will be used as a branch name for this migration, and will be used internally by Shepherd to track state about the migration.
* `title` specifies a human-readable title for the migration that will be used as the commit message.
* `adapter` specifies what version control adapter should be used for performing operations on repos, as well as extra options for that adapter. Currently Shepherd only has a GitHub adapter, but you could create a Bitbucket or GitLab adapter if you don't use GitHub. Note that `search_query` is specific to the GitHub adapter: it uses GitHub's [code search qualifiers](https://help.github.com/articles/searching-code/) to identify repositories that are candidates for a migration. If a repository contains a file matching the search, it will be considered a candidate for this migration. As an alternative to `search_query`, GitHub adapter can be configured with `org: YOURGITHUBORGANIZATION`. When using `org`, every repo in the organization that is visible will be considered as a candidate for this migration.
* `search_type` (optional): specifies search type - either 'code' or 'repositories'. If repositories is specified, it does a [Github repository search](https://docs.github.com/en/free-pro-team@latest/github/searching-for-information-on-github/searching-for-repositories). Defaults to code search if not specified.
- `id` specifies a unique identifier for this migration. It will be used as a branch name for this migration, and will be used internally by Shepherd to track state about the migration.
- `title` specifies a human-readable title for the migration that will be used as the commit message.
- `adapter` specifies what version control adapter should be used for performing operations on repos, as well as extra options for that adapter. Currently Shepherd only has a GitHub adapter, but you could create a Bitbucket or GitLab adapter if you don't use GitHub. Note that `search_query` is specific to the GitHub adapter: it uses GitHub's [code search qualifiers](https://help.github.com/articles/searching-code/) to identify repositories that are candidates for a migration. If a repository contains a file matching the search, it will be considered a candidate for this migration. As an alternative to `search_query`, GitHub adapter can be configured with `org: YOURGITHUBORGANIZATION`. When using `org`, every repo in the organization that is visible will be considered as a candidate for this migration.
- `search_type` (optional): specifies search type - either 'code' or 'repositories'. If repositories is specified, it does a [GitHub repository search](https://docs.github.com/en/free-pro-team@latest/github/searching-for-information-on-github/searching-for-repositories). Defaults to code search if not specified.
The options under `hooks` specify the meat of a migration. They tell Shepherd how to determine if a repo should be migrated, how to actually perform the migration, how to generate a pull request message for each repository, and more. Each hook consists of one or more standard executables that Shepherd will execute in sequence.
* `should_migrate` is a sequence of commands to execute to determine if a repo actually requires a migration. If any of them exit with a non-zero value, that signifies to Shepherd that the repo should not be migrated. For instance, the second step in the above `should_migrate` hook would fail if the repo was last modified in 2017, since `grep` would exit with a non-zero value.
* `post_checkout` is a sequence of commands to be executed once a repo has been checked out and passed any `should_migrate` checks. This is a convenient place to do anything that will only need to be done once per repo, such as installing any dependencies.
* `apply` is a sequence of commands that will actually execute the migration. This example is very simple: we're just using `mv` to rename a file. However, this hook could contain arbitrarily many, potentially complex commands, depending on the requirements of your particular migration.
* `pr_message` is a sequence of commands that will be used to generate a pull request message for a repository. In the simplest case, this can just be a static message, but you could also programmatically generate a message that calls out particular things that might need human attention. Anything written to `stdout` will be used for the message. If multiple commands are specified, the output from each one will be concatenated together.
- `should_migrate` is a sequence of commands to execute to determine if a repo actually requires a migration. If any of them exit with a non-zero value, that signifies to Shepherd that the repo should not be migrated. For instance, the second step in the above `should_migrate` hook would fail if the repo was last modified in 2017, since `grep` would exit with a non-zero value.
- `post_checkout` is a sequence of commands to be executed once a repo has been checked out and passed any `should_migrate` checks. This is a convenient place to do anything that will only need to be done once per repo, such as installing any dependencies.
- `apply` is a sequence of commands that will actually execute the migration. This example is very simple: we're just using `mv` to rename a file. However, this hook could contain arbitrarily many, potentially complex commands, depending on the requirements of your particular migration.
- `pr_message` is a sequence of commands that will be used to generate a pull request message for a repository. In the simplest case, this can just be a static message, but you could also programmatically generate a message that calls out particular things that might need human attention. Anything written to `stdout` will be used for the message. If multiple commands are specified, the output from each one will be concatenated together.

@@ -94,6 +107,6 @@ `should_migrate` and `post_checkout` are optional; `apply` and `pr_message` are required.

* `SHEPHERD_REPO_DIR` is the absolute path to the repository being operated on. This will be the working directory when commands are executed.
* `SHEPHERD_DATA_DIR` is the absolute path to a special directory that can be used to persist state between steps. This would be useful if, for instance, a `jscodeshift` codemod in your `apply` hook generates a list of files that need human attention and you want to use that list in your `pr_message` hook.
* `SHEPHERD_BASE_BRANCH` is the name of the branch Shepherd will set up a pull-request against. This will often, _but not always_, be master. Only available for `apply` and later steps.
* `SHEPHERD_MIGRATION_DIR` is the absolute path to the directory containing your migration's `shepherd.yml` file. This is useful if you want to include a script with your migration spec and need to reference that command in a hook. For instance, if I have a script `pr.sh` that will generate a PR message: my `pr_message` hook might look something like this:
- `SHEPHERD_REPO_DIR` is the absolute path to the repository being operated on. This will be the working directory when commands are executed.
- `SHEPHERD_DATA_DIR` is the absolute path to a special directory that can be used to persist state between steps. This would be useful if, for instance, a `jscodeshift` codemod in your `apply` hook generates a list of files that need human attention and you want to use that list in your `pr_message` hook.
- `SHEPHERD_BASE_BRANCH` is the name of the branch Shepherd will set up a pull-request against. This will often, _but not always_, be master. Only available for `apply` and later steps.
- `SHEPHERD_MIGRATION_DIR` is the absolute path to the directory containing your migration's `shepherd.yml` file. This is useful if you want to include a script with your migration spec and need to reference that command in a hook. For instance, if I have a script `pr.sh` that will generate a PR message: my `pr_message` hook might look something like this:

@@ -103,6 +116,7 @@ ```yml

```
* `SHEPHERD_GIT_REVISION` (`git` and `github` adapters) is the current revision of the repository being operated on.
* `SHEPHERD_GITHUB_REPO_OWNER` (`github` adapter) is the owner of the repository being operated on. For example, if operating on the repository `https://github.com/NerdWalletOSS/shepherd`, this would be `NerdWalletOSS`.
* `SHEPHERD_GITHUB_REPO_NAME` (`github` adapter) is the name of the repository being operated on. For example, if operating on the repository `https://github.com/NerdWalletOSS/shepherd`, this would be `shepherd`.
- `SHEPHERD_GIT_REVISION` (`git` and `github` adapters) is the current revision of the repository being operated on.
- `SHEPHERD_GITHUB_REPO_OWNER` (`github` adapter) is the owner of the repository being operated on. For example, if operating on the repository `https://github.com/NerdWalletOSS/shepherd`, this would be `NerdWalletOSS`.
- `SHEPHERD_GITHUB_REPO_NAME` (`github` adapter) is the name of the repository being operated on. For example, if operating on the repository `https://github.com/NerdWalletOSS/shepherd`, this would be `shepherd`.
Commands follow standard Unix conventions: an exit code of 0 indicates a command succeeded, a non-zero exit code indicates failure.

@@ -122,9 +136,9 @@

* `checkout`: Determines which repositories are candidates for migration and clones or updates the repositories on your machine. Clones are "shallow", containing no git history. Uses `should_migrate` to decide if a repository should be kept after it's checked out.
* `apply`: Performs the migration using the `apply` hook discussed above.
* `commit`: Makes a commit with any changes that were made during the `apply` step, including adding newly-created files. The migration's `title` will be prepended with `[shepherd]` and used as the commit message.
* `push`: Pushes all commits to their respective repositories.
* `pr-preview`: Prints the commit message that would be used for each repository without actually creating a PR; uses the `pr_message` hook.
* `pr`: Creates a PR for each repo with the message generated from the `pr_message` hook.
* `version`: Prints Shepherd version
- `checkout`: Determines which repositories are candidates for migration and clones or updates the repositories on your machine. Clones are "shallow", containing no git history. Uses `should_migrate` to decide if a repository should be kept after it's checked out.
- `apply`: Performs the migration using the `apply` hook discussed above.
- `commit`: Makes a commit with any changes that were made during the `apply` step, including adding newly-created files. The migration's `title` will be prepended with `[shepherd]` and used as the commit message.
- `push`: Pushes all commits to their respective repositories.
- `pr-preview`: Prints the commit message that would be used for each repository without actually creating a PR; uses the `pr_message` hook.
- `pr`: Creates a PR for each repo with the message generated from the `pr_message` hook.
- `version`: Prints Shepherd version

@@ -152,2 +166,2 @@ By default, `checkout` will use the adapter to figure out which repositories to check out, and the remaining commands will operate on all checked-out repos. To only checkout a specific repo or to operate on only a subset of the checked-out repos, you can use the `--repos` flag, which specifies a comma-separated list of repos:

We use [ESLint](https://eslint.org/) to ensure a consistent coding style and to help prevent certain classes of problems. Run `yarn lint` to run the linter, and `yarn fix-lint` to automatically fix applicable problems.
We use [ESLint](https://eslint.org/) to ensure a consistent coding style and to help prevent certain classes of problems. Run `yarn lint` to run the linter, and `yarn fix-lint` to automatically fix applicable problems.

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc