changelog add

This commit is contained in:
stojdza 2024-01-16 16:37:49 +00:00
parent 8d883f7d4a
commit 0aa8f62cf7
68 changed files with 6160 additions and 0 deletions

View File

@ -0,0 +1,4 @@
Significance: minor
Type: update
Expand the coverage of the non-virtual product e2e test

View File

@ -0,0 +1,129 @@
"use strict";
// TypeScript-emitted helper implementing async/await on top of generators:
// drives `generator`, adopting each yielded value into Promise implementation
// P, and settles the returned promise with the generator's final value.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
// Wrap a plain value in P unless it is already an instance of P.
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
// Resume the generator with the resolved value of the previous `yield`.
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
// Throw the rejection reason back into the generator so try/catch works inside it.
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
// Advance the state machine until the generator reports completion.
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
// TypeScript-emitted helper that normalizes a require() result so `.default`
// is always usable: ES-module namespaces pass through untouched, while plain
// CommonJS exports are wrapped as { default: mod }.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
/**
* External dependencies
*/
const extra_typings_1 = require("@commander-js/extra-typings");
const simple_git_1 = __importDefault(require("simple-git"));
const path_1 = __importDefault(require("path"));
const fs_1 = require("fs");
/**
* Internal dependencies
*/
const logger_1 = require("../core/logger");
const environment_1 = require("../core/environment");
const git_1 = require("../core/git");
const github_1 = require("./lib/github");
const projects_1 = require("./lib/projects");
// The `changefile` CLI command: given a PR number, reads the changelog details
// from the pull request description, clones the repository, removes stale
// changefiles, writes a new changefile into every touched project, and
// commits/pushes the result back to the PR branch.
const program = new extra_typings_1.Command('changefile')
.description('Changelog utilities')
.option('-o --owner <owner>', 'Repository owner. Default: woocommerce', 'woocommerce')
.option('-n --name <name>', 'Repository name. Default: woocommerce', 'woocommerce')
.option('-d --dev-repo-path <devRepoPath>', 'Path to existing repo. Use this option to avoid cloning a fresh repo for development purposes. Note that using this option assumes dependencies are already installed.')
.argument('<pr-number>', 'Pull request number')
.action((prNumber, options) => __awaiter(void 0, void 0, void 0, function* () {
// Compiler-generated temporaries for the optional-chaining expressions below.
var _a, _b;
const { owner, name, devRepoPath } = options;
logger_1.Logger.startTask(`Getting pull request data for PR number ${prNumber}`);
const { prBody, headOwner, branch, fileName, head, base } = yield (0, github_1.getPullRequestData)({ owner, name }, prNumber);
logger_1.Logger.endTask();
// The automation is opt-in via a checkbox in the PR description.
if (!(0, github_1.shouldAutomateChangelog)(prBody)) {
logger_1.Logger.notice(`PR #${prNumber} does not have the "Automatically create a changelog entry from the details" checkbox checked. No changelog will be created.`);
process.exit(0);
}
const details = (0, github_1.getChangelogDetails)(prBody);
const { significance, type, message, comment } = details;
const changelogDetailsError = (0, github_1.getChangelogDetailsError)(details);
if (changelogDetailsError) {
// NOTE(review): per comments in github.js, Logger.error exits the process.
logger_1.Logger.error(changelogDetailsError);
}
logger_1.Logger.startTask(`Making a temporary clone of '${headOwner}/${name}'`);
// A dev repo path skips the clone entirely (assumes dependencies installed).
const tmpRepoPath = devRepoPath
? devRepoPath
: yield (0, git_1.cloneAuthenticatedRepo)({ owner: headOwner, name }, false);
logger_1.Logger.endTask();
logger_1.Logger.notice(`Temporary clone of '${headOwner}/${name}' created at ${tmpRepoPath}`);
// If a pull request is coming from a contributor's fork's trunk branch, we don't need to check out the remote branch because it's already available as part of the clone.
if (branch !== 'trunk') {
logger_1.Logger.notice(`Checking out remote branch ${branch}`);
yield (0, git_1.checkoutRemoteBranch)(tmpRepoPath, branch, false);
}
logger_1.Logger.notice(`Getting all touched projects requiring a changelog`);
const touchedProjectsRequiringChangelog = yield (0, projects_1.getTouchedProjectsRequiringChangelog)(tmpRepoPath, base, head, fileName, owner, name);
try {
const allProjectPaths = yield (0, projects_1.getAllProjectPaths)(tmpRepoPath);
logger_1.Logger.notice('Removing existing changelog files in case a change is reverted and the entry is no longer needed');
allProjectPaths.forEach((projectPath) => {
// These shadow the outer temporaries for the optional chaining below.
var _a, _b;
const composerFilePath = path_1.default.join(tmpRepoPath, projectPath, 'composer.json');
// Projects without a composer.json cannot carry a changefile; skip them.
if (!(0, fs_1.existsSync)(composerFilePath)) {
return;
}
// Figure out where the changelog files belong for this project.
const composerFile = JSON.parse((0, fs_1.readFileSync)(composerFilePath, {
encoding: 'utf-8',
}));
// Changefile directory comes from extra.changelogger['changes-dir'],
// falling back to the conventional 'changelog' directory.
const changelogFilePath = path_1.default.join(tmpRepoPath, projectPath, (_b = (_a = composerFile.extra) === null || _a === void 0 ? void 0 : _a.changelogger['changes-dir']) !== null && _b !== void 0 ? _b : 'changelog', fileName);
if (!(0, fs_1.existsSync)(changelogFilePath)) {
return;
}
logger_1.Logger.notice(`Remove existing changelog file ${changelogFilePath}`);
(0, fs_1.rmSync)(changelogFilePath);
});
if (!touchedProjectsRequiringChangelog) {
logger_1.Logger.notice('No projects require a changelog');
process.exit(0);
}
// touchedProjectsRequiringChangelog maps project name -> project path.
for (const project in touchedProjectsRequiringChangelog) {
const projectPath = path_1.default.join(tmpRepoPath, touchedProjectsRequiringChangelog[project]);
// NOTE(review): the trailing `))` in this message contains a stray paren.
logger_1.Logger.notice(`Generating changefile for ${project} (${projectPath}))`);
// Figure out where the changelog file belongs for this project.
const composerFile = JSON.parse((0, fs_1.readFileSync)(path_1.default.join(projectPath, 'composer.json'), { encoding: 'utf-8' }));
const changelogFilePath = path_1.default.join(projectPath, (_b = (_a = composerFile.extra) === null || _a === void 0 ? void 0 : _a.changelogger['changes-dir']) !== null && _b !== void 0 ? _b : 'changelog', fileName);
// Write the changefile using the correct format.
let fileContent = `Significance: ${significance}\n`;
fileContent += `Type: ${type}\n`;
if (comment) {
fileContent += `Comment: ${comment}\n`;
}
fileContent += `\n${message}`;
(0, fs_1.writeFileSync)(changelogFilePath, fileContent);
}
}
catch (e) {
logger_1.Logger.error(e);
}
const touchedProjectsString = Object.keys(touchedProjectsRequiringChangelog).join(', ');
logger_1.Logger.notice(`Changelogs created for ${touchedProjectsString}`);
// Neutralize git hooks in the temp clone so automation commits are not blocked.
const git = (0, simple_git_1.default)({
baseDir: tmpRepoPath,
config: ['core.hooksPath=/dev/null'],
});
// CI needs an identity configured before it can commit.
if ((0, environment_1.isGithubCI)()) {
yield git.raw('config', '--global', 'user.email', 'github-actions@github.com');
yield git.raw('config', '--global', 'user.name', 'github-actions');
}
// An empty short status means the changefiles already existed unchanged.
const shortStatus = yield git.raw(['status', '--short']);
if (shortStatus.length === 0) {
logger_1.Logger.notice(`No changes in changelog files. Skipping commit and push.`);
process.exit(0);
}
logger_1.Logger.notice(`Adding and committing changes`);
yield git.add('.');
yield git.commit(`Add changefile(s) from automation for the following project(s): ${touchedProjectsString}`);
yield git.push('origin', branch);
logger_1.Logger.notice(`Pushed changes to ${branch}`);
}));
exports.default = program;

View File

@ -0,0 +1,441 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
* Internal dependencies
*/
const github_1 = require("../github");
const logger_1 = require("../../../core/logger");
// Mock the Logger so tests can assert on Logger.error calls without the real
// logger's side effects (the comments in github.js say it exits the process).
jest.mock('../../../core/logger', () => {
return {
Logger: {
error: jest.fn(),
},
};
});
// Tests for parsing the Significance checkboxes out of a PR description.
describe('getChangelogSignificance', () => {
// Exactly one box checked: the lowercased label is returned.
it('should return the selected significance', () => {
const body = '### Changelog entry\r\n' +
'\r\n' +
'<!-- You can optionally choose to enter a changelog entry by checking the box and supplying data. -->\r\n' +
'\r\n' +
'- [x] Automatically create a changelog entry from the details below.\r\n' +
'\r\n' +
'<details>\r\n' +
'\r\n' +
'#### Significance\r\n' +
'<!-- Choose only one -->\r\n' +
'- [x] Patch\r\n' +
'- [ ] Minor\r\n' +
'- [ ] Major\r\n' +
'\r\n' +
'#### Type\r\n' +
'<!-- Choose only one -->\r\n' +
'- [x] Fix - Fixes an existing bug\r\n' +
'- [ ] Add - Adds functionality\r\n' +
'- [ ] Update - Update existing functionality\r\n' +
'- [ ] Dev - Development related task\r\n' +
'- [ ] Tweak - A minor adjustment to the codebase\r\n' +
'- [ ] Performance - Address performance issues\r\n' +
'- [ ] Enhancement\r\n' +
'\r\n' +
'#### Message ' +
'<!-- Add a changelog message here -->\r\n' +
'This is a very useful fix.\r\n' +
'\r\n' +
'#### Comment ' +
`<!-- If the changes in this pull request don't warrant a changelog entry, you can alternatively supply a comment here. Note that comments are only accepted with a significance of "Patch" -->\r\n` +
'\r\n' +
'</details>';
const significance = (0, github_1.getChangelogSignificance)(body);
expect(significance).toBe('patch');
});
// No box checked: undefined is returned and an error is logged.
it('should error when no significance selected', () => {
const body = '### Changelog entry\r\n' +
'\r\n' +
'<!-- You can optionally choose to enter a changelog entry by checking the box and supplying data. -->\r\n' +
'\r\n' +
'- [x] Automatically create a changelog entry from the details below.\r\n' +
'\r\n' +
'<details>\r\n' +
'\r\n' +
'#### Significance\r\n' +
'<!-- Choose only one -->\r\n' +
'- [ ] Patch\r\n' +
'- [ ] Minor\r\n' +
'- [ ] Major\r\n' +
'\r\n' +
'#### Type\r\n' +
'<!-- Choose only one -->\r\n' +
'- [x] Fix - Fixes an existing bug\r\n' +
'- [ ] Add - Adds functionality\r\n' +
'- [ ] Update - Update existing functionality\r\n' +
'- [ ] Dev - Development related task\r\n' +
'- [ ] Tweak - A minor adjustment to the codebase\r\n' +
'- [ ] Performance - Address performance issues\r\n' +
'- [ ] Enhancement\r\n' +
'\r\n' +
'#### Message ' +
'<!-- Add a changelog message here -->\r\n' +
'This is a very useful fix.\r\n' +
'\r\n' +
'#### Comment ' +
`<!-- If the changes in this pull request don't warrant a changelog entry, you can alternatively supply a comment here. Note that comments are only accepted with a significance of "Patch" -->\r\n` +
'\r\n' +
'</details>';
const significance = (0, github_1.getChangelogSignificance)(body);
expect(significance).toBeUndefined();
expect(logger_1.Logger.error).toHaveBeenCalledWith('No changelog significance found');
});
// Several boxes checked: undefined is returned and an error is logged.
it('should error when more than one significance selected', () => {
const body = '### Changelog entry\r\n' +
'\r\n' +
'<!-- You can optionally choose to enter a changelog entry by checking the box and supplying data. -->\r\n' +
'\r\n' +
'- [x] Automatically create a changelog entry from the details below.\r\n' +
'\r\n' +
'<details>\r\n' +
'\r\n' +
'#### Significance\r\n' +
'<!-- Choose only one -->\r\n' +
'- [x] Patch\r\n' +
'- [x] Minor\r\n' +
'- [ ] Major\r\n' +
'\r\n' +
'#### Type\r\n' +
'<!-- Choose only one -->\r\n' +
'- [x] Fix - Fixes an existing bug\r\n' +
'- [ ] Add - Adds functionality\r\n' +
'- [ ] Update - Update existing functionality\r\n' +
'- [ ] Dev - Development related task\r\n' +
'- [ ] Tweak - A minor adjustment to the codebase\r\n' +
'- [ ] Performance - Address performance issues\r\n' +
'- [ ] Enhancement\r\n' +
'\r\n' +
'#### Message ' +
'<!-- Add a changelog message here -->\r\n' +
'This is a very useful fix.\r\n' +
'\r\n' +
'#### Comment ' +
`<!-- If the changes in this pull request don't warrant a changelog entry, you can alternatively supply a comment here. Note that comments are only accepted with a significance of "Patch" -->\r\n` +
'\r\n' +
'</details>';
const significance = (0, github_1.getChangelogSignificance)(body);
expect(significance).toBeUndefined();
expect(logger_1.Logger.error).toHaveBeenCalledWith('Multiple changelog significances found. Only one can be entered');
});
});
// Tests for parsing the Type checkboxes out of a PR description.
describe('getChangelogType', () => {
// Exactly one type checked: the lowercased label is returned.
it('should return the selected changelog type', () => {
const body = '### Changelog entry\r\n' +
'\r\n' +
'<!-- You can optionally choose to enter a changelog entry by checking the box and supplying data. -->\r\n' +
'\r\n' +
'- [x] Automatically create a changelog entry from the details below.\r\n' +
'\r\n' +
'<details>\r\n' +
'\r\n' +
'#### Significance\r\n' +
'<!-- Choose only one -->\r\n' +
'- [x] Patch\r\n' +
'- [ ] Minor\r\n' +
'- [ ] Major\r\n' +
'\r\n' +
'#### Type\r\n' +
'<!-- Choose only one -->\r\n' +
'- [x] Fix - Fixes an existing bug\r\n' +
'- [ ] Add - Adds functionality\r\n' +
'- [ ] Update - Update existing functionality\r\n' +
'- [ ] Dev - Development related task\r\n' +
'- [ ] Tweak - A minor adjustment to the codebase\r\n' +
'- [ ] Performance - Address performance issues\r\n' +
'- [ ] Enhancement\r\n' +
'\r\n' +
'#### Message ' +
'<!-- Add a changelog message here -->\r\n' +
'This is a very useful fix.\r\n' +
'\r\n' +
'#### Comment ' +
`<!-- If the changes in this pull request don't warrant a changelog entry, you can alternatively supply a comment here. Note that comments are only accepted with a significance of "Patch" -->\r\n` +
'\r\n' +
'</details>';
const type = (0, github_1.getChangelogType)(body);
expect(type).toBe('fix');
});
// No type checked: undefined is returned and an error is logged.
it('should error when no type selected', () => {
const body = '### Changelog entry\r\n' +
'\r\n' +
'<!-- You can optionally choose to enter a changelog entry by checking the box and supplying data. -->\r\n' +
'\r\n' +
'- [x] Automatically create a changelog entry from the details below.\r\n' +
'\r\n' +
'<details>\r\n' +
'\r\n' +
'#### Significance\r\n' +
'<!-- Choose only one -->\r\n' +
'- [x] Patch\r\n' +
'- [ ] Minor\r\n' +
'- [ ] Major\r\n' +
'\r\n' +
'#### Type\r\n' +
'<!-- Choose only one -->\r\n' +
'- [ ] Fix - Fixes an existing bug\r\n' +
'- [ ] Add - Adds functionality\r\n' +
'- [ ] Update - Update existing functionality\r\n' +
'- [ ] Dev - Development related task\r\n' +
'- [ ] Tweak - A minor adjustment to the codebase\r\n' +
'- [ ] Performance - Address performance issues\r\n' +
'- [ ] Enhancement\r\n' +
'\r\n' +
'#### Message ' +
'<!-- Add a changelog message here -->\r\n' +
'This is a very useful fix.\r\n' +
'\r\n' +
'#### Comment ' +
`<!-- If the changes in this pull request don't warrant a changelog entry, you can alternatively supply a comment here. Note that comments are only accepted with a significance of "Patch" -->\r\n` +
'\r\n' +
'</details>';
const type = (0, github_1.getChangelogType)(body);
expect(type).toBeUndefined();
expect(logger_1.Logger.error).toHaveBeenCalledWith('No changelog type found');
});
// Several types checked: undefined is returned and an error is logged.
it('should error more than one type selected', () => {
const body = '### Changelog entry\r\n' +
'\r\n' +
'<!-- You can optionally choose to enter a changelog entry by checking the box and supplying data. -->\r\n' +
'\r\n' +
'- [x] Automatically create a changelog entry from the details below.\r\n' +
'\r\n' +
'<details>\r\n' +
'\r\n' +
'#### Significance\r\n' +
'<!-- Choose only one -->\r\n' +
'- [x] Patch\r\n' +
'- [ ] Minor\r\n' +
'- [ ] Major\r\n' +
'\r\n' +
'#### Type\r\n' +
'<!-- Choose only one -->\r\n' +
'- [ ] Fix - Fixes an existing bug\r\n' +
'- [ ] Add - Adds functionality\r\n' +
'- [x] Update - Update existing functionality\r\n' +
'- [x] Dev - Development related task\r\n' +
'- [x] Tweak - A minor adjustment to the codebase\r\n' +
'- [ ] Performance - Address performance issues\r\n' +
'- [ ] Enhancement\r\n' +
'\r\n' +
'#### Message ' +
'<!-- Add a changelog message here -->\r\n' +
'This is a very useful fix.\r\n' +
'\r\n' +
'#### Comment ' +
`<!-- If the changes in this pull request don't warrant a changelog entry, you can alternatively supply a comment here. Note that comments are only accepted with a significance of "Patch" -->\r\n` +
'\r\n' +
'</details>';
const type = (0, github_1.getChangelogType)(body);
expect(type).toBeUndefined();
expect(logger_1.Logger.error).toHaveBeenCalledWith('Multiple changelog types found. Only one can be entered');
});
});
// Tests for the aggregate extractor that returns significance/type/message/comment.
describe('getChangelogDetails', () => {
// Happy path: all four fields extracted; absent comment comes back as ''.
it('should return the changelog details', () => {
const body = '### Changelog entry\r\n' +
'\r\n' +
'<!-- You can optionally choose to enter a changelog entry by checking the box and supplying data. -->\r\n' +
'\r\n' +
'- [x] Automatically create a changelog entry from the details below.\r\n' +
'\r\n' +
'<details>\r\n' +
'\r\n' +
'#### Significance\r\n' +
'<!-- Choose only one -->\r\n' +
'- [x] Patch\r\n' +
'- [ ] Minor\r\n' +
'- [ ] Major\r\n' +
'\r\n' +
'#### Type\r\n' +
'<!-- Choose only one -->\r\n' +
'- [x] Fix - Fixes an existing bug\r\n' +
'- [ ] Add - Adds functionality\r\n' +
'- [ ] Update - Update existing functionality\r\n' +
'- [ ] Dev - Development related task\r\n' +
'- [ ] Tweak - A minor adjustment to the codebase\r\n' +
'- [ ] Performance - Address performance issues\r\n' +
'- [ ] Enhancement\r\n' +
'\r\n' +
'#### Message ' +
'<!-- Add a changelog message here -->\r\n' +
'This is a very useful fix.\r\n' +
'\r\n' +
'#### Comment ' +
`<!-- If the changes in this pull request don't warrant a changelog entry, you can alternatively supply a comment here. Note that comments are only accepted with a significance of "Patch" -->\r\n` +
'\r\n' +
'</details>';
const details = (0, github_1.getChangelogDetails)(body);
expect(details.significance).toEqual('patch');
expect(details.type).toEqual('fix');
expect(details.message).toEqual('This is a very useful fix.');
expect(details.comment).toEqual('');
});
// Extraction itself returns both fields; validation elsewhere rejects the pair.
it('should provide comment and message when both are added', () => {
const body = '### Changelog entry\r\n' +
'\r\n' +
'<!-- You can optionally choose to enter a changelog entry by checking the box and supplying data. -->\r\n' +
'\r\n' +
'- [x] Automatically create a changelog entry from the details below.\r\n' +
'\r\n' +
'<details>\r\n' +
'\r\n' +
'#### Significance\r\n' +
'<!-- Choose only one -->\r\n' +
'- [x] Patch\r\n' +
'- [ ] Minor\r\n' +
'- [ ] Major\r\n' +
'\r\n' +
'#### Type\r\n' +
'<!-- Choose only one -->\r\n' +
'- [x] Fix - Fixes an existing bug\r\n' +
'- [ ] Add - Adds functionality\r\n' +
'- [ ] Update - Update existing functionality\r\n' +
'- [ ] Dev - Development related task\r\n' +
'- [ ] Tweak - A minor adjustment to the codebase\r\n' +
'- [ ] Performance - Address performance issues\r\n' +
'- [ ] Enhancement\r\n' +
'\r\n' +
'#### Message ' +
'<!-- Add a changelog message here -->\r\n' +
'This is a very useful fix.\r\n' +
'\r\n' +
'#### Comment ' +
`<!-- If the changes in this pull request don't warrant a changelog entry, you can alternatively supply a comment here. Note that comments are only accepted with a significance of "Patch" -->\r\n` +
'This is a very useful comment.\r\n' +
'\r\n' +
'</details>';
const details = (0, github_1.getChangelogDetails)(body);
expect(details.message).toEqual('This is a very useful fix.');
expect(details.comment).toEqual('This is a very useful comment.');
});
// Multi-line message/comment bodies are flattened to single spaces.
it('should remove newlines from message and comment', () => {
const body = '### Changelog entry\r\n' +
'\r\n' +
'<!-- You can optionally choose to enter a changelog entry by checking the box and supplying data. -->\r\n' +
'\r\n' +
'- [x] Automatically create a changelog entry from the details below.\r\n' +
'\r\n' +
'<details>\r\n' +
'\r\n' +
'#### Significance\r\n' +
'<!-- Choose only one -->\r\n' +
'- [x] Patch\r\n' +
'- [ ] Minor\r\n' +
'- [ ] Major\r\n' +
'\r\n' +
'#### Type\r\n' +
'<!-- Choose only one -->\r\n' +
'- [x] Fix - Fixes an existing bug\r\n' +
'- [ ] Add - Adds functionality\r\n' +
'- [ ] Update - Update existing functionality\r\n' +
'- [ ] Dev - Development related task\r\n' +
'- [ ] Tweak - A minor adjustment to the codebase\r\n' +
'- [ ] Performance - Address performance issues\r\n' +
'- [ ] Enhancement\r\n' +
'\r\n' +
'#### Message ' +
'<!-- Add a changelog message here -->\r\n' +
'This is a very useful fix.\r\n' +
'I promise!\r\n' +
'\r\n' +
'#### Comment ' +
`<!-- If the changes in this pull request don't warrant a changelog entry, you can alternatively supply a comment here. Note that comments are only accepted with a significance of "Patch" -->\r\n` +
'This is a very useful comment.\r\n' +
"I don't promise!\r\n" +
'\r\n' +
'</details>';
const details = (0, github_1.getChangelogDetails)(body);
expect(details.message).toEqual('This is a very useful fix. I promise!');
expect(details.comment).toEqual("This is a very useful comment. I don't promise!");
});
// Extraction does not enforce the patch-only rule for comments.
it('should return a comment even when it is entered with a significance other than patch', () => {
const body = '### Changelog entry\r\n' +
'\r\n' +
'<!-- You can optionally choose to enter a changelog entry by checking the box and supplying data. -->\r\n' +
'\r\n' +
'- [x] Automatically create a changelog entry from the details below.\r\n' +
'\r\n' +
'<details>\r\n' +
'\r\n' +
'#### Significance\r\n' +
'<!-- Choose only one -->\r\n' +
'- [ ] Patch\r\n' +
'- [x] Minor\r\n' +
'- [ ] Major\r\n' +
'\r\n' +
'#### Type\r\n' +
'<!-- Choose only one -->\r\n' +
'- [x] Fix - Fixes an existing bug\r\n' +
'- [ ] Add - Adds functionality\r\n' +
'- [ ] Update - Update existing functionality\r\n' +
'- [ ] Dev - Development related task\r\n' +
'- [ ] Tweak - A minor adjustment to the codebase\r\n' +
'- [ ] Performance - Address performance issues\r\n' +
'- [ ] Enhancement\r\n' +
'\r\n' +
'#### Message ' +
'<!-- Add a changelog message here -->\r\n' +
'\r\n' +
'#### Comment ' +
`<!-- If the changes in this pull request don't warrant a changelog entry, you can alternatively supply a comment here. Note that comments are only accepted with a significance of "Patch" -->\r\n` +
'This is a very useful comment.\r\n' +
'\r\n' +
'</details>';
const details = (0, github_1.getChangelogDetails)(body);
expect(details.comment).toEqual('This is a very useful comment.');
expect(details.significance).toEqual('minor');
});
});
// Tests for the validation step that turns extracted details into an error string.
describe('getChangelogDetailsError', () => {
// Message and comment are mutually exclusive.
it('should return an error when both a message and comment provided', () => {
const error = (0, github_1.getChangelogDetailsError)({
message: 'message',
comment: 'comment',
type: 'fix',
significance: 'minor',
});
expect(error).toEqual('Both a message and comment were found. Only one can be entered');
});
// Comments are only allowed with patch significance.
it('should return an error when a comment is provided with a significance other than patch', () => {
const error = (0, github_1.getChangelogDetailsError)({
message: '',
comment: 'comment',
type: 'fix',
significance: 'minor',
});
expect(error).toEqual('Only patch changes can have a comment. Please change the significance to patch or remove the comment');
});
// Empty significance is rejected.
it('should return an error when no significance found', () => {
const error = (0, github_1.getChangelogDetailsError)({
message: 'message',
comment: '',
type: 'fix',
significance: '',
});
expect(error).toEqual('No changelog significance found');
});
// Empty type is rejected.
it('should return an error when no type found', () => {
const error = (0, github_1.getChangelogDetailsError)({
message: 'message',
comment: '',
type: '',
significance: 'minor',
});
expect(error).toEqual('No changelog type found');
});
// At least one of message/comment must be present.
it('should return an error when neither a comment or message is provided', () => {
const error = (0, github_1.getChangelogDetailsError)({
message: '',
comment: '',
type: 'fix',
significance: 'minor',
});
expect(error).toEqual('No changelog message or comment found');
});
});

View File

@ -0,0 +1,112 @@
"use strict";
// TypeScript-emitted helper implementing async/await on top of generators:
// drives `generator`, adopting each yielded value into Promise implementation
// P, and settles the returned promise with the generator's final value.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
// Wrap a plain value in P unless it is already an instance of P.
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
// Resume the generator with the resolved value of the previous `yield`.
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
// Throw the rejection reason back into the generator so try/catch works inside it.
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
// Advance the state machine until the generator reports completion.
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
// TypeScript-emitted helper that normalizes a require() result so `.default`
// is always usable: ES-module namespaces pass through untouched, while plain
// CommonJS exports are wrapped as { default: mod }.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
// Mock uuid so any generated identifiers are deterministic in these tests.
jest.mock('uuid', () => {
return {
v4: jest.fn(() => 1),
};
});
/**
* External dependencies
*/
const path_1 = __importDefault(require("path"));
/**
* Internal dependencies
*/
const projects_1 = require("../projects");
// Minimal pnpm-workspace.yaml content used to exercise the project-discovery
// helpers, including a glob entry and several literal entries.
const sampleWorkspaceYaml = `
packages:
- 'folder-with-lots-of-projects/*'
- 'projects/cool-project'
- 'projects/very-cool-project'
- 'interesting-project'
`;
// Fixture repository expected to live next to this test file.
const tmpRepoPath = path_1.default.join(__dirname, 'test-repo');
// Tests for project discovery and the touched-project -> changelogger mapping.
describe('Changelog project functions', () => {
// Workspace globs are expanded against the fixture repo; literals pass through.
it('getAllProjectsPathsFromWorkspace should provide a list of all projects supplied by pnpm-workspace.yml', () => __awaiter(void 0, void 0, void 0, function* () {
const projects = yield (0, projects_1.getAllProjectsPathsFromWorkspace)(tmpRepoPath, sampleWorkspaceYaml);
const expectedProjects = [
'folder-with-lots-of-projects/project-b',
'folder-with-lots-of-projects/project-a',
'projects/cool-project',
'projects/very-cool-project',
'interesting-project',
];
expectedProjects.forEach((expectedProject) => {
expect(projects).toContain(expectedProject);
});
expect(projects).toHaveLength(expectedProjects.length);
}));
// Only fixture projects configured for Jetpack changelogger are kept.
it('getChangeloggerProjectPaths should provide a list of all projects that use Jetpack changelogger', () => __awaiter(void 0, void 0, void 0, function* () {
const projects = yield (0, projects_1.getAllProjectsPathsFromWorkspace)(tmpRepoPath, sampleWorkspaceYaml);
const changeloggerProjects = yield (0, projects_1.getChangeloggerProjectPaths)(tmpRepoPath, projects);
const expectedChangeLoggerProjects = [
'folder-with-lots-of-projects/project-b',
'folder-with-lots-of-projects/project-a',
'projects/very-cool-project',
];
expectedChangeLoggerProjects.forEach((expectedChangeLoggerProject) => {
expect(changeloggerProjects).toContain(expectedChangeLoggerProject);
});
expect(changeloggerProjects).toHaveLength(expectedChangeLoggerProjects.length);
}));
// Intersection of touched files and changelogger projects, keyed by project.
it('getTouchedChangeloggerProjectsPathsMappedToProjects should combine touched and changelogger projects and return a list that is a subset of both', () => __awaiter(void 0, void 0, void 0, function* () {
const touchedFiles = [
'folder-with-lots-of-projects/project-b/src/index.js',
'projects/very-cool-project/src/index.js',
];
const changeLoggerProjects = [
'folder-with-lots-of-projects/project-b',
'folder-with-lots-of-projects/project-a',
'projects/very-cool-project',
];
const intersectedProjects = (0, projects_1.getTouchedChangeloggerProjectsPathsMappedToProjects)(touchedFiles, changeLoggerProjects);
expect(intersectedProjects).toMatchObject({
'folder-with-lots-of-projects/project-b': 'folder-with-lots-of-projects/project-b',
'projects/very-cool-project': 'projects/very-cool-project',
});
}));
// Plugin and JS-package paths are keyed by their published names.
it('getTouchedChangeloggerProjectsPathsMappedToProjects should map plugins and js packages to the correct name', () => __awaiter(void 0, void 0, void 0, function* () {
const touchedFiles = [
'plugins/beta-tester/src/index.js',
'plugins/woocommerce/src/index.js',
'packages/js/components/src/index.js',
'packages/js/data/src/index.js',
];
const changeLoggerProjects = [
'plugins/woocommerce',
'plugins/beta-tester',
'packages/js/data',
'packages/js/components',
];
const intersectedProjects = (0, projects_1.getTouchedChangeloggerProjectsPathsMappedToProjects)(touchedFiles, changeLoggerProjects);
expect(intersectedProjects).toMatchObject({
woocommerce: 'plugins/woocommerce',
'beta-tester': 'plugins/beta-tester',
'@woocommerce/components': 'packages/js/components',
'@woocommerce/data': 'packages/js/data',
});
}));
// Touched woocommerce-admin files are attributed to woocommerce core.
it('getTouchedChangeloggerProjectsPathsMappedToProjects should handle woocommerce-admin projects mapped to woocommerce core', () => __awaiter(void 0, void 0, void 0, function* () {
const touchedFiles = [
'plugins/beta-tester/src/index.js',
'plugins/woocommerce-admin/src/index.js',
];
const changeLoggerProjects = ['plugins/woocommerce'];
const intersectedProjects = (0, projects_1.getTouchedChangeloggerProjectsPathsMappedToProjects)(touchedFiles, changeLoggerProjects);
expect(intersectedProjects).toMatchObject({
woocommerce: 'plugins/woocommerce',
});
}));
});

View File

@ -0,0 +1,181 @@
"use strict";
// TypeScript-emitted helper implementing async/await on top of generators:
// drives `generator`, adopting each yielded value into Promise implementation
// P, and settles the returned promise with the generator's final value.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
// Wrap a plain value in P unless it is already an instance of P.
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
// Resume the generator with the resolved value of the previous `yield`.
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
// Throw the rejection reason back into the generator so try/catch works inside it.
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
// Advance the state machine until the generator reports completion.
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getChangelogDetailsError = exports.getChangelogDetails = exports.getChangelogComment = exports.getChangelogMessage = exports.getChangelogType = exports.getChangelogSignificance = exports.shouldAutomateChangelog = exports.getPullRequestData = void 0;
/**
* Internal dependencies
*/
const repo_1 = require("../../core/github/repo");
const logger_1 = require("../../core/logger");
/**
* Get relevant data from a pull request.
*
* @param {Object} options
* @param {string} options.owner repository owner.
* @param {string} options.name repository name.
* @param {string} prNumber pull request number.
* @return {Promise<object>} pull request data.
*/
const getPullRequestData = (options, prNumber) => __awaiter(void 0, void 0, void 0, function* () {
    const { owner, name } = options;
    // Fetch the PR payload once; everything else is derived from it.
    const prData = yield (0, repo_1.getPullRequest)({ owner, name, prNumber });
    const isCommunityPR = (0, repo_1.isCommunityPullRequest)(prData, owner, name);
    // Community PRs come from a contributor's fork, so the head owner differs
    // from the repository owner.
    const headOwner = isCommunityPR ? prData.head.repo.owner.login : owner;
    const branch = prData.head.ref;
    return {
        prBody: prData.body,
        isCommunityPR,
        headOwner,
        branch,
        // Changefile name: "<pr>-<branch>" with slashes flattened to dashes.
        fileName: `${prNumber}-${branch.replace(/\//g, '-')}`,
        head: prData.head.sha,
        base: prData.base.sha,
    };
});
exports.getPullRequestData = getPullRequestData;
/**
* Determine if a pull request description activates the changelog automation.
*
* @param {string} body pull request description.
* @return {boolean} if the pull request description activates the changelog automation.
*/
const shouldAutomateChangelog = (body) => {
    // The PR template renders this as a checkbox; a checked box ("[x]")
    // opts the pull request in to changelog automation. The original regex
    // contained no metacharacters, so a plain substring test is equivalent.
    const checkedBox = '[x] Automatically create a changelog entry from the details';
    return body.includes(checkedBox);
};
exports.shouldAutomateChangelog = shouldAutomateChangelog;
/**
* Get the changelog significance from a pull request description.
*
* @param {string} body pull request description.
* @return {void|string} changelog significance.
*/
const getChangelogSignificance = (body) => {
    // Collect every checked significance box in one pass. Using matchAll
    // avoids the previous stateful pairing of String.match() and
    // RegExp.exec() on a single /g regex (which silently depends on
    // lastIndex being reset), and \r?\n accepts both CRLF and LF bodies.
    const matches = Array.from(body.matchAll(/\[x\] (Patch|Minor|Major)\r?\n/gm));
    if (matches.length === 0) {
        logger_1.Logger.error('No changelog significance found');
        // Logger.error has a process.exit( 1 ) call, this return is purely for testing purposes.
        return;
    }
    if (matches.length > 1) {
        logger_1.Logger.error('Multiple changelog significances found. Only one can be entered');
        // Logger.error has a process.exit( 1 ) call, this return is purely for testing purposes.
        return;
    }
    // Capture group 1 is the label; changefiles use the lowercase form.
    return matches[0][1].toLowerCase();
};
exports.getChangelogSignificance = getChangelogSignificance;
/**
* Get the changelog type from a pull request description.
*
* @param {string} body pull request description.
* @return {void|string} changelog type.
*/
const getChangelogType = (body) => {
    // Collect every checked type box in one pass (matchAll avoids the
    // previous stateful match()/exec() pairing on one /g regex). The
    // lookahead accepts either the " - description" suffix or a bare line
    // ending: the PR template's "Enhancement" option has no " -" suffix, so
    // the old regex (which required " -") could never match it.
    const matches = Array.from(body.matchAll(/\[x\] (Fix|Add|Update|Dev|Tweak|Performance|Enhancement)(?= -|\r?\n|$)/gm));
    if (matches.length === 0) {
        logger_1.Logger.error('No changelog type found');
        // Logger.error has a process.exit( 1 ) call, this return is purely for testing purposes.
        return;
    }
    if (matches.length > 1) {
        logger_1.Logger.error('Multiple changelog types found. Only one can be entered');
        // Logger.error has a process.exit( 1 ) call, this return is purely for testing purposes.
        return;
    }
    // Capture group 1 is the label; changefiles use the lowercase form.
    return matches[0][1].toLowerCase();
};
exports.getChangelogType = getChangelogType;
/**
* Get the changelog message from a pull request description.
*
* @param {string} body pull request description.
* @return {void|string} changelog message.
*/
const getChangelogMessage = (body) => {
    // Everything between the "#### Message" heading (and its optional HTML
    // comment) and the "#### Comment" heading.
    const messageRegex = /#### Message ?(<!--(.*)-->)?(.*)#### Comment/gms;
    const match = messageRegex.exec(body);
    if (!match) {
        logger_1.Logger.error('No changelog message found');
        // Logger.error has a process.exit( 1 ) call, this return is purely for testing purposes.
        // Without it (unlike the sibling getters), a stubbed Logger.error would let
        // execution fall through to `match[3]` and throw a TypeError on null.
        return;
    }
    let message = match[3].trim();
    // Newlines break the formatting of the changelog, so we replace them with spaces.
    message = message.replace(/\r\n|\n/g, ' ');
    return message;
};
exports.getChangelogMessage = getChangelogMessage;
/**
* Get the changelog comment from a pull request description.
*
* @param {string} body pull request description.
* @return {void|string} changelog comment.
*/
const getChangelogComment = (body) => {
    // Everything between the "#### Comment" heading (and its optional HTML
    // comment) and the closing </details> tag; absent section yields ''.
    const commentRegex = /#### Comment ?(<!--(.*)-->)?(.*)<\/details>/gms;
    const found = commentRegex.exec(body);
    if (!found) {
        return '';
    }
    // Newlines break the formatting of the changelog, so we replace them with spaces.
    return found[3].trim().replace(/\r\n|\n/g, ' ');
};
exports.getChangelogComment = getChangelogComment;
/**
* Get the changelog details from a pull request description.
*
* @param {string} body Pull request description
* @return {Object} Changelog details
*/
const getChangelogDetails = (body) => {
    // Each getter re-parses the PR description independently; aggregate the
    // four pieces into a single details record.
    const details = {};
    details.significance = (0, exports.getChangelogSignificance)(body);
    details.type = (0, exports.getChangelogType)(body);
    details.message = (0, exports.getChangelogMessage)(body);
    details.comment = (0, exports.getChangelogComment)(body);
    return details;
};
exports.getChangelogDetails = getChangelogDetails;
/**
* Determine if a pull request description contains changelog input errors.
*
* @param {Object} details changelog details.
* @param {string} details.significance changelog significance.
* @param {string} details.type changelog type.
* @param {string} details.message changelog message.
* @param {string} details.comment changelog comment.
* @return {string|null} error message, or null if none found
*/
const getChangelogDetailsError = ({ significance, type, message, comment, }) => {
    // Ordered validation rules: the first failing rule's message is returned.
    const rules = [
        [
            comment && message,
            'Both a message and comment were found. Only one can be entered',
        ],
        [
            comment && significance !== 'patch',
            'Only patch changes can have a comment. Please change the significance to patch or remove the comment',
        ],
        [!significance, 'No changelog significance found'],
        [!type, 'No changelog type found'],
        [!comment && !message, 'No changelog message or comment found'],
    ];
    for (const [failed, error] of rules) {
        if (failed) {
            return error;
        }
    }
    return null;
};
exports.getChangelogDetailsError = getChangelogDetailsError;

View File

@ -0,0 +1,161 @@
"use strict";
// TypeScript emit helper (generated code — do not edit by hand): drives an
// async function that was down-leveled to a generator, wrapping each yielded
// value in a Promise and settling the outer Promise with the return value.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript emit helper (generated): wraps a CommonJS module so `.default`
// interop works for `import foo from '...'` style imports.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getTouchedProjectsRequiringChangelog = exports.getAllProjectPaths = exports.getTouchedChangeloggerProjectsPathsMappedToProjects = exports.getTouchedFilePaths = exports.getChangeloggerProjectPaths = exports.getAllProjectsPathsFromWorkspace = void 0;
/**
* External dependencies
*/
const fs_1 = require("fs");
const promises_1 = require("fs/promises");
const path_1 = __importDefault(require("path"));
const glob_1 = require("glob");
const simple_git_1 = __importDefault(require("simple-git"));
/**
* Internal dependencies
*/
const git_1 = require("../../core/git");
/**
* Get all projects listed in the workspace yaml file.
*
* @param {string} tmpRepoPath Path to the temporary repository.
* @param {string} workspaceYaml Contents of the workspace yaml file.
* @return {Array<string>} List of projects.
*/
const getAllProjectsPathsFromWorkspace = async (tmpRepoPath, workspaceYaml) => {
    // Entries in pnpm-workspace.yaml look like "  - 'plugins/woocommerce'",
    // so splitting on the list marker yields one raw entry per project.
    const rawProjects = workspaceYaml.split('- ');
    // remove heading
    rawProjects.shift();
    const cleaned = rawProjects.map((project) => project.replace(/'/g, '').trim());
    // Wildcard entries (e.g. "packages/js/*") are expanded on disk via glob;
    // plain entries pass through unchanged.
    const globbedProjects = await Promise.all(cleaned.map(async (project) => {
        if (project.includes('*')) {
            return await (0, glob_1.glob)(project, { cwd: tmpRepoPath });
        }
        return project;
    }));
    return globbedProjects.flat();
};
exports.getAllProjectsPathsFromWorkspace = getAllProjectsPathsFromWorkspace;
/**
* Get all projects that have Jetpack changelogger enabled
*
* @param {string} tmpRepoPath Path to the temporary repository.
* @param {Array<string>} projects all projects listed in the workspace yaml file
* @return {Array<string>} List of projects that have Jetpack changelogger enabled.
*/
const getChangeloggerProjectPaths = async (tmpRepoPath, projects) => {
    // A project can only use Jetpack changelogger if it ships a composer.json.
    const withComposer = projects.filter((project) => (0, fs_1.existsSync)(`${tmpRepoPath}/${project}/composer.json`));
    // Keep only projects declaring the changelogger in either dependency block.
    return withComposer.filter((project) => {
        const composer = JSON.parse((0, fs_1.readFileSync)(`${tmpRepoPath}/${project}/composer.json`, 'utf8'));
        const deps = composer.require || {};
        const devDeps = composer['require-dev'] || {};
        return Boolean(deps['automattic/jetpack-changelogger'] ||
            devDeps['automattic/jetpack-changelogger']);
    });
};
exports.getChangeloggerProjectPaths = getChangeloggerProjectPaths;
/**
* Get an array of all files changed in a PR.
*
* @param {string} tmpRepoPath Path to the temporary repository.
* @param {string} base base hash
* @param {string} head head hash
* @param {string} fileName changelog file name
* @param {string} baseOwner PR base owner
* @param {string} baseName PR base name
* @return {Array<string>} List of files changed in a PR.
*/
const getTouchedFilePaths = (tmpRepoPath, base, head, fileName, baseOwner, baseName) => __awaiter(void 0, void 0, void 0, function* () {
    const git = (0, simple_git_1.default)({
        baseDir: tmpRepoPath,
        // Neutralize any repo-local git hooks so fetch/diff cannot run scripts.
        config: ['core.hooksPath=/dev/null'],
    });
    // make sure base sha is available.
    // NOTE(review): this permanently adds a remote named after baseOwner to the
    // clone; assumes the remote does not already exist — confirm for reuse via
    // the --dev-repo-path option.
    yield git.addRemote(baseOwner, (0, git_1.getAuthenticatedRemote)({ owner: baseOwner, name: baseName }));
    yield git.fetch(baseOwner, base);
    // Three-dot diff: files changed on `head` since its merge base with `base`.
    const diff = yield git.raw([
        'diff',
        '--name-only',
        `${base}...${head}`,
    ]);
    return (diff
        .split('\n')
        .map((item) => item.trim())
        // Don't count changelogs themselves as touched files.
        .filter((item) => !item.includes(`/changelog/${fileName}`)));
});
exports.getTouchedFilePaths = getTouchedFilePaths;
/**
* Get an array of projects that have Jetpack changelogger enabled and have files changed in a PR. This function also maps names of projects that have been renamed in the monorepo from their paths.
*
* @param {Array<string>} touchedFiles List of files changed in a PR. touchedFiles
* @param {Array<string>} changeloggerProjects List of projects that have Jetpack changelogger enabled.
* @return {Object.<string, string>} Paths to projects that have files changed in a PR keyed by the project name.
*/
const getTouchedChangeloggerProjectsPathsMappedToProjects = (touchedFiles, changeloggerProjects) => {
    // woocommerce-admin was merged into the woocommerce plugin, so treat its
    // files as belonging to plugins/woocommerce.
    const normalizedFiles = touchedFiles.map((file) => file.includes('plugins/woocommerce-admin')
        ? file.replace('plugins/woocommerce-admin', 'plugins/woocommerce')
        : file);
    // A project is "touched" when any changed file lives under its directory.
    const touchedProjects = changeloggerProjects.filter((project) => normalizedFiles.some((file) => file.includes(project + '/')));
    // Key each touched path by its package name: plugins drop the "plugins/"
    // prefix, JS packages use the @woocommerce npm scope.
    return touchedProjects.reduce((acc, projectPath) => {
        let name = projectPath;
        if (name.includes('plugins/')) {
            name = name.replace('plugins/', '');
        }
        else if (name.includes('packages/js/')) {
            name = name.replace('packages/js/', '@woocommerce/');
        }
        acc[name] = projectPath;
        return acc;
    }, {});
};
exports.getTouchedChangeloggerProjectsPathsMappedToProjects = getTouchedChangeloggerProjectsPathsMappedToProjects;
/**
* Get all projects listed in the workspace yaml file.
*
* @param {string} tmpRepoPath Path to the temporary repository.
* @return {Array<string>} List of projects.
*/
const getAllProjectPaths = async (tmpRepoPath) => {
    // Read the monorepo workspace manifest and expand it into project paths.
    const workspaceFile = path_1.default.join(tmpRepoPath, 'pnpm-workspace.yaml');
    const workspaceYaml = await (0, promises_1.readFile)(workspaceFile, 'utf8');
    return (0, exports.getAllProjectsPathsFromWorkspace)(tmpRepoPath, workspaceYaml);
};
exports.getAllProjectPaths = getAllProjectPaths;
/**
* Get an array of projects that have Jetpack changelogger enabled and have files changed in a PR.
*
* @param {string} tmpRepoPath Path to the temporary repository.
* @param {string} base base hash
* @param {string} head head hash
* @param {string} fileName changelog file name
* @param {string} baseOwner PR base owner
* @param {string} baseName PR base name
* @return {Object.<string, string>} Paths to projects that have files changed in a PR keyed by the project name.
*/
const getTouchedProjectsRequiringChangelog = async (tmpRepoPath, base, head, fileName, baseOwner, baseName) => {
    // Pipeline: all workspace projects -> those with changelogger enabled ->
    // intersect with the PR's touched files -> key paths by package name.
    const allProjectPaths = await (0, exports.getAllProjectPaths)(tmpRepoPath);
    const changeloggerPaths = await (0, exports.getChangeloggerProjectPaths)(tmpRepoPath, allProjectPaths);
    const touchedPaths = await (0, exports.getTouchedFilePaths)(tmpRepoPath, base, head, fileName, baseOwner, baseName);
    return (0, exports.getTouchedChangeloggerProjectsPathsMappedToProjects)(touchedPaths, changeloggerPaths);
};
exports.getTouchedProjectsRequiringChangelog = getTouchedProjectsRequiringChangelog;

View File

@ -0,0 +1,32 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
/**
* External dependencies
*/
const extra_typings_1 = require("@commander-js/extra-typings");
/**
* Internal dependencies
*/
const logger_1 = require("../core/logger");
const project_graph_1 = require("./lib/project-graph");
const file_changes_1 = require("./lib/file-changes");
const job_processing_1 = require("./lib/job-processing");
const program = new extra_typings_1.Command('ci-jobs')
    .description('Generates CI workflow jobs based on the changes since the base ref.')
    .argument('<base-ref>', 'Base ref to compare the current ref against for change detection.')
    .action((baseRef) => __awaiter(void 0, void 0, void 0, function* () {
    // Build the dependency graph, work out which projects the diff touches,
    // and emit the matching CI jobs for the workflow to consume.
    const projectGraph = (0, project_graph_1.buildProjectGraph)();
    const fileChanges = (0, file_changes_1.getFileChanges)(projectGraph, baseRef);
    // createJobsForChanges is async (its tests `yield` it); without yielding,
    // `jobs` is a pending Promise and JSON.stringify emits "{}".
    const jobs = yield (0, job_processing_1.createJobsForChanges)(projectGraph, fileChanges);
    // Indent with a real tab: '\\t' (a literal backslash + t) would be injected
    // between JSON tokens and break strict JSON consumers of this output.
    logger_1.Logger.notice(JSON.stringify(jobs, null, '\t'));
}));
exports.default = program;

View File

@ -0,0 +1,156 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
* Internal dependencies
*/
const config_1 = require("../config");
describe('Config', () => {
describe('parseCIConfig', () => {
it('should parse empty config', () => {
const parsed = (0, config_1.parseCIConfig)({ name: 'foo', config: {} });
expect(parsed).toMatchObject({});
});
it('should parse lint config', () => {
const parsed = (0, config_1.parseCIConfig)({
name: 'foo',
config: {
ci: {
lint: {
changes: '/src\\/.*\\.[jt]sx?$/',
command: 'foo',
},
},
},
});
expect(parsed).toMatchObject({
jobs: [
{
type: "lint" /* JobType.Lint */,
changes: [new RegExp('/src\\/.*\\.[jt]sx?$/')],
command: 'foo',
},
],
});
});
it('should parse lint config with changes array', () => {
const parsed = (0, config_1.parseCIConfig)({
name: 'foo',
config: {
ci: {
lint: {
changes: [
'/src\\/.*\\.[jt]sx?$/',
'/test\\/.*\\.[jt]sx?$/',
],
command: 'foo',
},
},
},
});
expect(parsed).toMatchObject({
jobs: [
{
type: "lint" /* JobType.Lint */,
changes: [
new RegExp('/src\\/.*\\.[jt]sx?$/'),
new RegExp('/test\\/.*\\.[jt]sx?$/'),
],
command: 'foo',
},
],
});
});
it('should parse test config', () => {
const parsed = (0, config_1.parseCIConfig)({
name: 'foo',
config: {
ci: {
tests: [
{
name: 'default',
changes: '/src\\/.*\\.[jt]sx?$/',
command: 'foo',
},
],
},
},
});
expect(parsed).toMatchObject({
jobs: [
{
type: "test" /* JobType.Test */,
name: 'default',
changes: [new RegExp('/src\\/.*\\.[jt]sx?$/')],
command: 'foo',
},
],
});
});
it('should parse test config with environment', () => {
const parsed = (0, config_1.parseCIConfig)({
name: 'foo',
config: {
ci: {
tests: [
{
name: 'default',
changes: '/src\\/.*\\.[jt]sx?$/',
command: 'foo',
testEnv: {
start: 'bar',
config: {
wpVersion: 'latest',
},
},
},
],
},
},
});
expect(parsed).toMatchObject({
jobs: [
{
type: "test" /* JobType.Test */,
name: 'default',
changes: [new RegExp('/src\\/.*\\.[jt]sx?$/')],
command: 'foo',
testEnv: {
start: 'bar',
config: {
wpVersion: 'latest',
},
},
},
],
});
});
it('should parse test config with cascade', () => {
const parsed = (0, config_1.parseCIConfig)({
name: 'foo',
config: {
ci: {
tests: [
{
name: 'default',
changes: '/src\\/.*\\.[jt]sx?$/',
command: 'foo',
cascade: 'bar',
},
],
},
},
});
expect(parsed).toMatchObject({
jobs: [
{
type: "test" /* JobType.Test */,
name: 'default',
changes: [new RegExp('/src\\/.*\\.[jt]sx?$/')],
command: 'foo',
cascadeKeys: ['bar'],
},
],
});
});
});
});

View File

@ -0,0 +1,53 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
* External dependencies
*/
const node_child_process_1 = require("node:child_process");
/**
* Internal dependencies
*/
const file_changes_1 = require("../file-changes");
jest.mock('node:child_process');
describe('File Changes', () => {
describe('getFileChanges', () => {
it('should associate git changes with projects', () => {
jest.mocked(node_child_process_1.execSync).mockImplementation((command) => {
if (command === 'git diff --name-only origin/trunk') {
return `test/project-a/package.json
foo/project-b/foo.js
bar/project-c/bar.js
baz/project-d/baz.js`;
}
throw new Error('Invalid command');
});
const fileChanges = (0, file_changes_1.getFileChanges)({
name: 'project-a',
path: 'test/project-a',
dependencies: [
{
name: 'project-b',
path: 'foo/project-b',
dependencies: [
{
name: 'project-c',
path: 'bar/project-c',
dependencies: [],
},
],
},
{
name: 'project-c',
path: 'bar/project-c',
dependencies: [],
},
],
}, 'origin/trunk');
expect(fileChanges).toMatchObject({
'project-a': ['package.json'],
'project-b': ['foo.js'],
'project-c': ['bar.js'],
});
});
});
});

View File

@ -0,0 +1,442 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
const job_processing_1 = require("../job-processing");
const test_environment_1 = require("../test-environment");
jest.mock('../test-environment');
describe('Job Processing', () => {
describe('getFileChanges', () => {
it('should do nothing with no CI configs', () => __awaiter(void 0, void 0, void 0, function* () {
const jobs = yield (0, job_processing_1.createJobsForChanges)({
name: 'test',
path: 'test',
dependencies: [],
}, {});
expect(jobs.lint).toHaveLength(0);
expect(jobs.test).toHaveLength(0);
}));
it('should trigger lint job for single node', () => __awaiter(void 0, void 0, void 0, function* () {
const jobs = yield (0, job_processing_1.createJobsForChanges)({
name: 'test',
path: 'test',
ciConfig: {
jobs: [
{
type: "lint" /* JobType.Lint */,
changes: [/test.js$/],
command: 'test-lint',
},
],
},
dependencies: [],
}, {
test: ['test.js'],
});
expect(jobs.lint).toHaveLength(1);
expect(jobs.lint).toContainEqual({
projectName: 'test',
command: 'test-lint',
});
expect(jobs.test).toHaveLength(0);
}));
it('should not trigger lint job for single node with no changes', () => __awaiter(void 0, void 0, void 0, function* () {
const jobs = yield (0, job_processing_1.createJobsForChanges)({
name: 'test',
path: 'test',
ciConfig: {
jobs: [
{
type: "lint" /* JobType.Lint */,
changes: [/test.js$/],
command: 'test-lint',
},
],
},
dependencies: [],
}, {});
expect(jobs.lint).toHaveLength(0);
expect(jobs.test).toHaveLength(0);
}));
it('should trigger lint job for project graph', () => __awaiter(void 0, void 0, void 0, function* () {
const jobs = yield (0, job_processing_1.createJobsForChanges)({
name: 'test',
path: 'test',
ciConfig: {
jobs: [
{
type: "lint" /* JobType.Lint */,
changes: [/test.js$/],
command: 'test-lint',
},
],
},
dependencies: [
{
name: 'test-a',
path: 'test-a',
ciConfig: {
jobs: [
{
type: "lint" /* JobType.Lint */,
changes: [/test-a.js$/],
command: 'test-lint-a',
},
],
},
dependencies: [],
},
{
name: 'test-b',
path: 'test-b',
ciConfig: {
jobs: [
{
type: "lint" /* JobType.Lint */,
changes: [/test-b.js$/],
command: 'test-lint-b',
},
],
},
dependencies: [],
},
],
}, {
test: ['test.js'],
'test-a': ['test-ignored.js'],
'test-b': ['test-b.js'],
});
expect(jobs.lint).toHaveLength(2);
expect(jobs.lint).toContainEqual({
projectName: 'test',
command: 'test-lint',
});
expect(jobs.lint).toContainEqual({
projectName: 'test-b',
command: 'test-lint-b',
});
expect(jobs.test).toHaveLength(0);
}));
it('should trigger lint job for project graph with empty config parent', () => __awaiter(void 0, void 0, void 0, function* () {
const jobs = yield (0, job_processing_1.createJobsForChanges)({
name: 'test',
path: 'test',
dependencies: [
{
name: 'test-a',
path: 'test-a',
ciConfig: {
jobs: [
{
type: "lint" /* JobType.Lint */,
changes: [/test-a.js$/],
command: 'test-lint-a',
},
],
},
dependencies: [],
},
{
name: 'test-b',
path: 'test-b',
ciConfig: {
jobs: [
{
type: "lint" /* JobType.Lint */,
changes: [/test-b.js$/],
command: 'test-lint-b',
},
],
},
dependencies: [],
},
],
}, {
test: ['test.js'],
'test-a': ['test-a.js'],
'test-b': ['test-b.js'],
});
expect(jobs.lint).toHaveLength(2);
expect(jobs.lint).toContainEqual({
projectName: 'test-a',
command: 'test-lint-a',
});
expect(jobs.lint).toContainEqual({
projectName: 'test-b',
command: 'test-lint-b',
});
expect(jobs.test).toHaveLength(0);
}));
it('should trigger test job for single node', () => __awaiter(void 0, void 0, void 0, function* () {
const jobs = yield (0, job_processing_1.createJobsForChanges)({
name: 'test',
path: 'test',
ciConfig: {
jobs: [
{
type: "test" /* JobType.Test */,
name: 'Default',
changes: [/test.js$/],
command: 'test-cmd',
},
],
},
dependencies: [],
}, {
test: ['test.js'],
});
expect(jobs.lint).toHaveLength(0);
expect(jobs.test).toHaveLength(1);
expect(jobs.test).toContainEqual({
projectName: 'test',
name: 'Default',
command: 'test-cmd',
});
}));
it('should not trigger test job for single node with no changes', () => __awaiter(void 0, void 0, void 0, function* () {
const jobs = yield (0, job_processing_1.createJobsForChanges)({
name: 'test',
path: 'test',
ciConfig: {
jobs: [
{
type: "test" /* JobType.Test */,
name: 'Default',
changes: [/test.js$/],
command: 'test-cmd',
},
],
},
dependencies: [],
}, {});
expect(jobs.lint).toHaveLength(0);
expect(jobs.test).toHaveLength(0);
}));
it('should trigger test job for project graph', () => __awaiter(void 0, void 0, void 0, function* () {
const jobs = yield (0, job_processing_1.createJobsForChanges)({
name: 'test',
path: 'test',
ciConfig: {
jobs: [
{
type: "test" /* JobType.Test */,
name: 'Default',
changes: [/test.js$/],
command: 'test-cmd',
},
],
},
dependencies: [
{
name: 'test-a',
path: 'test-a',
ciConfig: {
jobs: [
{
type: "test" /* JobType.Test */,
name: 'Default A',
changes: [/test-b.js$/],
command: 'test-cmd-a',
},
],
},
dependencies: [],
},
{
name: 'test-b',
path: 'test-b',
ciConfig: {
jobs: [
{
type: "test" /* JobType.Test */,
name: 'Default B',
changes: [/test-b.js$/],
command: 'test-cmd-b',
},
],
},
dependencies: [],
},
],
}, {
test: ['test.js'],
'test-a': ['test-ignored.js'],
'test-b': ['test-b.js'],
});
expect(jobs.lint).toHaveLength(0);
expect(jobs.test).toHaveLength(2);
expect(jobs.test).toContainEqual({
projectName: 'test',
name: 'Default',
command: 'test-cmd',
});
expect(jobs.test).toContainEqual({
projectName: 'test-b',
name: 'Default B',
command: 'test-cmd-b',
});
}));
it('should trigger test job for dependent without changes when dependency has matching cascade key', () => __awaiter(void 0, void 0, void 0, function* () {
const jobs = yield (0, job_processing_1.createJobsForChanges)({
name: 'test',
path: 'test',
ciConfig: {
jobs: [
{
type: "test" /* JobType.Test */,
name: 'Default',
changes: [/test.js$/],
command: 'test-cmd',
cascadeKeys: ['test'],
},
],
},
dependencies: [
{
name: 'test-a',
path: 'test-a',
ciConfig: {
jobs: [
{
type: "test" /* JobType.Test */,
name: 'Default A',
changes: [/test-a.js$/],
command: 'test-cmd-a',
cascadeKeys: ['test-a', 'test'],
},
],
},
dependencies: [],
},
],
}, {
'test-a': ['test-a.js'],
});
expect(jobs.lint).toHaveLength(0);
expect(jobs.test).toHaveLength(2);
expect(jobs.test).toContainEqual({
projectName: 'test',
name: 'Default',
command: 'test-cmd',
});
expect(jobs.test).toContainEqual({
projectName: 'test-a',
name: 'Default A',
command: 'test-cmd-a',
});
}));
it('should isolate dependency cascade keys to prevent cross-dependency matching', () => __awaiter(void 0, void 0, void 0, function* () {
const jobs = yield (0, job_processing_1.createJobsForChanges)({
name: 'test',
path: 'test',
ciConfig: {
jobs: [
{
type: "test" /* JobType.Test */,
name: 'Default',
changes: [/test.js$/],
command: 'test-cmd',
cascadeKeys: ['test'],
},
],
},
dependencies: [
{
name: 'test-a',
path: 'test-a',
ciConfig: {
jobs: [
{
type: "test" /* JobType.Test */,
name: 'Default A',
changes: [/test-a.js$/],
command: 'test-cmd-a',
cascadeKeys: ['test-a', 'test'],
},
],
},
dependencies: [],
},
{
name: 'test-b',
path: 'test-b',
ciConfig: {
jobs: [
{
type: "test" /* JobType.Test */,
name: 'Default B',
changes: [/test-b.js$/],
command: 'test-cmd-b',
cascadeKeys: ['test-b', 'test'],
},
],
},
dependencies: [],
},
],
}, {
'test-a': ['test-a.js'],
});
expect(jobs.lint).toHaveLength(0);
expect(jobs.test).toHaveLength(2);
expect(jobs.test).toContainEqual({
projectName: 'test',
name: 'Default',
command: 'test-cmd',
});
expect(jobs.test).toContainEqual({
projectName: 'test-a',
name: 'Default A',
command: 'test-cmd-a',
});
}));
it('should trigger test job for single node and parse test environment config', () => __awaiter(void 0, void 0, void 0, function* () {
jest.mocked(test_environment_1.parseTestEnvConfig).mockResolvedValue({
WP_ENV_CORE: 'https://wordpress.org/latest.zip',
});
const jobs = yield (0, job_processing_1.createJobsForChanges)({
name: 'test',
path: 'test',
ciConfig: {
jobs: [
{
type: "test" /* JobType.Test */,
name: 'Default',
changes: [/test.js$/],
command: 'test-cmd',
testEnv: {
start: 'test-start',
config: {
wpVersion: 'latest',
},
},
},
],
},
dependencies: [],
}, {
test: ['test.js'],
});
expect(jobs.lint).toHaveLength(0);
expect(jobs.test).toHaveLength(1);
expect(jobs.test).toContainEqual({
projectName: 'test',
name: 'Default',
command: 'test-cmd',
testEnv: {
start: 'test-start',
envVars: {
WP_ENV_CORE: 'https://wordpress.org/latest.zip',
},
},
});
}));
});
});

View File

@ -0,0 +1,62 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
/**
* External dependencies
*/
const node_fs_1 = __importDefault(require("node:fs"));
/**
* Internal dependencies
*/
const package_file_1 = require("../package-file");
jest.mock('node:fs');
describe('Package File', () => {
describe('loadPackage', () => {
it("should throw for file that doesn't exist", () => {
jest.mocked(node_fs_1.default.readFileSync).mockImplementation((path) => {
if (path === 'foo') {
throw new Error('ENOENT');
}
return '';
});
expect(() => (0, package_file_1.loadPackage)('foo')).toThrow('ENOENT');
});
it('should load package.json', () => {
jest.mocked(node_fs_1.default.readFileSync).mockImplementationOnce((path) => {
if (path === __dirname + '/test-package.json') {
return JSON.stringify({
name: 'foo',
});
}
throw new Error('ENOENT');
});
const loadedFile = (0, package_file_1.loadPackage)(__dirname + '/test-package.json');
expect(loadedFile).toMatchObject({
name: 'foo',
});
});
it('should cache using normalized paths', () => {
jest.mocked(node_fs_1.default.readFileSync).mockImplementationOnce((path) => {
if (path === __dirname + '/test-package.json') {
return JSON.stringify({
name: 'foo',
});
}
throw new Error('ENOENT');
});
(0, package_file_1.loadPackage)(__dirname + '/test-package.json');
// Just throw if it's called again so that we can make sure we're using the cache.
jest.mocked(node_fs_1.default.readFileSync).mockImplementationOnce(() => {
throw new Error('ENOENT');
});
const cachedFile = (0, package_file_1.loadPackage)(
// Use a token that needs to be normalized to match the cached path.
__dirname + '/./test-package.json');
expect(cachedFile).toMatchObject({
name: 'foo',
});
});
});
});

View File

@ -0,0 +1,103 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
/**
* External dependencies
*/
const node_child_process_1 = require("node:child_process");
const node_fs_1 = __importDefault(require("node:fs"));
/**
* Internal dependencies
*/
const config_1 = require("../config");
const package_file_1 = require("../package-file");
const project_graph_1 = require("../project-graph");
jest.mock('node:child_process');
jest.mock('../config');
jest.mock('../package-file');
describe('Project Graph', () => {
describe('buildProjectGraph', () => {
it('should build graph from pnpm list', () => {
jest.mocked(node_child_process_1.execSync).mockImplementation((command) => {
if (command === 'pnpm -w root') {
return '/test/monorepo/node_modules';
}
if (command === 'pnpm -r list --only-projects --json') {
return node_fs_1.default.readFileSync(__dirname + '/test-pnpm-list.json');
}
throw new Error('Invalid command');
});
jest.mocked(package_file_1.loadPackage).mockImplementation((path) => {
if (!path.endsWith('package.json')) {
throw new Error('Invalid path');
}
const matches = path.match(/\/([^/]+)\/package.json$/);
return {
name: matches[1],
};
});
jest.mocked(config_1.parseCIConfig).mockImplementation((packageFile) => {
expect(packageFile).toMatchObject({
name: expect.stringMatching(/project-[abcd]/),
});
return { jobs: [] };
});
const graph = (0, project_graph_1.buildProjectGraph)();
expect(package_file_1.loadPackage).toHaveBeenCalled();
expect(config_1.parseCIConfig).toHaveBeenCalled();
expect(graph).toMatchObject({
name: 'project-a',
path: 'project-a',
ciConfig: {
jobs: [],
},
dependencies: [
{
name: 'project-b',
path: 'project-b',
ciConfig: {
jobs: [],
},
dependencies: [
{
name: 'project-c',
path: 'project-c',
ciConfig: {
jobs: [],
},
dependencies: [],
},
],
},
{
name: 'project-c',
path: 'project-c',
ciConfig: {
jobs: [],
},
dependencies: [],
},
{
name: 'project-d',
path: 'project-d',
ciConfig: {
jobs: [],
},
dependencies: [
{
name: 'project-c',
path: 'project-c',
ciConfig: {
jobs: [],
},
dependencies: [],
},
],
},
],
});
});
});
});

View File

@ -0,0 +1,119 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
/**
* External dependencies
*/
const node_http_1 = require("node:http");
const node_stream_1 = require("node:stream");
/**
* Internal dependencies
*/
const test_environment_1 = require("../test-environment");
jest.mock('node:http');
describe('Test Environment', () => {
describe('parseTestEnvConfig', () => {
it('should parse empty configs', () => __awaiter(void 0, void 0, void 0, function* () {
const envVars = yield (0, test_environment_1.parseTestEnvConfig)({});
expect(envVars).toEqual({});
}));
describe('wpVersion', () => {
// We're going to mock an implementation of the request to the WordPress.org API.
// This simulates what happens when we call https.get() for it.
jest.mocked(node_http_1.get).mockImplementation((url, callback) => {
if (url !== 'http://api.wordpress.org/core/stable-check/1.0/') {
throw new Error('Invalid URL');
}
const getStream = new node_stream_1.Stream();
// Let the consumer set up listeners for the stream.
callback(getStream);
const wpVersions = {
'5.9': 'insecure',
'6.0': 'insecure',
'6.0.1': 'insecure',
'6.1': 'insecure',
'6.1.1': 'insecure',
'6.1.2': 'outdated',
'6.2': 'latest',
};
getStream.emit('data', JSON.stringify(wpVersions));
getStream.emit('end'); // this will trigger the promise resolve
return jest.fn();
});
it('should parse "master" and "trunk" branches', () => __awaiter(void 0, void 0, void 0, function* () {
let envVars = yield (0, test_environment_1.parseTestEnvConfig)({
wpVersion: 'master',
});
expect(envVars).toEqual({
WP_ENV_CORE: 'WordPress/WordPress#master',
});
envVars = yield (0, test_environment_1.parseTestEnvConfig)({
wpVersion: 'trunk',
});
expect(envVars).toEqual({
WP_ENV_CORE: 'WordPress/WordPress#master',
});
}));
it('should parse nightlies', () => __awaiter(void 0, void 0, void 0, function* () {
const envVars = yield (0, test_environment_1.parseTestEnvConfig)({
wpVersion: 'nightly',
});
expect(envVars).toEqual({
WP_ENV_CORE: 'https://wordpress.org/nightly-builds/wordpress-latest.zip',
});
}));
it('should parse latest', () => __awaiter(void 0, void 0, void 0, function* () {
const envVars = yield (0, test_environment_1.parseTestEnvConfig)({
wpVersion: 'latest',
});
expect(envVars).toEqual({
WP_ENV_CORE: 'https://wordpress.org/latest.zip',
});
}));
it('should parse specific minor version', () => __awaiter(void 0, void 0, void 0, function* () {
const envVars = yield (0, test_environment_1.parseTestEnvConfig)({
wpVersion: '5.9.0',
});
expect(envVars).toEqual({
WP_ENV_CORE: 'https://wordpress.org/wordpress-5.9.zip',
});
}));
// Non-zero patch versions keep the full X.X.X in the URL.
it('should parse specific patch version', async () => {
    const parsed = await (0, test_environment_1.parseTestEnvConfig)({ wpVersion: '6.0.1' });
    expect(parsed).toEqual({
        WP_ENV_CORE: 'https://wordpress.org/wordpress-6.0.1.zip',
    });
});
// Version lines unknown to the stable-check API must be rejected.
it('should throw for version that does not exist', async () => {
    const expectation = () => (0, test_environment_1.parseTestEnvConfig)({
        wpVersion: '1.0',
    });
    // The `rejects` matcher is asynchronous: its promise must be awaited,
    // otherwise a failing assertion is silently discarded and this test
    // passes even when parseTestEnvConfig resolves successfully.
    await expect(expectation).rejects.toThrowError(/Failed to parse WP version/);
});
// "latest-1" steps one minor version back from the mocked latest (6.2 -> 6.1.2).
it('should parse latest offset', async () => {
    const parsed = await (0, test_environment_1.parseTestEnvConfig)({ wpVersion: 'latest-1' });
    expect(parsed).toEqual({
        WP_ENV_CORE: 'https://wordpress.org/wordpress-6.1.2.zip',
    });
});
// An offset that walks past every released version line must be rejected.
it('should throw for latest offset that does not exist', async () => {
    const expectation = () => (0, test_environment_1.parseTestEnvConfig)({
        wpVersion: 'latest-10',
    });
    // The `rejects` matcher is asynchronous: its promise must be awaited,
    // otherwise a failing assertion is silently discarded and this test
    // passes even when parseTestEnvConfig resolves successfully.
    await expect(expectation).rejects.toThrowError(/Failed to parse WP version/);
});
});
});
});

View File

@ -0,0 +1,160 @@
"use strict";
/* eslint-disable @typescript-eslint/no-explicit-any */
Object.defineProperty(exports, "__esModule", { value: true });
exports.parseCIConfig = exports.ConfigError = void 0;
/**
 * Error raised when a project's CI configuration is missing or malformed.
 */
class ConfigError extends Error {
    /**
     * @param {string} message Description of the configuration problem.
     */
    constructor(message) {
        super(message);
        // Use the class name instead of the generic "Error" so logs and
        // error reports identify configuration failures at a glance.
        this.name = 'ConfigError';
    }
}
exports.ConfigError = ConfigError;
/**
 * Parses and validates a raw change config entry, compiling each pattern
 * into a RegExp.
 *
 * @param {string|string[]} raw The raw config to parse.
 * @return {RegExp[]} The compiled change patterns.
 * @throws {ConfigError} When the input is not a string or array of strings.
 */
function parseChangesConfig(raw) {
    // A single pattern is allowed as shorthand for a one-element list.
    if (typeof raw === 'string') {
        return [new RegExp(raw)];
    }
    const isStringArray = Array.isArray(raw) && raw.every((entry) => typeof entry === 'string');
    if (!isStringArray) {
        throw new ConfigError('Changes configuration must be a string or array of strings.');
    }
    return raw.map((entry) => new RegExp(entry));
}
/**
 * Parses the lint job configuration out of a raw config object.
 *
 * @param {Object} raw The raw config to parse.
 * @return {Object} The parsed lint job ({ type, changes, command }).
 * @throws {ConfigError} When required options are missing or malformed.
 */
function parseLintJobConfig(raw) {
    const { changes, command } = raw;
    // Validation order matters: "changes" is reported before "command".
    if (!changes) {
        throw new ConfigError('A "changes" option is required for the lint job.');
    }
    if (!command || typeof command !== 'string') {
        throw new ConfigError('A string "command" option is required for the lint job.');
    }
    return {
        type: "lint" /* JobType.Lint */,
        changes: parseChangesConfig(changes),
        command,
    };
}
/**
 * Parses the test env config vars, keeping only the recognized string
 * options ("wpVersion" and "phpVersion").
 *
 * @param {Object} raw The raw config to parse.
 * @return {Object} The validated test environment config vars.
 * @throws {ConfigError} When a recognized option is present but not a string.
 */
function parseTestEnvConfigVars(raw) {
    const { wpVersion, phpVersion } = raw;
    const vars = {};
    if (wpVersion) {
        if (typeof wpVersion !== 'string') {
            throw new ConfigError('The "wpVersion" option must be a string.');
        }
        vars.wpVersion = wpVersion;
    }
    if (phpVersion) {
        if (typeof phpVersion !== 'string') {
            throw new ConfigError('The "phpVersion" option must be a string.');
        }
        vars.phpVersion = phpVersion;
    }
    return vars;
}
/**
 * Parses the cascade config into a list of cascade keys.
 *
 * @param {string|string[]} raw The raw config to parse.
 * @return {string[]} The cascade keys.
 * @throws {ConfigError} When the input is not a string or array of strings.
 */
function parseTestCascade(raw) {
    // A single key is allowed as shorthand for a one-element list.
    if (typeof raw === 'string') {
        return [raw];
    }
    const isStringArray = Array.isArray(raw) && raw.every((entry) => typeof entry === 'string');
    if (!isStringArray) {
        throw new ConfigError('Cascade configuration must be a string or array of strings.');
    }
    // Copy so callers can't mutate the raw config through the result.
    return [...raw];
}
/**
 * Parses the test job config.
 *
 * @param {Object} raw The raw config to parse.
 * @return {Object} The parsed test job ({ type, name, changes, command }
 *                  plus optional testEnv and cascadeKeys).
 * @throws {ConfigError} When required options are missing or malformed.
 */
function parseTestJobConfig(raw) {
    if (!raw.name || typeof raw.name !== 'string') {
        throw new ConfigError('A string "name" option is required for test jobs.');
    }
    if (!raw.changes) {
        throw new ConfigError('A "changes" option is required for the test jobs.');
    }
    if (!raw.command || typeof raw.command !== 'string') {
        throw new ConfigError('A string "command" option is required for the test jobs.');
    }
    const config = {
        type: "test" /* JobType.Test */,
        name: raw.name,
        changes: parseChangesConfig(raw.changes),
        command: raw.command,
    };
    if (raw.testEnv) {
        if (typeof raw.testEnv !== 'object') {
            throw new ConfigError('The "testEnv" option must be an object.');
        }
        if (!raw.testEnv.start || typeof raw.testEnv.start !== 'string') {
            throw new ConfigError('A string "start" option is required for test environments.');
        }
        config.testEnv = {
            start: raw.testEnv.start,
            // A missing "config" block previously escaped as a raw TypeError
            // when parseTestEnvConfigVars dereferenced it; treat it as an
            // empty configuration instead.
            config: parseTestEnvConfigVars(raw.testEnv.config || {}),
        };
    }
    if (raw.cascade) {
        config.cascadeKeys = parseTestCascade(raw.cascade);
    }
    return config;
}
/**
 * Parses the raw CI config out of a package file's `config.ci` section.
 *
 * @param {Object} raw The raw config.
 * @return {Object} The parsed config containing all discovered jobs.
 * @throws {ConfigError} When the "lint" or "tests" sections are malformed.
 */
function parseCIConfig(raw) {
    const parsed = {
        jobs: [],
    };
    // Packages without a `config.ci` section simply have no jobs.
    const ciConfig = raw.config && raw.config.ci;
    if (!ciConfig) {
        return parsed;
    }
    if (ciConfig.lint) {
        if (typeof ciConfig.lint !== 'object') {
            throw new ConfigError('The "lint" option must be an object.');
        }
        parsed.jobs.push(parseLintJobConfig(ciConfig.lint));
    }
    if (ciConfig.tests) {
        if (!Array.isArray(ciConfig.tests)) {
            throw new ConfigError('The "tests" option must be an array.');
        }
        for (const rawTestConfig of ciConfig.tests) {
            parsed.jobs.push(parseTestJobConfig(rawTestConfig));
        }
    }
    return parsed;
}
exports.parseCIConfig = parseCIConfig;

View File

@ -0,0 +1,79 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getFileChanges = void 0;
/**
* External dependencies
*/
const node_child_process_1 = require("node:child_process");
/**
 * Gets the project path for every project in the graph via a breadth-first
 * walk, visiting each project once even when the graph contains cycles.
 *
 * @param {Object} graph The project graph to process.
 * @return {Object} The project paths keyed by the project name.
 */
function getProjectPaths(graph) {
    const projectPaths = {};
    const seen = new Set();
    const queue = [graph];
    while (queue.length > 0) {
        const current = queue.shift();
        // Skip holes and anything we've already recorded.
        if (!current || seen.has(current.name)) {
            continue;
        }
        seen.add(current.name);
        projectPaths[current.name] = current.path;
        queue.push(...current.dependencies);
    }
    return projectPaths;
}
/**
 * Checks the changed files and returns any that are relevant to the project.
 *
 * @param {string} projectPath The path to the project to get changed files for.
 * @param {Array.<string>} changedFiles The files that have changed in the repo.
 * @return {Array.<string>} The changed files, relative to the project directory.
 */
function getChangedFilesForProject(projectPath, changedFiles) {
    // Match on a full directory prefix so a project path like
    // "plugins/woocommerce" does not also claim files from sibling
    // projects such as "plugins/woocommerce-admin".
    const prefix = projectPath.endsWith('/') ? projectPath : `${projectPath}/`;
    const projectChanges = [];
    // Find all of the files that have changed in the project.
    for (const filePath of changedFiles) {
        if (!filePath.startsWith(prefix)) {
            continue;
        }
        // Track the file relative to the project.
        projectChanges.push(filePath.slice(prefix.length));
    }
    return projectChanges;
}
/**
 * Pulls all of the files that have changed in the project graph since the given git ref.
 *
 * Shells out to `git diff`, so it must run from inside the repository.
 *
 * @param {Object} projectGraph The project graph to assign changes for.
 * @param {string} baseRef The git ref to compare against for changes.
 * @return {Object} A map of changed files keyed by the project name.
 */
function getFileChanges(projectGraph, baseRef) {
    const projectPaths = getProjectPaths(projectGraph);
    // Ask git which files differ from the base ref, one path per line.
    const diffOutput = (0, node_child_process_1.execSync)(`git diff --name-only ${baseRef}`, {
        encoding: 'utf8',
    });
    const changedFilePaths = diffOutput.split('\n');
    const changes = {};
    for (const [projectName, projectPath] of Object.entries(projectPaths)) {
        // Projects with no paths have no changed files for us to identify.
        if (!projectPath) {
            continue;
        }
        const projectChanges = getChangedFilesForProject(projectPath, changedFilePaths);
        // Only record projects that actually changed.
        if (projectChanges.length > 0) {
            changes[projectName] = projectChanges;
        }
    }
    return changes;
}
exports.getFileChanges = getFileChanges;

View File

@ -0,0 +1,178 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.createJobsForChanges = void 0;
const test_environment_1 = require("./test-environment");
/**
* Checks the config against the changes and creates one if it should be run.
*
* @param {string} projectName The name of the project that the job is for.
* @param {Object} config The config object for the lint job.
* @param {Array.<string>} changes The file changes that have occurred for the project.
* @return {Object|null} The job that should be run or null if no job should be run.
*/
function createLintJob(projectName, config, changes) {
let triggered = false;
// Projects can configure jobs to be triggered when a
// changed file matches a path regex.
for (const file of changes) {
for (const change of config.changes) {
if (change.test(file)) {
triggered = true;
break;
}
}
if (triggered) {
break;
}
}
if (!triggered) {
return null;
}
return {
projectName,
command: config.command,
};
}
/**
* Checks the config against the changes and creates one if it should be run.
*
* @param {string} projectName The name of the project that the job is for.
* @param {Object} config The config object for the test job.
* @param {Array.<string>} changes The file changes that have occurred for the project.
* @param {Array.<string>} cascadeKeys The cascade keys that have been triggered in dependencies.
* @return {Promise.<Object|null>} The job that should be run or null if no job should be run.
*/
function createTestJob(projectName, config, changes, cascadeKeys) {
return __awaiter(this, void 0, void 0, function* () {
let triggered = false;
// Some jobs can be configured to trigger when a dependency has a job that
// was triggered. For example, a code change in a dependency might mean
// that code is impacted in the current project even if no files were
// actually changed in this project.
if (config.cascadeKeys &&
config.cascadeKeys.some((value) => cascadeKeys.includes(value))) {
triggered = true;
}
// Projects can configure jobs to be triggered when a
// changed file matches a path regex.
if (!triggered) {
for (const file of changes) {
for (const change of config.changes) {
if (change.test(file)) {
triggered = true;
break;
}
}
if (triggered) {
break;
}
}
}
if (!triggered) {
return null;
}
const createdJob = {
projectName,
name: config.name,
command: config.command,
};
// We want to make sure that we're including the configuration for
// any test environment that the job will need in order to run.
if (config.testEnv) {
createdJob.testEnv = {
start: config.testEnv.start,
envVars: yield (0, test_environment_1.parseTestEnvConfig)(config.testEnv.config),
};
}
return createdJob;
});
}
/**
* Recursively checks the project for any jobs that should be executed and returns them.
*
* @param {Object} node The current project node to examine.
* @param {Object} changedFiles The files that have changed for the project.
* @param {Array.<string>} cascadeKeys The cascade keys that have been triggered in dependencies.
* @return {Promise.<Object>} The jobs that have been created for the project.
*/
function createJobsForProject(node, changedFiles, cascadeKeys) {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
// We're going to traverse the project graph and check each node for any jobs that should be triggered.
const newJobs = {
lint: [],
test: [],
};
// In order to simplify the way that cascades work we're going to recurse depth-first and check our dependencies
// for jobs before ourselves. This lets any cascade keys created in dependencies cascade to dependents.
const newCascadeKeys = [];
for (const dependency of node.dependencies) {
// Each dependency needs to have its own cascade keys so that they don't cross-contaminate.
const dependencyCascade = [...cascadeKeys];
const dependencyJobs = yield createJobsForProject(dependency, changedFiles, dependencyCascade);
newJobs.lint.push(...dependencyJobs.lint);
newJobs.test.push(...dependencyJobs.test);
// Track any new cascade keys added by the dependency.
// Since we're filtering out duplicates after the
// dependencies are checked we don't need to
// worry about their presence right now.
newCascadeKeys.push(...dependencyCascade);
}
// Now that we're done looking at the dependencies we can add the cascade keys that
// they created. Make sure to avoid adding duplicates so that we don't waste time
// checking the same keys multiple times when we create the jobs.
cascadeKeys.push(...newCascadeKeys.filter((value) => !cascadeKeys.includes(value)));
// Projects that don't have any CI configuration don't have any potential jobs for us to check for.
if (!node.ciConfig) {
return newJobs;
}
for (const jobConfig of node.ciConfig.jobs) {
switch (jobConfig.type) {
case "lint" /* JobType.Lint */: {
const created = createLintJob(node.name, jobConfig, (_a = changedFiles[node.name]) !== null && _a !== void 0 ? _a : []);
if (!created) {
break;
}
newJobs.lint.push(created);
break;
}
case "test" /* JobType.Test */: {
const created = yield createTestJob(node.name, jobConfig, (_b = changedFiles[node.name]) !== null && _b !== void 0 ? _b : [], cascadeKeys);
if (!created) {
break;
}
newJobs.test.push(created);
// We need to track any cascade keys that this job is associated with so that
// dependent projects can trigger jobs with matching keys. We are expecting
// the array passed to this function to be modified by reference so this
// behavior is intentional.
if (jobConfig.cascadeKeys) {
cascadeKeys.push(...jobConfig.cascadeKeys);
}
break;
}
}
}
return newJobs;
});
}
/**
* Creates jobs to run for the given project graph and file changes.
*
* @param {Object} root The root node for the project graph.
* @param {Object} changes The file changes that have occurred.
* @return {Promise.<Object>} The jobs that should be run.
*/
function createJobsForChanges(root, changes) {
return createJobsForProject(root, changes, []);
}
exports.createJobsForChanges = createJobsForChanges;

View File

@ -0,0 +1,31 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.loadPackage = void 0;
/**
* External dependencies
*/
const node_fs_1 = __importDefault(require("node:fs"));
const node_path_1 = __importDefault(require("node:path"));
// Cache of parsed package files keyed by the normalized package path so
// repeated lookups don't re-read and re-parse the same file.
const packageCache = {};
/**
 * Loads a package file's contents either from the cache or from the file system.
 *
 * @param {string} packagePath The package file to load.
 * @return {Object} The package file's contents.
 */
function loadPackage(packagePath) {
    // Normalize so different spellings of the same path (accommodating any
    // path tokens) share a single cache slot.
    const normalizedPath = node_path_1.default.normalize(packagePath);
    if (!packageCache[normalizedPath]) {
        packageCache[normalizedPath] = JSON.parse(node_fs_1.default.readFileSync(normalizedPath, 'utf8'));
    }
    return packageCache[normalizedPath];
}
exports.loadPackage = loadPackage;

View File

@ -0,0 +1,88 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.buildProjectGraph = void 0;
/**
* External dependencies
*/
const node_child_process_1 = require("node:child_process");
const node_path_1 = __importDefault(require("node:path"));
/**
* Internal dependencies
*/
const config_1 = require("./config");
const package_file_1 = require("./package-file");
/**
 * Builds a dependency graph of all projects in the monorepo and returns the root node.
 *
 * Shells out to pnpm, so it must run from inside the workspace with pnpm
 * available on the PATH.
 *
 * @return {Object} The root graph node ({ name, path, ciConfig, dependencies }).
 */
function buildProjectGraph() {
    // Get the root of the monorepo. `pnpm -w root` prints the workspace's
    // node_modules directory, so its parent is the repository root.
    const monorepoRoot = node_path_1.default.join((0, node_child_process_1.execSync)('pnpm -w root', { encoding: 'utf-8' }), '..');
    // PNPM provides us with a flat list of all projects
    // in the workspace and their dependencies.
    const workspace = JSON.parse((0, node_child_process_1.execSync)('pnpm -r list --only-projects --json', { encoding: 'utf-8' }));
    // Start by building an object containing all of the nodes keyed by their project name.
    // This will let us link them together quickly by iterating through the list of
    // dependencies and adding the applicable nodes.
    const nodes = {};
    let rootNode;
    for (const project of workspace) {
        // Use a relative path to the project so that it's easier for us to work
        // with: strip the monorepo root off the front of the absolute path
        // (backslashes are escaped so Windows paths survive the RegExp).
        const projectPath = project.path.replace(new RegExp(`^${monorepoRoot.replace(/\\/g, '\\\\')}${node_path_1.default.sep}?`), '');
        const packageFile = (0, package_file_1.loadPackage)(node_path_1.default.join(project.path, 'package.json'));
        const ciConfig = (0, config_1.parseCIConfig)(packageFile);
        const node = {
            name: project.name,
            path: projectPath,
            ciConfig,
            dependencies: [],
        };
        // The first entry that `pnpm list` returns is the workspace root.
        // This will be the root node of our graph.
        // NOTE(review): an empty workspace would leave rootNode undefined and
        // crash at `rootNode.name` below — confirm pnpm always lists the root.
        if (!rootNode) {
            rootNode = node;
        }
        nodes[project.name] = node;
    }
    // One thing to keep in mind is that, technically, our dependency graph has multiple roots.
    // Each package that has no dependencies is a "root", however, for simplicity, we will
    // add these root packages under the monorepo root in order to have a clean graph.
    // Since the monorepo root has no CI config this won't cause any problems.
    // Track this by recording all of the dependencies and removing them
    // from the rootless list if they are added as a dependency.
    const rootlessDependencies = workspace.map((project) => project.name);
    // Now we can scan through all of the nodes and hook them up to their respective dependency nodes.
    for (const project of workspace) {
        const node = nodes[project.name];
        // NOTE(review): a dependency that is not itself a workspace project
        // would push `undefined` here; `--only-projects` presumably restricts
        // the listed dependencies to workspace projects — confirm.
        if (project.dependencies) {
            for (const dependency in project.dependencies) {
                node.dependencies.push(nodes[dependency]);
            }
        }
        if (project.devDependencies) {
            for (const dependency in project.devDependencies) {
                node.dependencies.push(nodes[dependency]);
            }
        }
        // Mark any dependencies that have a dependent as not being rootless.
        // A rootless dependency is one that nothing depends on.
        for (const dependency of node.dependencies) {
            const index = rootlessDependencies.indexOf(dependency.name);
            if (index > -1) {
                rootlessDependencies.splice(index, 1);
            }
        }
    }
    // Track the rootless dependencies now that we have them.
    for (const rootless of rootlessDependencies) {
        // Don't add the root node as a dependency of itself.
        if (rootless === rootNode.name) {
            continue;
        }
        rootNode.dependencies.push(nodes[rootless]);
    }
    return rootNode;
}
exports.buildProjectGraph = buildProjectGraph;

View File

@ -0,0 +1,168 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.parseTestEnvConfig = void 0;
/**
* External dependencies
*/
const node_http_1 = __importDefault(require("node:http"));
/**
 * Gets all of the available WordPress versions and their associated stability.
 *
 * Queries the WordPress.org stable-check API, which responds with a map of
 * version -> stability string.
 *
 * @return {Promise.<Object>} The response from the WordPress.org API.
 */
function getWordPressVersions() {
    return new Promise((resolve, reject) => {
        // We're going to use the WordPress.org API to get information about available versions of WordPress.
        const request = node_http_1.default.get('http://api.wordpress.org/core/stable-check/1.0/', (response) => {
            // Buffer the body and parse it once the response completes.
            const chunks = [];
            response.on('data', (chunk) => chunks.push(chunk));
            response.on('end', () => resolve(JSON.parse(chunks.join(''))));
        });
        request.on('error', reject);
    });
}
/**
 * Uses the WordPress API to get the download URL to the latest version of an X.X version line. This
 * also accepts "latest-X" to get an offset from the latest version of WordPress.
 *
 * @param {string} wpVersion The version of WordPress to look for.
 * @return {Promise.<string>} The precise WP version download URL.
 * @throws {Error} When no released version matches the requested version line.
 */
function getPreciseWPVersionURL(wpVersion) {
    return __awaiter(this, void 0, void 0, function* () {
        const allVersions = yield getWordPressVersions();
        // If we're requesting a "latest" offset then we need to figure out what version line we're offsetting from.
        const latestSubMatch = wpVersion.match(/^latest(?:-([0-9]+))?$/i);
        if (latestSubMatch) {
            for (const version in allVersions) {
                // Only the entry flagged 'latest' anchors the offset.
                if (allVersions[version] !== 'latest') {
                    continue;
                }
                // We don't care about the patch version because we will use the
                // latest version from the version line below.
                const versionParts = version.match(/^([0-9]+)\.([0-9]+)/);
                // We're going to subtract the offset to figure out the right version.
                let offset = latestSubMatch[1]
                    ? parseInt(latestSubMatch[1], 10)
                    : 0;
                let majorVersion = parseInt(versionParts[1], 10);
                let minorVersion = parseInt(versionParts[2], 10);
                while (offset > 0) {
                    minorVersion--;
                    if (minorVersion < 0) {
                        // NOTE(review): assumes each major line tops out at
                        // minor version 9 — true for WP releases so far, confirm.
                        majorVersion--;
                        minorVersion = 9;
                    }
                    offset--;
                }
                // Set the version that we found in the offset.
                wpVersion = majorVersion + '.' + minorVersion;
            }
        }
        // Scan through all of the versions to find the latest version in the version line.
        let latestVersion = null;
        let latestPatch = -1;
        for (const v in allVersions) {
            // Parse the version so we can make sure we're looking for the latest.
            const matches = v.match(/([0-9]+)\.([0-9]+)(?:\.([0-9]+))?/);
            // We only care about the correct minor version.
            const minor = `${matches[1]}.${matches[2]}`;
            if (minor !== wpVersion) {
                continue;
            }
            // Track the latest version in the line. A missing patch component
            // (e.g. "6.2") counts as patch 0.
            const patch = matches[3] === undefined ? 0 : parseInt(matches[3], 10);
            if (patch > latestPatch) {
                latestPatch = patch;
                latestVersion = v;
            }
        }
        if (!latestVersion) {
            throw new Error(`Unable to find latest version for version line ${wpVersion}.`);
        }
        return `https://wordpress.org/wordpress-${latestVersion}.zip`;
    });
}
/**
 * Parses a display-friendly WordPress version and returns a link to download the given version.
 *
 * Supported inputs: a download URL (returned unchanged), "master"/"trunk",
 * "nightly", "latest", "latest-X" offsets, "X.X" version lines, and
 * "X.X.X" exact versions.
 *
 * @param {string} wpVersion A display-friendly WordPress version.
 * @return {Promise.<string>} A link to download the given version of WordPress.
 */
async function parseWPVersion(wpVersion) {
    // Anything that already looks like a URL is passed through untouched.
    if (wpVersion.match(/[a-z]+:\/\//i)) {
        return wpVersion;
    }
    // Aliases that map directly onto known artifacts.
    const aliases = new Map([
        ['master', 'WordPress/WordPress#master'],
        ['trunk', 'WordPress/WordPress#master'],
        ['nightly', 'https://wordpress.org/nightly-builds/wordpress-latest.zip'],
        ['latest', 'https://wordpress.org/latest.zip'],
    ]);
    if (aliases.has(wpVersion)) {
        return aliases.get(wpVersion);
    }
    // We can also infer X.X.X versions immediately.
    const exact = wpVersion.match(/^([0-9]+)\.([0-9]+)\.([0-9]+)$/);
    if (exact) {
        // Note that X.X.0 versions use a X.X download URL.
        const suffix = exact[3] === '0' ? '' : `.${exact[3]}`;
        return `https://wordpress.org/wordpress-${exact[1]}.${exact[2]}${suffix}.zip`;
    }
    // Since we haven't found a URL yet we're going to use the WordPress.org API to try and infer one.
    return getPreciseWPVersionURL(wpVersion);
}
/**
 * Parses the test environment's configuration and returns any environment variables that
 * should be set for `wp-env`.
 *
 * @param {Object} config The test environment configuration.
 * @return {Promise.<Object>} The environment variables for the test environment.
 * @throws {Error} When the WordPress version cannot be resolved.
 */
async function parseTestEnvConfig(config) {
    const envVars = {};
    // Convert `wp-env` configuration options to environment variables.
    if (config.wpVersion) {
        try {
            envVars.WP_ENV_CORE = await parseWPVersion(config.wpVersion);
        }
        catch (error) {
            // Wrap so callers see which configuration option failed.
            throw new Error(`Failed to parse WP version: ${error.message}.`);
        }
    }
    if (config.phpVersion) {
        envVars.WP_ENV_PHP_VERSION = config.phpVersion;
    }
    return envVars;
}
exports.parseTestEnvConfig = parseTestEnvConfig;

View File

@ -0,0 +1,113 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.acceleratedPrepCommand = void 0;
/**
* External dependencies
*/
const extra_typings_1 = require("@commander-js/extra-typings");
const simple_git_1 = __importDefault(require("simple-git"));
/**
* Internal dependencies
*/
const logger_1 = require("../../../core/logger");
const git_1 = require("../../../core/git");
const repo_1 = require("../../../core/github/repo");
const environment_1 = require("../../../core/environment");
const prep_1 = require("./lib/prep");
exports.acceleratedPrepCommand = new extra_typings_1.Command('accelerated-prep')
    .description('Prep for an accelerated release')
    .argument('<version>', 'Version to bump to use for changelog')
    .argument('<date>', 'Release date to use in changelog')
    .option('-o --owner <owner>', 'Repository owner. Default: woocommerce', 'woocommerce')
    .option('-n --name <name>', 'Repository name. Default: woocommerce', 'woocommerce')
    .option('-b --base <base>', 'Base branch to create the PR against. Default: trunk', 'trunk')
    .option('-d --dry-run', 'Prepare the version bump and log a diff. Do not create a PR or push to branch', false)
    .option('-c --commit-direct-to-base', 'Commit directly to the base branch. Do not create a PR just push directly to base branch', false)
    // Clones a minimal sparse checkout of the repo, adds the Woo header and
    // changelog stub, then either pushes straight to the base branch or
    // opens a pull request against it.
    .action((version, date, options) => __awaiter(void 0, void 0, void 0, function* () {
    const { owner, name, base, dryRun, commitDirectToBase } = options;
    logger_1.Logger.startTask(`Making a temporary clone of '${owner}/${name}'`);
    const source = `github.com/${owner}/${name}`;
    // The token is embedded in the remote URL so pushes can authenticate.
    const token = (0, environment_1.getEnvVar)('GITHUB_TOKEN', true);
    const remote = `https://${owner}:${token}@${source}`;
    // Sparse checkout keeps the clone small: only the listed files are
    // materialized. NOTE(review): the checkout name is hard-coded to
    // 'woocommerce' rather than the --name option — confirm intentional.
    const tmpRepoPath = yield (0, git_1.sparseCheckoutRepoShallow)(remote, 'woocommerce', [
        'plugins/woocommerce/includes/class-woocommerce.php',
        // All that's needed is the line above, but including these here for completeness.
        'plugins/woocommerce/composer.json',
        'plugins/woocommerce/package.json',
        'plugins/woocommerce/readme.txt',
        'plugins/woocommerce/woocommerce.php',
    ]);
    logger_1.Logger.endTask();
    logger_1.Logger.notice(`Temporary clone of '${owner}/${name}' created at ${tmpRepoPath}`);
    // Disable git hooks in the temporary clone; they are not wanted here.
    const git = (0, simple_git_1.default)({
        baseDir: tmpRepoPath,
        config: ['core.hooksPath=/dev/null'],
    });
    const branch = `prep/${base}-accelerated`;
    try {
        if (commitDirectToBase) {
            // Direct commits to trunk are never allowed; a PR is required.
            if (base === 'trunk') {
                logger_1.Logger.error(`The --commit-direct-to-base option cannot be used with the trunk branch as a base. A pull request must be created instead.`);
            }
            logger_1.Logger.notice(`Checking out ${base}`);
            yield (0, git_1.checkoutRemoteBranch)(tmpRepoPath, base);
        }
        else {
            // Refuse to clobber an existing prep branch on the remote.
            const exists = yield git.raw('ls-remote', 'origin', branch);
            if (!dryRun && exists.trim().length > 0) {
                logger_1.Logger.error(`Branch ${branch} already exists. Run \`git push <remote> --delete ${branch}\` and rerun this command.`);
            }
            if (base !== 'trunk') {
                // if the base is not trunk, we need to checkout the base branch first before creating a new branch.
                logger_1.Logger.notice(`Checking out ${base}`);
                yield (0, git_1.checkoutRemoteBranch)(tmpRepoPath, base);
            }
            logger_1.Logger.notice(`Creating new branch ${branch}`);
            yield git.checkoutBranch(branch, base);
        }
        const workingBranch = commitDirectToBase ? base : branch;
        logger_1.Logger.notice(`Adding Woo header to main plugin file and creating changelog.txt on ${workingBranch} branch`);
        // NOTE(review): addHeader/createChangelog are async but not awaited
        // here, so the `git.add('.')` below may race their writes — confirm.
        (0, prep_1.addHeader)(tmpRepoPath);
        (0, prep_1.createChangelog)(tmpRepoPath, version, date);
        if (dryRun) {
            // Show which files would change, then bail out before committing.
            const diff = yield git.diffSummary();
            logger_1.Logger.notice(`The prep has been completed in the following files:`);
            logger_1.Logger.warn(diff.files.map((f) => f.file).join('\n'));
            logger_1.Logger.notice('Dry run complete. No pull was request created nor was a commit made.');
            return;
        }
        logger_1.Logger.notice('Adding and committing changes');
        yield git.add('.');
        yield git.commit(`Add Woo header to main plugin file and create changelog in ${base}`);
        logger_1.Logger.notice(`Pushing ${workingBranch} branch to Github`);
        yield git.push('origin', workingBranch);
        if (!commitDirectToBase) {
            logger_1.Logger.startTask('Creating a pull request');
            const pullRequest = yield (0, repo_1.createPullRequest)({
                owner,
                name,
                title: `Add Woo header to main plugin file and create changelog in ${base}`,
                body: `This PR adds the Woo header to the main plugin file and creates a changelog.txt file in ${base}.`,
                head: branch,
                base,
            });
            logger_1.Logger.notice(`Pull request created: ${pullRequest.html_url}`);
            logger_1.Logger.endTask();
        }
    }
    catch (error) {
        logger_1.Logger.error(error);
    }
}));

View File

@ -0,0 +1,60 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.createChangelog = exports.addHeader = void 0;
/**
* External dependencies
*/
const promises_1 = require("fs/promises");
const path_1 = require("path");
/**
* Internal dependencies
*/
const logger_1 = require("../../../../core/logger");
/**
 * Add Woo header to main plugin file.
 *
 * Failures are logged rather than thrown so the release prep can continue.
 *
 * @param tmpRepoPath cloned repo path
 */
const addHeader = async (tmpRepoPath) => {
    const pluginFile = (0, path_1.join)(tmpRepoPath, 'plugins/woocommerce/woocommerce.php');
    try {
        const contents = await (0, promises_1.readFile)(pluginFile, 'utf8');
        // Splice the Woo marketplace ID into the plugin header block.
        const updated = contents.replace(' * @package WooCommerce\n */', ' *\n * Woo: 18734002369816:624a1b9ba2fe66bb06d84bcdd401c6a6\n *\n * @package WooCommerce\n */');
        await (0, promises_1.writeFile)(pluginFile, updated);
    }
    catch (e) {
        logger_1.Logger.error(e);
    }
};
exports.addHeader = addHeader;
/**
* Create changelog file.
*
* @param tmpRepoPath cloned repo path
* @param version version for the changelog file
* @param date date of the release (Y-m-d)
*/
const createChangelog = (tmpRepoPath, version, date) => __awaiter(void 0, void 0, void 0, function* () {
    const changelogPath = (0, path_1.join)(tmpRepoPath, 'plugins/woocommerce/changelog.txt');
    try {
        // The template body is intentionally left-aligned so the written file
        // carries no stray indentation.
        const contents = `*** WooCommerce ***
${date} - Version ${version}
* Update - Deploy of WooCommerce ${version}
`;
        yield (0, promises_1.writeFile)(changelogPath, contents);
    }
    catch (e) {
        // Best-effort: log the failure rather than aborting the caller's flow.
        logger_1.Logger.error(e);
    }
});
exports.createChangelog = createChangelog;

View File

@ -0,0 +1,2 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });

View File

@ -0,0 +1,94 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.branchCommand = void 0;
/**
* External dependencies
*/
const extra_typings_1 = require("@commander-js/extra-typings");
const semver_1 = require("semver");
const promptly_1 = require("promptly");
const chalk_1 = __importDefault(require("chalk"));
const ora_1 = __importDefault(require("ora"));
const core_1 = require("@actions/core");
/**
* Internal dependencies
*/
const repo_1 = require("../../../core/github/repo");
const version_1 = require("../../../core/version");
const logger_1 = require("../../../core/logger");
const environment_1 = require("../../../core/environment");
const getNextReleaseBranch = (options) => __awaiter(void 0, void 0, void 0, function* () {
    // Derive the next release branch name (release/X.Y) by incrementing the
    // latest published GitHub release version.
    const latestReleaseVersion = yield (0, repo_1.getLatestGithubReleaseVersion)(options);
    const parsed = (0, semver_1.parse)((0, version_1.WPIncrement)(latestReleaseVersion));
    return `release/${parsed.major}.${parsed.minor}`;
});
// `branch` subcommand: create the next release branch on GitHub, deriving its
// name from the latest release when none is supplied, with interactive
// delete-and-recreate handling for pre-existing branches (outside CI only).
exports.branchCommand = new extra_typings_1.Command('branch')
    .description('Create a new release branch')
    .option('-d --dryRun', 'Prepare the branch but do not create it.')
    .option('-o --owner <owner>', 'Repository owner. Default: woocommerce', 'woocommerce')
    .option('-n --name <name>', 'Repository name. Default: woocommerce', 'woocommerce')
    .option('-b --branch <branch>', 'Release branch to create. The branch will be determined from Github if none is supplied')
    .option('-s --source <source>', 'Branch to create the release branch from. Default: trunk', 'trunk')
    .action((options) => __awaiter(void 0, void 0, void 0, function* () {
    const { source, branch, owner, name, dryRun } = options;
    const isGithub = (0, environment_1.isGithubCI)();
    let nextReleaseBranch;
    if (!branch) {
        // No branch supplied: derive release/X.Y from the latest GitHub release.
        const versionSpinner = (0, ora_1.default)(chalk_1.default.yellow('No branch supplied, going off the latest release version')).start();
        nextReleaseBranch = yield getNextReleaseBranch(options);
        logger_1.Logger.warn(`The next release branch is ${nextReleaseBranch}`);
        versionSpinner.succeed();
    }
    else {
        nextReleaseBranch = branch;
    }
    const branchSpinner = (0, ora_1.default)(chalk_1.default.yellow(`Check to see if branch ${nextReleaseBranch} exists on ${owner}/${name}`)).start();
    const branchExists = yield (0, repo_1.doesGithubBranchExist)(options, nextReleaseBranch);
    branchSpinner.succeed();
    if (branchExists) {
        if (isGithub) {
            logger_1.Logger.error(`Release branch ${nextReleaseBranch} already exists`);
            // When in Github Actions, we don't want to prompt the user for input.
            // NOTE(review): exits 0 despite the error above — confirm the CI job
            // is intentionally kept green when the branch already exists.
            process.exit(0);
        }
        const deleteExistingReleaseBranch = yield (0, promptly_1.confirm)(chalk_1.default.yellow(`Release branch ${nextReleaseBranch} already exists on ${owner}/${name}, do you want to delete it and create a new one from ${source}? [y/n]`));
        if (deleteExistingReleaseBranch) {
            // Deletion is skipped on dry runs; the recreate below is skipped too.
            if (!dryRun) {
                const deleteBranchSpinner = (0, ora_1.default)(chalk_1.default.yellow(`Delete branch ${nextReleaseBranch} on ${owner}/${name} and create new one from ${source}`)).start();
                yield (0, repo_1.deleteGithubBranch)(options, nextReleaseBranch);
                deleteBranchSpinner.succeed();
            }
        }
        else {
            logger_1.Logger.notice(`Branch ${nextReleaseBranch} already exist on ${owner}/${name}, no action taken.`);
            process.exit(0);
        }
    }
    const createBranchSpinner = (0, ora_1.default)(chalk_1.default.yellow(`Create branch ${nextReleaseBranch}`)).start();
    if (dryRun) {
        createBranchSpinner.succeed();
        logger_1.Logger.notice(`DRY RUN: Skipping actual creation of branch ${nextReleaseBranch} on ${owner}/${name}`);
        process.exit(0);
    }
    // Branch from the tip of `source` (default: trunk).
    const ref = yield (0, repo_1.getRefFromGithubBranch)(options, source);
    yield (0, repo_1.createGithubBranch)(options, nextReleaseBranch, ref);
    createBranchSpinner.succeed();
    if (isGithub) {
        // Expose the branch name to downstream workflow steps.
        (0, core_1.setOutput)('nextReleaseBranch', nextReleaseBranch);
    }
    logger_1.Logger.notice(`Branch ${nextReleaseBranch} successfully created on ${owner}/${name}`);
}));

View File

@ -0,0 +1,58 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.changelogCommand = void 0;
/**
* External dependencies
*/
const extra_typings_1 = require("@commander-js/extra-typings");
const child_process_1 = require("child_process");
/**
* Internal dependencies
*/
const logger_1 = require("../../../core/logger");
const git_1 = require("../../../core/git");
const lib_1 = require("./lib");
// `changelog` subcommand: clone (or reuse) the repo, compile changelog entries
// on the release branch, then remove the consumed change files from trunk —
// each step via PR unless --commit-direct-to-base is set.
exports.changelogCommand = new extra_typings_1.Command('changelog')
    .description('Make changelog pull requests to trunk and release branch')
    .option('-o --owner <owner>', 'Repository owner. Default: woocommerce', 'woocommerce')
    .option('-n --name <name>', 'Repository name. Default: woocommerce', 'woocommerce')
    .option('-d --dev-repo-path <devRepoPath>', 'Path to existing repo. Use this option to avoid cloning a fresh repo for development purposes. Note that using this option assumes dependencies are already installed.')
    .option('-c --commit-direct-to-base', 'Commit directly to the base branch. Do not create a PR just push directly to base branch', false)
    // NOTE(review): '-o' is already used by --owner above (and '-d' by
    // --dev-repo-path) — short-flag collision; confirm commander's behavior
    // here or rename one of the short flags.
    .option('-o, --override <override>', "Time Override: The time to use in checking whether the action should run (default: 'now').", 'now')
    .requiredOption('-v, --version <version>', 'Version to bump to')
    .action((options) => __awaiter(void 0, void 0, void 0, function* () {
    const { owner, name, version, devRepoPath } = options;
    logger_1.Logger.startTask(`Making a temporary clone of '${owner}/${name}'`);
    const cloneOptions = {
        owner: owner ? owner : 'woocommerce',
        name: name ? name : 'woocommerce',
    };
    // Use a supplied path, otherwise do a full clone of the repo, including history so that changelogs can be created with links to PRs.
    const tmpRepoPath = devRepoPath
        ? devRepoPath
        : yield (0, git_1.cloneAuthenticatedRepo)(cloneOptions, false);
    logger_1.Logger.endTask();
    logger_1.Logger.notice(`Temporary clone of '${owner}/${name}' created at ${tmpRepoPath}`);
    // When a devRepoPath is provided, assume that the dependencies are already installed.
    if (!devRepoPath) {
        logger_1.Logger.notice(`Installing dependencies in ${tmpRepoPath}`);
        (0, child_process_1.execSync)('pnpm install --filter woocommerce', {
            cwd: tmpRepoPath,
            stdio: 'inherit',
        });
    }
    const releaseBranch = `release/${version}`;
    // Update the release branch.
    const releaseBranchChanges = yield (0, lib_1.updateReleaseBranchChangelogs)(options, tmpRepoPath, releaseBranch);
    // Update trunk.
    yield (0, lib_1.updateTrunkChangelog)(options, tmpRepoPath, releaseBranch, releaseBranchChanges);
}));

View File

@ -0,0 +1,173 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.updateTrunkChangelog = exports.updateReleaseBranchChangelogs = void 0;
/**
* External dependencies
*/
const simple_git_1 = __importDefault(require("simple-git"));
const child_process_1 = require("child_process");
const promises_1 = require("fs/promises");
const path_1 = __importDefault(require("path"));
/**
* Internal dependencies
*/
const logger_1 = require("../../../../core/logger");
const git_1 = require("../../../../core/git");
const repo_1 = require("../../../../core/github/repo");
const lib_1 = require("../../get-version/lib");
/**
* Perform changelog adjustments after Jetpack Changelogger has run.
*
* @param {string} override Time override.
* @param {string} tmpRepoPath Path where the temporary repo is cloned.
*/
const updateReleaseChangelogs = (override, tmpRepoPath) => __awaiter(void 0, void 0, void 0, function* () {
    // Release date = "today" (or the override) plus the freeze-to-release gap.
    const today = (0, lib_1.getToday)(override);
    const releaseTime = today.plus({
        days: lib_1.DAYS_BETWEEN_CODE_FREEZE_AND_RELEASE,
    });
    const releaseDate = releaseTime.toISODate();
    const readmeFile = path_1.default.join(tmpRepoPath, 'plugins', 'woocommerce', 'readme.txt');
    const nextLogFile = path_1.default.join(tmpRepoPath, 'plugins', 'woocommerce', 'NEXT_CHANGELOG.md');
    let readme = yield (0, promises_1.readFile)(readmeFile, 'utf-8');
    let nextLog = yield (0, promises_1.readFile)(nextLogFile, 'utf-8');
    // Stamp the placeholder date in the version heading (e.g. "= 8.1.0 YYYY-mm-dd =").
    nextLog = nextLog.replace(/= (\d+\.\d+\.\d+) YYYY-mm-dd =/, `= $1 ${releaseDate} =`);
    // Convert PR number to markdown link.
    nextLog = nextLog.replace(/\[#(\d+)\](?!\()/g, '[#$1](https://github.com/woocommerce/woocommerce/pull/$1)');
    // Splice the finished entry into readme.txt's changelog section, replacing any
    // prior content up to the trailing "[See changelog for all versions]" link.
    readme = readme.replace(/== Changelog ==\n(.*?)\[See changelog for all versions\]/s, `== Changelog ==\n\n${nextLog}\n\n[See changelog for all versions]`);
    yield (0, promises_1.writeFile)(readmeFile, readme);
});
/**
* Perform changelog operations on release branch by submitting a pull request. The release branch is a remote branch.
*
* @param {Object} options CLI options
* @param {string} tmpRepoPath temp repo path
* @param {string} releaseBranch release branch name. The release branch is a remote branch on Github.
* @return {Object} update data
*/
const updateReleaseBranchChangelogs = (options, tmpRepoPath, releaseBranch) => __awaiter(void 0, void 0, void 0, function* () {
    const { owner, name, version, commitDirectToBase } = options;
    try {
        // Do a full checkout so that we can find the correct PR numbers for changelog entries.
        yield (0, git_1.checkoutRemoteBranch)(tmpRepoPath, releaseBranch, false);
    }
    catch (e) {
        if (e.message.includes("couldn't find remote ref")) {
            logger_1.Logger.error(`${releaseBranch} does not exist on ${owner}/${name}.`);
        }
        // NOTE(review): control falls through this catch — presumably
        // Logger.error terminates the process; confirm, otherwise the git
        // operations below would run against a failed checkout.
        logger_1.Logger.error(e);
    }
    // Disable git hooks in the temp clone so commits are not blocked or mutated.
    const git = (0, simple_git_1.default)({
        baseDir: tmpRepoPath,
        config: ['core.hooksPath=/dev/null'],
    });
    const branch = `update/${version}-changelog`;
    try {
        // Work on a dedicated PR branch unless committing straight to the release branch.
        if (!commitDirectToBase) {
            yield git.checkout({
                '-b': null,
                [branch]: null,
            });
        }
        logger_1.Logger.notice(`Running the changelog script in ${tmpRepoPath}`);
        (0, child_process_1.execSync)(`pnpm --filter=@woocommerce/plugin-woocommerce changelog write --add-pr-num -n -vvv --use-version ${version}`, {
            cwd: tmpRepoPath,
            stdio: 'inherit',
        });
        logger_1.Logger.notice(`Committing deleted files in ${tmpRepoPath}`);
        //Checkout pnpm-lock.yaml to prevent issues in case of an out of date lockfile.
        yield git.checkout('pnpm-lock.yaml');
        yield git.add('plugins/woocommerce/changelog/');
        yield git.commit(`Delete changelog files from ${version} release`);
        // Record this commit so updateTrunkChangelog can cherry-pick the same
        // deletions onto trunk.
        const deletionCommitHash = yield git.raw(['rev-parse', 'HEAD']);
        logger_1.Logger.notice(`git deletion hash: ${deletionCommitHash}`);
        logger_1.Logger.notice(`Updating readme.txt in ${tmpRepoPath}`);
        yield updateReleaseChangelogs(options.override, tmpRepoPath);
        logger_1.Logger.notice(`Committing readme.txt changes in ${branch} on ${tmpRepoPath}`);
        yield git.add('plugins/woocommerce/readme.txt');
        yield git.commit(`Update the readme files for the ${version} release`);
        yield git.push('origin', commitDirectToBase ? releaseBranch : branch);
        // Discard any remaining local modifications before later branch switches.
        yield git.checkout('.');
        if (commitDirectToBase) {
            logger_1.Logger.notice(`Changelog update was committed directly to ${releaseBranch}`);
            // prNumber of -1 signals "no PR was opened" to the caller.
            return {
                deletionCommitHash: deletionCommitHash.trim(),
                prNumber: -1,
            };
        }
        logger_1.Logger.notice(`Creating PR for ${branch}`);
        const pullRequest = yield (0, repo_1.createPullRequest)({
            owner,
            name,
            title: `Release: Prepare the changelog for ${version}`,
            body: `This pull request was automatically generated during the code freeze to prepare the changelog for ${version}`,
            head: branch,
            base: releaseBranch,
        });
        logger_1.Logger.notice(`Pull request created: ${pullRequest.html_url}`);
        return {
            deletionCommitHash: deletionCommitHash.trim(),
            prNumber: pullRequest.number,
        };
    }
    catch (e) {
        // NOTE(review): on failure this logs and implicitly returns undefined,
        // which the caller destructures — confirm Logger.error exits the process.
        logger_1.Logger.error(e);
    }
});
exports.updateReleaseBranchChangelogs = updateReleaseBranchChangelogs;
/**
* Perform changelog operations on trunk by submitting a pull request.
*
* @param {Object} options CLI options
* @param {string} tmpRepoPath temp repo path
* @param {string} releaseBranch release branch name
* @param {Object} releaseBranchChanges update data from updateReleaseBranchChangelogs
* @param {Object} releaseBranchChanges.deletionCommitHash commit from the changelog deletions in updateReleaseBranchChangelogs
* @param {Object} releaseBranchChanges.prNumber pr number created in updateReleaseBranchChangelogs
*/
const updateTrunkChangelog = (options, tmpRepoPath, releaseBranch, releaseBranchChanges) => __awaiter(void 0, void 0, void 0, function* () {
    const { owner, name, version } = options;
    const { deletionCommitHash, prNumber } = releaseBranchChanges;
    logger_1.Logger.notice(`Deleting changelogs from trunk ${tmpRepoPath}`);
    // Disable git hooks in the temp clone so commits are not blocked or mutated.
    const git = (0, simple_git_1.default)({
        baseDir: tmpRepoPath,
        config: ['core.hooksPath=/dev/null'],
    });
    try {
        yield git.checkout('trunk');
        const branch = `delete/${version}-changelog`;
        logger_1.Logger.notice(`Committing deletions in ${branch} on ${tmpRepoPath}`);
        yield git.checkout({
            '-b': null,
            [branch]: null,
        });
        // Re-apply the exact changelog deletions made on the release branch.
        yield git.raw(['cherry-pick', deletionCommitHash]);
        yield git.push('origin', branch);
        logger_1.Logger.notice(`Creating PR for ${branch}`);
        // prNumber <= 0 means the release-branch update was committed directly
        // (no PR), so the body omits the PR reference.
        const pullRequest = yield (0, repo_1.createPullRequest)({
            owner,
            name,
            title: `Release: Remove ${version} change files`,
            body: `This pull request was automatically generated during the code freeze to remove the changefiles from ${version} that are compiled into the \`${releaseBranch}\` ${prNumber > 0 ? `branch via #${prNumber}` : ''}`,
            head: branch,
            base: 'trunk',
        });
        logger_1.Logger.notice(`Pull request created: ${pullRequest.html_url}`);
    }
    catch (e) {
        // Log and continue; the caller performs no further work after this step.
        logger_1.Logger.error(e);
    }
});
exports.updateTrunkChangelog = updateTrunkChangelog;

View File

@ -0,0 +1,2 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });

View File

@ -0,0 +1,105 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getVersionCommand = void 0;
/**
* External dependencies
*/
const extra_typings_1 = require("@commander-js/extra-typings");
const core_1 = require("@actions/core");
const chalk_1 = __importDefault(require("chalk"));
/**
* Internal dependencies
*/
const logger_1 = require("../../../core/logger");
const environment_1 = require("../../../core/environment");
const index_1 = require("./lib/index");
const getRange = (override, between) => {
    // The range table is human-oriented output and cannot be expressed as
    // GitHub Actions outputs, so refuse the combination up front.
    if ((0, environment_1.isGithubCI)()) {
        logger_1.Logger.error('-b, --between option is not compatible with GitHub CI Output.');
        process.exit(1);
    }
    const start = (0, index_1.getToday)(override);
    const end = (0, index_1.getToday)(between);
    const versions = (0, index_1.getVersionsBetween)(start, end);
    logger_1.Logger.notice(chalk_1.default.greenBright.bold(`Releases Between ${start.toFormat('DDDD')} and ${end.toFormat('DDDD')}\n`));
    // DateTime cells are formatted; any non-date cell (the version) passes through.
    const rows = versions.map((v) => Object.values(v).map((d) => typeof d.toFormat === 'function'
        ? d.toFormat('EEE, MMM dd, yyyy')
        : d));
    logger_1.Logger.table(['Version', 'Development Begins', 'Freeze', 'Release'], rows);
    process.exit(0);
};
// `get-version` subcommand: print the release calendar (accelerated + monthly
// cycles) for a date, or a range of releases with -b; emits machine-readable
// outputs when running under GitHub CI.
exports.getVersionCommand = new extra_typings_1.Command('get-version')
    .description('Get the release calendar for a given date')
    .option('-o, --override <override>', "Time Override: The time to use in checking whether the action should run (default: 'now').", 'now')
    .option('-b, --between <between>', 'When provided, instead of showing a single day, will show a releases in the range of <override> to <end>.')
    .action(({ override, between }) => {
    if (between) {
        return getRange(override, between);
    }
    const today = (0, index_1.getToday)(override);
    const acceleratedRelease = (0, index_1.getAcceleratedCycle)(today, false);
    const acceleratedDevelopment = (0, index_1.getAcceleratedCycle)(today);
    const monthlyRelease = (0, index_1.getMonthlyCycle)(today, false);
    const monthlyDevelopment = (0, index_1.getMonthlyCycle)(today);
    // Generate human-friendly output.
    logger_1.Logger.notice(chalk_1.default.greenBright.bold(`Release Calendar for ${today.toFormat('DDDD')}\n`));
    const table = [];
    // We're not in a release cycle on Wednesday. (luxon weekday 3 = Wednesday.)
    if (today.get('weekday') !== 3) {
        table.push([
            `${chalk_1.default.red('Accelerated Release Cycle')}`,
            acceleratedRelease.version,
            acceleratedRelease.begin.toFormat('EEE, MMM dd, yyyy'),
            acceleratedRelease.freeze.toFormat('EEE, MMM dd, yyyy'),
            acceleratedRelease.release.toFormat('EEE, MMM dd, yyyy'),
        ]);
    }
    table.push([
        `${chalk_1.default.red('Accelerated Development Cycle')}`,
        acceleratedDevelopment.version,
        acceleratedDevelopment.begin.toFormat('EEE, MMM dd, yyyy'),
        acceleratedDevelopment.freeze.toFormat('EEE, MMM dd, yyyy'),
        acceleratedDevelopment.release.toFormat('EEE, MMM dd, yyyy'),
    ]);
    // We're only in a release cycle if it is after the freeze day.
    if (today > monthlyRelease.freeze) {
        table.push([
            `${chalk_1.default.red('Monthly Release Cycle')}`,
            monthlyRelease.version,
            monthlyRelease.begin.toFormat('EEE, MMM dd, yyyy'),
            monthlyRelease.freeze.toFormat('EEE, MMM dd, yyyy'),
            monthlyRelease.release.toFormat('EEE, MMM dd, yyyy'),
        ]);
    }
    table.push([
        `${chalk_1.default.red('Monthly Development Cycle')}`,
        monthlyDevelopment.version,
        monthlyDevelopment.begin.toFormat('EEE, MMM dd, yyyy'),
        monthlyDevelopment.freeze.toFormat('EEE, MMM dd, yyyy'),
        monthlyDevelopment.release.toFormat('EEE, MMM dd, yyyy'),
    ]);
    logger_1.Logger.table(['', 'Version', 'Development Begins', 'Freeze', 'Release'], table);
    if ((0, environment_1.isGithubCI)()) {
        // For the machines.
        // NOTE(review): luxon weekday 4 is Thursday, while getAcceleratedCycle
        // computes Wednesday freezes — confirm this flag is intentional.
        const isTodayAcceleratedFreeze = today.get('weekday') === 4;
        const isTodayMonthlyFreeze = +today === +monthlyDevelopment.begin;
        // e.g. "8.5" from "8.5.0".
        const monthlyVersionXY = monthlyRelease.version.substr(0, monthlyRelease.version.lastIndexOf('.'));
        (0, core_1.setOutput)('isTodayAcceleratedFreeze', isTodayAcceleratedFreeze ? 'yes' : 'no');
        (0, core_1.setOutput)('isTodayMonthlyFreeze', isTodayMonthlyFreeze ? 'yes' : 'no');
        (0, core_1.setOutput)('acceleratedVersion', acceleratedRelease.version);
        (0, core_1.setOutput)('monthlyVersion', monthlyRelease.version);
        (0, core_1.setOutput)('monthlyVersionXY', monthlyVersionXY);
        // Collect only the cycles whose freeze is today into a JSON list.
        (0, core_1.setOutput)('releasesFrozenToday', JSON.stringify(Object.values(Object.assign(Object.assign({}, (isTodayMonthlyFreeze && {
            monthlyVersion: `${monthlyRelease.version} (Monthly)`,
        })), (isTodayAcceleratedFreeze && {
            aVersion: `${acceleratedRelease.version} (AF)`,
        })))));
        (0, core_1.setOutput)('acceleratedBranch', `release/${acceleratedRelease.version}`);
        (0, core_1.setOutput)('monthlyBranch', `release/${monthlyVersionXY}`);
        (0, core_1.setOutput)('monthlyMilestone', monthlyDevelopment.version);
        (0, core_1.setOutput)('acceleratedReleaseDate', acceleratedRelease.release.toISODate());
    }
    process.exit(0);
});

View File

@ -0,0 +1,129 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getVersionsBetween = exports.getAcceleratedCycle = exports.getMonthlyCycle = exports.getSecondTuesday = exports.getToday = exports.DAYS_BETWEEN_CODE_FREEZE_AND_RELEASE = void 0;
/**
* External dependencies
*/
const luxon_1 = require("luxon");
exports.DAYS_BETWEEN_CODE_FREEZE_AND_RELEASE = 19;
/**
* Get a DateTime object of now or the override time when specified. DateTime is normalized to start of day.
*
* @param {string} now The time to use in checking if today is the day of the code freeze. Default to now. Supports ISO formatted dates or 'now'.
*
* @return {DateTime} The DateTime object of now or the override time when specified.
*/
const getToday = (now = 'now') => {
    let parsed;
    if (now === 'now') {
        parsed = luxon_1.DateTime.now().setZone('utc');
    }
    else {
        parsed = luxon_1.DateTime.fromISO(now, { zone: 'utc' });
    }
    // An unparseable override produces an invalid DateTime whose millis are NaN.
    if (isNaN(parsed.toMillis())) {
        throw new Error('Invalid date: Check the override parameter (-o, --override) is a correct ISO formatted string or "now"');
    }
    // Normalize to midnight UTC so date comparisons ignore the time of day.
    return parsed.set({ hour: 0, minute: 0, second: 0, millisecond: 0 });
};
exports.getToday = getToday;
/**
* Get the second Tuesday of the month, given a DateTime.
*
* @param {DateTime} when A DateTime object.
*
* @return {DateTime} The second Tuesday of the month contained in the input.
*/
const getSecondTuesday = (when) => {
    const year = when.get('year');
    const month = when.get('month');
    const firstWeekday = luxon_1.DateTime.utc(year, month, 1).get('weekday');
    // luxon weekdays: Mon=1 … Sun=7. When the month opens on Mon/Tue the first
    // Tuesday lands in the opening week (second Tuesday = 10 - weekday);
    // otherwise it slips a week later (17 - weekday).
    const dayOfMonth = firstWeekday <= 2 ? 10 - firstWeekday : 17 - firstWeekday;
    return luxon_1.DateTime.utc(year, month, dayOfMonth);
};
exports.getSecondTuesday = getSecondTuesday;
/**
 * Get version and all dates related to a monthly cycle.
 *
 * @param {DateTime} when A DateTime object.
 * @param {boolean} development When true, the active development cycle is returned, otherwise the active release cycle.
 * @return {Object} An object containing version, begin, freeze and release dates.
 */
const getMonthlyCycle = (when, development = true) => {
    // July 12, 2023 is the start-point for 8.0.0, all versions follow that starting point.
    const startTime = luxon_1.DateTime.fromObject({
        year: 2023,
        month: 7,
        day: 12,
        hour: 0,
        minute: 0,
    }, { zone: 'UTC' });
    const currentMonthRelease = (0, exports.getSecondTuesday)(when);
    const nextMonthRelease = (0, exports.getSecondTuesday)(currentMonthRelease.plus({ months: 1 }));
    // The release for `when` is this month's second Tuesday unless it already passed.
    const release = when <= currentMonthRelease ? currentMonthRelease : nextMonthRelease;
    const previousRelease = (0, exports.getSecondTuesday)(release.minus({ days: exports.DAYS_BETWEEN_CODE_FREEZE_AND_RELEASE + 2 }));
    const nextRelease = (0, exports.getSecondTuesday)(release.plus({ months: 1 }));
    const freeze = release.minus({
        days: exports.DAYS_BETWEEN_CODE_FREEZE_AND_RELEASE + 1,
    });
    // Version advances 0.1 per month elapsed since the 8.0 anchor release.
    const monthNumber = (previousRelease.get('year') - startTime.get('year')) * 12 +
        previousRelease.get('month') -
        startTime.get('month');
    const version = ((80 + monthNumber) / 10).toFixed(1) + '.0';
    if (development) {
        // Past the freeze, development has rolled over to the next month's cycle.
        if (when > freeze) {
            return (0, exports.getMonthlyCycle)(nextRelease, false);
        }
    }
    const begin = previousRelease.minus({
        days: exports.DAYS_BETWEEN_CODE_FREEZE_AND_RELEASE,
    });
    return {
        version,
        begin,
        freeze,
        release,
    };
};
exports.getMonthlyCycle = getMonthlyCycle;
/**
* Get version and all dates / related to an accelerated cycle.
*
* @param {DateTime} when A DateTime object.
* @param {boolean} development When true, the active development cycle will be returned, otherwise the active release cycle.
* @return {Object} An object containing version and dates for a release.
*/
const getAcceleratedCycle = (when, development = true) => {
    if (!development) {
        // The release cycle in flight began a week before the development one.
        when = when.minus({ week: 1 });
    }
    // luxon weekdays: Mon=1 … Sun=7; accelerated freezes land on Wednesdays (3).
    const dayOfWeek = when.get('weekday');
    const daysTilWednesday = dayOfWeek < 4 ? 3 - dayOfWeek : 10 - dayOfWeek;
    const freeze = when.plus({ days: daysTilWednesday });
    const lastAccelerated = freeze.minus({ days: 1 });
    const release = freeze.plus({ days: 6 });
    const begin = freeze.minus({ days: 6 });
    // Anchor the accelerated version onto the surrounding monthly cycle.
    const currentMonthRelease = (0, exports.getSecondTuesday)(lastAccelerated);
    const nextMonthRelease = (0, exports.getSecondTuesday)(currentMonthRelease.plus({ months: 1 }));
    const monthlyRelease = freeze <= currentMonthRelease ? currentMonthRelease : nextMonthRelease;
    const monthlyCycle = (0, exports.getMonthlyCycle)(monthlyRelease, false);
    const previousMonthlyRelease = (0, exports.getSecondTuesday)(monthlyRelease.minus({ days: 28 }));
    // Patch component grows by 10 per week elapsed since the previous monthly release.
    const aVersion = 10 *
        (lastAccelerated.diff(previousMonthlyRelease, 'weeks').toObject()
            .weeks +
            1);
    const version = `${monthlyCycle.version}.${aVersion}`;
    return {
        version,
        begin,
        freeze,
        release,
    };
};
exports.getAcceleratedCycle = getAcceleratedCycle;
const getVersionsBetween = (start, end) => {
    // Accept the bounds in either order.
    if (start > end) {
        return (0, exports.getVersionsBetween)(end, start);
    }
    // Keyed by version string so overlapping samples de-duplicate naturally.
    const byVersion = {};
    // Monthly cycles: sample every 28 days across the range.
    for (let cursor = start; cursor < end; cursor = cursor.plus({ days: 28 })) {
        const cycle = (0, exports.getMonthlyCycle)(cursor, false);
        byVersion[cycle.version] = cycle;
    }
    // Accelerated cycles: sample every 7 days across the range.
    for (let cursor = start; cursor < end; cursor = cursor.plus({ days: 7 })) {
        const cycle = (0, exports.getAcceleratedCycle)(cursor, false);
        byVersion[cycle.version] = cycle;
    }
    return Object.values(byVersion);
};
exports.getVersionsBetween = getVersionsBetween;

View File

@ -0,0 +1,24 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
* External dependencies
*/
const extra_typings_1 = require("@commander-js/extra-typings");
/**
* Internal dependencies
*/
const get_version_1 = require("./get-version");
const milestone_1 = require("./milestone");
const branch_1 = require("./branch");
const version_bump_1 = require("./version-bump");
const changelog_1 = require("./changelog");
const accelerated_prep_1 = require("./accelerated-prep");
// Top-level `code-freeze` command grouping all release-automation subcommands.
const program = new extra_typings_1.Command('code-freeze')
    .description('Code freeze utilities')
    .addCommand(get_version_1.getVersionCommand)
    .addCommand(milestone_1.milestoneCommand)
    .addCommand(branch_1.branchCommand)
    .addCommand(version_bump_1.versionBumpCommand)
    .addCommand(changelog_1.changelogCommand)
    .addCommand(accelerated_prep_1.acceleratedPrepCommand);
exports.default = program;

View File

@ -0,0 +1,80 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.milestoneCommand = void 0;
/**
* External dependencies
*/
const extra_typings_1 = require("@commander-js/extra-typings");
const ora_1 = __importDefault(require("ora"));
/**
* Internal dependencies
*/
const repo_1 = require("../../../core/github/repo");
const api_1 = require("../../../core/github/api");
const version_1 = require("../../../core/version");
const logger_1 = require("../../../core/logger");
// `milestone` subcommand: create the next GitHub milestone, deriving its name
// from the latest release when not supplied; tolerates pre-existing milestones.
exports.milestoneCommand = new extra_typings_1.Command('milestone')
    .description('Create a milestone')
    .option('-d --dryRun', 'Prepare the milestone but do not create it.')
    .option('-o --owner <owner>', 'Repository owner. Default: woocommerce', 'woocommerce')
    .option('-n --name <name>', 'Repository name. Default: woocommerce', 'woocommerce')
    .option('-m --milestone <milestone>', 'Milestone to create. Next milestone is gathered from Github if none is supplied')
    .action((options) => __awaiter(void 0, void 0, void 0, function* () {
    var _a;
    const { owner, name, dryRun, milestone } = options;
    let nextMilestone;
    let nextReleaseVersion;
    if (milestone) {
        logger_1.Logger.warn(`Manually creating milestone ${milestone} in ${owner}/${name}`);
        nextMilestone = milestone;
    }
    else {
        // Derive the milestone: latest release +1 is the next release; +2 is the milestone.
        const versionSpinner = (0, ora_1.default)('No milestone supplied, going off the latest release version').start();
        const latestReleaseVersion = yield (0, repo_1.getLatestGithubReleaseVersion)(options);
        versionSpinner.succeed();
        nextReleaseVersion = (0, version_1.WPIncrement)(latestReleaseVersion);
        nextMilestone = (0, version_1.WPIncrement)(nextReleaseVersion);
        logger_1.Logger.warn(`The latest release in ${owner}/${name} is version: ${latestReleaseVersion}`);
        logger_1.Logger.warn(`The next release in ${owner}/${name} will be version: ${nextReleaseVersion}`);
        logger_1.Logger.warn(`The next milestone in ${owner}/${name} will be: ${nextMilestone}`);
    }
    const milestoneSpinner = (0, ora_1.default)(`Creating a ${nextMilestone} milestone`).start();
    if (dryRun) {
        milestoneSpinner.succeed();
        logger_1.Logger.notice(`DRY RUN: Skipping actual creation of milestone ${nextMilestone}`);
        process.exit(0);
    }
    try {
        yield (0, api_1.octokitWithAuth)().request(`POST /repos/${owner}/${name}/milestones`, {
            title: nextMilestone,
        });
    }
    catch (e) {
        // A pre-existing milestone is success (exit 0); anything else is a hard failure.
        const milestoneAlreadyExistsError = (_a = e.response.data.errors) === null || _a === void 0 ? void 0 : _a.some((error) => error.code === 'already_exists');
        if (milestoneAlreadyExistsError) {
            milestoneSpinner.succeed();
            logger_1.Logger.notice(`Milestone ${nextMilestone} already exists in ${owner}/${name}`);
            process.exit(0);
        }
        else {
            milestoneSpinner.fail();
            logger_1.Logger.error(`\nFailed to create milestone ${nextMilestone} in ${owner}/${name}`);
            logger_1.Logger.error(e.response.data.message);
            process.exit(1);
        }
    }
    milestoneSpinner.succeed();
    logger_1.Logger.notice(`Successfully created milestone ${nextMilestone} in ${owner}/${name}`);
}));

View File

@ -0,0 +1,29 @@
"use strict";
// TypeScript-emitted helper (generated code): drives an async function body
// expressed as a generator, resolving each yielded value before resuming —
// the downleveled form of async/await. Do not edit by hand.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.bumpFiles = void 0;
/**
* Internal dependencies
*/
const validate_1 = require("./lib/validate");
const update_1 = require("./lib/update");
/**
 * Bump the version across all of the WooCommerce plugin files.
 *
 * The main plugin file receives the version exactly as supplied (including
 * any prerelease suffix); every other file receives the version with
 * prerelease parameters stripped.
 *
 * @param tmpRepoPath cloned repo path
 * @param version     version to bump to
 */
const bumpFiles = async (tmpRepoPath, version) => {
    await (0, update_1.updatePluginFile)(tmpRepoPath, version);
    // Any updated files besides the plugin file get a version stripped of prerelease parameters.
    const stableVersion = (0, validate_1.stripPrereleaseParameters)(version);
    // Bumping the dev version means updating the readme's changelog.
    await (0, update_1.updateReadmeChangelog)(tmpRepoPath, stableVersion);
    await (0, update_1.updateJSON)('composer', tmpRepoPath, stableVersion);
    await (0, update_1.updateJSON)('package', tmpRepoPath, stableVersion);
    await (0, update_1.updateClassPluginFile)(tmpRepoPath, stableVersion);
};
exports.bumpFiles = bumpFiles;

View File

@ -0,0 +1,120 @@
"use strict";
// TypeScript-emitted helper (generated code): drives an async function body
// expressed as a generator — the downleveled form of async/await.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript-emitted helper: wraps CommonJS modules so `default` imports work.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.versionBumpCommand = void 0;
/**
* External dependencies
*/
const extra_typings_1 = require("@commander-js/extra-typings");
const simple_git_1 = __importDefault(require("simple-git"));
/**
* Internal dependencies
*/
const logger_1 = require("../../../core/logger");
const git_1 = require("../../../core/git");
const repo_1 = require("../../../core/github/repo");
const environment_1 = require("../../../core/environment");
const version_1 = require("../../../core/version");
const bump_1 = require("./bump");
const validate_1 = require("./lib/validate");
exports.versionBumpCommand = new extra_typings_1.Command('version-bump')
.description('Bump versions ahead of new development cycle')
.argument('<version>', 'Version to bump to')
.option('-o --owner <owner>', 'Repository owner. Default: woocommerce', 'woocommerce')
.option('-n --name <name>', 'Repository name. Default: woocommerce', 'woocommerce')
.option('-b --base <base>', 'Base branch to create the PR against. Default: trunk', 'trunk')
.option('-d --dry-run', 'Prepare the version bump and log a diff. Do not create a PR or push to branch', false)
.option('-c --commit-direct-to-base', 'Commit directly to the base branch. Do not create a PR just push directly to base branch', false)
.option('-f --force', 'Force a version bump, even when the new version is less than the existing version', false)
.option('-a --allow-accel', 'Allow accelerated versioning. When this option is not present, versions must be semantically correct', false)
.action((version, options) => __awaiter(void 0, void 0, void 0, function* () {
const { owner, name, base, dryRun, commitDirectToBase } = options;
logger_1.Logger.startTask(`Making a temporary clone of '${owner}/${name}'`);
const source = `github.com/${owner}/${name}`;
const token = (0, environment_1.getEnvVar)('GITHUB_TOKEN', true);
const remote = `https://${owner}:${token}@${source}`;
const tmpRepoPath = yield (0, git_1.sparseCheckoutRepoShallow)(remote, 'woocommerce', [
'plugins/woocommerce/includes/class-woocommerce.php',
// All that's needed is the line above, but including these here for completeness.
'plugins/woocommerce/composer.json',
'plugins/woocommerce/package.json',
'plugins/woocommerce/readme.txt',
'plugins/woocommerce/woocommerce.php',
]);
logger_1.Logger.endTask();
logger_1.Logger.notice(`Temporary clone of '${owner}/${name}' created at ${tmpRepoPath}`);
const git = (0, simple_git_1.default)({
baseDir: tmpRepoPath,
config: ['core.hooksPath=/dev/null'],
});
const majorMinor = (0, validate_1.getIsAccelRelease)(version)
? version
: (0, version_1.getMajorMinor)(version);
const branch = `prep/${base}-for-next-dev-cycle-${majorMinor}`;
try {
if (commitDirectToBase) {
if (base === 'trunk') {
logger_1.Logger.error(`The --commit-direct-to-base option cannot be used with the trunk branch as a base. A pull request must be created instead.`);
}
logger_1.Logger.notice(`Checking out ${base}`);
yield (0, git_1.checkoutRemoteBranch)(tmpRepoPath, base);
}
else {
const exists = yield git.raw('ls-remote', 'origin', branch);
if (!dryRun && exists.trim().length > 0) {
logger_1.Logger.error(`Branch ${branch} already exists. Run \`git push <remote> --delete ${branch}\` and rerun this command.`);
}
if (base !== 'trunk') {
// if the base is not trunk, we need to checkout the base branch first before creating a new branch.
logger_1.Logger.notice(`Checking out ${base}`);
yield (0, git_1.checkoutRemoteBranch)(tmpRepoPath, base);
}
logger_1.Logger.notice(`Creating new branch ${branch}`);
yield git.checkoutBranch(branch, base);
}
logger_1.Logger.notice('Validating arguments');
yield (0, validate_1.validateArgs)(tmpRepoPath, version, options);
const workingBranch = commitDirectToBase ? base : branch;
logger_1.Logger.notice(`Bumping versions in ${owner}/${name} on ${workingBranch} branch`);
yield (0, bump_1.bumpFiles)(tmpRepoPath, version);
if (dryRun) {
const diff = yield git.diffSummary();
logger_1.Logger.notice(`The version has been bumped to ${version} in the following files:`);
logger_1.Logger.warn(diff.files.map((f) => f.file).join('\n'));
logger_1.Logger.notice('Dry run complete. No pull was request created nor was a commit made.');
return;
}
logger_1.Logger.notice('Adding and committing changes');
yield git.add('.');
yield git.commit(`Prep ${base} for ${majorMinor} cycle with version bump to ${version}`);
logger_1.Logger.notice(`Pushing ${workingBranch} branch to Github`);
yield git.push('origin', workingBranch);
if (!commitDirectToBase) {
logger_1.Logger.startTask('Creating a pull request');
const pullRequest = yield (0, repo_1.createPullRequest)({
owner,
name,
title: `Prep ${base} for ${majorMinor} cycle`,
body: `This PR updates the versions in ${base} to ${version}.`,
head: branch,
base,
});
logger_1.Logger.notice(`Pull request created: ${pullRequest.html_url}`);
logger_1.Logger.endTask();
}
}
catch (error) {
logger_1.Logger.error(error);
}
}));

View File

@ -0,0 +1,98 @@
"use strict";
// TypeScript-emitted helper (generated code): drives an async function body
// expressed as a generator — the downleveled form of async/await.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.updatePluginFile = exports.updateJSON = exports.updateClassPluginFile = exports.updateReadmeChangelog = void 0;
/**
* External dependencies
*/
const promises_1 = require("fs/promises");
const fs_1 = require("fs");
const path_1 = require("path");
/**
* Internal dependencies
*/
const logger_1 = require("../../../../core/logger");
/**
 * Update plugin readme changelog.
 *
 * Replaces the placeholder changelog heading (`= x.y.z YYYY-XX-XX =`) with
 * the next version and the current year, leaving the day part as `XX-XX`.
 * Read/replace failures are routed through Logger.error.
 *
 * @param tmpRepoPath cloned repo path
 * @param nextVersion version to bump to
 */
const updateReadmeChangelog = async (tmpRepoPath, nextVersion) => {
    const filePath = path_1.join(tmpRepoPath, 'plugins/woocommerce/readme.txt');
    try {
        const contents = await promises_1.readFile(filePath, 'utf8');
        const heading = `= ${nextVersion} ${new Date().getFullYear()}-XX-XX =\n`;
        const updated = contents.replace(/= \d+\.\d+\.\d+ \d\d\d\d-XX-XX =\n/m, heading);
        await promises_1.writeFile(filePath, updated);
    }
    catch (e) {
        logger_1.Logger.error(e);
    }
};
exports.updateReadmeChangelog = updateReadmeChangelog;
/**
 * Update plugin class file.
 *
 * Rewrites the `public $version = 'x.y.z';` property in
 * class-woocommerce.php to the next version.
 *
 * @param tmpRepoPath cloned repo path
 * @param nextVersion version to bump to
 */
const updateClassPluginFile = async (tmpRepoPath, nextVersion) => {
    const filePath = path_1.join(tmpRepoPath, `plugins/woocommerce/includes/class-woocommerce.php`);
    // Logger.error exits the process by default, so a missing file halts the bump here.
    if (!fs_1.existsSync(filePath)) {
        logger_1.Logger.error("File 'class-woocommerce.php' does not exist.");
    }
    try {
        const contents = await promises_1.readFile(filePath, 'utf8');
        const replacement = `public $version = '${nextVersion}';\n`;
        const updated = contents.replace(/public \$version = '\d+\.\d+\.\d+';\n/m, replacement);
        await promises_1.writeFile(filePath, updated);
    }
    catch (e) {
        logger_1.Logger.error(e);
    }
};
exports.updateClassPluginFile = updateClassPluginFile;
/**
 * Update plugin JSON files.
 *
 * Rewrites the `version` field of `plugins/woocommerce/<type>.json`
 * (callers pass 'composer' or 'package').
 *
 * @param {string} type plugin JSON file to update ('composer' or 'package')
 * @param {string} tmpRepoPath cloned repo path
 * @param {string} nextVersion version to bump to
 */
const updateJSON = async (type, tmpRepoPath, nextVersion) => {
    const filePath = path_1.join(tmpRepoPath, `plugins/woocommerce/${type}.json`);
    try {
        const config = JSON.parse(await promises_1.readFile(filePath, 'utf8'));
        config.version = nextVersion;
        // Tab-indented with a trailing newline, matching the repo's JSON formatting.
        await promises_1.writeFile(filePath, JSON.stringify(config, null, '\t') + '\n');
    }
    catch (e) {
        logger_1.Logger.error(e);
    }
};
exports.updateJSON = updateJSON;
/**
 * Update plugin main file.
 *
 * Rewrites the `Version:` header line in woocommerce.php to the next
 * version (any suffix after the three-part version on that line is dropped
 * by the replacement).
 *
 * @param tmpRepoPath cloned repo path
 * @param nextVersion version to bump to
 */
const updatePluginFile = async (tmpRepoPath, nextVersion) => {
    const filePath = path_1.join(tmpRepoPath, `plugins/woocommerce/woocommerce.php`);
    try {
        const contents = await promises_1.readFile(filePath, 'utf8');
        const updated = contents.replace(/Version: \d+\.\d+\.\d+.*\n/m, `Version: ${nextVersion}\n`);
        await promises_1.writeFile(filePath, updated);
    }
    catch (e) {
        logger_1.Logger.error(e);
    }
};
exports.updatePluginFile = updatePluginFile;

View File

@ -0,0 +1,109 @@
"use strict";
// TypeScript-emitted helper (generated code): drives an async function body
// expressed as a generator — the downleveled form of async/await.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.validateArgs = exports.stripPrereleaseParameters = exports.getCurrentVersion = exports.getIsAccelRelease = void 0;
/**
* External dependencies
*/
const semver_1 = require("semver");
const path_1 = require("path");
const promises_1 = require("fs/promises");
/**
* Internal dependencies
*/
const logger_1 = require("../../../../core/logger");
/**
 * Determine whether a version is an accel release.
 *
 * Accel releases use a four-part `a.b.c.d` version number instead of the
 * usual three-part semver.
 *
 * @param {string} version Version number
 * @return {boolean} True if the version corresponds with an accel release, otherwise false
 */
const getIsAccelRelease = (version) => /^(?:\d+\.){3}\d+?$/.test(version);
exports.getIsAccelRelease = getIsAccelRelease;
/**
 * Get a plugin's current version.
 *
 * Reads the `Version:` header out of woocommerce.php.
 *
 * @param tmpRepoPath cloned repo path
 * @return the current version string, or undefined when the header is
 *         missing or the file cannot be read
 */
const getCurrentVersion = async (tmpRepoPath) => {
    const filePath = path_1.join(tmpRepoPath, `plugins/woocommerce/woocommerce.php`);
    try {
        const contents = await promises_1.readFile(filePath, 'utf8');
        const matches = contents.match(/Version:\s*(.*)/);
        return matches ? matches[1] : undefined;
    }
    catch (e) {
        logger_1.Logger.error(e);
    }
};
exports.getCurrentVersion = getCurrentVersion;
/**
 * When given a prerelease version, return just the version.
 *
 * @param {string} prereleaseVersion version with prerelease params
 * @return {string} `major.minor.patch` when the input parses as semver,
 *                  otherwise the input unchanged
 */
const stripPrereleaseParameters = (prereleaseVersion) => {
    const parsed = (0, semver_1.parse)(prereleaseVersion);
    return parsed
        ? `${parsed.major}.${parsed.minor}.${parsed.patch}`
        : prereleaseVersion;
};
exports.stripPrereleaseParameters = stripPrereleaseParameters;
/**
 * Validate the arguments passed to the version bump command.
 *
 * Note: Logger.error exits the process by default, so each failed check
 * below terminates the command immediately.
 *
 * @param tmpRepoPath cloned repo path
 * @param version version to bump to
 * @param options options passed to the command
 */
const validateArgs = (tmpRepoPath, version, options) => __awaiter(void 0, void 0, void 0, function* () {
    const { allowAccel, base, force } = options;
    const nextVersion = version;
    // Accel (four-part) versions are only accepted when --allow-accel was passed.
    const isAllowedAccelRelease = allowAccel && (0, exports.getIsAccelRelease)(nextVersion);
    if (isAllowedAccelRelease) {
        if (base === 'trunk') {
            logger_1.Logger.error(`Version ${nextVersion} is not a development version bump and cannot be applied to trunk, which only accepts development version bumps.`);
        }
    }
    else {
        if (!(0, semver_1.valid)(nextVersion)) {
            logger_1.Logger.error('Invalid version supplied, please pass in a semantically correct version or use the correct option for accel releases.');
        }
        // trunk only accepts versions carrying a `-dev` prerelease tag.
        const prereleaseParameters = (0, semver_1.prerelease)(nextVersion);
        const isDevVersionBump = prereleaseParameters && prereleaseParameters[0] === 'dev';
        if (!isDevVersionBump && base === 'trunk') {
            logger_1.Logger.error(`Version ${nextVersion} is not a development version bump and cannot be applied to trunk, which only accepts development version bumps.`);
        }
    }
    if (force) {
        // When the force option is set, we do not compare currentVersion.
        return;
    }
    const currentVersion = yield (0, exports.getCurrentVersion)(tmpRepoPath);
    if (!currentVersion) {
        logger_1.Logger.error('Unable to determine current version');
    }
    else if ((0, semver_1.lt)(nextVersion, currentVersion)) {
        // Semver thinks -a.1 is less than -dev, but -a.1 from -dev will be a valid version bump.
        if (nextVersion.includes('a.') &&
            currentVersion.includes('dev')) {
            return;
        }
        logger_1.Logger.error('The version supplied is less than the current version, please supply a valid version.');
    }
});
exports.validateArgs = validateArgs;

View File

@ -0,0 +1,2 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });

View File

@ -0,0 +1,23 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
* Internal dependencies
*/
const environment_1 = require("../environment");
// Tests for isGithubCI(), which reports whether the process runs inside a
// GitHub Actions runner (GITHUB_ACTIONS === 'true').
describe('isGithubCI', () => {
    it('should return true if GITHUB_ACTIONS is true', () => {
        process.env.GITHUB_ACTIONS = 'true';
        expect((0, environment_1.isGithubCI)()).toBe(true);
    });
    it('should return false if GITHUB_ACTIONS is false', () => {
        process.env.GITHUB_ACTIONS = 'false';
        expect((0, environment_1.isGithubCI)()).toBe(false);
    });
    it('should return false if GITHUB_ACTIONS is not set', () => {
        // Assigning `undefined` to a process.env key coerces it to the string
        // "undefined" — the variable would still be set. `delete` is the only
        // way to model "not set".
        delete process.env.GITHUB_ACTIONS;
        expect((0, environment_1.isGithubCI)()).toBe(false);
    });
    afterAll(() => {
        delete process.env.GITHUB_ACTIONS;
    });
});

View File

@ -0,0 +1,51 @@
"use strict";
// TypeScript-emitted helper (generated code): wraps CommonJS modules so
// `default` imports work.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
// Silence console.error and stub process.exit so Logger.error can be
// asserted on without noisy output or actually terminating the test process.
jest.spyOn(global.console, 'error').mockImplementation(() => { });
// @ts-expect-error -- We're mocking process exit, it has never return type!
jest.spyOn(global.process, 'exit').mockImplementation(() => { });
/**
* External dependencies
*/
const chalk_1 = __importDefault(require("chalk"));
/**
* Internal dependencies
*/
const logger_1 = require("../logger");
// Tests for Logger.error behavior at different LOGGER_LEVEL settings.
describe('Logger', () => {
    afterEach(() => {
        jest.resetAllMocks();
    });
    describe('error', () => {
        // Set the level in a hook rather than once in the describe body: the
        // silent-mode test below overwrites LOGGER_LEVEL, and without a reset
        // it would leak into any test that runs after it.
        beforeEach(() => {
            process.env.LOGGER_LEVEL = 'error';
        });
        it('should log a message for string messages', () => {
            const message = 'test message';
            logger_1.Logger.error(message);
            expect(global.console.error).toHaveBeenCalledWith(chalk_1.default.red(message));
        });
        it('should log a message for errors', () => {
            const error = new Error('test error');
            logger_1.Logger.error(error);
            expect(global.console.error).toHaveBeenCalledWith(chalk_1.default.red(`${error.message}\n${error.stack}`));
        });
        it('should json stringify for unknown types', () => {
            logger_1.Logger.error({ foo: 'bar' });
            expect(global.console.error).toHaveBeenCalledWith(chalk_1.default.red(JSON.stringify({ foo: 'bar' }, null, 2)));
        });
        it('should call process.exit by default', () => {
            logger_1.Logger.error('test message');
            expect(global.process.exit).toHaveBeenCalledWith(1);
        });
        it('should not call process.exit when failOnErr is false', () => {
            logger_1.Logger.error('test message', false);
            expect(global.process.exit).not.toHaveBeenCalled();
        });
        it('should not log errors if the Logger is in silent mode', () => {
            process.env.LOGGER_LEVEL = 'silent';
            logger_1.Logger.error('test message');
            expect(global.console.error).not.toHaveBeenCalled();
        });
    });
});

View File

@ -0,0 +1,19 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.isGithubCI = exports.getEnvVar = void 0;
/**
* Internal dependencies
*/
const logger_1 = require("./logger");
/**
 * Read an environment variable.
 *
 * @param {string} varName name of the environment variable to read
 * @param {boolean} isRequired when true, a missing variable is reported via
 *                             Logger.error (which exits the process by default)
 * @return {string} the variable's value, or '' when it is unset
 */
const getEnvVar = (varName, isRequired = false) => {
    const value = process.env[varName];
    if (isRequired && value === undefined) {
        logger_1.Logger.error(`You need to provide a value for ${varName} in your environment either via an environment variable or the .env file.`);
    }
    return value ?? '';
};
exports.getEnvVar = getEnvVar;
// True only when running inside GitHub Actions (GITHUB_ACTIONS === 'true').
const isGithubCI = () => process.env.GITHUB_ACTIONS === 'true';
exports.isGithubCI = isGithubCI;

389
tools/monorepo-utils/dist/core/git.js vendored Normal file
View File

@ -0,0 +1,389 @@
"use strict";
// TypeScript-emitted helper (generated code): drives an async function body
// expressed as a generator — the downleveled form of async/await.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.checkoutRemoteBranch = exports.generateDiff = exports.getPullRequestNumberFromHash = exports.getLineCommitHash = exports.getCommitHash = exports.diffHashes = exports.checkoutRef = exports.sparseCheckoutRepoShallow = exports.sparseCheckoutRepo = exports.cloneAuthenticatedRepo = exports.getAuthenticatedRemote = exports.cloneRepoShallow = exports.cloneRepo = exports.getPatches = exports.getStartingLineNumber = exports.getFilename = void 0;
/**
* External dependencies
*/
const child_process_1 = require("child_process");
const path_1 = require("path");
const os_1 = require("os");
const fs_1 = require("fs");
const simple_git_1 = require("simple-git");
const uuid_1 = require("uuid");
const promises_1 = require("fs/promises");
const node_url_1 = require("node:url");
/**
* Internal dependencies
*/
const environment_1 = require("./environment");
/**
 * Get filename from patch
 *
 * Strips the leading `a` and everything from the last whitespace onward,
 * turning an `a/<path> b/<path>` diff header into `/<path>`.
 *
 * @param {string} str String to extract filename from.
 * @return {string} formatted filename.
 */
const getFilename = (str) => str.replace(/^a(.*)\s.*/, '$1');
exports.getFilename = getFilename;
/**
 * Get starting line number from patch
 *
 * Extracts the "+<start>" line number from a unified-diff hunk header such
 * as `@@ -1,5 +10,6 @@`.
 *
 * @param {string} str String to extract starting line number from.
 * @return {number} line number.
 * @throws {Error} when the string is not a parsable hunk header.
 */
const getStartingLineNumber = (str) => {
    const captured = str.replace(/^@@ -\d+,\d+ \+(\d+),\d+ @@.*?$/, '$1');
    // replace() returns the input unchanged when the header regex did not match.
    if (!/^\d+$/.test(captured)) {
        throw new Error('Unable to parse line number from patch');
    }
    return Number.parseInt(captured, 10);
};
exports.getStartingLineNumber = getStartingLineNumber;
/**
 * Get patches
 *
 * Splits a unified diff into its per-file patches and keeps the ones whose
 * text matches the supplied regex.
 *
 * @param {string} content Patch content.
 * @param {RegExp} regex Regex to find specific patches.
 * @return {string[]} Array of patches.
 */
const getPatches = (content, regex) => {
    const patches = content.split('diff --git ');
    const changes = [];
    // for...of iterates the values directly; the previous for...in iterated
    // array *keys* (and any enumerable prototype additions), which is the
    // classic for-in-on-array anti-pattern.
    for (const patch of patches) {
        if (patch.match(regex)) {
            changes.push(patch);
        }
    }
    return changes;
};
exports.getPatches = getPatches;
/**
 * Check if a string is a valid url.
 *
 * @param {string} maybeURL - the URL string to check
 * @return {boolean} whether the string is a valid URL or not.
 */
const isUrl = (maybeURL) => {
    try {
        // The WHATWG URL constructor throws a TypeError on invalid input.
        return Boolean(new node_url_1.URL(maybeURL));
    }
    catch (e) {
        return false;
    }
};
/**
 * Clone a git repository.
 *
 * @param {string} repoPath - the path (either URL or file path) to the repo to clone.
 * @param {TaskOptions} options - options to pass to simple-git.
 * @return {Promise<string>} the path to the cloned repo.
 */
const cloneRepo = (repoPath, options = {}) => __awaiter(void 0, void 0, void 0, function* () {
    // Each clone gets its own uuid-named folder under the OS temp dir so
    // concurrent clones cannot collide. Nothing cleans these up here.
    const folderPath = (0, path_1.join)((0, os_1.tmpdir)(), 'code-analyzer-tmp', (0, uuid_1.v4)());
    (0, fs_1.mkdirSync)(folderPath, { recursive: true });
    const git = (0, simple_git_1.simpleGit)({ baseDir: folderPath });
    yield git.clone(repoPath, folderPath, options);
    // If this is a local clone then the simplest way to maintain remote settings is to copy git config across
    if (!isUrl(repoPath)) {
        (0, child_process_1.execSync)(`cp ${repoPath}/.git/config ${folderPath}/.git/config`);
    }
    // Update the repo.
    yield git.fetch();
    return folderPath;
});
exports.cloneRepo = cloneRepo;
/**
 * Clone a git repository without history.
 *
 * A depth-1 clone fetches only the most recent commit.
 *
 * @param {string} repoPath - the path (either URL or file path) to the repo to clone.
 * @return {Promise<string>} the path to the cloned repo.
 */
const cloneRepoShallow = async (repoPath) => (0, exports.cloneRepo)(repoPath, { '--depth': 1 });
exports.cloneRepoShallow = cloneRepoShallow;
/**
 * Add a remote using the authenticated token `GITHUB_TOKEN`
 *
 * @param {Object} options CLI options
 * @param {string} options.owner repo owner
 * @param {string} options.name repo name
 * @return {string} remote
 */
const getAuthenticatedRemote = ({ owner, name }) => {
    // getEnvVar is called with isRequired=true: a missing token is reported
    // via Logger.error, which exits the process by default.
    const token = (0, environment_1.getEnvVar)('GITHUB_TOKEN', true);
    return `https://${owner}:${token}@github.com/${owner}/${name}`;
};
exports.getAuthenticatedRemote = getAuthenticatedRemote;
/**
 * Clone a repo using the authenticated token `GITHUB_TOKEN`. This allows the script to push branches to origin.
 *
 * @param {Object} options CLI options
 * @param {string} options.owner repo owner
 * @param {string} options.name repo name
 * @param {boolean} isShallow whether to do a shallow clone or not.
 * @return {string} temporary repo path
 */
const cloneAuthenticatedRepo = async (options, isShallow = true) => {
    const remote = (0, exports.getAuthenticatedRemote)(options);
    if (isShallow) {
        return (0, exports.cloneRepoShallow)(remote);
    }
    return (0, exports.cloneRepo)(remote);
};
exports.cloneAuthenticatedRepo = cloneAuthenticatedRepo;
/**
 * Do a minimal sparse checkout of a github repo.
 *
 * @param {string} githubRepoUrl - the URL to the repo to checkout.
 * @param {string} path - the path to checkout to.
 * @param {Array<string>} directories - the files or directories to checkout.
 * @param {string} base - the base branch to checkout from. Defaults to trunk.
 * @param {TaskOptions} options - options to pass to simple-git.
 * @return {Promise<string>} the path to the cloned repo.
 */
const sparseCheckoutRepo = (githubRepoUrl, path, directories, base = 'trunk', options = {}) => __awaiter(void 0, void 0, void 0, function* () {
    const folderPath = (0, path_1.join)((0, os_1.tmpdir)(), path);
    // clean up if it already exists.
    yield (0, promises_1.rm)(folderPath, { recursive: true, force: true });
    yield (0, promises_1.mkdir)(folderPath, { recursive: true });
    const git = (0, simple_git_1.simpleGit)({ baseDir: folderPath });
    // Clone without checking out any files; sparse-checkout below decides
    // what actually lands on disk at the final checkout.
    const cloneOptions = { '--no-checkout': null };
    yield git.clone(githubRepoUrl, folderPath, Object.assign(Object.assign({}, cloneOptions), options));
    yield git.raw('sparse-checkout', 'init', { '--cone': null });
    // NOTE(review): the paths are joined into ONE space-separated argument;
    // confirm git interprets this as multiple patterns rather than a single
    // literal path containing spaces.
    yield git.raw('sparse-checkout', 'set', directories.join(' '));
    yield git.checkout(base);
    return folderPath;
});
exports.sparseCheckoutRepo = sparseCheckoutRepo;
/**
 * Do a minimal sparse checkout of a github repo without history.
 *
 * Delegates to sparseCheckoutRepo with a depth-1 clone.
 *
 * @param {string} githubRepoUrl - the URL to the repo to checkout.
 * @param {string} path - the path to checkout to.
 * @param {Array<string>} directories - the files or directories to checkout.
 * @param {string} base - the base branch to checkout from. Defaults to trunk.
 * @return {Promise<string>} the path to the cloned repo.
 */
const sparseCheckoutRepoShallow = async (githubRepoUrl, path, directories, base = 'trunk') => (0, exports.sparseCheckoutRepo)(githubRepoUrl, path, directories, base, { '--depth': 1 });
exports.sparseCheckoutRepoShallow = sparseCheckoutRepoShallow;
/**
 * checkoutRef - checkout a ref in a git repo.
 *
 * @param {string} pathToRepo - the path to the repo to checkout a ref from.
 * @param {string} ref - the ref to checkout.
 * @return {Response<string>} - the simple-git response.
 */
const checkoutRef = (pathToRepo, ref) => (0, simple_git_1.simpleGit)({
    baseDir: pathToRepo,
    // Neutralise any repo-local git hooks during the checkout.
    config: ['core.hooksPath=/dev/null'],
}).checkout(ref);
exports.checkoutRef = checkoutRef;
/**
 * Do a git diff of 2 commit hashes (or branches)
 *
 * @param {string} baseDir - baseDir that the repo is in
 * @param {string} hashA - either a git commit hash or a git branch
 * @param {string} hashB - either a git commit hash or a git branch
 * @param {Array<string>} excludePaths - A list of paths to exclude from the diff
 * @return {Promise<string>} - diff of the changes between the 2 hashes
 */
const diffHashes = (baseDir, hashA, hashB, excludePaths = []) => {
    const git = (0, simple_git_1.simpleGit)({ baseDir });
    const range = `${hashA}..${hashB}`;
    if (excludePaths.length === 0) {
        return git.diff([range]);
    }
    // `:^<path>` is git pathspec syntax for "everything except <path>".
    const exclusions = excludePaths.map((ps) => `:^${ps}`);
    return git.diff([range, '--', '.', ...exclusions]);
};
exports.diffHashes = diffHashes;
/**
 * Determines if a string is a commit hash or not.
 *
 * Accepts abbreviated (7+) through full (40) hex-digit hashes, case-insensitively.
 *
 * @param {string} ref - the ref to check
 * @return {boolean} whether the ref is a commit hash or not.
 */
const refIsHash = (ref) => /^[0-9a-f]{7,40}$/i.test(ref);
/**
 * Get the commit hash for a ref (either branch or commit hash). If a validly
 * formed hash is provided it is returned unmodified.
 *
 * NOTE(review): abbreviated (7+ char) hashes pass refIsHash and are returned
 * as-is, not expanded to full 40-char hashes.
 *
 * @param {string} baseDir - the dir of the git repo to get the hash from.
 * @param {string} ref - Either a commit hash or a branch name.
 * @return {string} - the commit hash of the ref.
 */
const getCommitHash = (baseDir, ref) => __awaiter(void 0, void 0, void 0, function* () {
    const isHash = refIsHash(ref);
    // check if its in history, if its not an error will be thrown
    try {
        yield (0, simple_git_1.simpleGit)({ baseDir }).show(ref);
    }
    catch (e) {
        throw new Error(`${ref} is not a valid commit hash or branch name that exists in git history`);
    }
    // If its not a hash we assume its a branch
    if (!isHash) {
        return (0, simple_git_1.simpleGit)({ baseDir }).revparse([ref]);
    }
    // Its a hash already
    return ref;
});
exports.getCommitHash = getCommitHash;
/**
 * Get the commit hash for the last change to a line within a specific file.
 *
 * @param {string} baseDir - the dir of the git repo to get the hash from.
 * @param {string} filePath - the relative path to the file to check the commit hash of.
 * @param {number} lineNumber - the line number from which to get the hash of the last commit.
 * @return {string} - the commit hash of the last change to filePath at lineNumber.
 * @throws {Error} when the file/line cannot be blamed or no hash is found.
 */
const getLineCommitHash = (baseDir, filePath, lineNumber) => __awaiter(void 0, void 0, void 0, function* () {
    // Remove leading slash, if it exists.
    const adjustedFilePath = filePath.replace(/^\//, '');
    try {
        const git = yield (0, simple_git_1.simpleGit)({ baseDir });
        // `-L<n>,<n>` restricts the blame to the single requested line.
        const blame = yield git.raw([
            'blame',
            `-L${lineNumber},${lineNumber}`,
            adjustedFilePath,
        ]);
        // The blame output begins with the commit hash.
        const hash = blame.match(/^([a-f0-9]+)\s+/);
        if (!hash) {
            // Caught by the handler below and re-thrown with the identical message.
            throw new Error(`Unable to git blame ${adjustedFilePath}:${lineNumber}`);
        }
        return hash[1];
    }
    catch (e) {
        throw new Error(`Unable to git blame ${adjustedFilePath}:${lineNumber}`);
    }
});
exports.getLineCommitHash = getLineCommitHash;
/**
 * Get the pull request number associated with a commit hash.
 * (The previous doc summary was copy-pasted from getLineCommitHash.)
 *
 * @param {string} baseDir - the dir of the git repo to get the PR number from.
 * @param {string} hash - the hash to get the PR number from.
 * @return {number} - the pull request number from the given inputs.
 * @throws {Error} when no PR number can be derived from the commit subject.
 */
const getPullRequestNumberFromHash = (baseDir, hash) => __awaiter(void 0, void 0, void 0, function* () {
    try {
        const git = yield (0, simple_git_1.simpleGit)({
            baseDir,
            config: ['core.hooksPath=/dev/null'],
        });
        // Remember the current HEAD so it can be restored after inspecting the hash.
        const formerHead = yield git.revparse('HEAD');
        yield git.checkout(hash);
        // %cI = committer date, %s = subject; the subject lands on line 2.
        const cmdOutput = yield git.raw([
            'log',
            '-1',
            '--first-parent',
            '--format=%cI\n%s',
        ]);
        const cmdLines = cmdOutput.split('\n');
        yield git.checkout(formerHead);
        // Matches merge commits ("Merge pull request #123 ...") and
        // squash-merge subjects ending in "(#123)".
        const prNumber = cmdLines[1]
            .trim()
            .match(/(?:^Merge pull request #(\d+))|(?:\(#(\d+)\)$)/);
        if (prNumber) {
            return prNumber[1]
                ? parseInt(prNumber[1], 10)
                : parseInt(prNumber[2], 10);
        }
        throw new Error(`Unable to get PR number from hash ${hash}.`);
    }
    catch (e) {
        throw new Error(`Unable to get PR number from hash ${hash}.`);
    }
});
exports.getPullRequestNumberFromHash = getPullRequestNumberFromHash;
/**
 * generateDiff generates a diff for a given repo and 2 hashes or branch names.
 *
 * @param {string} tmpRepoPath - filepath to the repo to generate a diff from.
 * @param {string} hashA - commit hash or branch name.
 * @param {string} hashB - commit hash or branch name.
 * @param {Function} onError - the handler to call when an error occurs.
 * @param {Array<string>} excludePaths - A list of directories to exclude from the diff.
 * @return {Promise<string>} the diff text, or '' when an error was reported via onError.
 */
const generateDiff = (tmpRepoPath, hashA, hashB, onError, excludePaths = []) => __awaiter(void 0, void 0, void 0, function* () {
    try {
        const git = (0, simple_git_1.simpleGit)({
            baseDir: tmpRepoPath,
            config: ['core.hooksPath=/dev/null'],
        });
        const validBranches = [hashA, hashB].filter((hash) => !refIsHash(hash));
        // checking out any branches will automatically track remote branches.
        for (const validBranch of validBranches) {
            // Note you can't do checkouts in parallel otherwise the git binary will crash
            yield git.checkout([validBranch]);
        }
        // turn both hashes into commit hashes if they are not already.
        const commitHashA = yield (0, exports.getCommitHash)(tmpRepoPath, hashA);
        const commitHashB = yield (0, exports.getCommitHash)(tmpRepoPath, hashB);
        const isRepo = yield (0, simple_git_1.simpleGit)({
            baseDir: tmpRepoPath,
        }).checkIsRepo();
        if (!isRepo) {
            throw new Error('Not a git repository');
        }
        const diff = yield (0, exports.diffHashes)(tmpRepoPath, commitHashA, commitHashB, excludePaths);
        return diff;
    }
    catch (e) {
        // Errors are reported through the supplied handler instead of being
        // re-thrown; callers receive an empty diff.
        if (e instanceof Error) {
            onError(`Unable to create diff. Check that git repo, base hash, and compare hash all exist.\n Error: ${e.message}`);
        }
        else {
            onError('Unable to create diff. Check that git repo, base hash, and compare hash all exist.');
        }
        return '';
    }
});
exports.generateDiff = generateDiff;
/**
 *
 * @param {string} tmpRepoPath path to temporary repo
 * @param {string} branch remote branch to checkout
 * @param {boolean} isShallow whether to do a shallow clone and get only the latest commit
 */
const checkoutRemoteBranch = (tmpRepoPath, branch, isShallow = true) => __awaiter(void 0, void 0, void 0, function* () {
    const repo = (0, simple_git_1.simpleGit)({
        baseDir: tmpRepoPath,
        config: ['core.hooksPath=/dev/null'],
    });
    // A shallow clone tracks a single branch only, so the remote branch list
    // must be extended before the fetch can see the requested branch.
    yield repo.raw(['remote', 'set-branches', '--add', 'origin', branch]);
    const fetchArgs = isShallow
        ? ['fetch', 'origin', branch, '--depth=1']
        : ['fetch', 'origin', branch];
    yield repo.raw(fetchArgs);
    // Create a local branch of the same name tracking the remote one.
    yield repo.raw(['checkout', '-b', branch, `origin/${branch}`]);
});
exports.checkoutRemoteBranch = checkoutRemoteBranch;

View File

@ -0,0 +1,65 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
/**
* Internal dependencies
*/
const repo_1 = require("../repo");
// Replace the GitHub API module with a stub so the test needs no network
// access or auth token: `graphqlWithAuth` is a factory returning the (mock)
// query function, and every query resolves to the fixed release list below.
jest.mock('../api', () => {
    return {
        graphqlWithAuth: () => jest.fn().mockResolvedValue({
            repository: {
                releases: {
                    nodes: [
                        {
                            tagName: 'nightly',
                            isLatest: false,
                        },
                        {
                            tagName: 'wc-beta-tester-99.99.0',
                            isLatest: false,
                        },
                        {
                            tagName: '1.0.0',
                            isLatest: false,
                        },
                        {
                            tagName: '1.1.0',
                            isLatest: false,
                        },
                        {
                            tagName: '1.2.0',
                            isLatest: false,
                        },
                        {
                            tagName: '2.0.0',
                            isLatest: false,
                        },
                        {
                            // The only node flagged as latest — expected result below.
                            tagName: '2.0.1',
                            isLatest: true,
                        },
                        {
                            tagName: '1.0.1',
                            isLatest: false,
                        },
                    ],
                },
            },
        }),
    };
});
// The lookup must key off `isLatest`, not list order or semver comparison.
it('should return the latest release version', () => __awaiter(void 0, void 0, void 0, function* () {
    expect(yield (0, repo_1.getLatestGithubReleaseVersion)({
        owner: 'woocommerce',
        name: 'woocommerce',
    })).toBe('2.0.1');
}));

View File

@ -0,0 +1,48 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.octokitWithAuth = exports.graphqlWithAuth = void 0;
/**
* External dependencies
*/
const graphql_1 = require("@octokit/graphql");
const octokit_1 = require("octokit");
/**
* Internal dependencies
*/
const environment_1 = require("../environment");
let graphqlWithAuthInstance;
let octokitWithAuthInstance;
/**
 * Returns a graphql instance with auth headers, throws an Exception if
 * `GITHUB_TOKEN` env var is not present.
 *
 * @return graphql instance
 */
const graphqlWithAuth = () => {
    // Lazily build and memoize a single authenticated client.
    if (!graphqlWithAuthInstance) {
        graphqlWithAuthInstance = graphql_1.graphql.defaults({
            headers: {
                authorization: `Bearer ${(0, environment_1.getEnvVar)('GITHUB_TOKEN', true)}`,
            },
        });
    }
    return graphqlWithAuthInstance;
};
exports.graphqlWithAuth = graphqlWithAuth;
/**
 * Returns an Octokit instance with auth headers, throws an Exception if
 * `GITHUB_TOKEN` env var is not present.
 *
 * @return graphql instance
 */
const octokitWithAuth = () => {
    // Lazily build and memoize a single authenticated client.
    if (!octokitWithAuthInstance) {
        octokitWithAuthInstance = new octokit_1.Octokit({
            auth: (0, environment_1.getEnvVar)('GITHUB_TOKEN', true),
        });
    }
    return octokitWithAuthInstance;
};
exports.octokitWithAuth = octokitWithAuth;

View File

@ -0,0 +1,152 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.isCommunityPullRequest = exports.getPullRequest = exports.createPullRequest = exports.deleteGithubBranch = exports.createGithubBranch = exports.getRefFromGithubBranch = exports.doesGithubBranchExist = exports.getLatestGithubReleaseVersion = void 0;
/**
* Internal dependencies
*/
const api_1 = require("./api");
/**
 * Get the tag name of the release GitHub marks as "latest".
 *
 * @param {Object} options       repository options.
 * @param {string} options.owner repository owner.
 * @param {string} options.name  repository name.
 * @return {Promise<string>}     tag name of the latest release.
 */
const getLatestGithubReleaseVersion = (options) => __awaiter(void 0, void 0, void 0, function* () {
    const { owner, name } = options;
    const data = yield (0, api_1.graphqlWithAuth)()(`
			{
				repository(owner: "${owner}", name: "${name}") {
					releases(
						first: 25
						orderBy: { field: CREATED_AT, direction: DESC }
					) {
						nodes {
							tagName
							isLatest
						}
					}
				}
			}
		`);
    // The callback receives a release node, not a tag name; previously the
    // parameter was misleadingly named `tagName`.
    const latestRelease = data.repository.releases.nodes.find((release) => release.isLatest);
    if (!latestRelease) {
        // Guard: without this, a missing "latest" release surfaced as an
        // opaque TypeError reading `.tagName` of undefined.
        throw new Error(`Unable to find latest release for ${owner}/${name}.`);
    }
    return latestRelease.tagName;
});
exports.getLatestGithubReleaseVersion = getLatestGithubReleaseVersion;
/**
 * Check whether a branch exists on GitHub.
 *
 * @param {Object} options           repository options.
 * @param {string} options.owner     repository owner.
 * @param {string} options.name      repository name.
 * @param {string} nextReleaseBranch branch name to look up.
 * @return {Promise<boolean>}        true when the branch exists.
 */
const doesGithubBranchExist = (options, nextReleaseBranch) => __awaiter(void 0, void 0, void 0, function* () {
    const { owner, name } = options;
    try {
        const branchOnGithub = yield (0, api_1.octokitWithAuth)().request('GET /repos/{owner}/{repo}/branches/{branch}', {
            owner,
            repo: name,
            branch: nextReleaseBranch,
        });
        return branchOnGithub.data.name === nextReleaseBranch;
    }
    catch (e) {
        // A 404 with this message simply means the branch does not exist.
        if (e.status === 404 &&
            e.response.data.message === 'Branch not found') {
            return false;
        }
        // Rethrow the original error: wrapping it in `new Error( e )`
        // stringified it to "[object Object]" and discarded status and stack.
        throw e;
    }
});
exports.doesGithubBranchExist = doesGithubBranchExist;
/**
 * Get the commit hash at the tip of a remote branch.
 *
 * @param {Object} options       repository options.
 * @param {string} options.owner repository owner.
 * @param {string} options.name  repository name.
 * @param {string} source        branch name to resolve.
 * @return {Promise<string>}     commit oid of the branch head.
 */
const getRefFromGithubBranch = (options, source) => __awaiter(void 0, void 0, void 0, function* () {
    const { owner, name } = options;
    const { repository } = yield (0, api_1.graphqlWithAuth)()(`
		{
			repository(owner:"${owner}", name:"${name}") {
				ref(qualifiedName: "refs/heads/${source}") {
					target {
						... on Commit {
							history(first: 1) {
								edges{ node{ oid } }
							}
						}
					}
				}
			}
		}
	`);
    // The query requests exactly one commit; read it without mutating the
    // response (the original used `shift()`).
    // @ts-expect-error: The graphql query is typed, but the response is not.
    const [latestCommitEdge] = repository.ref.target.history.edges;
    return latestCommitEdge.node.oid;
});
exports.getRefFromGithubBranch = getRefFromGithubBranch;
/**
 * Create a branch on GitHub pointing at a given commit.
 *
 * @param {Object} options       repository options.
 * @param {string} options.owner repository owner.
 * @param {string} options.name  repository name.
 * @param {string} branch        name of the branch to create.
 * @param {string} ref           commit SHA the new branch should point at.
 */
const createGithubBranch = (options, branch, ref) => __awaiter(void 0, void 0, void 0, function* () {
    yield (0, api_1.octokitWithAuth)().request('POST /repos/{owner}/{repo}/git/refs', {
        owner: options.owner,
        repo: options.name,
        ref: `refs/heads/${branch}`,
        sha: ref,
    });
});
exports.createGithubBranch = createGithubBranch;
/**
 * Delete a branch on GitHub.
 *
 * @param {Object} options       repository options.
 * @param {string} options.owner repository owner.
 * @param {string} options.name  repository name.
 * @param {string} branch        name of the branch to delete.
 */
const deleteGithubBranch = (options, branch) => __awaiter(void 0, void 0, void 0, function* () {
    yield (0, api_1.octokitWithAuth)().request('DELETE /repos/{owner}/{repo}/git/refs/heads/{ref}', {
        owner: options.owner,
        repo: options.name,
        ref: branch,
    });
});
exports.deleteGithubBranch = deleteGithubBranch;
/**
 * Create a pull request from branches on Github.
 *
 * @param {Object} options pull request options.
 * @param {string} options.head branch name containing the changes you want to merge.
 * @param {string} options.base branch name you want the changes pulled into.
 * @param {string} options.owner repository owner.
 * @param {string} options.name repository name.
 * @param {string} options.title pull request title.
 * @param {string} options.body pull request body.
 * @return {Promise<object>} pull request data.
 */
const createPullRequest = (options) => __awaiter(void 0, void 0, void 0, function* () {
    const { head, base, owner, name, title, body } = options;
    const response = yield (0, api_1.octokitWithAuth)().request('POST /repos/{owner}/{repo}/pulls', {
        owner,
        repo: name,
        title,
        body,
        head,
        base,
    });
    return response.data;
});
exports.createPullRequest = createPullRequest;
/**
 * Get a pull request from GitHub.
 *
 * @param {Object} options
 * @param {string} options.owner repository owner.
 * @param {string} options.name repository name.
 * @param {string} options.prNumber pull request number.
 * @return {Promise<object>} pull request data.
 */
const getPullRequest = (options) => __awaiter(void 0, void 0, void 0, function* () {
    const { owner, name, prNumber } = options;
    const response = yield (0, api_1.octokitWithAuth)().request('GET /repos/{owner}/{repo}/pulls/{pull_number}', {
        owner,
        repo: name,
        pull_number: Number(prNumber),
    });
    return response.data;
});
exports.getPullRequest = getPullRequest;
/**
 * Determine if a pull request is coming from a community contribution, i.e., not from a member of the WooCommerce organization.
 *
 * @param {Object} pullRequestData pull request data.
 * @param {string} owner repository owner.
 * @param {string} name repository name.
 * @return {boolean} if a pull request is coming from a community contribution.
 */
const isCommunityPullRequest = (pullRequestData, owner, name) => {
    // author_association is editable by PR authors, so trust only the PR
    // source: a head repo that differs from `owner/name` is a fork.
    const upstreamRepo = `${owner}/${name}`;
    return pullRequestData.head.repo.full_name !== upstreamRepo;
};
exports.isCommunityPullRequest = isCommunityPullRequest;

View File

@ -0,0 +1,2 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });

View File

@ -0,0 +1,82 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Logger = void 0;
/**
* External dependencies
*/
const ora_1 = __importDefault(require("ora"));
const chalk_1 = __importDefault(require("chalk"));
const cli_table_1 = __importDefault(require("cli-table"));
/**
* Internal dependencies
*/
const environment_1 = require("./environment");
// Numeric logging levels; higher values are more verbose. The active level
// is derived from the LOGGER_LEVEL env var (see Logger.loggingLevel).
const LOGGING_LEVELS = {
    verbose: 3,
    warn: 2,
    error: 1,
    silent: 0,
};
// Capture the console methods once so later calls are unaffected if
// `console` is monkey-patched elsewhere.
const { log, error, warn } = console;
// Console logger gated by the LOGGER_LEVEL env var, with ora spinner support
// for long-running tasks (spinners are skipped on GitHub CI, where they
// would garble the log output).
class Logger {
    // Resolve the active numeric level; unset or unknown env values fall
    // back to 'warn'.
    static get loggingLevel() {
        return LOGGING_LEVELS[(0, environment_1.getEnvVar)('LOGGER_LEVEL') || 'warn'];
    }
    // Print an error in red (message + stack when available). By default the
    // process exits with code 1; pass failOnErr = false to continue running.
    static error(err, failOnErr = true) {
        if (Logger.loggingLevel >= LOGGING_LEVELS.error) {
            if (err instanceof Error) {
                error(chalk_1.default.red(`${err.message}\n${err.stack}`));
            }
            else if (typeof err === 'string') {
                error(chalk_1.default.red(err));
            }
            else {
                // Best effort to log the error when we don't know the type.
                error(chalk_1.default.red(JSON.stringify(err, null, 2)));
            }
            if (failOnErr) {
                process.exit(1);
            }
        }
    }
    // Print a yellow warning when the level is 'warn' or higher.
    static warn(message) {
        if (Logger.loggingLevel >= LOGGING_LEVELS.warn) {
            warn(chalk_1.default.yellow(message));
        }
    }
    // Print a green informational message at any non-silent level.
    static notice(message) {
        if (Logger.loggingLevel > LOGGING_LEVELS.silent) {
            log(chalk_1.default.green(message));
        }
    }
    // Start a spinner for a long-running task; on GitHub CI print a plain
    // notice instead. The spinner is stashed for endTask() to resolve.
    static startTask(message) {
        if (Logger.loggingLevel > LOGGING_LEVELS.silent && !(0, environment_1.isGithubCI)()) {
            const spinner = (0, ora_1.default)(chalk_1.default.green(`${message}...`)).start();
            Logger.lastSpinner = spinner;
        }
        else if ((0, environment_1.isGithubCI)()) {
            Logger.notice(message);
        }
    }
    // Render a cli-table with the given header and rows at any non-silent level.
    static table(head, rows) {
        if (Logger.loggingLevel > LOGGING_LEVELS.silent) {
            const table = new cli_table_1.default({ head, rows });
            log(table.toString());
        }
    }
    // Mark the most recent startTask() spinner as complete and clear it;
    // on GitHub CI print a plain completion notice instead.
    static endTask() {
        if (Logger.loggingLevel > LOGGING_LEVELS.silent &&
            Logger.lastSpinner &&
            !(0, environment_1.isGithubCI)()) {
            Logger.lastSpinner.succeed(`${Logger.lastSpinner.text} complete.`);
            Logger.lastSpinner = null;
        }
        else if ((0, environment_1.isGithubCI)()) {
            Logger.notice('Task complete.');
        }
    }
}
exports.Logger = Logger;

34
tools/monorepo-utils/dist/core/util.js vendored Normal file
View File

@ -0,0 +1,34 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.requestAsync = exports.execAsync = void 0;
/**
* External dependencies
*/
const util_1 = require("util");
const child_process_1 = require("child_process");
const https_1 = require("https");
// Promise-based wrapper around child_process.exec for async/await callers.
exports.execAsync = (0, util_1.promisify)(child_process_1.exec);
// A wrapper around https.request that returns a promise encapsulating the response body and other response attributes.
const requestAsync = (options, data) => {
    return new Promise((resolve, reject) => {
        const req = (0, https_1.request)(options, (res) => {
            const chunks = [];
            res.setEncoding('utf8');
            res.on('data', (chunk) => {
                chunks.push(chunk);
            });
            res.on('end', () => {
                // Expose the accumulated body alongside the shallow-copied
                // response attributes.
                resolve(Object.assign(Object.assign({}, res), { body: chunks.join('') }));
            });
        });
        req.on('error', (err) => reject(err));
        if (data) {
            req.write(data);
        }
        req.end();
    });
};
exports.requestAsync = requestAsync;

View File

@ -0,0 +1,29 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getMajorMinor = exports.WPIncrement = void 0;
/**
* External dependencies
*/
const semver_1 = require("semver");
/**
 * Bumps the version according to WP rules.
 *
 * @param {string} version Version to increment
 * @return {string} Incremented version
 */
const WPIncrement = (version) => {
    const parsed = (0, semver_1.parse)(version);
    // WP versioning rolls x.9 over to ( x + 1 ).0 instead of x.10.
    const releaseType = parsed.minor === 9 ? 'major' : 'minor';
    return (0, semver_1.inc)(parsed, releaseType);
};
exports.WPIncrement = WPIncrement;
/**
 * Gets the major-minor of a given version number.
 *
 * @param {string} version Version to gather major minor from.
 * @return {string} major minor
 */
const getMajorMinor = (version) => {
    const { major, minor } = (0, semver_1.parse)(version);
    return `${major}.${minor}`;
};
exports.getMajorMinor = getMajorMinor;

106
tools/monorepo-utils/dist/core/wpenv.js vendored Normal file
View File

@ -0,0 +1,106 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.stopWPEnv = exports.startWPEnv = exports.isWPEnvPortTaken = void 0;
/**
* External dependencies
*/
const net_1 = require("net");
const path_1 = require("path");
/**
* Internal dependencies
*/
const util_1 = require("./util");
/**
 * Determine if the default port for wp-env is already taken. If so, see
 * https://developer.wordpress.org/block-editor/reference-guides/packages/packages-env/#2-check-the-port-number
 * for alternatives.
 *
 * @return {Promise<boolean>} if the port is being currently used.
 */
const isWPEnvPortTaken = () => {
    return new Promise((resolve, reject) => {
        const test = (0, net_1.createServer)()
            .once('error', (err) => {
            // EADDRINUSE means the port is taken; any other bind error is
            // unexpected and propagated.
            return err.code === 'EADDRINUSE'
                ? resolve(true)
                : reject(err);
        })
            .once('listening', () => {
            // We could bind, so the port is free; release it immediately.
            return test.once('close', () => resolve(false)).close();
        })
            // Pass the port as a number: the previous string '8888' relied on
            // net.Server#listen coercing numeric strings rather than treating
            // the argument as an IPC pipe path.
            .listen(8888);
    });
};
exports.isWPEnvPortTaken = isWPEnvPortTaken;
/**
 * Start wp-env.
 *
 * @param {string} tmpRepoPath - path to the temporary repo to start wp-env from.
 * @param {Function} error - error print method.
 * @return {boolean} if starting the container succeeded.
 */
const startWPEnv = (tmpRepoPath, error) => __awaiter(void 0, void 0, void 0, function* () {
    try {
        // Stop wp-env if its already running.
        yield (0, util_1.execAsync)('wp-env stop', {
            cwd: (0, path_1.join)(tmpRepoPath, 'plugins/woocommerce'),
            encoding: 'utf-8',
        });
    }
    catch (e) {
        // If an error is produced here, it means wp-env is not initialized and therefore not running already.
    }
    try {
        // Bail out early with an actionable message when the default port is busy.
        if (yield (0, exports.isWPEnvPortTaken)()) {
            throw new Error('Unable to start wp-env. Make sure port 8888 is available or specify port number WP_ENV_PORT in .wp-env.override.json');
        }
        yield (0, util_1.execAsync)('wp-env start', {
            cwd: (0, path_1.join)(tmpRepoPath, 'plugins/woocommerce'),
            encoding: 'utf-8',
        });
        return true;
    }
    catch (e) {
        // Report Error failures through the caller-supplied printer; non-Error
        // throwables are only signalled via the false return value.
        let message = '';
        if (e instanceof Error) {
            message = e.message;
            error(message);
        }
        return false;
    }
});
exports.startWPEnv = startWPEnv;
/**
 * Stop wp-env.
 *
 * @param {string} tmpRepoPath - path to the temporary repo to stop wp-env from.
 * @param {Function} error - error print method.
 * @return {boolean} if stopping the container succeeded.
 */
const stopWPEnv = (tmpRepoPath, error) => __awaiter(void 0, void 0, void 0, function* () {
    try {
        yield (0, util_1.execAsync)('wp-env stop', {
            cwd: (0, path_1.join)(tmpRepoPath, 'plugins/woocommerce'),
            encoding: 'utf-8',
        });
        return true;
    }
    catch (e) {
        // Report Error failures through the caller-supplied printer; the
        // false return value alone signals any other kind of failure.
        if (e instanceof Error) {
            error(e.message);
        }
        return false;
    }
});
exports.stopWPEnv = stopWPEnv;

View File

@ -0,0 +1,17 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
* External dependencies
*/
const extra_typings_1 = require("@commander-js/extra-typings");
/**
* Internal dependencies
*/
const create_1 = require("./manifest/create");
/**
 * Top-level `md-docs` command. `create` is registered as the default
 * sub-command, so a bare invocation generates a manifest.
 */
const program = new extra_typings_1.Command('md-docs')
    .description('Utilities for generating markdown doc manifests.')
    .addCommand(create_1.generateManifestCommand, { isDefault: true });
exports.default = program;

View File

@ -0,0 +1,60 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.generateManifestCommand = void 0;
/**
* External dependencies
*/
const fs_1 = require("fs");
const extra_typings_1 = require("@commander-js/extra-typings");
const path_1 = __importDefault(require("path"));
/**
* Internal dependencies
*/
const generate_manifest_1 = require("../../../lib/generate-manifest");
const logger_1 = require("../../../../core/logger");
const markdown_links_1 = require("../../../lib/markdown-links");
exports.generateManifestCommand = new extra_typings_1.Command('create')
.description('Create a manifest file representing the contents of a markdown directory.')
.argument('<directory>', 'Path to directory of Markdown files to generate the manifest from.')
.argument('<projectName>', 'Name of the project to generate the manifest for, used to uniquely identify manifest entries.')
.option('-o --outputFilePath <outputFilePath>', 'Full path and filename of where to output the manifest.', `${process.cwd()}/manifest.json`)
.option('-b --baseUrl <baseUrl>', 'Base url to resolve markdown file URLs to in the manifest.', 'https://raw.githubusercontent.com/woocommerce/woocommerce/trunk')
.option('-r --rootDir <rootDir>', 'Root directory of the markdown files, used to generate URLs.', process.cwd())
.option('-be --baseEditUrl <baseEditUrl>', 'Base url to provide edit links to. This option will be ignored if your baseUrl is not a GitHub URL.', 'https://github.com/woocommerce/woocommerce/edit/trunk')
.action((dir, projectName, options) => __awaiter(void 0, void 0, void 0, function* () {
const { outputFilePath, baseUrl, rootDir, baseEditUrl } = options;
// determine if the rootDir is absolute or relative
const absoluteRootDir = path_1.default.isAbsolute(rootDir)
? rootDir
: path_1.default.join(process.cwd(), rootDir);
const absoluteSubDir = path_1.default.isAbsolute(dir)
? dir
: path_1.default.join(process.cwd(), dir);
const absoluteOutputFilePath = path_1.default.isAbsolute(outputFilePath)
? outputFilePath
: path_1.default.join(process.cwd(), outputFilePath);
logger_1.Logger.startTask('Generating manifest');
const manifest = yield (0, generate_manifest_1.generateManifestFromDirectory)(absoluteRootDir, absoluteSubDir, projectName, baseUrl, baseEditUrl);
const manifestWithLinks = yield (0, markdown_links_1.processMarkdownLinks)(manifest, absoluteRootDir, absoluteSubDir, projectName);
logger_1.Logger.endTask();
logger_1.Logger.startTask('Writing manifest');
yield (0, fs_1.writeFile)(absoluteOutputFilePath, JSON.stringify(manifestWithLinks, null, 2), (err) => {
if (err) {
logger_1.Logger.error(err);
}
});
logger_1.Logger.endTask();
logger_1.Logger.notice(`Manifest output at ${outputFilePath}`);
}));

View File

@ -0,0 +1,24 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
/**
* External dependencies
*/
const path_1 = __importDefault(require("path"));
/**
* Internal dependencies
*/
const generate_urls_1 = require("../generate-urls");
// generateFileUrl builds a manifest URL by joining the base URL with the
// file path expressed relative to the root directory.
describe('generateFileUrl', () => {
    it('should generate a file url relative to the root directory provided', () => {
        const url = (0, generate_urls_1.generateFileUrl)('https://example.com', path_1.default.join(__dirname, 'fixtures/example-docs'), path_1.default.join(__dirname, 'fixtures/example-docs/get-started'), path_1.default.join(__dirname, 'fixtures/example-docs/get-started/local-development.md'));
        expect(url).toBe('https://example.com/get-started/local-development.md');
    });
    // Each of the three path arguments must be absolute; a relative root,
    // sub-directory, or file path is rejected with a throw.
    it('should throw an error if relative paths are passed', () => {
        expect(() => (0, generate_urls_1.generateFileUrl)('https://example.com', './example-docs', path_1.default.join(__dirname, 'fixtures/example-docs/get-started'), path_1.default.join(__dirname, 'fixtures/example-docs/get-started/local-development.md'))).toThrow();
        expect(() => (0, generate_urls_1.generateFileUrl)('https://example.com', path_1.default.join(__dirname, 'fixtures/example-docs'), './get-started', path_1.default.join(__dirname, 'fixtures/example-docs/get-started/local-development.md'))).toThrow();
        expect(() => (0, generate_urls_1.generateFileUrl)('https://example.com', path_1.default.join(__dirname, 'fixtures/example-docs'), path_1.default.join(__dirname, 'fixtures/example-docs/get-started'), './local-development.md')).toThrow();
    });
});

View File

@ -0,0 +1,29 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
* Internal dependencies
*/
const generate_frontmatter_1 = require("../generate-frontmatter");
// generatePostFrontMatter parses a YAML front-matter block out of markdown
// content and filters the result down to an allow-list of attributes.
describe('generateFrontmatter', () => {
    it('should not allow disallowed attributes', () => {
        // Only `post_title` survives; the other keys are dropped.
        const frontMatter = (0, generate_frontmatter_1.generatePostFrontMatter)(`---
title: Hello World
description: This is a description
post_content: This is some content
post_title: This is a title
---
`);
        expect(frontMatter).toEqual({
            post_title: 'This is a title',
        });
    });
    it('should not do additional date parsing', () => {
        // Date-like values must stay raw strings, not become Date objects.
        const frontMatter = (0, generate_frontmatter_1.generatePostFrontMatter)(`---
post_date: 2023-07-12 15:30:00
---
`);
        expect(frontMatter).toEqual({
            post_date: '2023-07-12 15:30:00',
        });
    });
});

View File

@ -0,0 +1,140 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
/**
* External dependencies
*/
const path_1 = __importDefault(require("path"));
const fs_1 = __importDefault(require("fs"));
/**
* Internal dependencies
*/
const generate_manifest_1 = require("../generate-manifest");
// Exercises generateManifestFromDirectory against the example-docs fixture
// tree: category structure, .manifestignore handling, post IDs/URLs, and
// content-hash stability.
describe('generateManifest', () => {
    const dir = path_1.default.join(__dirname, './fixtures/example-docs');
    const rootDir = path_1.default.join(__dirname, './fixtures');
    it('should generate a manifest with the correct category structure', () => __awaiter(void 0, void 0, void 0, function* () {
        // generate the manifest from fixture directory
        const manifest = yield (0, generate_manifest_1.generateManifestFromDirectory)(rootDir, dir, 'example-docs', 'https://example.com', 'https://example.com/edit');
        const topLevelCategories = manifest.categories;
        expect(topLevelCategories[0].category_title).toEqual('Getting Started with WooCommerce');
        expect(topLevelCategories[1].category_title).toEqual('Testing WooCommerce');
        const subCategories = topLevelCategories[0].categories;
        expect(subCategories[1].category_title).toEqual('Troubleshooting Problems');
    }));
    it('should exclude files and folders matching patterns in .manifestignore', () => __awaiter(void 0, void 0, void 0, function* () {
        // generate the manifest from fixture directory
        const manifest = yield (0, generate_manifest_1.generateManifestFromDirectory)(rootDir, dir, 'example-docs', 'https://example.com', 'https://example.com/edit');
        const topLevelCategories = manifest.categories;
        expect(topLevelCategories).toHaveLength(2);
        expect(topLevelCategories[0].posts).toHaveLength(1);
    }));
    it('should generate a manifest with categories that contain all markdown files in a location as individual posts', () => __awaiter(void 0, void 0, void 0, function* () {
        // generate the manifest from fixture directory
        const manifest = yield (0, generate_manifest_1.generateManifestFromDirectory)(rootDir, dir, 'example-docs', 'https://example.com', 'https://example.com/edit');
        const topLevelCategories = manifest.categories;
        expect(topLevelCategories[1].category_title).toEqual('Testing WooCommerce');
        const posts = topLevelCategories[1].posts;
        expect(posts).toHaveLength(2);
        expect(posts[0].post_title).toEqual('Unit Testing');
        expect(posts[1].post_title).toEqual('Integration Testing');
    }));
    it('should create categories with titles where there is no index README', () => __awaiter(void 0, void 0, void 0, function* () {
        const manifest = yield (0, generate_manifest_1.generateManifestFromDirectory)(rootDir, dir, 'example-docs', 'https://example.com', 'https://example.com/edit');
        expect(manifest.categories[0].categories[0].category_title).toEqual('Installation');
    }));
    it('should create post urls with the correct url', () => __awaiter(void 0, void 0, void 0, function* () {
        const manifest = yield (0, generate_manifest_1.generateManifestFromDirectory)(rootDir, dir, 'example-docs', 'https://example.com', 'https://example.com/edit');
        expect(manifest.categories[0].posts[0].url).toEqual('https://example.com/example-docs/get-started/local-development.md');
        expect(manifest.categories[0].categories[0].posts[0].url).toEqual('https://example.com/example-docs/get-started/installation/install-plugin.md');
    }));
    it('should generate posts with stable IDs', () => __awaiter(void 0, void 0, void 0, function* () {
        const manifest = yield (0, generate_manifest_1.generateManifestFromDirectory)(rootDir, dir, 'example-docs', 'https://example.com', 'https://example.com/edit');
        expect(manifest.categories[0].posts[0].id).toEqual('29bce0a522cef4cd72aad4dd1c9ad5d0b6780704');
    }));
    it('should create a hash for each manifest', () => __awaiter(void 0, void 0, void 0, function* () {
        const manifest = yield (0, generate_manifest_1.generateManifestFromDirectory)(rootDir, dir, 'example-docs', 'https://example.com', 'https://example.com/edit');
        expect(manifest.hash).not.toBeUndefined();
    }));
    it('should generate edit_url when github is in the base url', () => __awaiter(void 0, void 0, void 0, function* () {
        const manifest = yield (0, generate_manifest_1.generateManifestFromDirectory)(rootDir, dir, 'example-docs', 'https://github.com', 'https://github.com/edit');
        expect(manifest.categories[0].posts[0].edit_url).toEqual('https://github.com/edit/example-docs/get-started/local-development.md');
    }));
    it('should create a hash for each post in a manifest', () => __awaiter(void 0, void 0, void 0, function* () {
        const manifest = yield (0, generate_manifest_1.generateManifestFromDirectory)(rootDir, dir, 'example-docs', 'https://example.com', 'https://example.com/edit');
        const topLevelCategories = manifest.categories;
        const posts = [
            ...topLevelCategories[0].posts,
            ...topLevelCategories[0].categories[0].posts,
            ...topLevelCategories[0].categories[1].posts,
            ...topLevelCategories[1].posts,
        ];
        posts.forEach((post) => {
            expect(post.hash).not.toBeUndefined();
        });
    }));
    // NOTE: this test mutates a fixture file on disk and restores it at the
    // end; a failure between write and restore can leave the fixture dirty.
    it('should update a post hash and manifest hash when content is updated', () => __awaiter(void 0, void 0, void 0, function* () {
        const manifest = yield (0, generate_manifest_1.generateManifestFromDirectory)(rootDir, dir, 'example-docs', 'https://example.com', 'https://example.com/edit');
        const post = manifest.categories[0].posts[0];
        const originalPostHash = post.hash;
        const originalManifestHash = manifest.hash;
        // Confirm hashes are not undefined
        expect(originalPostHash).not.toBeUndefined();
        expect(originalManifestHash).not.toBeUndefined();
        // Update the file content of the corresponding post
        const filePath = path_1.default.join(dir, 'get-started/local-development.md');
        const fileContent = fs_1.default.readFileSync(filePath, 'utf-8');
        const updatedFileContent = fileContent + '\n\n<!-- updated -->';
        fs_1.default.writeFileSync(filePath, updatedFileContent);
        // Generate a new manifest
        const nextManifest = yield (0, generate_manifest_1.generateManifestFromDirectory)(rootDir, dir, 'example-docs', 'https://example.com', 'https://example.com/edit');
        const nextPost = nextManifest.categories[0].posts[0];
        const nextPostHash = nextPost.hash;
        const nextManifestHash = nextManifest.hash;
        // Confirm hashes are newly created.
        expect(nextPostHash).not.toEqual(originalPostHash);
        expect(nextManifestHash).not.toEqual(originalManifestHash);
        // Reset the file content
        fs_1.default.writeFileSync(filePath, fileContent);
    }));
    it('should not update a post hash and manifest hash when content is unchanged', () => __awaiter(void 0, void 0, void 0, function* () {
        const manifest = yield (0, generate_manifest_1.generateManifestFromDirectory)(rootDir, dir, 'example-docs', 'https://example.com', 'https://example.com/edit');
        const post = manifest.categories[0].posts[0];
        const originalPostHash = post.hash;
        const originalManifestHash = manifest.hash;
        // Confirm hashes are not undefined
        expect(originalPostHash).not.toBeUndefined();
        expect(originalManifestHash).not.toBeUndefined();
        // Generate a new manifest
        const nextManifest = yield (0, generate_manifest_1.generateManifestFromDirectory)(rootDir, dir, 'example-docs', 'https://example.com', 'https://example.com/edit');
        const nextPost = nextManifest.categories[0].posts[0];
        const nextPostHash = nextPost.hash;
        const nextManifestHash = nextManifest.hash;
        // Hashes must be identical to the first run (deterministic hashing).
        expect(nextPostHash).toEqual(originalPostHash);
        expect(nextManifestHash).toEqual(originalManifestHash);
    }));
});
// generatePostId derives a post ID from the file path plus a project prefix:
// same inputs give the same ID, different prefixes give different IDs.
describe('generatePostId', () => {
    it('should generate a stable ID for the same file', () => {
        const id1 = (0, generate_manifest_1.generatePostId)('get-started/local-development.md', 'woodocs');
        const id2 = (0, generate_manifest_1.generatePostId)('get-started/local-development.md', 'woodocs');
        expect(id1).toEqual(id2);
    });
    it('should generate a different ID for different prefixes', () => {
        const id1 = (0, generate_manifest_1.generatePostId)('get-started/local-development.md', 'foodocs');
        const id2 = (0, generate_manifest_1.generatePostId)('get-started/local-development.md', 'woodocs');
        expect(id1).not.toEqual(id2);
    });
});

View File

@ -0,0 +1,36 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
/**
* External dependencies
*/
const path_1 = __importDefault(require("path"));
/**
* Internal dependencies
*/
const generate_manifest_1 = require("../generate-manifest");
const markdown_links_1 = require("../markdown-links");
describe('processMarkdownLinks', () => {
const dir = path_1.default.join(__dirname, './fixtures/example-docs');
const rootDir = path_1.default.join(__dirname, './fixtures');
it('should add the correct relative links to a manifest', () => __awaiter(void 0, void 0, void 0, function* () {
// generate the manifest from fixture directory
const manifest = yield (0, generate_manifest_1.generateManifestFromDirectory)(rootDir, dir, 'example-docs', 'https://example.com', 'https://example.com/edit');
const manifestWithLinks = yield (0, markdown_links_1.processMarkdownLinks)(manifest, rootDir, dir, 'example-docs');
// The fixture's first post links to the install-plugin doc, so a links entry should be recorded.
const localDevelopmentPost = manifestWithLinks.categories[0].posts[0];
expect(localDevelopmentPost.links['./installation/install-plugin.md']).toBeDefined();
// The recorded link must resolve to the ID of the post it points at.
const installationPost = manifestWithLinks.categories[0].categories[0].posts[0];
expect(localDevelopmentPost.links['./installation/install-plugin.md']).toEqual(installationPost.id);
}));
});

View File

@ -0,0 +1,51 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.generatePostFrontMatter = void 0;
/**
* External dependencies
*/
const gray_matter_1 = __importDefault(require("gray-matter"));
const js_yaml_1 = __importDefault(require("js-yaml"));
/**
 * Extract an allow-listed set of post attributes from a markdown file's front-matter.
 *
 * Falls back to the first markdown H1 in the body for `post_title` when the
 * front-matter does not declare one.
 *
 * @param fileContents   Raw markdown file contents (front-matter plus body).
 * @param includeContent When truthy, the markdown body is included under `content`.
 */
const generatePostFrontMatter = (fileContents, includeContent) => {
    // Only these keys may flow from the front-matter into the generated post object.
    const allowList = [
        'post_date',
        'post_title',
        'page_template',
        'post_author',
        'post_name',
        'category_title',
        'category_slug',
        'content',
        'menu_title',
    ];
    const frontMatter = (0, gray_matter_1.default)(fileContents, {
        engines: {
            // By passing yaml.JSON_SCHEMA we disable date parsing that changes date format.
            // See https://github.com/jonschlinkert/gray-matter/issues/62#issuecomment-577628177 for more details.
            yaml: (s) => js_yaml_1.default.load(s, { schema: js_yaml_1.default.JSON_SCHEMA }),
        },
    });
    // Derive a default title from the first level-one heading in the body, if any.
    const bodyLines = frontMatter.content.split('\n');
    const firstHeading = bodyLines.find((line) => line.substring(0, 2) === '# ');
    const derivedTitle = firstHeading === undefined ? undefined : firstHeading.substring(2).trim();
    if (frontMatter.data.post_title === null || frontMatter.data.post_title === undefined) {
        frontMatter.data.post_title = derivedTitle;
    }
    // Treat a missing flag as false; any other value keeps its truthiness.
    const shouldIncludeContent = includeContent === null || includeContent === undefined ? false : includeContent;
    if (shouldIncludeContent) {
        frontMatter.data.content = frontMatter.content;
    }
    // Strip every key that is not explicitly allow-listed.
    const sanitized = {};
    for (const key of Object.keys(frontMatter.data)) {
        if (allowList.includes(key)) {
            sanitized[key] = frontMatter.data[key];
        }
    }
    return sanitized;
};
exports.generatePostFrontMatter = generatePostFrontMatter;

View File

@ -0,0 +1,124 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.generateManifestFromDirectory = exports.generatePostId = void 0;
/**
* External dependencies
*/
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const glob_1 = require("glob");
const crypto_1 = __importDefault(require("crypto"));
/**
* Internal dependencies
*/
const generate_frontmatter_1 = require("./generate-frontmatter");
const generate_urls_1 = require("./generate-urls");
/**
 * Generate a stable, content-addressed post ID from a file path and project prefix.
 *
 * @param filePath Path of the markdown file, relative to the docs root.
 * @param prefix   Project name used to namespace IDs across projects.
 * @return Hex-encoded SHA-1 digest of `prefix/filePath`.
 */
function generatePostId(filePath, prefix = '') {
    return crypto_1.default
        .createHash('sha1')
        .update(`${prefix}/${filePath}`)
        .digest('hex');
}
exports.generatePostId = generatePostId;
/**
 * Check whether a filename matches any pattern in the exclusion haystack.
 *
 * @param filename File name or path to test.
 * @param hayStack Array of patterns (string or RegExp) accepted by String.prototype.match.
 * @return {boolean} True when at least one pattern matches.
 */
function filenameMatches(filename, hayStack) {
    // `some` short-circuits on the first hit, unlike the previous filter().length scan.
    return hayStack.some((item) => filename.match(item) !== null);
}
/**
 * Recursively build a manifest category from a directory of markdown files.
 *
 * Reads README.md (when checkReadme is true) for the category's own metadata,
 * turns every other markdown file into a post entry (URL, optional edit URL,
 * content hash, stable id), then descends into sub-directories as sub-categories.
 *
 * @param rootDirectory  Root directory used as the base for generated URLs.
 * @param subDirectory   Directory currently being processed.
 * @param projectName    Project prefix used when generating post ids.
 * @param baseUrl        Base URL for published files.
 * @param baseEditUrl    Base URL for edit links (only applied when baseUrl contains 'github').
 * @param fullPathToDocs Docs root; post ids are derived from paths relative to it.
 * @param exclude        Filename patterns to skip (e.g. from .manifestignore).
 * @param checkReadme    When true, README.md supplies category metadata and is skipped as a post.
 */
function processDirectory(rootDirectory, subDirectory, projectName, baseUrl, baseEditUrl, fullPathToDocs, exclude, checkReadme = true) {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
const category = {};
// Process README.md (if exists) for the category definition.
const readmePath = path_1.default.join(subDirectory, 'README.md');
if (checkReadme) {
if (fs_1.default.existsSync(readmePath)) {
const readmeContent = fs_1.default.readFileSync(readmePath, 'utf-8');
const frontMatter = (0, generate_frontmatter_1.generatePostFrontMatter)(readmeContent, true);
category.content = frontMatter.content;
category.category_slug = frontMatter.category_slug;
category.category_title = frontMatter.category_title;
category.menu_title = frontMatter.menu_title;
}
// derive the category title from the directory name, capitalize first letter of each word.
const categoryFolder = path_1.default.basename(subDirectory);
const categoryTitle = categoryFolder
.split('-')
.map((slugPart) => slugPart.charAt(0).toUpperCase() + slugPart.slice(1))
.join(' ');
// README front-matter wins; the folder-derived values are only fallbacks.
category.category_slug = (_a = category.category_slug) !== null && _a !== void 0 ? _a : categoryFolder;
category.category_title = (_b = category.category_title) !== null && _b !== void 0 ? _b : categoryTitle;
}
const markdownFiles = glob_1.glob
.sync(path_1.default.join(subDirectory, '*.md'))
.filter((markdownFile) => !filenameMatches(markdownFile, exclude));
// If there are markdown files in this directory, add a posts array to the category. Otherwise, assume its a top level category that will contain subcategories.
if (markdownFiles.length > 0) {
category.posts = [];
}
markdownFiles.forEach((filePath) => {
if (filePath !== readmePath || !checkReadme) {
// Skip README.md which we have already processed.
const fileContent = fs_1.default.readFileSync(filePath, 'utf-8');
const fileFrontmatter = (0, generate_frontmatter_1.generatePostFrontMatter)(fileContent);
// Edit links are only meaningful for GitHub-hosted docs.
if (baseUrl.includes('github')) {
fileFrontmatter.edit_url = (0, generate_urls_1.generateFileUrl)(baseEditUrl, rootDirectory, subDirectory, filePath);
}
const post = Object.assign({}, fileFrontmatter);
// Generate hash of the post contents so consumers can detect changes.
post.hash = crypto_1.default
.createHash('sha256')
.update(JSON.stringify(fileContent))
.digest('hex');
// Post ids are derived from the path relative to the docs root, so they survive repo moves.
const relativePath = path_1.default.relative(fullPathToDocs, filePath);
category.posts.push(Object.assign(Object.assign({}, post), { url: (0, generate_urls_1.generateFileUrl)(baseUrl, rootDirectory, subDirectory, filePath), filePath, id: generatePostId(relativePath, projectName) }));
}
});
// Recursively process subdirectories (exclusion patterns apply to directory names too).
category.categories = [];
const subdirectories = fs_1.default
.readdirSync(subDirectory, { withFileTypes: true })
.filter((dirent) => dirent.isDirectory())
.filter((dirent) => !filenameMatches(dirent.name, exclude))
.map((dirent) => path_1.default.join(subDirectory, dirent.name));
for (const subdirectory of subdirectories) {
const subcategory = yield processDirectory(rootDirectory, subdirectory, projectName, baseUrl, baseEditUrl, fullPathToDocs, exclude);
category.categories.push(subcategory);
}
return category;
});
}
/**
 * Generate a docs manifest for a directory tree of markdown files.
 *
 * Honors an optional `.manifestignore` file (one pattern per line, `#` comments
 * allowed) and stamps the resulting manifest with a SHA-256 fingerprint so
 * consumers can cheaply detect content changes.
 *
 * @param rootDirectory Root directory used as the base for generated URLs.
 * @param subDirectory  Directory of markdown files to generate the manifest from.
 * @param projectName   Project prefix used when generating post ids.
 * @param baseUrl       Base URL for published files.
 * @param baseEditUrl   Base URL for edit links.
 */
function generateManifestFromDirectory(rootDirectory, subDirectory, projectName, baseUrl, baseEditUrl) {
    return __awaiter(this, void 0, void 0, function* () {
        const fullPathToDocs = subDirectory;
        const manifestIgnorePath = path_1.default.join(subDirectory, '.manifestignore');
        const ignorePatterns = [];
        if (fs_1.default.existsSync(manifestIgnorePath)) {
            const rawLines = fs_1.default.readFileSync(manifestIgnorePath, 'utf-8').split('\n');
            for (const rawLine of rawLines) {
                const trimmed = rawLine.trim();
                // Keep non-empty lines that are not comments.
                if (trimmed.length > 0 && trimmed.substring(0, 1) !== '#') {
                    ignorePatterns.push(trimmed);
                }
            }
        }
        const manifest = yield processDirectory(rootDirectory, subDirectory, projectName, baseUrl, baseEditUrl, fullPathToDocs, ignorePatterns, false);
        // Fingerprint the whole manifest so a single hash comparison detects any change.
        const hash = crypto_1.default
            .createHash('sha256')
            .update(JSON.stringify(manifest))
            .digest('hex');
        return Object.assign(Object.assign({}, manifest), { hash });
    });
}
exports.generateManifestFromDirectory = generateManifestFromDirectory;

View File

@ -0,0 +1,37 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.generateFileUrl = void 0;
/**
* External dependencies
*/
const path_1 = __importDefault(require("path"));
/**
 * Build the public URL for a documentation file, relative to the docs root.
 *
 * @param baseUrl          Base URL the relative path segment is appended to.
 * @param rootDirectory    Absolute root directory where the file resides.
 * @param subDirectory     Absolute sub-directory where the file resides.
 * @param absoluteFilePath Absolute path to the file itself.
 * @return The file url.
 * @throws {Error} When any of the supplied paths is not absolute.
 */
const generateFileUrl = (baseUrl, rootDirectory, subDirectory, absoluteFilePath) => {
    // A relative URL segment is only well-defined when every input path is absolute.
    for (const candidate of [rootDirectory, subDirectory, absoluteFilePath]) {
        if (!path_1.default.isAbsolute(candidate)) {
            throw new Error(`File URLs cannot be generated without absolute paths. ${candidate} is not absolute.`);
        }
    }
    // Resolve against the sub-directory, then express the result relative to the root.
    const resolvedFilePath = path_1.default.resolve(subDirectory, absoluteFilePath);
    const pathFromRoot = path_1.default.relative(rootDirectory, resolvedFilePath);
    return `${baseUrl}/${pathFromRoot}`;
};
exports.generateFileUrl = generateFileUrl;

View File

@ -0,0 +1,53 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.processMarkdownLinks = void 0;
/**
* External dependencies
*/
const path_1 = __importDefault(require("path"));
const fs_1 = __importDefault(require("fs"));
/**
* Internal dependencies
*/
const generate_manifest_1 = require("./generate-manifest");
/**
 * Process relative markdown links in the manifest.
 *
 * For each post, scans its source file for markdown links and, when the link
 * target exists on disk, records a mapping from the relative link to the
 * linked post's stable id. Recurses into sub-categories.
 *
 * @param manifest Category or Post
 * @param rootDirectory Root directory of the project
 * @param absoluteSubDir Path to directory of Markdown files to generate the manifest from.
 * @param projectName Name of the project
 * @return A copy of the manifest with `links` populated and `filePath` removed from posts.
 */
const processMarkdownLinks = (manifest, rootDirectory, absoluteSubDir, projectName) => {
const updatedManifest = Object.assign({}, manifest);
if (updatedManifest.posts) {
updatedManifest.posts = updatedManifest.posts.map((post) => {
const updatedPost = Object.assign({}, post);
const filePath = path_1.default.resolve(rootDirectory, updatedPost.filePath);
const fileContent = fs_1.default.readFileSync(filePath, 'utf-8');
// Matches markdown links `[text](target)`; capture group 1 is the target.
const linkRegex = /\[(?:.*?)\]\((.*?)\)/g;
let match;
// exec() with a /g regex advances lastIndex each call, walking all links in order.
while ((match = linkRegex.exec(fileContent))) {
const relativePath = match[1];
const absoluteLinkedFilePath = path_1.default.resolve(path_1.default.dirname(filePath), relativePath);
const relativeLinkedFilePath = path_1.default.relative(absoluteSubDir, absoluteLinkedFilePath);
// Only local files get an id mapping; external URLs fail the existsSync check.
if (fs_1.default.existsSync(absoluteLinkedFilePath)) {
const linkedId = (0, generate_manifest_1.generatePostId)(relativeLinkedFilePath, projectName);
updatedPost.links = updatedPost.links || {};
updatedPost.links[relativePath] = linkedId;
}
}
// dont expose filePath on updated posts
delete updatedPost.filePath;
return updatedPost;
});
}
// Recurse through the exports binding so sub-categories receive the same treatment.
if (updatedManifest.categories) {
updatedManifest.categories = updatedManifest.categories.map((category) => (0, exports.processMarkdownLinks)(category, rootDirectory, absoluteSubDir, projectName));
}
return updatedManifest;
};
exports.processMarkdownLinks = processMarkdownLinks;

View File

@ -0,0 +1,19 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
* External dependencies
*/
const extra_typings_1 = require("@commander-js/extra-typings");
/**
* Internal dependencies
*/
const slack_message_1 = require("./slack-message");
const slack_file_1 = require("./slack-file");
/**
 * The `slack` command group. Sends a plain-text message by default, or a file
 * upload via the `file` subcommand.
 */
const program = new extra_typings_1.Command('slack')
.description('Slack message sending utilities')
.addCommand(slack_message_1.slackMessageCommand, { isDefault: true })
.addCommand(slack_file_1.slackFileCommand);
exports.default = program;

View File

@ -0,0 +1,66 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.slackFileCommand = void 0;
/**
* External dependencies
*/
const extra_typings_1 = require("@commander-js/extra-typings");
const web_api_1 = require("@slack/web-api");
const path_1 = require("path");
const fs_1 = require("fs");
/**
* Internal dependencies
*/
const logger_1 = require("../../../core/logger");
/**
 * `slack file` command: uploads a file with an initial comment to one or more
 * Slack channels via the Web API `files.uploadV2` endpoint.
 */
exports.slackFileCommand = new extra_typings_1.Command('file')
.description('Send a file upload message to a slack channel')
.argument('<token>', 'Slack authentication token bearing required scopes.')
.argument('<text>', 'Text based message to send to the slack channel.')
.argument('<filePath>', 'File path to upload to the slack channel.')
.argument('<channelIds...>', 'Slack channel IDs to send the message to. Pass as many as you like.')
.option('--dont-fail', 'Do not fail the command if a message fails to send to any channel.')
.option('--reply-ts <replyTs>', 'Reply to the message with the corresponding ts')
.option('--filename <filename>', 'If provided, the filename that will be used for the file on Slack.')
.action((token, text, filePath, channels, { dontFail, replyTs, filename }) => __awaiter(void 0, void 0, void 0, function* () {
logger_1.Logger.startTask(`Attempting to send message to Slack for channels: ${channels.join(',')}`);
// Unless --dont-fail was passed, any error aborts the command.
const shouldFail = !dontFail;
if (filePath && !(0, fs_1.existsSync)(filePath)) {
logger_1.Logger.error(`Unable to open file with path: ${filePath}`, shouldFail);
}
const client = new web_api_1.WebClient(token);
// Each channel is attempted independently; one failure does not stop the rest.
for (const channel of channels) {
try {
const requestOptions = {
file: filePath,
// Default the Slack-side filename to the local basename when not overridden.
filename: filename ? filename : (0, path_1.basename)(filePath),
channel_id: channel,
// Literal "\n" sequences from the shell become real newlines.
initial_comment: text.replace(/\\n/g, '\n'),
request_file_info: false,
// Threads the upload under an existing message when --reply-ts is given.
thread_ts: replyTs ? replyTs : null,
};
yield client.files.uploadV2(requestOptions);
logger_1.Logger.notice(`Successfully uploaded ${filePath} to channel: ${channel}`);
}
catch (e) {
// A missing_scope platform error gets a targeted hint about the scopes to add.
if ('code' in e &&
e.code === web_api_1.ErrorCode.PlatformError &&
'message' in e &&
e.message.includes('missing_scope')) {
logger_1.Logger.error(`The provided token does not have the required scopes, please add files:write and chat:write to the token.`, shouldFail);
}
else {
logger_1.Logger.error(e, shouldFail);
}
}
}
logger_1.Logger.endTask();
}));

View File

@ -0,0 +1,65 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.slackMessageCommand = void 0;
/**
* External dependencies
*/
const extra_typings_1 = require("@commander-js/extra-typings");
const core_1 = require("@actions/core");
/**
* Internal dependencies
*/
const logger_1 = require("../../../core/logger");
const util_1 = require("../../../core/util");
const environment_1 = require("../../../core/environment");
/**
 * `slack message` command: posts a plain-text message to one or more Slack
 * channels via `chat.postMessage`, optionally exporting the message `ts` as a
 * GitHub Actions output for later threading.
 */
exports.slackMessageCommand = new extra_typings_1.Command('message')
.description('Send a plain-text message to a slack channel')
.argument('<token>', 'Slack authentication token bearing required scopes.')
.argument('<text>', 'Text based message to send to the slack channel.')
.argument('<channels...>', 'Slack channels to send the message to. Pass as many as you like.')
.option('--dont-fail', 'Do not fail the command if a message fails to send to any channel.')
.action((token, text, channels, { dontFail }) => __awaiter(void 0, void 0, void 0, function* () {
logger_1.Logger.startTask(`Attempting to send message to Slack for channels: ${channels.join(',')}`);
// Unless --dont-fail was passed, any error aborts the command.
const shouldFail = !dontFail;
for (const channel of channels) {
// Define the request options
const options = {
hostname: 'slack.com',
path: '/api/chat.postMessage',
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`,
},
};
try {
const { statusCode, body } = yield (0, util_1.requestAsync)(options, JSON.stringify({
channel,
// Literal "\n" sequences from the shell become real newlines.
text: text.replace(/\\n/g, '\n'),
}));
logger_1.Logger.endTask();
const response = JSON.parse(body);
// Slack reports API-level failures in the body (`ok: false`) even on HTTP 200.
if (!response.ok || statusCode !== 200) {
logger_1.Logger.error(`Slack API returned an error: ${response === null || response === void 0 ? void 0 : response.error}, message failed to send to ${channel}.`, shouldFail);
}
else {
logger_1.Logger.notice(`Slack message sent successfully to channel: ${channel}`);
// Expose the message timestamp so later steps can thread replies to it.
if ((0, environment_1.isGithubCI)()) {
(0, core_1.setOutput)('ts', response.ts);
}
}
}
catch (e) {
logger_1.Logger.error(e, shouldFail);
}
}
}));

View File

@ -0,0 +1,19 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
/**
* External dependencies
*/
const extra_typings_1 = require("@commander-js/extra-typings");
/**
* Internal dependencies
*/
const list_1 = __importDefault(require("./list"));
const profile_1 = __importDefault(require("./profile"));
/**
 * The `workflows` command group: list a repository's GitHub workflows or
 * profile their run durations.
 */
const program = new extra_typings_1.Command('workflows')
.description('Profile Github workflows')
.addCommand(profile_1.default)
.addCommand(list_1.default);
exports.default = program;

View File

@ -0,0 +1,36 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
/**
* External dependencies
*/
const extra_typings_1 = require("@commander-js/extra-typings");
/**
* Internal dependencies
*/
const data_1 = require("../../lib/data");
const logger_1 = require("../../../core/logger");
/**
 * `workflows list` command: prints a table of every workflow in the repository
 * (name, configuration file, id).
 */
const program = new extra_typings_1.Command('list')
.description('List all Github workflows in a repository')
.option('-o --owner <owner>', 'Repository owner. Default: woocommerce', 'woocommerce')
.option('-n --name <name>', 'Repository name. Default: woocommerce', 'woocommerce')
.action(({ owner, name }) => __awaiter(void 0, void 0, void 0, function* () {
logger_1.Logger.startTask('Listing all workflows');
const allWorkflows = yield (0, data_1.getAllWorkflows)(owner, name);
logger_1.Logger.notice(`There are ${allWorkflows.length} workflows in the repository.`);
// Trim the shared .github/workflows/ prefix so the table shows just the file name.
logger_1.Logger.table(['Workflow Name', 'configuration file', 'Id'], allWorkflows.map((workflow) => [
workflow.name,
workflow.path.replace('.github/workflows/', ''),
workflow.id,
]));
logger_1.Logger.endTask();
}));
exports.default = program;

View File

@ -0,0 +1,51 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
/**
* External dependencies
*/
const extra_typings_1 = require("@commander-js/extra-typings");
/**
* Internal dependencies
*/
const data_1 = require("../../lib/data");
const log_1 = require("../../lib/log");
const logger_1 = require("../../../core/logger");
/**
 * `workflows profile` command: summarizes a workflow's runs over a date range,
 * optionally drilling down into per-job and per-step timings (--show-steps).
 */
const program = new extra_typings_1.Command('profile')
.description('Profile GitHub workflows')
.argument('<start>', 'Start date in YYYY-MM-DD format')
.argument('<end>', 'End date in YYYY-MM-DD format')
.argument('<id>', 'Workflow Id or filename.')
.option('-o --owner <owner>', 'Repository owner. Default: woocommerce', 'woocommerce')
.option('-n --name <name>', 'Repository name. Default: woocommerce', 'woocommerce')
.option('-s --show-steps')
.action((start, end, id, { owner, name, showSteps }) => __awaiter(void 0, void 0, void 0, function* () {
const workflowData = yield (0, data_1.getWorkflowData)(id, owner, name);
logger_1.Logger.notice(`Processing workflow id ${id}: "${workflowData.name}" from ${start} to ${end}`);
const workflowRunData = yield (0, data_1.getWorkflowRunData)({
id,
owner,
name,
start,
end,
});
// Step-level data requires extra GraphQL calls, so only fetch it on request.
let runJobData = {};
if (showSteps) {
const { nodeIds } = workflowRunData;
runJobData = yield (0, data_1.getRunJobData)(nodeIds);
}
(0, log_1.logWorkflowRunResults)(workflowData.name, workflowRunData);
if (showSteps) {
(0, log_1.logJobResults)(runJobData);
(0, log_1.logStepResults)(runJobData);
}
}));
exports.default = program;

View File

@ -0,0 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
 * Workflow-profiling configuration.
 */
const config = {
// Successful runs longer than this are treated as outliers and excluded from timing stats.
WORKFLOW_DURATION_CUTOFF_MINUTES: 60,
};
exports.default = config;

View File

@ -0,0 +1,231 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getRunJobData = exports.getCompiledJobData = exports.getWorkflowRunData = exports.getWorkflowData = exports.getAllWorkflows = void 0;
/**
* Internal dependencies
*/
const api_1 = require("../../core/github/api");
const logger_1 = require("../../core/logger");
const github_1 = require("./github");
const config_1 = __importDefault(require("../config"));
/**
 * Get all workflows from the given repository, paging through the REST
 * endpoint and accumulating every workflow.
 *
 * @param {string} owner - The owner of the repository.
 * @param {string} name - The name of the repository.
 * @return {Promise<Array>} All workflows in the repository.
 */
const getAllWorkflows = (owner, name) => __awaiter(void 0, void 0, void 0, function* () {
// Running totals threaded through each page by requestPaginatedData.
const initialTotals = {
count_items_processed: 0,
count_items_available: 0,
workflows: [],
};
const requestOptions = {
owner,
repo: name,
};
const endpoint = 'GET /repos/{owner}/{repo}/actions/workflows';
// Folds one page of results into the running totals.
const processPage = (data, totals) => {
const { total_count, workflows } = data;
totals.count_items_available = total_count;
totals.count_items_processed += workflows.length;
totals.workflows = totals.workflows.concat(workflows);
return totals;
};
const totals = yield (0, github_1.requestPaginatedData)(initialTotals, endpoint, requestOptions, processPage);
return totals.workflows;
});
exports.getAllWorkflows = getAllWorkflows;
/**
 * Fold one page of workflow runs into the running totals.
 *
 * Counts runs per conclusion, and for successful runs records the GraphQL
 * node id and the wall-clock duration (excluding outliers beyond the
 * configured cutoff).
 *
 * @param {Object} data   GitHub workflow run page (`workflow_runs`, `total_count`).
 * @param {Object} totals Accumulator with per-conclusion counters, `nodeIds` and `times`.
 * @return {Object} The updated totals.
 */
const processWorkflowRunPage = (data, totals) => {
    const { workflow_runs, total_count } = data;
    if (total_count === 0) {
        return totals;
    }
    totals.count_items_available = total_count;
    totals.count_items_processed += workflow_runs.length;
    logger_1.Logger.notice(`Fetched workflows ${totals.count_items_processed} / ${totals.count_items_available}`);
    const { WORKFLOW_DURATION_CUTOFF_MINUTES } = config_1.default;
    workflow_runs.forEach((run) => {
        // Guard: conclusions outside the pre-seeded keys (e.g. 'timed_out',
        // 'action_required', or null for in-progress runs) would otherwise
        // increment `undefined` and poison the counter with NaN.
        totals[run.conclusion] = (totals[run.conclusion] || 0) + 1;
        if (run.conclusion === 'success') {
            totals.nodeIds.push(run.node_id);
            const time = new Date(run.updated_at).getTime() -
                new Date(run.run_started_at).getTime();
            // Drop runs exceeding the cutoff so stuck/hung runs don't skew timing stats.
            const maxDuration = 1000 * 60 * WORKFLOW_DURATION_CUTOFF_MINUTES;
            if (time < maxDuration) {
                totals.times.push(time);
            }
        }
    });
    return totals;
};
/**
 * Fetch metadata for a single workflow (not its runs).
 *
 * @param {number|string} id    Workflow id or workflow file name.
 * @param {string}        owner Repository owner.
 * @param {string}        name  Repository name.
 * @return {Object} Workflow metadata from the GitHub REST API.
 */
const getWorkflowData = (id, owner, name) => __awaiter(void 0, void 0, void 0, function* () {
const { data } = yield (0, api_1.octokitWithAuth)().request('GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}', {
owner,
repo: name,
workflow_id: id,
});
return data;
});
exports.getWorkflowData = getWorkflowData;
/**
 * Get aggregated run data for a given workflow over a date range.
 *
 * @param {Object} options request options
 * @param {number|string} options.id workflow id or filename
 * @param {string} options.owner repo owner
 * @param {string} options.name repo name
 * @param {string} options.start start date (YYYY-MM-DD)
 * @param {string} options.end end date (YYYY-MM-DD)
 * @return {Object} Totals: per-conclusion counts, successful run node ids, and durations.
 */
const getWorkflowRunData = (options) => __awaiter(void 0, void 0, void 0, function* () {
const { id, start, end, owner, name } = options;
// Counters are pre-seeded for the conclusions the profiler reports on.
const initialTotals = {
count_items_available: 0,
nodeIds: [],
times: [],
success: 0,
failure: 0,
cancelled: 0,
skipped: 0,
count_items_processed: 0,
};
const requestOptions = {
owner,
repo: name,
workflow_id: id,
// GitHub's date-range filter syntax: "YYYY-MM-DD..YYYY-MM-DD".
created: `${start}..${end}`,
};
const workflowRunEndpoint = 'GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs';
const totals = yield (0, github_1.requestPaginatedData)(initialTotals, workflowRunEndpoint, requestOptions, processWorkflowRunPage);
return totals;
});
exports.getWorkflowRunData = getWorkflowRunData;
/**
 * Partition an array into consecutive slices of at most `n` items.
 *
 * @param {Array}  array Items to split.
 * @param {number} n     Maximum chunk size (must be a positive integer).
 * @return {Array<Array>} The chunks, in original order.
 */
function splitArrayIntoChunks(array, n) {
    const chunks = [];
    let start = 0;
    while (start < array.length) {
        chunks.push(array.slice(start, start + n));
        start += n;
    }
    return chunks;
}
/**
 * Compile per-job and per-step timing data from GraphQL workflow run nodes.
 *
 * Runner housekeeping steps ('Set up job', 'Complete job', and 'Post *') are
 * excluded from the step timings.
 *
 * @param {Object} jobData Workflow run data returned by the GraphQL query.
 * @param {Object} result  Optional accumulator, so multiple pages can be merged.
 * @return {Object} Map of job name -> { times: number[], steps: { [stepName]: number[] } }.
 */
const getCompiledJobData = (jobData, result = {}) => {
    for (const runNode of jobData.nodes) {
        for (const checkRun of runNode.checkSuite.checkRuns.nodes) {
            const { name: jobName, startedAt, completedAt } = checkRun;
            const jobDuration = new Date(completedAt).getTime() - new Date(startedAt).getTime();
            if (!result[jobName]) {
                result[jobName] = {
                    times: [],
                    steps: {},
                };
            }
            result[jobName].times.push(jobDuration);
            for (const step of checkRun.steps.nodes) {
                const stepName = step.name;
                // Skip runner housekeeping steps; they are not part of the job's real work.
                if (stepName === 'Set up job' ||
                    stepName === 'Complete job' ||
                    stepName.startsWith('Post ')) {
                    continue;
                }
                const stepDuration = new Date(step.completedAt).getTime() - new Date(step.startedAt).getTime();
                if (!result[jobName].steps[stepName]) {
                    result[jobName].steps[stepName] = [];
                }
                result[jobName].steps[stepName].push(stepDuration);
            }
        }
    }
    return result;
};
exports.getCompiledJobData = getCompiledJobData;
/**
* Get data on individual workflow runs.
*
* @param {Array} nodeIds Workflow node ids
* @return {Object} Workflow run data
*/
const getRunJobData = (nodeIds) => __awaiter(void 0, void 0, void 0, function* () {
logger_1.Logger.notice(`Processing individual data for the ${nodeIds.length} successful workflow run(s)`);
let compiledJobData = {};
const perPage = 50;
const gql = (0, api_1.graphqlWithAuth)();
yield Promise.all(splitArrayIntoChunks(nodeIds, perPage).map((pageOfNodeIds, index) => __awaiter(void 0, void 0, void 0, function* () {
logger_1.Logger.notice(`Fetched runs ${pageOfNodeIds.length === perPage
? (index + 1) * perPage
: index * perPage + pageOfNodeIds.length} / ${nodeIds.length}`);
const data = yield gql(`
query($nodeIds: [ID!]!){
nodes ( ids: $nodeIds ) {
... on WorkflowRun {
id
workflow {
id
name
}
checkSuite {
checkRuns ( first: 20, filterBy: { status: COMPLETED } ) {
nodes {
name
id
startedAt
completedAt
steps ( first: 50 ) {
nodes {
name
startedAt
completedAt
}
}
}
}
}
}
}
}
`, {
nodeIds: pageOfNodeIds,
});
compiledJobData = (0, exports.getCompiledJobData)(data, compiledJobData);
})));
return compiledJobData;
});
exports.getRunJobData = getRunJobData;

View File

@ -0,0 +1,41 @@
"use strict";
// TypeScript-emitted __awaiter helper: runs a generator function as an async
// function by wrapping it in a Promise and stepping through each `yield`.
// Compiler output — do not hand-edit.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // adopt: coerce a yielded value into a thenable of the chosen Promise type P.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // step: advance the generator; resolve when done, otherwise chain onto the awaited value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.requestPaginatedData = void 0;
/**
* Internal dependencies
*/
const api_1 = require("../../core/github/api");
/**
 * Helper method for getting data from the GitHub REST API in paginated format.
 *
 * Walks every page of a GitHub list endpoint, threading a running `totals`
 * accumulator through each page. The accumulator must expose a
 * `count_items_processed` property (how many items have been folded in so
 * far), and the response body is expected to contain a `total_count` field,
 * as GitHub's paginated list endpoints do. Each page's payload is handed to
 * `processPage`, which returns the updated accumulator.
 *
 * @param {Object}   totals         Accumulator object; must track `count_items_processed`.
 * @param {string}   endpoint       API endpoint
 * @param {Object}   requestOptions API request options
 * @param {Function} processPage    A function to handle returned data and update totals
 * @param {number}   page           Page number to start from
 * @param {number}   per_page       Number of items per page
 * @return {Object} The updated totals object
 */
const requestPaginatedData = (totals, endpoint, requestOptions, processPage, page = 1, per_page = 50) => __awaiter(void 0, void 0, void 0, function* () {
    const { data } = yield (0, api_1.octokitWithAuth)().request(endpoint, Object.assign(Object.assign({}, requestOptions), { page,
        per_page }));
    let resultingTotals = processPage(data, totals);
    const { total_count } = data;
    if (total_count > resultingTotals.count_items_processed) {
        // Recurse for the next page, preserving the caller's page size.
        // (Previously `per_page` was dropped here, silently resetting the
        // page size to the default 50 from the second page onward.)
        resultingTotals = yield (0, exports.requestPaginatedData)(resultingTotals, endpoint, requestOptions, processPage, page + 1, per_page);
    }
    return resultingTotals;
});
exports.requestPaginatedData = requestPaginatedData;

View File

@ -0,0 +1,99 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.logStepResults = exports.logJobResults = exports.logWorkflowRunResults = void 0;
/**
* Internal dependencies
*/
const logger_1 = require("../../core/logger");
const math_1 = require("./math");
/**
* Print workflow run results to the console.
*
* @param {string} name Workflow name
* @param {Object} data Workflow run results
*/
const logWorkflowRunResults = (name, data) => {
    // Convert a millisecond duration to minutes, formatted to 2 decimals.
    const asMinutes = (ms) => (ms / 1000 / 60).toFixed(2);
    const headings = [
        'Workflow Name',
        'Total runs',
        'success',
        'failed',
        'cancelled',
        'average (min)',
        'median (min)',
        'longest (min)',
        'shortest (min)',
        '90th percentile (min)',
    ];
    const row = [
        name,
        data.count_items_available.toString(),
        data.success.toString(),
        data.failure.toString(),
        data.cancelled.toString(),
        asMinutes((0, math_1.calculateMean)(data.times)),
        asMinutes((0, math_1.calculateMedian)(data.times)),
        asMinutes(Math.max(...data.times)),
        asMinutes(Math.min(...data.times)),
        asMinutes((0, math_1.calculate90thPercentile)(data.times)),
    ];
    logger_1.Logger.table(headings, [row]);
};
exports.logWorkflowRunResults = logWorkflowRunResults;
/**
* Log job data from a workflow run.
*
* @param {Object} data compiled job data
*/
const logJobResults = (data) => {
    // Convert a millisecond duration to minutes, formatted to 2 decimals.
    const asMinutes = (ms) => (ms / 1000 / 60).toFixed(2);
    // One table row per job, summarising the distribution of its run times.
    const rows = Object.entries(data).map(([jobName, job]) => [
        jobName,
        asMinutes((0, math_1.calculateMean)(job.times)),
        asMinutes((0, math_1.calculateMedian)(job.times)),
        asMinutes(Math.max(...job.times)),
        asMinutes(Math.min(...job.times)),
        asMinutes((0, math_1.calculate90thPercentile)(job.times)),
    ]);
    logger_1.Logger.table([
        'Job Name',
        'average (min)',
        'median (min)',
        'longest (min)',
        'shortest (min)',
        '90th percentile (min)',
    ], rows);
};
exports.logJobResults = logJobResults;
/**
* Log job steps from a workflow run.
*
* @param {Object} data compiled job data
*/
const logStepResults = (data) => {
    // Convert a millisecond duration to minutes, formatted to 2 decimals.
    const asMinutes = (ms) => (ms / 1000 / 60).toFixed(2);
    // Print one table per job, with a row per step summarising its timings.
    for (const [jobName, job] of Object.entries(data)) {
        const rows = Object.entries(job.steps).map(([stepName, stepTimes]) => [
            stepName,
            asMinutes((0, math_1.calculateMean)(stepTimes)),
            asMinutes((0, math_1.calculateMedian)(stepTimes)),
            asMinutes(Math.max(...stepTimes)),
            asMinutes(Math.min(...stepTimes)),
            asMinutes((0, math_1.calculate90thPercentile)(stepTimes)),
        ]);
        logger_1.Logger.table([
            `Steps for job: ${jobName}`,
            'average (min)',
            'median (min)',
            'longest (min)',
            'shortest (min)',
            '90th percentile (min)',
        ], rows);
    }
};
exports.logStepResults = logStepResults;

View File

@ -0,0 +1,59 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.calculate90thPercentile = exports.calculateMedian = exports.calculateMean = void 0;
/**
* Calculate the mean value of an array of numbers.
*
* @param {Array} numbers Array of numbers
* @return {number} Mean value
*/
const calculateMean = (numbers) => {
    // An empty sample has no mean; report 0 by convention.
    if (numbers.length === 0) {
        return 0;
    }
    let total = 0;
    for (const value of numbers) {
        total += value;
    }
    return total / numbers.length;
};
exports.calculateMean = calculateMean;
/**
* Calculate the median value of an array of numbers.
*
* @param {Array} numbers Array of numbers
* @return {number} Median value
*/
const calculateMedian = (numbers) => {
    // An empty sample has no median; report 0 by convention.
    if (numbers.length === 0) {
        return 0;
    }
    // Sort a copy in ascending numeric order so the input is not mutated.
    const sorted = [...numbers].sort((a, b) => a - b);
    const mid = Math.floor(sorted.length / 2);
    // Even length: average of the two middle values; odd: the middle value.
    return sorted.length % 2 === 0
        ? (sorted[mid - 1] + sorted[mid]) / 2
        : sorted[mid];
};
exports.calculateMedian = calculateMedian;
/**
* Get the 90th percentile value of an array of numbers.
*
* @param {Array} numbers Array of numbers
* @return {number} 90th percentile value
*/
const calculate90thPercentile = (numbers) => {
    // Mirror calculateMean/calculateMedian: an empty sample yields 0.
    // (Previously the index computed below was -1 for an empty array, so
    // this returned undefined, which rendered as "NaN" in the log tables.)
    if (numbers.length === 0) {
        return 0;
    }
    // Sort a copy in ascending numeric order so the input is not mutated.
    const sortedNumbers = [...numbers].sort((a, b) => a - b);
    // Nearest-rank index for the 90th percentile.
    const index = Math.ceil(sortedNumbers.length * 0.9) - 1;
    return sortedNumbers[index];
};
exports.calculate90thPercentile = calculate90thPercentile;