Refactored Linting & Unit Testing Workflows (#40572)

Christopher Allford 2023-10-19 11:24:20 -07:00 committed by GitHub
parent 8827e42bd7
commit fbbca9c3e0
14 changed files with 1117 additions and 467 deletions

View File

@@ -3,6 +3,9 @@ description: Handles the installation, building, and caching of the projects wit
permissions: {}
inputs:
install:
description: Indicates whether or not the action should install any projects.
default: 'true'
install-filters:
description: The PNPM filter used to decide what projects to install. Supports multiline strings for multiple filters.
default: ''
@@ -54,6 +57,9 @@ runs:
- name: Install Node and PHP Dependencies
shell: bash
if: ${{ inputs.install == 'true' }}
env:
PUPPETEER_SKIP_DOWNLOAD: 'true'
run: |
pnpm -w install turbo
pnpm install ${{ steps.parse-input.outputs.INSTALL_FILTERS }}
@@ -70,6 +76,7 @@ runs:
fi
- name: Cache Build Output
if: ${{ inputs.build == 'true' }}
uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8
with:
path: .turbo
@@ -79,6 +86,6 @@
${{ runner.os }}-build-output
- name: Build
if: ${{ inputs.build == 'true' }}
if: ${{ inputs.install == 'true' && inputs.build == 'true' }}
shell: bash
run: pnpm -w exec turbo run turbo:build --cache-dir=".turbo" ${{ steps.parse-input.outputs.BUILD_FILTERS }}

View File

@@ -1,90 +1,98 @@
name: Run CI
on:
push:
branches:
- trunk
- 'release/**'
workflow_dispatch:
defaults:
run:
shell: bash
name: 'CI'
on:
pull_request:
push:
branches:
- 'trunk'
- 'release/*'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
group: '${{ github.workflow }}-${{ github.ref }}'
cancel-in-progress: true
jobs:
test:
name: PHP ${{ matrix.php }} WP ${{ matrix.wp }} - ${{ matrix.unittests }}
timeout-minutes: 30
runs-on: ubuntu-20.04
permissions:
contents: read
continue-on-error: ${{ matrix.wp == 'nightly' }}
strategy:
fail-fast: false
matrix:
php: ['7.4', '8.0']
wp: ['latest']
unittests: ['shard1', 'shard2']
include:
- wp: nightly
php: '7.4'
unittests: 'shard1'
- wp: nightly
php: '7.4'
unittests: 'shard2'
- wp: '6.1'
php: 7.4
unittests: 'shard1'
- wp: '6.1'
php: 7.4
unittests: 'shard2'
services:
database:
image: mysql:5.6
env:
MYSQL_ROOT_PASSWORD: root
ports:
- 3306:3306
options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=5
steps:
- uses: actions/checkout@v3
- name: Setup WooCommerce Monorepo
uses: ./.github/actions/setup-woocommerce-monorepo
with:
build-filters: woocommerce
- name: Tool versions
run: |
php --version
composer --version
- name: Build Admin feature config
working-directory: plugins/woocommerce
run: pnpm run build:feature-config
- id: parseMatrix
name: Parse Matrix Variables
uses: actions/github-script@v6
with:
script: |
const parseWPVersion = require( './.github/workflows/scripts/parse-wp-version' );
parseWPVersion( '${{ matrix.wp }}' ).then( ( version ) => {
core.setOutput( 'wpVersion', version );
} );
- name: Prepare Testing Environment
env:
WP_ENV_CORE: ${{ steps.parseMatrix.outputs.wpVersion }}
WP_ENV_PHP_VERSION: ${{ matrix.php }}
run: pnpm --filter=woocommerce env:test
- name: Run Tests
env:
WP_ENV_CORE: ${{ steps.parseMatrix.outputs.wpVersion }}
WP_ENV_PHP_VERSION: ${{ matrix.php }}
run: pnpm --filter=woocommerce test:unit:env --testsuite ${{ matrix.unittests }}
project-matrix:
# Since this is a monorepo, not every pull request or change is going to impact every project.
# Instead of running CI tasks on all projects indiscriminately, we use a script to detect
# which projects have changed and what kind of change occurred. This lets us build a
# matrix that we can use to run CI tasks only on the projects that need them.
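# As an illustration, each matrix entry produced by the script below has roughly this shape
# (the values here are hypothetical):
#   { "projectName": "woocommerce", "taskName": "default", "lintCommand": "lint",
#     "phpTestCommand": "test:php", "jsTestCommand": null, "e2eCommand": null,
#     "testEnvCommand": "env:test", "testEnvVars": { "WP_ENV_PHP_VERSION": "7.4" } }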
name: 'Build Project Matrix'
runs-on: 'ubuntu-20.04'
outputs:
matrix: ${{ steps.project-matrix.outputs.matrix }}
steps:
- uses: 'actions/checkout@v3'
name: 'Checkout'
with:
fetch-depth: 0
- uses: './.github/actions/setup-woocommerce-monorepo'
name: 'Setup Monorepo'
with:
install: false
- uses: actions/github-script@v6
id: 'project-matrix'
name: 'Build Matrix'
with:
script: |
let baseRef = ${{ toJson( github.base_ref ) }};
if ( baseRef ) {
baseRef = 'origin/' + baseRef;
}
const buildCIMatrix = require( './.github/workflows/scripts/build-ci-matrix' );
core.setOutput( 'matrix', JSON.stringify( await buildCIMatrix( baseRef ) ) );
project-task-matrix:
# This is the actual CI job that will be run against every project with applicable changes.
# Note that we only run the tasks that have commands set. Our script will set them if
# they are needed and so all the workflow needs to do is run them.
name: '${{ matrix.projectName }} - ${{ matrix.taskName }}' # Note: GitHub doesn't process expressions for skipped jobs so when there's no matrix the name will literally be this.
runs-on: 'ubuntu-20.04'
needs: 'project-matrix'
if: ${{ needs.project-matrix.outputs.matrix != '[]' }}
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON( needs.project-matrix.outputs.matrix ) }}
steps:
- uses: 'actions/checkout@v3'
name: 'Checkout'
with:
fetch-depth: 0
- uses: './.github/actions/setup-woocommerce-monorepo'
id: 'setup-monorepo'
name: 'Setup Monorepo'
with:
# install-filters: '${{ matrix.projectName }}...'
build-filters: '${{ matrix.projectName }}'
- name: 'Lint'
if: ${{ !cancelled() && matrix.lintCommand && steps.setup-monorepo.conclusion == 'success' }}
run: 'pnpm --filter="${{ matrix.projectName }}" ${{ matrix.lintCommand }}'
- name: 'Prepare Test Environment'
id: 'prepare-test-environment'
if: ${{ !cancelled() && matrix.testEnvCommand && steps.setup-monorepo.conclusion == 'success' }}
env: ${{ matrix.testEnvVars }}
run: 'pnpm --filter="${{ matrix.projectName }}" ${{ matrix.testEnvCommand }}'
- name: 'Test - JS'
if: ${{ !cancelled() && matrix.jsTestCommand && steps.setup-monorepo.conclusion == 'success' && ( ! matrix.testEnvCommand || steps.prepare-test-environment.conclusion == 'success' ) }}
run: 'pnpm --filter="${{ matrix.projectName }}" ${{ matrix.jsTestCommand }}'
- name: 'Test - PHP'
if: ${{ !cancelled() && matrix.phpTestCommand && steps.setup-monorepo.conclusion == 'success' && ( ! matrix.testEnvCommand || steps.prepare-test-environment.conclusion == 'success' ) }}
run: 'pnpm --filter="${{ matrix.projectName }}" ${{ matrix.phpTestCommand }}'
project-task-matrix-evaluation:
# In order to add a required status check we need a consistent job that we can grab onto.
# Since we are dynamically generating a project matrix, however, we can't rely
# on any specific job being present. We can get around this limitation by using
# a job that runs after all the others and either passes or fails based on the
# results of the other jobs in the workflow.
name: 'Evaluate Project Matrix'
runs-on: 'ubuntu-20.04'
needs: 'project-task-matrix'
if: ${{ always() }}
steps:
- name: 'Check Matrix Success'
run: |
result="${{ needs.project-task-matrix.result }}"
if [[ $result == "success" || $result == "skipped" ]]; then
echo "The matrix has completed successfully."
exit 0
else
echo "One or more jobs in the matrix has failed."
exit 1
fi

View File

@@ -1,51 +0,0 @@
name: Run code sniff on PR
on:
pull_request:
paths-ignore:
- '**/changelog/**'
defaults:
run:
shell: bash
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
test:
name: Code sniff (PHP 7.4, WP Latest)
timeout-minutes: 15
runs-on: ubuntu-20.04
permissions:
contents: read
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Get Changed Files
id: changed-files
uses: tj-actions/changed-files@v39
with:
path: plugins/woocommerce
files: "**/*.php"
- name: Setup WooCommerce Monorepo
if: steps.changed-files.outputs.any_changed == 'true'
uses: ./.github/actions/setup-woocommerce-monorepo
with:
build: false
- name: Tool versions
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: plugins/woocommerce
run: |
php --version
composer --version
phpcs-changed --version
- name: Run PHPCS
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: plugins/woocommerce
run: phpcs-changed -s --git --git-base ${{ github.event.pull_request.base.sha }} ${{ steps.changed-files.outputs.all_changed_files }}

View File

@@ -1,54 +0,0 @@
name: Lint packages
on:
pull_request:
paths-ignore:
- '**/changelog/**'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
lint-test-js:
name: Lint
runs-on: ubuntu-20.04
permissions:
contents: read
# This is required to allow the action to annotate the PR with the linting results.
checks: write
pull-requests: read
steps:
- uses: actions/checkout@v3
- name: Setup PNPM
uses: pnpm/action-setup@c3b53f6a16e57305370b4ae5a540c2077a1d50dd
with:
version: '8.6.7'
- name: Setup Node
uses: actions/setup-node@64ed1c7eab4cce3362f8c340dee64e5eaeef8f7c
with:
node-version-file: .nvmrc
cache: pnpm
registry-url: 'https://registry.npmjs.org'
- name: Install prerequisites
run: |
# ignore scripts is faster, and postinstall should not be needed for lint.
pnpm install --ignore-scripts
- name: Lint JS and CSS
run: pnpm run -r --filter='release-posts' --filter='woocommerce/client/admin...' --filter='@woocommerce/monorepo-utils' --filter='!@woocommerce/e2e*' --filter='!@woocommerce/api' --color lint
- name: Collect and Combine Eslint Reports
if: ${{ github.event.pull_request.head.repo.fork != true && always() }}
run: node ./.github/workflows/scripts/collect-eslint-reports.js
- name: Annotate Code Linting Results
uses: ataylorme/eslint-annotate-action@a1bf7cb320a18aa53cb848a267ce9b7417221526
if: ${{ github.event.pull_request.head.repo.fork != true && always() }}
with:
repo-token: '${{ secrets.GITHUB_TOKEN }}'
report-json: 'combined_eslint_report.json'

View File

@@ -1,40 +0,0 @@
name: Run tests for JS packages and woocommerce-admin/client
on:
pull_request:
paths-ignore:
- '**/changelog/**'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
test-js:
name: Run JS Tests
runs-on: ubuntu-20.04
permissions:
contents: read
steps:
- uses: actions/checkout@v3
- name: Setup PNPM
uses: pnpm/action-setup@c3b53f6a16e57305370b4ae5a540c2077a1d50dd
with:
version: '8.6.7'
- name: Setup Node
uses: actions/setup-node@64ed1c7eab4cce3362f8c340dee64e5eaeef8f7c
with:
node-version-file: .nvmrc
cache: pnpm
registry-url: 'https://registry.npmjs.org'
- name: Install prerequisites
run: |
# ignore scripts is faster, and postinstall should not be needed for tests.
pnpm install --ignore-scripts
- name: Test
run: pnpm run test --filter='woocommerce/client/admin...' --filter='@woocommerce/monorepo-utils' --filter='!@woocommerce/e2e*' --filter='@woocommerce/monorepo-utils' --filter='!@woocommerce/api' --color

View File

@@ -1,24 +0,0 @@
name: Run unit for other PHP plugins
on:
pull_request:
paths-ignore:
- '**/changelog/**'
permissions: {}
jobs:
test:
runs-on: ubuntu-20.04
permissions:
contents: read
steps:
- uses: actions/checkout@v3
- name: Setup WooCommerce Monorepo
uses: ./.github/actions/setup-woocommerce-monorepo
with:
php-version: '8.0'
- name: Run WooCommerce Docs Tests
run: pnpm test:unit
working-directory: ./plugins/woocommerce-docs

View File

@@ -1,89 +0,0 @@
name: Run unit tests on PR
on:
pull_request:
paths-ignore:
- '**/changelog/**'
defaults:
run:
shell: bash
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
test:
if: ${{ github.event.pull_request.user.login != 'github-actions[bot]' }}
name: PHP ${{ matrix.php }} WP ${{ matrix.wp }} - ${{ matrix.unittests }} ${{ matrix.hpos && 'HPOS' || '' }}
timeout-minutes: 30
runs-on: ubuntu-20.04
permissions:
contents: read
continue-on-error: ${{ matrix.wp == 'nightly' }}
env:
HPOS: ${{ matrix.hpos }}
strategy:
fail-fast: false
matrix:
php: ['7.4', '8.0']
wp: ['latest']
unittests: ['shard1', 'shard2']
include:
- wp: nightly
php: '7.4'
unittests: 'shard1'
- wp: nightly
php: '7.4'
unittests: 'shard2'
- wp: '6.1'
php: 7.4
unittests: 'shard1'
- wp: '6.1'
php: 7.4
unittests: 'shard2'
- wp: 'latest'
php: '7.4'
hpos: true
unittests: 'shard1'
- wp: 'latest'
php: '7.4'
hpos: true
unittests: 'shard2'
services:
database:
image: mysql:5.6
env:
MYSQL_ROOT_PASSWORD: root
ports:
- 3306:3306
options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=5
steps:
- uses: actions/checkout@v3
- name: Setup WooCommerce Monorepo
uses: ./.github/actions/setup-woocommerce-monorepo
with:
php-version: ${{ matrix.php }}
- id: parseMatrix
name: Parse Matrix Variables
uses: actions/github-script@v6
with:
script: |
const parseWPVersion = require( './.github/workflows/scripts/parse-wp-version' );
parseWPVersion( '${{ matrix.wp }}' ).then( ( version ) => {
core.setOutput( 'wpVersion', version );
} );
- name: Prepare Testing Environment
env:
WP_ENV_CORE: ${{ steps.parseMatrix.outputs.wpVersion }}
WP_ENV_PHP_VERSION: ${{ matrix.php }}
run: pnpm --filter=woocommerce env:test
- name: Run Tests
env:
WP_ENV_CORE: ${{ steps.parseMatrix.outputs.wpVersion }}
WP_ENV_PHP_VERSION: ${{ matrix.php }}
run: pnpm --filter=woocommerce test:unit:env --testsuite ${{ matrix.unittests }}

View File

@@ -1,24 +0,0 @@
name: Prime caches against trunk
on:
push:
branches:
- trunk
workflow_dispatch:
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
prime:
name: Prime cache
runs-on: ubuntu-20.04
permissions:
contents: read
steps:
- uses: actions/checkout@v3
- name: Setup WooCommerce Monorepo
uses: ./.github/actions/setup-woocommerce-monorepo

View File

@@ -0,0 +1,972 @@
/**
* External dependencies.
*/
const child_process = require( 'child_process' );
const fs = require( 'fs' );
const https = require( 'http' );
/**
* Uses the WordPress API to get the download URL to the latest version of an X.X version line. This
* also accepts "latest-X" to get an offset from the latest version of WordPress.
*
* @param {string} wpVersion The version of WordPress to look for.
* @return {Promise.<string>} The precise WP version download URL.
*/
async function getPreciseWPVersionURL( wpVersion ) {
return new Promise( ( resolve, reject ) => {
// We're going to use the WordPress.org API to get information about available versions of WordPress.
const request = https.get(
'http://api.wordpress.org/core/stable-check/1.0/',
( response ) => {
// Listen for the response data.
let responseData = '';
response.on( 'data', ( chunk ) => {
responseData += chunk;
} );
// Once we have the entire response we can process it.
response.on( 'end', () =>
resolve( JSON.parse( responseData ) )
);
}
);
request.on( 'error', ( error ) => {
reject( error );
} );
} ).then( ( allVersions ) => {
// Note: allVersions is an object where the keys are the version and the value is information about the version's status.
// If we're requesting a "latest" offset then we need to figure out what version line we're offsetting from.
const latestSubMatch = wpVersion.match( /^latest(?:-([0-9]+))?$/i );
if ( latestSubMatch ) {
for ( const version in allVersions ) {
if ( allVersions[ version ] !== 'latest' ) {
continue;
}
// We don't care about the patch version because we will find
// the latest version from the version line below.
const versionParts = version.match( /^([0-9]+)\.([0-9]+)/ );
// We're going to subtract the offset to figure out the right version.
let offset = parseInt( latestSubMatch[ 1 ] ?? 0, 10 );
let majorVersion = parseInt( versionParts[ 1 ], 10 );
let minorVersion = parseInt( versionParts[ 2 ], 10 );
while ( offset > 0 ) {
minorVersion--;
if ( minorVersion < 0 ) {
majorVersion--;
minorVersion = 9;
}
offset--;
}
// Set the version that we found in the offset.
wpVersion = majorVersion + '.' + minorVersion;
}
}
// Scan through all of the versions to find the latest version in the version line.
let latestVersion = null;
let latestPatch = -1;
for ( const v in allVersions ) {
// Parse the version so we can make sure we're looking for the latest.
const matches = v.match( /([0-9]+)\.([0-9]+)(?:\.([0-9]+))?/ );
// We only care about the correct minor version.
const minor = `${ matches[ 1 ] }.${ matches[ 2 ] }`;
if ( minor !== wpVersion ) {
continue;
}
// Track the latest version in the line.
const patch =
matches[ 3 ] === undefined ? 0 : parseInt( matches[ 3 ], 10 );
if ( patch > latestPatch ) {
latestPatch = patch;
latestVersion = v;
}
}
if ( ! latestVersion ) {
throw new Error(
`Unable to find latest version for version line ${ wpVersion }.`
);
}
return `https://wordpress.org/wordpress-${ latestVersion }.zip`;
} );
}
/**
* Parses a display-friendly WordPress version and returns a link to download the given version.
*
* @param {string} wpVersion A display-friendly WordPress version. Supports ("master", "trunk", "nightly", "latest", "latest-X", "X.X" for version lines, and "X.X.X" for specific versions)
* @return {Promise.<string>} A link to download the given version of WordPress.
*/
async function parseWPVersion( wpVersion ) {
// Allow for download URLs in place of a version.
if ( wpVersion.match( /[a-z]+:\/\//i ) ) {
return wpVersion;
}
// Start with versions we can infer immediately.
switch ( wpVersion ) {
case 'master':
case 'trunk': {
return 'WordPress/WordPress#master';
}
case 'nightly': {
return 'https://wordpress.org/nightly-builds/wordpress-latest.zip';
}
case 'latest': {
return 'https://wordpress.org/latest.zip';
}
}
// We can also infer X.X.X versions immediately.
const parsedVersion = wpVersion.match( /^([0-9]+)\.([0-9]+)\.([0-9]+)$/ );
if ( parsedVersion ) {
// Note that X.X.0 versions use a X.X download URL.
let urlVersion = `${ parsedVersion[ 1 ] }.${ parsedVersion[ 2 ] }`;
if ( parsedVersion[ 3 ] !== '0' ) {
urlVersion += `.${ parsedVersion[ 3 ] }`;
}
return `https://wordpress.org/wordpress-${ urlVersion }.zip`;
}
// Since we haven't found a URL yet we're going to use the WordPress.org API to try and infer one.
return getPreciseWPVersionURL( wpVersion );
}
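// A few illustrative mappings (exact download URLs depend on the WordPress.org API response):
//   parseWPVersion( 'trunk' )    -> 'WordPress/WordPress#master'
//   parseWPVersion( 'nightly' )  -> 'https://wordpress.org/nightly-builds/wordpress-latest.zip'
//   parseWPVersion( '6.2.0' )    -> 'https://wordpress.org/wordpress-6.2.zip'
//   parseWPVersion( 'latest-1' ) -> the newest release of the previous version line.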
/**
* Given a path within a project, returns the path to the project itself.
*
* @param {string} absolutePath An absolute path to a project or a project file.
* @return {string} The path to the project.
*/
function getProjectPathFromAbsolutePath( absolutePath ) {
const matches = absolutePath.match(
// Note the special handling for `plugins/woocommerce/client/*` packages.
/((?:plugins\/woocommerce\/client\/[a-z0-9\-_.]+|plugins\/|packages\/[a-z0-9\-_.]+\/|tools\/)[a-z0-9\-_.]+)\/?/i
);
if ( ! matches ) {
return null;
}
return matches[ 1 ];
}
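// For example, 'plugins/woocommerce/includes/class-wc-cart.php' resolves to the project
// 'plugins/woocommerce', while 'packages/js/components/src/index.ts' resolves to
// 'packages/js/components'. Paths outside of those roots return null.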
/**
* A record for a project and all of the changes that have occurred to it.
*
* @typedef {Object} ProjectChanges
* @property {string} path The path to the project.
* @property {boolean} phpSourceChanges Whether or not the project has changes to PHP source files.
* @property {boolean} jsSourceChanges Whether or not the project has changes to JS source files.
* @property {boolean} assetSourceChanges Whether or not the project has changes to asset source files.
* @property {boolean} documentationChanges Whether or not the project has documentation changes.
* @property {boolean} phpTestChanges Whether or not the project has changes to PHP test files.
* @property {boolean} jsTestChanges Whether or not the project has changes to JS test files.
* @property {boolean} e2eTestChanges Whether or not the project has changes to e2e test files.
*/
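// As a sketch, a pull request that only touches "plugins/woocommerce/src/Admin/Foo.php" would
// produce a single record like { path: 'plugins/woocommerce', phpSourceChanges: true } with
// every other flag set to false (before any cascading is applied).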
/**
* Scans through the files that have been changed since baseRef and returns information about the projects that have
* changes and the kind of changes that have taken place.
*
* @param {string} baseRef The base branch to check for changes against.
* @return {Array.<ProjectChanges>} An array of projects and the kinds of changes that have occurred.
*/
function detectProjectChanges( baseRef ) {
// Using a diff will not only allow us to find the projects that have changed but we can also identify the nature of the change.
const output = child_process.execSync(
`git diff --relative --name-only ${ baseRef }`,
{ encoding: 'utf8' }
);
const changedFilePaths = output.split( '\n' );
// Scan all of the changed files into the projects they belong to.
const projectsWithChanges = {};
for ( const filePath of changedFilePaths ) {
if ( ! filePath ) {
continue;
}
const projectPath = getProjectPathFromAbsolutePath( filePath );
if ( ! projectPath ) {
console.log(
`${ filePath }: ignoring change because it is not part of a project.`
);
continue;
}
if ( ! projectsWithChanges[ projectPath ] ) {
projectsWithChanges[ projectPath ] = [];
}
projectsWithChanges[ projectPath ].push( filePath );
console.log(
`${ filePath }: marked as a change in project "${ projectPath }".`
);
}
// Scan through the projects that have changes and identify the type of changes that have occurred.
const projectChanges = [];
for ( const projectPath in projectsWithChanges ) {
// We are only interested in projects that are part of our workspace.
if ( ! fs.existsSync( `${ projectPath }/package.json` ) ) {
console.error( `${ projectPath }: no "package.json" file found.` );
continue;
}
// Keep track of the kind of changes that have occurred.
let phpTestChanges = false;
let jsTestChanges = false;
let e2eTestChanges = false;
let phpSourceChanges = false;
let jsSourceChanges = false;
let assetSourceChanges = false;
let documentationChanges = false;
// Now we can look through all of the files that have changed and figure out the type of changes that have occurred.
const fileChanges = projectsWithChanges[ projectPath ];
for ( const filePath of fileChanges ) {
// Some types of changes are not interesting and should be ignored completely.
if ( filePath.match( /\/changelog\//i ) ) {
console.log(
`${ projectPath }: ignoring changelog file "${ filePath }".`
);
continue;
}
// As a preface, the detection of changes here is likely not absolutely perfect. We're going to be making some assumptions
// about file extensions and paths in order to decide whether or not something is a type of change. This should still
// be okay though since we have other cases where we check everything without looking at any changes to filter.
// We can identify PHP test files using PSR-4 or WordPress file naming conventions. We also have
// a fallback to any PHP files in a "tests" directory or its children.
// Note: We need to check for this before we check for source files, otherwise we will
// consider test file changes to be PHP source file changes.
if (
filePath.match( /(?:[a-z]+Test|-test|\/tests?\/[^\.]+)\.php$/i )
) {
phpTestChanges = true;
console.log(
`${ projectPath }: detected PHP test file change in "${ filePath }".`
);
continue;
}
// We can identify JS test files using Jest file naming conventions. We also have
// a fallback to any JS files in a "tests" directory or its children, but we need to
// avoid picking up E2E test files in the process.
// Note: We need to check for this before we check for source files, otherwise we will
// consider test file changes to be JS source file changes.
if (
filePath.match(
/(?:(?<!e2e[^\.]+)\.(?:spec|test)|\/tests?\/(?!e2e)[^\.]+)\.(?:t|j)sx?$/i
)
) {
jsTestChanges = true;
console.log(
`${ projectPath }: detected JS test file change in "${ filePath }".`
);
continue;
}
// We're going to make an assumption about where E2E test files live based on what seems typical.
if ( filePath.match( /\/tests?\/e2e/i ) ) {
e2eTestChanges = true;
console.log(
`${ projectPath }: detected E2E test file change in "${ filePath }".`
);
continue;
}
// Generally speaking, PHP files and changes to Composer dependencies affect PHP source code.
if (
filePath.match( /\.(?:php|html)$|composer\.(?:json|lock)$/i )
) {
phpSourceChanges = true;
console.log(
`${ projectPath }: detected PHP source file change in "${ filePath }".`
);
continue;
}
// JS changes should also include JSX and TS files.
if (
filePath.match( /\.(?:(?:t|j)sx?|json|html)$|package\.json$/i )
) {
jsSourceChanges = true;
console.log(
`${ projectPath }: detected JS source file change in "${ filePath }".`
);
continue;
}
// We should also keep an eye on asset file changes since these may affect
// presentation in different tests that have expectations about this data.
if (
filePath.match(
/\.(?:png|jpg|gif|scss|css|ttf|svg|eot|woff|xml|csv|txt|ya?ml)$/i
)
) {
assetSourceChanges = true;
console.log(
`${ projectPath }: detected asset file change in "${ filePath }".`
);
continue;
}
// We can be strict with documentation changes because they are only ever going to be markdown files.
if ( filePath.match( /\.md$/i ) ) {
documentationChanges = true;
console.log(
`${ projectPath }: detected documentation change in "${ filePath }".`
);
continue;
}
}
// We only want to track a changed project when we have encountered file changes that we care about.
if (
! phpSourceChanges &&
! jsSourceChanges &&
! assetSourceChanges &&
! documentationChanges &&
! phpTestChanges &&
! jsTestChanges &&
! e2eTestChanges
) {
console.log( `${ projectPath }: no changes detected.` );
continue;
}
// We can use the information we've collected to generate the project change object.
projectChanges.push( {
path: projectPath,
phpSourceChanges,
jsSourceChanges,
assetSourceChanges,
documentationChanges,
phpTestChanges,
jsTestChanges,
e2eTestChanges,
} );
}
return projectChanges;
}
/**
* Check the changes that occurred in each project and add any projects that are affected by those changes.
*
* @param {Array.<ProjectChanges>} projectChanges The project changes to cascade.
* @return {Array.<ProjectChanges>} The project changes with any cascading changes.
*/
function cascadeProjectChanges( projectChanges ) {
const cascadedChanges = {};
// Scan through all of the changes and add any other projects that are affected by the changes.
for ( const changes of projectChanges ) {
// Populate the change object for the project if it doesn't already exist.
// It might exist if the project has been affected by another project.
if ( ! cascadedChanges[ changes.path ] ) {
cascadedChanges[ changes.path ] = changes;
}
// Make sure that we are recording any "true" changes that have occurred either in the project itself or as a result of another project.
for ( const property in changes ) {
// We're going to assume the only properties on this object are "path" and the change flags.
if ( property === 'path' ) {
continue;
}
cascadedChanges[ changes.path ][ property ] =
changes[ property ] ||
cascadedChanges[ changes.path ][ property ];
}
// Use PNPM to get a list of dependent packages that may have been affected.
// Note: This is actually a pretty slow way of doing this. If we find it is
// taking too long we can instead use `--depth="Infinity" --json` and then
// traverse the dependency tree ourselves.
const output = child_process.execSync(
`pnpm list --filter='...{./${ changes.path }}' --only-projects --depth='-1' --parseable`,
{ encoding: 'utf8' }
);
// The `--parseable` flag returns a list of package directories separated by newlines.
const affectedProjects = output.split( '\n' );
// At the VERY least PNPM will return the path to the project if it exists. The only way
// the output will be empty is if the project isn't part of the workspace, in which case we
// can ignore it. We expect this to happen and thus haven't used the caret in the filter above.
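// As a hypothetical example, a source change in 'packages/js/components' could list dependent
// workspace projects such as 'plugins/woocommerce/client/admin' here, each of which would then
// receive the cascaded source-change flags below.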
if ( ! affectedProjects ) {
continue;
}
// Run through and decide whether or not the project has been affected by the changes.
for ( const affected of affectedProjects ) {
const affectedProjectPath =
getProjectPathFromAbsolutePath( affected );
if ( ! affectedProjectPath ) {
continue;
}
// Skip the project we're checking against since it'll be in the results.
if ( affectedProjectPath === changes.path ) {
continue;
}
// Only changes to source files will impact other projects.
if (
! changes.phpSourceChanges &&
! changes.jsSourceChanges &&
! changes.assetSourceChanges
) {
continue;
}
console.log(
`${ changes.path }: cascading source file changes to ${ affectedProjectPath }.`
);
// Populate the change object for the affected project if it doesn't already exist.
if ( ! cascadedChanges[ affectedProjectPath ] ) {
cascadedChanges[ affectedProjectPath ] = {
path: affectedProjectPath,
phpSourceChanges: false,
jsSourceChanges: false,
assetSourceChanges: false,
documentationChanges: false,
phpTestChanges: false,
jsTestChanges: false,
e2eTestChanges: false,
};
}
// Consider the source files to have changed in the affected project because they are dependent on the source files in the changed project.
if ( changes.phpSourceChanges ) {
cascadedChanges[ affectedProjectPath ].phpSourceChanges = true;
}
if ( changes.jsSourceChanges ) {
cascadedChanges[ affectedProjectPath ].jsSourceChanges = true;
}
if ( changes.assetSourceChanges ) {
cascadedChanges[
affectedProjectPath
].assetSourceChanges = true;
}
}
}
return Object.values( cascadedChanges );
}
/**
* The valid commands that we can execute.
*
* @typedef {string} CommandType
* @enum {CommandType}
*/
const COMMAND_TYPE = {
Lint: 'lint',
TestPHP: 'test:php',
TestJS: 'test:js',
E2E: 'e2e',
};
/**
* Checks a command to see whether or not it is valid.
*
* @param {CommandType} command The command to check.
* @return {boolean} Whether or not the command is valid.
*/
function isValidCommand( command ) {
for ( const commandType in COMMAND_TYPE ) {
if ( COMMAND_TYPE[ commandType ] === command ) {
return true;
}
}
return false;
}
/**
* Indicates whether or not the command is a test command.
*
* @param {CommandType} command The command to check.
* @return {boolean} Whether or not the command is a test command.
*/
function isTestCommand( command ) {
return (
command === COMMAND_TYPE.TestPHP ||
command === COMMAND_TYPE.TestJS ||
command === COMMAND_TYPE.E2E
);
}
/**
* Details about a task that should be run for a project.
*
* @typedef {Object} ProjectTask
* @property {string} name The name of the task.
* @property {Array.<CommandType>} commandsToRun The commands that the project should run.
* @property {Object.<string,string>} customCommands Any commands that should be run in place of the default commands.
* @property {string|null} testEnvCommand The command that should be run to start the test environment if one is needed.
* @property {Object.<string,string>} testEnvConfig Any configuration for the test environment if one is needed.
*/
/**
* Parses the task configuration from the package.json file and returns a task object.
*
* @param {Object} packageFile The package file for the project.
* @param {Object} config The raw task configuration.
* @param {Array.<CommandType>} commandsForChanges The commands that we should run for the project.
* @param {ProjectTask|null} parentTask The task that this task is a child of.
* @return {ProjectTask|null} The parsed task.
*/
function parseTaskConfig(
packageFile,
config,
commandsForChanges,
parentTask
) {
// Child tasks are required to have a name because otherwise
// every task for a project would be named "default".
let taskName = 'default';
if ( parentTask ) {
taskName = config.name;
if ( ! taskName ) {
throw new Error( `${ packageFile.name }: missing name for task.` );
}
}
// When the config object declares a command filter we should only keep the
// commands that it contains in the list of commands to run.
if ( config?.commandFilter ) {
// Check for invalid commands being used since they won't do anything.
for ( const command of config.commandFilter ) {
if ( ! isValidCommand( command ) ) {
throw new Error(
`${ packageFile.name }: invalid command filter type of "${ command }" for task "${ taskName }".`
);
}
}
// Apply the command filter.
commandsForChanges = commandsForChanges.filter( ( command ) =>
config.commandFilter.includes( command )
);
}
// Custom commands allow developers to support a command without having to use the
// standardized script name for it. For ease of use we will add parent task
// custom commands to children and allow the children to override any
// specific tasks they want.
const customCommands = Object.assign(
{},
parentTask?.customCommands ?? {}
);
if ( config?.customCommands ) {
for ( const customCommandType in config.customCommands ) {
// Check for invalid commands being mapped since they won't do anything.
if ( ! isValidCommand( customCommandType ) ) {
throw new Error(
`${ packageFile.name }: invalid custom command type "${ customCommandType }" for task "${ taskName }".`
);
}
// Custom commands may have tokens that we need to remove in order to check them for existence.
const split =
config.customCommands[ customCommandType ].split( ' ' );
const customCommand = split[ 0 ];
if ( ! packageFile.scripts?.[ customCommand ] ) {
throw new Error(
`${ packageFile.name }: unknown custom "${ customCommandType }" command "${ customCommand }" for task "${ taskName }".`
);
}
// We only need to bother with commands we can actually run.
if ( commandsForChanges.includes( customCommandType ) ) {
customCommands[ customCommandType ] =
config.customCommands[ customCommandType ];
}
}
}
// Our goal is to run only the commands that apply to the detected changes; however, not
// all projects will have scripts for all of the commands we want to run.
const commandsToRun = [];
for ( const command of commandsForChanges ) {
// We have already filtered and confirmed custom commands.
if ( customCommands[ command ] ) {
commandsToRun.push( command );
continue;
}
// Commands that don't have a script to run should be ignored.
if ( ! packageFile.scripts?.[ command ] ) {
continue;
}
commandsToRun.push( command );
}
// We don't want to create a task if there aren't any commands to run.
if ( ! commandsToRun.length ) {
return null;
}
// The test environment command only needs to be set when a test environment is needed.
let testEnvCommand = null;
if ( commandsToRun.some( ( command ) => isTestCommand( command ) ) ) {
if ( config?.testEnvCommand ) {
// Make sure that a developer hasn't put in a test command that doesn't exist.
if ( ! packageFile.scripts?.[ config.testEnvCommand ] ) {
throw new Error(
`${ packageFile.name }: unknown test environment command "${ config.testEnvCommand }" for task "${ taskName }".`
);
}
testEnvCommand =
config?.testEnvCommand ?? parentTask?.testEnvCommand;
} else if ( packageFile.scripts?.[ 'test:env:start' ] ) {
testEnvCommand = 'test:env:start';
}
}
// The test environment configuration should also cascade from parent task to child task.
const testEnvConfig = Object.assign(
{},
parentTask?.testEnvConfig ?? {},
config?.testEnvConfig ?? {}
);
return {
name: taskName,
commandsToRun,
customCommands,
testEnvCommand,
testEnvConfig,
};
}
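// As a minimal illustration, a hypothetical project whose package.json defines a "lint:js"
// script and "config": { "ci": { "customCommands": { "lint": "lint:js" } } } would produce a
// task whose lint step runs "lint:js" in place of the default "lint" script.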
/**
* Details about a project and the tasks that should be run for it.
*
* @typedef {Object} ProjectTasks
* @property {string} name The name of the project.
* @property {Array.<ProjectTask>} tasks The tasks that should be run for the project.
*/
/**
* Evaluates the given changes against the possible commands and returns those that should run as
* a result of the change criteria being met.
*
* @param {ProjectChanges|null} changes Any changes that have occurred to the project.
* @return {Array.<string>} The commands that can be run for the project.
*/
function getCommandsForChanges( changes ) {
// Here are all of the commands that we support and the change criteria that they require to execute.
// We treat the command's criteria as passing if any of the properties are true.
const commandCriteria = {
[ COMMAND_TYPE.Lint ]: [
'phpSourceChanges',
'jsSourceChanges',
'assetSourceChanges',
'phpTestChanges',
'jsTestChanges',
],
[ COMMAND_TYPE.TestPHP ]: [ 'phpSourceChanges', 'phpTestChanges' ],
[ COMMAND_TYPE.TestJS ]: [ 'jsSourceChanges', 'jsTestChanges' ],
//[ COMMAND_TYPE.E2E ]: [ 'phpSourceChanges', 'jsSourceChanges', 'assetSourceChanges', 'e2eTestFileChanges' ],
};
// We only want the list of possible commands to contain those that
// the project actually has and meet the criteria for execution.
const commandsForChanges = [];
for ( const command in commandCriteria ) {
// The criteria only needs to be checked if there is a change object to evaluate.
if ( changes ) {
let commandCriteriaMet = false;
for ( const criteria of commandCriteria[ command ] ) {
// Confidence check to make sure the criteria wasn't misspelled.
if ( ! changes.hasOwnProperty( criteria ) ) {
throw new Error(
`Invalid criteria "${ criteria }" for command "${ command }".`
);
}
if ( changes[ criteria ] ) {
commandCriteriaMet = true;
break;
}
}
// As long as we meet one of the criteria requirements we can add the command.
if ( ! commandCriteriaMet ) {
continue;
}
}
commandsForChanges.push( command );
console.log( `${ changes?.path ?? 'workspace' }: command "${ command }" added based on given changes.` );
}
return commandsForChanges;
}
/**
* Builds a task object for the project with support for limiting the tasks to only those that have changed.
*
* @param {string} projectPath The path to the project.
* @param {ProjectChanges|null} changes Any changes that have occurred to the project.
* @return {ProjectTasks|null} The tasks that should be run for the project.
*/
function buildTasksForProject( projectPath, changes ) {
// There's nothing to do if the project has no tasks.
const commandsForChanges = getCommandsForChanges( changes );
if ( ! commandsForChanges.length ) {
return null;
}
// Load the package file so we can check for task existence before adding them.
const rawPackageFile = fs.readFileSync(
`${ projectPath }/package.json`,
'utf8'
);
const packageFile = JSON.parse( rawPackageFile );
// We're going to parse each of the tasks and add them to the list if necessary.
const projectTasks = [];
// Parse the task configuration from the package.json file.
const parentTask = parseTaskConfig(
packageFile,
packageFile.config?.ci,
commandsForChanges,
null
);
if ( parentTask ) {
projectTasks.push( parentTask );
}
if ( packageFile.config?.ci?.additionalTasks ) {
for ( const additionalTask of packageFile.config.ci.additionalTasks ) {
const task = parseTaskConfig(
packageFile,
additionalTask,
commandsForChanges,
parentTask
);
if ( task ) {
projectTasks.push( task );
}
}
}
if ( ! projectTasks.length ) {
return null;
}
return {
name: packageFile.name,
tasks: projectTasks,
};
}
/**
* This function takes a list of project changes and generates a list of tasks that should be run for each project.
*
* @param {Array.<ProjectChanges>} projectChanges The project changes to generate tasks for.
* @return {Array.<ProjectTasks>} All of the projects and the tasks that they should undertake.
*/
function generateProjectTasksForChanges( projectChanges ) {
const projectTasks = [];
// Scan through all of the changes and generate task objects for them.
for ( const changes of projectChanges ) {
const tasks = buildTasksForProject( changes.path, changes );
if ( tasks ) {
projectTasks.push( tasks );
}
}
return projectTasks;
}
/**
* Generates a list of tasks that should be run for each project in the workspace.
*
* @return {Array.<ProjectTasks>} All of the projects and the tasks that they should undertake.
*/
function generateProjectTasksForWorkspace() {
// We can use PNPM to quickly get a list of every project in the workspace.
const output = child_process.execSync(
"pnpm list --filter='*' --only-projects --depth='-1' --parseable",
{ encoding: 'utf8' }
);
// The `--parseable` flag returns a list of package directories separated by newlines.
const workspaceProjects = output.split( '\n' );
const projectTasks = [];
for ( const project of workspaceProjects ) {
const projectPath = getProjectPathFromAbsolutePath( project );
if ( ! projectPath ) {
continue;
}
const tasks = buildTasksForProject( projectPath, null );
if ( tasks ) {
projectTasks.push( tasks );
}
}
return projectTasks;
}
/**
* A CI matrix for the GitHub workflow.
*
* @typedef {Object} CIMatrix
* @property {string} projectName The name of the project.
* @property {string} taskName The name of the task.
* @property {Object.<string,string>} testEnvVars The environment variables for the test environment.
* @property {string|null} lintCommand The command to run if linting is necessary.
* @property {string|null} phpTestCommand The command to run if PHP tests are necessary.
* @property {string|null} jsTestCommand The command to run if JS tests are necessary.
* @property {string|null} e2eCommand The command to run if E2E is necessary.
*/
/**
* Parses the test environment's configuration and returns any environment variables that
* should be set.
*
* @param {Object} testEnvConfig The test environment configuration.
* @return {Promise.<Object>} The environment variables for the test environment.
*/
async function parseTestEnvConfig( testEnvConfig ) {
const envVars = {};
// Convert `wp-env` configuration options to environment variables.
if ( testEnvConfig.wpVersion ) {
try {
envVars.WP_ENV_CORE = await parseWPVersion(
testEnvConfig.wpVersion
);
} catch ( error ) {
throw new Error(
`Failed to parse WP version: ${ error.message }.`
);
}
}
if ( testEnvConfig.phpVersion ) {
envVars.WP_ENV_PHP_VERSION = testEnvConfig.phpVersion;
}
return envVars;
}
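// For example, a testEnvConfig of { wpVersion: 'latest', phpVersion: '8.0' } resolves to
// { WP_ENV_CORE: 'https://wordpress.org/latest.zip', WP_ENV_PHP_VERSION: '8.0' }.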
/**
* Generates a command for the task that can be executed in the CI matrix. This will check the task
* for the command, apply any command override, and replace any valid tokens with their values.
*
* @param {ProjectTask} task The task to get the command for.
* @param {CommandType} command The command to run.
* @param {Object.<string,string>} tokenValues Any tokens that should be replaced and their associated values.
* @return {string|null} The command that should be run for the task or null if the command should not be run.
*/
function getCommandForMatrix( task, command, tokenValues ) {
if ( ! task.commandsToRun.includes( command ) ) {
return null;
}
// Support overriding the default command with a custom one.
command = task.customCommands[ command ] ?? command;
// Replace any of the tokens that are used in commands with their values if one exists.
let matrixCommand = command;
const matches = command.matchAll( /\${([a-z0-9_\-]+)}/gi );
if ( matches ) {
for ( const match of matches ) {
if ( ! tokenValues.hasOwnProperty( match[ 1 ] ) ) {
throw new Error(
`Command "${ command }" contains unknown token "${ match[ 1 ] }".`
);
}
matrixCommand = matrixCommand.replace(
match[ 0 ],
tokenValues[ match[ 1 ] ]
);
}
}
return matrixCommand;
}
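// For instance, given the custom command 'lint:php:changes:branch ${baseRef}' (as configured
// for plugins/woocommerce in this commit) and tokenValues of { baseRef: 'origin/trunk' }, the
// resulting matrix command is 'lint:php:changes:branch origin/trunk'.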
/**
* Generates a matrix for the CI GitHub Workflow.
*
* @param {string} baseRef The base branch to check for changes against. If empty we check for everything.
* @return {Promise.<Array.<CIMatrix>>} The CI matrix to be used in the CI workflows.
*/
async function buildCIMatrix( baseRef ) {
const matrix = [];
// Build the project tasks based on the branch we are comparing against.
let projectTasks = [];
if ( baseRef ) {
const projectChanges = detectProjectChanges( baseRef );
const cascadedProjectChanges = cascadeProjectChanges( projectChanges );
projectTasks = generateProjectTasksForChanges( cascadedProjectChanges );
} else {
projectTasks = generateProjectTasksForWorkspace();
}
// Prepare the tokens that are able to be replaced in commands.
const commandTokens = {
baseRef: baseRef ?? '',
};
// Parse the tasks and generate matrix entries for each of them.
for ( const project of projectTasks ) {
for ( const task of project.tasks ) {
matrix.push( {
projectName: project.name,
taskName: task.name,
testEnvCommand: task.testEnvCommand,
testEnvVars: await parseTestEnvConfig( task.testEnvConfig ),
lintCommand: getCommandForMatrix(
task,
COMMAND_TYPE.Lint,
commandTokens
),
phpTestCommand: getCommandForMatrix(
task,
COMMAND_TYPE.TestPHP,
commandTokens
),
jsTestCommand: getCommandForMatrix(
task,
COMMAND_TYPE.TestJS,
commandTokens
),
e2eCommand: getCommandForMatrix(
task,
COMMAND_TYPE.E2E,
commandTokens
),
} );
}
}
return matrix;
}
module.exports = buildCIMatrix;

View File

@@ -1,89 +0,0 @@
const https = require( 'http' );
/**
For convenience, this method will convert between a display-friendly version format and one used
internally by wp-env. We lean towards using WordPress.org ZIPs which requires us to reference
the full URL to the archive. For instance, instead of needing the action to fully define the
URL to the nightly build we can pass "nightly" to this function and retrieve it.
@param {string} wpVersion The display-friendly version. Supports ("master", "trunk", "nightly",
"latest", "X.X" for version lines, and "X.X.X" for specific versions)
@return {Promise.<string>} The wp-env "core" property.
**/
module.exports = async function parseWPVersion( wpVersion ) {
// Start with versions we can infer immediately.
switch ( wpVersion ) {
case 'master':
case 'trunk': {
return 'WordPress/WordPress#master';
}
case 'nightly': {
return 'https://wordpress.org/nightly-builds/wordpress-latest.zip';
}
case 'latest': {
return 'https://wordpress.org/latest.zip';
}
}
return new Promise( ( resolve, reject ) => {
// We're going to download the correct zip archive based on the version they're requesting.
const parsedVersion = wpVersion.match( /([0-9]+)\.([0-9]+)(?:\.([0-9]+))?/ );
if ( ! parsedVersion ) {
throw new Error( `Invalid 'wp-version': ${ wpVersion } must be 'trunk', 'nightly', 'latest', 'X.X', or 'X.X.X'.` );
}
// When they've provided a specific version we can just provide that.
if ( parsedVersion[ 3 ] !== undefined ) {
let zipVersion = `${ parsedVersion[ 1 ] }.${ parsedVersion[ 2 ] }`;
// .0 versions do not have a patch.
if ( parsedVersion[ 3 ] !== '0' ) {
zipVersion += `.${ parsedVersion[ 3 ] }`;
}
resolve( `https://wordpress.org/wordpress-${ zipVersion }.zip` );
}
const request = https.get(
'http://api.wordpress.org/core/stable-check/1.0/',
( response ) => {
// Listen for the response data.
let data = '';
response.on('data', (chunk) => {
data += chunk;
});
// Once we have the entire response we can process it.
response.on('end', () => {
// Parse the response and find the latest version of every minor release.
const latestVersions = {};
const rawVersions = JSON.parse( data );
for ( const v in rawVersions ) {
// Parse the version so we can find the latest.
const matches = v.match( /([0-9]+)\.([0-9]+)(?:\.([0-9]+))?/ );
const minor = `${ matches[1] }.${ matches[2] }`;
const patch = matches[ 3 ] === undefined ? 0 : parseInt( matches[ 3 ] );
// We will only be keeping the latest release of each minor.
if ( latestVersions[ minor ] === undefined || patch > latestVersions[ minor ] ) {
latestVersions[ minor ] = patch;
}
}
let zipVersion = `${ parsedVersion[ 1 ] }.${ parsedVersion[ 2 ] }`;
// .0 versions do not have a patch.
if ( latestVersions[ zipVersion ] !== 0 ) {
zipVersion += `.${ latestVersions[ zipVersion ]}`;
}
resolve( `https://wordpress.org/wordpress-${ zipVersion }.zip` );
});
},
);
request.on( 'error', ( error ) => {
reject( error );
} );
} );
}

View File

@@ -1,5 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
# Lint branch
#
# Runs phpcs-changed, comparing the current branch to its "base" or "parent" branch.
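# Example usage: "./bin/lint-branch.sh" compares against the default "trunk" branch, while
# "./bin/lint-branch.sh my-base-branch" compares against an arbitrary base branch.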
@@ -14,4 +13,9 @@ baseBranch=${1:-"trunk"}
changedFiles=$(git diff $(git merge-base HEAD $baseBranch) --relative --name-only -- '*.php')
# Only complete this if changed files are detected.
[[ -z $changedFiles ]] || composer exec phpcs-changed -- -s --git --git-base $baseBranch $changedFiles
if [[ -z $changedFiles ]]; then
echo "No changed files detected."
exit 0
fi
composer exec phpcs-changed -- -s --git --git-base $baseBranch $changedFiles

View File

@@ -0,0 +1,4 @@
Significance: patch
Type: dev
Comment: Just making some changes to support the new CI workflow.

View File

@@ -97,7 +97,7 @@
"chg=$(git diff HEAD --relative --name-only -- '*.php'); [[ -z $chg ]] || phpcs-changed -s --git $chg"
],
"lint-branch": [
"sh ./bin/lint-branch.sh"
"bash ./bin/lint-branch.sh"
],
"phpcbf": [
"phpcbf -p"

View File

@@ -10,7 +10,34 @@
"license": "GPL-3.0+",
"config": {
"wp_org_slug": "woocommerce",
"build_step": "pnpm run build:zip"
"build_step": "pnpm run build:zip",
"ci": {
"name": "WP: latest",
"customCommands": {
"lint": "lint:php:changes:branch ${baseRef}",
"test:php": "test:php:env"
},
"testEnvCommand": "env:test",
"testEnvConfig": {
"wpVersion": "latest"
},
"additionalTasks": [
{
"name": "WP: latest-1",
"commandFilter": [ "test:php" ],
"testEnvConfig": {
"wpVersion": "latest-1"
}
},
{
"name": "WP: latest-2",
"commandFilter": [ "test:php" ],
"testEnvConfig": {
"wpVersion": "latest-2"
}
}
]
}
},
"scripts": {
"e2e": "pnpm exec wc-e2e test:e2e",
@@ -19,8 +46,7 @@
"postinstall": "composer install",
"changelog": "composer exec -- changelogger",
"build": "pnpm -w exec turbo run turbo:build --filter=$npm_package_name",
"test": "pnpm test:js",
"test:js": "pnpm -w exec turbo run turbo:test --filter=$npm_package_name",
"test": "pnpm -w exec turbo run turbo:test --filter=$npm_package_name",
"lint": "pnpm lint:php",
"build:feature-config": "php bin/generate-feature-config.php",
"build:zip": "./bin/build-zip.sh",