Clean up unused files and dependencies from when WooCommerce Blocks was merged into the monorepo (#49319)

This commit is contained in:
Sam Seay 2024-07-12 12:40:07 +08:00 committed by GitHub
parent 35a0ee2e8a
commit e59decd222
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
77 changed files with 1075 additions and 3125 deletions

View File

@ -1,15 +0,0 @@
# Contributing
Thanks for your interest in contributing to WooCommerce Blocks!
If you wish to contribute code, to get started we recommend first reading our [Getting Started Guide](../docs/contributors/getting-started.md).
All other documentation for contributors can be found [in the docs directory](../docs/README.md).
## Guidelines
Like the WooCommerce project, we want to ensure a welcoming environment for everyone. With that in mind, all contributors are expected to follow our [Code of Conduct](./CODE_OF_CONDUCT.md).
## Reporting Security Issues
Please see [SECURITY.md](./SECURITY.md).

View File

@ -1,50 +0,0 @@
# comments-aggregator
> This GitHub Action helps you keep the PR page clean by merging comments/reports by multiple workflows into a single comment.
![screenshot](./screenshot.png)
## Usage
This action is meant to be used as the poster/commenter. Instead of having existing actions post the comment by themselves, set those comments as the action output, then feed that output to `comments-aggregator` to let this action manage those comments for you.
```yml
- name: Compare Assets
uses: ./.github/compare-assets
id: compare-assets
with:
repo-token: '${{ secrets.GITHUB_TOKEN }}'
compare: assets-list/assets.json
create-comment: false
- name: Append report
uses: ./.github/comments-aggregator
with:
repo-token: '${{ secrets.GITHUB_TOKEN }}'
section-id: compare-assets-with-trunk
content: ${{steps.compare-assets.outputs.comment}}
```
## Inputs
- **`repo-token`** (required): This is the GitHub token. This is required to manipulate PR comments.
- **`section-id`** (required): The unique ID that helps this action to update the correct part of the aggregated comment.
- **`content`** (optional): The comment content. Defaults to empty. If nothing is provided, this action will stop gracefully.
- **`order`** (optional): The order of the comment part inside the aggregated comment. Defaults to 10.
## More examples
### Message contains GitHub Event properties
```yml
- name: Add release ZIP URL as comment to the PR
uses: ./.github/comments-aggregator
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
section-id: release-zip-url
order: 1
content: |
The release ZIP for this PR is accessible via:
```
https://wcblocks.wpcomstaging.com/wp-content/uploads/woocommerce-gutenberg-products-block-${{ github.event.pull_request.number }}.zip
```
```

View File

@ -1,19 +0,0 @@
name: 'Comments Aggregator'
description: 'Merge bot comments into one comment to keep PR page clean'
inputs:
repo-token:
description: 'GitHub token'
required: true
section-id:
description: 'Comment section ID for the action to know which part to update'
required: true
content:
description: 'Comment content'
default: ''
order:
description: 'Order of the comment'
required: false
default: 10
runs:
using: 'node16'
main: 'index.js'

View File

@ -1,84 +0,0 @@
/**
 * External dependencies
 */
const { getOctokit, context } = require( '@actions/github' );
const { setFailed, getInput } = require( '@actions/core' );
/**
 * Internal dependencies
 */
const { updateComment, isMergedComment } = require( './utils' );
/**
 * Entry point for the comments-aggregator action.
 *
 * Merges the caller-supplied `content` into a single bot-managed PR
 * comment (one section per `section-id`), creating the comment on the
 * first run and updating it in place afterwards. Fails the workflow
 * step on any unexpected error.
 */
const runner = async () => {
try {
const token = getInput( 'repo-token', { required: true } );
const octokit = getOctokit( token );
const payload = context.payload;
const repo = payload.repository.name;
const owner = payload.repository.owner.login;
// Only run this action on pull requests.
if ( ! payload.pull_request?.number ) {
return;
}
const sectionId = getInput( 'section-id', {
required: true,
} );
const content = getInput( 'content' );
const order = getInput( 'order' );
// Empty content is documented as a graceful no-op (see README).
if ( ! sectionId || ! content ) {
return;
}
let commentId, commentBody;
// Look for an existing aggregated comment authored by the bot so it is
// updated rather than duplicated.
// NOTE(review): only the first page of comments is inspected here —
// presumably fine for typical PRs; confirm for very long threads.
const currentComments = await octokit.rest.issues.listComments( {
owner,
repo,
issue_number: payload.pull_request.number,
} );
if (
Array.isArray( currentComments.data ) &&
currentComments.data.length > 0
) {
const comment = currentComments.data.find( ( comment ) =>
isMergedComment( comment )
);
if ( comment ) {
commentId = comment.id;
commentBody = comment.body;
}
}
// Insert or replace this section inside the aggregated comment body.
commentBody = updateComment( commentBody, {
sectionId,
content,
order,
} );
if ( commentId ) {
// Update the existing aggregated comment in place.
await octokit.rest.issues.updateComment( {
owner,
repo,
comment_id: commentId,
body: commentBody,
} );
} else {
// No aggregated comment yet: create one.
await octokit.rest.issues.createComment( {
owner,
repo,
issue_number: payload.pull_request.number,
body: commentBody,
} );
}
} catch ( error ) {
// Surface the failure to the workflow run.
setFailed( error.message );
}
};
runner();

Binary file not shown.

Before

Width:  |  Height:  |  Size: 196 KiB

View File

@ -1,89 +0,0 @@
const identifier = `<!-- comments-aggregator -->`;
const separator = '<!-- separator -->';
const footerText =
'[comments-aggregator](https://github.com/woocommerce/woocommerce-blocks/tree/trunk/.github/comments-aggregator)';
const footer = `\n> <sub>${ footerText }</sub>\n${ identifier }`;
function getSectionId( section ) {
const match = section.match( /-- section-id: ([^\s]+) --/ );
return match ? match[ 1 ] : null;
}
function getSectionOrder( section ) {
const match = section.match( /-- section-order: ([^\s]+) --/ );
return match ? match[ 1 ] : null;
}
function parseComment( comment ) {
if ( ! comment ) {
return [];
}
const sections = comment.split( separator );
return sections
.map( ( section ) => {
const sectionId = getSectionId( section );
const order = getSectionOrder( section );
/**
* This also remove the footer as it doesn't have a section id. This
* is intentional as we want the footer to always be the last
* section.
*/
if ( ! sectionId ) {
return null;
}
return {
id: sectionId,
order: parseInt( order, 10 ),
content: section.trim(),
};
} )
.filter( Boolean );
}
function updateSection( sections, data ) {
const { sectionId, content, order } = data;
const index = sections.findIndex( ( section ) => section.id === sectionId );
const formattedContent = `<!-- section-id: ${ sectionId } -->\n\n<!-- section-order: ${ order } -->\n\n${ content }`;
if ( index === -1 ) {
sections.push( {
id: sectionId,
content: formattedContent,
} );
} else {
sections[ index ].content = formattedContent;
}
return sections;
}
function appendFooter( sections ) {
return sections.concat( {
id: 'footer',
content: footer,
} );
}
function sortSections( sections ) {
return sections.sort( ( a, b ) => a.order - b.order );
}
function combineSections( sections ) {
return sections
.map( ( section ) => section.content )
.join( `\n\n${ separator }\n\n` );
}
exports.updateComment = function ( comment, data ) {
let sections = parseComment( comment );
sections = updateSection( sections, data );
sections = sortSections( sections );
sections = appendFooter( sections );
return combineSections( sections );
};
exports.isMergedComment = function ( comment ) {
return (
comment.body.includes( identifier ) &&
comment.user.login === 'github-actions[bot]'
);
};

View File

@ -1,19 +0,0 @@
name: 'Compare Assets'
description: 'Compares two assets files created by DependencyExtractionWebpackPlugin and reports the differences.'
inputs:
repo-token:
description: 'GitHub token'
required: true
compare:
description: 'Path to assets file to compare the build assets with.'
required: true
create-comment:
description: 'Create a dedicated comment for this report?'
required: false
default: true
outputs:
comment:
description: 'Markdown comment'
runs:
using: 'node16'
main: 'index.js'

View File

@ -1,158 +0,0 @@
/**
 * External dependencies
 */
const { getOctokit, context } = require( '@actions/github' );
const { setFailed, getInput, setOutput } = require( '@actions/core' );
/**
 * Entry point for the compare-assets action.
 *
 * Diffs the script dependencies of the freshly built assets.json against
 * a reference assets file (typically built from trunk), then either posts
 * the resulting markdown report as its own PR comment or exposes it via
 * the `comment` action output for another action to post.
 */
const runner = async () => {
try {
const token = getInput( 'repo-token', { required: true } );
const octokit = getOctokit( token );
const payload = context.payload;
const repo = payload.repository.name;
const owner = payload.repository.owner.login;
// Reference assets list; the path is resolved relative to this file
// inside .github/compare-assets, hence the '../../' prefix.
const oldAssets = require( '../../' +
getInput( 'compare', {
required: true,
} ) );
if ( ! oldAssets ) {
return;
}
// Assets produced by the current build.
// NOTE(review): hard-coded monorepo path — confirm it matches the
// checkout layout of the calling workflow.
const newAssets = require( '../../../woocommerce/assets/client/blocks/assets.json' );
if ( ! newAssets ) {
return;
}
const createComment = getInput( 'create-comment' );
// Map of script handle -> { added, removed } dependency lists, keeping
// only the handles whose dependency sets actually changed.
const changes = Object.fromEntries(
Object.entries( newAssets )
.map( ( [ key, { dependencies = [] } ] ) => {
const oldDependencies =
oldAssets[ key ]?.dependencies || [];
const added = dependencies.filter(
( dependency ) =>
! oldDependencies.includes( dependency )
);
const removed = oldDependencies.filter(
( dependency ) => ! dependencies.includes( dependency )
);
return added.length || removed.length
? [
key,
{
added,
removed,
},
]
: null;
} )
.filter( Boolean )
);
let reportCommentId;
// Find an existing bot-authored report comment so it can be updated
// in place instead of duplicated.
{
const currentComments = await octokit.rest.issues.listComments( {
owner,
repo,
issue_number: payload.pull_request.number,
} );
if (
Array.isArray( currentComments.data ) &&
currentComments.data.length > 0
) {
const comment = currentComments.data.find(
( comment ) =>
comment.body.includes( 'Script Dependencies Report' ) &&
comment.user.login === 'github-actions[bot]'
);
if ( comment ) {
reportCommentId = comment.id;
}
}
}
let commentBody = '';
if ( Object.keys( changes ).length > 0 ) {
// Render one markdown table row per changed script handle.
let reportContent = '';
Object.entries( changes ).forEach(
( [ handle, { added, removed } ] ) => {
const addedDeps = added.length
? '`' + added.join( '`, `' ) + '`'
: '';
const removedDeps = removed.length
? '`' + removed.join( '`, `' ) + '`'
: '';
// Icon legend: both added and removed = ❓,
// only added = ⚠️, only removed = 🎉.
let icon = '';
if ( added.length && removed.length ) {
icon = '❓';
} else if ( added.length ) {
icon = '⚠️';
} else if ( removed.length ) {
icon = '🎉';
}
reportContent +=
`| \`${ handle }\` | ${ addedDeps } | ${ removedDeps } | ${ icon } |` +
'\n';
}
);
commentBody =
'## Script Dependencies Report' +
'\n\n' +
'The `compare-assets` action has detected some changed script dependencies between this branch and ' +
'trunk. Please review and confirm the following are correct before merging.' +
'\n\n' +
'| Script Handle | Added | Removed | |' +
'\n' +
'| ------------- | ------| ------- | -- |' +
'\n' +
reportContent +
'\n\n' +
'__This comment was automatically generated by the `./github/compare-assets` action.__';
} else {
commentBody =
'## Script Dependencies Report' +
'\n\n' +
'There is no changed script dependency between this branch and trunk.' +
'\n\n' +
'__This comment was automatically generated by the `./github/compare-assets` action.__';
}
// When not posting our own comment, hand the report to the caller
// (e.g. comments-aggregator) via the action output instead.
if ( createComment !== 'true' ) {
setOutput( 'comment', commentBody );
return;
}
if ( reportCommentId ) {
await octokit.rest.issues.updateComment( {
owner,
repo,
comment_id: reportCommentId,
body: commentBody,
} );
} else {
await octokit.rest.issues.createComment( {
owner,
repo,
issue_number: payload.pull_request.number,
body: commentBody,
} );
}
} catch ( error ) {
// Mark the workflow step as failed.
setFailed( error.message );
}
};
runner();

View File

@ -1,3 +0,0 @@
todo:
blobLines: 10
label: false

View File

@ -1,22 +0,0 @@
name: 'Typescript Monitor'
description: 'Check TypeScript errors'
inputs:
repo-token:
description: 'GitHub token'
required: true
checkstyle:
description: 'Path checkstyle.xml file of current PR/branch'
required: true
checkstyle-trunk:
description: 'Path checkstyle.xml file of trunk'
required: true
create-comment:
description: 'Create a dedicated comment for this report?'
required: false
default: true
outputs:
comment:
description: 'Markdown comment'
runs:
using: 'node16'
main: 'index.js'

View File

@ -1,73 +0,0 @@
const fs = require( 'fs' );
const { getOctokit, context } = require( '@actions/github' );
const { getInput, setOutput } = require( '@actions/core' );
const { parseXml, getFilesWithNewErrors } = require( './utils/xml' );
const { generateMarkdownMessage } = require( './utils/markdown' );
const { addComment } = require( './utils/github' );
/**
 * Entry point for the TypeScript monitor action.
 *
 * Compares the checkstyle report of the current branch with trunk's and
 * reports (as a PR comment or via the `comment` output) whether the PR
 * introduces new TypeScript errors.
 *
 * NOTE(review): unlike the sibling actions there is no try/catch here,
 * so a thrown error surfaces as an unhandled rejection rather than a
 * setFailed() call — confirm this is intentional.
 */
const runner = async () => {
const token = getInput( 'repo-token', { required: true } );
const octokit = getOctokit( token );
const payload = context.payload;
const repo = payload.repository.name;
const owner = payload.repository.owner.login;
// checkstyle.xml produced for the PR branch.
const fileName = getInput( 'checkstyle', {
required: true,
} );
// checkstyle.xml produced for trunk (the comparison baseline).
const trunkFileName = getInput( 'checkstyle-trunk', {
required: true,
} );
const createComment = getInput( 'create-comment' );
const newCheckStyleFile = fs.readFileSync( fileName );
const newCheckStyleFileParsed = parseXml( newCheckStyleFile );
const currentCheckStyleFile = fs.readFileSync( trunkFileName );
const currentCheckStyleFileContentParsed = parseXml(
currentCheckStyleFile
);
const { header } = generateMarkdownMessage( newCheckStyleFileParsed );
// Files that are new in the report or whose error count grew vs. trunk.
const filesWithNewErrors = getFilesWithNewErrors(
newCheckStyleFileParsed,
currentCheckStyleFileContentParsed
);
const message =
header +
'\n' +
( filesWithNewErrors.length > 0
? `⚠️ ⚠️ This PR introduces new TS errors on ${ filesWithNewErrors.length } files: \n` +
'<details> \n' +
filesWithNewErrors.join( '\n\n' ) +
'\n' +
'</details>'
: '🎉 🎉 This PR does not introduce new TS errors.' );
// Only report on non-trunk branches (i.e. pull requests).
if ( process.env[ 'CURRENT_BRANCH' ] !== 'trunk' ) {
if ( createComment !== 'true' ) {
// Let the caller (e.g. comments-aggregator) post the report.
setOutput( 'comment', message );
} else {
await addComment( {
octokit,
owner,
repo,
message,
payload,
} );
}
}
/**
 * @todo: Airtable integration is failing auth, so we're disabling it for now.
 * Issue opened: https://github.com/woocommerce/woocommerce-blocks/issues/8961
 */
// if ( process.env[ 'CURRENT_BRANCH' ] === 'trunk' ) {
// try {
// await addRecord( currentCheckStyleFileContentParsed.totalErrors );
// } catch ( error ) {
// setFailed( error );
// }
// }
};
runner();

View File

@ -1,38 +0,0 @@
const axios = require( 'axios' ).default;
// Airtable REST API configuration for the "TypeScript Migration" base.
const BASE_URL = 'https://api.airtable.com/v0';
const TABLE_ID = 'appIIlxUVxOks06sZ';
const API_KEY = process.env[ 'AIRTABLE_API_KEY' ];
const TABLE_NAME = 'TypeScript Migration';
const TYPESCRIPT_ERRORS_COLUMN_NAME = 'TypeScript Errors';
const DATE_COLUMN_NAME = 'Date';
// https://community.airtable.com/t/datetime-date-field-woes/32121
// Produce a Date truncated to the calendar day, because Airtable's date
// field mishandles full timestamps (see thread above).
// NOTE(review): round-tripping through toLocaleDateString() relies on
// Date parsing a locale-formatted string, which is fragile outside
// en-US-style locales — confirm the CI runner's locale.
const generateDateValueForAirtable = () => {
const today = new Date();
const string = today.toLocaleDateString();
return new Date( string );
};
// Append one record (error count + date) to the Airtable table.
// Returns the axios response promise; `typecast: true` lets Airtable
// coerce the posted values into the column types.
exports.addRecord = async ( errorsNumber ) =>
axios.post(
`${ BASE_URL }/${ TABLE_ID }/${ TABLE_NAME }`,
{
records: [
{
fields: {
[ TYPESCRIPT_ERRORS_COLUMN_NAME ]: errorsNumber,
[ DATE_COLUMN_NAME ]: generateDateValueForAirtable(),
},
},
],
typecast: true,
},
{
headers: {
Authorization: `Bearer ${ API_KEY }`,
},
}
);

View File

@ -1,44 +0,0 @@
// Locate the id of the bot-authored "TypeScript Errors Report" comment
// on the PR, or undefined when no such comment exists yet.
const getReportCommentId = async ( { octokit, owner, repo, payload } ) => {
	const { data } = await octokit.rest.issues.listComments( {
		owner,
		repo,
		issue_number: payload.pull_request.number,
	} );
	if ( ! Array.isArray( data ) || data.length === 0 ) {
		return undefined;
	}
	const existing = data.find(
		( { body, user } ) =>
			body.includes( 'TypeScript Errors Report' ) &&
			user.login === 'github-actions[bot]'
	);
	return existing?.id;
};
exports.addComment = async ( { octokit, owner, repo, message, payload } ) => {
const commentId = await getReportCommentId( {
octokit,
owner,
repo,
payload,
} );
if ( commentId ) {
return await octokit.rest.issues.updateComment( {
owner,
repo,
comment_id: commentId,
body: message,
} );
}
await octokit.rest.issues.createComment( {
owner,
repo,
issue_number: payload.pull_request.number,
body: message,
} );
};

View File

@ -1,27 +0,0 @@
exports.generateMarkdownMessage = ( dataFromParsedXml ) => {
const header = generateHeader( dataFromParsedXml );
const body = generateBody( dataFromParsedXml );
return { header, body };
};
// Render the report title plus the two summary totals as markdown.
const generateHeader = ( dataFromParsedXml ) => {
	const lines = [
		'',
		'## TypeScript Errors Report',
		`- Files with errors: ${ dataFromParsedXml.totalFilesWithErrors }`,
		`- Total errors: ${ dataFromParsedXml.totalErrors }`,
		'',
	];
	return lines.join( '\n' );
};
/**
 * Render a per-file error breakdown; returns an array of markdown
 * fragments, one per file in the parsed summary.
 *
 * Fix: each entry in files[ file ] is an error-info object
 * ({ line, column, message }), so it is now formatted explicitly.
 * The previous `- ${ error }` interpolation relied on default object
 * stringification and rendered "- [object Object]".
 */
const generateBody = ( dataFromParsedXml ) => {
	const files = dataFromParsedXml.files;
	return Object.keys( files ).map( ( file ) => {
		const errorLines = files[ file ]
			.map(
				( { line, column, message } ) =>
					`- ${ message } (${ line }:${ column })`
			)
			.join( '\r\n' );
		return `
Files with errors:
File: ${ file }
${ errorLines }
`;
	} );
};

View File

@ -1,72 +0,0 @@
const { XMLParser } = require( 'fast-xml-parser' );
/**
 * Parse a checkstyle XML report and reduce it to a summary object of the
 * shape { files, totalErrors, totalFilesWithErrors }.
 *
 * NOTE(review): despite the name, `filePath` receives the file CONTENTS
 * (callers pass the result of fs.readFileSync), which XMLParser#parse
 * accepts directly — consider renaming.
 */
exports.parseXml = ( filePath ) => {
const parser = new XMLParser( {
// Keep XML attributes (line/column/message live there) without
// prefixing or grouping them.
ignoreAttributes: false,
attributeNamePrefix: '',
attributesGroupName: '',
} );
const parsedFile = parser.parse( filePath );
return getDataFromParsedXml( parsedFile );
};
// Keep only the fields of a checkstyle <error> node that the report needs.
const getErrorInfo = ( { line, column, message } ) => ( {
	line,
	column,
	message,
} );
// Collapse the parsed checkstyle tree into a summary of the shape
// { files: { [path]: errorInfo[] }, totalErrors, totalFilesWithErrors }.
const getDataFromParsedXml = ( parsedXml ) => {
	const fileEntries = parsedXml.checkstyle.file;
	const files = {};
	let totalErrors = 0;
	// Walk back-to-front so the key order of `files` matches the previous
	// reduce-with-spread implementation (last entry first).
	for ( let i = fileEntries.length - 1; i >= 0; i-- ) {
		const { name, error } = fileEntries[ i ];
		// A file with a single error yields an object, not an array.
		const errorList = Array.isArray( error )
			? error.map( getErrorInfo )
			: [ getErrorInfo( error ) ];
		files[ name ] = errorList;
		totalErrors += errorList.length;
	}
	return {
		files,
		totalErrors,
		totalFilesWithErrors: fileEntries.length,
	};
};
exports.getFilesWithNewErrors = (
newCheckStyleFileParsed,
currentCheckStyleFileParsed
) => {
const newFilesReport = newCheckStyleFileParsed.files;
const currentFilesReport = currentCheckStyleFileParsed.files;
return Object.keys( newFilesReport )
.sort( ( a, b ) => a.localeCompare( b ) )
.reduce(
( acc, pathfile ) =>
typeof currentFilesReport[ pathfile ] === 'undefined' ||
currentFilesReport[ pathfile ] === null ||
newFilesReport[ pathfile ].length >
currentFilesReport[ pathfile ].length
? [ ...acc, pathfile ]
: acc,
[]
);
};

View File

@ -1,66 +0,0 @@
name: Dependabot auto-merge
on: pull_request
permissions:
pull-requests: write
contents: write
repository-projects: write
jobs:
dependabot:
runs-on: ubuntu-latest
if: ${{ github.actor == 'dependabot[bot]' }}
steps:
- name: Dependabot metadata
id: metadata
uses: dependabot/fetch-metadata@v1.6.0
with:
github-token: "${{ secrets.GITHUB_TOKEN }}"
- name: Enable auto-merge for Dependabot PRs
# Automatically merge the following dependency upgrades:
if: "${{ steps.metadata.outputs.dependency-names == '@actions/core' ||
steps.metadata.outputs.dependency-names == '@automattic/color-studio' ||
steps.metadata.outputs.dependency-names == '@babel/cli' ||
steps.metadata.outputs.dependency-names == '@babel/core' ||
steps.metadata.outputs.dependency-names == '@babel/plugin-proposal-class-properties' ||
steps.metadata.outputs.dependency-names == '@babel/plugin-syntax-jsx' ||
steps.metadata.outputs.dependency-names == '@babel/polyfill' ||
steps.metadata.outputs.dependency-names == '@types/classnames' ||
steps.metadata.outputs.dependency-names == '@types/dinero.js' ||
steps.metadata.outputs.dependency-names == '@types/dompurify' ||
steps.metadata.outputs.dependency-names == '@types/gtag.js' ||
steps.metadata.outputs.dependency-names == '@types/jest' ||
steps.metadata.outputs.dependency-names == '@types/jest-environment-puppeteer' ||
steps.metadata.outputs.dependency-names == '@types/jquery' ||
steps.metadata.outputs.dependency-names == '@types/lodash' ||
steps.metadata.outputs.dependency-names == '@types/puppeteer' ||
steps.metadata.outputs.dependency-names == '@types/react' ||
steps.metadata.outputs.dependency-names == '@types/react-dom' ||
steps.metadata.outputs.dependency-names == '@types/wordpress__block-editor' ||
steps.metadata.outputs.dependency-names == '@types/wordpress__blocks' ||
steps.metadata.outputs.dependency-names == '@types/wordpress__data' ||
steps.metadata.outputs.dependency-names == '@types/wordpress__data-controls' ||
steps.metadata.outputs.dependency-names == '@types/wordpress__editor' ||
steps.metadata.outputs.dependency-names == '@types/wordpress__notices' ||
steps.metadata.outputs.dependency-names == '@typescript-eslint/eslint-plugin' ||
steps.metadata.outputs.dependency-names == '@typescript-eslint/parser' ||
steps.metadata.outputs.dependency-names == 'chalk' ||
steps.metadata.outputs.dependency-names == 'circular-dependency-plugin' ||
steps.metadata.outputs.dependency-names == 'commander' ||
steps.metadata.outputs.dependency-names == 'copy-webpack-plugin' ||
steps.metadata.outputs.dependency-names == 'eslint-import-resolver-typescript' ||
steps.metadata.outputs.dependency-names == 'gh-pages' ||
steps.metadata.outputs.dependency-names == 'markdown-it' ||
steps.metadata.outputs.dependency-names == 'promptly' ||
steps.metadata.outputs.dependency-names == 'react-docgen' ||
steps.metadata.outputs.dependency-names == 'wp-types'
}}"
run: |
gh pr edit --add-label 'dependencies-auto-merged' "$PR_URL"
gh pr review --approve "$PR_URL"
gh pr merge --auto --squash "$PR_URL"
env:
PR_URL: ${{github.event.pull_request.html_url}}
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}

View File

@ -1,19 +0,0 @@
name: Bundle Size
on: [pull_request]
jobs:
build-and-size:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 1
- uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
- uses: preactjs/compressed-size-action@8a15fc9a36a94c8c3f7835af11a4924da7e95c7c
with:
repo-token: '${{ secrets.GITHUB_TOKEN }}'
pattern: '{build/**/*.js,build/**/*.css}'

View File

@ -1,24 +0,0 @@
{
"httpHeaders": [
{
"urls": [
"https://github.com/",
"https://guides.github.com/",
"https://help.github.com/",
"https://docs.github.com/"
],
"headers": {
"Accept-Encoding": "zstd, br, gzip, deflate"
}
}
],
"ignorePatterns": [
{
"pattern": "^http://localhost"
},
{
"pattern": "https://www.php.net/manual/en/install.php"
}
],
"retryOn429": true
}

View File

@ -1,33 +0,0 @@
name: Check Markdown links
on:
workflow_dispatch:
pull_request:
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
markdown_link_check:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Install Node.js
uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
cache: 'npm'
- name: Install markdown-link-check
run: npm install -g markdown-link-check@3.11.2
- name: Run markdown-link-check
run: |
find ./docs -path ./docs/internal-developers/testing/releases -prune -o -name "*.md" -print0 | xargs -0 -n1 markdown-link-check -c .github/workflows/check-doc-links-config.json

View File

@ -1,91 +0,0 @@
name: Check Modified Assets
on:
pull_request:
jobs:
build-trunk:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
ref: trunk
- name: Cache node_modules
id: cache-node-modules
uses: actions/cache@v3
env:
cache-name: cache-node-modules
with:
path: node_modules
key: ${{ runner.os }}-build-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-build-${{ env.cache-name }}-
${{ runner.os }}-build-
${{ runner.os }}-
- name: Setup node version and npm cache
uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
cache: 'npm'
- name: Install Node dependencies
if: steps.cache-node-modules.outputs.cache-hit != 'true'
run: npm install --no-optional --no-audit
- name: Build Assets
run: npm run build:check-assets
- name: Upload Artifact
uses: actions/upload-artifact@v3.1.2
with:
name: assets-list
path: ./build/assets.json
compare-assets-with-trunk:
needs: [ build-trunk ]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Cache node_modules
id: cache-node-modules
uses: actions/cache@v3
env:
cache-name: cache-node-modules
with:
path: node_modules
key: ${{ runner.os }}-build-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-build-${{ env.cache-name }}-
${{ runner.os }}-build-
${{ runner.os }}-
- name: Setup node version and npm cache
uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
cache: 'npm'
- name: Build Assets
run: npm run build:check-assets
- name: Download assets (trunk)
uses: actions/download-artifact@v3
with:
name: assets-list
path: assets-list
- name: Compare Assets
uses: ./.github/compare-assets
id: compare-assets
with:
repo-token: '${{ secrets.GITHUB_TOKEN }}'
compare: assets-list/assets.json
create-comment: false
- name: Append report
uses: ./.github/comments-aggregator
with:
repo-token: '${{ secrets.GITHUB_TOKEN }}'
section-id: compare-assets-with-trunk
content: ${{steps.compare-assets.outputs.comment}}

View File

@ -1,24 +0,0 @@
name: 'Close stale issues'
on:
schedule:
# Runs daily at 9am UTC
- cron: '0 9 * * *'
jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v8
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 60
days-before-pr-stale: 7
days-before-close: -1
remove-stale-when-updated: true
exempt-issue-labels: 'priority: critical,priority: high,Epic,type: technical debt,category: refactor,type: documentation,plugin incompatibility'
exempt-pr-labels: 'priority: critical,priority: high,Epic,type: technical debt,category: refactor,type: documentation,plugin incompatibility'
stale-issue-message: "This issue has been marked as `stale` because it has not seen any activity within the past 60 days. Our team uses this tool to help surface issues for review. If you are the author of the issue there's no need to comment as it will be looked at."
stale-pr-message: "This PR has been marked as `stale` because it has not seen any activity within the past 7 days. Our team uses this tool to help surface pull requests that have slipped through review. \n\n###### If deemed still relevant, the pr can be kept active by ensuring it's up to date with the main branch and removing the stale label."
stale-issue-label: 'status: stale'
stale-pr-label: 'status: stale'

View File

@ -1,66 +0,0 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
name: 'CodeQL'
on:
push:
branches: [trunk]
pull_request:
# The branches below must be a subset of the branches above
branches: [trunk]
schedule:
- cron: '0 16 * * 4'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
# Override automatic language detection by changing the below list
# Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python']
language: ['javascript']
# Learn more...
# https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection
steps:
- name: Checkout repository
uses: actions/checkout@v3
with:
# We must fetch at least the immediate parents so that if this is
# a pull request then we can checkout the head.
fetch-depth: 2
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
#- run: |
# make bootstrap
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2

View File

@ -1,38 +0,0 @@
name: Report Flaky Tests
on:
workflow_run:
workflows: ['E2E tests']
types:
- completed
jobs:
report-to-issues:
name: Report to GitHub issues
runs-on: ubuntu-latest
if: ${{ github.event.workflow_run.conclusion == 'success' }}
steps:
# Checkout defaults to using the branch which triggered the event, which
# isn't necessarily `trunk` (e.g. in the case of a merge).
- uses: actions/checkout@v3
with:
repository: WordPress/gutenberg
ref: trunk
- name: Use desired version of NodeJS
uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
with:
node-version-file: '.nvmrc'
cache: npm
- name: Npm install and build
# TODO: We don't have to build the entire project, just the action itself.
run: |
npm ci
npm run build:packages
- name: Report flaky tests
uses: ./packages/report-flaky-tests
with:
repo-token: '${{ secrets.GITHUB_TOKEN }}'
label: 'type: flaky test'
artifact-name-prefix: flaky-tests-report

View File

@ -1,116 +0,0 @@
name: JavaScript, CSS and Markdown Linting
on:
pull_request:
push:
branches: [trunk]
permissions:
actions: write
checks: write
pull-requests: read
jobs:
# cache node and modules
Setup:
name: Setup
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Cache node_modules
id: cache-node-modules
uses: actions/cache@v3
env:
cache-name: cache-node-modules
with:
path: node_modules
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }}
- name: Setup node version and npm cache
uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
cache: 'npm'
- name: Install Node Dependencies
if: steps.cache-node-modules.outputs.cache-hit != 'true'
run: npm ci --no-optional
JSLintingCheck:
name: Lint JavaScript
needs: Setup
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Cache node_modules
id: cache-node-modules
uses: actions/cache@v3
env:
cache-name: cache-node-modules
with:
path: node_modules
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }}
- name: Setup node version
uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
- name: Save code linting report JSON
run: npm run lint:js:report
# Continue to the next step even if this fails
continue-on-error: true
- name: Upload ESLint report
uses: actions/upload-artifact@v3.1.2
with:
name: eslint_report.json
path: eslint_report.json
- name: Annotate code linting results
uses: ataylorme/eslint-annotate-action@v2
with:
repo-token: '${{ secrets.GITHUB_TOKEN }}'
report-json: 'eslint_report.json'
CSSLintingCheck:
name: Lint CSS
needs: Setup
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Cache node_modules
id: cache-node-modules
uses: actions/cache@v3
env:
cache-name: cache-node-modules
with:
path: node_modules
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }}
- name: Setup node version
uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
- name: Lint CSS
run: npm run lint:css
MDLintingCheck:
name: Lint MD
needs: Setup
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup node version and npm cache
uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
cache: 'npm'
- name: Install Node dependencies
run: npm ci --no-optional
- name: Lint MD
run: npm run lint:md:docs

View File

@ -1,21 +0,0 @@
on:
pull_request:
types: [opened, synchronize, closed]
push:
issues:
types: [edited]
name: Project management automations
permissions:
pull-requests: write
actions: write
jobs:
project-management-automation:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
ref: trunk
- uses: woocommerce/automations@v1
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
automations: todos

View File

@ -1,67 +0,0 @@
// Note you'll need to install these dependencies as part of your workflow.
const { Octokit } = require( '@octokit/action' );
const core = require( '@actions/core' );
// Note that this script assumes you set GITHUB_TOKEN in env, if you don't
// this won't work.
const octokit = new Octokit();
// Bot accounts that should never be treated as community contributors.
const ignoredUsernames = [ 'dependabot[bot]' ];
// True when the username is on the ignore list above.
const checkIfIgnoredUsername = ( username ) =>
ignoredUsernames.includes( username );
// Resolve the author login from either an issue or a pull_request event
// payload; null when neither carries one.
const getIssueAuthor = ( payload ) => {
	const issueAuthor = payload?.issue?.user?.login;
	if ( issueAuthor ) {
		return issueAuthor;
	}
	return payload?.pull_request?.user?.login || null;
};
const isCommunityContributor = async ( owner, repo, username ) => {
if ( username && ! checkIfIgnoredUsername( username ) ) {
const {
data: { permission },
} = await octokit.rest.repos.getCollaboratorPermissionLevel( {
owner,
repo,
username,
} );
return permission === 'read' || permission === 'none';
}
console.log( 'Not a community contributor!' );
return false;
};
const addLabel = async ( label, owner, repo, issueNumber ) => {
await octokit.rest.issues.addLabels( {
owner,
repo,
issue_number: issueNumber,
labels: [ label ],
} );
};
const applyLabelToCommunityContributor = async () => {
const eventPayload = require( process.env.GITHUB_EVENT_PATH );
const username = getIssueAuthor( eventPayload );
const [ owner, repo ] = process.env.GITHUB_REPOSITORY.split( '/' );
const { number } = eventPayload?.issue || eventPayload?.pull_request;
const isCommunityUser = await isCommunityContributor(
owner,
repo,
username
);
core.setOutput( 'is-community', isCommunityUser ? 'yes' : 'no' );
if ( isCommunityUser ) {
console.log( 'Adding community contributor label' );
await addLabel( 'type: community contribution', owner, repo, number );
}
};
applyLabelToCommunityContributor();

View File

@ -1,68 +0,0 @@
# Generates TypeScript checkstyle reports for trunk and for the PR branch,
# then reports new TypeScript errors introduced by the PR.
name: Monitor TypeScript errors
on:
  push:
    branches: [trunk]
  pull_request:
jobs:
  check-typescript-errors-with-trunk:
    runs-on: ubuntu-latest
    steps:
      # Check out trunk first to produce the baseline report.
      - uses: actions/checkout@v3
        with:
          ref: 'trunk'
      - name: Cache node modules
        uses: actions/cache@v3
        env:
          cache-name: cache-node-modules
        with:
          # npm cache files are stored in `~/.npm` on Linux/macOS
          path: ~/.npm
          key: ${{ runner.OS }}-build-${{ secrets.CACHE_VERSION }}-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }}
          restore-keys: |
            ${{ runner.OS }}-build-${{ secrets.CACHE_VERSION }}-${{ env.cache-name }}-
            ${{ runner.OS }}-build-${{ secrets.CACHE_VERSION }}-
      - name: Use Node.js
        uses: actions/setup-node@v4
        with:
          node-version-file: '.nvmrc'
          cache: 'npm'
      # Stash the trunk report in $HOME so the next checkout does not wipe it.
      - name: Generate checkstyle for trunk
        run: |
          npm ci
          npm run ts:log-errors
          mv checkstyle.xml $HOME/checkstyle-trunk.xml
      # Second checkout: the PR branch (default ref for pull_request events).
      - uses: actions/checkout@v3
      - name: Generate checkstyle for current PR
        run: |
          npm ci
          npm run ts:log-errors
          mv $HOME/checkstyle-trunk.xml checkstyle-trunk.xml
      - name: Get branch name
        id: branch-name
        uses: tj-actions/branch-names@v7
      - name: Monitor TypeScript errors
        uses: ./.github/monitor-typescript-errors
        id: monitor-typescript-errors
        with:
          repo-token: '${{ secrets.GITHUB_TOKEN }}'
          checkstyle: checkstyle.xml
          checkstyle-trunk: checkstyle-trunk.xml
          create-comment: false
        env:
          AIRTABLE_API_KEY: ${{ secrets.AIRTABLE_API_KEY }}
          CURRENT_BRANCH: ${{ steps.branch-name.outputs.current_branch }}
      # Feed the report into the aggregated PR comment instead of posting directly.
      - name: Append report
        uses: ./.github/comments-aggregator
        with:
          repo-token: '${{ secrets.GITHUB_TOKEN }}'
          section-id: monitor-typescript-errors
          content: ${{steps.monitor-typescript-errors.outputs.comment}}
          order: 20

Binary file not shown.

Before

Width:  |  Height:  |  Size: 258 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 76 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.8 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 3.3 KiB

View File

@ -1,69 +0,0 @@
#!/bin/sh

# Updates the plugin version number across all release files.
# Reads the version from $VERSION, prompting interactively when unset.

# Functions

# Check if string contains substring
is_substring() {
	case "$2" in
		*$1*)
			return 0
			;;
		*)
			return 1
			;;
	esac
}

# Output colorized strings
#
# Color codes:
# 0 - black
# 1 - red
# 2 - green
# 3 - yellow
# 4 - blue
# 5 - magenta
# 6 - cian
# 7 - white
output() {
	echo "$(tput setaf "$1")$2$(tput sgr0)"
}

# Use -z with quoting so the check is robust for empty/whitespace values.
if [ -z "$VERSION" ]; then
	output 3 "Please enter the version number, for example, 1.0.0:"
	read -r VERSION
fi

output 2 "Updating version numbers in files..."

IS_PRE_RELEASE=false

# Check if is a pre-release (any version containing a dash, e.g. 1.0.0-rc.1).
if is_substring "-" "${VERSION}"; then
	IS_PRE_RELEASE=true
	output 4 "Detected pre-release version."
fi

if [ "$IS_PRE_RELEASE" = false ]; then
	# Replace all instances of $VID:$ with the release version but only if not pre-release.
	find ./src woocommerce-gutenberg-products-block.php -name "*.php" -print0 | xargs -0 perl -i -pe 's/\$VID:\$/'${VERSION}'/g'
	# Update version number in readme.txt but only if not pre-release.
	perl -i -pe 's/Stable tag:*.+/Stable tag: '${VERSION}'/' readme.txt
	output 2 "Version numbers updated in readme.txt and \$VID:\$ instances."
else
	output 4 "Note: pre-releases will not have the readme.txt stable tag updated."
fi

# Update version in main plugin file.
perl -i -pe 's/Version:*.+/Version: '${VERSION}'/' woocommerce-gutenberg-products-block.php
# Update version in package.json.
perl -i -pe 's/"version":*.+/"version": "'${VERSION}'",/' package.json
# Update version in package-lock.json.
perl -i -0777 -pe 's/"name": "\@woocommerce\/block-library",\s*\K"version":*.+\n/"version": "'${VERSION}'",\n/g' package-lock.json
# Update version in src/Package.php.
perl -i -pe "s/version \= '*.+';/version = '${VERSION}';/" src/Package.php
# Update version in composer.json.
perl -i -pe 's/"version":*.+/"version": "'${VERSION}'",/' composer.json

output 2 "Version numbers updated in main plugin file, package.json, package-lock.json, src/Package.php and composer.json."

View File

@ -1,76 +0,0 @@
# Changelog Script
This folder contains the logic for a changelog script that can be used for generating changelog entries from either pull requests added to a GitHub milestone, or pull requests that are part of a Zenhub release.
## Usage
By default, changelog entries will use the title of pull requests. However, you can also customize the changelog entry by adding to the description of the pull custom text in the following format.
```md
### Changelog
> Fix bug in Safari and other Webkit browsers.
```
You can implement the script in your `package.json` in the simplest form by adding the following to the `"scripts"` property (assuming it is installed in `./bin`):
```json
{
"scripts": {
"changelog": "node ./bin/changelog"
}
}
```
## Configuration
The following configuration options can be set for the changelog script. **Note:** you can use all of these options but environment variables overwrite `package.json` config and command line arguments overwrite environment variables.
`package.json` configuration should be added on a top level `changelog` property.
The 'variable' in the following table can be used in `package.json` or as a cli arg.
| variable | description |
| ---------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| labelPrefix | Any labels prefixed with this string will be used to derive the "type" of change (defaults to `type:`). |
| skipLabel | Any pull having this label will be skipped for the changelog (defaults to `no-changelog`). |
| defaultPrefix | When there is no label with the `labelPrefix` on a pull, this is the default type that will be used for the changelog entry (defaults to `dev`). |
| changelogSrcType | Either "MILESTONE" (default) or "ZENHUB_RELEASE". This determines what will serve as the source for the changelog entries. |
| devNoteLabel | If a pull has this label then `[DN]` will be appended to the end of the changelog. It's a good way to indicate what entries have (or will have) dev notes. |
| repo | This is the namespace for the GitHub repository used as the source for pulls used in the changelog entries. Example: `'woocommerce/woocommerce-gutenberg-products-block'` |
| githubToken | You can pass your GitHub API token to the script. NOTE: Strongly recommend you use environment variable for this (`GITHUB_TOKEN`). |
| zhApiKey | You can pass your Zenhub api key to the script using this config. NOTE: Strongly recommend you use environment variable for this. |
The two environment variables you can use are:
| Environment Variable | Description |
| -------------------- | ------------------------------------------------------------- |
| GITHUB_TOKEN | GitHub API token for authorizing on the GitHub API. |
| ZH_API_TOKEN | Zenhub API token used for authorizing against the Zenhub API. |
### Examples
#### package.json
```json
{
"changelog": {
"labelPrefix": "type:",
"skipLabel": "skip-changelog",
"defaultPrefix": "dev",
"repo": "woocommerce/woocommerce-gutenberg-products-block"
}
}
```
#### Environment Variable
```bash
GITHUB_TOKEN="1343ASDFQWER13241REASD" node ./bin/changelog
```
#### Command Line
```bash
node ./bin/changelog --labelPrefix="type:" --skipLabel="skip-changelog" --defaultPrefix="dev" --repo="woocommerce/woocommerce-gutenberg-products-block" --githubToken="1343ASDFQWER13241REASD"
```

View File

@ -1,88 +0,0 @@
'use strict';
const requestPromise = require( 'request-promise' );
const { graphql } = require( '@octokit/graphql' );
const { pkg, REPO } = require( '../config' );
/* eslint no-console: 0 */
const headers = {
authorization: `token ${ pkg.changelog.githubToken }`,
'user-agent': 'changelog-tool',
};
const authedGraphql = graphql.defaults( { headers } );
const getPullRequestType = ( labels ) => {
const typeLabel = labels.find( ( label ) =>
label.name.includes( pkg.changelog.labelPrefix )
);
if ( ! typeLabel ) {
return pkg.changelog.defaultPrefix;
}
return typeLabel.name.replace( `${ pkg.changelog.labelPrefix } `, '' );
};
const devNoteSuffix = ( labels ) => {
const noteLabel = labels.find( ( label ) =>
label.name.includes( pkg.changelog.devNoteLabel )
);
return noteLabel ? ' [DN]' : '';
};
const isCollaborator = async ( username ) => {
return requestPromise( {
url: `https://api.github.com/orgs/${
REPO.split( '/' )[ 0 ]
}/members/${ username }`,
headers,
resolveWithFullResponse: true,
} )
.then( ( response ) => {
return response.statusCode === 204;
} )
.catch( ( err ) => {
if ( err.statusCode !== 404 ) {
console.log( '🤯' );
console.log( err.message );
}
} );
};
const getEntry = async ( pullRequest ) => {
if (
pullRequest.labels.nodes.some(
( label ) => label.name === pkg.changelog.skipLabel
)
) {
return;
}
const collaborator = await isCollaborator( pullRequest.author.login );
const type = getPullRequestType( pullRequest.labels.nodes );
const authorTag = collaborator ? '' : `👏 @${ pullRequest.author.login }`;
const devNote = devNoteSuffix( pullRequest.labels.nodes );
let title;
if ( /### Changelog\r\n\r\n> /.test( pullRequest.body ) ) {
const bodyParts = pullRequest.body.split( '### Changelog\r\n\r\n> ' );
const note = bodyParts[ bodyParts.length - 1 ];
title = note
// Remove comment prompt
.replace( /<!---(.*)--->/gm, '' )
// Remove new lines and whitespace
.trim();
if ( ! title.length ) {
title = `${ type }: ${ pullRequest.title }`;
} else {
title = `${ type }: ${ title }`;
}
} else {
title = `${ type }: ${ pullRequest.title }`;
}
return `- ${ title } [#${ pullRequest.number }](${ pullRequest.url })${ devNote } ${ authorTag }`;
};
module.exports = {
authedGraphql,
getEntry,
};

View File

@ -1,4 +0,0 @@
const { authedGraphql } = require( './get-entry' );
const { make } = require( './make' );
module.exports = { authedGraphql, make };

View File

@ -1,34 +0,0 @@
'use strict';
const chalk = require( 'chalk' );
const { getEntry } = require( './get-entry' );
/* eslint no-console: 0*/

/**
 * Print a sorted changelog built from the pull requests returned by the
 * supplied fetcher for the given version/release identifier.
 *
 * @param {Function} pullRequestFetcher Async function resolving to an array of pull requests.
 * @param {string}   version            Version or release id forwarded to the fetcher.
 */
const make = async ( pullRequestFetcher, version ) => {
	const pullRequests = await pullRequestFetcher( version );
	const allEntries = await Promise.all(
		pullRequests.map( ( pullRequest ) => getEntry( pullRequest ) )
	);
	if ( ! allEntries.length ) {
		console.log(
			chalk.yellow( "This version doesn't have any associated PR." )
		);
		return;
	}
	// getEntry() resolves to undefined for skipped pull requests.
	const entries = allEntries.filter( Boolean );
	if ( ! entries.length ) {
		console.log(
			chalk.yellow(
				'None of the PRs of this version are eligible for the changelog.'
			)
		);
		return;
	}
	console.log( [ ...entries ].sort().join( '\n' ) );
};

module.exports = { make };

View File

@ -1,46 +0,0 @@
'use strict';
const pkg = require( '../../package.json' );
const Config = require( 'merge-config' );
const config = new Config();
// Supported sources for changelog entries.
const changelogSrcTypes = {
	MILESTONE: 'MILESTONE',
	ZENHUB: 'ZENHUB_RELEASE',
};
// Baseline configuration used when package.json has no `changelog` section.
const DEFAULTS = {
	labelPrefix: 'type:',
	skipLabel: 'no-changelog',
	defaultPrefix: 'dev',
	changelogSrcType: changelogSrcTypes.MILESTONE,
	devNoteLabel: 'dev-note',
	repo: '',
	githubToken: '',
	zhApiToken: '',
};
pkg.changelog = pkg.changelog || DEFAULTS;
// Merge precedence (lowest to highest): defaults, package.json `changelog`
// section, environment variables, command-line arguments.
config.merge( { ...DEFAULTS, ...pkg.changelog } );
config.env( [ 'GITHUB_TOKEN', 'ZH_API_TOKEN' ] );
config.argv( Object.keys( DEFAULTS ) );
const REPO = config.get( 'repo' );
// The repository namespace is mandatory; fail fast with a clear message.
if ( ! REPO ) {
	throw new Error(
		"The 'repo' configuration value is not set. This script requires the\n" +
			'repository namespace used as the source for the changelog entries.'
	);
}
module.exports = {
	pkg: {
		...pkg,
		changelog: config.get(),
	},
	REPO,
	changelogSrcTypes,
};

View File

@ -1,7 +0,0 @@
'use-strict';
const { makeChangeLog } = require( './make-change-log' );
module.exports = {
makeChangeLog,
};

View File

@ -1,70 +0,0 @@
'use strict';
const chalk = require( 'chalk' );
const promptly = require( 'promptly' );
const { REPO, pkg } = require( '../config' );
const { make } = require( '../common' );
const { fetchAllPullRequests } = require( './requests' );
/* eslint no-console: 0 */
let ready = false;
const makeChangeLog = async () => {
if ( ! pkg.changelog.githubToken ) {
console.log(
chalk.yellow(
'This program requires an api token. You can create one here: '
) + 'https://github.com/settings/tokens'
);
console.log( '' );
console.log(
chalk.yellow(
'Token scope will require read permissions on public_repo, admin:org, and user.'
)
);
console.log( '' );
console.log(
chalk.yellow(
'Export the token as variable called GITHUB_TOKEN from your bash profile.'
)
);
console.log( '' );
ready = await promptly.confirm( 'Are you ready to continue? ' );
} else {
console.log( chalk.green( 'Detected GITHUB_TOKEN is set.' ) );
ready = true;
}
if ( ready ) {
console.log( '' );
console.log(
chalk.yellow(
'In order to generate the changelog, you will have to provide a version number to retrieve the PRs from.'
)
);
console.log( '' );
console.log(
chalk.yellow( 'Write it as it appears in the milestones page: ' ) +
`https://github.com/${ REPO }/milestones`
);
console.log( '' );
const version = await promptly.prompt( 'Version number: ' );
console.log( '' );
console.log(
chalk.green(
'Here is the generated changelog. Be sure to remove entries ' +
`not intended for a ${ pkg.title } release.`
)
);
console.log( '' );
make( fetchAllPullRequests, version );
} else {
console.log( '' );
console.log( chalk.yellow( 'Ok, see you soon.' ) );
console.log( '' );
}
};
module.exports = {
makeChangeLog,
};

View File

@ -1,93 +0,0 @@
'use strict';
const { REPO } = require( '../config' );
const { authedGraphql } = require( '../common' );
/* eslint no-console: 0 */
const getMilestoneNumber = async ( version ) => {
const [ owner, repo ] = REPO.split( '/' );
const query = `
{
repository(owner: "${ owner }", name: "${ repo }") {
milestones(last: 50) {
nodes {
title
number
}
}
}
}
`;
const data = await authedGraphql( query );
const matchingNode = data.repository.milestones.nodes.find(
( node ) => node.title === version
);
if ( ! matchingNode ) {
throw new Error(
`Unable to find a milestone matching the given version ${ version }`
);
}
return matchingNode.number;
};
const getQuery = ( milestoneNumber, before ) => {
const [ owner, repo ] = REPO.split( '/' );
const paging = before ? `, before: "${ before }"` : '';
return `
{
repository(owner: "${ owner }", name: "${ repo }") {
milestone(number: ${ milestoneNumber }) {
pullRequests(last: 100, states: [MERGED]${ paging }) {
totalCount
pageInfo {
hasPreviousPage
startCursor
}
nodes {
number
title
url
author {
login
}
body
labels(last: 10) {
nodes {
name
}
}
}
}
}
}
}
`;
};
const fetchAllPullRequests = async ( version ) =>
await ( async () => {
const milestoneNumber = await getMilestoneNumber( version );
const fetchResults = async ( before ) => {
const query = getQuery( milestoneNumber, before );
const results = await authedGraphql( query );
if (
results.repository.milestone.pullRequests.pageInfo
.hasPreviousPage === false
) {
return results.repository.milestone.pullRequests.nodes;
}
const nextResults = await fetchResults(
results.repository.milestone.pullRequests.pageInfo.startCursor
);
return results.repository.milestone.pullRequests.nodes.concat(
nextResults
);
};
return await fetchResults();
} )();
module.exports = {
fetchAllPullRequests,
};

View File

@ -1,19 +0,0 @@
#!/usr/bin/env node
'use strict';
/* eslint no-console: 0 */
const chalk = require( 'chalk' );

// Entry point: pick the changelog generator matching the configured source
// (Zenhub release vs GitHub milestone) and run it.
try {
	const { makeChangeLog: githubMake } = require( './github' );
	const { makeChangeLog: zenhubMake } = require( './zenhub' );
	const { pkg, changelogSrcTypes } = require( './config' );
	const makeChangeLog =
		pkg.changelog.changelogSrcType === changelogSrcTypes.ZENHUB
			? zenhubMake
			: githubMake;
	makeChangeLog();
} catch ( error ) {
	console.log( chalk.red( error.message ) );
	// Signal failure to the shell instead of exiting 0 on configuration errors.
	process.exitCode = 1;
}

View File

@ -1,7 +0,0 @@
'use-strict';
const { makeChangeLog } = require( './make-change-log' );
module.exports = {
makeChangeLog,
};

View File

@ -1,88 +0,0 @@
'use strict';
const chalk = require( 'chalk' );
const promptly = require( 'promptly' );
const { pkg } = require( '../config' );
const { make } = require( '../common' );
const { fetchAllPullRequests } = require( './requests' );
/* eslint no-console: 0 */
let ready = false;
const makeChangeLog = async () => {
if ( ! pkg.changelog.zhApiToken || ! pkg.changelog.githubToken ) {
const zenhubSet = pkg.changelog.zhApiToken
? chalk.green( 'set' )
: chalk.red( 'not set' );
const githubSet = pkg.changelog.githubToken
? chalk.green( 'set' )
: chalk.red( 'not set' );
console.log( `${ chalk.yellow( 'Zenhub Token:' ) } ${ zenhubSet }` );
console.log( `${ chalk.yellow( 'GitHub Token:' ) } ${ githubSet }` );
console.log( '' );
console.log(
chalk.yellow(
'This program requires an api token from GitHub and Zenhub.'
)
);
console.log(
chalk.yellow(
'You can create and get a GitHub token here: https://github.com/settings/tokens'
)
);
console.log(
chalk.yellow(
'You can create and get a Zenhub token here: https://app.zenhub.com/dashboard/tokens'
)
);
console.log( '' );
console.log(
chalk.yellow(
'Token scope for GitHub will require read permissions on public_repo, admin:org, and user.'
)
);
console.log( '' );
console.log(
chalk.yellow(
'Export the github token as variable called GITHUB_TOKEN and the Zenhub token as a variable called ZH_API_TOKEN from your bash profile.'
)
);
console.log( '' );
ready = await promptly.confirm( 'Are you ready to continue? ' );
} else {
console.log(
chalk.green(
'Detected that ZH_API_TOKEN and GITHUB_TOKEN values are set.'
)
);
ready = true;
}
if ( ready ) {
console.log( '' );
console.log(
chalk.yellow(
'In order to generate the changelog, you will have to provide the Zenhub release ID to retrieve the PRs from. You can get that from `release` param value in the url of the release report page.'
)
);
console.log( '' );
const releaseId = await promptly.prompt( 'Release Id: ' );
console.log( '' );
console.log(
chalk.green(
'Here is the generated changelog. Be sure to remove entries ' +
`not intended for a ${ pkg.title } release. All entries with the ${ pkg.changelog.skipLabel } label have been skipped`
)
);
console.log( '' );
make( fetchAllPullRequests, releaseId );
} else {
console.log( '' );
console.log( chalk.yellow( 'Ok, see you soon.' ) );
console.log( '' );
}
};
module.exports = {
makeChangeLog,
};

View File

@ -1,100 +0,0 @@
'use strict';
/* eslint no-console: 0 */
const ZenHub = require( 'zenhub-api' );
const { REPO, pkg } = require( '../config' );
const { authedGraphql } = require( '../common' );
const { pull } = require( 'lodash' );
// Zenhub REST client authenticated with the configured API token.
const api = new ZenHub( pkg.changelog.zhApiToken );
// Build the GraphQL query for the last 100 merged PRs in the repository,
// optionally paging backwards from the `before` cursor.
const getQuery = ( before ) => {
	const [ owner, repo ] = REPO.split( '/' );
	const paging = before ? `, before: "${ before }"` : '';
	const query = `
	{
		repository(owner: "${ owner }", name: "${ repo }") {
			pullRequests(last: 100, states: [MERGED]${ paging }) {
				totalCount
				pageInfo {
					startCursor
				}
				nodes {
					number
					title
					url
					author {
						login
					}
					body
					labels(last: 10) {
						nodes {
							name
						}
					}
				}
			}
		}
	}
	`;
	return query;
};
// Resolve the Zenhub release report into the list of issue/PR numbers it contains.
const fetchAllIssuesForRelease = async ( releaseId ) => {
	const releaseIssues = await api.getReleaseReportIssues( {
		release_id: releaseId,
	} );
	return releaseIssues.map( ( releaseIssue ) => releaseIssue.issue_number );
};
// Keep only the pull requests whose number appears in the release id list.
// NOTE: lodash `pull` mutates `releaseIds`, removing matched numbers so later
// pages do not re-match them.
const extractPullRequestsMatchingReleaseIssue = (
	releaseIds,
	pullRequests
) => {
	return pullRequests.filter( ( pullRequest ) => {
		const hasPullRequest = releaseIds.includes( pullRequest.number );
		if ( hasPullRequest ) {
			pull( releaseIds, pullRequest.number );
			return true;
		}
		return false;
	} );
};
// Collect the merged PRs belonging to the Zenhub release by walking the
// repository's merged-PR pages backwards, bounded by `maxPages`.
const fetchAllPullRequests = async ( releaseId ) => {
	// first get all release issue ids
	const releaseIds = await fetchAllIssuesForRelease( releaseId );
	// Page budget: enough 100-PR pages to cover the release, plus slack.
	let maxPages = Math.ceil( releaseIds.length / 100 ) + 2;
	const fetchResults = async ( before ) => {
		const query = getQuery( before );
		const results = await authedGraphql( query );
		const pullRequests = extractPullRequestsMatchingReleaseIssue(
			releaseIds,
			results.repository.pullRequests.nodes
		);
		if ( maxPages === 0 ) {
			return pullRequests;
		}
		maxPages--;
		const nextResults = await fetchResults(
			results.repository.pullRequests.pageInfo.startCursor
		);
		return pullRequests.concat(
			extractPullRequestsMatchingReleaseIssue( releaseIds, nextResults )
		);
	};
	let results = [];
	try {
		results = await fetchResults();
	} catch ( e ) {
		// Best effort: log the failing request details and fall through with
		// whatever (possibly empty) results we have.
		console.log( e.request );
		console.log( e.message );
		console.log( e.data );
	}
	return results;
};
module.exports = {
	fetchAllPullRequests,
};

View File

@ -1,34 +0,0 @@
# Mirror a built plugin checkout ($1) into a destination directory ($2),
# excluding development-only files so only distributable files remain.
cd "$1" || exit
# --delete/--delete-excluded remove anything at the destination that is not
# (or is no longer) part of the distributable file set.
rsync ./ "$2"/ --recursive --delete --delete-excluded \
	--exclude=assets/ \
	--exclude=".*/" \
	--exclude="*.md" \
	--exclude=".*" \
	--exclude="composer.*" \
	--exclude="*.lock" \
	--exclude=bin/ \
	--exclude=node_modules/ \
	--exclude=tests/ \
	--exclude=docs/ \
	--exclude=phpcs.xml \
	--exclude=phpunit.xml.dist \
	--exclude=CODEOWNERS \
	--exclude=renovate.json \
	--exclude="*.config.js" \
	--exclude="*-config.js" \
	--exclude="*.config.json" \
	--exclude=package.json \
	--exclude=package-lock.json \
	--exclude=none \
	--exclude=blocks.ini \
	--exclude=docker-compose.yml \
	--exclude=tsconfig.json \
	--exclude=tsconfig.base.json \
	--exclude=woocommerce-gutenberg-products-block.zip \
	--exclude="zip-file/" \
	--exclude=global.d.ts \
	--exclude=packages/ \
	--exclude=patches/ \
	--exclude=reports/ \
	--exclude=storybook/
echo -e "\nDone copying files!\n"

View File

@ -1,83 +0,0 @@
#!/bin/bash

# Sanity-checks the local development environment for WooCommerce Blocks:
# required tools, installed dependencies, and optional helpers.

# Output colorized strings
#
# Color codes:
# 0 - black
# 1 - red
# 2 - green
# 3 - yellow
# 4 - blue
# 5 - magenta
# 6 - cian
# 7 - white
output() {
	echo "$(tput setaf "$1")$2$(tput sgr0)"
}

pass() {
	output 2 "$1"
}

fail() {
	output 1 "$1"
}

warn() {
	output 3 "$1"
}

function command_exists_as_alias {
	alias "$1" 2>/dev/null >/dev/null
}

# Shadows the system `which`; succeeds when $1 resolves to any command type.
function which {
	type "$1" >>/dev/null 2>&1
}

function command_is_available {
	which "$1" || command_exists_as_alias "$1" || type "$1" >/dev/null 2>/dev/null
}

function node_modules_are_available {
	[ -d node_modules ]
}

function vendor_dir_is_available {
	[ -d vendor ]
}

# Run the check ($1, with optional argument $2) once and report pass/fail.
function assert {
	if $1 $2; then
		pass "- $3 is available ✔"
	else
		fail "- $3 is missing ✗ $4"
	fi
}

echo
output 6 "BLOCKS DEVELOPMENT ENVIRONMENT CHECKS"
output 6 "====================================="
echo
echo "Checking under $PWD"
echo
output 6 "(*・‿・)ノ⌒*:・゚✧"
echo
echo "Tools for building assets"
echo "========================="
echo
assert command_is_available node "Node.js" "Node and NPM allow us to install required dependencies. You can install it from here: https://nodejs.org/en/download/"
assert command_is_available composer "Composer" "Composer allows us to install PHP dependencies. You can install it from https://getcomposer.org, or if you are running Brew you can install it by running $ brew install composer"
echo
echo "Dependencies"
echo "============"
echo
assert node_modules_are_available "" "node_modules dir" "You need to have node installed and run: $ npm install"
assert vendor_dir_is_available "" "vendor dir" "You need to have composer installed and run: $ composer install"
echo
echo "Contributing and other helpers"
echo "=============================="
echo
assert command_is_available git "git" "Git is required to push and pull from the GitHub repository. If you're running Brew, you can install it by running $ brew install git"
assert command_is_available hub "Hub" "Hub provides some useful git commands used by the deployment scripts. If you're running Brew, you can install it by running $ brew install hub"
echo

View File

@ -1,76 +0,0 @@
#!/bin/bash

# Rebases a Renovate PR branch locally and regenerates package-lock.json.

# Enable nicer messaging for build status.
BLUE_BOLD='\033[1;34m';
RED_BOLD='\033[1;31m';
COLOR_RESET='\033[0m';
GREEN_BOLD='\033[1;32m';
YELLOW_BOLD='\033[1;33m';

# Print an error and abort with a failing exit status (was exit 0, which made
# failures indistinguishable from success for callers).
error () {
	echo -e "${RED_BOLD}$1${COLOR_RESET}\n";
	exit 1;
}
status () {
	echo -e "${BLUE_BOLD}$1${COLOR_RESET}\n"
}
success () {
	echo -e "${GREEN_BOLD}$1${COLOR_RESET}\n"
}
warning () {
	echo -e "${YELLOW_BOLD}$1${COLOR_RESET}\n"
}

[[ -z "$1" ]] && {
	error "You must specify a branch to fix, for example: npm run fix-package-lock your/branch";
}

echo -e "${YELLOW_BOLD} ___ ___ ___
| | | |
|___|___|___|
| | | |
|___|___|___|
| | | |
|___|___|___|
FIX PACKAGE LOCK
================
This script will attempt to rebase a Renovate PR and update the package.lock file.
Usage: npm run fix-package-lock branch/name
${COLOR_RESET}"

echo -e "${RED_BOLD}BEFORE PROCEEDING\n=================
You should check the PR on GitHub to see if it already has conflicts with trunk.
If it does, use the checkbox in the PR to force Renovate to rebase it for you.
Once the PR has been rebased, you can run this script, and then do a squash merge on GitHub.${COLOR_RESET}"

printf "Ready to proceed? [y/N]: "
read -r PROCEED
echo

if [ "$(echo "${PROCEED:-n}" | tr "[:upper:]" "[:lower:]")" != "y" ]; then
	exit
fi

git fetch

# Quote the branch name so unusual names do not word-split.
if ! git checkout "$1"
then
	error "Unable to checkout branch";
else
	success "Checked out branch"
fi

status "Removing package-lock.json...";
rm package-lock.json

status "Installing dependencies...";
npm cache verify
npm install

status "Comitting updated package-lock.json...";
git add package-lock.json
git commit -m 'update package-lock.json'
git push --force-with-lease

success "Done. Package Lock has been updated. 🎉"

View File

@ -1,201 +0,0 @@
#!/bin/sh

RELEASER_PATH=$(pwd)
PLUGIN_SLUG="woo-gutenberg-products-block"
GITHUB_ORG="woocommerce"
GITHUB_SLUG="woocommerce-gutenberg-products-block"
IS_PRE_RELEASE=false
BUILD_PATH="${HOME}/blocks-deployment"

# When it is set to true, the commands are just printed but not executed.
DRY_RUN_MODE=false

# When it is set to true, the commands that affect the local env are executed (e.g. git commit), while the commands that affect the remote env are not executed but just printed (e.g. git push)
SIMULATE_RELEASE_MODE=false

# Functions
# Check if string contains substring
is_substring() {
	case "$2" in
		*$1*)
			return 0
			;;
		*)
			return 1
			;;
	esac
}

# Output colorized strings
#
# Color codes:
# 0 - black
# 1 - red
# 2 - green
# 3 - yellow
# 4 - blue
# 5 - magenta
# 6 - cian
# 7 - white
output() {
	echo "$(tput setaf "$1")$2$(tput sgr0)"
}

# Output colorized list
output_list() {
	echo "$(tput setaf "$1")$2:$(tput sgr0) \"$3\""
}

# Run command $1 when flag $2 is "true", otherwise print it as a dry run.
# Use a proper [ ] test; the previous `if $2 = true` executed $2 as a command
# and only worked by accident because the values were literally true/false.
simulate() {
	if [ "$2" = true ]; then
		eval "$1"
	else
		output 3 "DRY RUN: $1"
	fi
}

# Execute $1 honouring DRY_RUN_MODE and SIMULATE_RELEASE_MODE; $2 tells
# simulate() whether the command is safe to run in simulation mode.
run_command() {
	if [ "$DRY_RUN_MODE" = true ]; then
		output 3 "DRY RUN: $1"
	elif [ "$SIMULATE_RELEASE_MODE" = true ]; then
		simulate "$1" "$2"
	else
		eval "$1"
	fi
}
# Release script
echo
output 4 "BLOCKS->WordPress.org RELEASE SCRIPT"
output 4 "===================================="
echo
printf "This script prepares a GitHub tag/release for WordPress.org SVN."
echo
echo
echo "Before proceeding:"
echo " • Ensure you have already created the release on GitHub. You can use '$ npm run deploy'."
echo
output 3 "Do you want to continue? [y/N]: "
read -r PROCEED
echo
if [ "$(echo "${PROCEED:-n}" | tr "[:upper:]" "[:lower:]")" != "y" ]; then
	output 1 "Release cancelled!"
	exit 1
fi
echo
output 3 "Please enter the version number to tag, for example, 1.0.0:"
read -r VERSION
echo
# Check if is a pre-release.
if is_substring "-" "${VERSION}"; then
	IS_PRE_RELEASE=true
	output 2 "Detected pre-release version!"
fi
# Set deploy variables
SVN_REPO="http://plugins.svn.wordpress.org/${PLUGIN_SLUG}/"
GIT_REPO="https://github.com/${GITHUB_ORG}/${GITHUB_SLUG}.git"
SVN_PATH="${BUILD_PATH}/${PLUGIN_SLUG}-svn"
GIT_PATH="${BUILD_PATH}/${PLUGIN_SLUG}-git"
BRANCH="v$VERSION"
echo
echo "-------------------------------------------"
echo
echo "Review all data before proceeding:"
echo
output_list 3 "Version to release" "${VERSION}"
output_list 3 "GIT tag to release" "${BRANCH}"
output_list 3 "GIT repository" "${GIT_REPO}"
output_list 3 "wp.org repository" "${SVN_REPO}"
echo
output 3 "Do you want to continue? [y/N]: "
read -r PROCEED
echo
if [ "$(echo "${PROCEED:-n}" | tr "[:upper:]" "[:lower:]")" != "y" ]; then
	output 1 "Release cancelled!"
	exit 1
fi
# Create build directory if does not exists
if [ ! -d "$BUILD_PATH" ]; then
	mkdir -p "$BUILD_PATH"
fi
# Delete old GIT directory
rm -rf "$GIT_PATH"
# Clone GIT repository
output 2 "Cloning GIT repository..."
run_command "git clone '$GIT_REPO' '$GIT_PATH' --branch '$BRANCH' --single-branch || exit '$?'" true
# The tag must contain prebuilt assets and vendor files; refuse to deploy otherwise.
if [ ! -d "$GIT_PATH/build" ]; then
	output 3 "Build directory not found in tag. Aborting."
	exit 1
fi
if [ ! -d "$GIT_PATH/vendor" ]; then
	output 3 "Vendor directory not found in tag. Aborting."
	exit 1
fi
# Checkout SVN repository if not exists
if [ ! -d "$SVN_PATH" ]; then
	output 2 "No SVN directory found, fetching files..."
	# Checkout project without any file
	run_command "svn co --depth=files '$SVN_REPO' '$SVN_PATH'" true
	cd "$SVN_PATH" || exit
	# Fetch main directories
	run_command "svn up assets branches trunk" true
	# Fetch tags directories without content
	run_command "svn up --set-depth=immediates tags" true
	# To fetch content for a tag, use:
	# svn up --set-depth=infinity tags/<tag_number>
else
	# Update SVN
	cd "$SVN_PATH" || exit
	output 2 "Updating SVN..."
	run_command "svn up" true
fi
# Copy GIT directory to trunk
output 2 "Copying project files to SVN trunk..."
run_command "sh '${RELEASER_PATH}/bin/copy-plugin-files.sh' '$GIT_PATH' '$SVN_PATH/trunk'" true
cd "$SVN_PATH"
# Update stable tag on trunk/readme.txt
if [ $IS_PRE_RELEASE = false ]; then
	output 2 "Updating \"Stable tag\" to ${VERSION} on trunk/readme.txt..."
	run_command "perl -i -pe's/Stable tag: .*/Stable tag: ${VERSION}/' trunk/readme.txt" true
fi
# Do the remove all deleted files
run_command "svn st | grep -v '^.[ \t]*\..*' | grep '^\!' | awk '{print $2'@'}' | xargs svn rm" true
# Do the add all not know files
run_command "svn st | grep -v '^.[ \t]*\..*' | grep '^?' | awk '{print $2'@'}' | xargs svn add" true
# Copy trunk to tag/$VERSION
if [ ! -d "tags/${VERSION}" ]; then
	output 2 "Creating SVN tags/${VERSION}..."
	run_command "svn 'cp trunk tags/'${VERSION}''" true
fi
# Remove the GIT directory
output 2 "Removing GIT directory..."
run_command "rm -rf '$GIT_PATH'" true
# SVN commit messsage
# The final `svn ci` is deliberately left to the operator to review and run.
output 2 "Ready to commit into WordPress.org Plugin's Directory!"
echo
echo "-------------------------------------------"
echo
output 3 "Run the following command to commit to SVN:"
echo "cd ${SVN_PATH} && svn ci -m \"Release ${VERSION}, see readme.txt for changelog.\""

View File

@ -1,15 +0,0 @@
const fs = require( 'fs' );
const path = require( 'path' );

// Load the base .wp-env.json configuration from the project root.
const baseConfigPath = path.join( __dirname, '..', '.wp-env.json' );
const wpEnv = JSON.parse( fs.readFileSync( baseConfigPath ) );

// Add the latest stable Gutenberg release to the plugin list.
wpEnv.plugins.push(
	'https://downloads.wordpress.org/plugin/gutenberg.latest-stable.zip'
);

// We write the new file to .wp-env.override.json (https://developer.wordpress.org/block-editor/reference-guides/packages/packages-env/#wp-env-override-json)
fs.writeFileSync(
	path.join( __dirname, '..', '.wp-env.override.json' ),
	JSON.stringify( wpEnv )
);

View File

@ -1,19 +0,0 @@
const fs = require( 'fs' );
const path = require( 'path' );
// Read the base .wp-env.json configuration from the project root.
const wpEnvRaw = fs.readFileSync(
	path.join( __dirname, '..', '.wp-env.json' )
);
const wpEnv = JSON.parse( wpEnvRaw );
// Pin the core version (currently 6.4.1 — the old "6.2.2" note was stale) for
// Jest E2E tests so we can keep the tests passing when new WordPress versions
// are released. We do this because we're moving to Playwright and will abandon
// the Jest E2E tests once the migration is complete.
wpEnv.core = 'WordPress/WordPress#6.4.1';
// We write the new file to .wp-env.override.json (https://developer.wordpress.org/block-editor/reference-guides/packages/packages-env/#wp-env-override-json)
fs.writeFileSync(
	path.join( __dirname, '..', '.wp-env.override.json' ),
	JSON.stringify( wpEnv )
);

View File

@ -1,4 +0,0 @@
Significance: patch
Type: add
Comment: Add new Active Filters block powered by Interactivity API.

View File

@ -1,3 +0,0 @@
Significance: patch
Type: tweak
Comment: Product Gallery: Simplify the Product Gallery Pop-up.

View File

@ -1,3 +0,0 @@
Significance: patch
Type: tweak
Comment: Product Gallery: Add transforms for better discovery.

View File

@ -1,4 +0,0 @@
Significance: minor
Type: enhancement
Move the switch to classic shortcode block button to separate component.

View File

@ -1,4 +0,0 @@
Significance: patch
Type: dev
Comment: Fixed linting errors

View File

@ -1,4 +0,0 @@
Significance: minor
Type: enhancement
Update the generic incompatibility notice message for the Cart and Checkout page.

View File

@ -1,4 +0,0 @@
Significance: minor
Type: add
Add support for select fields in the experimental WooCommerce Blocks custom fields API.

View File

@ -1,4 +0,0 @@
Significance: patch
Type: dev
Comment: Just a change to the `.wp-env` config.

View File

@ -1,3 +0,0 @@
Significance: patch
Type: tweak
Comment: Product Gallery: Use @container rule to adjust overlay link count font size.

View File

@ -1,3 +0,0 @@
Significance: patch
Type: tweak
Comment: Remove Product Gallery (Beta) from being default.

View File

@ -1,4 +0,0 @@
Significance: patch
Type: add
Add support for additional fields in Checkout block

View File

@ -1,3 +0,0 @@
Significance: patch
Type: tweak
Comment: Product Gallery: Add Group block labels.

View File

@ -1,4 +0,0 @@
Significance: patch
Type: dev
clear out unneeded github files from block folder

View File

@ -1,4 +0,0 @@
Significance: patch
Type: update
update text domains in woocommerce-blocks folder

View File

@ -1,3 +0,0 @@
Significance: patch
Type: tweak
Comment: Product Gallery: Fix Large Image snapping position on window resize.

View File

@ -1,3 +0,0 @@
Significance: patch
Type: tweak
Comment: Product Gallery: Update div elements to ul and li for better accessibility.

View File

@ -1,3 +0,0 @@
Significance: patch
Type: tweak
Comment: Product Gallery: Improve the accessibility of the Product Gallery Pager.

View File

@ -1,4 +0,0 @@
Significance: patch
Type: update
remove PHP related scripts from package.json

View File

@ -1,4 +0,0 @@
Significance: patch
Type: fix
Add composer.json for live build

View File

@ -1,4 +0,0 @@
Significance: patch
Type: update
Allow built in payment method descriptions to contain HTML when rendered on the block checkout.

View File

@ -1,4 +0,0 @@
Significance: patch
Type: dev
Comment: Reset main image when variations are cleared.

View File

@ -54,8 +54,6 @@
"build:docs": "./vendor/bin/wp-hooks-generator --input=src --output=bin/hook-docs/data && node ./bin/hook-docs && pnpm build:docs:block-references",
"build:docs:block-references": "node ./bin/gen-block-list-doc.js",
"postbuild:docs": "./bin/add-doc-footer.sh",
"changelog:zenhub": "node ./bin/changelog --changelogSrcType='ZENHUB_RELEASE'",
"change-versions": "source ./bin/change-versions.sh",
"dev": "rimraf build/* && cross-env BABEL_ENV=default webpack",
"labels:dry": "github-label-sync --labels ./.github/label-sync-config.json --allow-added-labels --dry-run woocommerce/woocommerce-gutenberg-products-block",
"labels:sync": "github-label-sync --labels ./.github/label-sync-config.json --allow-added-labels woocommerce/woocommerce-gutenberg-products-block",
@ -72,10 +70,8 @@
"lint:js:report": "pnpm run lint:js -- --output-file eslint_report.json --ext=js,ts,tsx --format json",
"lint:js-fix": "eslint assets/js --ext=js,jsx,ts,tsx --fix",
"lint:md:docs": "wp-scripts lint-md-docs",
"fix-package-lock": "./bin/fix-package-lock.sh",
"pre-commit": "lint-staged",
"reformat-files": "prettier --ignore-path .eslintignore --write \"**/*.{js,jsx,json,ts,tsx}\"",
"release": "sh ./bin/wordpress-deploy.sh",
"rimraf": "./node_modules/rimraf/bin.js",
"start": "rimraf build/* && cross-env BABEL_ENV=default CHECK_CIRCULAR_DEPS=true webpack --watch",
"storybook": "storybook dev -c ./storybook -p 6006 --ci",
@ -107,8 +103,6 @@
"watch:build:project:bundle": "wireit"
},
"devDependencies": {
"@actions/core": "1.10.0",
"@actions/github": "5.1.1",
"@automattic/color-studio": "2.5.0",
"@babel/cli": "7.23.0",
"@babel/core": "7.23.2",
@ -119,8 +113,6 @@
"@babel/preset-react": "7.23.3",
"@babel/preset-typescript": "7.23.2",
"@bartekbp/typescript-checkstyle": "5.0.0",
"@octokit/action": "5.0.2",
"@octokit/graphql": "5.0.5",
"@playwright/test": "^1.45.1",
"@storybook/addon-a11y": "7.5.2",
"@storybook/addon-actions": "^7.6.4",
@ -161,7 +153,6 @@
"@types/wordpress__wordcount": "^2.4.5",
"@typescript-eslint/eslint-plugin": "5.56.0",
"@typescript-eslint/parser": "5.56.0",
"@woocommerce/api": "0.2.0",
"@woocommerce/data": "workspace:*",
"@woocommerce/e2e-utils": "workspace:*",
"@woocommerce/eslint-plugin": "workspace:*",
@ -195,14 +186,12 @@
"@wordpress/stylelint-config": "^21.36.0",
"allure-playwright": "^2.9.2",
"autoprefixer": "10.4.14",
"axios": "0.27.2",
"babel-jest": "^29.7.0",
"babel-plugin-explicit-exports-references": "^1.0.2",
"babel-plugin-react-docgen": "4.2.1",
"babel-plugin-transform-react-remove-prop-types": "0.4.24",
"chalk": "4.1.2",
"circular-dependency-plugin": "5.2.2",
"commander": "11.0.0",
"copy-webpack-plugin": "11.0.0",
"core-js": "3.25.0",
"create-file-webpack": "1.0.2",
@ -219,9 +208,6 @@
"eslint-plugin-storybook": "^0.6.15",
"eslint-plugin-woocommerce": "file:bin/eslint-plugin-woocommerce",
"eslint-plugin-you-dont-need-lodash-underscore": "6.12.0",
"expect-puppeteer": "6.1.1",
"fast-xml-parser": "4.2.5",
"fs-extra": "11.1.1",
"gh-pages": "4.0.0",
"github-label-sync": "^2.3.1",
"glob": "7.2.3",
@ -237,15 +223,12 @@
"lint-staged": "13.2.0",
"lodash": "4.17.21",
"markdown-it": "13.0.1",
"merge-config": "2.0.0",
"mini-css-extract-plugin": "2.7.6",
"patch-package": "6.4.7",
"postcss": "8.4.32",
"postcss-color-function": "4.1.0",
"postcss-loader": "4.3.0",
"prettier": "npm:wp-prettier@^2.8.5",
"progress-bar-webpack-plugin": "2.1.0",
"promptly": "3.2.0",
"puppeteer": "17.1.3",
"react-docgen": "5.4.3",
"react-docgen-typescript-plugin": "^1.0.5",
@ -265,8 +248,7 @@
"webpack-bundle-analyzer": "4.7.0",
"webpack-cli": "5.1.4",
"wireit": "0.14.3",
"wp-types": "3.63.0",
"zenhub-api": "0.2.0"
"wp-types": "3.63.0"
},
"engines": {
"node": "^20.11.1",

View File

@ -1,44 +0,0 @@
diff --git a/node_modules/wordpress-components/build-module/combobox-control/index.js b/node_modules/wordpress-components/build-module/combobox-control/index.js
index ddef775..2d0b3ab 100644
--- a/node_modules/wordpress-components/build-module/combobox-control/index.js
+++ b/node_modules/wordpress-components/build-module/combobox-control/index.js
@@ -55,6 +55,7 @@ function ComboboxControl({
const instanceId = useInstanceId(ComboboxControl);
const [selectedSuggestion, setSelectedSuggestion] = useState(null);
const [isExpanded, setIsExpanded] = useState(false);
+ const [inputHasFocus, setInputHasFocus] = useState( false );
const [inputValue, setInputValue] = useState('');
const inputContainer = useRef();
const currentOption = options.find(option => option.value === value);
@@ -135,7 +136,12 @@ function ComboboxControl({
}
};
+ const onBlur = () => {
+ setInputHasFocus( false );
+ };
+
const onFocus = () => {
+ setInputHasFocus( true );
setIsExpanded(true);
onFilterValueChange('');
setInputValue('');
@@ -149,7 +155,9 @@ function ComboboxControl({
const text = event.value;
setInputValue(text);
onFilterValueChange(text);
- setIsExpanded(true);
+ if ( inputHasFocus ) {
+ setIsExpanded( true );
+ }
};
const handleOnReset = () => {
@@ -193,6 +201,7 @@ function ComboboxControl({
value: isExpanded ? inputValue : currentLabel,
"aria-label": currentLabel ? `${currentLabel}, ${label}` : null,
onFocus: onFocus,
+ onBlur: onBlur,
isExpanded: isExpanded,
selectedSuggestionIndex: matchingSuggestions.indexOf(selectedSuggestion),
onChange: onInputChange

View File

@ -0,0 +1,4 @@
Significance: patch
Type: dev
Clean up unused files in plugins/woocommerce-blocks

File diff suppressed because it is too large Load Diff