From bc164ab28e68d103539abaf6c8f710b63f04e01f Mon Sep 17 00:00:00 2001 From: Aleksey Kulikov Date: Fri, 6 Mar 2020 15:09:34 +0100 Subject: [PATCH 01/19] support yaml config (fix #23) --- .github/lighthouse/lighthouserc-static-dist-dir.json | 7 ------- .github/lighthouse/lighthouserc-static-dist-dir.yml | 3 +++ .github/workflows/LHCI-static-dist-dir.yml | 2 +- CONTRIBUTING.md | 2 +- src/input.js | 12 +++++------- src/types.d.ts | 3 +++ 6 files changed, 13 insertions(+), 16 deletions(-) delete mode 100644 .github/lighthouse/lighthouserc-static-dist-dir.json create mode 100644 .github/lighthouse/lighthouserc-static-dist-dir.yml create mode 100644 src/types.d.ts diff --git a/.github/lighthouse/lighthouserc-static-dist-dir.json b/.github/lighthouse/lighthouserc-static-dist-dir.json deleted file mode 100644 index 632cbdbf0..000000000 --- a/.github/lighthouse/lighthouserc-static-dist-dir.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "ci": { - "collect": { - "staticDistDir": "./script/static-website" - } - } -} diff --git a/.github/lighthouse/lighthouserc-static-dist-dir.yml b/.github/lighthouse/lighthouserc-static-dist-dir.yml new file mode 100644 index 000000000..e251009f8 --- /dev/null +++ b/.github/lighthouse/lighthouserc-static-dist-dir.yml @@ -0,0 +1,3 @@ +ci: + collect: + staticDistDir: './script/static-website' diff --git a/.github/workflows/LHCI-static-dist-dir.yml b/.github/workflows/LHCI-static-dist-dir.yml index 850313faa..0b75b430c 100644 --- a/.github/workflows/LHCI-static-dist-dir.yml +++ b/.github/workflows/LHCI-static-dist-dir.yml @@ -10,4 +10,4 @@ jobs: uses: ./ with: # no urls needed, since it uses local folder to scan .html files - configPath: '.github/lighthouse/lighthouserc-static-dist-dir.json' + configPath: '.github/lighthouse/lighthouserc-static-dist-dir.yml' diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index d48bc7b34..60a74bc85 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -41,7 +41,7 @@ python script/simple-server.py # start basic server in a separate tab PAGE="src/" INPUT_URLS="http://localhost:3000/\$PAGE" INPUT_RUNS="1" node src/index.js # run with a static dist dir -INPUT_CONFIGPATH=".github/lighthouse/lighthouserc-static-dist-dir.json" INPUT_RUNS="1" node src/index.js +INPUT_CONFIGPATH=".github/lighthouse/lighthouserc-static-dist-dir.yml" INPUT_RUNS="1" node src/index.js # run with Slack integration # some of env variables mocked from GitHub ENV - https://help.github.com/en/actions/automating-your-workflow-with-github-actions/using-environment-variables diff --git a/src/input.js b/src/input.js index 1a340e9bf..eddd94d4a 100644 --- a/src/input.js +++ b/src/input.js @@ -1,5 +1,5 @@ const core = require('@actions/core') -const { readFileSync } = require('fs') +const { loadRcFile } = require('@lhci/utils/src/lighthouserc') function getArgs() { // Make sure we don't have LHCI xor API token @@ -19,9 +19,8 @@ function getArgs() { // Inspect lighthouserc file for malformations const configPath = getArg('configPath') if (configPath) { - const contents = readFileSync(configPath, 'utf8') - const rcFileObj = JSON.parse(contents) - if (!('ci' in rcFileObj)) { + const rcFileObj = loadRcFile(configPath) + if (!rcFileObj.ci) { // Fail and exit core.setFailed(`Config missing top level 'ci' property`) process.exit(1) @@ -30,10 +29,9 @@ function getArgs() { rcAssert = 'assert' in rcFileObj.ci // Check if we have a static-dist-dir - if (rcCollect) { - + if (rcFileObj.ci.collect) { if ('url' in rcFileObj.ci.collect) { - urls = rcFileObj.ci.collect.url + urls = 
rcFileObj.ci.collect.url } if ('staticDistDir' in rcFileObj.ci.collect) { diff --git a/src/types.d.ts b/src/types.d.ts new file mode 100644 index 000000000..37d6b3fc3 --- /dev/null +++ b/src/types.d.ts @@ -0,0 +1,3 @@ +declare module '@lhci/utils/src/lighthouserc' { + export function loadRcFile(path: string): { ci?: { collect?: { url?: string; staticDistDir?: string } } } +} From 2980b9e8436ffc0faa23d4eca6bbbed1d27a3d96 Mon Sep 17 00:00:00 2001 From: Aleksey Kulikov Date: Fri, 6 Mar 2020 15:09:52 +0100 Subject: [PATCH 02/19] remove log level --- .../LHCI-assert-on-budget-github-notification.yml | 1 - .../workflows/LHCI-assert-on-budget-notification.yml | 1 - .../LHCI-assert-on-budget-slack-notification.yml | 1 - README.md | 10 ---------- action.yml | 3 --- src/output.js | 3 +-- 6 files changed, 1 insertion(+), 18 deletions(-) diff --git a/.github/workflows/LHCI-assert-on-budget-github-notification.yml b/.github/workflows/LHCI-assert-on-budget-github-notification.yml index d5bb6c36d..29295dbd3 100644 --- a/.github/workflows/LHCI-assert-on-budget-github-notification.yml +++ b/.github/workflows/LHCI-assert-on-budget-github-notification.yml @@ -13,4 +13,3 @@ jobs: budgetPath: '.github/lighthouse/budget.json' applicationGithubToken: ${{ secrets.GITHUB_TOKEN }} personalGithubToken: ${{ secrets.PERSONAL_GITHUB_TOKEN }} - logLevel: 'info' \ No newline at end of file diff --git a/.github/workflows/LHCI-assert-on-budget-notification.yml b/.github/workflows/LHCI-assert-on-budget-notification.yml index b3f526288..d8453a935 100644 --- a/.github/workflows/LHCI-assert-on-budget-notification.yml +++ b/.github/workflows/LHCI-assert-on-budget-notification.yml @@ -14,4 +14,3 @@ jobs: slackWebhookUrl: ${{ secrets.SLACK_WEBHOOK_URL }} applicationGithubToken: ${{ secrets.GITHUB_TOKEN }} personalGithubToken: ${{ secrets.PERSONAL_GITHUB_TOKEN }} - logLevel: 'error' \ No newline at end of file diff --git a/.github/workflows/LHCI-assert-on-budget-slack-notification.yml b/.github/workflows/LHCI-assert-on-budget-slack-notification.yml index d245c4062..166e0b374 100644 --- a/.github/workflows/LHCI-assert-on-budget-slack-notification.yml +++ b/.github/workflows/LHCI-assert-on-budget-slack-notification.yml @@ -13,4 +13,3 @@ jobs: budgetPath: '.github/lighthouse/budget.json' slackWebhookUrl: ${{ secrets.SLACK_WEBHOOK_URL }} githubToken: ${{ secrets.GITHUB_TOKEN }} - logLevel: 'error' \ No newline at end of file diff --git a/README.md b/README.md index 5d05f3d6d..d026bdb7d 100644 --- a/README.md +++ b/README.md @@ -164,15 +164,6 @@ slackWebhookUrl: ${{ secrets.SLACK_WEBHOOK_URL }} [Read more](#recipes) about detailed configuration. -### logLevel (default: 'info') - -Notifications (Github/Slack) log level. By default all notifications will be send. -Use `error` value to send notifications only for failed CI checks. - -```yml -logLevel: 'error' -``` - ## Recipes
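
For context on patch 01 above: `loadRcFile` from `@lhci/utils` parses both JSON and YAML rc files, which is what lets the fixture switch to `.yml` without any extra parsing code. A minimal sketch of the consuming side, mirroring the validation in `src/input.js` (the config path here is illustrative):

```js
const core = require('@actions/core')
const { loadRcFile } = require('@lhci/utils/src/lighthouserc')

// Works for both lighthouserc.json and lighthouserc.yml (illustrative path).
const rcFileObj = loadRcFile('.github/lighthouse/lighthouserc-static-dist-dir.yml')
if (!rcFileObj.ci) {
  core.setFailed(`Config missing top level 'ci' property`)
  process.exit(1)
}

// Shape matches the declaration added in src/types.d.ts.
const collect = rcFileObj.ci.collect || {}
const urls = collect.url // undefined when staticDistDir drives collection
const staticDistDir = collect.staticDistDir
```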
@@ -253,7 +244,6 @@ jobs: slackWebhookUrl: ${{ secrets.SLACK_WEBHOOK_URL }} applicationGithubToken: ${{ secrets.GITHUB_TOKEN }} personalGithubToken: ${{ secrets.PERSONAL_GITHUB_TOKEN }} - logLevel: 'error' ``` Make a `budget.json` file with [budgets syntax](https://web.dev/use-lighthouse-for-performance-budgets/). diff --git a/action.yml b/action.yml index f1be00338..043873973 100644 --- a/action.yml +++ b/action.yml @@ -14,9 +14,6 @@ inputs: descripton: 'Opt-in to saving LHRs to Lighthouse public-temporary-storage' slackWebhookUrl: description: 'Slack webhook url to post run results to slack' - logLevel: - description: 'Level of logging info to print to console' - default: 'info' applicationGithubToken: description: 'GitHub Application access token for ' personalGithubToken: diff --git a/src/output.js b/src/output.js index 6a6cd0b5a..126aa7575 100644 --- a/src/output.js +++ b/src/output.js @@ -44,9 +44,8 @@ const lhAssertResultsPath = join(resultsDirPath, 'assertion-results.json') async function sendNotifications({ status }) { try { const { slackWebhookUrl, applicationGithubToken, personalGithubToken } = input - const shouldRunOutput = input.logLevel === 'info' || (input.logLevel === 'error' && status) - if (!shouldRunOutput) { + if (!status) { return Promise.resolve() } From abf7f57a067b63a188bd0b40448becc8ed10fec6 Mon Sep 17 00:00:00 2001 From: Aleksey Kulikov Date: Mon, 9 Mar 2020 15:16:43 +0100 Subject: [PATCH 03/19] use githubToken and gistUploadToken --- ...I-assert-on-budget-github-notification.yml | 4 +- .../LHCI-assert-on-budget-notification.yml | 4 +- CONTRIBUTING.md | 6 +- README.md | 56 ++++++++++--------- action.yml | 13 +++-- src/input.js | 4 +- src/output.js | 10 ++-- 7 files changed, 49 insertions(+), 48 deletions(-) diff --git a/.github/workflows/LHCI-assert-on-budget-github-notification.yml b/.github/workflows/LHCI-assert-on-budget-github-notification.yml index 29295dbd3..355007ee0 100644 --- a/.github/workflows/LHCI-assert-on-budget-github-notification.yml +++ b/.github/workflows/LHCI-assert-on-budget-github-notification.yml @@ -11,5 +11,5 @@ jobs: with: urls: 'https://alekseykulikov.com/' budgetPath: '.github/lighthouse/budget.json' - applicationGithubToken: ${{ secrets.GITHUB_TOKEN }} - personalGithubToken: ${{ secrets.PERSONAL_GITHUB_TOKEN }} + githubToken: ${{ secrets.GITHUB_TOKEN }} + gistUploadToken: ${{ secrets.GIST_UPLOAD_TOKEN }} diff --git a/.github/workflows/LHCI-assert-on-budget-notification.yml b/.github/workflows/LHCI-assert-on-budget-notification.yml index d8453a935..8a98ba376 100644 --- a/.github/workflows/LHCI-assert-on-budget-notification.yml +++ b/.github/workflows/LHCI-assert-on-budget-notification.yml @@ -12,5 +12,5 @@ jobs: urls: 'https://alekseykulikov.com/' budgetPath: '.github/lighthouse/budget.json' slackWebhookUrl: ${{ secrets.SLACK_WEBHOOK_URL }} - applicationGithubToken: ${{ secrets.GITHUB_TOKEN }} - personalGithubToken: ${{ secrets.PERSONAL_GITHUB_TOKEN }} + githubToken: ${{ secrets.GITHUB_TOKEN }} + gistUploadToken: ${{ secrets.GIST_UPLOAD_TOKEN }} diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 60a74bc85..2a42e45f3 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -45,8 +45,6 @@ INPUT_CONFIGPATH=".github/lighthouse/lighthouserc-static-dist-dir.yml" INPUT_RUN # run with Slack integration # some of env variables mocked from GitHub ENV - https://help.github.com/en/actions/automating-your-workflow-with-github-actions/using-environment-variables -INPUT_URLS="https://alekseykulikov.com/" 
INPUT_BUDGETPATH=".github/lighthouse/impossible-budget.json" INPUT_RUNS="1" INPUT_SLACKWEBHOOKURL="custom-webhook-url" INPUT_PERSONALGITHUBTOKEN="github-token" INPUT_APPLICATIONGITHUBTOKEN="github-token" INPUT_NOTIFICATIONS='slack' GITHUB_REPOSITORY="repo-name" GITHUB_SHA="githib-pr-head-sha" node src/index.js -INPUT_URLS="https://alekseykulikov.com/" INPUT_BUDGETPATH=".github/lighthouse/impossible-budget.json" INPUT_RUNS="1" INPUT_SLACKWEBHOOKURL="custom-webhook-url" INPUT_PERSONALGITHUBTOKEN="github-github" INPUT_APPLICATIONGITHUBTOKEN="github-token" INPUT_NOTIFICATIONS='slack' GITHUB_REPOSITORY="repo-name" GITHUB_SHA="githib-pr-head-sha" node src/index.js - -> INPUT_APPLICATIONGITHUBTOKEN requers setup a Github Application etc. +INPUT_URLS="https://alekseykulikov.com/" INPUT_BUDGETPATH=".github/lighthouse/impossible-budget.json" INPUT_RUNS="1" INPUT_SLACKWEBHOOKURL="custom-webhook-url" INPUT_GISTUPLOADTOKEN="github-token" INPUT_GITHUBTOKEN="github-token" INPUT_NOTIFICATIONS='slack' GITHUB_REPOSITORY="repo-name" GITHUB_SHA="githib-pr-head-sha" node src/index.js +INPUT_URLS="https://alekseykulikov.com/" INPUT_BUDGETPATH=".github/lighthouse/impossible-budget.json" INPUT_RUNS="1" INPUT_SLACKWEBHOOKURL="custom-webhook-url" INPUT_GISTUPLOADTOKEN="github-github" INPUT_GITHUBTOKEN="github-token" INPUT_NOTIFICATIONS='slack' GITHUB_REPOSITORY="repo-name" GITHUB_SHA="githib-pr-head-sha" node src/index.js ``` diff --git a/README.md b/README.md index d026bdb7d..5e667df81 100644 --- a/README.md +++ b/README.md @@ -82,18 +82,40 @@ urls: | #### `temporaryPublicStorage` (default: false) -All results are private by default. Use this option to upload reports to LHCI's `temporary-public-storage`. You can find out more about `temporary-public-storage` in the [LHCI repo](https://github.com/GoogleChrome/lighthouse-ci/blob/master/docs/cli.md#upload). +Upload reports to the [_temporary public storage_](https://github.com/GoogleChrome/lighthouse-ci/blob/master/docs/getting-started.md#collect-lighthouse-results). + +> **Note**: As the name implies, this is temporary and public storage. If you're uncomfortable with the idea of your Lighthouse reports being stored +> on a public URL on Google Cloud, use a private [LHCI server](#upload) or [Gist](). Reports are automatically deleted 7 days after upload. ```yml temporaryPublicStorage: true ``` +#### `githubToken` + +Token to allow runs Github check suite. +By default for Action environment it's allowed via `${{ secrets.GITHUB_TOKEN }}` without any additional setup. + +```yml +githubToken: ${{ secrets.GITHUB_TOKEN }} +``` + +### slackWebhookUrl + +Allows to send notification in [Slack](https://slack.com/intl/en-ua/) channel. +Visit Slack Incoming Webhooks [docs](https://api.slack.com/messaging/webhooks#create_a_webhook) and follow step provided there. +Then copy `webhookUrl` value and set it up via [Github secrets](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets#creating-encrypted-secrets) to keep your url hidden! + +```yml +slackWebhookUrl: ${{ secrets.SLACK_WEBHOOK_URL }} +``` + #### `runs` (default: 1) Specify the number of runs to do on each URL. > **Note**: Asserting against a single run can lead to flaky performance assertions. -> Use `1` only to ensure static audits like Lighthouse scores or page size. +> Use `1` only to ensure static audits like Lighthouse scores, page size, or performance budgets. 

```yml
runs: 3
```

@@ -131,39 +153,19 @@ upload.serverBaseUrl: ${{ secrets.LHCI_SERVER }}
 upload.token: ${{ secrets.LHCI_TOKEN }}
 ```

-Specify an API token for the LHCI server. [Learn how to generate a token](https://github.com/GoogleChrome/lighthouse-ci/blob/master/docs/getting-started.md#historical-reports--diffing-lighthouse-ci-server).
-
-#### `applicationGithubToken`
-
-Token to allow runs Github check suite. By default for Action environment it's allowed via `${{ secrets.GITHUB_TOKEN }}` without any additional setup.
-
-```yml
-applicationGithubToken: ${{ secrets.GITHUB_TOKEN }}
-```
+#### `gistUploadToken`

-#### `personalGithubToken`
+Personal Github token that allows the Action to upload results to your secret [gist](https://help.github.com/en/enterprise/2.13/user/articles/about-gists) and link the report directly in the notification. The Action uploads results to your gist, takes the gist id, and composes a report URL with the [Lighthouse Report Viewer](https://googlechrome.github.io/lighthouse/viewer/).

```yml
-personalGithubToken: ${{ secrets.PERSONAL_GITHUB_TOKEN }}
+gistUploadToken: ${{ secrets.GIST_UPLOAD_TOKEN }}
```

> **Note**: Use [Github secrets](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets#creating-encrypted-secrets) to keep your token hidden!

-### slackWebhookUrl
-
-Allows to send notification in [Slack](https://slack.com/intl/en-ua/) channel.
-Visit Slack Incoming Webhooks [docs](https://api.slack.com/messaging/webhooks#create_a_webhook) and follow step provided there.
-Then copy `webhookUrl` value and set it up via [Github secrets](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets#creating-encrypted-secrets) to keep your url hidden!
-
-```yml
-slackWebhookUrl: ${{ secrets.SLACK_WEBHOOK_URL }}
-```
-
-[Read more](#recipes) about detailed configuration.
-
 ## Recipes
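
For context on the `gistUploadToken` flow described above: the action creates a secret gist with the Lighthouse result (LHR) and links it through the Lighthouse Report Viewer. A rough sketch using the `@actions/github` v2 client already in the action's dependencies (the function and file names are illustrative, not the action's verbatim code):

```js
const { GitHub } = require('@actions/github')

// Sketch: upload one Lighthouse result and build a viewer link for notifications.
async function uploadResultToGist(gistUploadToken, lhr) {
  const octokit = new GitHub(gistUploadToken)
  const gist = await octokit.gists.create({
    public: false, // secret gist: only reachable by the generated link
    files: { 'lhr.json': { content: JSON.stringify(lhr) } }
  })
  // The viewer renders any LHR stored in a gist, addressed by gist id.
  return `https://googlechrome.github.io/lighthouse/viewer/?gist=${gist.data.id}`
}
```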
@@ -242,8 +244,8 @@ jobs: urls: 'https://alekseykulikov.com/' budgetPath: '.github/lighthouse/budget.json' slackWebhookUrl: ${{ secrets.SLACK_WEBHOOK_URL }} - applicationGithubToken: ${{ secrets.GITHUB_TOKEN }} - personalGithubToken: ${{ secrets.PERSONAL_GITHUB_TOKEN }} + githubToken: ${{ secrets.GITHUB_TOKEN }} + gistUploadToken: ${{ secrets.GIST_UPLOAD_TOKEN }} ``` Make a `budget.json` file with [budgets syntax](https://web.dev/use-lighthouse-for-performance-budgets/). diff --git a/action.yml b/action.yml index 043873973..8ba8bde81 100644 --- a/action.yml +++ b/action.yml @@ -11,13 +11,14 @@ inputs: configPath: description: 'Path to a LHCI lighthouserc.json file' temporaryPublicStorage: - descripton: 'Opt-in to saving LHRs to Lighthouse public-temporary-storage' + descripton: 'Opt-in to saving Lighthouse results to temporary public storage' + default: false slackWebhookUrl: - description: 'Slack webhook url to post run results to slack' - applicationGithubToken: - description: 'GitHub Application access token for ' - personalGithubToken: - description: 'GitHub access token' + description: 'Slack webhook url to send failed results to Slack' + githubToken: + description: 'Shared github token to create a debug check' + gistUploadToken: + description: 'GitHub access token to upload results to Gist' upload.serverBaseUrl: description: 'Address of a LHCI server' upload.token: diff --git a/src/input.js b/src/input.js index eddd94d4a..1e5249c63 100644 --- a/src/input.js +++ b/src/input.js @@ -67,8 +67,8 @@ function getArgs() { slackWebhookUrl: getArg('slackWebhookUrl'), logLevel: logLevel ? logLevel : 'info', numberOfRuns: getIntArg('runs'), - applicationGithubToken: getArg('applicationGithubToken'), - personalGithubToken: getArg('personalGithubToken'), + githubToken: getArg('githubToken'), + gistUploadToken: getArg('gistUploadToken'), serverBaseUrl, token, rcCollect, diff --git a/src/output.js b/src/output.js index 126aa7575..1b347caed 100644 --- a/src/output.js +++ b/src/output.js @@ -43,27 +43,27 @@ const lhAssertResultsPath = join(resultsDirPath, 'assertion-results.json') */ async function sendNotifications({ status }) { try { - const { slackWebhookUrl, applicationGithubToken, personalGithubToken } = input + const { slackWebhookUrl, githubToken, gistUploadToken } = input if (!status) { return Promise.resolve() } const slackEnabled = slackWebhookUrl - const githubEnabled = applicationGithubToken + const githubEnabled = githubToken /** * @type {[ LHResultsByURL, ChangesURL, Gist[] ]} */ const [groupedResults, changesURL, gists] = await Promise.all([ getGroupedAssertionResultsByURL(), - getChangesUrl({ githubToken: personalGithubToken }), + getChangesUrl({ githubToken: gistUploadToken }), // keep uploading as part of Promise all instead of separate request - uploadResultsToGist({ githubToken: personalGithubToken }) + uploadResultsToGist({ githubToken: gistUploadToken }) ]) const slackData = { status, slackWebhookUrl, changesURL, gists, groupedResults } - const githubData = { status, githubToken: applicationGithubToken, changesURL, gists, groupedResults } + const githubData = { status, githubToken: githubToken, changesURL, gists, groupedResults } if (githubEnabled) { try { From 6e672d05f6dd21065c9c392c70063e096bca4775 Mon Sep 17 00:00:00 2001 From: Aleksey Kulikov Date: Mon, 9 Mar 2020 16:38:27 +0100 Subject: [PATCH 04/19] refactoring: - use @actions/exec instead of node's fs - make input.js a function + map names to action arguments - move LH plugins support to a separate file - use 
@actions/core for all logging - scope status & args to each command - start to separate upload & notifying --- README.md | 2 +- node_modules/@actions/exec/lib/exec.js | 37 ++ node_modules/@actions/exec/lib/interfaces.js | 3 + node_modules/@actions/exec/lib/toolrunner.js | 587 +++++++++++++++++++ node_modules/@actions/exec/package.json | 40 ++ node_modules/@actions/io/lib/io-util.js | 195 ++++++ node_modules/@actions/io/lib/io.js | 290 +++++++++ node_modules/@actions/io/package.json | 37 ++ package.json | 1 + src/index.js | 118 ++-- src/input.js | 27 +- src/support-lh-plugins.js | 13 + src/types.d.ts | 2 +- yarn.lock | 12 + 14 files changed, 1275 insertions(+), 89 deletions(-) create mode 100644 node_modules/@actions/exec/lib/exec.js create mode 100644 node_modules/@actions/exec/lib/interfaces.js create mode 100644 node_modules/@actions/exec/lib/toolrunner.js create mode 100644 node_modules/@actions/exec/package.json create mode 100644 node_modules/@actions/io/lib/io-util.js create mode 100644 node_modules/@actions/io/lib/io.js create mode 100644 node_modules/@actions/io/package.json create mode 100644 src/support-lh-plugins.js diff --git a/README.md b/README.md index 5e667df81..9c836a3b1 100644 --- a/README.md +++ b/README.md @@ -68,7 +68,7 @@ URLs support interpolation of process env vars so that you can write URLs like: ## Inputs -#### `urls` (required) +#### `urls` Provide the list of URLs separated by a new line. Each URL is audited using the latest version of Lighthouse and Chrome preinstalled on the environment. diff --git a/node_modules/@actions/exec/lib/exec.js b/node_modules/@actions/exec/lib/exec.js new file mode 100644 index 000000000..2748debcc --- /dev/null +++ b/node_modules/@actions/exec/lib/exec.js @@ -0,0 +1,37 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const tr = require("./toolrunner"); +/** + * Exec a command. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param args optional arguments for tool. Escaping is handled by the lib. + * @param options optional exec options. 
See ExecOptions + * @returns Promise exit code + */ +function exec(commandLine, args, options) { + return __awaiter(this, void 0, void 0, function* () { + const commandArgs = tr.argStringToArray(commandLine); + if (commandArgs.length === 0) { + throw new Error(`Parameter 'commandLine' cannot be null or empty.`); + } + // Path to tool to execute should be first arg + const toolPath = commandArgs[0]; + args = commandArgs.slice(1).concat(args || []); + const runner = new tr.ToolRunner(toolPath, args, options); + return runner.exec(); + }); +} +exports.exec = exec; +//# sourceMappingURL=exec.js.map \ No newline at end of file diff --git a/node_modules/@actions/exec/lib/interfaces.js b/node_modules/@actions/exec/lib/interfaces.js new file mode 100644 index 000000000..db9191150 --- /dev/null +++ b/node_modules/@actions/exec/lib/interfaces.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@actions/exec/lib/toolrunner.js b/node_modules/@actions/exec/lib/toolrunner.js new file mode 100644 index 000000000..cbb433d11 --- /dev/null +++ b/node_modules/@actions/exec/lib/toolrunner.js @@ -0,0 +1,587 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const os = require("os"); +const events = require("events"); +const child = require("child_process"); +const path = require("path"); +const io = require("@actions/io"); +const ioUtil = require("@actions/io/lib/io-util"); +/* eslint-disable @typescript-eslint/unbound-method */ +const IS_WINDOWS = process.platform === 'win32'; +/* + * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. + */ +class ToolRunner extends events.EventEmitter { + constructor(toolPath, args, options) { + super(); + if (!toolPath) { + throw new Error("Parameter 'toolPath' cannot be null or empty."); + } + this.toolPath = toolPath; + this.args = args || []; + this.options = options || {}; + } + _debug(message) { + if (this.options.listeners && this.options.listeners.debug) { + this.options.listeners.debug(message); + } + } + _getCommandString(options, noPrefix) { + const toolPath = this._getSpawnFileName(); + const args = this._getSpawnArgs(options); + let cmd = noPrefix ? 
'' : '[command]'; // omit prefix when piped to a second tool + if (IS_WINDOWS) { + // Windows + cmd file + if (this._isCmdFile()) { + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows + verbatim + else if (options.windowsVerbatimArguments) { + cmd += `"${toolPath}"`; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows (regular) + else { + cmd += this._windowsQuoteCmdArg(toolPath); + for (const a of args) { + cmd += ` ${this._windowsQuoteCmdArg(a)}`; + } + } + } + else { + // OSX/Linux - this can likely be improved with some form of quoting. + // creating processes on Unix is fundamentally different than Windows. + // on Unix, execvp() takes an arg array. + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + return cmd; + } + _processLineBuffer(data, strBuffer, onLine) { + try { + let s = strBuffer + data.toString(); + let n = s.indexOf(os.EOL); + while (n > -1) { + const line = s.substring(0, n); + onLine(line); + // the rest of the string ... + s = s.substring(n + os.EOL.length); + n = s.indexOf(os.EOL); + } + strBuffer = s; + } + catch (err) { + // streaming lines to console is best effort. Don't fail a build. + this._debug(`error processing line. Failed with error ${err}`); + } + } + _getSpawnFileName() { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + return process.env['COMSPEC'] || 'cmd.exe'; + } + } + return this.toolPath; + } + _getSpawnArgs(options) { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; + for (const a of this.args) { + argline += ' '; + argline += options.windowsVerbatimArguments + ? a + : this._windowsQuoteCmdArg(a); + } + argline += '"'; + return [argline]; + } + } + return this.args; + } + _endsWith(str, end) { + return str.endsWith(end); + } + _isCmdFile() { + const upperToolPath = this.toolPath.toUpperCase(); + return (this._endsWith(upperToolPath, '.CMD') || + this._endsWith(upperToolPath, '.BAT')); + } + _windowsQuoteCmdArg(arg) { + // for .exe, apply the normal quoting rules that libuv applies + if (!this._isCmdFile()) { + return this._uvQuoteCmdArg(arg); + } + // otherwise apply quoting rules specific to the cmd.exe command line parser. + // the libuv rules are generic and are not designed specifically for cmd.exe + // command line parser. + // + // for a detailed description of the cmd.exe command line parser, refer to + // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 + // need quotes for empty arg + if (!arg) { + return '""'; + } + // determine whether the arg needs to be quoted + const cmdSpecialChars = [ + ' ', + '\t', + '&', + '(', + ')', + '[', + ']', + '{', + '}', + '^', + '=', + ';', + '!', + "'", + '+', + ',', + '`', + '~', + '|', + '<', + '>', + '"' + ]; + let needsQuotes = false; + for (const char of arg) { + if (cmdSpecialChars.some(x => x === char)) { + needsQuotes = true; + break; + } + } + // short-circuit if quotes not needed + if (!needsQuotes) { + return arg; + } + // the following quoting rules are very similar to the rules that by libuv applies. + // + // 1) wrap the string in quotes + // + // 2) double-up quotes - i.e. " => "" + // + // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately + // doesn't work well with a cmd.exe command line. + // + // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. 
+ // for example, the command line: + // foo.exe "myarg:""my val""" + // is parsed by a .NET console app into an arg array: + // [ "myarg:\"my val\"" ] + // which is the same end result when applying libuv quoting rules. although the actual + // command line from libuv quoting rules would look like: + // foo.exe "myarg:\"my val\"" + // + // 3) double-up slashes that precede a quote, + // e.g. hello \world => "hello \world" + // hello\"world => "hello\\""world" + // hello\\"world => "hello\\\\""world" + // hello world\ => "hello world\\" + // + // technically this is not required for a cmd.exe command line, or the batch argument parser. + // the reasons for including this as a .cmd quoting rule are: + // + // a) this is optimized for the scenario where the argument is passed from the .cmd file to an + // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. + // + // b) it's what we've been doing previously (by deferring to node default behavior) and we + // haven't heard any complaints about that aspect. + // + // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be + // escaped when used on the command line directly - even though within a .cmd file % can be escaped + // by using %%. + // + // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts + // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. + // + // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would + // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the + // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args + // to an external program. + // + // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. + // % can be escaped within a .cmd file. + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; // double the slash + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '"'; // double the quote + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _uvQuoteCmdArg(arg) { + // Tool runner wraps child_process.spawn() and needs to apply the same quoting as + // Node in certain cases where the undocumented spawn option windowsVerbatimArguments + // is used. + // + // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, + // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), + // pasting copyright notice from Node within this function: + // + // Copyright Joyent, Inc. and other Node contributors. All rights reserved. 
+ // + // Permission is hereby granted, free of charge, to any person obtaining a copy + // of this software and associated documentation files (the "Software"), to + // deal in the Software without restriction, including without limitation the + // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + // sell copies of the Software, and to permit persons to whom the Software is + // furnished to do so, subject to the following conditions: + // + // The above copyright notice and this permission notice shall be included in + // all copies or substantial portions of the Software. + // + // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + // IN THE SOFTWARE. + if (!arg) { + // Need double quotation for empty argument + return '""'; + } + if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { + // No quotation needed + return arg; + } + if (!arg.includes('"') && !arg.includes('\\')) { + // No embedded double quotes or backslashes, so I can just wrap + // quote marks around the whole thing. + return `"${arg}"`; + } + // Expected input/output: + // input : hello"world + // output: "hello\"world" + // input : hello""world + // output: "hello\"\"world" + // input : hello\world + // output: hello\world + // input : hello\\world + // output: hello\\world + // input : hello\"world + // output: "hello\\\"world" + // input : hello\\"world + // output: "hello\\\\\"world" + // input : hello world\ + // output: "hello world\\" - note the comment in libuv actually reads "hello world\" + // but it appears the comment is wrong, it should be "hello world\\" + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '\\'; + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _cloneExecOptions(options) { + options = options || {}; + const result = { + cwd: options.cwd || process.cwd(), + env: options.env || process.env, + silent: options.silent || false, + windowsVerbatimArguments: options.windowsVerbatimArguments || false, + failOnStdErr: options.failOnStdErr || false, + ignoreReturnCode: options.ignoreReturnCode || false, + delay: options.delay || 10000 + }; + result.outStream = options.outStream || process.stdout; + result.errStream = options.errStream || process.stderr; + return result; + } + _getSpawnOptions(options, toolPath) { + options = options || {}; + const result = {}; + result.cwd = options.cwd; + result.env = options.env; + result['windowsVerbatimArguments'] = + options.windowsVerbatimArguments || this._isCmdFile(); + if (options.windowsVerbatimArguments) { + result.argv0 = `"${toolPath}"`; + } + return result; + } + /** + * Exec a tool. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param tool path to tool to exec + * @param options optional exec options. 
See ExecOptions + * @returns number + */ + exec() { + return __awaiter(this, void 0, void 0, function* () { + // root the tool path if it is unrooted and contains relative pathing + if (!ioUtil.isRooted(this.toolPath) && + (this.toolPath.includes('/') || + (IS_WINDOWS && this.toolPath.includes('\\')))) { + // prefer options.cwd if it is specified, however options.cwd may also need to be rooted + this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + } + // if the tool is only a file name, then resolve it from the PATH + // otherwise verify it exists (add extension on Windows if necessary) + this.toolPath = yield io.which(this.toolPath, true); + return new Promise((resolve, reject) => { + this._debug(`exec tool: ${this.toolPath}`); + this._debug('arguments:'); + for (const arg of this.args) { + this._debug(` ${arg}`); + } + const optionsNonNull = this._cloneExecOptions(this.options); + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); + } + const state = new ExecState(optionsNonNull, this.toolPath); + state.on('debug', (message) => { + this._debug(message); + }); + const fileName = this._getSpawnFileName(); + const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); + const stdbuffer = ''; + if (cp.stdout) { + cp.stdout.on('data', (data) => { + if (this.options.listeners && this.options.listeners.stdout) { + this.options.listeners.stdout(data); + } + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(data); + } + this._processLineBuffer(data, stdbuffer, (line) => { + if (this.options.listeners && this.options.listeners.stdline) { + this.options.listeners.stdline(line); + } + }); + }); + } + const errbuffer = ''; + if (cp.stderr) { + cp.stderr.on('data', (data) => { + state.processStderr = true; + if (this.options.listeners && this.options.listeners.stderr) { + this.options.listeners.stderr(data); + } + if (!optionsNonNull.silent && + optionsNonNull.errStream && + optionsNonNull.outStream) { + const s = optionsNonNull.failOnStdErr + ? optionsNonNull.errStream + : optionsNonNull.outStream; + s.write(data); + } + this._processLineBuffer(data, errbuffer, (line) => { + if (this.options.listeners && this.options.listeners.errline) { + this.options.listeners.errline(line); + } + }); + }); + } + cp.on('error', (err) => { + state.processError = err.message; + state.processExited = true; + state.processClosed = true; + state.CheckComplete(); + }); + cp.on('exit', (code) => { + state.processExitCode = code; + state.processExited = true; + this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); + state.CheckComplete(); + }); + cp.on('close', (code) => { + state.processExitCode = code; + state.processExited = true; + state.processClosed = true; + this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); + state.CheckComplete(); + }); + state.on('done', (error, exitCode) => { + if (stdbuffer.length > 0) { + this.emit('stdline', stdbuffer); + } + if (errbuffer.length > 0) { + this.emit('errline', errbuffer); + } + cp.removeAllListeners(); + if (error) { + reject(error); + } + else { + resolve(exitCode); + } + }); + }); + }); + } +} +exports.ToolRunner = ToolRunner; +/** + * Convert an arg string to an array of args. 
Handles escaping + * + * @param argString string of arguments + * @returns string[] array of arguments + */ +function argStringToArray(argString) { + const args = []; + let inQuotes = false; + let escaped = false; + let arg = ''; + function append(c) { + // we only escape double quotes. + if (escaped && c !== '"') { + arg += '\\'; + } + arg += c; + escaped = false; + } + for (let i = 0; i < argString.length; i++) { + const c = argString.charAt(i); + if (c === '"') { + if (!escaped) { + inQuotes = !inQuotes; + } + else { + append(c); + } + continue; + } + if (c === '\\' && escaped) { + append(c); + continue; + } + if (c === '\\' && inQuotes) { + escaped = true; + continue; + } + if (c === ' ' && !inQuotes) { + if (arg.length > 0) { + args.push(arg); + arg = ''; + } + continue; + } + append(c); + } + if (arg.length > 0) { + args.push(arg.trim()); + } + return args; +} +exports.argStringToArray = argStringToArray; +class ExecState extends events.EventEmitter { + constructor(options, toolPath) { + super(); + this.processClosed = false; // tracks whether the process has exited and stdio is closed + this.processError = ''; + this.processExitCode = 0; + this.processExited = false; // tracks whether the process has exited + this.processStderr = false; // tracks whether stderr was written to + this.delay = 10000; // 10 seconds + this.done = false; + this.timeout = null; + if (!toolPath) { + throw new Error('toolPath must not be empty'); + } + this.options = options; + this.toolPath = toolPath; + if (options.delay) { + this.delay = options.delay; + } + } + CheckComplete() { + if (this.done) { + return; + } + if (this.processClosed) { + this._setResult(); + } + else if (this.processExited) { + this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this); + } + } + _debug(message) { + this.emit('debug', message); + } + _setResult() { + // determine whether there is an error + let error; + if (this.processExited) { + if (this.processError) { + error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`); + } + else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { + error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); + } + else if (this.processStderr && this.options.failOnStdErr) { + error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); + } + } + // clear the timeout + if (this.timeout) { + clearTimeout(this.timeout); + this.timeout = null; + } + this.done = true; + this.emit('done', error, this.processExitCode); + } + static HandleTimeout(state) { + if (state.done) { + return; + } + if (!state.processClosed && state.processExited) { + const message = `The STDIO streams did not close within ${state.delay / + 1000} seconds of the exit event from process '${state.toolPath}'. 
This may indicate a child process inherited the STDIO streams and has not yet exited.`; + state._debug(message); + } + state._setResult(); + } +} +//# sourceMappingURL=toolrunner.js.map \ No newline at end of file diff --git a/node_modules/@actions/exec/package.json b/node_modules/@actions/exec/package.json new file mode 100644 index 000000000..1b8078fda --- /dev/null +++ b/node_modules/@actions/exec/package.json @@ -0,0 +1,40 @@ +{ + "name": "@actions/exec", + "version": "1.0.3", + "description": "Actions exec lib", + "keywords": [ + "github", + "actions", + "exec" + ], + "homepage": "https://github.com/actions/toolkit/tree/master/packages/exec", + "license": "MIT", + "main": "lib/exec.js", + "types": "lib/exec.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib" + ], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/exec" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --audit-level=moderate", + "test": "echo \"Error: run tests from root\" && exit 1", + "tsc": "tsc" + }, + "bugs": { + "url": "https://github.com/actions/toolkit/issues" + }, + "dependencies": { + "@actions/io": "^1.0.1" + } +} diff --git a/node_modules/@actions/io/lib/io-util.js b/node_modules/@actions/io/lib/io-util.js new file mode 100644 index 000000000..17b3bba58 --- /dev/null +++ b/node_modules/@actions/io/lib/io-util.js @@ -0,0 +1,195 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var _a; +Object.defineProperty(exports, "__esModule", { value: true }); +const assert_1 = require("assert"); +const fs = require("fs"); +const path = require("path"); +_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; +exports.IS_WINDOWS = process.platform === 'win32'; +function exists(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield exports.stat(fsPath); + } + catch (err) { + if (err.code === 'ENOENT') { + return false; + } + throw err; + } + return true; + }); +} +exports.exists = exists; +function isDirectory(fsPath, useStat = false) { + return __awaiter(this, void 0, void 0, function* () { + const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); + return stats.isDirectory(); + }); +} +exports.isDirectory = isDirectory; +/** + * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: + * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). 
+ */ +function isRooted(p) { + p = normalizeSeparators(p); + if (!p) { + throw new Error('isRooted() parameter "p" cannot be empty'); + } + if (exports.IS_WINDOWS) { + return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello + ); // e.g. C: or C:\hello + } + return p.startsWith('/'); +} +exports.isRooted = isRooted; +/** + * Recursively create a directory at `fsPath`. + * + * This implementation is optimistic, meaning it attempts to create the full + * path first, and backs up the path stack from there. + * + * @param fsPath The path to create + * @param maxDepth The maximum recursion depth + * @param depth The current recursion depth + */ +function mkdirP(fsPath, maxDepth = 1000, depth = 1) { + return __awaiter(this, void 0, void 0, function* () { + assert_1.ok(fsPath, 'a path argument must be provided'); + fsPath = path.resolve(fsPath); + if (depth >= maxDepth) + return exports.mkdir(fsPath); + try { + yield exports.mkdir(fsPath); + return; + } + catch (err) { + switch (err.code) { + case 'ENOENT': { + yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1); + yield exports.mkdir(fsPath); + return; + } + default: { + let stats; + try { + stats = yield exports.stat(fsPath); + } + catch (err2) { + throw err; + } + if (!stats.isDirectory()) + throw err; + } + } + } + }); +} +exports.mkdirP = mkdirP; +/** + * Best effort attempt to determine whether a file exists and is executable. + * @param filePath file path to check + * @param extensions additional file extensions to try + * @return if file exists and is executable, returns the file path. otherwise empty string. + */ +function tryGetExecutablePath(filePath, extensions) { + return __awaiter(this, void 0, void 0, function* () { + let stats = undefined; + try { + // test file exists + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // on Windows, test for valid extension + const upperExt = path.extname(filePath).toUpperCase(); + if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { + return filePath; + } + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + // try each extension + const originalFilePath = filePath; + for (const extension of extensions) { + filePath = originalFilePath + extension; + stats = undefined; + try { + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // preserve the case of the actual file (since an extension was appended) + try { + const directory = path.dirname(filePath); + const upperName = path.basename(filePath).toUpperCase(); + for (const actualName of yield exports.readdir(directory)) { + if (upperName === actualName.toUpperCase()) { + filePath = path.join(directory, actualName); + break; + } + } + } + catch (err) { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); + } + return filePath; + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + } + return ''; + }); +} +exports.tryGetExecutablePath = 
tryGetExecutablePath; +function normalizeSeparators(p) { + p = p || ''; + if (exports.IS_WINDOWS) { + // convert slashes on Windows + p = p.replace(/\//g, '\\'); + // remove redundant slashes + return p.replace(/\\\\+/g, '\\'); + } + // remove redundant slashes + return p.replace(/\/\/+/g, '/'); +} +// on Mac/Linux, test the execute bit +// R W X R W X R W X +// 256 128 64 32 16 8 4 2 1 +function isUnixExecutable(stats) { + return ((stats.mode & 1) > 0 || + ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || + ((stats.mode & 64) > 0 && stats.uid === process.getuid())); +} +//# sourceMappingURL=io-util.js.map \ No newline at end of file diff --git a/node_modules/@actions/io/lib/io.js b/node_modules/@actions/io/lib/io.js new file mode 100644 index 000000000..ad5bdb926 --- /dev/null +++ b/node_modules/@actions/io/lib/io.js @@ -0,0 +1,290 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const childProcess = require("child_process"); +const path = require("path"); +const util_1 = require("util"); +const ioUtil = require("./io-util"); +const exec = util_1.promisify(childProcess.exec); +/** + * Copies a file or folder. + * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js + * + * @param source source path + * @param dest destination path + * @param options optional. See CopyOptions. + */ +function cp(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + const { force, recursive } = readCopyOptions(options); + const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; + // Dest is an existing file, but not forcing + if (destStat && destStat.isFile() && !force) { + return; + } + // If dest is an existing directory, should copy inside. + const newDest = destStat && destStat.isDirectory() + ? path.join(dest, path.basename(source)) + : dest; + if (!(yield ioUtil.exists(source))) { + throw new Error(`no such file or directory: ${source}`); + } + const sourceStat = yield ioUtil.stat(source); + if (sourceStat.isDirectory()) { + if (!recursive) { + throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`); + } + else { + yield cpDirRecursive(source, newDest, 0, force); + } + } + else { + if (path.relative(source, newDest) === '') { + // a file cannot be copied to itself + throw new Error(`'${newDest}' and '${source}' are the same file`); + } + yield copyFile(source, newDest, force); + } + }); +} +exports.cp = cp; +/** + * Moves a path. + * + * @param source source path + * @param dest destination path + * @param options optional. See MoveOptions. 
+ */ +function mv(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + if (yield ioUtil.exists(dest)) { + let destExists = true; + if (yield ioUtil.isDirectory(dest)) { + // If dest is directory copy src into dest + dest = path.join(dest, path.basename(source)); + destExists = yield ioUtil.exists(dest); + } + if (destExists) { + if (options.force == null || options.force) { + yield rmRF(dest); + } + else { + throw new Error('Destination already exists'); + } + } + } + yield mkdirP(path.dirname(dest)); + yield ioUtil.rename(source, dest); + }); +} +exports.mv = mv; +/** + * Remove a path recursively with force + * + * @param inputPath path to remove + */ +function rmRF(inputPath) { + return __awaiter(this, void 0, void 0, function* () { + if (ioUtil.IS_WINDOWS) { + // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another + // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del. + try { + if (yield ioUtil.isDirectory(inputPath, true)) { + yield exec(`rd /s /q "${inputPath}"`); + } + else { + yield exec(`del /f /a "${inputPath}"`); + } + } + catch (err) { + // if you try to delete a file that doesn't exist, desired result is achieved + // other errors are valid + if (err.code !== 'ENOENT') + throw err; + } + // Shelling out fails to remove a symlink folder with missing source, this unlink catches that + try { + yield ioUtil.unlink(inputPath); + } + catch (err) { + // if you try to delete a file that doesn't exist, desired result is achieved + // other errors are valid + if (err.code !== 'ENOENT') + throw err; + } + } + else { + let isDir = false; + try { + isDir = yield ioUtil.isDirectory(inputPath); + } + catch (err) { + // if you try to delete a file that doesn't exist, desired result is achieved + // other errors are valid + if (err.code !== 'ENOENT') + throw err; + return; + } + if (isDir) { + yield exec(`rm -rf "${inputPath}"`); + } + else { + yield ioUtil.unlink(inputPath); + } + } + }); +} +exports.rmRF = rmRF; +/** + * Make a directory. Creates the full path with folders in between + * Will throw if it fails + * + * @param fsPath path to create + * @returns Promise + */ +function mkdirP(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + yield ioUtil.mkdirP(fsPath); + }); +} +exports.mkdirP = mkdirP; +/** + * Returns path of a tool had the tool actually been invoked. Resolves via paths. + * If you check and the tool does not exist, it will throw. + * + * @param tool name of the tool + * @param check whether to check if tool exists + * @returns Promise path to tool + */ +function which(tool, check) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + // recursive when check=true + if (check) { + const result = yield which(tool, false); + if (!result) { + if (ioUtil.IS_WINDOWS) { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); + } + else { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. 
Also check the file mode to verify the file is executable.`); + } + } + } + try { + // build the list of extensions to try + const extensions = []; + if (ioUtil.IS_WINDOWS && process.env.PATHEXT) { + for (const extension of process.env.PATHEXT.split(path.delimiter)) { + if (extension) { + extensions.push(extension); + } + } + } + // if it's rooted, return it if exists. otherwise return empty. + if (ioUtil.isRooted(tool)) { + const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); + if (filePath) { + return filePath; + } + return ''; + } + // if any path separators, return empty + if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) { + return ''; + } + // build the list of directories + // + // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, + // it feels like we should not do this. Checking the current directory seems like more of a use + // case of a shell, and the which() function exposed by the toolkit should strive for consistency + // across platforms. + const directories = []; + if (process.env.PATH) { + for (const p of process.env.PATH.split(path.delimiter)) { + if (p) { + directories.push(p); + } + } + } + // return the first match + for (const directory of directories) { + const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions); + if (filePath) { + return filePath; + } + } + return ''; + } + catch (err) { + throw new Error(`which failed with message ${err.message}`); + } + }); +} +exports.which = which; +function readCopyOptions(options) { + const force = options.force == null ? true : options.force; + const recursive = Boolean(options.recursive); + return { force, recursive }; +} +function cpDirRecursive(sourceDir, destDir, currentDepth, force) { + return __awaiter(this, void 0, void 0, function* () { + // Ensure there is not a run away recursive copy + if (currentDepth >= 255) + return; + currentDepth++; + yield mkdirP(destDir); + const files = yield ioUtil.readdir(sourceDir); + for (const fileName of files) { + const srcFile = `${sourceDir}/${fileName}`; + const destFile = `${destDir}/${fileName}`; + const srcFileStat = yield ioUtil.lstat(srcFile); + if (srcFileStat.isDirectory()) { + // Recurse + yield cpDirRecursive(srcFile, destFile, currentDepth, force); + } + else { + yield copyFile(srcFile, destFile, force); + } + } + // Change the mode for the newly created directory + yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); + }); +} +// Buffered file copy +function copyFile(srcFile, destFile, force) { + return __awaiter(this, void 0, void 0, function* () { + if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { + // unlink/re-link it + try { + yield ioUtil.lstat(destFile); + yield ioUtil.unlink(destFile); + } + catch (e) { + // Try to override file permission + if (e.code === 'EPERM') { + yield ioUtil.chmod(destFile, '0666'); + yield ioUtil.unlink(destFile); + } + // other errors = it doesn't exist, no work to do + } + // Copy over symlink + const symlinkFull = yield ioUtil.readlink(srcFile); + yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 
'junction' : null); + } + else if (!(yield ioUtil.exists(destFile)) || force) { + yield ioUtil.copyFile(srcFile, destFile); + } + }); +} +//# sourceMappingURL=io.js.map \ No newline at end of file diff --git a/node_modules/@actions/io/package.json b/node_modules/@actions/io/package.json new file mode 100644 index 000000000..0fd128ef5 --- /dev/null +++ b/node_modules/@actions/io/package.json @@ -0,0 +1,37 @@ +{ + "name": "@actions/io", + "version": "1.0.2", + "description": "Actions io lib", + "keywords": [ + "github", + "actions", + "io" + ], + "homepage": "https://github.com/actions/toolkit/tree/master/packages/io", + "license": "MIT", + "main": "lib/io.js", + "types": "lib/io.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib" + ], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/io" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --audit-level=moderate", + "test": "echo \"Error: run tests from root\" && exit 1", + "tsc": "tsc" + }, + "bugs": { + "url": "https://github.com/actions/toolkit/issues" + } +} diff --git a/package.json b/package.json index 8e1028c36..ec8f96ac3 100644 --- a/package.json +++ b/package.json @@ -12,6 +12,7 @@ }, "dependencies": { "@actions/core": "^1.2.3", + "@actions/exec": "^1.0.3", "@actions/github": "^2.1.1", "@lhci/cli": "0.3.9", "@slack/webhook": "^5.0.2", diff --git a/src/index.js b/src/index.js index 38176d69d..608b9312a 100644 --- a/src/index.js +++ b/src/index.js @@ -1,126 +1,102 @@ -// Append the node_modules of the github workspace and the node_modules of this action -// to NODE_PATH. This supports lighthouse plugins - all the workspace needs to do is -// `npm install` the plugin. The copy of lighthouse within this action will be used. - -const nodePathDelim = require('is-windows')() ? ';' : ':' -const nodePathParts = [ - ...(process.env.NODE_PATH || '').split(nodePathDelim), - `${__dirname}/../node_modules`, - `${process.env.GITHUB_WORKSPACE}/node_modules` -] -process.env.NODE_PATH = nodePathParts.join(nodePathDelim) - +require('./support-lh-plugins') // add automatic support for LH Plugins env const core = require('@actions/core') -const childProcess = require('child_process') +const { join } = require('path') +const { exec } = require('@actions/exec') const lhciCliPath = require.resolve('@lhci/cli/src/cli.js') -const input = require('./input.js') -const output = require('./output.js') +const { getInputArgs } = require('./input.js') // audit urls with Lighthouse CI + async function main() { + core.setOutput('resultsPath', join(process.cwd(), '.lighthouserc')) + let assertStatus = 0 core.startGroup('Action config') - console.log('Input args:', input) + const input = getInputArgs() + core.info(`Input args: ${JSON.stringify(input, null, ' ')}`) core.endGroup() // Action config /*******************************COLLECTING***********************************/ core.startGroup(`Collecting`) - let args = [] + const collectArgs = ['collect'] if (input.staticDistDir) { - args.push(`--static-dist-dir=${input.staticDistDir}`) + collectArgs.push(`--static-dist-dir=${input.staticDistDir}`) } else if (input.urls) { for (const url of input.urls) { - args.push(`--url=${url}`) + collectArgs.push(`--url=${url}`) } } // else LHCI will panic with a non-zero exit code... 
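(For context on the `@actions/exec` switch above: `exec` runs a child process and resolves with its exit code, which is what replaces the old `childProcess.spawnSync` wrapper around the LHCI CLI. A minimal sketch of the pattern, under a couple of assumptions: the URL is illustrative, and `node` is spelled out here to keep the sketch self-contained, while the diff invokes the resolved CLI path directly. By default `exec` throws on a non-zero exit, so `ignoreReturnCode` is used to inspect the status instead:)

```js
const { exec } = require('@actions/exec')
const lhciCliPath = require.resolve('@lhci/cli/src/cli.js')

async function collect() {
  // Runs `node <cli.js> collect --url=...` and captures the exit code
  const status = await exec('node', [lhciCliPath, 'collect', '--url=https://example.com/'], {
    ignoreReturnCode: true
  })
  if (status !== 0) throw new Error(`LHCI 'collect' has encountered a problem.`)
}
```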
   if (input.rcCollect) {
-    args.push(`--config=${input.configPath}`)
+    collectArgs.push(`--config=${input.configPath}`) // This should only happen in local testing, when the default is not sent
   }
+
   // Command line args should override config files
-  if (input.numberOfRuns) {
-    args.push(`--numberOfRuns=${input.numberOfRuns}`)
+  if (input.runs) {
+    collectArgs.push(`--numberOfRuns=${input.runs}`)
   } // else, no args and will default to 3 in LHCI.

-  let status = await runChildCommand('collect', args)
-  if (status !== 0) {
-    throw new Error(`LHCI 'collect' has encountered a problem.`)
-  }
+  const collectStatus = await exec(lhciCliPath, collectArgs)
+  if (collectStatus !== 0) throw new Error(`LHCI 'collect' has encountered a problem.`)
   core.endGroup() // Collecting

   /*******************************ASSERTING************************************/
   if (input.budgetPath || input.rcAssert) {
     core.startGroup(`Asserting`)
-    args = []
+    const assertArgs = ['assert']
     if (input.budgetPath) {
-      args.push(`--budgetsFile=${input.budgetPath}`)
+      assertArgs.push(`--budgetsFile=${input.budgetPath}`)
     } else {
-      // @ts-ignore checked this already
-      args.push(`--config=${input.configPath}`)
+      assertArgs.push(`--config=${input.configPath}`)
     }
-    status = await runChildCommand('assert', args)
-
-    if (status !== 0) {
+    assertStatus = await exec(lhciCliPath, assertArgs)
+    if (assertStatus !== 0) {
       // TODO(exterkamp): Output what urls failed and record a nice rich error.
       core.setFailed(`Assertions have failed.`)
-      // continue
     }
     core.endGroup() // Asserting
   }

   /*******************************UPLOADING************************************/
-  await output.sendNotifications({ status })
-
-  if ((input.serverBaseUrl && input.token) || input.canUpload) {
+  if (input.uploadServerBaseUrl || input.temporaryPublicStorage || input.gistUploadToken) {
     core.startGroup(`Uploading`)
-    args = []
-    if (input.serverBaseUrl) {
-      args.push('--target=lhci', `--serverBaseUrl=${input.serverBaseUrl}`, `--token=${input.token}`)
-    } else {
-      args.push('--target=temporary-public-storage')
+    if (input.uploadServerBaseUrl) {
+      const uploadStatus = await exec(lhciCliPath, [
+        'upload',
+        '--target=lhci',
+        `--serverBaseUrl=${input.uploadServerBaseUrl}`,
+        `--token=${input.uploadToken}`
+      ])
+      if (uploadStatus !== 0) throw new Error(`LHCI 'upload' failed to upload to LHCI server.`)
     }
-    status = await runChildCommand('upload', args)
+    if (input.temporaryPublicStorage) {
+      const uploadStatus = await exec(lhciCliPath, ['upload', '--target=temporary-public-storage'])
+      if (uploadStatus !== 0) throw new Error(`LHCI 'upload' failed to upload to temporary public storage.`)
+    }

-    if (status !== 0) {
-      throw new Error(`LHCI 'upload' has encountered a problem.`)
+    if (input.gistUploadToken) {
+      // TODO(alekseykulikov): upload to gists
     }
+
     core.endGroup() // Uploading
   }
-  // set results path
-  core.setOutput('resultsPath', '.lighthouserc')
+
+  /*******************************NOTIFYING************************************/
+  if ((input.githubToken || input.slackWebhookUrl) && assertStatus > 0) {
+    // TODO(alekseykulikov): handle notifications
+  }
 }

 // run `main()`
+
 main()
-  .catch(
-    /** @param {Error} err */ err => {
-      core.setFailed(err.message)
-    }
-  )
-  .then(() => {
-    console.log(`done in ${process.uptime()}s`)
-  })
-
-/**
- * Run a child command synchronously.
- * - * @param {'collect'|'assert'|'upload'} command - * @param {string[]} [args] - * @return {number} - */ -function runChildCommand(command, args = []) { - const combinedArgs = [lhciCliPath, command, ...args] - const { status = -1 } = childProcess.spawnSync(process.argv[0], combinedArgs, { - stdio: 'inherit' - }) - - return status || 0 -} + .catch(err => core.setFailed(err.message)) + .then(() => core.debug(`done in ${process.uptime()}s`)) diff --git a/src/input.js b/src/input.js index 1e5249c63..9b0a4e711 100644 --- a/src/input.js +++ b/src/input.js @@ -1,11 +1,11 @@ const core = require('@actions/core') -const { loadRcFile } = require('@lhci/utils/src/lighthouserc') +const { loadRcFile } = require('@lhci/utils/src/lighthouserc.js') -function getArgs() { +exports.getInputArgs = function getInputArgs() { // Make sure we don't have LHCI xor API token - const serverBaseUrl = getArg('upload.serverBaseUrl') - const token = getArg('upload.token') - if (!!serverBaseUrl != !!token) { + const uploadServerBaseUrl = getArg('upload.serverBaseUrl') + const uploadToken = getArg('upload.token') + if (!!uploadServerBaseUrl != !!uploadToken) { // Fail and exit core.setFailed(`Need both a LHCI server url and an API token`) process.exit(1) @@ -57,23 +57,20 @@ function getArgs() { ) } - const logLevel = getArg('logLevel') - return { urls, staticDistDir, - canUpload: getArg('temporaryPublicStorage') ? true : false, + temporaryPublicStorage: getArg('temporaryPublicStorage') === 'true' ? true : false, budgetPath: getArg('budgetPath'), + configPath, slackWebhookUrl: getArg('slackWebhookUrl'), - logLevel: logLevel ? logLevel : 'info', - numberOfRuns: getIntArg('runs'), + runs: getIntArg('runs'), githubToken: getArg('githubToken'), gistUploadToken: getArg('gistUploadToken'), - serverBaseUrl, - token, + uploadServerBaseUrl, + uploadToken, rcCollect, - rcAssert, - configPath + rcAssert } } @@ -127,5 +124,3 @@ function interpolateProcessIntoURLs(urls) { return url }) } - -module.exports = getArgs() diff --git a/src/support-lh-plugins.js b/src/support-lh-plugins.js new file mode 100644 index 000000000..532eb42d6 --- /dev/null +++ b/src/support-lh-plugins.js @@ -0,0 +1,13 @@ +// https://github.com/treosh/lighthouse-ci-action/pull/19 +// +// Append the node_modules of the github workspace and the node_modules of this action +// to NODE_PATH. This supports lighthouse plugins - all the workspace needs to do is +// `npm install` the plugin. The copy of lighthouse within this action will be used. + +const nodePathDelim = require('is-windows')() ? 
';' : ':' +const nodePathParts = [ + ...(process.env.NODE_PATH || '').split(nodePathDelim), + `${__dirname}/../node_modules`, + `${process.env.GITHUB_WORKSPACE}/node_modules` +] +process.env.NODE_PATH = nodePathParts.join(nodePathDelim) diff --git a/src/types.d.ts b/src/types.d.ts index 37d6b3fc3..888a5d94a 100644 --- a/src/types.d.ts +++ b/src/types.d.ts @@ -1,3 +1,3 @@ -declare module '@lhci/utils/src/lighthouserc' { +declare module '@lhci/utils/src/lighthouserc.js' { export function loadRcFile(path: string): { ci?: { collect?: { url?: string; staticDistDir?: string } } } } diff --git a/yarn.lock b/yarn.lock index bb4e43860..30c56e3a7 100644 --- a/yarn.lock +++ b/yarn.lock @@ -7,6 +7,13 @@ resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.2.3.tgz#e844b4fa0820e206075445079130868f95bfca95" integrity sha512-Wp4xnyokakM45Uuj4WLUxdsa8fJjKVl1fDTsPbTEcTcuu0Nb26IPQbOtjmnfaCPGcaoPOOqId8H9NapZ8gii4w== +"@actions/exec@^1.0.3": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@actions/exec/-/exec-1.0.3.tgz#b967f8700d6ff011dcc91243b58bafc1bb9ab95f" + integrity sha512-TogJGnueOmM7ntCi0ASTUj4LapRRtDfj57Ja4IhPmg2fls28uVOPbAn8N+JifaOumN2UG3oEO/Ixek2A4NcYSA== + dependencies: + "@actions/io" "^1.0.1" + "@actions/github@^2.1.1": version "2.1.1" resolved "https://registry.yarnpkg.com/@actions/github/-/github-2.1.1.tgz#bcabedff598196d953f58ba750d5e75549a75142" @@ -23,6 +30,11 @@ dependencies: tunnel "0.0.6" +"@actions/io@^1.0.1": + version "1.0.2" + resolved "https://registry.yarnpkg.com/@actions/io/-/io-1.0.2.tgz#2f614b6e69ce14d191180451eb38e6576a6e6b27" + integrity sha512-J8KuFqVPr3p6U8W93DOXlXW6zFvrQAJANdS+vw0YhusLIq+bszW8zmK2Fh1C2kDPX8FMvwIl1OUcFgvJoXLbAg== + "@lhci/cli@0.3.9": version "0.3.9" resolved "https://registry.yarnpkg.com/@lhci/cli/-/cli-0.3.9.tgz#5c7054b1633ae16ab6f7fb3700e77d0d941ac2e6" From fe232bdc840b97b20b3995d182c500a12cfe24ae Mon Sep 17 00:00:00 2001 From: Aleksey Kulikov Date: Mon, 9 Mar 2020 18:11:53 +0100 Subject: [PATCH 05/19] upload artifacts automatically --- .github/workflows/LHCI-advanced-config.yml | 18 -- .github/workflows/LHCI-upload-artifact.yml | 5 - README.md | 22 +- .../__mocks__/internal-config-variables.js | 35 +++ .../@actions/artifact/lib/artifact-client.js | 11 + .../artifact/lib/internal-artifact-client.js | 149 +++++++++++ .../artifact/lib/internal-config-variables.js | 56 ++++ .../artifact/lib/internal-contracts.js | 3 + .../lib/internal-download-http-client.js | 130 ++++++++++ .../artifact/lib/internal-download-options.js | 3 + .../lib/internal-download-response.js | 3 + .../lib/internal-download-specification.js | 53 ++++ .../lib/internal-upload-http-client.js | 243 ++++++++++++++++++ .../artifact/lib/internal-upload-options.js | 3 + .../artifact/lib/internal-upload-response.js | 3 + .../lib/internal-upload-specification.js | 85 ++++++ .../@actions/artifact/lib/internal-utils.js | 121 +++++++++ node_modules/@actions/artifact/package.json | 42 +++ package.json | 1 + src/{input.js => config.js} | 74 +++--- src/index.js | 87 ++++--- src/types.d.ts | 3 - src/{ => utils}/support-lh-plugins.js | 0 src/utils/upload-artifacts.js | 12 + tsconfig.json | 2 +- types/action.d.ts | 3 + yarn.lock | 12 +- 27 files changed, 1056 insertions(+), 123 deletions(-) delete mode 100644 .github/workflows/LHCI-advanced-config.yml create mode 100644 node_modules/@actions/artifact/lib/__mocks__/internal-config-variables.js create mode 100644 node_modules/@actions/artifact/lib/artifact-client.js create mode 100644 
node_modules/@actions/artifact/lib/internal-artifact-client.js
 create mode 100644 node_modules/@actions/artifact/lib/internal-config-variables.js
 create mode 100644 node_modules/@actions/artifact/lib/internal-contracts.js
 create mode 100644 node_modules/@actions/artifact/lib/internal-download-http-client.js
 create mode 100644 node_modules/@actions/artifact/lib/internal-download-options.js
 create mode 100644 node_modules/@actions/artifact/lib/internal-download-response.js
 create mode 100644 node_modules/@actions/artifact/lib/internal-download-specification.js
 create mode 100644 node_modules/@actions/artifact/lib/internal-upload-http-client.js
 create mode 100644 node_modules/@actions/artifact/lib/internal-upload-options.js
 create mode 100644 node_modules/@actions/artifact/lib/internal-upload-response.js
 create mode 100644 node_modules/@actions/artifact/lib/internal-upload-specification.js
 create mode 100644 node_modules/@actions/artifact/lib/internal-utils.js
 create mode 100644 node_modules/@actions/artifact/package.json
 rename src/{input.js => config.js} (57%)
 delete mode 100644 src/types.d.ts
 rename src/{ => utils}/support-lh-plugins.js (100%)
 create mode 100644 src/utils/upload-artifacts.js
 create mode 100644 types/action.d.ts

diff --git a/.github/workflows/LHCI-advanced-config.yml b/.github/workflows/LHCI-advanced-config.yml
deleted file mode 100644
index dda538f6e..000000000
--- a/.github/workflows/LHCI-advanced-config.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-# This is an advanced fully hermetic run of Lighthouse, using custom config.
-name: LHCI-advanced-config
-on: push
-jobs:
-  hermetic-advanced:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v1
-      - name: Run Lighthouse on urls with lighthouserc
-        uses: ./
-        with:
-          urls: 'https://example.com/'
-          configPath: '.github/lighthouse/lighthouserc-custom-config.json'
-      - name: Save .lighthouseci artifacts
-        uses: actions/upload-artifact@v1
-        with:
-          name: advanced-hermetic-artifacts
-          path: '.lighthouseci'

diff --git a/.github/workflows/LHCI-upload-artifact.yml b/.github/workflows/LHCI-upload-artifact.yml
index 7efd202d0..a3328d6a6 100644
--- a/.github/workflows/LHCI-upload-artifact.yml
+++ b/.github/workflows/LHCI-upload-artifact.yml
@@ -12,8 +12,3 @@ jobs:
           urls: |
             https://treo.sh/
             https://treo.sh/demo
-      - name: Save results
-        uses: actions/upload-artifact@v1
-        with:
-          name: lighthouse-results
-          path: '.lighthouseci'

diff --git a/README.md b/README.md
index 9c836a3b1..a6888907e 100644
--- a/README.md
+++ b/README.md
@@ -17,10 +17,10 @@
 ## Examples

-**Basic example**: run Lighthouse on each push to the repo and save results as action artifacts.
+Run Lighthouse on each push to the repo and attach results to the action.

 Create `.github/workflows/main.yml` with the list of URLs to audit using Lighthouse.
-The results will be stored as a build artifact:
+Provide `githubToken` and `temporaryPublicStorage` to automatically attach results to the action for quick debugging.
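Most of this patch vendors the `@actions/artifact` client, which the new `src/utils/upload-artifacts.js` helper presumably wraps. Its public surface (see `artifact-client.js` further down) is small: `create()` returns a client whose `uploadArtifact(name, files, rootDirectory)` pushes files into the run's artifact storage. A minimal sketch with illustrative paths; note it can only run on a GitHub Actions runner, where `ACTIONS_RUNTIME_TOKEN` and `ACTIONS_RUNTIME_URL` are set:

```js
const artifact = require('@actions/artifact')

async function uploadResults() {
  const client = artifact.create()
  // Illustrative report paths: LHCI writes its output into .lighthouseci/
  const files = ['.lighthouseci/lhr-0.json', '.lighthouseci/lhr-0.html']
  // The third argument is stripped from the stored file names, so the artifact
  // contains lhr-0.json rather than .lighthouseci/lhr-0.json
  await client.uploadArtifact('lighthouse-results', files, '.lighthouseci')
}
```

The README example that follows shows the minimal workflow wiring: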
```yml
 name: Lighthouse
 on: push
 jobs:
   lighthouse:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
       - name: Audit URLs using Lighthouse
         uses: treosh/lighthouse-ci-action@v2
         with:
           urls: |
-            https://treo.sh/
-            https://treo.sh/demo
-      - name: Save results
-        uses: actions/upload-artifact@v1
-        with:
-          name: lighthouse-results
-          path: '.lighthouseci' # This will save the Lighthouse results as .json files
+            https://example.com/
+            https://example.com/blog
+          temporaryPublicStorage: true # (optional) save Lighthouse results for a quick preview
+          githubToken: ${{ secrets.GITHUB_TOKEN }} # set action status with details about runs
 ```

 [⚙️ See this workflow in use](https://github.com/treosh/lighthouse-ci-action/actions?workflow=LHCI-upload-artifact)

@@ -153,6 +150,13 @@ upload.serverBaseUrl: ${{ secrets.LHCI_SERVER }}
 upload.token: ${{ secrets.LHCI_TOKEN }}
 ```

+By default, the action automatically uploads Lighthouse results as an artifact.
+Set `upload.artifacts: false` to disable this behavior, for example, when using an LHCI server.
+
+```yml
+upload.artifacts: false
+```
+
 #### `gistUploadToken`

 Specify a GitHub API token to upload Lighthouse results to a Gist. [Learn how to generate a token](https://github.com/GoogleChrome/lighthouse-ci/blob/master/docs/getting-started.md#historical-reports--diffing-lighthouse-ci-server).

diff --git a/node_modules/@actions/artifact/lib/__mocks__/internal-config-variables.js b/node_modules/@actions/artifact/lib/__mocks__/internal-config-variables.js
new file mode 100644
index 000000000..47aa0fbba
--- /dev/null
+++ b/node_modules/@actions/artifact/lib/__mocks__/internal-config-variables.js
@@ -0,0 +1,35 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+/**
+ * Mocks default limits for easier testing
+ */
+function getUploadFileConcurrency() {
+    return 1;
+}
+exports.getUploadFileConcurrency = getUploadFileConcurrency;
+function getUploadChunkConcurrency() {
+    return 1;
+}
+exports.getUploadChunkConcurrency = getUploadChunkConcurrency;
+function getUploadChunkSize() {
+    return 4 * 1024 * 1024; // 4 MB Chunks
+}
+exports.getUploadChunkSize = getUploadChunkSize;
+/**
+ * Mocks the 'ACTIONS_RUNTIME_TOKEN', 'ACTIONS_RUNTIME_URL' and 'GITHUB_RUN_ID' env variables
+ * that are only available from a node context on the runner.
This allows for tests to run + * locally without the env variables actually being set + */ +function getRuntimeToken() { + return 'totally-valid-token'; +} +exports.getRuntimeToken = getRuntimeToken; +function getRuntimeUrl() { + return 'https://www.example.com/'; +} +exports.getRuntimeUrl = getRuntimeUrl; +function getWorkFlowRunId() { + return '15'; +} +exports.getWorkFlowRunId = getWorkFlowRunId; +//# sourceMappingURL=internal-config-variables.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/artifact-client.js b/node_modules/@actions/artifact/lib/artifact-client.js new file mode 100644 index 000000000..26be8151a --- /dev/null +++ b/node_modules/@actions/artifact/lib/artifact-client.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const internal_artifact_client_1 = require("./internal-artifact-client"); +/** + * Constructs an ArtifactClient + */ +function create() { + return internal_artifact_client_1.DefaultArtifactClient.create(); +} +exports.create = create; +//# sourceMappingURL=artifact-client.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal-artifact-client.js b/node_modules/@actions/artifact/lib/internal-artifact-client.js new file mode 100644 index 000000000..4278045e7 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal-artifact-client.js @@ -0,0 +1,149 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(require("@actions/core")); +const internal_upload_specification_1 = require("./internal-upload-specification"); +const internal_upload_http_client_1 = require("./internal-upload-http-client"); +const internal_utils_1 = require("./internal-utils"); +const internal_download_http_client_1 = require("./internal-download-http-client"); +const internal_download_specification_1 = require("./internal-download-specification"); +const internal_config_variables_1 = require("./internal-config-variables"); +const path_1 = require("path"); +class DefaultArtifactClient { + /** + * Constructs a DefaultArtifactClient + */ + static create() { + return new DefaultArtifactClient(); + } + /** + * Uploads an artifact + */ + uploadArtifact(name, files, rootDirectory, options) { + return __awaiter(this, void 0, void 0, function* () { + internal_utils_1.checkArtifactName(name); + // Get specification for the files being uploaded + const uploadSpecification = internal_upload_specification_1.getUploadSpecification(name, rootDirectory, files); + const uploadResponse = { + artifactName: name, + artifactItems: [], + size: 0, + failedItems: [] + }; + if (uploadSpecification.length === 0) { + core.warning(`No files found that can be uploaded`); + } + else { + // Create an entry for the artifact in the file container + const response = yield internal_upload_http_client_1.createArtifactInFileContainer(name); + if (!response.fileContainerResourceUrl) { + core.debug(response.toString()); + throw new Error('No URL provided by the Artifact Service to upload an artifact to'); + } + core.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); + // Upload each of the files that were found concurrently + const uploadResult = yield internal_upload_http_client_1.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options); + //Update the size of the artifact to indicate we are done uploading + yield internal_upload_http_client_1.patchArtifactSize(uploadResult.size, name); + core.info(`Finished uploading artifact ${name}. Reported size is ${uploadResult.size} bytes. 
There were ${uploadResult.failedItems.length} items that failed to upload`); + uploadResponse.artifactItems = uploadSpecification.map(item => item.absoluteFilePath); + uploadResponse.size = uploadResult.size; + uploadResponse.failedItems = uploadResult.failedItems; + } + return uploadResponse; + }); + } + downloadArtifact(name, path, options) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const artifacts = yield internal_download_http_client_1.listArtifacts(); + if (artifacts.count === 0) { + throw new Error(`Unable to find any artifacts for the associated workflow`); + } + const artifactToDownload = artifacts.value.find(artifact => { + return artifact.name === name; + }); + if (!artifactToDownload) { + throw new Error(`Unable to find an artifact with the name: ${name}`); + } + const items = yield internal_download_http_client_1.getContainerItems(artifactToDownload.name, artifactToDownload.fileContainerResourceUrl); + if (!path) { + path = internal_config_variables_1.getWorkSpaceDirectory(); + } + path = path_1.normalize(path); + path = path_1.resolve(path); + // During upload, empty directories are rejected by the remote server so there should be no artifacts that consist of only empty directories + const downloadSpecification = internal_download_specification_1.getDownloadSpecification(name, items.value, path, ((_a = options) === null || _a === void 0 ? void 0 : _a.createArtifactFolder) || false); + if (downloadSpecification.filesToDownload.length === 0) { + core.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); + } + else { + // Create all necessary directories recursively before starting any download + yield internal_utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure); + yield internal_download_http_client_1.downloadSingleArtifact(downloadSpecification.filesToDownload); + } + return { + artifactName: name, + downloadPath: downloadSpecification.rootDownloadLocation + }; + }); + } + downloadAllArtifacts(path) { + return __awaiter(this, void 0, void 0, function* () { + const response = []; + const artifacts = yield internal_download_http_client_1.listArtifacts(); + if (artifacts.count === 0) { + core.info('Unable to find any artifacts for the associated workflow'); + return response; + } + if (!path) { + path = internal_config_variables_1.getWorkSpaceDirectory(); + } + path = path_1.normalize(path); + path = path_1.resolve(path); + const ARTIFACT_CONCURRENCY = internal_config_variables_1.getDownloadArtifactConcurrency(); + const parallelDownloads = [...new Array(ARTIFACT_CONCURRENCY).keys()]; + let downloadedArtifacts = 0; + yield Promise.all(parallelDownloads.map(() => __awaiter(this, void 0, void 0, function* () { + while (downloadedArtifacts < artifacts.count) { + const currentArtifactToDownload = artifacts.value[downloadedArtifacts]; + downloadedArtifacts += 1; + // Get container entries for the specific artifact + const items = yield internal_download_http_client_1.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl); + // Promise.All is not correctly inferring that 'path' is no longer possibly undefined: https://github.com/microsoft/TypeScript/issues/34925 + const downloadSpecification = internal_download_specification_1.getDownloadSpecification(currentArtifactToDownload.name, items.value, path, // eslint-disable-line @typescript-eslint/no-non-null-assertion + true); + if (downloadSpecification.filesToDownload.length === 0) { + core.info(`No 
downloadable files were found for any artifact ${currentArtifactToDownload.name}`); + } + else { + yield internal_utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure); + yield internal_download_http_client_1.downloadSingleArtifact(downloadSpecification.filesToDownload); + } + response.push({ + artifactName: currentArtifactToDownload.name, + downloadPath: downloadSpecification.rootDownloadLocation + }); + } + }))); + return response; + }); + } +} +exports.DefaultArtifactClient = DefaultArtifactClient; +//# sourceMappingURL=internal-artifact-client.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal-config-variables.js b/node_modules/@actions/artifact/lib/internal-config-variables.js new file mode 100644 index 000000000..f545662db --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal-config-variables.js @@ -0,0 +1,56 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +function getUploadFileConcurrency() { + return 2; +} +exports.getUploadFileConcurrency = getUploadFileConcurrency; +function getUploadChunkConcurrency() { + return 1; +} +exports.getUploadChunkConcurrency = getUploadChunkConcurrency; +function getUploadChunkSize() { + return 4 * 1024 * 1024; // 4 MB Chunks +} +exports.getUploadChunkSize = getUploadChunkSize; +function getDownloadFileConcurrency() { + return 2; +} +exports.getDownloadFileConcurrency = getDownloadFileConcurrency; +function getDownloadArtifactConcurrency() { + // when downloading all artifact at once, this is number of concurrent artifacts being downloaded + return 1; +} +exports.getDownloadArtifactConcurrency = getDownloadArtifactConcurrency; +function getRuntimeToken() { + const token = process.env['ACTIONS_RUNTIME_TOKEN']; + if (!token) { + throw new Error('Unable to get ACTIONS_RUNTIME_TOKEN env variable'); + } + return token; +} +exports.getRuntimeToken = getRuntimeToken; +function getRuntimeUrl() { + const runtimeUrl = process.env['ACTIONS_RUNTIME_URL']; + if (!runtimeUrl) { + throw new Error('Unable to get ACTIONS_RUNTIME_URL env variable'); + } + return runtimeUrl; +} +exports.getRuntimeUrl = getRuntimeUrl; +function getWorkFlowRunId() { + const workFlowRunId = process.env['GITHUB_RUN_ID']; + if (!workFlowRunId) { + throw new Error('Unable to get GITHUB_RUN_ID env variable'); + } + return workFlowRunId; +} +exports.getWorkFlowRunId = getWorkFlowRunId; +function getWorkSpaceDirectory() { + const workspaceDirectory = process.env['GITHUB_WORKSPACE']; + if (!workspaceDirectory) { + throw new Error('Unable to get GITHUB_WORKSPACE env variable'); + } + return workspaceDirectory; +} +exports.getWorkSpaceDirectory = getWorkSpaceDirectory; +//# sourceMappingURL=internal-config-variables.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal-contracts.js b/node_modules/@actions/artifact/lib/internal-contracts.js new file mode 100644 index 000000000..aef62b08d --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal-contracts.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=internal-contracts.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal-download-http-client.js b/node_modules/@actions/artifact/lib/internal-download-http-client.js new file mode 100644 index 000000000..785036821 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal-download-http-client.js @@ -0,0 +1,130 @@ +"use strict"; +var 
__awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const fs = __importStar(require("fs")); +const internal_utils_1 = require("./internal-utils"); +const url_1 = require("url"); +const internal_config_variables_1 = require("./internal-config-variables"); +const core_1 = require("@actions/core"); +/** + * Gets a list of all artifacts that are in a specific container + */ +function listArtifacts() { + return __awaiter(this, void 0, void 0, function* () { + const artifactUrl = internal_utils_1.getArtifactUrl(); + const client = internal_utils_1.createHttpClient(); + const requestOptions = internal_utils_1.getRequestOptions('application/json'); + const rawResponse = yield client.get(artifactUrl, requestOptions); + const body = yield rawResponse.readBody(); + if (internal_utils_1.isSuccessStatusCode(rawResponse.message.statusCode) && body) { + return JSON.parse(body); + } + // eslint-disable-next-line no-console + console.log(rawResponse); + throw new Error(`Unable to list artifacts for the run`); + }); +} +exports.listArtifacts = listArtifacts; +/** + * Fetches a set of container items that describe the contents of an artifact + * @param artifactName the name of the artifact + * @param containerUrl the artifact container URL for the run + */ +function getContainerItems(artifactName, containerUrl) { + return __awaiter(this, void 0, void 0, function* () { + // The itemPath search parameter controls which containers will be returned + const resourceUrl = new url_1.URL(containerUrl); + resourceUrl.searchParams.append('itemPath', artifactName); + const client = internal_utils_1.createHttpClient(); + const rawResponse = yield client.get(resourceUrl.toString()); + const body = yield rawResponse.readBody(); + if (internal_utils_1.isSuccessStatusCode(rawResponse.message.statusCode) && body) { + return JSON.parse(body); + } + // eslint-disable-next-line no-console + console.log(rawResponse); + throw new Error(`Unable to get ContainersItems from ${resourceUrl}`); + }); +} +exports.getContainerItems = getContainerItems; +/** + * Concurrently downloads all the files that are part of an artifact + * @param downloadItems information about what items to download and where to save them + */ +function downloadSingleArtifact(downloadItems) { + return __awaiter(this, void 0, void 0, function* () { + const DOWNLOAD_CONCURRENCY = internal_config_variables_1.getDownloadFileConcurrency(); + // Limit the number of files downloaded at a single time + const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()]; + const client = internal_utils_1.createHttpClient(); + let downloadedFiles = 0; + 
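+        // NOTE: `parallelDownloads` is just [0, 1, ..., N-1]; its only role is to start N
+        // concurrent workers. Each worker loops, claiming the next file through the shared
+        // `downloadedFiles` cursor, so at most N downloads are in flight at any time.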
yield Promise.all(parallelDownloads.map(() => __awaiter(this, void 0, void 0, function* () { + while (downloadedFiles < downloadItems.length) { + const currentFileToDownload = downloadItems[downloadedFiles]; + downloadedFiles += 1; + yield downloadIndividualFile(client, currentFileToDownload.sourceLocation, currentFileToDownload.targetPath); + } + }))); + }); +} +exports.downloadSingleArtifact = downloadSingleArtifact; +/** + * Downloads an individual file + * @param client http client that will be used to make the necessary calls + * @param artifactLocation origin location where a file will be downloaded from + * @param downloadPath destination location for the file being downloaded + */ +function downloadIndividualFile(client, artifactLocation, downloadPath) { + return __awaiter(this, void 0, void 0, function* () { + const stream = fs.createWriteStream(downloadPath); + const response = yield client.get(artifactLocation); + if (internal_utils_1.isSuccessStatusCode(response.message.statusCode)) { + yield pipeResponseToStream(response, stream); + } + else if (internal_utils_1.isRetryableStatusCode(response.message.statusCode)) { + core_1.warning(`Received http ${response.message.statusCode} during file download, will retry ${artifactLocation} after 10 seconds`); + yield new Promise(resolve => setTimeout(resolve, 10000)); + const retryResponse = yield client.get(artifactLocation); + if (internal_utils_1.isSuccessStatusCode(retryResponse.message.statusCode)) { + yield pipeResponseToStream(response, stream); + } + else { + // eslint-disable-next-line no-console + console.log(retryResponse); + throw new Error(`Unable to download ${artifactLocation}`); + } + } + else { + // eslint-disable-next-line no-console + console.log(response); + throw new Error(`Unable to download ${artifactLocation}`); + } + }); +} +exports.downloadIndividualFile = downloadIndividualFile; +function pipeResponseToStream(response, stream) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise(resolve => { + response.message.pipe(stream).on('close', () => { + resolve(); + }); + }); + }); +} +exports.pipeResponseToStream = pipeResponseToStream; +//# sourceMappingURL=internal-download-http-client.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal-download-options.js b/node_modules/@actions/artifact/lib/internal-download-options.js new file mode 100644 index 000000000..397241b02 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal-download-options.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=internal-download-options.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal-download-response.js b/node_modules/@actions/artifact/lib/internal-download-response.js new file mode 100644 index 000000000..0d388cb7f --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal-download-response.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=internal-download-response.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal-download-specification.js b/node_modules/@actions/artifact/lib/internal-download-specification.js new file mode 100644 index 000000000..d213a07bd --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal-download-specification.js @@ -0,0 +1,53 @@ +"use strict"; +var __importStar = (this && this.__importStar) || function 
(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const path = __importStar(require("path")); +/** + * Creates a specification for a set of files that will be downloaded + * @param artifactName the name of the artifact + * @param artifactEntries a set of container entries that describe that files that make up an artifact + * @param downloadPath the path where the artifact will be downloaded to + * @param includeRootDirectory specifies if there should be an extra directory (denoted by the artifact name) where the artifact files should be downloaded to + */ +function getDownloadSpecification(artifactName, artifactEntries, downloadPath, includeRootDirectory) { + const directories = new Set(); + const specifications = { + rootDownloadLocation: includeRootDirectory + ? path.join(downloadPath, artifactName) + : downloadPath, + directoryStructure: [], + filesToDownload: [] + }; + for (const entry of artifactEntries) { + // Ignore artifacts in the container that don't begin with the same name + if (entry.path.startsWith(`${artifactName}/`) || + entry.path.startsWith(`${artifactName}\\`)) { + // normalize all separators to the local OS + const normalizedPathEntry = path.normalize(entry.path); + // entry.path always starts with the artifact name, if includeRootDirectory is false, remove the name from the beginning of the path + const filePath = path.join(downloadPath, includeRootDirectory + ? normalizedPathEntry + : normalizedPathEntry.replace(artifactName, '')); + // Case insensitive folder structure maintained in the backend, not every folder is created so the 'folder' + // itemType cannot be relied upon. The file must be used to determine the directory structure + if (entry.itemType === 'file') { + // Get the directories that we need to create from the filePath for each individual file + directories.add(path.dirname(filePath)); + specifications.filesToDownload.push({ + sourceLocation: entry.contentLocation, + targetPath: filePath + }); + } + } + } + specifications.directoryStructure = Array.from(directories); + return specifications; +} +exports.getDownloadSpecification = getDownloadSpecification; +//# sourceMappingURL=internal-download-specification.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal-upload-http-client.js b/node_modules/@actions/artifact/lib/internal-upload-http-client.js new file mode 100644 index 000000000..36aaa9641 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal-upload-http-client.js @@ -0,0 +1,243 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core_1 = require("@actions/core"); +const fs = __importStar(require("fs")); +const url_1 = require("url"); +const internal_utils_1 = require("./internal-utils"); +const internal_config_variables_1 = require("./internal-config-variables"); +/** + * Creates a file container for the new artifact in the remote blob storage/file service + * @param {string} artifactName Name of the artifact being created + * @returns The response from the Artifact Service if the file container was successfully created + */ +function createArtifactInFileContainer(artifactName) { + return __awaiter(this, void 0, void 0, function* () { + const parameters = { + Type: 'actions_storage', + Name: artifactName + }; + const data = JSON.stringify(parameters, null, 2); + const artifactUrl = internal_utils_1.getArtifactUrl(); + const client = internal_utils_1.createHttpClient(); + const requestOptions = internal_utils_1.getRequestOptions('application/json'); + const rawResponse = yield client.post(artifactUrl, data, requestOptions); + const body = yield rawResponse.readBody(); + if (internal_utils_1.isSuccessStatusCode(rawResponse.message.statusCode) && body) { + return JSON.parse(body); + } + else { + // eslint-disable-next-line no-console + console.log(rawResponse); + throw new Error(`Unable to create a container for the artifact ${artifactName}`); + } + }); +} +exports.createArtifactInFileContainer = createArtifactInFileContainer; +/** + * Concurrently upload all of the files in chunks + * @param {string} uploadUrl Base Url for the artifact that was created + * @param {SearchResult[]} filesToUpload A list of information about the files being uploaded + * @returns The size of all the files uploaded in bytes + */ +function uploadArtifactToFileContainer(uploadUrl, filesToUpload, options) { + return __awaiter(this, void 0, void 0, function* () { + const client = internal_utils_1.createHttpClient(); + const FILE_CONCURRENCY = internal_config_variables_1.getUploadFileConcurrency(); + const CHUNK_CONCURRENCY = internal_config_variables_1.getUploadChunkConcurrency(); + const MAX_CHUNK_SIZE = internal_config_variables_1.getUploadChunkSize(); + core_1.debug(`File Concurrency: ${FILE_CONCURRENCY}, Chunk Concurrency: ${CHUNK_CONCURRENCY} and Chunk Size: ${MAX_CHUNK_SIZE}`); + const parameters = []; + // by default, file uploads will continue if there is an error unless specified differently in the options + let continueOnError = true; + if (options) { + if (options.continueOnError === false) { + continueOnError = false; + } + } + // Prepare the necessary parameters to upload all the files + for (const file of filesToUpload) { + const resourceUrl = new url_1.URL(uploadUrl); + resourceUrl.searchParams.append('itemPath', file.uploadFilePath); + parameters.push({ + file: file.absoluteFilePath, + resourceUrl: resourceUrl.toString(), + restClient: client, + concurrency: CHUNK_CONCURRENCY, + maxChunkSize: MAX_CHUNK_SIZE, + continueOnError + }); + } + const parallelUploads = [...new Array(FILE_CONCURRENCY).keys()]; + const failedItemsToReport = []; 
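+        // NOTE: same worker-pool shape as the download client: `uploadedFiles` is a shared
+        // cursor. If a file fails while `continueOnError` is false, `abortPendingFileUploads`
+        // flips and the remaining queue is drained into `failedItemsToReport` without
+        // issuing any further requests.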
+ let uploadedFiles = 0; + let fileSizes = 0; + let abortPendingFileUploads = false; + // Only allow a certain amount of files to be uploaded at once, this is done to reduce potential errors + yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () { + while (uploadedFiles < filesToUpload.length) { + const currentFileParameters = parameters[uploadedFiles]; + uploadedFiles += 1; + if (abortPendingFileUploads) { + failedItemsToReport.push(currentFileParameters.file); + continue; + } + const uploadFileResult = yield uploadFileAsync(currentFileParameters); + fileSizes += uploadFileResult.successfulUploadSize; + if (uploadFileResult.isSuccess === false) { + failedItemsToReport.push(currentFileParameters.file); + if (!continueOnError) { + // Existing uploads will be able to finish however all pending uploads will fail fast + abortPendingFileUploads = true; + } + } + } + }))); + core_1.info(`Total size of all the files uploaded is ${fileSizes} bytes`); + return { + size: fileSizes, + failedItems: failedItemsToReport + }; + }); +} +exports.uploadArtifactToFileContainer = uploadArtifactToFileContainer; +/** + * Asynchronously uploads a file. If the file is bigger than the max chunk size it will be uploaded via multiple calls + * @param {UploadFileParameters} parameters Information about the file that needs to be uploaded + * @returns The size of the file that was uploaded in bytes along with any failed uploads + */ +function uploadFileAsync(parameters) { + return __awaiter(this, void 0, void 0, function* () { + const fileSize = fs.statSync(parameters.file).size; + const parallelUploads = [...new Array(parameters.concurrency).keys()]; + let offset = 0; + let isUploadSuccessful = true; + let failedChunkSizes = 0; + let abortFileUpload = false; + yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () { + while (offset < fileSize) { + const chunkSize = Math.min(fileSize - offset, parameters.maxChunkSize); + if (abortFileUpload) { + // if we don't want to continue on error, any pending upload chunk will be marked as failed + failedChunkSizes += chunkSize; + continue; + } + const start = offset; + const end = offset + chunkSize - 1; + offset += parameters.maxChunkSize; + const chunk = fs.createReadStream(parameters.file, { + start, + end, + autoClose: false + }); + const result = yield uploadChunk(parameters.restClient, parameters.resourceUrl, chunk, start, end, fileSize); + if (!result) { + /** + * Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. It is possible that part of a chunk was + * successfully uploaded so the server may report a different size for what was uploaded + **/ + isUploadSuccessful = false; + failedChunkSizes += chunkSize; + core_1.warning(`Aborting upload for ${parameters.file} due to failure`); + abortFileUpload = true; + } + } + }))); + return { + isSuccess: isUploadSuccessful, + successfulUploadSize: fileSize - failedChunkSizes + }; + }); +} +/** + * Uploads a chunk of an individual file to the specified resourceUrl. 
If the upload fails and the status code + * indicates a retryable status, we try to upload the chunk as well + * @param {HttpClient} restClient RestClient that will be making the appropriate HTTP call + * @param {string} resourceUrl Url of the resource that the chunk will be uploaded to + * @param {NodeJS.ReadableStream} data Stream of the file that will be uploaded + * @param {number} start Starting byte index of file that the chunk belongs to + * @param {number} end Ending byte index of file that the chunk belongs to + * @param {number} totalSize Total size of the file in bytes that is being uploaded + * @returns if the chunk was successfully uploaded + */ +function uploadChunk(restClient, resourceUrl, data, start, end, totalSize) { + return __awaiter(this, void 0, void 0, function* () { + core_1.info(`Uploading chunk of size ${end - + start + + 1} bytes at offset ${start} with content range: ${internal_utils_1.getContentRange(start, end, totalSize)}`); + const requestOptions = internal_utils_1.getRequestOptions('application/octet-stream', totalSize, internal_utils_1.getContentRange(start, end, totalSize)); + const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () { + return yield restClient.sendStream('PUT', resourceUrl, data, requestOptions); + }); + const response = yield uploadChunkRequest(); + if (internal_utils_1.isSuccessStatusCode(response.message.statusCode)) { + core_1.debug(`Chunk for ${start}:${end} was successfully uploaded to ${resourceUrl}`); + return true; + } + else if (internal_utils_1.isRetryableStatusCode(response.message.statusCode)) { + core_1.info(`Received http ${response.message.statusCode} during chunk upload, will retry at offset ${start} after 10 seconds.`); + yield new Promise(resolve => setTimeout(resolve, 10000)); + const retryResponse = yield uploadChunkRequest(); + if (internal_utils_1.isSuccessStatusCode(retryResponse.message.statusCode)) { + return true; + } + else { + core_1.info(`Unable to upload chunk even after retrying`); + // eslint-disable-next-line no-console + console.log(response); + return false; + } + } + // Upload must have failed spectacularly somehow, log full result for diagnostic purposes + // eslint-disable-next-line no-console + console.log(response); + return false; + }); +} +/** + * Updates the size of the artifact from -1 which was initially set when the container was first created for the artifact. + * Updating the size indicates that we are done uploading all the contents of the artifact. 
A server side check will be run + * to check that the artifact size is correct for billing purposes + */ +function patchArtifactSize(size, artifactName) { + return __awaiter(this, void 0, void 0, function* () { + const client = internal_utils_1.createHttpClient(); + const requestOptions = internal_utils_1.getRequestOptions('application/json'); + const resourceUrl = new url_1.URL(internal_utils_1.getArtifactUrl()); + resourceUrl.searchParams.append('artifactName', artifactName); + const parameters = { Size: size }; + const data = JSON.stringify(parameters, null, 2); + core_1.debug(`URL is ${resourceUrl.toString()}`); + const rawResponse = yield client.patch(resourceUrl.toString(), data, requestOptions); + const body = yield rawResponse.readBody(); + if (internal_utils_1.isSuccessStatusCode(rawResponse.message.statusCode)) { + core_1.debug(`Artifact ${artifactName} has been successfully uploaded, total size ${size}`); + core_1.debug(body); + } + else if (rawResponse.message.statusCode === 404) { + throw new Error(`An Artifact with the name ${artifactName} was not found`); + } + else { + // eslint-disable-next-line no-console + console.log(body); + throw new Error(`Unable to finish uploading artifact ${artifactName}`); + } + }); +} +exports.patchArtifactSize = patchArtifactSize; +//# sourceMappingURL=internal-upload-http-client.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal-upload-options.js b/node_modules/@actions/artifact/lib/internal-upload-options.js new file mode 100644 index 000000000..471352963 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal-upload-options.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=internal-upload-options.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal-upload-response.js b/node_modules/@actions/artifact/lib/internal-upload-response.js new file mode 100644 index 000000000..4675d9b3e --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal-upload-response.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=internal-upload-response.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal-upload-specification.js b/node_modules/@actions/artifact/lib/internal-upload-specification.js new file mode 100644 index 000000000..8f4cd09ff --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal-upload-specification.js @@ -0,0 +1,85 @@ +"use strict"; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const fs = __importStar(require("fs")); +const core_1 = require("@actions/core"); +const path_1 = require("path"); +const internal_utils_1 = require("./internal-utils"); +/** + * Creates a specification that describes how each file that is part of the artifact will be uploaded + * @param artifactName the name of the artifact being uploaded. 
Used during upload to denote where the artifact is stored on the server + * @param rootDirectory an absolute file path that denotes the path that should be removed from the beginning of each artifact file + * @param artifactFiles a list of absolute file paths that denote what should be uploaded as part of the artifact + */ +function getUploadSpecification(artifactName, rootDirectory, artifactFiles) { + internal_utils_1.checkArtifactName(artifactName); + const specifications = []; + if (!fs.existsSync(rootDirectory)) { + throw new Error(`Provided rootDirectory ${rootDirectory} does not exist`); + } + if (!fs.lstatSync(rootDirectory).isDirectory()) { + throw new Error(`Provided rootDirectory ${rootDirectory} is not a valid directory`); + } + // Normalize and resolve, this allows for either absolute or relative paths to be used + rootDirectory = path_1.normalize(rootDirectory); + rootDirectory = path_1.resolve(rootDirectory); + /* + Example to demonstrate behavior + + Input: + artifactName: my-artifact + rootDirectory: '/home/user/files/plz-upload' + artifactFiles: [ + '/home/user/files/plz-upload/file1.txt', + '/home/user/files/plz-upload/file2.txt', + '/home/user/files/plz-upload/dir/file3.txt' + ] + + Output: + specifications: [ + ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file1.txt'], + ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file2.txt'], + ['/home/user/files/plz-upload/file1.txt', 'my-artifact/dir/file3.txt'] + ] + */ + for (let file of artifactFiles) { + if (!fs.existsSync(file)) { + throw new Error(`File ${file} does not exist`); + } + if (!fs.lstatSync(file).isDirectory()) { + // Normalize and resolve, this allows for either absolute or relative paths to be used + file = path_1.normalize(file); + file = path_1.resolve(file); + if (!file.startsWith(rootDirectory)) { + throw new Error(`The rootDirectory: ${rootDirectory} is not a parent directory of the file: ${file}`); + } + /* + uploadFilePath denotes where the file will be uploaded in the file container on the server. During a run, if multiple artifacts are uploaded, they will all + be saved in the same container. The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts + + path.join handles all the following cases and would return 'artifact-name/file-to-upload.txt + join('artifact-name/', 'file-to-upload.txt') + join('artifact-name/', '/file-to-upload.txt') + join('artifact-name', 'file-to-upload.txt') + join('artifact-name', '/file-to-upload.txt') + */ + specifications.push({ + absoluteFilePath: file, + uploadFilePath: path_1.join(artifactName, file.replace(rootDirectory, '')) + }); + } + else { + // Directories are rejected by the server during upload + core_1.debug(`Removing ${file} from rawSearchResults because it is a directory`); + } + } + return specifications; +} +exports.getUploadSpecification = getUploadSpecification; +//# sourceMappingURL=internal-upload-specification.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal-utils.js b/node_modules/@actions/artifact/lib/internal-utils.js new file mode 100644 index 000000000..b9811aca6 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal-utils.js @@ -0,0 +1,121 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core_1 = require("@actions/core"); +const fs_1 = require("fs"); +const http_client_1 = require("@actions/http-client"); +const auth_1 = require("@actions/http-client/auth"); +const internal_config_variables_1 = require("./internal-config-variables"); +/** + * Parses a env variable that is a number + */ +function parseEnvNumber(key) { + const value = Number(process.env[key]); + if (Number.isNaN(value) || value < 0) { + return undefined; + } + return value; +} +exports.parseEnvNumber = parseEnvNumber; +/** + * Various utility functions to help with the necessary API calls + */ +function getApiVersion() { + return '6.0-preview'; +} +exports.getApiVersion = getApiVersion; +function isSuccessStatusCode(statusCode) { + if (!statusCode) { + return false; + } + return statusCode >= 200 && statusCode < 300; +} +exports.isSuccessStatusCode = isSuccessStatusCode; +function isRetryableStatusCode(statusCode) { + if (!statusCode) { + return false; + } + const retryableStatusCodes = [ + http_client_1.HttpCodes.BadGateway, + http_client_1.HttpCodes.ServiceUnavailable, + http_client_1.HttpCodes.GatewayTimeout + ]; + return retryableStatusCodes.includes(statusCode); +} +exports.isRetryableStatusCode = isRetryableStatusCode; +function getContentRange(start, end, total) { + // Format: `bytes start-end/fileSize + // start and end are inclusive + // For a 200 byte chunk starting at byte 0: + // Content-Range: bytes 0-199/200 + return `bytes ${start}-${end}/${total}`; +} +exports.getContentRange = getContentRange; +function getRequestOptions(contentType, contentLength, contentRange) { + const requestOptions = { + Accept: `application/json;api-version=${getApiVersion()}` + }; + if (contentType) { + requestOptions['Content-Type'] = contentType; + } + if (contentLength) { + requestOptions['Content-Length'] = contentLength; + } + if (contentRange) { + requestOptions['Content-Range'] = contentRange; + } + return requestOptions; +} +exports.getRequestOptions = getRequestOptions; +function createHttpClient() { + return new http_client_1.HttpClient('action/artifact', [ + new auth_1.BearerCredentialHandler(internal_config_variables_1.getRuntimeToken()) + ]); +} +exports.createHttpClient = createHttpClient; +function getArtifactUrl() { + const artifactUrl = `${internal_config_variables_1.getRuntimeUrl()}_apis/pipelines/workflows/${internal_config_variables_1.getWorkFlowRunId()}/artifacts?api-version=${getApiVersion()}`; + core_1.debug(`Artifact Url: ${artifactUrl}`); + return artifactUrl; +} +exports.getArtifactUrl = getArtifactUrl; +/** + * Invalid characters that cannot be in the artifact name or an uploaded file. Will be rejected + * from the server if attempted to be sent over. These characters are not allowed due to limitations with certain + * file systems such as NTFS. 
To maintain platform-agnostic behavior, all characters that are not supported by an + * individual filesystem/platform will not be supported on all fileSystems/platforms + */ +const invalidCharacters = ['\\', '/', '"', ':', '<', '>', '|', '*', '?', ' ']; +/** + * Scans the name of the item being uploaded to make sure there are no illegal characters + */ +function checkArtifactName(name) { + if (!name) { + throw new Error(`Artifact name: ${name}, is incorrectly provided`); + } + for (const invalidChar of invalidCharacters) { + if (name.includes(invalidChar)) { + throw new Error(`Artifact name is not valid: ${name}. Contains character: "${invalidChar}". Invalid characters include: ${invalidCharacters.toString()}.`); + } + } +} +exports.checkArtifactName = checkArtifactName; +function createDirectoriesForArtifact(directories) { + return __awaiter(this, void 0, void 0, function* () { + for (const directory of directories) { + yield fs_1.promises.mkdir(directory, { + recursive: true + }); + } + }); +} +exports.createDirectoriesForArtifact = createDirectoriesForArtifact; +//# sourceMappingURL=internal-utils.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/package.json b/node_modules/@actions/artifact/package.json new file mode 100644 index 000000000..fab71fd1d --- /dev/null +++ b/node_modules/@actions/artifact/package.json @@ -0,0 +1,42 @@ +{ + "name": "@actions/artifact", + "version": "0.1.0", + "preview": true, + "description": "Actions artifact lib", + "keywords": [ + "github", + "actions", + "artifact" + ], + "homepage": "https://github.com/actions/toolkit/tree/master/packages/artifact", + "license": "MIT", + "main": "lib/artifact-client.js", + "types": "lib/artifact-client.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib" + ], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/artifact" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --audit-level=moderate", + "test": "echo \"Error: run tests from root\" && exit 1", + "tsc": "tsc" + }, + "bugs": { + "url": "https://github.com/actions/toolkit/issues" + }, + "dependencies": { + "@actions/core": "^1.2.1", + "@actions/http-client": "^1.0.6" + } +} diff --git a/package.json b/package.json index ec8f96ac3..edf09969e 100644 --- a/package.json +++ b/package.json @@ -11,6 +11,7 @@ "test": "prettier -c src/** package.json README.md && tsc -p ." 
}, "dependencies": { + "@actions/artifact": "^0.1.0", "@actions/core": "^1.2.3", "@actions/exec": "^1.0.3", "@actions/github": "^2.1.1", diff --git a/src/input.js b/src/config.js similarity index 57% rename from src/input.js rename to src/config.js index 9b0a4e711..09cdaf8be 100644 --- a/src/input.js +++ b/src/config.js @@ -1,23 +1,23 @@ const core = require('@actions/core') -const { loadRcFile } = require('@lhci/utils/src/lighthouserc.js') +const { loadRcFile } = require('@lhci/utils/src/lighthouserc') +const { get } = require('lodash') + +exports.getInput = function getInputArgs() { + const uploadServerBaseUrl = core.getInput('upload.serverBaseUrl') + const uploadToken = core.getInput('upload.token') -exports.getInputArgs = function getInputArgs() { // Make sure we don't have LHCI xor API token - const uploadServerBaseUrl = getArg('upload.serverBaseUrl') - const uploadToken = getArg('upload.token') if (!!uploadServerBaseUrl != !!uploadToken) { // Fail and exit core.setFailed(`Need both a LHCI server url and an API token`) process.exit(1) } - let rcCollect = false - let rcAssert = false - let staticDistDir = undefined - let urls = undefined + let staticDistDir = null + let urls = null // Inspect lighthouserc file for malformations - const configPath = getArg('configPath') + const configPath = core.getInput('configPath') if (configPath) { const rcFileObj = loadRcFile(configPath) if (!rcFileObj.ci) { @@ -25,23 +25,21 @@ exports.getInputArgs = function getInputArgs() { core.setFailed(`Config missing top level 'ci' property`) process.exit(1) } - rcCollect = 'collect' in rcFileObj.ci - rcAssert = 'assert' in rcFileObj.ci // Check if we have a static-dist-dir if (rcFileObj.ci.collect) { - if ('url' in rcFileObj.ci.collect) { + if (rcFileObj.ci.collect.url) { urls = rcFileObj.ci.collect.url } - if ('staticDistDir' in rcFileObj.ci.collect) { + if (rcFileObj.ci.collect.staticDistDir) { staticDistDir = rcFileObj.ci.collect.staticDistDir } } } // Get and interpolate URLs - urls = urls || interpolateProcessIntoURLs(getList('urls')) + urls = urls || interpolateProcessIntoUrls(getList('urls')) // Make sure we have either urls or a static-dist-dir if (!urls && !staticDistDir) { @@ -58,50 +56,44 @@ exports.getInputArgs = function getInputArgs() { } return { + // collect urls, + runs: parseInt(core.getInput('runs'), 10) || 1, staticDistDir, - temporaryPublicStorage: getArg('temporaryPublicStorage') === 'true' ? true : false, - budgetPath: getArg('budgetPath'), - configPath, - slackWebhookUrl: getArg('slackWebhookUrl'), - runs: getIntArg('runs'), - githubToken: getArg('githubToken'), - gistUploadToken: getArg('gistUploadToken'), + // upload + gistUploadToken: core.getInput('gistUploadToken') || null, + temporaryPublicStorage: core.getInput('temporaryPublicStorage') === 'true' ? true : false, uploadServerBaseUrl, uploadToken, - rcCollect, - rcAssert + uploadArtifacts: core.getInput('upload.artifacts') !== 'false' ? true : false, + // assert + budgetPath: core.getInput('budgetPath') || null, + configPath, + slackWebhookUrl: core.getInput('slackWebhookUrl') || null, + githubToken: core.getInput('githubToken') || null } } /** - * Wrapper for core.getInput. + * Check if the file under `configPath` has `assert` params set. * - * @param {string} arg - * @return {string | undefined} + * @param {string} configPath */ -function getArg(arg) { - return core.getInput(arg) || undefined -} -/** - * Wrapper for core.getInput for a numeric input. 
- * - * @param {string} arg - * @return {number | undefined} - */ -function getIntArg(arg) { - return parseInt(core.getInput(arg)) || undefined +exports.hasAssertConfig = function hasAssertConfig(configPath) { + if (!configPath) return false + const rcFileObj = loadRcFile(configPath) + return Boolean(get(rcFileObj, 'ci.assert')) } /** * Wrapper for core.getInput for a list input. * * @param {string} arg - * @return {string[]} */ + function getList(arg, separator = '\n') { - const input = getArg(arg) + const input = core.getInput(arg) if (!input) return [] return input.split(separator).map(url => url.trim()) } @@ -111,9 +103,9 @@ function getList(arg, separator = '\n') { * any declared ENV vars into them * * @param {string[]} urls - * @return {string[]} */ -function interpolateProcessIntoURLs(urls) { + +function interpolateProcessIntoUrls(urls) { return urls.map(url => { if (!url.includes('$')) return url Object.keys(process.env).forEach(key => { diff --git a/src/index.js b/src/index.js index 608b9312a..4441bf499 100644 --- a/src/index.js +++ b/src/index.js @@ -1,23 +1,27 @@ -require('./support-lh-plugins') // add automatic support for LH Plugins env +require('./utils/support-lh-plugins') // add automatic support for LH Plugins env const core = require('@actions/core') const { join } = require('path') const { exec } = require('@actions/exec') -const lhciCliPath = require.resolve('@lhci/cli/src/cli.js') -const { getInputArgs } = require('./input.js') +const lhciCliPath = require.resolve('@lhci/cli/src/cli') +const { getInput, hasAssertConfig } = require('./config') +const { uploadArtifacts } = require('./utils/upload-artifacts') -// audit urls with Lighthouse CI +/** + * Audit urls with Lighthouse CI in 3 stages: + * 1. collect (using lhci collect or the custom PSI runner, store results as artifacts) + * 2. upload (upload results to LHCI Server, Temporary Public Storage, or Github Gist for more convinient preview) + * 3. assert (assert results and send notification if the build failed) + */ async function main() { - core.setOutput('resultsPath', join(process.cwd(), '.lighthouserc')) - let assertStatus = 0 core.startGroup('Action config') - const input = getInputArgs() + const input = getInput() core.info(`Input args: ${JSON.stringify(input, null, ' ')}`) core.endGroup() // Action config - /*******************************COLLECTING***********************************/ + /******************************* 1. COLLECT ***********************************/ core.startGroup(`Collecting`) - const collectArgs = ['collect'] + const collectArgs = ['collect', `--numberOfRuns=${input.runs}`] if (input.staticDistDir) { collectArgs.push(`--static-dist-dir=${input.staticDistDir}`) @@ -25,45 +29,21 @@ async function main() { for (const url of input.urls) { collectArgs.push(`--url=${url}`) } + } else { + // LHCI will panic with a non-zero exit code... } - // else LHCI will panic with a non-zero exit code... - - if (input.rcCollect) { - collectArgs.push(`--config=${input.configPath}`) - // This should only happen in local testing, when the default is not sent - } - - // Command line args should override config files - if (input.runs) { - collectArgs.push(`--numberOfRuns=${input.runs}`) - } - // else, no args and will default to 3 in LHCI. 
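// Illustration (assumed inputs, not part of the diff): with runs=1 and a single
// urls entry, the collect stage assembled above boils down to:
//   collectArgs = ['collect', '--numberOfRuns=1', '--url=https://example.com/']
//   await exec(lhciCliPath, collectArgs)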
+ if (input.configPath) collectArgs.push(`--config=${input.configPath}`) const collectStatus = await exec(lhciCliPath, collectArgs) if (collectStatus !== 0) throw new Error(`LHCI 'collect' has encountered a problem.`) - core.endGroup() // Collecting - /*******************************ASSERTING************************************/ - if (input.budgetPath || input.rcAssert) { - core.startGroup(`Asserting`) - const assertArgs = ['assert'] + const resultsPath = join(process.cwd(), '.lighthouserc') + core.setOutput('resultsPath', resultsPath) + if (input.uploadArtifacts) await uploadArtifacts(resultsPath) - if (input.budgetPath) { - assertArgs.push(`--budgetsFile=${input.budgetPath}`) - } else { - assertArgs.push(`--config=${input.configPath}`) - } - - assertStatus = await exec(lhciCliPath, assertArgs) - if (assertStatus !== 0) { - // TODO(exterkamp): Output what urls failed and record a nice rich error. - core.setFailed(`Assertions have failed.`) - } - - core.endGroup() // Asserting - } + core.endGroup() // Collecting - /*******************************UPLOADING************************************/ + /******************************* 2. UPLOAD ************************************/ if (input.uploadServerBaseUrl || input.temporaryPublicStorage || input.gistUploadToken) { core.startGroup(`Uploading`) @@ -89,9 +69,28 @@ async function main() { core.endGroup() // Uploading } - /*******************************NOTIFYING************************************/ - if ((input.githubToken || input.slackWebhookUrl) && assertStatus > 0) { - // TODO(alekseykulikov): handle notifications + /******************************* 3. ASSERT ************************************/ + if (input.budgetPath || hasAssertConfig(input.configPath)) { + core.startGroup(`Asserting`) + const assertArgs = ['assert'] + + if (input.budgetPath) { + assertArgs.push(`--budgetsFile=${input.budgetPath}`) + } else { + assertArgs.push(`--config=${input.configPath}`) + } + + const assertStatus = await exec(lhciCliPath, assertArgs) + if (assertStatus !== 0) { + // TODO(exterkamp): Output what urls failed and record a nice rich error. 
+ core.setFailed(`Assertions have failed.`) + } + + if ((input.githubToken || input.slackWebhookUrl) && assertStatus !== 0) { + // TODO(alekseykulikov): handle notifications + } + + core.endGroup() // Asserting } } diff --git a/src/types.d.ts b/src/types.d.ts deleted file mode 100644 index 888a5d94a..000000000 --- a/src/types.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -declare module '@lhci/utils/src/lighthouserc.js' { - export function loadRcFile(path: string): { ci?: { collect?: { url?: string; staticDistDir?: string } } } -} diff --git a/src/support-lh-plugins.js b/src/utils/support-lh-plugins.js similarity index 100% rename from src/support-lh-plugins.js rename to src/utils/support-lh-plugins.js diff --git a/src/utils/upload-artifacts.js b/src/utils/upload-artifacts.js new file mode 100644 index 000000000..85553e8fb --- /dev/null +++ b/src/utils/upload-artifacts.js @@ -0,0 +1,12 @@ +const artifact = require('@actions/artifact') +const fs = require('fs') +const { promisify } = require('util') +const readdir = promisify(fs.readdir) + +/** @param {string} rootDirectory */ +exports.uploadArtifacts = async function uploadArtifacts(rootDirectory) { + const artifactClient = artifact.create() + const artifactName = 'lighthouse-results' + const files = await readdir(rootDirectory) + return artifactClient.uploadArtifact(artifactName, files, rootDirectory, { continueOnError: true }) +} diff --git a/tsconfig.json b/tsconfig.json index 93e981eb3..a7209a623 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -13,5 +13,5 @@ "noUnusedParameters": true, "esModuleInterop": true }, - "include": ["src"] + "include": ["src", "types"] } diff --git a/types/action.d.ts b/types/action.d.ts new file mode 100644 index 000000000..673a5ba17 --- /dev/null +++ b/types/action.d.ts @@ -0,0 +1,3 @@ +declare module '@lhci/utils/src/lighthouserc' { + export function loadRcFile(path: string): { ci?: { collect?: { url?: [string]; staticDistDir?: string } } } +} diff --git a/yarn.lock b/yarn.lock index 30c56e3a7..ae4d27a92 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2,7 +2,15 @@ # yarn lockfile v1 -"@actions/core@^1.2.3": +"@actions/artifact@^0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@actions/artifact/-/artifact-0.1.0.tgz#ba13f47105b2729e5563c4963eb31a4e4c01cb87" + integrity sha512-UOGv9FXfNuzs7v9JzX++h2j2/0lqhKp2yso0LwbaNsUTPyemLr5v4BUZj5R7d292jQvpuhh/+Psgwepc3g/DDg== + dependencies: + "@actions/core" "^1.2.1" + "@actions/http-client" "^1.0.6" + +"@actions/core@^1.2.1", "@actions/core@^1.2.3": version "1.2.3" resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.2.3.tgz#e844b4fa0820e206075445079130868f95bfca95" integrity sha512-Wp4xnyokakM45Uuj4WLUxdsa8fJjKVl1fDTsPbTEcTcuu0Nb26IPQbOtjmnfaCPGcaoPOOqId8H9NapZ8gii4w== @@ -23,7 +31,7 @@ "@octokit/graphql" "^4.3.1" "@octokit/rest" "^16.43.1" -"@actions/http-client@^1.0.3": +"@actions/http-client@^1.0.3", "@actions/http-client@^1.0.6": version "1.0.6" resolved "https://registry.yarnpkg.com/@actions/http-client/-/http-client-1.0.6.tgz#6f9267ca50e1d74d8581f4a894a943cd4c97b49a" integrity sha512-LGmio4w98UyGX33b/W6V6Nx/sQHRXZ859YlMkn36wPsXPB82u8xTVlA/Dq2DXrm6lEq9RVmisRJa1c+HETAIJA== From 331c8a3e7f5dbd948a0e234eb8ba0d9ac9358b34 Mon Sep 17 00:00:00 2001 From: Aleksey Kulikov Date: Mon, 9 Mar 2020 18:24:50 +0100 Subject: [PATCH 06/19] don't use upload namespace --- .github/workflows/LHCI-upload-artifact.yml | 14 ------- .../LHCI-upload-to-private-server.yml | 5 ++- README.md | 38 +++++++++---------- action.yml | 9 +++-- src/config.js | 13 ++++--- src/index.js | 8 
++-- 6 files changed, 37 insertions(+), 50 deletions(-) delete mode 100644 .github/workflows/LHCI-upload-artifact.yml diff --git a/.github/workflows/LHCI-upload-artifact.yml b/.github/workflows/LHCI-upload-artifact.yml deleted file mode 100644 index a3328d6a6..000000000 --- a/.github/workflows/LHCI-upload-artifact.yml +++ /dev/null @@ -1,14 +0,0 @@ -# This is a basic fully hermetic run of Lighthouse, that saves results as run artifacts. -name: LHCI-upload-artifact -on: push -jobs: - hermetic-basic: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v1 - - name: Run Lighthouse on urls - uses: ./ - with: - urls: | - https://treo.sh/ - https://treo.sh/demo diff --git a/.github/workflows/LHCI-upload-to-private-server.yml b/.github/workflows/LHCI-upload-to-private-server.yml index 24f6cb310..51b4e0eae 100644 --- a/.github/workflows/LHCI-upload-to-private-server.yml +++ b/.github/workflows/LHCI-upload-to-private-server.yml @@ -11,5 +11,6 @@ jobs: with: urls: | https://treo.sh/ - upload.serverBaseUrl: ${{ secrets.LHCI_SERVER }} - upload.token: ${{ secrets.LHCI_TOKEN }} + serverBaseUrl: ${{ secrets.LHCI_SERVER_BASE_URL }} + serverToken: ${{ secrets.LHCI_SERVER_TOKEN }} + uploadArtifacts: false diff --git a/README.md b/README.md index a6888907e..01c50547f 100644 --- a/README.md +++ b/README.md @@ -40,8 +40,6 @@ jobs: githubToken: ${{ secrets.GITHUB_TOKEN }} # set action status with details about runs ``` -[⚙️ See this workflow in use](https://github.com/treosh/lighthouse-ci-action/actions?workflow=LHCI-upload-artifact) - **Advanced example**: run Lighthouse audit for each unique deployment, test performance budgets, and save results to the [public storage](https://github.com/GoogleChrome/lighthouse-ci/blob/master/docs/cli.md#upload) for a quick debugging. URLs support interpolation of process env vars so that you can write URLs like: @@ -82,7 +80,7 @@ urls: | Upload reports to the [_temporary public storage_](https://github.com/GoogleChrome/lighthouse-ci/blob/master/docs/getting-started.md#collect-lighthouse-results). > **Note**: As the name implies, this is temporary and public storage. If you're uncomfortable with the idea of your Lighthouse reports being stored -> on a public URL on Google Cloud, use a private [LHCI server](#upload) or [Gist](). Reports are automatically deleted 7 days after upload. +> on a public URL on Google Cloud, use a private [LHCI server](#serverBaseUrl) or [Gist](#gistUploadToken). Reports are automatically deleted 7 days after upload. ```yml temporaryPublicStorage: true @@ -138,23 +136,25 @@ Use `lighthouserc` to configure the collection of data (via Lighthouse config an configPath: ./lighthouserc.json ``` -#### `upload` +#### `uploadArtifacts` (default: true) -Upload Lighthouse results to a private [LHCI server](https://github.com/GoogleChrome/lighthouse-ci) by specifying both `upload.serverBaseUrl` and `upload.token`. -This will replace uploading to `temporary-public-storage`. - -> **Note**: Use [Github secrets](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets#creating-encrypted-secrets) to keep your server address hidden! +By default, action automatically uploads Lighthouse results as an artifact. +Use `uploadArtifacts: false` to disable this behavior, for example, in case of using an LHCI Server. 
```yml -upload.serverBaseUrl: ${{ secrets.LHCI_SERVER }} -upload.token: ${{ secrets.LHCI_TOKEN }} +uploadArtifacts: false ``` -By default, the action automatically upload Lighthouse results as an artifact. -Set `upload.artifacts: false` to disable this behavior, for example, in case of using an LHCI Server. +#### `serverBaseUrl` + +Upload Lighthouse results to a private [LHCI server](https://github.com/GoogleChrome/lighthouse-ci) by specifying both `serverBaseUrl` and `serverToken`. +This will replace uploading to `temporary-public-storage`. + +> **Note**: Use [Github secrets](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets#creating-encrypted-secrets) to keep your server address hidden! ```yml -upload.artifacts: false +serverBaseUrl: ${{ secrets.LHCI_SERVER_BASE_URL }} +serverToken: ${{ secrets.LHCI_SERVER_TOKEN }} ``` #### `gistUploadToken` @@ -345,8 +345,9 @@ jobs: uses: treosh/lighthouse-ci-action@v2 with: urls: 'https://example.com/' - upload.serverBaseUrl: ${{ secrets.LHCI_SERVER }} - upload.token: ${{ secrets.LHCI_API_TOKEN }} + serverBaseUrl: ${{ secrets.LHCI_SERVER_BASE_URL }} + serverToken: ${{ secrets.LHCI_SERVER_TOKEN }} + uploadArtifacts: false # don't store artifacts as a part of action ``` Lighthouse CI Action @@ -478,7 +479,7 @@ against each of them. More details on this process are in the [Lighthouse CI doc
- Use a Lighthouse plugin.
+ Use with a Lighthouse plugin.
#### main.yml @@ -497,11 +498,6 @@ jobs: urls: | https://www.example.com/ configPath: ./lighthouserc.json - - name: Save results - uses: actions/upload-artifact@v1 - with: - name: lighthouse-results - path: '.lighthouseci' # This will save the Lighthouse results as .json files ``` #### lighthouserc.json diff --git a/action.yml b/action.yml index 8ba8bde81..dc34ff323 100644 --- a/action.yml +++ b/action.yml @@ -5,7 +5,7 @@ inputs: description: 'List of URL(s) to analyze' runs: description: 'Number of runs to do per URL' - default: '1' + default: 1 budgetPath: description: 'Path to a Lighthouse budgets.json file' configPath: @@ -19,9 +19,12 @@ inputs: description: 'Shared github token to create a debug check' gistUploadToken: description: 'GitHub access token to upload results to Gist' - upload.serverBaseUrl: + serverBaseUrl: description: 'Address of a LHCI server' - upload.token: + uploadArtifacts: + description: 'Opt-out of saving Lighthouse results as an action artifacts' + default: true + serverToken: description: 'API token to push to LHCI server' outputs: resultsPath: diff --git a/src/config.js b/src/config.js index 09cdaf8be..5d74c7928 100644 --- a/src/config.js +++ b/src/config.js @@ -3,11 +3,12 @@ const { loadRcFile } = require('@lhci/utils/src/lighthouserc') const { get } = require('lodash') exports.getInput = function getInputArgs() { - const uploadServerBaseUrl = core.getInput('upload.serverBaseUrl') - const uploadToken = core.getInput('upload.token') + // fallback to upload.serverBaseUrl + upload.token for previous API support + const serverBaseUrl = core.getInput('serverBaseUrl') || core.getInput('upload.serverBaseUrl') + const serverToken = core.getInput('serverToken') || core.getInput('upload.token') // Make sure we don't have LHCI xor API token - if (!!uploadServerBaseUrl != !!uploadToken) { + if (!!serverBaseUrl != !!serverToken) { // Fail and exit core.setFailed(`Need both a LHCI server url and an API token`) process.exit(1) @@ -63,9 +64,9 @@ exports.getInput = function getInputArgs() { // upload gistUploadToken: core.getInput('gistUploadToken') || null, temporaryPublicStorage: core.getInput('temporaryPublicStorage') === 'true' ? true : false, - uploadServerBaseUrl, - uploadToken, - uploadArtifacts: core.getInput('upload.artifacts') !== 'false' ? true : false, + serverBaseUrl, + serverToken, + uploadArtifacts: core.getInput('uploadArtifacts') !== 'false' ? true : false, // assert budgetPath: core.getInput('budgetPath') || null, configPath, diff --git a/src/index.js b/src/index.js index 4441bf499..5f6dd30d4 100644 --- a/src/index.js +++ b/src/index.js @@ -44,15 +44,15 @@ async function main() { core.endGroup() // Collecting /******************************* 2. 
UPLOAD ************************************/
-  if (input.uploadServerBaseUrl || input.temporaryPublicStorage || input.gistUploadToken) {
+  if (input.serverToken || input.temporaryPublicStorage || input.gistUploadToken) {
     core.startGroup(`Uploading`)

-    if (input.uploadServerBaseUrl) {
+    if (input.serverToken) {
       const uploadStatus = await exec(lhciCliPath, [
         'upload',
         '--target=lhci',
-        `--serverBaseUrl=${input.uploadServerBaseUrl}`,
-        `--token=${input.uploadToken}`
+        `--serverBaseUrl=${input.serverBaseUrl}`,
+        `--token=${input.serverToken}`
       ])
       if (uploadStatus !== 0) throw new Error(`LHCI 'upload' failed to upload to LHCI server.`)
     }

From 6fae67489df669ca097c352255127ab8671037dc Mon Sep 17 00:00:00 2001
From: Aleksey Kulikov
Date: Tue, 10 Mar 2020 10:15:31 +0100
Subject: [PATCH 07/19] uploadArtifacts is not a default

---
 README.md     | 62 ++++++++++++++++++++++++++-------------------------
 src/config.js |  2 +-
 2 files changed, 33 insertions(+), 31 deletions(-)

diff --git a/README.md b/README.md
index 01c50547f..ed7e82501 100644
--- a/README.md
+++ b/README.md
@@ -17,13 +17,12 @@

 ## Examples

-Run Lighthouse on each push to the repo and attach results to the action.
+**Basic example**: run Lighthouse on each push to the repo and save results as action artifacts.

 Create `.github/workflows/main.yml` with the list of URLs to audit using Lighthouse.
-Provide `githubToken` and `temporaryPublicStorage` to automatically attach results to the action for quick debuging.

 ```yml
-name: Lighthouse
+name: Lighthouse CI
 on: push
 jobs:
   lighthouse:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v1
@@ -36,31 +35,35 @@ jobs:
           urls: |
             https://example.com/
             https://example.com/blog
-          temporaryPublicStorage: true # (optional) save Lighthouse results for a quick preview
-          githubToken: ${{ secrets.GITHUB_TOKEN }} # set action status with details about runs
+          uploadArtifacts: true # save results as artifacts
 ```

-**Advanced example**: run Lighthouse audit for each unique deployment, test performance budgets, and save results to the [public storage](https://github.com/GoogleChrome/lighthouse-ci/blob/master/docs/cli.md#upload) for a quick debugging.
+**Advanced example**: run Lighthouse audit for each commit, test performance budgets, and get a detailed error report with results saved in the [public storage](https://github.com/GoogleChrome/lighthouse-ci/blob/master/docs/cli.md#upload) for quick debugging.
-URLs support interpolation of process env vars so that you can write URLs like:
+URLs support interpolation of process env variables so that you can write URLs like:

 ```yml
-- name: Run Lighthouse and test budgets
-  uses: treosh/lighthouse-ci-action@v2
-  with:
-    urls: |
-      https://pr-$PR_NUMBER.staging-example.com/
-      https://pr-$PR_NUMBER.staging-example.com/blog
-    budgetPath: ./budgets.json
-    temporaryPublicStorage: true
-  env:
-    PR_NUMBER: ${{ github.event.pull_request.number }}
+name: Lighthouse CI
+on: push
+jobs:
+  lighthouse:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v1
+      - name: Run Lighthouse and test budgets
+        uses: treosh/lighthouse-ci-action@v2
+        with:
+          urls: |
+            https://pr-$PR_NUMBER.staging-example.com/
+            https://pr-$PR_NUMBER.staging-example.com/blog
+          budgetPath: ./budgets.json
+          temporaryPublicStorage: true
+          githubToken: ${{ secrets.GITHUB_TOKEN }}
+          slackWebhookUrl: ${{ secrets.SLACK_WEBHOOK_URL }}
+        env:
+          PR_NUMBER: ${{ github.event.pull_request.number }}
 ```

-[⚙️ See this workflow in use](https://github.com/treosh/lighthouse-ci-action/actions?workflow=LHCI-urls-interpolation)
-
-> **Note**: to view the reports download the JSON files from the artifacts and open them with the [Lighthouse Viewer App](https://googlechrome.github.io/lighthouse/viewer/) or follow the `temporary-public-storage` link printed in the action.
-
 ## Inputs

 #### `urls`
@@ -95,6 +98,14 @@ By default for Action environment it's allowed via `${{ secrets.GITHUB_TOKEN }}`
 githubToken: ${{ secrets.GITHUB_TOKEN }}
 ```

+#### `uploadArtifacts` (default: false)
+
+Upload Lighthouse results as [action artifacts](https://help.github.com/en/actions/configuring-and-managing-workflows/persisting-workflow-data-using-artifacts) to persist results. It's a shortcut to using [`actions/upload-artifact`](https://github.com/actions/upload-artifact).
+
+```yml
+uploadArtifacts: true
+```
+
 ### slackWebhookUrl

 Allows to send notification in [Slack](https://slack.com/intl/en-ua/) channel.
@@ -136,15 +147,6 @@ Use `lighthouserc` to configure the collection of data (via Lighthouse config an
 configPath: ./lighthouserc.json
 ```

-#### `uploadArtifacts` (default: true)
-
-By default, action automatically uploads Lighthouse results as an artifact.
-Use `uploadArtifacts: false` to disable this behavior, for example, in case of using an LHCI Server.
-
-```yml
-uploadArtifacts: false
-```
-
 #### `serverBaseUrl`
diff --git a/src/config.js b/src/config.js
index 5d74c7928..eb76e6cd1 100644
--- a/src/config.js
+++ b/src/config.js
@@ -66,7 +66,7 @@ exports.getInput = function getInputArgs() {
   temporaryPublicStorage: core.getInput('temporaryPublicStorage') === 'true' ? true : false,
   serverBaseUrl,
   serverToken,
-  uploadArtifacts: core.getInput('uploadArtifacts') !== 'false' ? true : false,
+  uploadArtifacts: core.getInput('uploadArtifacts') === 'true' ? true : false,
   // assert
   budgetPath: core.getInput('budgetPath') || null,
   configPath,
   slackWebhookUrl: core.getInput('slackWebhookUrl') || null,
   githubToken: core.getInput('githubToken') || null
 }

From 7f0d577a559338bbdb219a5177d4a4d7450d5f8b Mon Sep 17 00:00:00 2001
From: Aleksey Kulikov
Date: Tue, 10 Mar 2020 11:25:25 +0100
Subject: [PATCH 08/19] try `uploadUrlMap`, but it doesn't seem to work

---
 src/index.js | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/src/index.js b/src/index.js
index 5f6dd30d4..976292065 100644
--- a/src/index.js
+++ b/src/index.js
@@ -58,7 +58,11 @@ async function main() {
   }

   if (input.gistUploadToken) {
-    const uploadStatus = await exec(lhciCliPath, ['upload', '--target=temporary-public-storage'])
+    const uploadStatus = await exec(lhciCliPath, [
+      'upload',
+      '--target=temporary-public-storage',
+      '--uploadUrlMap=true'
+    ])
     if (uploadStatus !== 0) throw new Error(`LHCI 'upload' failed to upload to temporary public storage.`)
   }

From e9fe51fc98e6496774278a6702e97f1b7cbe2edf Mon Sep 17 00:00:00 2001
From: Aleksey Kulikov
Date: Tue, 10 Mar 2020 16:19:45 +0100
Subject: [PATCH 09/19] many changes:

- remove gistUploadToken
- move upload after assert to support action status set
- extract slack/github utils from output
- explicitly use @lhci/utils

---
 README.md                                  |  11 -
 action.yml                                 |   2 -
 package.json                               |   1 +
 src/config.js                              |   2 +-
 src/index.js                               |  95 ++++---
 .../{upload-artifacts.js => artifacts.js}   |   6 +-
 src/{output.js => utils/github.js}         | 253 +++++------------------
 src/utils/slack.js                         |  46 ++++
 yarn.lock                                  |   2 +-
 9 files changed, 173 insertions(+), 245 deletions(-)
 rename src/utils/{upload-artifacts.js => artifacts.js} (60%)
 rename src/{output.js => utils/github.js} (53%)
 create mode 100644 src/utils/slack.js

diff --git a/README.md b/README.md
index ed7e82501..39f306e8e 100644
--- a/README.md
+++ b/README.md
@@ -159,17 +159,6 @@
 serverBaseUrl: ${{ secrets.LHCI_SERVER_BASE_URL }}
 serverToken: ${{ secrets.LHCI_SERVER_TOKEN }}
 ```

-#### `gistUploadToken`
-
-Specify an API token for the LHCI server. [Learn how to generate a token](https://github.com/GoogleChrome/lighthouse-ci/blob/master/docs/getting-started.md#historical-reports--diffing-lighthouse-ci-server).
-
-Personal Github token to allow Action upload results to your secret [gist](https://help.github.com/en/enterprise/2.13/user/articles/about-gists) and provide report link directly in notification.
-Action will upload results to your gist, get gist id and compose url report using [Lighthouse Report Viewer](https://googlechrome.github.io/lighthouse/viewer/).
-
-```yml
-gistUploadToken: ${{ secrets.GIST_UPLOAD_TOKEN }}
-```
-
 > **Note**: Use [Github secrets](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets#creating-encrypted-secrets) to keep your token hidden!
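For context, here is a minimal sketch of how these upload inputs reach the LHCI CLI. It mirrors the upload stage in `src/index.js` from this series; the standalone function and the `input` object shape are assumptions for illustration only:

```js
// Sketch: forward serverBaseUrl/serverToken to `lhci upload --target=lhci`.
// Mirrors src/index.js in this patch series; not a drop-in module.
const { exec } = require('@actions/exec')
const lhciCliPath = require.resolve('@lhci/cli/src/cli')

async function uploadToLhciServer(input) {
  // The action validates earlier that both values are set together.
  const status = await exec(lhciCliPath, [
    'upload',
    '--target=lhci',
    `--serverBaseUrl=${input.serverBaseUrl}`,
    `--token=${input.serverToken}`
  ])
  if (status !== 0) throw new Error(`LHCI 'upload' failed to upload to LHCI server.`)
}
```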
 ## Recipes
diff --git a/action.yml b/action.yml
index dc34ff323..5e52d4e8d 100644
--- a/action.yml
+++ b/action.yml
@@ -17,8 +17,6 @@ inputs:
     description: 'Slack webhook url to send failed results to Slack'
   githubToken:
     description: 'Shared github token to create a debug check'
-  gistUploadToken:
-    description: 'GitHub access token to upload results to Gist'
   serverBaseUrl:
     description: 'Address of a LHCI server'
   uploadArtifacts:
diff --git a/package.json b/package.json
index edf09969e..43e9714ac 100644
--- a/package.json
+++ b/package.json
@@ -16,6 +16,7 @@
   "@actions/exec": "^1.0.3",
   "@actions/github": "^2.1.1",
   "@lhci/cli": "0.3.9",
+  "@lhci/utils": "^0.3.9",
   "@slack/webhook": "^5.0.2",
   "is-windows": "^1.0.2",
   "lodash": "^4.17.15"
diff --git a/src/config.js b/src/config.js
index eb76e6cd1..e7e94225a 100644
--- a/src/config.js
+++ b/src/config.js
@@ -62,7 +62,6 @@ exports.getInput = function getInputArgs() {
   runs: parseInt(core.getInput('runs'), 10) || 1,
   staticDistDir,
   // upload
-  gistUploadToken: core.getInput('gistUploadToken') || null,
   temporaryPublicStorage: core.getInput('temporaryPublicStorage') === 'true' ? true : false,
   serverBaseUrl,
   serverToken,
@@ -70,6 +69,7 @@ exports.getInput = function getInputArgs() {
   // assert
   budgetPath: core.getInput('budgetPath') || null,
   configPath,
+  // notify
   slackWebhookUrl: core.getInput('slackWebhookUrl') || null,
   githubToken: core.getInput('githubToken') || null
 }
diff --git a/src/index.js b/src/index.js
index 976292065..f3d3a015e 100644
--- a/src/index.js
+++ b/src/index.js
@@ -4,17 +4,21 @@
 const { join } = require('path')
 const { exec } = require('@actions/exec')
 const lhciCliPath = require.resolve('@lhci/cli/src/cli')
 const { getInput, hasAssertConfig } = require('./config')
-const { uploadArtifacts } = require('./utils/upload-artifacts')
+const { uploadArtifacts } = require('./utils/artifacts')
+const { createGithubCheck } = require('./utils/github')
+const { sendSlackNotification } = require('./utils/slack')

 /**
- * Audit urls with Lighthouse CI in 3 stages:
+ * Audit urls with Lighthouse CI in 4 stages:
  * 1. collect (using lhci collect or the custom PSI runner, store results as artifacts)
- * 2. upload (upload results to LHCI Server, Temporary Public Storage, or Github Gist for more convinient preview)
- * 3. assert (assert results and send notification if the build failed)
+ * 2. assert (assert results using budgets or LHCI assertions)
+ * 3. upload (upload results to LHCI Server, Temporary Public Storage)
+ * 4. notify (create github check or send slack notification)
  */

 async function main() {
   core.startGroup('Action config')
+  const resultsPath = join(process.cwd(), '.lighthouserc')
   const input = getInput()
   core.info(`Input args: ${JSON.stringify(input, null, ' ')}`)
   core.endGroup() // Action config
@@ -37,43 +41,13 @@ async function main() {
   const collectStatus = await exec(lhciCliPath, collectArgs)
   if (collectStatus !== 0) throw new Error(`LHCI 'collect' has encountered a problem.`)

-  const resultsPath = join(process.cwd(), '.lighthouserc')
-  core.setOutput('resultsPath', resultsPath)
+  // upload artifacts as soon as collected
   if (input.uploadArtifacts) await uploadArtifacts(resultsPath)

   core.endGroup() // Collecting

-  /******************************* 2. UPLOAD ************************************/
-  if (input.serverToken || input.temporaryPublicStorage || input.gistUploadToken) {
-    core.startGroup(`Uploading`)
-
-    if (input.serverToken) {
-      const uploadStatus = await exec(lhciCliPath, [
-        'upload',
-        '--target=lhci',
-        `--serverBaseUrl=${input.serverBaseUrl}`,
-        `--token=${input.serverToken}`
-      ])
-      if (uploadStatus !== 0) throw new Error(`LHCI 'upload' failed to upload to LHCI server.`)
-    }
-
-    if (input.gistUploadToken) {
-      const uploadStatus = await exec(lhciCliPath, [
-        'upload',
-        '--target=temporary-public-storage',
-        '--uploadUrlMap=true'
-      ])
-      if (uploadStatus !== 0) throw new Error(`LHCI 'upload' failed to upload to temporary public storage.`)
-    }
-
-    if (input.gistUploadToken) {
-      // TODO(alekseykulikov): upload to gists
-    }
-
-    core.endGroup() // Uploading
-  }
-
-  /******************************* 3. ASSERT ************************************/
+  /******************************* 2. ASSERT ************************************/
+  let isAssertFailed = false
   if (input.budgetPath || hasAssertConfig(input.configPath)) {
     core.startGroup(`Asserting`)
     const assertArgs = ['assert']
@@ -85,16 +59,53 @@ async function main() {
     }

     const assertStatus = await exec(lhciCliPath, assertArgs)
-    if (assertStatus !== 0) {
-      // TODO(exterkamp): Output what urls failed and record a nice rich error.
-      core.setFailed(`Assertions have failed.`)
-    }
+    isAssertFailed = assertStatus !== 0
+    core.endGroup() // Asserting
+  }
+
+  /******************************* 3. UPLOAD ************************************/
+  if (input.serverToken || input.temporaryPublicStorage) {
+    core.startGroup(`Uploading`)
+    const uploadParams = ['upload']
+    if (input.githubToken) uploadParams.push(`--githubToken=${input.githubToken}`)
+
+    if (input.serverToken) {
+      uploadParams.push('--target=lhci', `--serverBaseUrl=${input.serverBaseUrl}`, `--token=${input.serverToken}`)
+    } else if (input.temporaryPublicStorage) {
+      uploadParams.push('--target=temporary-public-storage', '--uploadUrlMap=true')
+    }
+
+    const uploadStatus = await exec(lhciCliPath, uploadParams)
+    if (uploadStatus !== 0) throw new Error(`LHCI 'upload' failed to upload to LHCI server.`)
+
+    core.endGroup() // Uploading
+  }
+
+  /******************************* 4.
NOTIFY ************************************/ + if (input.githubToken || input.slackWebhookUrl) { + core.startGroup(`Notifying`) + if (input.githubToken) { + await createGithubCheck({ + githubToken: input.githubToken, + isSuccess: !isAssertFailed + }) } - if ((input.githubToken || input.slackWebhookUrl) && assertStatus !== 0) { - // TODO(alekseykulikov): handle notifications + // send slack notification only on error + if (input.slackWebhookUrl && isAssertFailed) { + await sendSlackNotification({ + slackWebhookUrl: input.slackWebhookUrl, + isSuccess: !isAssertFailed, + resultsPath + }) } - core.endGroup() // Asserting + core.endGroup() // Notifying + } + + // set failing exit code for the action + if (isAssertFailed) { + core.setFailed(`Assertions have failed.`) } } diff --git a/src/utils/upload-artifacts.js b/src/utils/artifacts.js similarity index 60% rename from src/utils/upload-artifacts.js rename to src/utils/artifacts.js index 85553e8fb..6d90445df 100644 --- a/src/utils/upload-artifacts.js +++ b/src/utils/artifacts.js @@ -1,12 +1,10 @@ const artifact = require('@actions/artifact') const fs = require('fs') -const { promisify } = require('util') -const readdir = promisify(fs.readdir) /** @param {string} rootDirectory */ -exports.uploadArtifacts = async function uploadArtifacts(rootDirectory) { +exports.uploadArtifacts = function uploadArtifacts(rootDirectory) { const artifactClient = artifact.create() const artifactName = 'lighthouse-results' - const files = await readdir(rootDirectory) + const files = fs.readdirSync(rootDirectory) return artifactClient.uploadArtifact(artifactName, files, rootDirectory, { continueOnError: true }) } diff --git a/src/output.js b/src/utils/github.js similarity index 53% rename from src/output.js rename to src/utils/github.js index 1b347caed..78fb953a4 100644 --- a/src/output.js +++ b/src/utils/github.js @@ -1,229 +1,118 @@ const { groupBy, find, get, isEmpty, head } = require('lodash') -const { IncomingWebhook } = require('@slack/webhook') const github = require('@actions/github') const { readFile, readdirSync, existsSync } = require('fs') const { promisify } = require('util') const { join } = require('path') -const input = require('./input') - const pReadFile = promisify(readFile) -/** - * @typedef {Object} ChangesURL - * @property {string} sha - * @property {string} pullRequest - * - * @typedef {Object} Gist - * @property {string} [id] - * @property {string} [sha] - * @property {string} [url] - * - * @typedef {Object} LHResult - * @property {string} auditId - * @property {string} auditProperty - * @property {string} auditTitle - * @property {string} expected - * @property {string} operator - * @property {string} actual - * @property {string} url - * - * @tod fix the type - * @typedef {*} LHResultsByURL - */ +/** @typedef {{ sha: string, pullRequest: string }} ChangesURL */ +/** @typedef {{ id: string, sha: string, url: string }} Gist */ +/** @typedef {{ auditId: string, auditProperty: string, auditTitle: string, operator: string, expected: string, actual: string, url: string }} LHResult */ -/** @type {string} */ -const githubRepo = get(process.env, 'GITHUB_REPOSITORY', '') -const githubSHA = get(process.env, 'GITHUB_SHA', '') +const githubRepo = process.env.GITHUB_REPOSITORY || '' +const githubSha = process.env.GITHUB_SHA || '' const reportTitle = 'Lighthouse CI Action' const resultsDirPath = join(process.cwd(), '.lighthouseci') const lhAssertResultsPath = join(resultsDirPath, 'assertion-results.json') -/** - * @param {{ status: number }} params - */ 
-async function sendNotifications({ status }) { - try { - const { slackWebhookUrl, githubToken, gistUploadToken } = input - - if (!status) { - return Promise.resolve() - } - - const slackEnabled = slackWebhookUrl - const githubEnabled = githubToken - - /** - * @type {[ LHResultsByURL, ChangesURL, Gist[] ]} - */ - const [groupedResults, changesURL, gists] = await Promise.all([ - getGroupedAssertionResultsByURL(), - getChangesUrl({ githubToken: gistUploadToken }), - // keep uploading as part of Promise all instead of separate request - uploadResultsToGist({ githubToken: gistUploadToken }) - ]) - - const slackData = { status, slackWebhookUrl, changesURL, gists, groupedResults } - const githubData = { status, githubToken: githubToken, changesURL, gists, groupedResults } - - if (githubEnabled) { - try { - await githubNotification(githubData) - console.log('Github notification successfully sent') - } catch (e) { - console.log('Failed to send Github notification', e) - } - } - - if (slackEnabled) { - try { - await slackNotification(slackData) - console.log('Slack notification successfully sent') - } catch (e) { - console.log('Failed to send Slack notification', e) - } - } - } catch (e) { - console.log(e) - throw e - } -} +// https://user-images.githubusercontent.com/158189/76324191-ef4c2880-62e5-11ea-8bf1-ac5ff7571eef.png /** - * @param {{status: number, slackWebhookUrl?: string, changesURL: ChangesURL, gists: Gist[], groupedResults: LHResultsByURL }} params - * @return {Promise<*>} + * Send notifications. + * + * @param {{ githubToken: string, isSuccess: boolean }} params */ -async function slackNotification({ status, slackWebhookUrl = '', changesURL, groupedResults, gists }) { - console.log('Running Slack notification') - const webhook = new IncomingWebhook(slackWebhookUrl, { - icon_url: 'https://user-images.githubusercontent.com/54980164/75099367-8bc5b980-55c9-11ea-9e10-2a6ee69e8e70.png' - }) - const color = status === 0 ? 'good' : 'danger' - const conclusion = status === 0 ? 'success' : 'failure' - const changesTitle = changesURL.pullRequest - ? `Pull Request ${conclusion} - <${changesURL.pullRequest} | View on GitHub>` - : `Changes ${conclusion} - <${changesURL.sha} | View SHA Changes>` - const attachments = formatAssertResults({ groupedResults, status, gists }) - - return webhook.send({ - attachments: [ - { - pretext: reportTitle, - title: changesTitle, - color - }, - ...attachments - ] - }) -} +exports.createGithubCheck = async function sendNotifications({ githubToken, isSuccess }) { + const [groupedResults, changesURL, gists] = await Promise.all([getGroupedAssertionResultsByUrl()]) -/** - * @param {{status: number, githubToken?: string, changesURL: ChangesURL, gists: Gist[], groupedResults: LHResultsByURL }} params - * @return {Promise<*>} - */ -async function githubNotification({ status, githubToken = '', changesURL, gists, groupedResults }) { console.log('Running Github notification') const octokit = new github.GitHub(githubToken) const checkBody = { owner: githubRepo.split('/')[0], repo: githubRepo.split('/')[1], - head_sha: githubSHA, + head_sha: githubSha, name: reportTitle, status: /** @type {'completed'} */ ('completed'), - conclusion: /** @type {'success' | 'failure'} */ (status === 0 ? 'success' : 'failure'), + conclusion: /** @type {'success' | 'failure'} */ (isSuccess ? 
'success' : 'failure'), output: getSummaryMarkdownOutput({ status, changesURL, groupedResults, gists }) } - await octokit.checks.createSuite({ - owner: githubRepo.split('/')[0], - repo: githubRepo.split('/')[1], - head_sha: githubSHA - }) + // await octokit.checks.createSuite({ + // owner: githubRepo.split('/')[0], + // repo: githubRepo.split('/')[1], + // head_sha: githubSha + // }) await octokit.checks.create(checkBody) } /** - * @return {Promise<*>} + * @param {{ groupedResults: LHResultsByURL, gists: Gist[], status: number }} params + * @return {{color: *, text: string, fields: *}[]} */ -async function getGroupedAssertionResultsByURL() { - if (!existsSync(lhAssertResultsPath)) { - console.log(`No LH Assert results in ${lhAssertResultsPath}`) - return [] - } - const assertionResultsBuffer = await pReadFile(lhAssertResultsPath) - /** @type {[LHResult]} **/ - const assertionResults = JSON.parse(assertionResultsBuffer.toString()) - return groupBy(assertionResults, 'url') -} +function formatAssertResults({ groupedResults, status, gists }) { + const color = status === 0 ? 'good' : 'danger' -/** - * @param {{ githubToken?: string }} params - * @return {Promise} - */ -function uploadResultsToGist({ githubToken }) { - if (!githubToken) { - return Promise.resolve([{}]) - } + return Object.values(groupedResults).reduce((acc, groupedResult) => { + const resultUrl = get(head(groupedResult), 'url', '') + const gist = find(gists, ({ url }) => url === resultUrl) || {} - const LHRNamesFromPath = getLHRNameFromPath(resultsDirPath) - return Promise.all( - LHRNamesFromPath.map( - async LHRNameFromPath => await uploadResultToGist({ githubToken, resultPath: LHRNameFromPath }) + const fields = groupedResult.map( + /** + * @param {LHResult} res + * @todo typedef for return object + * @return {{title: string, value: string}} + */ + res => { + const title = res.auditProperty ? `${res.auditId}.${res.auditProperty}` : res.auditId + return { + title, + value: `${res.auditTitle} \n _Expected ${res.expected} ${ + res.operator === '<=' ? ' less then' : ' greater than' + } actual ${res.actual}_` + } + } ) - ) -} -/** - * @param {{ githubToken?: string, resultPath: string }} params - * @return {Promise} - */ -async function uploadResultToGist({ githubToken, resultPath }) { - if (!githubToken || !resultPath) { - return {} - } + const reportURL = getLHReportURL(gist) + const reportUrlField = reportURL + ? { + title: `View Detailed Lighthouse Report`, + title_link: reportURL, + color + } + : {} - const resultsBuffer = await pReadFile(join(resultsDirPath, resultPath)) - const results = JSON.parse(resultsBuffer.toString()) - const url = get(results, 'finalUrl', '') - const urlPrefixName = url.replace(/(^\w+:|^)\/\//, '') - const gistName = `lhci-action-lhr-${githubRepo.split('/').join('-')}-${urlPrefixName - .split('/') - .filter(Boolean) - .join('-')}.json` - const octokit = new github.GitHub(githubToken) - const gists = await octokit.gists.list() - const existingGist = find( - gists.data, - gist => Object.keys(gist.files).filter(filename => filename === gistName).length - ) - const gistParams = { - files: { - [gistName]: { - content: resultsBuffer.toString() - } - } - } - const gist = await (existingGist - ? 
octokit.gists.update({ ...gistParams, gist_id: get(existingGist, 'id') }) - : octokit.gists.create(gistParams)) + acc.push({ + text: `${groupedResult.length} result(s) for ${resultUrl}`, + color, + fields + }) + acc.push(reportUrlField) + return acc + }, []) +} - return { - url, - id: get(gist, 'data.id', '').split('/'), - sha: get(gist, ['data', 'history', 0, 'version'], '') +async function getGroupedAssertionResultsByUrl() { + if (!existsSync(lhAssertResultsPath)) { + console.log(`No LH Assert results in ${lhAssertResultsPath}`) + return {} } + const assertionResultsBuffer = await pReadFile(lhAssertResultsPath) + const assertionResults = /** @type {[LHResult]} **/ JSON.parse(assertionResultsBuffer.toString()) + return groupBy(assertionResults, 'url') } /** * @param {{ githubToken?: string }} params * @return {Promise} */ -async function getChangesUrl({ githubToken }) { - const shaChangesURL = ['https://github.com', githubRepo, 'commit', githubSHA].join('/') +async function getChangesUrl({ githubToken }) { + const shaChangesURL = ['https://github.com', githubRepo, 'commit', githubSha].join('/') if (!githubToken) { return { pullRequest: '', @@ -232,13 +121,12 @@ async function getChangesUrl({ githubToken }) { } const octokit = new github.GitHub(githubToken) - const pulls = await octokit.pulls.list({ owner: githubRepo.split('/')[0], repo: githubRepo.split('/')[1] }) - const pullRequest = find(get(pulls, 'data', []), ['head.sha', githubSHA]) + const pullRequest = find(get(pulls, 'data', []), ['head.sha', githubSha]) return { pullRequest: get(pullRequest, 'html_url', ''), @@ -377,9 +265,10 @@ ${summaryResultsTemplate(summaryResults)} } /** - * @param { Gist } gist + * @param {Gist} gist * @return {string} */ + function getLHReportURL(gist) { return isEmpty(gist) ? '' : `https://googlechrome.github.io/lighthouse/viewer/?gist=${gist.id}/${gist.sha}` } @@ -413,7 +302,3 @@ function getLHRNameFromPath(path = '') { ) || [''] ) } - -module.exports = { - sendNotifications -} diff --git a/src/utils/slack.js b/src/utils/slack.js new file mode 100644 index 000000000..238594974 --- /dev/null +++ b/src/utils/slack.js @@ -0,0 +1,46 @@ +const { IncomingWebhook } = require('@slack/webhook') +const core = require('@actions/core') +const { join } = require('path') +const fs = require('fs') +const { groupBy } = require('lodash') + +/** @typedef {{ name: string, expected: number, actual: number, values: number[], operator: string, passed: boolean, + auditId: string, level: 'warn' | 'error', url: string, auditTitle: string, auditDocumentationLink: string }} LHCIAssertResult */ + +/** + * Send Slack Notification as an incoming webhook. 
+ *
+ * @param {{ slackWebhookUrl: string, resultsPath: string, isSuccess: boolean }} params
+ */
+
+exports.sendSlackNotification = async function sendSlackNotification(params) {
+  core.info('Running Slack notification')
+  const { slackWebhookUrl, isSuccess, resultsPath } = params
+  if (isSuccess) return // ignore success checks for now
+
+  const webhook = new IncomingWebhook(slackWebhookUrl, {
+    username: 'Lighthouse CI Action',
+    icon_url: 'https://user-images.githubusercontent.com/54980164/75099367-8bc5b980-55c9-11ea-9e10-2a6ee69e8e70.png' // action Logo
+  })
+
+  /** @type {Object} **/
+  const links = JSON.parse(fs.readFileSync(join(resultsPath, 'links.json'), 'utf8'))
+  /** @type {LHCIAssertResult[]} **/
+  const assertionResults = JSON.parse(fs.readFileSync(join(resultsPath, 'assertion-results.json'), 'utf8'))
+  const assertionResultsByUrl = groupBy(assertionResults, 'url')
+
+  // const color = 'danger'
+  // const conclusion = 'failure'
+  // const changesTitle = changesURL.pullRequest
+  //   ? `Pull Request ${conclusion} - <${changesURL.pullRequest} | View on GitHub>`
+  //   : `Changes ${conclusion} - <${changesURL.sha} | View SHA Changes>`
+  // const attachments = formatAssertResults({ groupedResults, status, gists })
+
+  return webhook.send({
+    blocks: [
+      {
+        type: 'section'
+      }
+    ]
+  })
+}
diff --git a/yarn.lock b/yarn.lock
index ae4d27a92..d249c31fc 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -60,7 +60,7 @@
     yargs "^12.0.5"
     yargs-parser "^11.1.1"

-"@lhci/utils@0.3.9":
+"@lhci/utils@0.3.9", "@lhci/utils@^0.3.9":
   version "0.3.9"
   resolved "https://registry.yarnpkg.com/@lhci/utils/-/utils-0.3.9.tgz#e495f2edc33b32d3ae55a9908047f515526fe19d"
   integrity sha512-qXHi54VgKu/MA9InSyer0cGdNjccdi70nunBp51CMBvw2jHNcL0pBatYmmPzOO4weuWPj1HzHUEGWlFmFXKsbQ==

From 337a37fffdb386007298a927f514492596fe34c3 Mon Sep 17 00:00:00 2001
From: Aleksey Kulikov
Date: Wed, 11 Mar 2020 12:19:48 +0100
Subject: [PATCH 10/19] add assets for slack notification

---
 CONTRIBUTING.md                 |   6 ++++++
 assets/alert-triangle-slack.png | Bin 0 -> 1845 bytes
 assets/alert-triangle.png       | Bin 0 -> 783 bytes
 assets/alert-triangle.svg       |   1 +
 assets/x-slack.png              | Bin 0 -> 1196 bytes
 assets/x.png                    | Bin 0 -> 215 bytes
 assets/x.svg                    |   1 +
 7 files changed, 8 insertions(+)
 create mode 100644 assets/alert-triangle-slack.png
 create mode 100644 assets/alert-triangle.png
 create mode 100644 assets/alert-triangle.svg
 create mode 100644 assets/x-slack.png
 create mode 100644 assets/x.png
 create mode 100644 assets/x.svg

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 2a42e45f3..3c0a6b057 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -48,3 +48,9 @@
 INPUT_URLS="https://alekseykulikov.com/" INPUT_BUDGETPATH=".github/lighthouse/impossible-budget.json" INPUT_RUNS="1" INPUT_SLACKWEBHOOKURL="custom-webhook-url" INPUT_GISTUPLOADTOKEN="github-token" INPUT_GITHUBTOKEN="github-token" INPUT_NOTIFICATIONS='slack' GITHUB_REPOSITORY="repo-name" GITHUB_SHA="githib-pr-head-sha" node src/index.js
 INPUT_URLS="https://alekseykulikov.com/" INPUT_BUDGETPATH=".github/lighthouse/impossible-budget.json" INPUT_RUNS="1" INPUT_SLACKWEBHOOKURL="custom-webhook-url" INPUT_GISTUPLOADTOKEN="github-github" INPUT_GITHUBTOKEN="github-token" INPUT_NOTIFICATIONS='slack' GITHUB_REPOSITORY="repo-name" GITHUB_SHA="githib-pr-head-sha" node src/index.js
 ```
+
+### Slack notification
+
+Images and failure/warning images are stored in [assets](./assets) folder.
Original SVGs are downloaded from https://feathericons.com/, like all Github Action icons. + +Use Block Kit Builder to design the notification, [current version]() diff --git a/assets/alert-triangle-slack.png b/assets/alert-triangle-slack.png new file mode 100644 index 0000000000000000000000000000000000000000..16c32e72bdb0115a211fe0c531adfbce336cb754 GIT binary patch literal 1845 zcmeAS@N?(olHy`uVBq!ia0vp^9w5xY1|&n@ZgvM!Ea{HEjtmSN`?>!lvI6-E$sR$z z3=CCj3=9n|3=F@3LJcn%7)lKo7+xhXFj&oCU=S~uvn$XBC{d9b;hE;^%b*2hb1<+n z3NbJPS&Tr)z$nE4G7ZRL@M4sPvx68lplX;H7}_%#SfFa6fHVkzoCneap_vvi!Bu!H zU`DV(N_7q|jssGh1s;*b3=G`DAk4@xYmNj115;XNNJL45ua8x7ey(0(N`6wRUPW#J zNEd@mg%yyQn_7~nP?4LHS8P>bs{~eI1!RMS^_3LBN=mYAl_Got6rA&mQWZ?~O!N$t z?6?#Z6l{u8(yW49+@RWlJX@uVl9B=|ef{$Ca=mh6z5JqdeM3u2OML?)eIp}XpbFjM z%Dj@q3f;V7Wr!g#b6ir3lZ!G7N;32F6hP)CCgqow*eWT3EP?}wJ4-Ut5H{r%L%jv` zpgu@O-%!s$ADgz+icB2ZKr#?*V1GfZvjG`q4RiL(?ASF6PSyNBx8=Z12d8-|F|LflY)&`TrOMq_l~+3+tz#8rkPLkX>4jnzre}A0ELelDtdI zy249x>bFN3pL*Ka`luy(!}ro?*NrSud$>HA6Ra{1@(CZ+l!&eT5dXPrrLocBd+{C> zRudcE9y*};i&bX@;@ubRr5|RgzI=7#WbRxR{{=Sk zZ4V?>>{tC?@UFinf^n{r0LRJZOzHdKs~;2{D zzt&Fil-XDF^M2_vwNl%ii_Pu*ufNNAr+V0=yWvUu+7Au3aleg!{lD$MD*ix=xk5+I z?r$RX_wK1W{_$v$Jj~-3oPYh@3#Xj03wM>4T-klD?}>IL@mu0An;Kg^tlX#O>HLa?OR#WV{)>x;^Y*6P*Z#lmll$(g zC3%-`Jl;Qj{d4YD+ue8+zpcJ<{v#vPWAh^cvWgXZ;!5J;cppYqGF1t)oD7&{{`Jlc zhNp8b9V)tOxpJPB@l3{zzas@!U)?KI`6~9)lShv(<=U!;`5Q(ttPb3~efxn7`3f9H;moEg1$GD8YlMky_VmX?k)7PM+* z?SfjhFb!HpwJ1ZB3hc3RA41v6qEWA**hbnFXhu=E>ES%DijH$y%uM;3hZ@uecX7_2 z@B7a8KlgIse}Ya@s*{v%g(F1?_fzKVwH~9K&9t6)DkGQV7|VsYMll9Xjn#*80VvBn z7oC{ZR}8mcg2ad!Kx7DTR*ck9SynMzuD5^()u6XII-HN6H-Yw3hR~ZHMQ)pou@6ySI}b9RdflJ`-n1fFKa9orgnY-22LWz z1-SztF6rIuBivx%ho;yukU-Cq9KLnKxImj-Rn4M#72F`4b|7c zqRY_wipJ=tLesCtLdbVur9|HY9!+VWG7ug4oj`3$4AqB#2y|{$9H62l{6&N=Si5io zz`<%Kz1WX#B01k_$F|1ewmFgO-2E-mg)og$=ftcw0AnuXGl3fbh_n^6*j$iSmIUcY z7u<{uRaT2*TW1sofRNW8Ik{c&)kxUBm!PLm&KoIJSH`hiZFcQcJguQ_0EL8vZObYZ z-l6Q#xJa@?G!S-eDeSR N002ovPDHLkV1k7DSo#0} literal 0 HcmV?d00001 diff --git a/assets/alert-triangle.svg b/assets/alert-triangle.svg new file mode 100644 index 000000000..4513eee81 --- /dev/null +++ b/assets/alert-triangle.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/assets/x-slack.png b/assets/x-slack.png new file mode 100644 index 0000000000000000000000000000000000000000..ec1ec75653f6f9ec13fe4f882e3d31c9e7749d3c GIT binary patch literal 1196 zcmeAS@N?(olHy`uVBq!ia0vp^9w5xY1|&n@ZgvM!Ea{HEjtmSN`?>!lvI6-E$sR$z z3=CCj3=9n|3=F@3LJcn%7)lKo7+xhXFj&oCU=S~uvn$XBDAAG{;hE;^%b*2hb1<+n z3NbJPS&Tr)z$nE4G7ZRL@M4sPvx68lplX;H7}_%#SfFa6fHVkzoCneap_vyjB1~Ms z1Xt~`fEmFCX-nqTegmX93p^r=85p>QL70(Y)*J~22Bx&kkcg59UmvUF{9L`nl>DSr zy^7odkS+$B3M(KpH?<^Dp&~aYuh^=>Rtc=a3djZt>nkaMm6T-LDnNc-DA*LGq*(>IxIwi8dA3R!B_#z``ugSN<$C4Ddih1^`i7R4mih)p`bI{& zKoz>hm3bwJ6}oxF$`C_f=D4I5Cl_TFlw{`TDS*sPOv*1Uu~kw6Sp)|Vca~(PA#BPk zhI$L=L4A;nzM-ChJ~nNs6`44+fn*@s!2W_*X9F_K%D*Tx73g4)v+N9Qz!sp0A)E(M zACy|0pHm7_9-5a~VrK-^f+mcl3uL#IbAC>KQE)+Gaw^CYbRl%r0htx4IT0?Ixv6=@ zAa@y=K=q(YVAU9jOCyp5RHKbPC?=3%1`_RHQDD5=aoOm@(~uokwob1YF#Rfcx;TbZ zFuuKhFzb+m0NaBrh*Nfy|3g+heWl$z|C*az|iVsQ8xc{`g@nia|I(tIqCP4*< zh6CQL>n?BUuW{cuP47-t-qznsZ(Tj(ez^UF=_l#Rjw{ddOxJ$fAl0zcZPCs1C;XR~ z)^)u(R-BZx=4PMiYCAuMv?VF)#QgSu<~*V&xw`W+!%G>5;~w!gzUD$b6|J`q6?j9bCx5fK?k7X&T3VgO!pSikq3d0$T+J`Ep zr4tra?w9)OdEy)6lXTN&L$?M7MkW>x0ibKiV;qp?`ok!hxBjL<)0NYpJm~4_=d#Wz Gp$Pz>)L%{j literal 0 HcmV?d00001 diff --git a/assets/x.png b/assets/x.png new file mode 100644 index 0000000000000000000000000000000000000000..29c4a03cb12d23e87f06318e9b0c84363e2d9a45 GIT binary patch literal 215 
zcmeAS@N?(olHy`uVBq!ia0vp^5+KaM1|%Pp+x`GjY)RhkE)4%caKYZ?lYt^Vo-U3d z9-VKeIC3>R2(X;~<7oK5zRUchh@+z2vb}3Kx?blTs|fhL%Qkca53~O}Nk`QTmez&Q zEqPOwR \ No newline at end of file From b650eb314f2a0f5f592dafcfeb62f97eb84d0699 Mon Sep 17 00:00:00 2001 From: Aleksey Kulikov Date: Wed, 11 Mar 2020 15:30:42 +0100 Subject: [PATCH 11/19] use bar chart as a branding icon --- action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/action.yml b/action.yml index 5e52d4e8d..03b207918 100644 --- a/action.yml +++ b/action.yml @@ -31,5 +31,5 @@ runs: using: 'node12' main: 'src/index.js' branding: - icon: 'activity' + icon: 'bar-chart-2' color: 'gray-dark' From 59ec59492f464fe880fb714f8ea670df981144a9 Mon Sep 17 00:00:00 2001 From: Aleksey Kulikov Date: Wed, 11 Mar 2020 15:31:03 +0100 Subject: [PATCH 12/19] finalize slack notification --- src/utils/lhci-helpers.js | 30 ++++++++++ src/utils/slack.js | 120 +++++++++++++++++++++++++++++--------- 2 files changed, 121 insertions(+), 29 deletions(-) create mode 100644 src/utils/lhci-helpers.js diff --git a/src/utils/lhci-helpers.js b/src/utils/lhci-helpers.js new file mode 100644 index 000000000..311461e52 --- /dev/null +++ b/src/utils/lhci-helpers.js @@ -0,0 +1,30 @@ +const { groupBy } = require('lodash') +const { join } = require('path') +const fs = require('fs') + +/** + * Get links by url. + * + * @param {string} resultsPath + * @return {Object} + */ + +exports.getLinksByUrl = function getLinksByUrl(resultsPath) { + return JSON.parse(fs.readFileSync(join(resultsPath, 'links.json'), 'utf8')) +} + +/** + * Get assertions grouped by url. + * + * @typedef {{ name: string, expected: number, actual: number, values: number[], operator: string, passed: boolean, + * auditId: string, level: 'warn' | 'error', url: string, auditTitle: string, auditDocumentationLink: string }} LHCIAssertion + * + * @param {string} resultsPath + * @return {Object} + */ + +exports.getAssertionsByUrl = function getAssertionsByUrl(resultsPath) { + /** @type {LHCIAssertion[]} **/ + const assertionResults = JSON.parse(fs.readFileSync(join(resultsPath, 'assertion-results.json'), 'utf8')) + return groupBy(assertionResults, 'url') +} diff --git a/src/utils/slack.js b/src/utils/slack.js index 238594974..37138c7b4 100644 --- a/src/utils/slack.js +++ b/src/utils/slack.js @@ -1,11 +1,7 @@ +const { flatten, orderBy } = require('lodash') const { IncomingWebhook } = require('@slack/webhook') const core = require('@actions/core') -const { join } = require('path') -const fs = require('fs') -const { groupBy } = require('lodash') - -/** @typedef {{ name: string, expected: number, actual: number, values: number[], operator: string, passed: boolean, - auditId: string, level: 'warn' | 'error', url: string, auditTitle: string, auditDocumentationLink: string }} LHCIAssertResult */ +const { getLinksByUrl, getAssertionsByUrl } = require('./lhci-helpers') /** * Send Slack Notification as an incoming webhook. 
@@ -13,34 +9,100 @@ const { groupBy } = require('lodash')
  * @param {{ slackWebhookUrl: string, resultsPath: string, isSuccess: boolean }} params
  */
 
-exports.sendSlackNotification = async function sendSlackNotification(params) {
-  core.info('Running Slack notification')
-  const { slackWebhookUrl, isSuccess, resultsPath } = params
-  if (!isSuccess) return // ignore success checks for now
+exports.sendSlackNotification = function sendSlackNotification({ slackWebhookUrl, isSuccess, resultsPath }) {
+  if (isSuccess) return // ignore success checks for now
+  core.info('Send Slack notification')
+  const params = {
+    blocks: [
+      {
+        type: 'section',
+        text: { type: 'mrkdwn', text: generateIntro(resultsPath) }
+      },
+      ...generateAssertionBlocks(resultsPath)
+    ]
+  }
+  core.info(JSON.stringify(params, null, '  '))
   const webhook = new IncomingWebhook(slackWebhookUrl, {
     username: 'Lighthouse CI Action',
-    icon_url: 'https://user-images.githubusercontent.com/54980164/75099367-8bc5b980-55c9-11ea-9e10-2a6ee69e8e70.png' // action Logo
+    icon_emoji: ':small_red_triangle:'
   })
+  return webhook.send(params)
+}
 
-  /** @type {Object} **/
-  const links = JSON.parse(fs.readFileSync(join(resultsPath, 'links.json'), 'utf8'))
+/**
+ * Generate intro text, using env variables:
+ * https://help.github.com/en/actions/configuring-and-managing-workflows/using-environment-variables#default-environment-variables
+ *
+ * @param {string} resultsPath
+ */
 
-  /** @type {LHCIAssertResult[]} **/
-  const assertionResults = JSON.parse(fs.readFileSync(join(resultsPath, 'assertion-results.json'), 'utf8'))
-  const assertionResultsByUrl = groupBy(assertionResults, 'url')
+function generateIntro(resultsPath) {
+  const totalUrls = Object.keys(getAssertionsByUrl(resultsPath)).length
+  const repo = process.env.GITHUB_REPOSITORY || 'unknown/repository'
+  const runId = process.env.GITHUB_RUN_ID || 0
+  const runIndex = process.env.GITHUB_RUN_NUMBER || 1
+  const workflow = process.env.GITHUB_WORKFLOW || 'Workflow'
+  const actionId = process.env.GITHUB_ACTION || 'no-id'
+  const author = process.env.GITHUB_ACTOR || 'no-name'
+  const ref = process.env.GITHUB_REF || 'refs/heads/master'
+  const branch = ref.substr(ref.lastIndexOf('/') + 1)
+  return (
+    `Failed to check assertions against ${totalUrls} URL${totalUrls === 1 ? '' : 's'}\n\n` +
+    `Action: <https://github.com/${repo}/actions/runs/${runId} | ${workflow} / ${actionId} (#${runIndex})>\n` +
+    `Repository: <https://github.com/${repo}/tree/${branch} | ${repo} (${branch})>\n` +
+    `Author: ${author}`
+  )
+}
 
-  // const color = 'danger'
-  // const conclusion = 'failure'
-  // const changesTitle = changesURL.pullRequest
-  //   ? `Pull Request ${conclusion} - <${changesURL.pullRequest} | View on GitHub>`
-  //   : `Changes ${conclusion} - <${changesURL.sha} | View SHA Changes>`
-  // const attachments = formatAssertResults({ groupedResults, status, gists })
-
-  return webhook.send({
-    blocks: [
-      {
-        type: 'section'
-      }
-    ]
-  })
-}
+const errorImgUrl = 'https://user-images.githubusercontent.com/158189/76324191-ef4c2880-62e5-11ea-8bf1-ac5ff7571eef.png'
+const warnImgurl = 'https://user-images.githubusercontent.com/158189/76411224-a356bd80-6391-11ea-8a58-8003213a7afa.png'
+
+/**
+ * Generate Blocks for each assertion
+ * https://api.slack.com/reference/block-kit/block-elements
+ *
+ * @param {string} resultsPath
+ */
+
+function generateAssertionBlocks(resultsPath) {
+  const linksByUrl = getLinksByUrl(resultsPath)
+  const assertionsByUrl = getAssertionsByUrl(resultsPath)
+
+  return flatten(
+    Object.entries(assertionsByUrl).map(([url, assertions]) => {
+      const link = linksByUrl[url]
+      const sortedAssertions = orderBy(assertions, a => (a.level === 'error' ? 0 : 1))
+      return [
+        { type: 'divider' },
+        {
+          type: 'section',
+          text: {
+            type: 'mrkdwn',
+            text: `${assertions.length} result${assertions.length === 1 ? '' : 's'} for ${url}`
+          },
+          ...(link
+            ? {
+                accessory: { type: 'button', text: { type: 'plain_text', text: 'View Report', emoji: true }, url: link }
+              }
+            : {})
+        },
+        ...sortedAssertions.map(a => {
+          const text =
+            `*${a.auditId}* ${a.level === 'error' ? 'failure' : 'warning'} for *${a.name}* assertion\n` +
+            `${a.auditTitle} <${a.auditDocumentationLink} | [...]>\n` +
+            `Expected *${a.operator} ${a.expected}*, but found *${a.actual}*`
+          return {
+            type: 'section',
+            text: { type: 'mrkdwn', text },
+            accessory: {
+              type: 'image',
+              image_url: a.level === 'error' ? errorImgUrl : warnImgurl,
+              alt_text: a.level
+            }
+          }
+        })
+      ]
+    })
+  )
 }

From 8abd0d2a07c7e8c1a824266fc3d37e2306ce4a1f Mon Sep 17 00:00:00 2001
From: Aleksey Kulikov
Date: Wed, 11 Mar 2020 22:34:19 +0100
Subject: [PATCH 13/19] prevent temporaryPublicStorage and server option

---
 src/config.js | 12 +++++++++---
 1 file changed, 9 insertions(+), 3 deletions(-)

diff --git a/src/config.js b/src/config.js
index e7e94225a..2178a1efc 100644
--- a/src/config.js
+++ b/src/config.js
@@ -10,7 +10,13 @@ exports.getInput = function getInputArgs() {
   // Make sure we don't have LHCI xor API token
   if (!!serverBaseUrl != !!serverToken) {
     // Fail and exit
-    core.setFailed(`Need both a LHCI server url and an API token`)
+    core.setFailed(`Need both an LHCI server url and an API token.`)
+    process.exit(1)
+  }
+
+  const temporaryPublicStorage = core.getInput('temporaryPublicStorage') === 'true' ? true : false
+  if (serverBaseUrl && temporaryPublicStorage) {
+    core.setFailed(`Both LHCI server and temporary storage are set, choose one upload method.`)
     process.exit(1)
   }
 
@@ -61,11 +67,11 @@ exports.getInput = function getInputArgs() {
     urls,
     runs: parseInt(core.getInput('runs'), 10) || 1,
     staticDistDir,
+    uploadArtifacts: core.getInput('uploadArtifacts') === 'true' ? true : false,
     // upload
-    temporaryPublicStorage: core.getInput('temporaryPublicStorage') === 'true' ? true : false,
     serverBaseUrl,
     serverToken,
-    uploadArtifacts: core.getInput('uploadArtifacts') === 'true' ? true : false,
+    temporaryPublicStorage,
     // assert
     budgetPath: core.getInput('budgetPath') || null,
     configPath,

From a33cd541576de84bb8932bf61ddbcab3107a587b Mon Sep 17 00:00:00 2001
From: Aleksey Kulikov
Date: Wed, 11 Mar 2020 22:34:52 +0100
Subject: [PATCH 14/19] send github comment

---
 CONTRIBUTING.md           |  10 +
 src/index.js              |  18 +-
 src/utils/github.js       | 346 +++++-------------------------------
 src/utils/lhci-helpers.js |  26 ++-
 src/utils/slack.js        |  21 +--
 5 files changed, 110 insertions(+), 311 deletions(-)

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 3c0a6b057..f5b9e1829 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -53,4 +53,14 @@ INPUT_URLS="https://alekseykulikov.com/" INPUT_BUDGETPATH=".github/lighthouse/im
 Images and failure/warning images are stored in the [assets](./assets) folder.
 Original SVGs are downloaded from https://feathericons.com/, like all Github Action icons.
 
+```js
+GITHUB_REPOSITORY=treosh/lighthouse-ci-action GITHUB_ACTION=build GITHUB_ACTOR=alekseykulikov node -e "require('./src/utils/slack').sendSlackNotification({ resultsPath: '/Users/aleksey/code/treosh/website/.lighthouseci', slackWebhookUrl: 'https://hooks.slack.com/..' })"
+```
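+
+The GitHub comment sender can be exercised the same way. The command below is a sketch: `mock-event.json` is a hypothetical mocked payload, and `sendGithubComment` only posts when the event behind `GITHUB_EVENT_PATH` describes a just-opened issue or pull request (`"action": "opened"`):
+
+```js
+GITHUB_REPOSITORY=treosh/lighthouse-ci-action GITHUB_EVENT_PATH=./mock-event.json node -e "require('./src/utils/github').sendGithubComment({ resultsPath: '/Users/aleksey/code/treosh/website/.lighthouseci', githubToken: 'secret..' })"
+```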
})" +``` + Use Block Kit Builder to design the notification, [current version]() diff --git a/src/index.js b/src/index.js index f3d3a015e..cc0decd4d 100644 --- a/src/index.js +++ b/src/index.js @@ -5,7 +5,7 @@ const { exec } = require('@actions/exec') const lhciCliPath = require.resolve('@lhci/cli/src/cli') const { getInput, hasAssertConfig } = require('./config') const { uploadArtifacts } = require('./utils/artifacts') -const { createGithubCheck } = require('./utils/github') +const { sendGithubComment } = require('./utils/github') const { sendSlackNotification } = require('./utils/slack') /** @@ -67,7 +67,6 @@ async function main() { if (input.serverToken || input.temporaryPublicStorage) { core.startGroup(`Uploading`) const uploadParams = ['upload'] - if (input.githubToken) uploadParams.push(`--githubToken=${input.githubToken}`) if (input.serverToken) { uploadParams.push('--target=lhci', `--serverBaseUrl=${input.serverToken}`, `--token=${input.serverToken}`) @@ -82,22 +81,15 @@ async function main() { } /******************************* 4. NOTIFY ************************************/ - if (input.githubToken || input.slackWebhookUrl) { + if ((input.githubToken || input.slackWebhookUrl) && isAssertFailed) { core.startGroup(`Notifying`) if (input.githubToken) { - await createGithubCheck({ - githubToken: input.githubToken, - isSuccess: !isAssertFailed - }) + await sendGithubComment({ githubToken: input.githubToken, resultsPath }) } // send slack notification only on error - if (input.slackWebhookUrl && isAssertFailed) { - await sendSlackNotification({ - slackWebhookUrl: input.slackWebhookUrl, - isSuccess: !isAssertFailed, - resultsPath - }) + if (input.slackWebhookUrl) { + await sendSlackNotification({ slackWebhookUrl: input.slackWebhookUrl, resultsPath }) } core.endGroup() // Notifying diff --git a/src/utils/github.js b/src/utils/github.js index 78fb953a4..f84c249b7 100644 --- a/src/utils/github.js +++ b/src/utils/github.js @@ -1,304 +1,79 @@ -const { groupBy, find, get, isEmpty, head } = require('lodash') +const { orderBy } = require('lodash') const github = require('@actions/github') -const { readFile, readdirSync, existsSync } = require('fs') -const { promisify } = require('util') -const { join } = require('path') -const pReadFile = promisify(readFile) - -/** @typedef {{ sha: string, pullRequest: string }} ChangesURL */ -/** @typedef {{ id: string, sha: string, url: string }} Gist */ -/** @typedef {{ auditId: string, auditProperty: string, auditTitle: string, operator: string, expected: string, actual: string, url: string }} LHResult */ - -const githubRepo = process.env.GITHUB_REPOSITORY || '' -const githubSha = process.env.GITHUB_SHA || '' -const reportTitle = 'Lighthouse CI Action' -const resultsDirPath = join(process.cwd(), '.lighthouseci') -const lhAssertResultsPath = join(resultsDirPath, 'assertion-results.json') - -// https://user-images.githubusercontent.com/158189/76324191-ef4c2880-62e5-11ea-8bf1-ac5ff7571eef.png +const core = require('@actions/core') +const { getLinksByUrl, getAssertionsByUrl } = require('./lhci-helpers') /** - * Send notifications. + * Create Github Comment. 
+ * https://github.com/actions/toolkit/blob/master/docs/github-package.md#sending-requests-to-the-github-api * - * @param {{ githubToken: string, isSuccess: boolean }} params - */ - -exports.createGithubCheck = async function sendNotifications({ githubToken, isSuccess }) { - const [groupedResults, changesURL, gists] = await Promise.all([getGroupedAssertionResultsByUrl()]) - - console.log('Running Github notification') - - const octokit = new github.GitHub(githubToken) - const checkBody = { - owner: githubRepo.split('/')[0], - repo: githubRepo.split('/')[1], - head_sha: githubSha, - name: reportTitle, - status: /** @type {'completed'} */ ('completed'), - conclusion: /** @type {'success' | 'failure'} */ (isSuccess ? 'success' : 'failure'), - output: getSummaryMarkdownOutput({ status, changesURL, groupedResults, gists }) - } - - // await octokit.checks.createSuite({ - // owner: githubRepo.split('/')[0], - // repo: githubRepo.split('/')[1], - // head_sha: githubSha - // }) - - await octokit.checks.create(checkBody) -} - -/** - * @param {{ groupedResults: LHResultsByURL, gists: Gist[], status: number }} params - * @return {{color: *, text: string, fields: *}[]} + * @param {{ githubToken: string, resultsPath: string }} params */ -function formatAssertResults({ groupedResults, status, gists }) { - const color = status === 0 ? 'good' : 'danger' - - return Object.values(groupedResults).reduce((acc, groupedResult) => { - const resultUrl = get(head(groupedResult), 'url', '') - const gist = find(gists, ({ url }) => url === resultUrl) || {} - - const fields = groupedResult.map( - /** - * @param {LHResult} res - * @todo typedef for return object - * @return {{title: string, value: string}} - */ - res => { - const title = res.auditProperty ? `${res.auditId}.${res.auditProperty}` : res.auditId - return { - title, - value: `${res.auditTitle} \n _Expected ${res.expected} ${ - res.operator === '<=' ? ' less then' : ' greater than' - } actual ${res.actual}_` - } - } - ) - - const reportURL = getLHReportURL(gist) - const reportUrlField = reportURL - ? 
{ - title: `View Detailed Lighthouse Report`, - title_link: reportURL, - color - } - : {} - - acc.push({ - text: `${groupedResult.length} result(s) for ${resultUrl}`, - color, - fields - }) - acc.push(reportUrlField) - return acc - }, []) -} - -async function getGroupedAssertionResultsByUrl() { - if (!existsSync(lhAssertResultsPath)) { - console.log(`No LH Assert results in ${lhAssertResultsPath}`) - return {} - } - const assertionResultsBuffer = await pReadFile(lhAssertResultsPath) - const assertionResults = /** @type {[LHResult]} **/ JSON.parse(assertionResultsBuffer.toString()) - return groupBy(assertionResults, 'url') -} - -/** - * @param {{ githubToken?: string }} params - * @return {Promise} - */ +exports.sendGithubComment = function sendGithubComment({ githubToken, resultsPath }) { + core.info('Running Github notification') + core.info('Context: ' + JSON.stringify(github.context, null, ' ')) + const client = new github.GitHub(githubToken) + const { issue } = github.context -async function getChangesUrl({ githubToken }) { - const shaChangesURL = ['https://github.com', githubRepo, 'commit', githubSha].join('/') - if (!githubToken) { - return { - pullRequest: '', - sha: shaChangesURL - } + if (github.context.payload.action !== 'opened') { + core.info('No issue or pull request was opened, skipping') + return } - const octokit = new github.GitHub(githubToken) - const pulls = await octokit.pulls.list({ - owner: githubRepo.split('/')[0], - repo: githubRepo.split('/')[1] - }) - - const pullRequest = find(get(pulls, 'data', []), ['head.sha', githubSha]) - - return { - pullRequest: get(pullRequest, 'html_url', ''), - sha: shaChangesURL + const params = { + owner: issue.owner, + repo: issue.repo, + issue_number: issue.number, + body: generateWelcomeMessage(resultsPath) } -} - -/** - * @param {{ groupedResults: LHResultsByURL, gists: Gist[], status: number }} params - * @return {{color: *, text: string, fields: *}[]} - */ -function formatAssertResults({ groupedResults, status, gists }) { - const color = status === 0 ? 'good' : 'danger' - - return Object.values(groupedResults).reduce((acc, groupedResult) => { - const resultUrl = get(head(groupedResult), 'url', '') - const gist = find(gists, ({ url }) => url === resultUrl) || {} - - const fields = groupedResult.map( - /** - * @param {LHResult} res - * @todo typedef for return object - * @return {{title: string, value: string}} - */ - res => { - const title = res.auditProperty ? `${res.auditId}.${res.auditProperty}` : res.auditId - return { - title, - value: `${res.auditTitle} \n _Expected ${res.expected} ${ - res.operator === '<=' ? ' less then' : ' greater than' - } actual ${res.actual}_` - } - } - ) - const reportURL = getLHReportURL(gist) - const reportUrlField = reportURL - ? { - title: `View Detailed Lighthouse Report`, - title_link: reportURL, - color - } - : {} - - acc.push({ - text: `${groupedResult.length} result(s) for ${resultUrl}`, - color, - fields - }) - acc.push(reportUrlField) - return acc - }, []) + core.info(JSON.stringify(params, null, ' ')) + return client.issues.createComment(params) } -/** - * @param {{ status: number, changesURL: ChangesURL, gists: Gist[], groupedResults: LHResultsByURL }} params - * @return {{summary: string, title: string}} - */ -function getSummaryMarkdownOutput({ status, changesURL, groupedResults, gists }) { - const conclusion = status === 0 ? 'success' : 'failure' - const title = changesURL.pullRequest ? 
`Pull Request ${conclusion}` : `Changes ${conclusion}`
-  const changesLink = changesURL.pullRequest
-    ? `[View on GitHub](${changesURL.pullRequest})`
-    : `[View SHA Changes](${changesURL.sha})`
-  const summaryResults = formatAssertResults({ groupedResults, gists, status })
-
-  /**
-   * @param {{ fields?: { title: string, value: string}[], title_link?: string, title?: string }} params
-   * @return {string}
-   */
-  const fieldsTemplate = ({ fields, title_link, title }) => {
-    if (fields) {
-      let details = ''
-      if (fields.length > 2) {
-        const detailsFields = [...fields]
-        // make only 2 first results visible
-        fields = fields.slice(0, 2)
-        // move other results to markdown details section
-        detailsFields.splice(0, 2)
-        details = fieldsDetailsTemplate(detailsFields)
-      }
-
-      return fields
-        .map(field => `**${field.title}**\n${field.value}`.trim())
-        .join('\n')
-        .concat(details)
-    }
-
-    if (title_link) {
-      return `[${title}](${title_link})`
-    }
-
-    return '\n'
-  }
-
-  /**
-   * @param {{ text?: string }} params
-   * @return {string}
-   */
-  const resultTitle = ({ text }) => {
-    return text ? `### ${text}` : ''
-  }
-
-  /**
-   * @param {{ title: string, value: string}[] } fields
-   * @return {string}
-   */
-  const fieldsDetailsTemplate = fields => {
-    return `
-<details>
-  <summary>View more...</summary>
-  ${fields.map(field => `**${field.title}**\n${field.value}`.trim()).join('\n')}
-</details>
-`
-      .trim()
-      .concat('\n')
-  }
-
-  /**
-   *
-   * @param {{ text: string, fields: { title: string, value: string}[] }[]} summaryResults
-   * @return {string}
-   */
-  const summaryResultsTemplate = summaryResults => {
-    return summaryResults.map(result => `${resultTitle(result)}\n${fieldsTemplate(result)}`.trim()).join('\n')
-  }
-
-  const summary = `
-${changesLink}\n
-${summaryResultsTemplate(summaryResults)}
-`
-  return {
-    title,
-    summary
-  }
-}
+const emojiTab = '       '
 
 /**
- * @param {Gist} gist
- * @return {string}
- */
-
-function getLHReportURL(gist) {
-  return isEmpty(gist) ? '' : `https://googlechrome.github.io/lighthouse/viewer/?gist=${gist.id}/${gist.sha}`
-}
+/**
+ * Generate the comment text.
+ *
+ * Current format:
+
+Failed to check assertions against 1 URL.
+#### 2 results for https://treo.sh/ – [[report]()] [[compare]()]
+❌ `offscreen-images` failure for `maxLength` assertion: defer offscreen images [[...]](https://web.dev/offscreen-images)
+       Expected **<= 0**, but found **1**
+
+⚠️ `mainthread-work-breakdown` warning for `minScore` assertion: minimize main-thread work [[...]](https://web.dev/mainthread-work-breakdown)
+        Expected **>= 0.9**, but found **0.83**
+
+* @param {string} resultsPath
+*/
+
+function generateWelcomeMessage(resultsPath) {
+  const linksByUrl = getLinksByUrl(resultsPath)
+  const assertionsByUrl = getAssertionsByUrl(resultsPath)
+  const totalUrls = Object.keys(assertionsByUrl).length
+
+  const assertionTexts = Object.entries(assertionsByUrl).map(([url, assertions]) => {
+    const link = linksByUrl[url]
+    const sortedAssertions = orderBy(assertions, a => (a.level === 'error' ? 0 : 1))
+    const assertionsText = sortedAssertions.map(a => {
+      const emoji = a.level === 'error' ? '❌' : '⚠️'
+      return (
+        `${emoji} \`${a.auditId}\` ${a.level === 'error' ? 'failure' : 'warning'} for \`${a.name}\` assertion – ` +
+        `${emojiTab}${a.auditTitle} [[...]](${a.auditDocumentationLink})\n` +
+        `${emojiTab}Expected **${a.operator} ${a.expected}**, but found **${a.actual}**`
+      )
+    })
+    return (
+      `#### ${assertions.length} result${assertions.length === 1 ? '' : 's'} for ${url}` +
+      `${link ? ` – [[report](${link})]` : ''}\n` +
+      assertionsText.join('\n')
+    )
+  })

-/**
- * @param {string} path
- * @return {string[]}
- */
-function getLHRNameFromPath(path = '') {
-  let dir = readdirSync(path)
-  return (
-    dir
-      .filter(
-        /**
-         * @param {string} fileName
-         * @return { boolean }
-         */
-        (fileName = '') => {
-          return !!fileName.match(/lhr-\d+\.json/g)
-        }
-      )
-      .map(
-        /**
-         * @param {string} fileName
-         * @return { string }
-         */
-        (fileName = '') => {
-          const match = fileName.match(/lhr-\d+\.json/g)
-          return match ? match[0] : ''
-        }
-      ) || ['']
-  )
-}
+  return (
+    `Failed to check assertions against ${totalUrls} URL${totalUrls === 1 ? '' : 's'}.\n` + assertionTexts.join('\n')
+  )
+}

diff --git a/src/utils/lhci-helpers.js b/src/utils/lhci-helpers.js
index 311461e52..d524543e6 100644
--- a/src/utils/lhci-helpers.js
+++ b/src/utils/lhci-helpers.js
@@ -1,4 +1,4 @@
-const { groupBy } = require('lodash')
+const { groupBy, fromPairs } = require('lodash')
 const { join } = require('path')
 const fs = require('fs')
 
@@ -28,3 +28,27 @@ exports.getAssertionsByUrl = function getAssertionsByUrl(resultsPath) {
   const assertionResults = JSON.parse(fs.readFileSync(join(resultsPath, 'assertion-results.json'), 'utf8'))
   return groupBy(assertionResults, 'url')
 }
+
+/**
+ * Get Lighthouse results by url.
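+ * Reads the lhr-*.json files that the collect step saves into the results folder.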
+ *
+ * @typedef {{ requestedUrl: string }} LHResult
+ *
+ * @param {string} resultsPath
+ * @return {Object<string, LHResult>}
+ */
+
+exports.getResultsByUrl = function getResultsByUrl(resultsPath) {
+  const lhrFileNames = fs
+    .readdirSync(resultsPath)
+    .filter(fileName => fileName.startsWith('lhr-') && fileName.endsWith('.json'))
+  return fromPairs(
+    lhrFileNames.map(fileName => {
+      /** @type {LHResult} **/
+      const lhr = JSON.parse(fs.readFileSync(join(resultsPath, fileName), 'utf8'))
+      const url = lhr.requestedUrl || ''
+      return [url, lhr]
+    })
+  )
+}

diff --git a/src/utils/slack.js b/src/utils/slack.js
index 37138c7b4..e17d1e73f 100644
--- a/src/utils/slack.js
+++ b/src/utils/slack.js
@@ -6,27 +6,24 @@ const { getLinksByUrl, getAssertionsByUrl } = require('./lhci-helpers')
 /**
  * Send Slack Notification as an incoming webhook.
  *
- * @param {{ slackWebhookUrl: string, resultsPath: string, isSuccess: boolean }} params
+ * @param {{ slackWebhookUrl: string, resultsPath: string }} params
  */
 
-exports.sendSlackNotification = function sendSlackNotification({ slackWebhookUrl, isSuccess, resultsPath }) {
-  if (isSuccess) return // ignore success checks for now
+exports.sendSlackNotification = function sendSlackNotification({ slackWebhookUrl, resultsPath }) {
   core.info('Send Slack notification')
+
+  const webhook = new IncomingWebhook(slackWebhookUrl, {
+    username: 'Lighthouse CI Action',
+    icon_emoji: ':small_red_triangle:'
+  })
   const params = {
     blocks: [
-      {
-        type: 'section',
-        text: { type: 'mrkdwn', text: generateIntro(resultsPath) }
-      },
+      { type: 'section', text: { type: 'mrkdwn', text: generateIntro(resultsPath) } },
       ...generateAssertionBlocks(resultsPath)
     ]
   }
-  core.info(JSON.stringify(params, null, '  '))
 
-  const webhook = new IncomingWebhook(slackWebhookUrl, {
-    username: 'Lighthouse CI Action',
-    icon_emoji: ':small_red_triangle:'
-  })
+  core.info(JSON.stringify(params, null, '  '))
   return webhook.send(params)
 }

From 508063db6b3b1fb702dcdca1735c44b2d08a3a61 Mon Sep 17 00:00:00 2001
From: Aleksey Kulikov
Date: Wed, 11 Mar 2020 23:27:41 +0100
Subject: [PATCH 15/19] add problem matchers support

---
 .github/lhci.json             | 16 ++++++++++++++++
 src/index.js                  |  4 ++++
 src/utils/github.js           |  4 +---
 src/utils/lhci-helpers.js     |  8 +++++---
 src/utils/problem-matchers.js | 30 ++++++++++++++++++++++++++++++
 src/utils/slack.js            |  5 ++---
 6 files changed, 58 insertions(+), 9 deletions(-)
 create mode 100644 .github/lhci.json
 create mode 100644 src/utils/problem-matchers.js

diff --git a/.github/lhci.json b/.github/lhci.json
new file mode 100644
index 000000000..136873455
--- /dev/null
+++ b/.github/lhci.json
@@ -0,0 +1,16 @@
+{
+  "problemMatcher": [
+    {
+      "owner": "lhci",
+      "pattern": [
+        {
+          "regexp": "^(.*)\\s–\\s(error|warning)\\s–\\s(.*)\\s–\\s(.*)$",
+          "file": 1,
+          "severity": 2,
+          "code": 3,
+          "message": 4
+        }
+      ]
+    }
+  ]
+}

diff --git a/src/index.js b/src/index.js
index cc0decd4d..ffd7280a1 100644
--- a/src/index.js
+++ b/src/index.js
@@ -7,6 +7,7 @@ const { getInput, hasAssertConfig } = require('./config')
 const { uploadArtifacts } = require('./utils/artifacts')
 const { sendGithubComment } = require('./utils/github')
 const { sendSlackNotification } = require('./utils/slack')
+const { enableProblemMatcher } = require('./utils/problem-matchers')
 
 /**
  * Audit urls with Lighthouse CI in 3 stages:
@@ -58,8 +59,11 @@ async function main() {
       assertArgs.push(`--config=${input.configPath}`)
     }
 
+    // run lhci with problem matcher
+    // https://github.com/actions/toolkit/blob/master/docs/commands.md#problem-matchers
     const assertStatus = await exec(lhciCliPath, assertArgs)
     isAssertFailed = assertStatus !== 0
+    if (isAssertFailed) enableProblemMatcher(resultsPath)
     core.endGroup() // Asserting
   }

diff --git a/src/utils/github.js b/src/utils/github.js
index f84c249b7..107202bbb 100644
--- a/src/utils/github.js
+++ b/src/utils/github.js
@@ -1,4 +1,3 @@
-const { orderBy } = require('lodash')
 const github = require('@actions/github')
 const core = require('@actions/core')
 const { getLinksByUrl, getAssertionsByUrl } = require('./lhci-helpers')
@@ -57,8 +56,7 @@ function generateWelcomeMessage(resultsPath) {
 
   const assertionTexts = Object.entries(assertionsByUrl).map(([url, assertions]) => {
     const link = linksByUrl[url]
-    const sortedAssertions = orderBy(assertions, a => (a.level === 'error' ? 0 : 1))
-    const assertionsText = sortedAssertions.map(a => {
+    const assertionsText = assertions.map(a => {
       const emoji = a.level === 'error' ? '❌' : '⚠️'
       return (

diff --git a/src/utils/lhci-helpers.js b/src/utils/lhci-helpers.js
index d524543e6..6f59027c0 100644
--- a/src/utils/lhci-helpers.js
+++ b/src/utils/lhci-helpers.js
@@ -1,4 +1,4 @@
-const { groupBy, fromPairs } = require('lodash')
+const { groupBy, fromPairs, mapValues, orderBy } = require('lodash')
 const { join } = require('path')
 const fs = require('fs')
 
@@ -14,7 +14,7 @@ exports.getLinksByUrl = function getLinksByUrl(resultsPath) {
 }
 
 /**
- * Get assertions grouped by url.
+ * Get assertions grouped by url and sorted with error first.
  *
  * @typedef {{ name: string, expected: number, actual: number, values: number[], operator: string, passed: boolean,
  *   auditId: string, level: 'warn' | 'error', url: string, auditTitle: string, auditDocumentationLink: string }} LHCIAssertion
  *
  * @param {string} resultsPath
  * @return {Object}
  */
 
 exports.getAssertionsByUrl = function getAssertionsByUrl(resultsPath) {
   /** @type {LHCIAssertion[]} **/
   const assertionResults = JSON.parse(fs.readFileSync(join(resultsPath, 'assertion-results.json'), 'utf8'))
-  return groupBy(assertionResults, 'url')
+  return mapValues(groupBy(assertionResults, 'url'), assertions => {
+    return orderBy(assertions, a => (a.level === 'error' ? 0 : 1) + a.auditId)
+  })
 }

diff --git a/src/utils/problem-matchers.js b/src/utils/problem-matchers.js
new file mode 100644
index 000000000..8a5d5933f
--- /dev/null
+++ b/src/utils/problem-matchers.js
@@ -0,0 +1,30 @@
+const { getAssertionsByUrl } = require('./lhci-helpers')
+
+/**
+ * Problem matchers make it possible to add annotations to the Action output:
+ * https://github.com/actions/toolkit/blob/master/docs/problem-matchers.md
+ *
+ * LHCI assertion output is designed to be readable by humans, not machines, and it's hard to parse with a regexp.
+ * This method therefore prints each assertion as a single parseable line while the lhci.json problem matcher is enabled, for example:
+ *
+ * https://likeeper.com/ – error – offscreen-images – `offscreen-images` failure for `maxLength` assertion, expected **<= 0**, but found **1**.
+ * https://likeeper.com/ – warning – mainthread-work-breakdown – `mainthread-work-breakdown` warning for `minScore` assertion, expected **>= 0.9**, but found **0.83**.
+ *
+ * @param {string} resultsPath
+ */
+
+exports.enableProblemMatcher = function enableProblemMatcher(resultsPath) {
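+  // register the lhci.json matcher, print one line per assertion in the
+  // format documented above, then remove the matcher so later output is not annotated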
+  console.log('::add-matcher::lhci.json')
+  const assertionsByUrl = getAssertionsByUrl(resultsPath)
+  Object.values(assertionsByUrl).forEach(assertions => {
+    assertions.forEach(a => {
+      const message =
+        `\`${a.auditId}\` ${a.level === 'error' ? 'failure' : 'warning'} for \`${a.name}\` assertion, ` +
+        `expected **${a.operator} ${a.expected}**, but found **${a.actual}**.`
+      console.log('%s – %s – %s – %s', a.url, a.level, a.auditId, message)
+    })
+  })
+  console.log('::remove-matcher owner=lhci::')
+}

diff --git a/src/utils/slack.js b/src/utils/slack.js
index e17d1e73f..3a298afe5 100644
--- a/src/utils/slack.js
+++ b/src/utils/slack.js
@@ -1,4 +1,4 @@
-const { flatten, orderBy } = require('lodash')
+const { flatten } = require('lodash')
 const { IncomingWebhook } = require('@slack/webhook')
 const core = require('@actions/core')
 const { getLinksByUrl, getAssertionsByUrl } = require('./lhci-helpers')
@@ -69,7 +69,6 @@ function generateAssertionBlocks(resultsPath) {
   return flatten(
     Object.entries(assertionsByUrl).map(([url, assertions]) => {
       const link = linksByUrl[url]
-      const sortedAssertions = orderBy(assertions, a => (a.level === 'error' ? 0 : 1))
       return [
         { type: 'divider' },
         {
@@ -84,7 +83,7 @@ function generateAssertionBlocks(resultsPath) {
             }
           : {})
       },
-      ...sortedAssertions.map(a => {
+      ...assertions.map(a => {
         const text =

From 50e7bc2b67b3984ef2c19a284322dd719385d04a Mon Sep 17 00:00:00 2001
From: Aleksey Kulikov
Date: Wed, 11 Mar 2020 23:41:10 +0100
Subject: [PATCH 16/19] fix tests

---
 ...I-assert-on-budget-github-notification.yml |  15 -
 .../LHCI-assert-on-budget-notification.yml    |  16 -
 ...CI-assert-on-budget-slack-notification.yml |  15 -
 node_modules/@actions/exec/lib/exec.js        |  37 --
 node_modules/@actions/exec/lib/interfaces.js  |   3 -
 node_modules/@actions/exec/lib/toolrunner.js  | 587 ------------------
 node_modules/@actions/exec/package.json       |  40 --
 node_modules/@actions/io/lib/io-util.js       | 195 ------
 node_modules/@actions/io/lib/io.js            | 290 ---------
 node_modules/@actions/io/package.json         |  37 --
 package.json                                  |   1 -
 src/index.js                                  |  34 +-
 src/utils/problem-matchers.js                 |   3 +
 yarn.lock                                     |  12 -
 14 files changed, 28 insertions(+), 1257 deletions(-)
 delete mode 100644 .github/workflows/LHCI-assert-on-budget-github-notification.yml
 delete mode 100644 .github/workflows/LHCI-assert-on-budget-notification.yml
 delete mode 100644 .github/workflows/LHCI-assert-on-budget-slack-notification.yml
 delete mode 100644 node_modules/@actions/exec/lib/exec.js
 delete mode 100644 node_modules/@actions/exec/lib/interfaces.js
 delete mode 100644 node_modules/@actions/exec/lib/toolrunner.js
 delete mode 100644 node_modules/@actions/exec/package.json
 delete mode 100644 node_modules/@actions/io/lib/io-util.js
 delete mode 100644 node_modules/@actions/io/lib/io.js
 delete mode 100644 node_modules/@actions/io/package.json

diff --git a/.github/workflows/LHCI-assert-on-budget-github-notification.yml b/.github/workflows/LHCI-assert-on-budget-github-notification.yml
deleted file mode 100644
index 355007ee0..000000000
--- a/.github/workflows/LHCI-assert-on-budget-github-notification.yml
+++
/dev/null @@ -1,15 +0,0 @@ -name: LHCI-assert-on-budget-github-notification -on: push -jobs: - # This pass/fails a build with a budgets.json. - assert-on-budget: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v1 - - name: Run Lighthouse on urls and validate with budgets.json - uses: ./ - with: - urls: 'https://alekseykulikov.com/' - budgetPath: '.github/lighthouse/budget.json' - githubToken: ${{ secrets.GITHUB_TOKEN }} - gistUploadToken: ${{ secrets.GIST_UPLOAD_TOKEN }} diff --git a/.github/workflows/LHCI-assert-on-budget-notification.yml b/.github/workflows/LHCI-assert-on-budget-notification.yml deleted file mode 100644 index 8a98ba376..000000000 --- a/.github/workflows/LHCI-assert-on-budget-notification.yml +++ /dev/null @@ -1,16 +0,0 @@ -name: LHCI-assert-on-budget-notification -on: push -jobs: - # This pass/fails a build with a budgets.json. - assert-on-budget: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v1 - - name: Run Lighthouse on urls and validate with budgets.json - uses: ./ - with: - urls: 'https://alekseykulikov.com/' - budgetPath: '.github/lighthouse/budget.json' - slackWebhookUrl: ${{ secrets.SLACK_WEBHOOK_URL }} - githubToken: ${{ secrets.GITHUB_TOKEN }} - gistUploadToken: ${{ secrets.GIST_UPLOAD_TOKEN }} diff --git a/.github/workflows/LHCI-assert-on-budget-slack-notification.yml b/.github/workflows/LHCI-assert-on-budget-slack-notification.yml deleted file mode 100644 index 166e0b374..000000000 --- a/.github/workflows/LHCI-assert-on-budget-slack-notification.yml +++ /dev/null @@ -1,15 +0,0 @@ -name: LHCI-assert-on-budget-slack-notification -on: push -jobs: - # This pass/fails a build with a budgets.json. - assert-on-budget: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v1 - - name: Run Lighthouse on urls and validate with budgets.json - uses: ./ - with: - urls: 'https://alekseykulikov.com/' - budgetPath: '.github/lighthouse/budget.json' - slackWebhookUrl: ${{ secrets.SLACK_WEBHOOK_URL }} - githubToken: ${{ secrets.GITHUB_TOKEN }} diff --git a/node_modules/@actions/exec/lib/exec.js b/node_modules/@actions/exec/lib/exec.js deleted file mode 100644 index 2748debcc..000000000 --- a/node_modules/@actions/exec/lib/exec.js +++ /dev/null @@ -1,37 +0,0 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const tr = require("./toolrunner"); -/** - * Exec a command. - * Output will be streamed to the live console. - * Returns promise with return code - * - * @param commandLine command to execute (can include additional args). Must be correctly escaped. - * @param args optional arguments for tool. Escaping is handled by the lib. - * @param options optional exec options. 
See ExecOptions - * @returns Promise exit code - */ -function exec(commandLine, args, options) { - return __awaiter(this, void 0, void 0, function* () { - const commandArgs = tr.argStringToArray(commandLine); - if (commandArgs.length === 0) { - throw new Error(`Parameter 'commandLine' cannot be null or empty.`); - } - // Path to tool to execute should be first arg - const toolPath = commandArgs[0]; - args = commandArgs.slice(1).concat(args || []); - const runner = new tr.ToolRunner(toolPath, args, options); - return runner.exec(); - }); -} -exports.exec = exec; -//# sourceMappingURL=exec.js.map \ No newline at end of file diff --git a/node_modules/@actions/exec/lib/interfaces.js b/node_modules/@actions/exec/lib/interfaces.js deleted file mode 100644 index db9191150..000000000 --- a/node_modules/@actions/exec/lib/interfaces.js +++ /dev/null @@ -1,3 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@actions/exec/lib/toolrunner.js b/node_modules/@actions/exec/lib/toolrunner.js deleted file mode 100644 index cbb433d11..000000000 --- a/node_modules/@actions/exec/lib/toolrunner.js +++ /dev/null @@ -1,587 +0,0 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const os = require("os"); -const events = require("events"); -const child = require("child_process"); -const path = require("path"); -const io = require("@actions/io"); -const ioUtil = require("@actions/io/lib/io-util"); -/* eslint-disable @typescript-eslint/unbound-method */ -const IS_WINDOWS = process.platform === 'win32'; -/* - * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. - */ -class ToolRunner extends events.EventEmitter { - constructor(toolPath, args, options) { - super(); - if (!toolPath) { - throw new Error("Parameter 'toolPath' cannot be null or empty."); - } - this.toolPath = toolPath; - this.args = args || []; - this.options = options || {}; - } - _debug(message) { - if (this.options.listeners && this.options.listeners.debug) { - this.options.listeners.debug(message); - } - } - _getCommandString(options, noPrefix) { - const toolPath = this._getSpawnFileName(); - const args = this._getSpawnArgs(options); - let cmd = noPrefix ? 
'' : '[command]'; // omit prefix when piped to a second tool - if (IS_WINDOWS) { - // Windows + cmd file - if (this._isCmdFile()) { - cmd += toolPath; - for (const a of args) { - cmd += ` ${a}`; - } - } - // Windows + verbatim - else if (options.windowsVerbatimArguments) { - cmd += `"${toolPath}"`; - for (const a of args) { - cmd += ` ${a}`; - } - } - // Windows (regular) - else { - cmd += this._windowsQuoteCmdArg(toolPath); - for (const a of args) { - cmd += ` ${this._windowsQuoteCmdArg(a)}`; - } - } - } - else { - // OSX/Linux - this can likely be improved with some form of quoting. - // creating processes on Unix is fundamentally different than Windows. - // on Unix, execvp() takes an arg array. - cmd += toolPath; - for (const a of args) { - cmd += ` ${a}`; - } - } - return cmd; - } - _processLineBuffer(data, strBuffer, onLine) { - try { - let s = strBuffer + data.toString(); - let n = s.indexOf(os.EOL); - while (n > -1) { - const line = s.substring(0, n); - onLine(line); - // the rest of the string ... - s = s.substring(n + os.EOL.length); - n = s.indexOf(os.EOL); - } - strBuffer = s; - } - catch (err) { - // streaming lines to console is best effort. Don't fail a build. - this._debug(`error processing line. Failed with error ${err}`); - } - } - _getSpawnFileName() { - if (IS_WINDOWS) { - if (this._isCmdFile()) { - return process.env['COMSPEC'] || 'cmd.exe'; - } - } - return this.toolPath; - } - _getSpawnArgs(options) { - if (IS_WINDOWS) { - if (this._isCmdFile()) { - let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; - for (const a of this.args) { - argline += ' '; - argline += options.windowsVerbatimArguments - ? a - : this._windowsQuoteCmdArg(a); - } - argline += '"'; - return [argline]; - } - } - return this.args; - } - _endsWith(str, end) { - return str.endsWith(end); - } - _isCmdFile() { - const upperToolPath = this.toolPath.toUpperCase(); - return (this._endsWith(upperToolPath, '.CMD') || - this._endsWith(upperToolPath, '.BAT')); - } - _windowsQuoteCmdArg(arg) { - // for .exe, apply the normal quoting rules that libuv applies - if (!this._isCmdFile()) { - return this._uvQuoteCmdArg(arg); - } - // otherwise apply quoting rules specific to the cmd.exe command line parser. - // the libuv rules are generic and are not designed specifically for cmd.exe - // command line parser. - // - // for a detailed description of the cmd.exe command line parser, refer to - // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 - // need quotes for empty arg - if (!arg) { - return '""'; - } - // determine whether the arg needs to be quoted - const cmdSpecialChars = [ - ' ', - '\t', - '&', - '(', - ')', - '[', - ']', - '{', - '}', - '^', - '=', - ';', - '!', - "'", - '+', - ',', - '`', - '~', - '|', - '<', - '>', - '"' - ]; - let needsQuotes = false; - for (const char of arg) { - if (cmdSpecialChars.some(x => x === char)) { - needsQuotes = true; - break; - } - } - // short-circuit if quotes not needed - if (!needsQuotes) { - return arg; - } - // the following quoting rules are very similar to the rules that by libuv applies. - // - // 1) wrap the string in quotes - // - // 2) double-up quotes - i.e. " => "" - // - // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately - // doesn't work well with a cmd.exe command line. - // - // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. 
- // for example, the command line: - // foo.exe "myarg:""my val""" - // is parsed by a .NET console app into an arg array: - // [ "myarg:\"my val\"" ] - // which is the same end result when applying libuv quoting rules. although the actual - // command line from libuv quoting rules would look like: - // foo.exe "myarg:\"my val\"" - // - // 3) double-up slashes that precede a quote, - // e.g. hello \world => "hello \world" - // hello\"world => "hello\\""world" - // hello\\"world => "hello\\\\""world" - // hello world\ => "hello world\\" - // - // technically this is not required for a cmd.exe command line, or the batch argument parser. - // the reasons for including this as a .cmd quoting rule are: - // - // a) this is optimized for the scenario where the argument is passed from the .cmd file to an - // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. - // - // b) it's what we've been doing previously (by deferring to node default behavior) and we - // haven't heard any complaints about that aspect. - // - // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be - // escaped when used on the command line directly - even though within a .cmd file % can be escaped - // by using %%. - // - // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts - // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. - // - // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would - // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the - // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args - // to an external program. - // - // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. - // % can be escaped within a .cmd file. - let reverse = '"'; - let quoteHit = true; - for (let i = arg.length; i > 0; i--) { - // walk the string in reverse - reverse += arg[i - 1]; - if (quoteHit && arg[i - 1] === '\\') { - reverse += '\\'; // double the slash - } - else if (arg[i - 1] === '"') { - quoteHit = true; - reverse += '"'; // double the quote - } - else { - quoteHit = false; - } - } - reverse += '"'; - return reverse - .split('') - .reverse() - .join(''); - } - _uvQuoteCmdArg(arg) { - // Tool runner wraps child_process.spawn() and needs to apply the same quoting as - // Node in certain cases where the undocumented spawn option windowsVerbatimArguments - // is used. - // - // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, - // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), - // pasting copyright notice from Node within this function: - // - // Copyright Joyent, Inc. and other Node contributors. All rights reserved. 
- // - // Permission is hereby granted, free of charge, to any person obtaining a copy - // of this software and associated documentation files (the "Software"), to - // deal in the Software without restriction, including without limitation the - // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - // sell copies of the Software, and to permit persons to whom the Software is - // furnished to do so, subject to the following conditions: - // - // The above copyright notice and this permission notice shall be included in - // all copies or substantial portions of the Software. - // - // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - // IN THE SOFTWARE. - if (!arg) { - // Need double quotation for empty argument - return '""'; - } - if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { - // No quotation needed - return arg; - } - if (!arg.includes('"') && !arg.includes('\\')) { - // No embedded double quotes or backslashes, so I can just wrap - // quote marks around the whole thing. - return `"${arg}"`; - } - // Expected input/output: - // input : hello"world - // output: "hello\"world" - // input : hello""world - // output: "hello\"\"world" - // input : hello\world - // output: hello\world - // input : hello\\world - // output: hello\\world - // input : hello\"world - // output: "hello\\\"world" - // input : hello\\"world - // output: "hello\\\\\"world" - // input : hello world\ - // output: "hello world\\" - note the comment in libuv actually reads "hello world\" - // but it appears the comment is wrong, it should be "hello world\\" - let reverse = '"'; - let quoteHit = true; - for (let i = arg.length; i > 0; i--) { - // walk the string in reverse - reverse += arg[i - 1]; - if (quoteHit && arg[i - 1] === '\\') { - reverse += '\\'; - } - else if (arg[i - 1] === '"') { - quoteHit = true; - reverse += '\\'; - } - else { - quoteHit = false; - } - } - reverse += '"'; - return reverse - .split('') - .reverse() - .join(''); - } - _cloneExecOptions(options) { - options = options || {}; - const result = { - cwd: options.cwd || process.cwd(), - env: options.env || process.env, - silent: options.silent || false, - windowsVerbatimArguments: options.windowsVerbatimArguments || false, - failOnStdErr: options.failOnStdErr || false, - ignoreReturnCode: options.ignoreReturnCode || false, - delay: options.delay || 10000 - }; - result.outStream = options.outStream || process.stdout; - result.errStream = options.errStream || process.stderr; - return result; - } - _getSpawnOptions(options, toolPath) { - options = options || {}; - const result = {}; - result.cwd = options.cwd; - result.env = options.env; - result['windowsVerbatimArguments'] = - options.windowsVerbatimArguments || this._isCmdFile(); - if (options.windowsVerbatimArguments) { - result.argv0 = `"${toolPath}"`; - } - return result; - } - /** - * Exec a tool. - * Output will be streamed to the live console. - * Returns promise with return code - * - * @param tool path to tool to exec - * @param options optional exec options. 
See ExecOptions - * @returns number - */ - exec() { - return __awaiter(this, void 0, void 0, function* () { - // root the tool path if it is unrooted and contains relative pathing - if (!ioUtil.isRooted(this.toolPath) && - (this.toolPath.includes('/') || - (IS_WINDOWS && this.toolPath.includes('\\')))) { - // prefer options.cwd if it is specified, however options.cwd may also need to be rooted - this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); - } - // if the tool is only a file name, then resolve it from the PATH - // otherwise verify it exists (add extension on Windows if necessary) - this.toolPath = yield io.which(this.toolPath, true); - return new Promise((resolve, reject) => { - this._debug(`exec tool: ${this.toolPath}`); - this._debug('arguments:'); - for (const arg of this.args) { - this._debug(` ${arg}`); - } - const optionsNonNull = this._cloneExecOptions(this.options); - if (!optionsNonNull.silent && optionsNonNull.outStream) { - optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); - } - const state = new ExecState(optionsNonNull, this.toolPath); - state.on('debug', (message) => { - this._debug(message); - }); - const fileName = this._getSpawnFileName(); - const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); - const stdbuffer = ''; - if (cp.stdout) { - cp.stdout.on('data', (data) => { - if (this.options.listeners && this.options.listeners.stdout) { - this.options.listeners.stdout(data); - } - if (!optionsNonNull.silent && optionsNonNull.outStream) { - optionsNonNull.outStream.write(data); - } - this._processLineBuffer(data, stdbuffer, (line) => { - if (this.options.listeners && this.options.listeners.stdline) { - this.options.listeners.stdline(line); - } - }); - }); - } - const errbuffer = ''; - if (cp.stderr) { - cp.stderr.on('data', (data) => { - state.processStderr = true; - if (this.options.listeners && this.options.listeners.stderr) { - this.options.listeners.stderr(data); - } - if (!optionsNonNull.silent && - optionsNonNull.errStream && - optionsNonNull.outStream) { - const s = optionsNonNull.failOnStdErr - ? optionsNonNull.errStream - : optionsNonNull.outStream; - s.write(data); - } - this._processLineBuffer(data, errbuffer, (line) => { - if (this.options.listeners && this.options.listeners.errline) { - this.options.listeners.errline(line); - } - }); - }); - } - cp.on('error', (err) => { - state.processError = err.message; - state.processExited = true; - state.processClosed = true; - state.CheckComplete(); - }); - cp.on('exit', (code) => { - state.processExitCode = code; - state.processExited = true; - this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); - state.CheckComplete(); - }); - cp.on('close', (code) => { - state.processExitCode = code; - state.processExited = true; - state.processClosed = true; - this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); - state.CheckComplete(); - }); - state.on('done', (error, exitCode) => { - if (stdbuffer.length > 0) { - this.emit('stdline', stdbuffer); - } - if (errbuffer.length > 0) { - this.emit('errline', errbuffer); - } - cp.removeAllListeners(); - if (error) { - reject(error); - } - else { - resolve(exitCode); - } - }); - }); - }); - } -} -exports.ToolRunner = ToolRunner; -/** - * Convert an arg string to an array of args. 
Handles escaping - * - * @param argString string of arguments - * @returns string[] array of arguments - */ -function argStringToArray(argString) { - const args = []; - let inQuotes = false; - let escaped = false; - let arg = ''; - function append(c) { - // we only escape double quotes. - if (escaped && c !== '"') { - arg += '\\'; - } - arg += c; - escaped = false; - } - for (let i = 0; i < argString.length; i++) { - const c = argString.charAt(i); - if (c === '"') { - if (!escaped) { - inQuotes = !inQuotes; - } - else { - append(c); - } - continue; - } - if (c === '\\' && escaped) { - append(c); - continue; - } - if (c === '\\' && inQuotes) { - escaped = true; - continue; - } - if (c === ' ' && !inQuotes) { - if (arg.length > 0) { - args.push(arg); - arg = ''; - } - continue; - } - append(c); - } - if (arg.length > 0) { - args.push(arg.trim()); - } - return args; -} -exports.argStringToArray = argStringToArray; -class ExecState extends events.EventEmitter { - constructor(options, toolPath) { - super(); - this.processClosed = false; // tracks whether the process has exited and stdio is closed - this.processError = ''; - this.processExitCode = 0; - this.processExited = false; // tracks whether the process has exited - this.processStderr = false; // tracks whether stderr was written to - this.delay = 10000; // 10 seconds - this.done = false; - this.timeout = null; - if (!toolPath) { - throw new Error('toolPath must not be empty'); - } - this.options = options; - this.toolPath = toolPath; - if (options.delay) { - this.delay = options.delay; - } - } - CheckComplete() { - if (this.done) { - return; - } - if (this.processClosed) { - this._setResult(); - } - else if (this.processExited) { - this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this); - } - } - _debug(message) { - this.emit('debug', message); - } - _setResult() { - // determine whether there is an error - let error; - if (this.processExited) { - if (this.processError) { - error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`); - } - else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { - error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); - } - else if (this.processStderr && this.options.failOnStdErr) { - error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); - } - } - // clear the timeout - if (this.timeout) { - clearTimeout(this.timeout); - this.timeout = null; - } - this.done = true; - this.emit('done', error, this.processExitCode); - } - static HandleTimeout(state) { - if (state.done) { - return; - } - if (!state.processClosed && state.processExited) { - const message = `The STDIO streams did not close within ${state.delay / - 1000} seconds of the exit event from process '${state.toolPath}'. 
This may indicate a child process inherited the STDIO streams and has not yet exited.`; - state._debug(message); - } - state._setResult(); - } -} -//# sourceMappingURL=toolrunner.js.map \ No newline at end of file diff --git a/node_modules/@actions/exec/package.json b/node_modules/@actions/exec/package.json deleted file mode 100644 index 1b8078fda..000000000 --- a/node_modules/@actions/exec/package.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "@actions/exec", - "version": "1.0.3", - "description": "Actions exec lib", - "keywords": [ - "github", - "actions", - "exec" - ], - "homepage": "https://github.com/actions/toolkit/tree/master/packages/exec", - "license": "MIT", - "main": "lib/exec.js", - "types": "lib/exec.d.ts", - "directories": { - "lib": "lib", - "test": "__tests__" - }, - "files": [ - "lib" - ], - "publishConfig": { - "access": "public" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/actions/toolkit.git", - "directory": "packages/exec" - }, - "scripts": { - "audit-moderate": "npm install && npm audit --audit-level=moderate", - "test": "echo \"Error: run tests from root\" && exit 1", - "tsc": "tsc" - }, - "bugs": { - "url": "https://github.com/actions/toolkit/issues" - }, - "dependencies": { - "@actions/io": "^1.0.1" - } -} diff --git a/node_modules/@actions/io/lib/io-util.js b/node_modules/@actions/io/lib/io-util.js deleted file mode 100644 index 17b3bba58..000000000 --- a/node_modules/@actions/io/lib/io-util.js +++ /dev/null @@ -1,195 +0,0 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var _a; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert_1 = require("assert"); -const fs = require("fs"); -const path = require("path"); -_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; -exports.IS_WINDOWS = process.platform === 'win32'; -function exists(fsPath) { - return __awaiter(this, void 0, void 0, function* () { - try { - yield exports.stat(fsPath); - } - catch (err) { - if (err.code === 'ENOENT') { - return false; - } - throw err; - } - return true; - }); -} -exports.exists = exists; -function isDirectory(fsPath, useStat = false) { - return __awaiter(this, void 0, void 0, function* () { - const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); - return stats.isDirectory(); - }); -} -exports.isDirectory = isDirectory; -/** - * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: - * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). 
- */ -function isRooted(p) { - p = normalizeSeparators(p); - if (!p) { - throw new Error('isRooted() parameter "p" cannot be empty'); - } - if (exports.IS_WINDOWS) { - return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello - ); // e.g. C: or C:\hello - } - return p.startsWith('/'); -} -exports.isRooted = isRooted; -/** - * Recursively create a directory at `fsPath`. - * - * This implementation is optimistic, meaning it attempts to create the full - * path first, and backs up the path stack from there. - * - * @param fsPath The path to create - * @param maxDepth The maximum recursion depth - * @param depth The current recursion depth - */ -function mkdirP(fsPath, maxDepth = 1000, depth = 1) { - return __awaiter(this, void 0, void 0, function* () { - assert_1.ok(fsPath, 'a path argument must be provided'); - fsPath = path.resolve(fsPath); - if (depth >= maxDepth) - return exports.mkdir(fsPath); - try { - yield exports.mkdir(fsPath); - return; - } - catch (err) { - switch (err.code) { - case 'ENOENT': { - yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1); - yield exports.mkdir(fsPath); - return; - } - default: { - let stats; - try { - stats = yield exports.stat(fsPath); - } - catch (err2) { - throw err; - } - if (!stats.isDirectory()) - throw err; - } - } - } - }); -} -exports.mkdirP = mkdirP; -/** - * Best effort attempt to determine whether a file exists and is executable. - * @param filePath file path to check - * @param extensions additional file extensions to try - * @return if file exists and is executable, returns the file path. otherwise empty string. - */ -function tryGetExecutablePath(filePath, extensions) { - return __awaiter(this, void 0, void 0, function* () { - let stats = undefined; - try { - // test file exists - stats = yield exports.stat(filePath); - } - catch (err) { - if (err.code !== 'ENOENT') { - // eslint-disable-next-line no-console - console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); - } - } - if (stats && stats.isFile()) { - if (exports.IS_WINDOWS) { - // on Windows, test for valid extension - const upperExt = path.extname(filePath).toUpperCase(); - if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { - return filePath; - } - } - else { - if (isUnixExecutable(stats)) { - return filePath; - } - } - } - // try each extension - const originalFilePath = filePath; - for (const extension of extensions) { - filePath = originalFilePath + extension; - stats = undefined; - try { - stats = yield exports.stat(filePath); - } - catch (err) { - if (err.code !== 'ENOENT') { - // eslint-disable-next-line no-console - console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); - } - } - if (stats && stats.isFile()) { - if (exports.IS_WINDOWS) { - // preserve the case of the actual file (since an extension was appended) - try { - const directory = path.dirname(filePath); - const upperName = path.basename(filePath).toUpperCase(); - for (const actualName of yield exports.readdir(directory)) { - if (upperName === actualName.toUpperCase()) { - filePath = path.join(directory, actualName); - break; - } - } - } - catch (err) { - // eslint-disable-next-line no-console - console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); - } - return filePath; - } - else { - if (isUnixExecutable(stats)) { - return filePath; - } - } - } - } - return ''; - }); -} -exports.tryGetExecutablePath = 
tryGetExecutablePath; -function normalizeSeparators(p) { - p = p || ''; - if (exports.IS_WINDOWS) { - // convert slashes on Windows - p = p.replace(/\//g, '\\'); - // remove redundant slashes - return p.replace(/\\\\+/g, '\\'); - } - // remove redundant slashes - return p.replace(/\/\/+/g, '/'); -} -// on Mac/Linux, test the execute bit -// R W X R W X R W X -// 256 128 64 32 16 8 4 2 1 -function isUnixExecutable(stats) { - return ((stats.mode & 1) > 0 || - ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || - ((stats.mode & 64) > 0 && stats.uid === process.getuid())); -} -//# sourceMappingURL=io-util.js.map \ No newline at end of file diff --git a/node_modules/@actions/io/lib/io.js b/node_modules/@actions/io/lib/io.js deleted file mode 100644 index ad5bdb926..000000000 --- a/node_modules/@actions/io/lib/io.js +++ /dev/null @@ -1,290 +0,0 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const childProcess = require("child_process"); -const path = require("path"); -const util_1 = require("util"); -const ioUtil = require("./io-util"); -const exec = util_1.promisify(childProcess.exec); -/** - * Copies a file or folder. - * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js - * - * @param source source path - * @param dest destination path - * @param options optional. See CopyOptions. - */ -function cp(source, dest, options = {}) { - return __awaiter(this, void 0, void 0, function* () { - const { force, recursive } = readCopyOptions(options); - const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; - // Dest is an existing file, but not forcing - if (destStat && destStat.isFile() && !force) { - return; - } - // If dest is an existing directory, should copy inside. - const newDest = destStat && destStat.isDirectory() - ? path.join(dest, path.basename(source)) - : dest; - if (!(yield ioUtil.exists(source))) { - throw new Error(`no such file or directory: ${source}`); - } - const sourceStat = yield ioUtil.stat(source); - if (sourceStat.isDirectory()) { - if (!recursive) { - throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`); - } - else { - yield cpDirRecursive(source, newDest, 0, force); - } - } - else { - if (path.relative(source, newDest) === '') { - // a file cannot be copied to itself - throw new Error(`'${newDest}' and '${source}' are the same file`); - } - yield copyFile(source, newDest, force); - } - }); -} -exports.cp = cp; -/** - * Moves a path. - * - * @param source source path - * @param dest destination path - * @param options optional. See MoveOptions. 
- */ -function mv(source, dest, options = {}) { - return __awaiter(this, void 0, void 0, function* () { - if (yield ioUtil.exists(dest)) { - let destExists = true; - if (yield ioUtil.isDirectory(dest)) { - // If dest is directory copy src into dest - dest = path.join(dest, path.basename(source)); - destExists = yield ioUtil.exists(dest); - } - if (destExists) { - if (options.force == null || options.force) { - yield rmRF(dest); - } - else { - throw new Error('Destination already exists'); - } - } - } - yield mkdirP(path.dirname(dest)); - yield ioUtil.rename(source, dest); - }); -} -exports.mv = mv; -/** - * Remove a path recursively with force - * - * @param inputPath path to remove - */ -function rmRF(inputPath) { - return __awaiter(this, void 0, void 0, function* () { - if (ioUtil.IS_WINDOWS) { - // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another - // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del. - try { - if (yield ioUtil.isDirectory(inputPath, true)) { - yield exec(`rd /s /q "${inputPath}"`); - } - else { - yield exec(`del /f /a "${inputPath}"`); - } - } - catch (err) { - // if you try to delete a file that doesn't exist, desired result is achieved - // other errors are valid - if (err.code !== 'ENOENT') - throw err; - } - // Shelling out fails to remove a symlink folder with missing source, this unlink catches that - try { - yield ioUtil.unlink(inputPath); - } - catch (err) { - // if you try to delete a file that doesn't exist, desired result is achieved - // other errors are valid - if (err.code !== 'ENOENT') - throw err; - } - } - else { - let isDir = false; - try { - isDir = yield ioUtil.isDirectory(inputPath); - } - catch (err) { - // if you try to delete a file that doesn't exist, desired result is achieved - // other errors are valid - if (err.code !== 'ENOENT') - throw err; - return; - } - if (isDir) { - yield exec(`rm -rf "${inputPath}"`); - } - else { - yield ioUtil.unlink(inputPath); - } - } - }); -} -exports.rmRF = rmRF; -/** - * Make a directory. Creates the full path with folders in between - * Will throw if it fails - * - * @param fsPath path to create - * @returns Promise - */ -function mkdirP(fsPath) { - return __awaiter(this, void 0, void 0, function* () { - yield ioUtil.mkdirP(fsPath); - }); -} -exports.mkdirP = mkdirP; -/** - * Returns path of a tool had the tool actually been invoked. Resolves via paths. - * If you check and the tool does not exist, it will throw. - * - * @param tool name of the tool - * @param check whether to check if tool exists - * @returns Promise path to tool - */ -function which(tool, check) { - return __awaiter(this, void 0, void 0, function* () { - if (!tool) { - throw new Error("parameter 'tool' is required"); - } - // recursive when check=true - if (check) { - const result = yield which(tool, false); - if (!result) { - if (ioUtil.IS_WINDOWS) { - throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); - } - else { - throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. 
Also check the file mode to verify the file is executable.`); - } - } - } - try { - // build the list of extensions to try - const extensions = []; - if (ioUtil.IS_WINDOWS && process.env.PATHEXT) { - for (const extension of process.env.PATHEXT.split(path.delimiter)) { - if (extension) { - extensions.push(extension); - } - } - } - // if it's rooted, return it if exists. otherwise return empty. - if (ioUtil.isRooted(tool)) { - const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); - if (filePath) { - return filePath; - } - return ''; - } - // if any path separators, return empty - if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) { - return ''; - } - // build the list of directories - // - // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, - // it feels like we should not do this. Checking the current directory seems like more of a use - // case of a shell, and the which() function exposed by the toolkit should strive for consistency - // across platforms. - const directories = []; - if (process.env.PATH) { - for (const p of process.env.PATH.split(path.delimiter)) { - if (p) { - directories.push(p); - } - } - } - // return the first match - for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions); - if (filePath) { - return filePath; - } - } - return ''; - } - catch (err) { - throw new Error(`which failed with message ${err.message}`); - } - }); -} -exports.which = which; -function readCopyOptions(options) { - const force = options.force == null ? true : options.force; - const recursive = Boolean(options.recursive); - return { force, recursive }; -} -function cpDirRecursive(sourceDir, destDir, currentDepth, force) { - return __awaiter(this, void 0, void 0, function* () { - // Ensure there is not a run away recursive copy - if (currentDepth >= 255) - return; - currentDepth++; - yield mkdirP(destDir); - const files = yield ioUtil.readdir(sourceDir); - for (const fileName of files) { - const srcFile = `${sourceDir}/${fileName}`; - const destFile = `${destDir}/${fileName}`; - const srcFileStat = yield ioUtil.lstat(srcFile); - if (srcFileStat.isDirectory()) { - // Recurse - yield cpDirRecursive(srcFile, destFile, currentDepth, force); - } - else { - yield copyFile(srcFile, destFile, force); - } - } - // Change the mode for the newly created directory - yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); - }); -} -// Buffered file copy -function copyFile(srcFile, destFile, force) { - return __awaiter(this, void 0, void 0, function* () { - if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { - // unlink/re-link it - try { - yield ioUtil.lstat(destFile); - yield ioUtil.unlink(destFile); - } - catch (e) { - // Try to override file permission - if (e.code === 'EPERM') { - yield ioUtil.chmod(destFile, '0666'); - yield ioUtil.unlink(destFile); - } - // other errors = it doesn't exist, no work to do - } - // Copy over symlink - const symlinkFull = yield ioUtil.readlink(srcFile); - yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 
'junction' : null); - } - else if (!(yield ioUtil.exists(destFile)) || force) { - yield ioUtil.copyFile(srcFile, destFile); - } - }); -} -//# sourceMappingURL=io.js.map \ No newline at end of file diff --git a/node_modules/@actions/io/package.json b/node_modules/@actions/io/package.json deleted file mode 100644 index 0fd128ef5..000000000 --- a/node_modules/@actions/io/package.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "name": "@actions/io", - "version": "1.0.2", - "description": "Actions io lib", - "keywords": [ - "github", - "actions", - "io" - ], - "homepage": "https://github.com/actions/toolkit/tree/master/packages/io", - "license": "MIT", - "main": "lib/io.js", - "types": "lib/io.d.ts", - "directories": { - "lib": "lib", - "test": "__tests__" - }, - "files": [ - "lib" - ], - "publishConfig": { - "access": "public" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/actions/toolkit.git", - "directory": "packages/io" - }, - "scripts": { - "audit-moderate": "npm install && npm audit --audit-level=moderate", - "test": "echo \"Error: run tests from root\" && exit 1", - "tsc": "tsc" - }, - "bugs": { - "url": "https://github.com/actions/toolkit/issues" - } -} diff --git a/package.json b/package.json index 43e9714ac..a6e693936 100644 --- a/package.json +++ b/package.json @@ -13,7 +13,6 @@ "dependencies": { "@actions/artifact": "^0.1.0", "@actions/core": "^1.2.3", - "@actions/exec": "^1.0.3", "@actions/github": "^2.1.1", "@lhci/cli": "0.3.9", "@lhci/utils": "^0.3.9", diff --git a/src/index.js b/src/index.js index ffd7280a1..942ee20f4 100644 --- a/src/index.js +++ b/src/index.js @@ -1,7 +1,7 @@ require('./utils/support-lh-plugins') // add automatic support for LH Plugins env const core = require('@actions/core') const { join } = require('path') -const { exec } = require('@actions/exec') +const childProcess = require('child_process') const lhciCliPath = require.resolve('@lhci/cli/src/cli') const { getInput, hasAssertConfig } = require('./config') const { uploadArtifacts } = require('./utils/artifacts') @@ -19,14 +19,14 @@ const { enableProblemMatcher } = require('./utils/problem-matchers') async function main() { core.startGroup('Action config') - const resultsPath = join(process.cwd(), '.lighthouserc') + const resultsPath = join(process.cwd(), '.lighthouseci') const input = getInput() core.info(`Input args: ${JSON.stringify(input, null, ' ')}`) core.endGroup() // Action config /******************************* 1. 
COLLECT ***********************************/
   core.startGroup(`Collecting`)
-  const collectArgs = ['collect', `--numberOfRuns=${input.runs}`]
+  const collectArgs = [`--numberOfRuns=${input.runs}`]
 
   if (input.staticDistDir) {
     collectArgs.push(`--static-dist-dir=${input.staticDistDir}`)
@@ -39,7 +39,7 @@ async function main() {
   }
   if (input.configPath) collectArgs.push(`--config=${input.configPath}`)
 
-  const collectStatus = await exec(lhciCliPath, collectArgs)
+  const collectStatus = exec('collect', collectArgs)
   if (collectStatus !== 0) throw new Error(`LHCI 'collect' has encountered a problem.`)
 
   // upload artifacts as soon as collected
@@ -51,7 +51,7 @@ async function main() {
   let isAssertFailed = false
   if (input.budgetPath || hasAssertConfig(input.configPath)) {
     core.startGroup(`Asserting`)
-    const assertArgs = ['assert']
+    const assertArgs = []
 
     if (input.budgetPath) {
       assertArgs.push(`--budgetsFile=${input.budgetPath}`)
@@ -61,16 +61,18 @@ async function main() {
 
   // run lhci with problem matcher
   // https://github.com/actions/toolkit/blob/master/docs/commands.md#problem-matchers
-    const assertStatus = await exec(lhciCliPath, assertArgs)
+    const assertStatus = exec('assert', assertArgs)
     isAssertFailed = assertStatus !== 0
-    if (isAssertFailed) enableProblemMatcher(resultsPath)
     core.endGroup() // Asserting
   }
 
+  // annotate assertions
+  if (isAssertFailed) enableProblemMatcher(resultsPath)
+
   /******************************* 3. UPLOAD ************************************/
   if (input.serverToken || input.temporaryPublicStorage) {
     core.startGroup(`Uploading`)
-    const uploadParams = ['upload']
+    const uploadParams = []
 
     if (input.serverToken) {
       uploadParams.push('--target=lhci', `--serverBaseUrl=${input.serverToken}`, `--token=${input.serverToken}`)
@@ -78,7 +80,7 @@ async function main() {
       uploadParams.push('--target=temporary-public-storage', '--uploadUrlMap=true')
     }
 
-    const uploadStatus = await exec(lhciCliPath, uploadParams)
+    const uploadStatus = exec('upload', uploadParams)
    if (uploadStatus !== 0) throw new Error(`LHCI 'upload' failed to upload to LHCI server.`)
 
     core.endGroup() // Uploading
@@ -110,3 +112,17 @@ async function main() {
 main()
   .catch(err => core.setFailed(err.message))
   .then(() => core.debug(`done in ${process.uptime()}s`))
+
+/**
+ * Run a child command synchronously.
+ * @param {'collect'|'assert'|'upload'} command
+ * @param {string[]} [args]
+ * @return {number}
+ */
+
+function exec(command, args = []) {
+  const combinedArgs = [lhciCliPath, command, ...args]
+  const { status = -1 } = childProcess.spawnSync(process.argv[0], combinedArgs, { stdio: 'inherit' })
+  return status || 0
+}
diff --git a/src/utils/problem-matchers.js b/src/utils/problem-matchers.js
index 8a5d5933f..3f438e0fe 100644
--- a/src/utils/problem-matchers.js
+++ b/src/utils/problem-matchers.js
@@ -1,4 +1,5 @@
 const { getAssertionsByUrl } = require('./lhci-helpers')
+const core = require('@actions/core')
 
 /**
  * Problem matchers allow adding annotations to Action output:
@@ -14,6 +15,7 @@ const { getAssertionsByUrl } = require('./lhci-helpers')
  */
 
 exports.enableProblemMatcher = function enableProblemMatcher(resultsPath) {
+  core.startGroup(`Annotating`)
   console.log('::add-matcher::lhci.json')
   const assertionsByUrl = getAssertionsByUrl(resultsPath)
   Object.values(assertionsByUrl).forEach(assertions => {
@@ -25,4 +27,5 @@ exports.enableProblemMatcher = function enableProblemMatcher(resultsPath) {
     })
   })
   console.log('::remove-matcher owner=lhci::')
+  core.endGroup()
 }
diff --git a/yarn.lock b/yarn.lock
index d249c31fc..51e82a13e 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -15,13 +15,6 @@
   resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.2.3.tgz#e844b4fa0820e206075445079130868f95bfca95"
   integrity sha512-Wp4xnyokakM45Uuj4WLUxdsa8fJjKVl1fDTsPbTEcTcuu0Nb26IPQbOtjmnfaCPGcaoPOOqId8H9NapZ8gii4w==
 
-"@actions/exec@^1.0.3":
-  version "1.0.3"
-  resolved "https://registry.yarnpkg.com/@actions/exec/-/exec-1.0.3.tgz#b967f8700d6ff011dcc91243b58bafc1bb9ab95f"
-  integrity sha512-TogJGnueOmM7ntCi0ASTUj4LapRRtDfj57Ja4IhPmg2fls28uVOPbAn8N+JifaOumN2UG3oEO/Ixek2A4NcYSA==
-  dependencies:
-    "@actions/io" "^1.0.1"
-
 "@actions/github@^2.1.1":
   version "2.1.1"
   resolved "https://registry.yarnpkg.com/@actions/github/-/github-2.1.1.tgz#bcabedff598196d953f58ba750d5e75549a75142"
@@ -38,11 +31,6 @@
   dependencies:
     tunnel "0.0.6"
 
-"@actions/io@^1.0.1":
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/@actions/io/-/io-1.0.2.tgz#2f614b6e69ce14d191180451eb38e6576a6e6b27"
-  integrity sha512-J8KuFqVPr3p6U8W93DOXlXW6zFvrQAJANdS+vw0YhusLIq+bszW8zmK2Fh1C2kDPX8FMvwIl1OUcFgvJoXLbAg==
-
 "@lhci/cli@0.3.9":
   version "0.3.9"
   resolved "https://registry.yarnpkg.com/@lhci/cli/-/cli-0.3.9.tgz#5c7054b1633ae16ab6f7fb3700e77d0d941ac2e6"

From a26d0fc355cced4c99e647094bdda8b98c4f1ddf Mon Sep 17 00:00:00 2001
From: Aleksey Kulikov
Date: Wed, 11 Mar 2020 23:51:06 +0100
Subject: [PATCH 17/19] fix artefacts upload & notification order

---
 src/config.js          |  2 +-
 src/index.js           | 31 +++++++++++++++----------------
 src/utils/artifacts.js |  3 ++-
 3 files changed, 18 insertions(+), 18 deletions(-)

diff --git a/src/config.js b/src/config.js
index 2178a1efc..f394ad76e 100644
--- a/src/config.js
+++ b/src/config.js
@@ -67,7 +67,6 @@ exports.getInput = function getInputArgs() {
     urls,
     runs: parseInt(core.getInput('runs'), 10) || 1,
     staticDistDir,
-    uploadArtifacts: core.getInput('uploadArtifacts') === 'true' ? true : false,
     // upload
     serverBaseUrl,
     serverToken,
@@ -76,6 +75,7 @@ exports.getInput = function getInputArgs() {
     budgetPath: core.getInput('budgetPath') || null,
     configPath,
     // notify
+    uploadArtifacts: core.getInput('uploadArtifacts') === 'true' ? true : false,
     slackWebhookUrl: core.getInput('slackWebhookUrl') || null,
     githubToken: core.getInput('githubToken') || null
   }
diff --git a/src/index.js b/src/index.js
index 942ee20f4..9023aec82 100644
--- a/src/index.js
+++ b/src/index.js
@@ -42,9 +42,6 @@ async function main() {
   const collectStatus = exec('collect', collectArgs)
   if (collectStatus !== 0) throw new Error(`LHCI 'collect' has encountered a problem.`)
 
-  // upload artifacts as soon as collected
-  if (input.uploadArtifacts) await uploadArtifacts(resultsPath)
-
   core.endGroup() // Collecting
 
   /******************************* 2. ASSERT ************************************/
@@ -66,9 +63,6 @@ async function main() {
     core.endGroup() // Asserting
   }
 
-  // annotate assertions
-  if (isAssertFailed) enableProblemMatcher(resultsPath)
-
   /******************************* 3. UPLOAD ************************************/
   if (input.serverToken || input.temporaryPublicStorage) {
     core.startGroup(`Uploading`)
@@ -87,24 +81,29 @@ async function main() {
   }
 
   /******************************* 4. NOTIFY ************************************/
-  if ((input.githubToken || input.slackWebhookUrl) && isAssertFailed) {
-    core.startGroup(`Notifying`)
-    if (input.githubToken) {
-      await sendGithubComment({ githubToken: input.githubToken, resultsPath })
-    }
+  core.startGroup(`Notifying`)
+  // upload artifacts as soon as collected
+  if (input.uploadArtifacts) await uploadArtifacts(resultsPath)
 
-    // send slack notification only on error
-    if (input.slackWebhookUrl) {
-      await sendSlackNotification({ slackWebhookUrl: input.slackWebhookUrl, resultsPath })
-    }
+  // annotate assertions
+  if (isAssertFailed) enableProblemMatcher(resultsPath)
 
-    core.endGroup() // Notifying
+  // send github comment
+  if (input.githubToken && isAssertFailed) {
+    await sendGithubComment({ githubToken: input.githubToken, resultsPath })
+  }
+
+  // send slack notification only on error
+  if (input.slackWebhookUrl && isAssertFailed) {
+    await sendSlackNotification({ slackWebhookUrl: input.slackWebhookUrl, resultsPath })
   }
 
   // set failing exit code for the action
   if (isAssertFailed) {
     core.setFailed(`Assertions have failed.`)
   }
+
+  core.endGroup() // Notifying
 }
 
 // run `main()`
diff --git a/src/utils/artifacts.js b/src/utils/artifacts.js
index 6d90445df..d6455a5e1 100644
--- a/src/utils/artifacts.js
+++ b/src/utils/artifacts.js
@@ -1,10 +1,11 @@
 const artifact = require('@actions/artifact')
 const fs = require('fs')
+const { join } = require('path')
 
 /** @param {string} rootDirectory */
 exports.uploadArtifacts = function uploadArtifacts(rootDirectory) {
   const artifactClient = artifact.create()
   const artifactName = 'lighthouse-results'
-  const files = fs.readdirSync(rootDirectory)
+  const files = fs.readdirSync(rootDirectory).map(fileName => join(rootDirectory, fileName))
   return artifactClient.uploadArtifact(artifactName, files, rootDirectory, { continueOnError: true })
 }

From 5508286f0b53b3c15864229546ee85bb7addcfdd Mon Sep 17 00:00:00 2001
From: Aleksey Kulikov
Date: Wed, 11 Mar 2020 23:55:22 +0100
Subject: [PATCH 18/19] fix matcher location

---
 src/utils/problem-matchers.js | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/src/utils/problem-matchers.js b/src/utils/problem-matchers.js
index 3f438e0fe..4a2baf368 100644
--- a/src/utils/problem-matchers.js
+++ b/src/utils/problem-matchers.js
@@ -1,5 +1,5 @@
 const { getAssertionsByUrl } = require('./lhci-helpers')
-const core = require('@actions/core')
+const { join } = require('path')
 
 /**
  * Problem matchers allow adding annotations to Action output:
@@ -15,8 +15,8 @@ const core = require('@actions/core')
  */
 
 exports.enableProblemMatcher = function enableProblemMatcher(resultsPath) {
-  core.startGroup(`Annotating`)
-  console.log('::add-matcher::lhci.json')
+  const lhciPath = join(process.cwd(), '.github/lhci.json')
+  console.log(`::add-matcher::${lhciPath}`)
   const assertionsByUrl = getAssertionsByUrl(resultsPath)
   Object.values(assertionsByUrl).forEach(assertions => {
     assertions.forEach(a => {
@@ -27,5 +27,4 @@ exports.enableProblemMatcher = function enableProblemMatcher(resultsPath) {
     })
   })
   console.log('::remove-matcher owner=lhci::')
-  core.endGroup()
 }

From e0a1f7298f4c21209e4ac3fa83ccde110a02406d Mon Sep 17 00:00:00 2001
From: Aleksey Kulikov
Date: Thu, 12 Mar 2020 00:03:24 +0100
Subject: [PATCH 19/19] fix dash for problem matcher

---
 .github/lighthouse/budget.json       |  4 ++--
 .github/{lhci.json => matchers.json} |  4 ++--
 src/index.js                         |  4 ++--
 src/utils/problem-matchers.js        | 11 +++++------
 4 files changed, 11 insertions(+), 12 deletions(-)
 rename .github/{lhci.json => matchers.json} (61%)

diff --git a/.github/lighthouse/budget.json b/.github/lighthouse/budget.json
index 131ec8a5d..9ea61adfb 100644
--- a/.github/lighthouse/budget.json
+++ b/.github/lighthouse/budget.json
@@ -8,7 +8,7 @@
       },
       {
         "resourceType": "script",
-        "budget": 50
+        "budget": 100
       },
       {
         "resourceType": "stylesheet",
@@ -20,4 +20,4 @@
     }
   ]
 }
-]
\ No newline at end of file
+]
diff --git a/.github/lhci.json b/.github/matchers.json
similarity index 61%
rename from .github/lhci.json
rename to .github/matchers.json
index 136873455..b4979837c 100644
--- a/.github/lhci.json
+++ b/.github/matchers.json
@@ -1,10 +1,10 @@
 {
   "problemMatcher": [
     {
-      "owner": "lhci",
+      "owner": "lighthouse-ci-action",
       "pattern": [
         {
-          "regexp": "^(.*)\\s–\\s(error|warning)\\s–\\s(.*)\\s–\\s(.*)$",
+          "regexp": "^(?:\\s+)?(.*)\\|(error|warning)\\|(.*)\\|(.*)$",
           "file": 1,
           "code": 2,
           "severity": 3,
diff --git a/src/index.js b/src/index.js
index 9023aec82..73623ff8a 100644
--- a/src/index.js
+++ b/src/index.js
@@ -7,7 +7,7 @@ const { getInput, hasAssertConfig } = require('./config')
 const { uploadArtifacts } = require('./utils/artifacts')
 const { sendGithubComment } = require('./utils/github')
 const { sendSlackNotification } = require('./utils/slack')
-const { enableProblemMatcher } = require('./utils/problem-matchers')
+const { runProblemMatchers } = require('./utils/problem-matchers')
 
 /**
  * Audit urls with Lighthouse CI in 3 stages:
@@ -86,7 +86,7 @@ async function main() {
   if (input.uploadArtifacts) await uploadArtifacts(resultsPath)
 
   // annotate assertions
-  if (isAssertFailed) enableProblemMatcher(resultsPath)
+  if (isAssertFailed) runProblemMatchers(resultsPath)
 
   // send github comment
   if (input.githubToken && isAssertFailed) {
diff --git a/src/utils/problem-matchers.js b/src/utils/problem-matchers.js
index 4a2baf368..372c813a9 100644
--- a/src/utils/problem-matchers.js
+++ b/src/utils/problem-matchers.js
@@ -1,5 +1,5 @@
-const { getAssertionsByUrl } = require('./lhci-helpers')
 const { join } = require('path')
+const { getAssertionsByUrl } = require('./lhci-helpers')
 
 /**
  * Problem matchers allow adding annotations to Action output:
@@ -14,17 +14,16 @@ const { join } = require('path')
  * @param {string} resultsPath
  */
 
-exports.enableProblemMatcher = function enableProblemMatcher(resultsPath) {
-  const lhciPath = join(process.cwd(), '.github/lhci.json')
-  console.log(`::add-matcher::${lhciPath}`)
+exports.runProblemMatchers = function enableProblemMatcher(resultsPath) {
+  console.log(`::add-matcher::${join(process.cwd(), '.github/matchers.json')}`)
   const assertionsByUrl = getAssertionsByUrl(resultsPath)
   Object.values(assertionsByUrl).forEach(assertions => {
     assertions.forEach(a => {
       const message =
         `\`${a.auditId}\` ${a.level === 'error' ? 'failure' : 'warning'} for \`${a.name}\` assertion, ` +
         `expected **${a.operator} ${a.expected}**, but found **${a.actual}**.`
-      console.log('%s – %s – %s – %s', a.url, a.level, a.auditId, message)
+      console.log(`${a.url}|${a.level}|${a.auditId}|${message}`)
     })
   })
-  console.log('::remove-matcher owner=lhci::')
+  console.log('::remove-matcher owner=lighthouse-ci-action::')
 }
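
Note: the pipe-delimited format introduced in PATCH 19 has to stay in sync between the console.log in src/utils/problem-matchers.js and the regexp in .github/matchers.json. Below is a minimal sketch for checking that locally with plain Node; it is not part of the patches above. The regexp and the file/code/severity/message group mapping are copied from matchers.json, while the sample URL, audit id, and assertion values are hypothetical, only shaped like the output of runProblemMatchers():

  // check-matcher.js (illustrative sketch; sample values are hypothetical)
  const regexp = /^(?:\s+)?(.*)\|(error|warning)\|(.*)\|(.*)$/

  // A line in the format produced by runProblemMatchers():
  // `${a.url}|${a.level}|${a.auditId}|${message}`
  const line =
    'http://localhost:3000/|error|first-contentful-paint|' +
    '`first-contentful-paint` failure for `maxNumericValue` assertion, expected **<= 2000**, but found **3500**.'

  const m = line.match(regexp)
  // Capture groups map to the matcher fields: 1 = file, 2 = code, 3 = severity, 4 = message
  console.log(m && { file: m[1], code: m[2], severity: m[3], message: m[4] })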