diff --git a/.github/workflows/e2ePerformanceTests.yml b/.github/workflows/e2ePerformanceTests.yml
index b48c7b2175eb..089f7886b527 100644
--- a/.github/workflows/e2ePerformanceTests.yml
+++ b/.github/workflows/e2ePerformanceTests.yml
@@ -221,6 +221,36 @@ jobs:
         env:
           GITHUB_TOKEN: ${{ github.token }}
 
+      - name: Check if any tests were skipped
+        id: checkIfSkippedTestsDetected
+        run: |
+          if grep -q '⚠️' "./Host_Machine_Files/\$WORKING_DIRECTORY/output.md"; then
+            # Expose an output on the GH action indicating that tests were skipped:
+            echo "skippedTestsDetected=true" >> "$GITHUB_OUTPUT"
+          else
+            echo "skippedTestsDetected=false" >> "$GITHUB_OUTPUT"
+            echo '✅ no skipped tests detected'
+          fi
+        env:
+          GITHUB_TOKEN: ${{ github.token }}
+
+      - name: 'Announce skipped tests in Slack'
+        if: ${{ steps.checkIfSkippedTestsDetected.outputs.skippedTestsDetected == 'true' }}
+        uses: 8398a7/action-slack@v3
+        with:
+          status: custom
+          custom_payload: |
+            {
+              channel: '#e2e-announce',
+              attachments: [{
+                color: 'danger',
+                text: `⚠️ ${process.env.AS_REPO} Some E2E tests were skipped in the workflow ⚠️`,
+              }]
+            }
+        env:
+          GITHUB_TOKEN: ${{ github.token }}
+          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK }}
+
       - name: 'Announce regression in Slack'
         if: ${{ steps.checkIfRegressionDetected.outputs.performanceRegressionDetected == 'true' }}
         uses: 8398a7/action-slack@v3
diff --git a/src/libs/E2E/tests/appStartTimeTest.e2e.ts b/src/libs/E2E/tests/appStartTimeTest.e2e.ts
index 188dd65c85e9..ccd781e08514 100644
--- a/src/libs/E2E/tests/appStartTimeTest.e2e.ts
+++ b/src/libs/E2E/tests/appStartTimeTest.e2e.ts
@@ -1,11 +1,14 @@
 import Config from 'react-native-config';
+import type {NativeConfig} from 'react-native-config';
 import type {PerformanceEntry} from 'react-native-performance';
 import E2ELogin from '@libs/E2E/actions/e2eLogin';
 import waitForAppLoaded from '@libs/E2E/actions/waitForAppLoaded';
 import E2EClient from '@libs/E2E/client';
+import getConfigValueOrThrow from '@libs/E2E/utils/getConfigValueOrThrow';
 import Performance from '@libs/Performance';
 
-const test = () => {
+const test = (config: NativeConfig) => {
+    const name = getConfigValueOrThrow('name', config);
     // check for login (if already logged in the action will simply resolve)
     E2ELogin().then((neededLogin) => {
         if (neededLogin) {
@@ -25,7 +28,7 @@ const test = () => {
             metrics.map((metric) =>
                 E2EClient.submitTestResults({
                     branch: Config.E2E_BRANCH,
-                    name: `App start ${metric.name}`,
+                    name: `${name} ${metric.name}`,
                     metric: metric.duration,
                     unit: 'ms',
                 }),
diff --git a/src/libs/E2E/tests/chatOpeningTest.e2e.ts b/src/libs/E2E/tests/chatOpeningTest.e2e.ts
index 8e2a0a81da7d..cf0c4889aa69 100644
--- a/src/libs/E2E/tests/chatOpeningTest.e2e.ts
+++ b/src/libs/E2E/tests/chatOpeningTest.e2e.ts
@@ -15,6 +15,7 @@ const test = (config: NativeConfig) => {
     console.debug('[E2E] Logging in for chat opening');
 
     const reportID = getConfigValueOrThrow('reportID', config);
+    const name = getConfigValueOrThrow('name', config);
 
     E2ELogin().then((neededLogin) => {
         if (neededLogin) {
@@ -48,7 +49,7 @@ const test = (config: NativeConfig) => {
                 if (entry.name === CONST.TIMING.CHAT_RENDER) {
                     E2EClient.submitTestResults({
                         branch: Config.E2E_BRANCH,
-                        name: 'Chat opening',
+                        name: `${name} Chat opening`,
                         metric: entry.duration,
                         unit: 'ms',
                     })
@@ -64,7 +65,7 @@ const test = (config: NativeConfig) => {
                 if (entry.name === CONST.TIMING.OPEN_REPORT) {
                     E2EClient.submitTestResults({
                         branch: Config.E2E_BRANCH,
-                        name: 'Chat TTI',
+                        name: `${name} Chat TTI`,
                         metric: entry.duration,
                         unit: 'ms',
                     })
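
The app-start test previously hard-coded its result name (`App start ...`); like the other tests below, it now reads a `name` from the test config via `getConfigValueOrThrow`. That helper already exists at `@libs/E2E/utils/getConfigValueOrThrow` and is not part of this diff; a minimal sketch of what it presumably does, with the signature inferred from the call sites above:

```ts
import Config from 'react-native-config';
import type {NativeConfig} from 'react-native-config';

// Sketch only - the real implementation may differ. Reads a key from the test
// config and fails fast when it is missing, so a misconfigured test aborts
// instead of submitting results under an empty or undefined name.
function getConfigValueOrThrow(key: string, config: NativeConfig = Config): string {
    const value = config[key];
    if (value === undefined || value === null) {
        throw new Error(`[E2E] Missing config value for key '${key}'`);
    }
    return String(value);
}

export default getConfigValueOrThrow;
```
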
diff --git a/src/libs/E2E/tests/linkingTest.e2e.ts b/src/libs/E2E/tests/linkingTest.e2e.ts
index c4d580e8c57b..18ba438c2ca6 100644
--- a/src/libs/E2E/tests/linkingTest.e2e.ts
+++ b/src/libs/E2E/tests/linkingTest.e2e.ts
@@ -24,6 +24,7 @@ const test = (config: NativeConfig) => {
     const reportID = getConfigValueOrThrow('reportID', config);
     const linkedReportID = getConfigValueOrThrow('linkedReportID', config);
     const linkedReportActionID = getConfigValueOrThrow('linkedReportActionID', config);
+    const name = getConfigValueOrThrow('name', config);
 
     E2ELogin().then((neededLogin) => {
         if (neededLogin) {
@@ -74,7 +75,7 @@ const test = (config: NativeConfig) => {
 
                     E2EClient.submitTestResults({
                         branch: Config.E2E_BRANCH,
-                        name: 'Comment linking',
+                        name,
                         metric: entry.duration,
                         unit: 'ms',
                     });
diff --git a/src/libs/E2E/tests/openSearchRouterTest.e2e.ts b/src/libs/E2E/tests/openSearchRouterTest.e2e.ts
index 48278aee536a..01dfe57ab715 100644
--- a/src/libs/E2E/tests/openSearchRouterTest.e2e.ts
+++ b/src/libs/E2E/tests/openSearchRouterTest.e2e.ts
@@ -1,16 +1,20 @@
+import type {NativeConfig} from 'react-native-config';
 import Config from 'react-native-config';
 import * as E2EGenericPressableWrapper from '@components/Pressable/GenericPressable/index.e2e';
 import E2ELogin from '@libs/E2E/actions/e2eLogin';
 import waitForAppLoaded from '@libs/E2E/actions/waitForAppLoaded';
 import E2EClient from '@libs/E2E/client';
+import getConfigValueOrThrow from '@libs/E2E/utils/getConfigValueOrThrow';
 import getPromiseWithResolve from '@libs/E2E/utils/getPromiseWithResolve';
 import Performance from '@libs/Performance';
 import CONST from '@src/CONST';
 
-const test = () => {
+const test = (config: NativeConfig) => {
     // check for login (if already logged in the action will simply resolve)
     console.debug('[E2E] Logging in for new search router');
 
+    const name = getConfigValueOrThrow('name', config);
+
     E2ELogin().then((neededLogin: boolean): Promise<Response> | undefined => {
         if (neededLogin) {
             return waitForAppLoaded().then(() =>
@@ -59,7 +63,7 @@ const test = () => {
                 if (entry.name === CONST.TIMING.SEARCH_ROUTER_RENDER) {
                     E2EClient.submitTestResults({
                         branch: Config.E2E_BRANCH,
-                        name: 'Open Search Router TTI',
+                        name: `${name} Open Search Router TTI`,
                         metric: entry.duration,
                         unit: 'ms',
                     })
@@ -75,7 +79,7 @@ const test = () => {
                 if (entry.name === CONST.TIMING.LOAD_SEARCH_OPTIONS) {
                     E2EClient.submitTestResults({
                         branch: Config.E2E_BRANCH,
-                        name: 'Load Search Options',
+                        name: `${name} Load Search Options`,
                         metric: entry.duration,
                         unit: 'ms',
                     })
diff --git a/src/libs/E2E/tests/reportTypingTest.e2e.ts b/src/libs/E2E/tests/reportTypingTest.e2e.ts
index efe1c380dfd0..e042a688c37d 100644
--- a/src/libs/E2E/tests/reportTypingTest.e2e.ts
+++ b/src/libs/E2E/tests/reportTypingTest.e2e.ts
@@ -21,6 +21,7 @@ const test = (config: NativeConfig) => {
 
     const reportID = getConfigValueOrThrow('reportID', config);
     const message = getConfigValueOrThrow('message', config);
+    const name = getConfigValueOrThrow('name', config);
 
     E2ELogin().then((neededLogin) => {
         if (neededLogin) {
@@ -45,7 +46,7 @@ const test = (config: NativeConfig) => {
                 if (entry.name === CONST.TIMING.MESSAGE_SENT) {
                     E2EClient.submitTestResults({
                         branch: Config.E2E_BRANCH,
-                        name: 'Message sent',
+                        name: `${name} Message sent`,
                         metric: entry.duration,
                         unit: 'ms',
                     }).then(messageSentResolve);
@@ -77,7 +78,7 @@ const test = (config: NativeConfig) => {
 
                     E2EClient.submitTestResults({
                         branch: Config.E2E_BRANCH,
-                        name: 'Composer typing rerender count',
+                        name: `${name} Composer typing rerender count`,
                         metric: rerenderCount,
                         unit: 'renders',
                     })
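
All five tests now derive their submitted result names from the configured `name` (as a prefix, or for comment linking as the whole name). The reason becomes clear in `testRunner.ts` below: when a test is aborted, `clearTestResults` deletes every metric whose key starts with `test.name`, so each submitted result must be keyed by the configured name to be prunable. A hypothetical config entry illustrating the assumed shape (the real entries live in the e2e test config, outside this diff):

```ts
// Hypothetical entry - field names mirror the getConfigValueOrThrow() call sites above.
const chatOpeningTestConfig = {
    name: 'Chat opening', // every metric this test submits is prefixed with this
    reportID: '123456789', // illustrative value
};
```
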
diff --git a/tests/e2e/compare/compare.ts b/tests/e2e/compare/compare.ts
index 40a728545668..8761043e97e1 100644
--- a/tests/e2e/compare/compare.ts
+++ b/tests/e2e/compare/compare.ts
@@ -91,16 +91,23 @@ function compareResults(baselineEntries: Metric | string, compareEntries: Metric
     };
 }
 
-export default (main: Metric | string, delta: Metric | string, outputFile: string, outputFormat = 'all', metricForTest = {}) => {
+type Options = {
+    outputFile: string;
+    outputFormat: 'console' | 'markdown' | 'all';
+    metricForTest: Record<string, Unit>;
+    hasMissingData: boolean;
+};
+
+export default (main: Metric | string, delta: Metric | string, {outputFile, outputFormat = 'all', metricForTest = {}, hasMissingData}: Options) => {
     // IMPORTANT NOTE: make sure you are passing the main/baseline results first, then the delta/compare results:
     const outputData = compareResults(main, delta, metricForTest);
 
     if (outputFormat === 'console' || outputFormat === 'all') {
-        printToConsole(outputData);
+        printToConsole(outputData, hasMissingData);
     }
 
     if (outputFormat === 'markdown' || outputFormat === 'all') {
-        return writeToMarkdown(outputFile, outputData);
+        return writeToMarkdown(outputFile, outputData, hasMissingData);
     }
 };
 
 export {compareResults};
diff --git a/tests/e2e/compare/output/console.ts b/tests/e2e/compare/output/console.ts
index 41ae5a4e0ccf..c91cebbbb610 100644
--- a/tests/e2e/compare/output/console.ts
+++ b/tests/e2e/compare/output/console.ts
@@ -26,7 +26,7 @@ const printRegularLine = (entry: Entry) => {
 /**
  * Prints the result simply to console.
  */
-export default (data: Data) => {
+export default (data: Data, hasMissingData: boolean) => {
     // No need to log errors or warnings as these were already logged on the fly
     console.debug('');
     console.debug('❇️ Performance comparison results:');
@@ -38,6 +38,10 @@ export default (data: Data) => {
     data.meaningless.forEach(printRegularLine);
 
     console.debug('');
+
+    if (hasMissingData) {
+        console.debug('⚠️ Some tests did not pass successfully, so some results are omitted from the final report');
+    }
 };
 
 export type {Data, Entry};
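
`compare()` previously took three positional trailing parameters; with `hasMissingData` that would have grown to four, so they are folded into an options object. Call sites change like this (a sketch; the values are the ones used in `testRunner.ts` below):

```ts
// Before: positional trailing arguments - easy to misorder
compare(results.main, results.delta, `${config.OUTPUT_DIR}/output.md`, 'all', metricForTest);

// After: named options; hasMissingData drives the warning footer in both outputs
compare(results.main, results.delta, {
    outputFile: `${config.OUTPUT_DIR}/output.md`,
    outputFormat: 'all',
    metricForTest,
    hasMissingData: hasSkippedTests,
});
```
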
diff --git a/tests/e2e/compare/output/markdown.ts b/tests/e2e/compare/output/markdown.ts
index 32af6c5e22ad..5d049939e139 100644
--- a/tests/e2e/compare/output/markdown.ts
+++ b/tests/e2e/compare/output/markdown.ts
@@ -67,7 +67,7 @@ const buildSummaryTable = (entries: Entry[], collapse = false) => {
     return collapse ? collapsibleSection('Show entries', content) : content;
 };
 
-const buildMarkdown = (data: Data) => {
+const buildMarkdown = (data: Data, hasMissingData: boolean) => {
     let result = '## Performance Comparison Report 📊';
 
     if (data.errors?.length) {
@@ -92,6 +92,10 @@ const buildMarkdown = (data: Data) => {
     result += `\n${buildDetailsTable(data.meaningless)}`;
     result += '\n';
 
+    if (hasMissingData) {
+        result += '⚠️ Some tests did not pass successfully, so some results are omitted from the final report';
+    }
+
     return result;
 };
 
@@ -109,8 +113,8 @@ const writeToFile = (filePath: string, content: string) =>
         throw error;
     });
 
-const writeToMarkdown = (filePath: string, data: Data) => {
-    const markdown = buildMarkdown(data);
+const writeToMarkdown = (filePath: string, data: Data, hasMissingData: boolean) => {
+    const markdown = buildMarkdown(data, hasMissingData);
     return writeToFile(filePath, markdown).catch((error) => {
         console.error(error);
         throw error;
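
The '⚠️' in the markdown footer does double duty: it is also the machine-readable marker that the new workflow step greps `output.md` for. A sketch of that detection reduced to a function (the workflow implements it as `grep -q '⚠️' ...` in a shell step instead):

```ts
import {readFileSync} from 'fs';

// Sketch only. The '⚠️' marker reaches the report via the hasMissingData footer
// above, so its presence signals omitted results (assuming the marker does not
// appear elsewhere in the generated report).
function skippedTestsDetected(outputMdPath: string): boolean {
    return readFileSync(outputMdPath, 'utf8').includes('⚠️');
}
```
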
diff --git a/tests/e2e/testRunner.ts b/tests/e2e/testRunner.ts
index 58fb6b9cdae1..a4e9bfab1e0c 100644
--- a/tests/e2e/testRunner.ts
+++ b/tests/e2e/testRunner.ts
@@ -123,6 +123,20 @@ const runTests = async (): Promise<void> => {
         }
     };
 
+    let hasSkippedTests = false;
+    const clearTestResults = (test: TestConfig) => {
+        hasSkippedTests = true;
+
+        Object.keys(results).forEach((branch: string) => {
+            Object.keys(results[branch]).forEach((metric: string) => {
+                if (!metric.startsWith(test.name)) {
+                    return;
+                }
+                delete results[branch][metric];
+            });
+        });
+    };
+
     // Collect results while tests are being executed
     server.addTestResultListener((testResult) => {
         const {isCritical = true} = testResult;
@@ -244,88 +258,102 @@ const runTests = async (): Promise<void> => {
         server.setTestConfig(test as TestConfig);
         server.setReadyToAcceptTestResults(false);
 
-        const warmupText = `Warmup for test '${test?.name}' [${testIndex + 1}/${tests.length}]`;
-
-        // For each warmup we allow the warmup to fail three times before we stop the warmup run:
-        const errorCountWarmupRef = {
-            errorCount: 0,
-            allowedExceptions: 3,
-        };
-
-        // by default we do 2 warmups:
-        // - first warmup to pass a login flow
-        // - second warmup to pass an actual flow and cache network requests
-        const iterations = 2;
-        for (let i = 0; i < iterations; i++) {
-            try {
-                // Warmup the main app:
-                await runTestIteration(config.MAIN_APP_PACKAGE, `[MAIN] ${warmupText}. Iteration ${i + 1}/${iterations}`, config.BRANCH_MAIN);
-
-                // Warmup the delta app:
-                await runTestIteration(config.DELTA_APP_PACKAGE, `[DELTA] ${warmupText}. Iteration ${i + 1}/${iterations}`, config.BRANCH_DELTA);
-            } catch (e) {
-                // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
-                Logger.error(`Warmup failed with error: ${e}`);
-
-                errorCountWarmupRef.errorCount++;
-                i--; // repeat warmup again
-
-                if (errorCountWarmupRef.errorCount === errorCountWarmupRef.allowedExceptions) {
-                    Logger.error("There was an error running the warmup and we've reached the maximum number of allowed exceptions. Stopping the test run.");
-                    throw e;
-                }
-            }
-        }
-
-        server.setReadyToAcceptTestResults(true);
-
-        // For each test case we allow the test to fail three times before we stop the test run:
-        const errorCountRef = {
-            errorCount: 0,
-            allowedExceptions: 3,
-        };
-
-        // We run each test multiple time to average out the results
-        for (let testIteration = 0; testIteration < config.RUNS; testIteration++) {
-            const onError = (e: Error) => {
-                // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
-                Logger.error(`Unexpected error during test execution: ${e}. `);
-                MeasureUtils.stop('error');
-                server.clearAllTestDoneListeners();
-                errorCountRef.errorCount += 1;
-                if (testIteration === 0 || errorCountRef.errorCount === errorCountRef.allowedExceptions) {
-                    Logger.error("There was an error running the test and we've reached the maximum number of allowed exceptions. Stopping the test run.");
-                    // If the error happened on the first test run, the test is broken
-                    // and we should not continue running it. Or if we have reached the
-                    // maximum number of allowed exceptions, we should stop the test run.
-                    throw e;
-                }
-                // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
-                Logger.warn(`There was an error running the test. Continuing the test run. Error: ${e}`);
-            };
-
-            const launchArgs = {
-                mockNetwork: true,
-            };
-
-            const iterationText = `Test '${test?.name}' [${testIndex + 1}/${tests.length}], iteration [${testIteration + 1}/${config.RUNS}]`;
-            const mainIterationText = `[MAIN] ${iterationText}`;
-            const deltaIterationText = `[DELTA] ${iterationText}`;
-            try {
-                // Run the test on the main app:
-                await runTestIteration(config.MAIN_APP_PACKAGE, mainIterationText, config.BRANCH_MAIN, launchArgs);
-
-                // Run the test on the delta app:
-                await runTestIteration(config.DELTA_APP_PACKAGE, deltaIterationText, config.BRANCH_DELTA, launchArgs);
-            } catch (e) {
-                onError(e as Error);
-            }
-        }
+        try {
+            const warmupText = `Warmup for test '${test?.name}' [${testIndex + 1}/${tests.length}]`;
+
+            // For each warmup we allow the warmup to fail three times before we stop the warmup run:
+            const errorCountWarmupRef = {
+                errorCount: 0,
+                allowedExceptions: 3,
+            };
+
+            // by default we do 2 warmups:
+            // - first warmup to pass a login flow
+            // - second warmup to pass an actual flow and cache network requests
+            const iterations = 2;
+            for (let i = 0; i < iterations; i++) {
+                try {
+                    // Warmup the main app:
+                    await runTestIteration(config.MAIN_APP_PACKAGE, `[MAIN] ${warmupText}. Iteration ${i + 1}/${iterations}`, config.BRANCH_MAIN);
+
+                    // Warmup the delta app:
+                    await runTestIteration(config.DELTA_APP_PACKAGE, `[DELTA] ${warmupText}. Iteration ${i + 1}/${iterations}`, config.BRANCH_DELTA);
+                } catch (e) {
+                    // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
+                    Logger.error(`Warmup failed with error: ${e}`);
+
+                    MeasureUtils.stop('error-warmup');
+                    server.clearAllTestDoneListeners();
+
+                    errorCountWarmupRef.errorCount++;
+                    i--; // repeat warmup again
+
+                    if (errorCountWarmupRef.errorCount === errorCountWarmupRef.allowedExceptions) {
+                        Logger.error("There was an error running the warmup and we've reached the maximum number of allowed exceptions. Stopping the test run.");
+                        throw e;
+                    }
+                }
+            }
+
+            server.setReadyToAcceptTestResults(true);
+
+            // For each test case we allow the test to fail three times before we stop the test run:
+            const errorCountRef = {
+                errorCount: 0,
+                allowedExceptions: 3,
+            };
+
+            // We run each test multiple times to average out the results
+            for (let testIteration = 0; testIteration < config.RUNS; testIteration++) {
+                const onError = (e: Error) => {
+                    // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
+                    Logger.error(`Unexpected error during test execution: ${e}. `);
+                    MeasureUtils.stop('error');
+                    server.clearAllTestDoneListeners();
+                    errorCountRef.errorCount += 1;
+                    if (testIteration === 0 || errorCountRef.errorCount === errorCountRef.allowedExceptions) {
+                        Logger.error("There was an error running the test and we've reached the maximum number of allowed exceptions. Stopping the test run.");
+                        // If the error happened on the first test run, the test is broken
+                        // and we should not continue running it. Or if we have reached the
+                        // maximum number of allowed exceptions, we should stop the test run.
+                        throw e;
+                    }
+                    // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
+                    Logger.warn(`There was an error running the test. Continuing the test run. Error: ${e}`);
+                };
+
+                const launchArgs = {
+                    mockNetwork: true,
+                };
+
+                const iterationText = `Test '${test?.name}' [${testIndex + 1}/${tests.length}], iteration [${testIteration + 1}/${config.RUNS}]`;
+                const mainIterationText = `[MAIN] ${iterationText}`;
+                const deltaIterationText = `[DELTA] ${iterationText}`;
+                try {
+                    // Run the test on the main app:
+                    await runTestIteration(config.MAIN_APP_PACKAGE, mainIterationText, config.BRANCH_MAIN, launchArgs);
+
+                    // Run the test on the delta app:
+                    await runTestIteration(config.DELTA_APP_PACKAGE, deltaIterationText, config.BRANCH_DELTA, launchArgs);
+                } catch (e) {
+                    onError(e as Error);
+                }
+            }
+        } catch (exception) {
+            // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
+            Logger.warn(`Test ${test?.name} cannot be finished due to error: ${exception}`);
+            clearTestResults(test as TestConfig);
+        }
     }
 
     // Calculate statistics and write them to our work file
     Logger.info('Calculating statistics and writing results');
-    compare(results.main, results.delta, `${config.OUTPUT_DIR}/output.md`, 'all', metricForTest);
+    compare(results.main, results.delta, {
+        outputFile: `${config.OUTPUT_DIR}/output.md`,
+        outputFormat: 'all',
+        metricForTest,
+        hasMissingData: hasSkippedTests,
+    });
 
     await server.stop();
 };
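
One subtlety: `clearTestResults` prunes by string prefix, so the scheme assumes no test's configured name is a prefix of another's; otherwise aborting one test would also drop its neighbor's metrics. The check at the core of it:

```ts
// Assumed invariant (not enforced in this diff): configured test names must not
// be prefixes of one another. With names 'App start' and 'App start warm',
// aborting 'App start' would also delete 'App start warm ...' results, because
// membership is decided by:
const belongsToTest = (metricName: string, testName: string): boolean => metricName.startsWith(testName);
```
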