From aaa4f350af94cd44c7bb5935195328cd0b2f0172 Mon Sep 17 00:00:00 2001
From: kirillzyusko
Date: Tue, 22 Oct 2024 14:55:56 +0200
Subject: [PATCH 1/7] e2e: allow test failures

---
 .github/workflows/e2ePerformanceTests.yml |  30 ++++
 .../E2E/tests/openSearchRouterTest.e2e.ts |   1 +
 tests/e2e/compare/compare.ts              |  13 +-
 tests/e2e/compare/output/console.ts       |   6 +-
 tests/e2e/compare/output/markdown.ts      |  10 +-
 tests/e2e/testRunner.ts                   | 168 ++++++++++--------
 6 files changed, 151 insertions(+), 77 deletions(-)

diff --git a/.github/workflows/e2ePerformanceTests.yml b/.github/workflows/e2ePerformanceTests.yml
index b48c7b2175eb..089f7886b527 100644
--- a/.github/workflows/e2ePerformanceTests.yml
+++ b/.github/workflows/e2ePerformanceTests.yml
@@ -221,6 +221,36 @@
         env:
           GITHUB_TOKEN: ${{ github.token }}
 
+      - name: Check if any tests were skipped
+        id: checkIfSkippedTestsDetected
+        run: |
+          if grep -q '⚠️' "./Host_Machine_Files/\$WORKING_DIRECTORY/output.md"; then
+            # Create an output for the GH action indicating that some tests were skipped:
+            echo "skippedTestsDetected=true" >> "$GITHUB_OUTPUT"
+          else
+            echo "skippedTestsDetected=false" >> "$GITHUB_OUTPUT"
+            echo '✅ no skipped tests detected'
+          fi
+        env:
+          GITHUB_TOKEN: ${{ github.token }}
+
+      - name: 'Announce skipped tests in Slack'
+        if: ${{ steps.checkIfSkippedTestsDetected.outputs.skippedTestsDetected == 'true' }}
+        uses: 8398a7/action-slack@v3
+        with:
+          status: custom
+          custom_payload: |
+            {
+              channel: '#e2e-announce',
+              attachments: [{
+                color: 'danger',
+                text: `⚠️ ${process.env.AS_REPO} Some E2E tests were skipped in the workflow ⚠️`,
+              }]
+            }
+        env:
+          GITHUB_TOKEN: ${{ github.token }}
+          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK }}
+
       - name: 'Announce regression in Slack'
         if: ${{ steps.checkIfRegressionDetected.outputs.performanceRegressionDetected == 'true' }}
         uses: 8398a7/action-slack@v3
diff --git a/src/libs/E2E/tests/openSearchRouterTest.e2e.ts b/src/libs/E2E/tests/openSearchRouterTest.e2e.ts
index de9464c9c286..02e8eab7b017 100644
--- a/src/libs/E2E/tests/openSearchRouterTest.e2e.ts
+++ b/src/libs/E2E/tests/openSearchRouterTest.e2e.ts
@@ -1,3 +1,4 @@
+import type {NativeConfig} from 'react-native-config';
 import Config from 'react-native-config';
 import type {NativeConfig} from 'react-native-config';
 import * as E2EGenericPressableWrapper from '@components/Pressable/GenericPressable/index.e2e';
diff --git a/tests/e2e/compare/compare.ts b/tests/e2e/compare/compare.ts
index 40a728545668..8761043e97e1 100644
--- a/tests/e2e/compare/compare.ts
+++ b/tests/e2e/compare/compare.ts
@@ -91,16 +91,23 @@ function compareResults(baselineEntries: Metric | string, compareEntries: Metric
     };
 }
 
-export default (main: Metric | string, delta: Metric | string, outputFile: string, outputFormat = 'all', metricForTest = {}) => {
+type Options = {
+    outputFile: string;
+    outputFormat: 'console' | 'markdown' | 'all';
+    metricForTest: Record<string, string>;
+    hasMissingData: boolean;
+};
+
+export default (main: Metric | string, delta: Metric | string, {outputFile, outputFormat = 'all', metricForTest = {}, hasMissingData}: Options) => {
     // IMPORTANT NOTE: make sure you are passing the main/baseline results first, then the delta/compare results:
     const outputData = compareResults(main, delta, metricForTest);
 
     if (outputFormat === 'console' || outputFormat === 'all') {
-        printToConsole(outputData);
+        printToConsole(outputData, hasMissingData);
     }
 
     if (outputFormat === 'markdown' || outputFormat === 'all') {
-        return writeToMarkdown(outputFile, outputData);
+        return writeToMarkdown(outputFile, outputData, hasMissingData);
     }
 };
 
 export {compareResults};
diff --git a/tests/e2e/compare/output/console.ts b/tests/e2e/compare/output/console.ts
index 41ae5a4e0ccf..c91cebbbb610 100644
--- a/tests/e2e/compare/output/console.ts
+++ b/tests/e2e/compare/output/console.ts
@@ -26,7 +26,7 @@ const printRegularLine = (entry: Entry) => {
 /**
  * Prints the result simply to console.
  */
-export default (data: Data) => {
+export default (data: Data, hasMissingData: boolean) => {
     // No need to log errors or warnings as these will be logged on the fly
     console.debug('');
     console.debug('❇️ Performance comparison results:');
@@ -38,6 +38,10 @@ export default (data: Data) => {
     data.meaningless.forEach(printRegularLine);
 
     console.debug('');
+
+    if (hasMissingData) {
+        console.debug('⚠️ Some tests did not pass successfully, so some results are omitted from the final report');
+    }
 };
 
 export type {Data, Entry};
diff --git a/tests/e2e/compare/output/markdown.ts b/tests/e2e/compare/output/markdown.ts
index 32af6c5e22ad..5d049939e139 100644
--- a/tests/e2e/compare/output/markdown.ts
+++ b/tests/e2e/compare/output/markdown.ts
@@ -67,7 +67,7 @@ const buildSummaryTable = (entries: Entry[], collapse = false) => {
     return collapse ? collapsibleSection('Show entries', content) : content;
 };
 
-const buildMarkdown = (data: Data) => {
+const buildMarkdown = (data: Data, hasMissingData: boolean) => {
     let result = '## Performance Comparison Report 📊';
 
     if (data.errors?.length) {
@@ -92,6 +92,10 @@ const buildMarkdown = (data: Data) => {
     result += `\n${buildDetailsTable(data.meaningless)}`;
     result += '\n';
 
+    if (hasMissingData) {
+        result += '⚠️ Some tests did not pass successfully, so some results are omitted from the final report';
+    }
+
     return result;
 };
 
@@ -109,8 +113,8 @@ const writeToFile = (filePath: string, content: string) =>
         throw error;
     });
 
-const writeToMarkdown = (filePath: string, data: Data) => {
-    const markdown = buildMarkdown(data);
+const writeToMarkdown = (filePath: string, data: Data, hasMissingData: boolean) => {
+    const markdown = buildMarkdown(data, hasMissingData);
     return writeToFile(filePath, markdown).catch((error) => {
         console.error(error);
         throw error;
diff --git a/tests/e2e/testRunner.ts b/tests/e2e/testRunner.ts
index 58fb6b9cdae1..a4e9bfab1e0c 100644
--- a/tests/e2e/testRunner.ts
+++ b/tests/e2e/testRunner.ts
@@ -123,6 +123,20 @@ const runTests = async (): Promise<void> => {
         }
     };
 
+    let hasSkippedTests = false;
+    const clearTestResults = (test: TestConfig) => {
+        hasSkippedTests = true;
+
+        Object.keys(results).forEach((branch: string) => {
+            Object.keys(results[branch]).forEach((metric: string) => {
+                if (!metric.startsWith(test.name)) {
+                    return;
+                }
+                delete results[branch][metric];
+            });
+        });
+    };
+
     // Collect results while tests are being executed
     server.addTestResultListener((testResult) => {
         const {isCritical = true} = testResult;
@@ -244,88 +258,102 @@ const runTests = async (): Promise<void> => {
         server.setTestConfig(test as TestConfig);
         server.setReadyToAcceptTestResults(false);
 
-        const warmupText = `Warmup for test '${test?.name}' [${testIndex + 1}/${tests.length}]`;
-
-        // For each warmup we allow the warmup to fail three times before we stop the warmup run:
-        const errorCountWarmupRef = {
-            errorCount: 0,
-            allowedExceptions: 3,
-        };
-
-        // by default we do 2 warmups:
-        // - first warmup to pass a login flow
-        // - second warmup to pass an actual flow and cache network requests
-        const iterations = 2;
-        for (let i = 0; i < iterations; i++) {
-            try {
-                // Warmup the main app:
-                await runTestIteration(config.MAIN_APP_PACKAGE, `[MAIN] ${warmupText}. Iteration ${i + 1}/${iterations}`, config.BRANCH_MAIN);
-
-                // Warmup the delta app:
-                await runTestIteration(config.DELTA_APP_PACKAGE, `[DELTA] ${warmupText}. Iteration ${i + 1}/${iterations}`, config.BRANCH_DELTA);
-            } catch (e) {
-                // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
-                Logger.error(`Warmup failed with error: ${e}`);
-
-                errorCountWarmupRef.errorCount++;
-                i--; // repeat warmup again
-
-                if (errorCountWarmupRef.errorCount === errorCountWarmupRef.allowedExceptions) {
-                    Logger.error("There was an error running the warmup and we've reached the maximum number of allowed exceptions. Stopping the test run.");
-                    throw e;
-                }
-            }
-        }
+        try {
+            const warmupText = `Warmup for test '${test?.name}' [${testIndex + 1}/${tests.length}]`;
+
+            // For each warmup we allow the warmup to fail three times before we stop the warmup run:
+            const errorCountWarmupRef = {
+                errorCount: 0,
+                allowedExceptions: 3,
+            };
+
+            // by default we do 2 warmups:
+            // - first warmup to pass a login flow
+            // - second warmup to pass an actual flow and cache network requests
+            const iterations = 2;
+            for (let i = 0; i < iterations; i++) {
+                try {
+                    // Warmup the main app:
+                    await runTestIteration(config.MAIN_APP_PACKAGE, `[MAIN] ${warmupText}. Iteration ${i + 1}/${iterations}`, config.BRANCH_MAIN);
+
+                    // Warmup the delta app:
+                    await runTestIteration(config.DELTA_APP_PACKAGE, `[DELTA] ${warmupText}. Iteration ${i + 1}/${iterations}`, config.BRANCH_DELTA);
+                } catch (e) {
+                    // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
+                    Logger.error(`Warmup failed with error: ${e}`);
+
+                    MeasureUtils.stop('error-warmup');
+                    server.clearAllTestDoneListeners();
+
+                    errorCountWarmupRef.errorCount++;
+                    i--; // repeat warmup again
+
+                    if (errorCountWarmupRef.errorCount === errorCountWarmupRef.allowedExceptions) {
+                        Logger.error("There was an error running the warmup and we've reached the maximum number of allowed exceptions. Stopping the test run.");
+                        throw e;
+                    }
+                }
+            }
 
-        server.setReadyToAcceptTestResults(true);
-
-        // For each test case we allow the test to fail three times before we stop the test run:
-        const errorCountRef = {
-            errorCount: 0,
-            allowedExceptions: 3,
-        };
-
-        // We run each test multiple times to average out the results
-        for (let testIteration = 0; testIteration < config.RUNS; testIteration++) {
-            const onError = (e: Error) => {
-                // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
-                Logger.error(`Unexpected error during test execution: ${e}. `);
-                MeasureUtils.stop('error');
-                server.clearAllTestDoneListeners();
-                errorCountRef.errorCount += 1;
-                if (testIteration === 0 || errorCountRef.errorCount === errorCountRef.allowedExceptions) {
-                    Logger.error("There was an error running the test and we've reached the maximum number of allowed exceptions. Stopping the test run.");
-                    // If the error happened on the first test run, the test is broken
-                    // and we should not continue running it. Or if we have reached the
-                    // maximum number of allowed exceptions, we should stop the test run.
-                    throw e;
-                }
-                // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
-                Logger.warn(`There was an error running the test. Continuing the test run. Error: ${e}`);
-            };
-
-            const launchArgs = {
-                mockNetwork: true,
-            };
-
-            const iterationText = `Test '${test?.name}' [${testIndex + 1}/${tests.length}], iteration [${testIteration + 1}/${config.RUNS}]`;
-            const mainIterationText = `[MAIN] ${iterationText}`;
-            const deltaIterationText = `[DELTA] ${iterationText}`;
-            try {
-                // Run the test on the main app:
-                await runTestIteration(config.MAIN_APP_PACKAGE, mainIterationText, config.BRANCH_MAIN, launchArgs);
-
-                // Run the test on the delta app:
-                await runTestIteration(config.DELTA_APP_PACKAGE, deltaIterationText, config.BRANCH_DELTA, launchArgs);
-            } catch (e) {
-                onError(e as Error);
-            }
-        }
+            server.setReadyToAcceptTestResults(true);
+
+            // For each test case we allow the test to fail three times before we stop the test run:
+            const errorCountRef = {
+                errorCount: 0,
+                allowedExceptions: 3,
+            };
+
+            // We run each test multiple times to average out the results
+            for (let testIteration = 0; testIteration < config.RUNS; testIteration++) {
+                const onError = (e: Error) => {
+                    // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
+                    Logger.error(`Unexpected error during test execution: ${e}. `);
+                    MeasureUtils.stop('error');
+                    server.clearAllTestDoneListeners();
+                    errorCountRef.errorCount += 1;
+                    if (testIteration === 0 || errorCountRef.errorCount === errorCountRef.allowedExceptions) {
+                        Logger.error("There was an error running the test and we've reached the maximum number of allowed exceptions. Stopping the test run.");
+                        // If the error happened on the first test run, the test is broken
+                        // and we should not continue running it. Or if we have reached the
+                        // maximum number of allowed exceptions, we should stop the test run.
+                        throw e;
+                    }
+                    // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
+                    Logger.warn(`There was an error running the test. Continuing the test run. Error: ${e}`);
+                };
+
+                const launchArgs = {
+                    mockNetwork: true,
+                };
+
+                const iterationText = `Test '${test?.name}' [${testIndex + 1}/${tests.length}], iteration [${testIteration + 1}/${config.RUNS}]`;
+                const mainIterationText = `[MAIN] ${iterationText}`;
+                const deltaIterationText = `[DELTA] ${iterationText}`;
+                try {
+                    // Run the test on the main app:
+                    await runTestIteration(config.MAIN_APP_PACKAGE, mainIterationText, config.BRANCH_MAIN, launchArgs);
+
+                    // Run the test on the delta app:
+                    await runTestIteration(config.DELTA_APP_PACKAGE, deltaIterationText, config.BRANCH_DELTA, launchArgs);
+                } catch (e) {
+                    onError(e as Error);
+                }
+            }
+        } catch (exception) {
+            // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
+            Logger.warn(`Test ${test?.name} cannot be finished due to error: ${exception}`);
+            clearTestResults(test as TestConfig);
+        }
     }
 
     // Calculate statistics and write them to our work file
     Logger.info('Calculating statistics and writing results');
-    compare(results.main, results.delta, `${config.OUTPUT_DIR}/output.md`, 'all', metricForTest);
+    compare(results.main, results.delta, {
+        outputFile: `${config.OUTPUT_DIR}/output.md`,
+        outputFormat: 'all',
+        metricForTest,
+        hasMissingData: hasSkippedTests,
+    });
 
     await server.stop();
 };

From 973f66689b0d27aba62e9c2c1dff9ea483b77bce Mon Sep 17 00:00:00 2001
From: kirillzyusko
Date: Wed, 23 Oct 2024 12:13:31 +0200
Subject: [PATCH 2/7] e2e: print skipped tests

---
 tests/e2e/compare/compare.ts         |  8 ++++----
 tests/e2e/compare/output/console.ts  |  6 +++---
 tests/e2e/compare/output/markdown.ts | 10 +++++-----
 tests/e2e/testRunner.ts              |  6 +++---
 4 files changed, 15 insertions(+), 15 deletions(-)

diff --git a/tests/e2e/compare/compare.ts b/tests/e2e/compare/compare.ts
index 8761043e97e1..ad38c249bff3 100644
--- a/tests/e2e/compare/compare.ts
+++ b/tests/e2e/compare/compare.ts
@@ -95,19 +95,19 @@ type Options = {
     outputFile: string;
     outputFormat: 'console' | 'markdown' | 'all';
     metricForTest: Record<string, string>;
-    hasMissingData: boolean;
+    skippedTests: string[];
 };
 
-export default (main: Metric | string, delta: Metric | string, {outputFile, outputFormat = 'all', metricForTest = {}, hasMissingData}: Options) => {
+export default (main: Metric | string, delta: Metric | string, {outputFile, outputFormat = 'all', metricForTest = {}, skippedTests}: Options) => {
     // IMPORTANT NOTE: make sure you are passing the main/baseline results first, then the delta/compare results:
     const outputData = compareResults(main, delta, metricForTest);
 
     if (outputFormat === 'console' || outputFormat === 'all') {
-        printToConsole(outputData, hasMissingData);
+        printToConsole(outputData, skippedTests);
     }
 
     if (outputFormat === 'markdown' || outputFormat === 'all') {
-        return writeToMarkdown(outputFile, outputData, hasMissingData);
+        return writeToMarkdown(outputFile, outputData, skippedTests);
     }
 };
 
 export {compareResults};
diff --git a/tests/e2e/compare/output/console.ts b/tests/e2e/compare/output/console.ts
index c91cebbbb610..2e303ffa1538 100644
--- a/tests/e2e/compare/output/console.ts
+++ b/tests/e2e/compare/output/console.ts
@@ -26,7 +26,7 @@ const printRegularLine = (entry: Entry) => {
 /**
  * Prints the result simply to console.
  */
-export default (data: Data, hasMissingData: boolean) => {
+export default (data: Data, skippedTests: string[]) => {
     // No need to log errors or warnings as these will be logged on the fly
     console.debug('');
     console.debug('❇️ Performance comparison results:');
@@ -39,8 +39,8 @@ export default (data: Data, hasMissingData: boolean) => {
 
     console.debug('');
 
-    if (hasMissingData) {
-        console.debug('⚠️ Some tests did not pass successfully, so some results are omitted from the final report');
+    if (skippedTests.length > 0) {
+        console.debug(`⚠️ Some tests did not pass successfully, so some results are omitted from the final report: ${skippedTests.join(', ')}`);
     }
 };
 
 export type {Data, Entry};
diff --git a/tests/e2e/compare/output/markdown.ts b/tests/e2e/compare/output/markdown.ts
index 5d049939e139..eaca6a049516 100644
--- a/tests/e2e/compare/output/markdown.ts
+++ b/tests/e2e/compare/output/markdown.ts
@@ -67,7 +67,7 @@ const buildSummaryTable = (entries: Entry[], collapse = false) => {
     return collapse ? collapsibleSection('Show entries', content) : content;
 };
 
-const buildMarkdown = (data: Data, hasMissingData: boolean) => {
+const buildMarkdown = (data: Data, skippedTests: string[]) => {
     let result = '## Performance Comparison Report 📊';
 
     if (data.errors?.length) {
@@ -92,8 +92,8 @@ const buildMarkdown = (data: Data, hasMissingData: boolean) => {
     result += `\n${buildDetailsTable(data.meaningless)}`;
     result += '\n';
 
-    if (hasMissingData) {
-        result += '⚠️ Some tests did not pass successfully, so some results are omitted from the final report';
+    if (skippedTests.length > 0) {
+        result += `⚠️ Some tests did not pass successfully, so some results are omitted from the final report: ${skippedTests.join(', ')}`;
     }
 
     return result;
@@ -113,8 +113,8 @@ const writeToFile = (filePath: string, content: string) =>
         throw error;
     });
 
-const writeToMarkdown = (filePath: string, data: Data, hasMissingData: boolean) => {
-    const markdown = buildMarkdown(data, hasMissingData);
+const writeToMarkdown = (filePath: string, data: Data, skippedTests: string[]) => {
+    const markdown = buildMarkdown(data, skippedTests);
     return writeToFile(filePath, markdown).catch((error) => {
         console.error(error);
         throw error;
diff --git a/tests/e2e/testRunner.ts b/tests/e2e/testRunner.ts
index a4e9bfab1e0c..b4e79220b4ed 100644
--- a/tests/e2e/testRunner.ts
+++ b/tests/e2e/testRunner.ts
@@ -123,9 +123,9 @@ const runTests = async (): Promise<void> => {
         }
     };
 
-    let hasSkippedTests = false;
+    let skippedTests: string[] = [];
     const clearTestResults = (test: TestConfig) => {
-        hasSkippedTests = true;
+        skippedTests.push(test.name);
 
         Object.keys(results).forEach((branch: string) => {
             Object.keys(results[branch]).forEach((metric: string) => {
@@ -352,7 +352,7 @@ const runTests = async (): Promise<void> => {
         outputFile: `${config.OUTPUT_DIR}/output.md`,
         outputFormat: 'all',
         metricForTest,
-        hasMissingData: hasSkippedTests,
+        skippedTests,
     });
 
     await server.stop();

From 0bf8152cf2ff5d30de48f7d4250a1a68a1d44675 Mon Sep 17 00:00:00 2001
From: kirillzyusko
Date: Wed, 23 Oct 2024 12:19:41 +0200
Subject: [PATCH 3/7] fix: CI

---
 tests/e2e/testRunner.ts       | 2 +-
 tests/unit/E2EMarkdownTest.ts | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/tests/e2e/testRunner.ts b/tests/e2e/testRunner.ts
index b4e79220b4ed..d1e16b6999e1 100644
--- a/tests/e2e/testRunner.ts
+++ b/tests/e2e/testRunner.ts
@@ -123,7 +123,7 @@ const runTests = async (): Promise<void> => {
         }
     };
 
-    let skippedTests: string[] = [];
+    const skippedTests: string[] = [];
     const clearTestResults = (test: TestConfig) => {
         skippedTests.push(test.name);
 
diff --git a/tests/unit/E2EMarkdownTest.ts b/tests/unit/E2EMarkdownTest.ts
index 74c5659c9487..766ec708f31b 100644
--- a/tests/unit/E2EMarkdownTest.ts
+++ b/tests/unit/E2EMarkdownTest.ts
@@ -13,6 +13,6 @@ const results = {
 describe('markdown formatter', () => {
     it('should format significant changes properly', () => {
         const data = compareResults(results.main, results.delta, {commentLinking: 'ms'});
-        expect(buildMarkdown(data)).toMatchSnapshot();
+        expect(buildMarkdown(data, [])).toMatchSnapshot();
     });
 });

From 473ebc5a062142e7eefcd512f45389a327a6d723 Mon Sep 17 00:00:00 2001
From: kirillzyusko
Date: Thu, 24 Oct 2024 09:34:04 +0200
Subject: [PATCH 4/7] fix: remove accidental changes

---
 src/libs/E2E/tests/openSearchRouterTest.e2e.ts | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/libs/E2E/tests/openSearchRouterTest.e2e.ts b/src/libs/E2E/tests/openSearchRouterTest.e2e.ts
index 02e8eab7b017..de9464c9c286 100644
--- a/src/libs/E2E/tests/openSearchRouterTest.e2e.ts
+++ b/src/libs/E2E/tests/openSearchRouterTest.e2e.ts
@@ -1,4 +1,3 @@
-import type {NativeConfig} from 'react-native-config';
 import Config from 'react-native-config';
 import type {NativeConfig} from 'react-native-config';
 import * as E2EGenericPressableWrapper from '@components/Pressable/GenericPressable/index.e2e';

From c5782b556b53ce4fdd9c94038e73942d4f0f0045 Mon Sep 17 00:00:00 2001
From: kirillzyusko
Date: Thu, 24 Oct 2024 15:19:32 +0200
Subject: [PATCH 5/7] fix: properly handle the case when the app is totally unusable and always crashes

---
 tests/e2e/testRunner.ts | 13 +++++++++----
 1 file changed, 9 insertions(+), 4 deletions(-)

diff --git a/tests/e2e/testRunner.ts b/tests/e2e/testRunner.ts
index d1e16b6999e1..ba6ed7a2a335 100644
--- a/tests/e2e/testRunner.ts
+++ b/tests/e2e/testRunner.ts
@@ -165,7 +165,7 @@ const runTests = async (): Promise<void> => {
         await launchApp('android', appPackage, config.ACTIVITY_PATH, launchArgs);
 
         const {promise, resetTimeout} = withFailTimeout(
-            new Promise((resolve) => {
+            new Promise((resolve, reject) => {
                 const removeListener = server.addTestDoneListener(() => {
                     Logger.success(iterationText);
 
@@ -215,9 +215,14 @@ const runTests = async (): Promise<void> => {
                     removeListener();
                     // something went wrong, let's wait a little bit and try again
                     await sleep(5000);
-                    // simply restart the test
-                    await runTestIteration(appPackage, iterationText, branch, launchArgs);
-                    resolve();
+                    try {
+                        // simply restart the test
+                        await runTestIteration(appPackage, iterationText, branch, launchArgs);
+                        resolve();
+                    } catch (e) {
+                        // okay, give up and rethrow the exception
+                        reject(e);
+                    }
                 },
             });
         }),

From 655626d33d6b4067c6f3d0b69ffef6763e685406 Mon Sep 17 00:00:00 2001
From: kirillzyusko
Date: Fri, 25 Oct 2024 16:49:05 +0200
Subject: [PATCH 6/7] fix: wait for file write before closing the test

---
 tests/e2e/testRunner.ts | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/tests/e2e/testRunner.ts b/tests/e2e/testRunner.ts
index ba6ed7a2a335..5485385ad8c9 100644
--- a/tests/e2e/testRunner.ts
+++ b/tests/e2e/testRunner.ts
@@ -353,12 +353,13 @@ const runTests = async (): Promise<void> => {
 
     // Calculate statistics and write them to our work file
     Logger.info('Calculating statistics and writing results');
-    compare(results.main, results.delta, {
+    await compare(results.main, results.delta, {
         outputFile: `${config.OUTPUT_DIR}/output.md`,
         outputFormat: 'all',
         metricForTest,
         skippedTests,
     });
+    Logger.info('Finished calculating statistics and writing results, stopping the test server');
 
     await server.stop();
 };

From d254f8f9bff863b0bfcec8cc5bc4c3330c488fd7 Mon Sep 17 00:00:00 2001
From: kirillzyusko
Date: Fri, 25 Oct 2024 16:52:12 +0200
Subject: [PATCH 7/7] fix: add additional logging to better understand the process

---
 tests/e2e/compare/output/markdown.ts | 1 +
 1 file changed, 1 insertion(+)

diff --git a/tests/e2e/compare/output/markdown.ts b/tests/e2e/compare/output/markdown.ts
index eaca6a049516..bd32d2d99ab2 100644
--- a/tests/e2e/compare/output/markdown.ts
+++ b/tests/e2e/compare/output/markdown.ts
@@ -115,6 +115,7 @@ const writeToFile = (filePath: string, content: string) =>
 
 const writeToMarkdown = (filePath: string, data: Data, skippedTests: string[]) => {
     const markdown = buildMarkdown(data, skippedTests);
+    Logger.info('Markdown was built successfully, writing to file...', markdown);
     return writeToFile(filePath, markdown).catch((error) => {
         console.error(error);
         throw error;