diff --git a/src/libs/E2E/reactNativeLaunchingTest.ts b/src/libs/E2E/reactNativeLaunchingTest.ts
index 46922091497c..f952998f0aad 100644
--- a/src/libs/E2E/reactNativeLaunchingTest.ts
+++ b/src/libs/E2E/reactNativeLaunchingTest.ts
@@ -66,7 +66,7 @@ E2EClient.getTestConfig()
             branch: Config.E2E_BRANCH,
             name: config.name,
             error: `Test '${config.name}' not found`,
-            isCritical: false,
+            isCritical: false,
         });
     }
diff --git a/src/libs/E2E/tests/appStartTimeTest.e2e.ts b/src/libs/E2E/tests/appStartTimeTest.e2e.ts
index 321fc3773d51..188dd65c85e9 100644
--- a/src/libs/E2E/tests/appStartTimeTest.e2e.ts
+++ b/src/libs/E2E/tests/appStartTimeTest.e2e.ts
@@ -26,7 +26,8 @@ const test = () => {
                 E2EClient.submitTestResults({
                     branch: Config.E2E_BRANCH,
                     name: `App start ${metric.name}`,
-                    duration: metric.duration,
+                    metric: metric.duration,
+                    unit: 'ms',
                 }),
             ),
         )
diff --git a/src/libs/E2E/tests/chatOpeningTest.e2e.ts b/src/libs/E2E/tests/chatOpeningTest.e2e.ts
index 8e43c4ece564..8e2a0a81da7d 100644
--- a/src/libs/E2E/tests/chatOpeningTest.e2e.ts
+++ b/src/libs/E2E/tests/chatOpeningTest.e2e.ts
@@ -49,7 +49,8 @@ const test = (config: NativeConfig) => {
             E2EClient.submitTestResults({
                 branch: Config.E2E_BRANCH,
                 name: 'Chat opening',
-                duration: entry.duration,
+                metric: entry.duration,
+                unit: 'ms',
             })
                 .then(() => {
                     console.debug('[E2E] Done with chat opening, exiting…');
@@ -64,7 +65,8 @@ const test = (config: NativeConfig) => {
             E2EClient.submitTestResults({
                 branch: Config.E2E_BRANCH,
                 name: 'Chat TTI',
-                duration: entry.duration,
+                metric: entry.duration,
+                unit: 'ms',
             })
                 .then(() => {
                     console.debug('[E2E] Done with chat TTI tracking, exiting…');
diff --git a/src/libs/E2E/tests/linkingTest.e2e.ts b/src/libs/E2E/tests/linkingTest.e2e.ts
index a3449ce5010b..7e6c7ea697ba 100644
--- a/src/libs/E2E/tests/linkingTest.e2e.ts
+++ b/src/libs/E2E/tests/linkingTest.e2e.ts
@@ -75,7 +75,8 @@ const test = (config: NativeConfig) => {
             E2EClient.submitTestResults({
                 branch: Config.E2E_BRANCH,
                 name: 'Comment linking',
-                duration: entry.duration,
+                metric: entry.duration,
+                unit: 'ms',
             });

             switchReportResolve();
diff --git a/src/libs/E2E/tests/openChatFinderPageTest.e2e.ts b/src/libs/E2E/tests/openChatFinderPageTest.e2e.ts
index 4ac7995b914f..c6aead2d5336 100644
--- a/src/libs/E2E/tests/openChatFinderPageTest.e2e.ts
+++ b/src/libs/E2E/tests/openChatFinderPageTest.e2e.ts
@@ -44,7 +44,8 @@ const test = () => {
             E2EClient.submitTestResults({
                 branch: Config.E2E_BRANCH,
                 name: 'Open Chat Finder Page TTI',
-                duration: entry.duration,
+                metric: entry.duration,
+                unit: 'ms',
             })
                 .then(() => {
                     openSearchPageResolve();
@@ -59,7 +60,8 @@ const test = () => {
             E2EClient.submitTestResults({
                 branch: Config.E2E_BRANCH,
                 name: 'Load Search Options',
-                duration: entry.duration,
+                metric: entry.duration,
+                unit: 'ms',
             })
                 .then(() => {
                     loadSearchOptionsResolve();
diff --git a/src/libs/E2E/tests/reportTypingTest.e2e.ts b/src/libs/E2E/tests/reportTypingTest.e2e.ts
index 817bda941611..9624d7ab992b 100644
--- a/src/libs/E2E/tests/reportTypingTest.e2e.ts
+++ b/src/libs/E2E/tests/reportTypingTest.e2e.ts
@@ -53,7 +53,8 @@ const test = (config: NativeConfig) => {
                 E2EClient.submitTestResults({
                     branch: Config.E2E_BRANCH,
                     name: 'Composer typing rerender count',
-                    renderCount: rerenderCount,
+                    metric: rerenderCount,
+                    unit: 'renders',
                 }).then(E2EClient.submitTestDone);
             }, 3000);
         })
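Every test file above makes the same substitution: the field that implied the unit (`duration`, `renderCount`) becomes a unit-agnostic `metric` plus an explicit `unit`. A minimal sketch of the before/after shape of a submission; the import paths are assumptions based on how these modules are referenced elsewhere in the E2E suite, and the measured value is made up:

```ts
import E2EClient from '@libs/E2E/client'; // assumed default export, as used by the tests above
import Config from 'react-native-config'; // assumed source of Config.E2E_BRANCH

const measuredMs = 1234; // illustrative value

// Before: the field name carried the unit.
E2EClient.submitTestResults({
    branch: Config.E2E_BRANCH,
    name: 'Chat TTI',
    duration: measuredMs,
});

// After: the number is generic and the unit travels with it.
E2EClient.submitTestResults({
    branch: Config.E2E_BRANCH,
    name: 'Chat TTI',
    metric: measuredMs,
    unit: 'ms',
});
```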
diff --git a/src/libs/E2E/types.ts b/src/libs/E2E/types.ts
index fdbc01872cb3..8640c76e631e 100644
--- a/src/libs/E2E/types.ts
+++ b/src/libs/E2E/types.ts
@@ -33,6 +33,8 @@ type TestModule = {default: Test};
 
 type Tests = Record, Test>;
 
+type Unit = 'ms' | 'MB' | '%' | 'renders' | 'FPS';
+
 type TestResult = {
     /** Name of the test */
     name: string;
@@ -40,8 +42,8 @@ type TestResult = {
     /** The branch where test were running */
     branch?: string;
 
-    /** Duration in milliseconds */
-    duration?: number;
+    /** The numeric value of the measurement */
+    metric?: number;
 
     /** Optional, if set indicates that the test run failed and has no valid results. */
     error?: string;
@@ -52,8 +54,8 @@ type TestResult = {
      */
     isCritical?: boolean;
 
-    /** Render count */
-    renderCount?: number;
+    /** The unit of the measurement */
+    unit?: Unit;
 };
 
-export type {SigninParams, IsE2ETestSession, NetworkCacheMap, NetworkCacheEntry, TestConfig, TestResult, TestModule, Tests};
+export type {SigninParams, IsE2ETestSession, NetworkCacheMap, NetworkCacheEntry, TestConfig, TestResult, TestModule, Tests, Unit};
diff --git a/tests/e2e/ADDING_TESTS.md b/tests/e2e/ADDING_TESTS.md
index f262a5ed9a0a..92f6404203c1 100644
--- a/tests/e2e/ADDING_TESTS.md
+++ b/tests/e2e/ADDING_TESTS.md
@@ -72,7 +72,8 @@ const test = () => {
         // ... do something with the measurements
         E2EClient.submitTestResults({
             name: "Navigate to report",
-            duration: measurement.duration,
+            metric: measurement.duration,
+            unit: 'ms',
         }).then(E2EClient.submitTestDone)
     });
diff --git a/tests/e2e/README.md b/tests/e2e/README.md
index ea36172a52ff..1f590a474ad5 100644
--- a/tests/e2e/README.md
+++ b/tests/e2e/README.md
@@ -148,7 +148,8 @@ const someDurationWeCollected = // ...
 
 E2EClient.submitTestResults({
     name: 'My test name',
-    duration: someDurationWeCollected,
+    metric: someDurationWeCollected,
+    unit: 'ms',
 });
 ```
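With `Unit` exported from `src/libs/E2E/types.ts`, a result is simply a test name plus a unit-tagged number, whatever that unit is. A short illustration of the new shape (values are made up):

```ts
import type {TestResult} from '@libs/E2E/types';

// A time-based measurement.
const ttiResult: TestResult = {
    name: 'Chat TTI',
    branch: 'main',
    metric: 1185.5,
    unit: 'ms',
};

// A counter-based measurement, which previously needed the separate renderCount field.
const typingResult: TestResult = {
    name: 'Composer typing rerender count',
    branch: 'main',
    metric: 7,
    unit: 'renders',
};
```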
diff --git a/tests/e2e/compare/compare.ts b/tests/e2e/compare/compare.ts
index c30900526bc2..685c5628633b 100644
--- a/tests/e2e/compare/compare.ts
+++ b/tests/e2e/compare/compare.ts
@@ -1,3 +1,4 @@
+import type {Unit} from '@libs/E2E/types';
 import type {Stats} from '../measure/math';
 import getStats from '../measure/math';
 import * as math from './math';
@@ -28,7 +29,7 @@ const PROBABILITY_CONSIDERED_SIGNIFICANCE = 0.02;
  */
 const DURATION_DIFF_THRESHOLD_SIGNIFICANCE = 100;
 
-function buildCompareEntry(name: string, compare: Stats, baseline: Stats): Entry {
+function buildCompareEntry(name: string, compare: Stats, baseline: Stats, unit: Unit): Entry {
     const diff = compare.mean - baseline.mean;
     const relativeDurationDiff = diff / baseline.mean;
 
@@ -38,6 +39,7 @@ function buildCompareEntry(name: string, compare: Stats, baseline: Stats): Entry
     const isDurationDiffOfSignificance = prob < PROBABILITY_CONSIDERED_SIGNIFICANCE && Math.abs(diff) >= DURATION_DIFF_THRESHOLD_SIGNIFICANCE;
 
     return {
+        unit,
         name,
         baseline,
         current: compare,
@@ -50,7 +52,7 @@ function buildCompareEntry(name: string, compare: Stats, baseline: Stats): Entry
 /**
  * Compare results between baseline and current entries and categorize.
  */
-function compareResults(baselineEntries: Metric | string, compareEntries: Metric | string = baselineEntries) {
+function compareResults(baselineEntries: Metric | string, compareEntries: Metric | string = baselineEntries, metricForTest: Record<string, Unit> = {}) {
     // Unique test scenario names
     const baselineKeys = Object.keys(baselineEntries ?? {});
     const names = Array.from(new Set([...baselineKeys]));
@@ -66,7 +68,7 @@ function compareResults(baselineEntries: Metric | string, compareEntries: Metric
             const deltaStats = getStats(current);
 
             if (baseline && current) {
-                compared.push(buildCompareEntry(name, deltaStats, currentStats));
+                compared.push(buildCompareEntry(name, deltaStats, currentStats, metricForTest[name]));
             }
         });
     }
@@ -80,9 +82,9 @@ function compareResults(baselineEntries: Metric | string, compareEntries: Metric
     };
 }
 
-export default (main: Metric | string, delta: Metric | string, outputFile: string, outputFormat = 'all') => {
+export default (main: Metric | string, delta: Metric | string, outputFile: string, outputFormat = 'all', metricForTest = {}) => {
     // IMPORTANT NOTE: make sure you are passing the main/baseline results first, then the delta/compare results:
-    const outputData = compareResults(main, delta);
+    const outputData = compareResults(main, delta, metricForTest);
 
     if (outputFormat === 'console' || outputFormat === 'all') {
         printToConsole(outputData);
diff --git a/tests/e2e/compare/output/console.ts b/tests/e2e/compare/output/console.ts
index 77170e43f4a6..41ae5a4e0ccf 100644
--- a/tests/e2e/compare/output/console.ts
+++ b/tests/e2e/compare/output/console.ts
@@ -1,3 +1,4 @@
+import type {Unit} from '@libs/E2E/types';
 import type {Stats} from '../../measure/math';
 import * as format from './format';
 
@@ -8,6 +9,7 @@ type Entry = {
     diff: number;
     relativeDurationDiff: number;
     isDurationDiffOfSignificance: boolean;
+    unit: Unit;
 };
 
 type Data = {
@@ -18,7 +20,7 @@
 };
 
 const printRegularLine = (entry: Entry) => {
-    console.debug(` - ${entry.name}: ${format.formatDurationDiffChange(entry)}`);
+    console.debug(` - ${entry.name}: ${format.formatMetricDiffChange(entry)}`);
 };
 
 /**
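The compare entry point now accepts the name-to-unit lookup as an optional fifth argument (defaulting to `{}`), which `compareResults` uses to tag every `Entry` before it reaches the console and markdown printers. A sketch of a direct call, mirroring how the test runner uses it; the paths and units are illustrative:

```ts
import type {Unit} from '@libs/E2E/types';
import compare from './compare/compare'; // path as used from tests/e2e

const outputDir = './results'; // illustrative

const metricForTest: Record<string, Unit> = {
    'Chat TTI': 'ms',
    'Composer typing rerender count': 'renders',
};

async function run() {
    // Baseline (main) results go first, then the delta results, as the note in compare.ts warns.
    await compare(`${outputDir}/main.json`, `${outputDir}/delta.json`, `${outputDir}/output.md`, 'all', metricForTest);
}

run();
```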
diff --git a/tests/e2e/compare/output/format.ts b/tests/e2e/compare/output/format.ts
index 40c9e74d6247..f00684cd5a01 100644
--- a/tests/e2e/compare/output/format.ts
+++ b/tests/e2e/compare/output/format.ts
@@ -20,16 +20,16 @@ const formatPercentChange = (value: number): string => {
     return `${value >= 0 ? '+' : '-'}${formatPercent(absValue)}`;
 };
 
-const formatDuration = (duration: number): string => `${duration.toFixed(3)} ms`;
+const formatMetric = (duration: number, unit: string): string => `${duration.toFixed(3)} ${unit}`;
 
-const formatDurationChange = (value: number): string => {
+const formatMetricChange = (value: number, unit: string): string => {
     if (value > 0) {
-        return `+${formatDuration(value)}`;
+        return `+${formatMetric(value, unit)}`;
     }
     if (value < 0) {
-        return `${formatDuration(value)}`;
+        return `${formatMetric(value, unit)}`;
     }
-    return '0 ms';
+    return `0 ${unit}`;
 };
 
 const formatChange = (value: number): string => {
@@ -69,13 +69,13 @@ const getDurationSymbols = (entry: Entry): string => {
     return '';
 };
 
-const formatDurationDiffChange = (entry: Entry): string => {
+const formatMetricDiffChange = (entry: Entry): string => {
     const {baseline, current} = entry;
 
-    let output = `${formatDuration(baseline.mean)} → ${formatDuration(current.mean)}`;
+    let output = `${formatMetric(baseline.mean, entry.unit)} → ${formatMetric(current.mean, entry.unit)}`;
 
     if (baseline.mean !== current.mean) {
-        output += ` (${formatDurationChange(entry.diff)}, ${formatPercentChange(entry.relativeDurationDiff)})`;
+        output += ` (${formatMetricChange(entry.diff, entry.unit)}, ${formatPercentChange(entry.relativeDurationDiff)})`;
     }
 
     output += ` ${getDurationSymbols(entry)}`;
@@ -83,4 +83,4 @@
 
     return output;
 };
 
-export {formatPercent, formatPercentChange, formatDuration, formatDurationChange, formatChange, getDurationSymbols, formatDurationDiffChange};
+export {formatPercent, formatPercentChange, formatMetric, formatMetricChange, formatChange, getDurationSymbols, formatMetricDiffChange};
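The formatter itself is now unit-agnostic; the suffix, including the zero case, comes from the caller. Based on the implementations above, the new helpers behave like this:

```ts
import {formatMetric, formatMetricChange} from './format'; // tests/e2e/compare/output/format

formatMetric(1185.5, 'ms');      // '1185.500 ms'
formatMetric(42, 'renders');     // '42.000 renders'
formatMetricChange(6.25, 'MB');  // '+6.250 MB'
formatMetricChange(-14.5, 'ms'); // '-14.500 ms'
formatMetricChange(0, '%');      // '0 %'
```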
diff --git a/tests/e2e/compare/output/markdown.ts b/tests/e2e/compare/output/markdown.ts
index 34bc3251c422..2e6ddfd5f03e 100644
--- a/tests/e2e/compare/output/markdown.ts
+++ b/tests/e2e/compare/output/markdown.ts
@@ -11,13 +11,13 @@ const tableHeader = ['Name', 'Duration'];
 const collapsibleSection = (title: string, content: string) => `<details>\n${title}\n\n${content}\n</details>\n\n`;
 
-const buildDurationDetails = (title: string, entry: Stats) => {
+const buildDurationDetails = (title: string, entry: Stats, unit: string) => {
     const relativeStdev = entry.stdev / entry.mean;
 
     return [
         `**${title}**`,
-        `Mean: ${format.formatDuration(entry.mean)}`,
-        `Stdev: ${format.formatDuration(entry.stdev)} (${format.formatPercent(relativeStdev)})`,
+        `Mean: ${format.formatMetric(entry.mean, unit)}`,
+        `Stdev: ${format.formatMetric(entry.stdev, unit)} (${format.formatPercent(relativeStdev)})`,
         entry.entries ? `Runs: ${entry.entries.join(' ')}` : '',
     ]
         .filter(Boolean)
@@ -25,7 +25,7 @@
 };
 
 const buildDurationDetailsEntry = (entry: Entry) =>
-    ['baseline' in entry ? buildDurationDetails('Baseline', entry.baseline) : '', 'current' in entry ? buildDurationDetails('Current', entry.current) : '']
+    ['baseline' in entry ? buildDurationDetails('Baseline', entry.baseline, entry.unit) : '', 'current' in entry ? buildDurationDetails('Current', entry.current, entry.unit) : '']
         .filter(Boolean)
         .join('<br/><br/>');
 
@@ -33,15 +33,15 @@ const formatEntryDuration = (entry: Entry): string => {
     let formattedDuration = '';
 
     if ('baseline' in entry && 'current' in entry) {
-        formattedDuration = format.formatDurationDiffChange(entry);
+        formattedDuration = format.formatMetricDiffChange(entry);
     }
 
     if ('baseline' in entry) {
-        formattedDuration = format.formatDuration(entry.baseline.mean);
+        formattedDuration = format.formatMetric(entry.baseline.mean, entry.unit);
     }
 
     if ('current' in entry) {
-        formattedDuration = format.formatDuration(entry.current.mean);
+        formattedDuration = format.formatMetric(entry.current.mean, entry.unit);
     }
 
     return formattedDuration;
diff --git a/tests/e2e/merge.ts b/tests/e2e/merge.ts
deleted file mode 100644
index d7c1b8699c7d..000000000000
--- a/tests/e2e/merge.ts
+++ /dev/null
@@ -1,27 +0,0 @@
-import compare from './compare/compare';
-import CONFIG from './config';
-
-const args = process.argv.slice(2);
-
-let mainPath = `${CONFIG.OUTPUT_DIR}/main.json`;
-if (args.includes('--mainPath')) {
-    mainPath = args[args.indexOf('--mainPath') + 1];
-}
-
-let deltaPath = `${CONFIG.OUTPUT_DIR}/delta.json`;
-if (args.includes('--deltaPath')) {
-    deltaPath = args[args.indexOf('--deltaPath') + 1];
-}
-
-let outputPath = `${CONFIG.OUTPUT_DIR}/output.md`;
-if (args.includes('--outputPath')) {
-    outputPath = args[args.indexOf('--outputPath') + 1];
-}
-
-async function run() {
-    await compare(mainPath, deltaPath, outputPath, 'all');
-
-    process.exit(0);
-}
-
-run();
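One note on the markdown output above: the table column is still the hard-coded 'Duration' header from `tableHeader`, but each cell now renders whatever unit its entry carries. A rough sketch of the details text `buildDurationDetailsEntry` produces for a RAM entry; the numbers, the percent formatting, and the line separator inside each block are assumptions for illustration:

```ts
// Two stat blocks (baseline and current), joined with '<br/><br/>' as in markdown.ts.
const detailsCell = [
    '**Baseline**\nMean: 245.000 MB\nStdev: 5.000 MB (2.0%)',
    '**Current**\nMean: 251.300 MB\nStdev: 4.800 MB (1.9%)',
].join('<br/><br/>');
```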
diff --git a/tests/e2e/testRunner.ts b/tests/e2e/testRunner.ts
index e5ebca5ad723..3556f311a393 100644
--- a/tests/e2e/testRunner.ts
+++ b/tests/e2e/testRunner.ts
@@ -17,7 +17,7 @@
 import {execSync} from 'child_process';
 import fs from 'fs';
 import type {TestResult} from '@libs/E2E/client';
-import type {TestConfig} from '@libs/E2E/types';
+import type {TestConfig, Unit} from '@libs/E2E/types';
 import compare from './compare/compare';
 import defaultConfig from './config';
 import createServerInstance from './server';
@@ -96,21 +96,19 @@ const runTests = async (): Promise<void> => {
 
     // Create a dict in which we will store the run durations for all tests
     const results: Record = {};
+    const metricForTest: Record<string, Unit> = {};
 
     const attachTestResult = (testResult: TestResult) => {
         let result = 0;
 
-        if (testResult?.duration !== undefined) {
-            if (testResult.duration < 0) {
+        if (testResult?.metric !== undefined) {
+            if (testResult.metric < 0) {
                 return;
             }
-            result = testResult.duration;
-        }
-        if (testResult?.renderCount !== undefined) {
-            result = testResult.renderCount;
+            result = testResult.metric;
         }
 
-        Logger.log(`[LISTENER] Test '${testResult?.name}' on '${testResult?.branch}' measured ${result}`);
+        Logger.log(`[LISTENER] Test '${testResult?.name}' on '${testResult?.branch}' measured ${result}${testResult.unit}`);
 
         if (testResult?.branch && !results[testResult.branch]) {
             results[testResult.branch] = {};
@@ -119,6 +117,10 @@ const runTests = async (): Promise<void> => {
         if (testResult?.branch && testResult?.name) {
             results[testResult.branch][testResult.name] = (results[testResult.branch][testResult.name] ?? []).concat(result);
         }
+
+        if (!metricForTest[testResult.name] && testResult.unit) {
+            metricForTest[testResult.name] = testResult.unit;
+        }
     };
 
     // Collect results while tests are being executed
@@ -161,27 +163,32 @@ const runTests = async (): Promise<void> => {
                 attachTestResult({
                     name: `${test.name} (CPU)`,
                     branch,
-                    duration: metrics.cpu,
+                    metric: metrics.cpu,
+                    unit: '%',
                 });
                 attachTestResult({
                     name: `${test.name} (FPS)`,
                     branch,
-                    duration: metrics.fps,
+                    metric: metrics.fps,
+                    unit: 'FPS',
                 });
                 attachTestResult({
                     name: `${test.name} (RAM)`,
                     branch,
-                    duration: metrics.ram,
+                    metric: metrics.ram,
+                    unit: 'MB',
                 });
                 attachTestResult({
                     name: `${test.name} (CPU/JS)`,
                     branch,
-                    duration: metrics.jsThread,
+                    metric: metrics.jsThread,
+                    unit: '%',
                 });
                 attachTestResult({
                     name: `${test.name} (CPU/UI)`,
                     branch,
-                    duration: metrics.uiThread,
+                    metric: metrics.uiThread,
+                    unit: '%',
                 });
             }
             removeListener();
@@ -282,7 +289,7 @@ const runTests = async (): Promise<void> => {
 
     // Calculate statistics and write them to our work file
     Logger.info('Calculating statics and writing results');
-    compare(results.main, results.delta, `${config.OUTPUT_DIR}/output.md`);
+    compare(results.main, results.delta, `${config.OUTPUT_DIR}/output.md`, 'all', metricForTest);
 
     await server.stop();
 };
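A small behavioural detail in the runner worth keeping in mind: `metricForTest` records the first unit reported for a given test name and ignores later ones, so results submitted under one name are always displayed with whichever unit arrived first. A self-contained sketch of that logic; the `record` helper is hypothetical and simply mirrors the inline branch in `attachTestResult`:

```ts
import type {Unit} from '@libs/E2E/types';

const metricForTest: Record<string, Unit> = {};

// Mirrors `if (!metricForTest[testResult.name] && testResult.unit)` from attachTestResult.
function record(name: string, unit?: Unit) {
    if (!metricForTest[name] && unit) {
        metricForTest[name] = unit;
    }
}

record('My test (RAM)', 'MB');
record('My test (RAM)', '%'); // ignored: 'MB' is already registered for this name
console.log(metricForTest); // { 'My test (RAM)': 'MB' }
```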