diff --git a/package.json b/package.json
index 0d55515cb8ce..92698102e064 100644
--- a/package.json
+++ b/package.json
@@ -50,8 +50,8 @@
         "analyze-packages": "ANALYZE_BUNDLE=true webpack --config config/webpack/webpack.common.js --env envFile=.env.production",
         "symbolicate:android": "npx metro-symbolicate android/app/build/generated/sourcemaps/react/release/index.android.bundle.map",
         "symbolicate:ios": "npx metro-symbolicate main.jsbundle.map",
-        "test:e2e": "ts-node tests/e2e/testRunner.js --development --skipCheckout --skipInstallDeps --buildMode none",
-        "test:e2e:dev": "ts-node tests/e2e/testRunner.js --development --skipCheckout --config ./config.dev.js --buildMode skip --skipInstallDeps",
+        "test:e2e": "ts-node tests/e2e/testRunner.js --config ./config.local.ts",
+        "test:e2e:dev": "ts-node tests/e2e/testRunner.js --config ./config.dev.js",
         "gh-actions-unused-styles": "./.github/scripts/findUnusedKeys.sh",
         "workflow-test": "./workflow_tests/scripts/runWorkflowTests.sh",
         "workflow-test:generate": "ts-node workflow_tests/utils/preGenerateTest.js",
diff --git a/tests/e2e/ADDING_TESTS.md b/tests/e2e/ADDING_TESTS.md
index 8d4683636e70..f262a5ed9a0a 100644
--- a/tests/e2e/ADDING_TESTS.md
+++ b/tests/e2e/ADDING_TESTS.md
@@ -1,51 +1,5 @@
 # Adding new E2E Tests
 
-## Running your new test in development mode
-
-Typically you'd run all the tests with `npm run test:e2e` on your machine.
-This will run the tests with some local settings, however that is not
-optimal when you add a new test for which you want to quickly test if it works, as the prior command
-still runs the release version of the app, which is hard to debug.
-
-I recommend doing the following.
-
-1. We need to compile a android development app version that has capturing metrics enabled:
-```bash
-# Make sure that your .env file is the one we need for e2e testing:
-cp ./tests/e2e/.env.e2e .env
-
-# Build the android app like you normally would with
-npm run android
-```
-2. Rename `./index.js` to `./appIndex.js`
-3. Create a new `./index.js` with the following content:
-```js
-require('./src/libs/E2E/reactNativeLaunchingTest');
-```
-4. In `./src/libs/E2E/reactNativeLaunchingTest.ts` change the main app import to the new `./appIndex.js` file:
-```diff
-- import '../../../index';
-+ import '../../../appIndex';
-```
-
-> [!WARNING]
-> Make sure to not commit these changes to the repository!
-
-Now you can start the metro bundler in e2e mode with:
-
-```bash
-CAPTURE_METRICS=true E2E_TESTING=true npm start -- --reset-cache
-```
-
-Then we can execute our test with:
-
-```
-npm run test:e2e:dev -- --includes "My new test name"
-```
-
-> - `--includes "MyTestName"` will only run the test with the name "MyTestName", but is optional
-
-
 ## Creating a new test
 
 Tests are executed on device, inside the app code.
@@ -144,8 +98,13 @@
 Done! When you now start the test runner, your new test will be executed as well
 
 ## Quickly test your test
 
-To check your new test you can simply run `npm run test:e2e`, which uses the
-`--development` flag. This will run the tests on the branch you are currently on, runs fewer iterations and most importantly, it tries to reuse the existing APK and just patch into the new app bundle, instead of rebuilding the release app from scratch.
+> [!TIP]
+> You can run only a specific test by specifying the `--includes` flag:
+> ```sh
+> npm run test:e2e:dev -- --includes "My new test name"
+> ```
+
+It is recommended to run a debug build of the e2e tests first to iterate quickly on your test. Follow the explanation in the [README](./README.md) to create a debug build.
 
 ## Debugging your test
diff --git a/tests/e2e/README.md b/tests/e2e/README.md
index 64d11d3b2ca4..5f124f20e872 100644
--- a/tests/e2e/README.md
+++ b/tests/e2e/README.md
@@ -6,20 +6,6 @@
 run the actual app on a real device (physical or emulated).
 
 ![Example of a e2e test run](https://raw.githubusercontent.com/hannojg/expensify-app/5f945c25e2a0650753f47f3f541b984f4d114f6d/e2e/example.gif)
 
-To run the e2e tests:
-
-1. Connect an android device. The tests are currently designed to run only on android. It can be
-   a physical device or an emulator.
-
-2. Make sure Fastlane was initialized by running `bundle install`
-
-3. Run the tests with `npm run test:e2e`.
-   > 💡 Tip: To run the tests locally faster, and you are only making changes to JS, it's recommended to
-     build the app once with `npm run android-build-e2e` and from then on run the tests with
-     `npm run test:e2e -- --buildMode js-only`. This will only rebuild the JS code, and not the
-     whole native app!
-
-Ideally you want to run these tests on your branch before you want to merge your new feature to `main`.
 
 ## Available CLI options
 
@@ -27,23 +13,78 @@
 The tests can be run with the following CLI options:
 
 - `--config`: Extend/Overwrite the default config with your values, e.g. `--config config.local.ts`
 - `--includes`: Expects a string/regexp to filter the tests to run, e.g. `--includes "login|signup"`
-- `--skipInstallDeps`: Skips the `npm install` step, useful during development
-- `--development`: Applies some default configurations:
-  - Sets the config to `config.local.ts`, which executes the tests with fewer iterations
-  - Runs the tests only on the current branch
-- `--buildMode`: There are three build modes, the default is `full`:
-  1. **full**: rebuilds the full native app in (e2e) release mode
-  2. **js-only**: only rebuilds the js bundle, and then re-packages
-     the existing native app with the new package. If there
-     is no existing native app, it will fallback to mode "full"
-  3. **skip**: does not rebuild anything, and just runs the existing native app
-- `--skipCheckout`: Won't checkout any baseline or comparison branch, and will just run the tests
-
-## Available environment variables
-
-The tests can be run with the following environment variables:
-
-- `baseline`: Change the baseline to run the tests again (default is `main`).
+
+## Running the tests on your machine
+
+You have two options when running the e2e tests:
+
+1. Run a debug build of the app (useful when developing a test)
+2. Run two (e2e) release builds against each other (useful to test performance regression and the suite as a whole)
+
+### Running a debug build
+
+1. You need to create a debug build of the app that's configured with some build flags to enable e2e testing.
+The build flags should be specified in your `./.env` file. You can use the `./tests/e2e/.env.e2e` file as a template:
+
+```sh
+cp ./tests/e2e/.env.e2e .env
+```
+
+> [!IMPORTANT]
+> There are some non-public environment variables that you still have to add to the `.env` file. Ask on Slack for the values (cc @vit, @andrew, @hanno gödecke).
+
+2. Create a new Android build like you usually would:
+
+```sh
+npm run android
+```
+
+3. We need to modify the app entry to point to the one used by the tests. Therefore rename `./index.js` to `./appIndex.js` temporarily.
+
+4. Create a new `./index.js` with the following content:
+```js
+require('./src/libs/E2E/reactNativeLaunchingTest');
+```
+
+5. In `./src/libs/E2E/reactNativeLaunchingTest.ts` change the main app import to the new `./appIndex.js` file:
+```diff
+- import '../../../index';
++ import '../../../appIndex';
+```
+
+6. You can now run the tests. This command will invoke the test runner:
+
+```sh
+npm run test:e2e:dev
+```
+
+### Running two release builds
+
+The e2e tests are meant to detect performance regressions. For that we need to compare two builds against each other. On the CI system this is e.g. the latest release build (baseline) vs. the currently merged PR (compare).
+
+You need to build the two apps first. Note that the two apps will be installed on the same device at the same time, so each app needs a different package name. Therefore, we have special build types for the e2e tests.
+
+1. Create a new Android build for the baseline:
+
+> [!IMPORTANT]
+> There are some non-public environment variables that you still have to add to the `./tests/e2e/.env.e2e` and `./tests/e2e/.env.e2edelta` file. Ask on Slack for the values (cc @vit, @andrew, @hanno gödecke).
+
+```sh
+npm run android-build-e2e
+```
+
+2. Create a new Android build for the compare:
+
+```sh
+npm run android-build-e2edelta
+```
+
+3. Run the tests:
+
+```sh
+npm run test:e2e
+```
+
 ## Performance regression testing
diff --git a/tests/e2e/TestSpec.yml b/tests/e2e/TestSpec.yml
index acc5926e93a5..e0dcd2b9b66d 100644
--- a/tests/e2e/TestSpec.yml
+++ b/tests/e2e/TestSpec.yml
@@ -22,7 +22,7 @@ phases:
     commands:
       - cd zip
       - npm install underscore ts-node typescript
-      - npx ts-node e2e/testRunner.js -- --skipInstallDeps --buildMode "skip" --skipCheckout --mainAppPath app-e2eRelease.apk --deltaAppPath app-e2edeltaRelease.apk
+      - npx ts-node e2e/testRunner.js -- --mainAppPath app-e2eRelease.apk --deltaAppPath app-e2edeltaRelease.apk
 
 artifacts:
   - $WORKING_DIRECTORY
diff --git a/tests/e2e/config.dev.js b/tests/e2e/config.dev.js
index 8d12fb5ce007..0e5d3dc01a95 100644
--- a/tests/e2e/config.dev.js
+++ b/tests/e2e/config.dev.js
@@ -7,4 +7,5 @@ export default {
     MAIN_APP_PATH: appPath,
     DELTA_APP_PATH: appPath,
     RUNS: 8,
+    BOOT_COOL_DOWN: 5 * 1000,
 };
diff --git a/tests/e2e/testRunner.js b/tests/e2e/testRunner.js
index cbdb22de6c63..77b2033dfece 100644
--- a/tests/e2e/testRunner.js
+++ b/tests/e2e/testRunner.js
@@ -21,7 +21,6 @@ import compare from './compare/compare';
 import defaultConfig from './config';
 import createServerInstance from './server';
 import reversePort from './utils/androidReversePort';
-import execAsync from './utils/execAsync';
 import installApp from './utils/installApp';
 import killApp from './utils/killApp';
 import launchApp from './utils/launchApp';
@@ -39,16 +38,6 @@ const getArg = (argName) => {
     return args[argIndex + 1];
 };
 
-let branch = 'main';
-if (args.includes('--branch')) {
-    branch = getArg('--branch');
-}
-
-let label = branch;
-if (args.includes('--label')) {
-    label = getArg('--label');
-}
-
 let config = defaultConfig;
 const setConfigPath = (configPathParam) => {
     let configPath = configPathParam;
@@ -59,164 +48,40 @@ const setConfigPath = (configPathParam) => {
     config = _.extend(defaultConfig, customConfig);
 };
 
-const skipCheckout = args.includes('--skipCheckout');
-
-const skipInstallDeps = args.includes('--skipInstallDeps');
-
-// There are three build modes:
-// 1. full: rebuilds the full native app in (e2e) release mode
-// 2. js-only: only rebuilds the js bundle, and then re-packages
-//    the existing native app with the new bundle. If there
-//    is no existing native app, it will fallback to mode "full"
-// 3. skip: does not rebuild anything, and just runs the existing native app
-let buildMode = 'full';
-
-// When we are in dev mode we want to apply certain default params and configs
-const isDevMode = args.includes('--development');
-if (isDevMode) {
-    setConfigPath('config.local.ts');
-    buildMode = 'js-only';
-}
-
-if (args.includes('--buildMode')) {
-    buildMode = getArg('--buildMode');
-}
-
 if (args.includes('--config')) {
     const configPath = getArg('--config');
     setConfigPath(configPath);
 }
 
-// Important set app path after correct config file has been set
-let mainAppPath = getArg('--mainAppPath') || config.MAIN_APP_PATH;
-let deltaAppPath = getArg('--deltaAppPath') || config.DELTA_APP_PATH;
-
-// Create some variables after the correct config file has been loaded
-const OUTPUT_FILE = `${config.OUTPUT_DIR}/${label}.json`;
-
-if (isDevMode) {
-    Logger.note(`🟠 Running in development mode.`);
+// Important: set app path only after correct config file has been loaded
+const mainAppPath = getArg('--mainAppPath') || config.MAIN_APP_PATH;
+const deltaAppPath = getArg('--deltaAppPath') || config.DELTA_APP_PATH;
+// Check that the app files exist:
+if (!fs.existsSync(mainAppPath)) {
+    throw new Error(`Main app path does not exist: ${mainAppPath}`);
+}
+if (!fs.existsSync(deltaAppPath)) {
+    throw new Error(`Delta app path does not exist: ${deltaAppPath}`);
 }
 
-if (isDevMode) {
-    // On dev mode only delete any existing output file but keep the folder
-    if (fs.existsSync(OUTPUT_FILE)) {
-        fs.rmSync(OUTPUT_FILE);
-    }
-} else {
-    // On CI it is important to re-create the output dir, it has a different owner
-    // therefore this process cannot write to it
-    try {
-        fs.rmSync(config.OUTPUT_DIR, {recursive: true, force: true});
+// On CI it is important to re-create the output dir, it has a different owner
+// therefore this process cannot write to it
+try {
+    fs.rmSync(config.OUTPUT_DIR, {recursive: true, force: true});
 
-        fs.mkdirSync(config.OUTPUT_DIR);
-    } catch (error) {
-        // Do nothing
-        console.error(error);
-    }
+    fs.mkdirSync(config.OUTPUT_DIR);
+} catch (error) {
+    // Do nothing
+    console.error(error);
 }
 
 // START OF TEST CODE
 
 const runTests = async () => {
-    // check if using buildMode "js-only" or "none" is possible
-    if (buildMode !== 'full') {
-        const mainAppExists = fs.existsSync(mainAppPath);
-        const deltaAppExists = fs.existsSync(deltaAppPath);
-        if (!mainAppExists || !deltaAppExists) {
-            Logger.warn(`Build mode "${buildMode}" is not possible, because the app does not exist. Falling back to build mode "full".`);
-            Logger.note(`App path: ${mainAppPath}`);
-
-            buildMode = 'full';
-        }
-    }
-
-    // Build app
-    if (buildMode === 'full') {
-        Logger.log(`Test setup - building main branch`);
-
-        if (!skipCheckout) {
-            // Switch branch
-            Logger.log(`Test setup - checkout main`);
-            await execAsync(`git checkout main`);
-        }
-
-        if (!skipInstallDeps) {
-            Logger.log(`Test setup - npm install`);
-            await execAsync('npm i');
-        }
-
-        await execAsync('npm run android-build-e2e');
-
-        if (branch != null && !skipCheckout) {
-            // Switch branch
-            Logger.log(`Test setup - checkout branch '${branch}'`);
-            await execAsync(`git checkout ${branch}`);
-        }
-
-        if (!skipInstallDeps) {
-            Logger.log(`Test setup - npm install`);
-            await execAsync('npm i');
-        }
-
-        Logger.log(`Test setup '${branch}' - building delta branch`);
-        await execAsync('npm run android-build-e2edelta');
-    } else if (buildMode === 'js-only') {
-        Logger.log(`Test setup '${branch}' - building js bundle`);
-
-        if (!skipInstallDeps) {
-            Logger.log(`Test setup '${branch}' - npm install`);
-            await execAsync('npm i');
-        }
-
-        // Build a new JS bundle
-        if (!skipCheckout) {
-            // Switch branch
-            Logger.log(`Test setup - checkout main`);
-            await execAsync(`git checkout main`);
-        }
-
-        if (!skipInstallDeps) {
-            Logger.log(`Test setup - npm install`);
-            await execAsync('npm i');
-        }
-
-        const tempDir = `${config.OUTPUT_DIR}/temp`;
-        let tempBundlePath = `${tempDir}/index.android.bundle`;
-        await execAsync(`rm -rf ${tempDir} && mkdir ${tempDir}`);
-        await execAsync(`npx react-native bundle --platform android --dev false --entry-file ${config.ENTRY_FILE} --bundle-output ${tempBundlePath}`, {E2E_TESTING: 'true'});
-        // Repackage the existing native app with the new bundle
-        let tempApkPath = `${tempDir}/app-release.apk`;
-        await execAsync(`./scripts/android-repackage-app-bundle-and-sign.sh ${mainAppPath} ${tempBundlePath} ${tempApkPath}`);
-        mainAppPath = tempApkPath;
-
-        // Build a new JS bundle
-        if (!skipCheckout) {
-            // Switch branch
-            Logger.log(`Test setup - checkout main`);
-            await execAsync(`git checkout ${branch}`);
-        }
-
-        if (!skipInstallDeps) {
-            Logger.log(`Test setup - npm install`);
-            await execAsync('npm i');
-        }
-
-        tempBundlePath = `${tempDir}/index.android.bundle`;
-        await execAsync(`rm -rf ${tempDir} && mkdir ${tempDir}`);
-        await execAsync(`npx react-native bundle --platform android --dev false --entry-file ${config.ENTRY_FILE} --bundle-output ${tempBundlePath}`, {E2E_TESTING: 'true'});
-        // Repackage the existing native app with the new bundle
-        tempApkPath = `${tempDir}/app-release.apk`;
-        await execAsync(`./scripts/android-repackage-app-bundle-and-sign.sh ${deltaAppPath} ${tempBundlePath} ${tempApkPath}`);
-        deltaAppPath = tempApkPath;
-    }
-
-    let progressLog = Logger.progressInfo('Installing apps and reversing port');
-
+    Logger.info('Installing apps and reversing port');
     await installApp('android', config.MAIN_APP_PACKAGE, mainAppPath);
     await installApp('android', config.DELTA_APP_PACKAGE, deltaAppPath);
     await reversePort();
-    progressLog.done();
 
     // Start the HTTP server
     const server = createServerInstance();
@@ -251,162 +116,107 @@ const runTests = async () => {
         results[testResult.branch][testResult.name] = (results[testResult.branch][testResult.name] || []).concat(result);
     });
 
+    // Function to run a single test iteration
+    async function runTestIteration(appPackage, iterationText, launchArgs) {
+        Logger.info(iterationText);
+
+        // Making sure the app is really killed (e.g. if a prior test run crashed)
+        Logger.log('Killing', appPackage);
+        await killApp('android', appPackage);
+
+        Logger.log('Launching', appPackage);
+        await launchApp('android', appPackage, config.ACTIVITY_PATH, launchArgs);
+
+        await withFailTimeout(
+            new Promise((resolve) => {
+                const cleanup = server.addTestDoneListener(() => {
+                    Logger.success(iterationText);
+                    cleanup();
+                    resolve();
+                });
+            }),
+            iterationText,
+        );
+
+        Logger.log('Killing', appPackage);
+        await killApp('android', appPackage);
+    }
+
     // Run the tests
-    const suites = _.values(config.TESTS_CONFIG);
-    for (let suiteIndex = 0; suiteIndex < suites.length; suiteIndex++) {
-        const suite = _.values(config.TESTS_CONFIG)[suiteIndex];
+    const tests = _.values(config.TESTS_CONFIG);
+    for (let testIndex = 0; testIndex < tests.length; testIndex++) {
+        const test = _.values(config.TESTS_CONFIG)[testIndex];
 
         // check if we want to skip the test
         if (args.includes('--includes')) {
            const includes = args[args.indexOf('--includes') + 1];
 
            // assume that "includes" is a regexp
-            if (!suite.name.match(includes)) {
+            if (!test.name.match(includes)) {
                // eslint-disable-next-line no-continue
                continue;
            }
        }
 
-        const coolDownLogs = Logger.progressInfo(`Cooling down for ${config.BOOT_COOL_DOWN / 1000}s`);
-        coolDownLogs.updateText(`Cooling down for ${config.BOOT_COOL_DOWN / 1000}s`);
-
-        // Having the cooldown right at the beginning should hopefully lower the chances of heat
+        // Having the cooldown right at the beginning lowers the chances of heat
         // throttling from the previous run (which we have no control over and will be a
-        // completely different AWS DF customer/app). It also gives the time to cool down between test suites.
+        // completely different AWS DF customer/app). It also gives the time to cool down between tests.
+        Logger.info(`Cooling down for ${config.BOOT_COOL_DOWN / 1000}s`);
         await sleep(config.BOOT_COOL_DOWN);
-        coolDownLogs.done();
-
-        server.setTestConfig(suite);
-
-        const warmupLogs = Logger.progressInfo(`Running warmup '${suite.name}'`);
-
-        let progressText = `Warmup for suite '${suite.name}' [${suiteIndex + 1}/${suites.length}]\n`;
-        warmupLogs.updateText(progressText);
-
-        Logger.log('Killing main app');
-        await killApp('android', config.MAIN_APP_PACKAGE);
-        Logger.log('Launching main app');
-        await launchApp('android', config.MAIN_APP_PACKAGE);
-
-        await withFailTimeout(
-            new Promise((resolve) => {
-                const cleanup = server.addTestDoneListener(() => {
-                    Logger.log('Main warm up ready ✅');
-                    cleanup();
-                    resolve();
-                });
-            }),
-            progressText,
-        );
-
-        Logger.log('Killing main app');
-        await killApp('android', config.MAIN_APP_PACKAGE);
-        Logger.log('Killing delta app');
-        await killApp('android', config.DELTA_APP_PACKAGE);
-        Logger.log('Launching delta app');
-        await launchApp('android', config.DELTA_APP_PACKAGE);
+        server.setTestConfig(test);
 
-        await withFailTimeout(
-            new Promise((resolve) => {
-                const cleanup = server.addTestDoneListener(() => {
-                    Logger.log('Delta warm up ready ✅');
-                    cleanup();
-                    resolve();
-                });
-            }),
-            progressText,
-        );
+        const warmupText = `Warmup for test '${test.name}' [${testIndex + 1}/${tests.length}]`;
 
-        Logger.log('Killing delta app');
-        await killApp('android', config.DELTA_APP_PACKAGE);
+        // Warmup the main app:
+        await runTestIteration(config.MAIN_APP_PACKAGE, `[MAIN] ${warmupText}`);
 
-        warmupLogs.done();
+        // Warmup the delta app:
+        await runTestIteration(config.DELTA_APP_PACKAGE, `[DELTA] ${warmupText}`);
 
-        // We run each test multiple time to average out the results
-        const testLog = Logger.progressInfo('');
         // For each test case we allow the test to fail three times before we stop the test run:
         const errorCountRef = {
             errorCount: 0,
             allowedExceptions: 3,
         };
-        for (let i = 0; i < config.RUNS; i++) {
-            progressText = `Suite '${suite.name}' [${suiteIndex + 1}/${suites.length}], iteration [${i + 1}/${config.RUNS}]\n`;
-            testLog.updateText(progressText);
-
-            Logger.log('Killing delta app');
-            await killApp('android', config.DELTA_APP_PACKAGE);
-
-            Logger.log('Killing main app');
-            await killApp('android', config.MAIN_APP_PACKAGE);
-
-            Logger.log('Starting main app');
-            await launchApp('android', config.MAIN_APP_PACKAGE, config.ACTIVITY_PATH, {
-                mockNetwork: true,
-            });
 
+        // We run each test multiple times to average out the results
+        for (let testIteration = 0; testIteration < config.RUNS; testIteration++) {
             const onError = (e) => {
-                testLog.done();
                 errorCountRef.errorCount += 1;
-                if (i === 0 || errorCountRef.errorCount === errorCountRef.allowedExceptions) {
+                if (testIteration === 0 || errorCountRef.errorCount === errorCountRef.allowedExceptions) {
+                    Logger.error("There was an error running the test and we've reached the maximum number of allowed exceptions. Stopping the test run.");
                     // If the error happened on the first test run, the test is broken
                     // and we should not continue running it. Or if we have reached the
                     // maximum number of allowed exceptions, we should stop the test run.
                     throw e;
                 }
-                console.error(e);
+                Logger.warn(`There was an error running the test. Continuing the test run. Error: ${e}`);
             };
 
-            // Wait for a test to finish by waiting on its done call to the http server
-            try {
-                await withFailTimeout(
-                    new Promise((resolve) => {
-                        const cleanup = server.addTestDoneListener(() => {
-                            Logger.log(`Test iteration ${i + 1} done!`);
-                            cleanup();
-                            resolve();
-                        });
-                    }),
-                    progressText,
-                );
-            } catch (e) {
-                onError(e);
-            }
-
-            Logger.log('Killing main app');
-            await killApp('android', config.MAIN_APP_PACKAGE);
-
-            Logger.log('Starting delta app');
-            await launchApp('android', config.DELTA_APP_PACKAGE, config.ACTIVITY_PATH, {
+            const launchArgs = {
                 mockNetwork: true,
-            });
+            };
 
-            // Wait for a test to finish by waiting on its done call to the http server
+            const iterationText = `Test '${test.name}' [${testIndex + 1}/${tests.length}], iteration [${testIteration + 1}/${config.RUNS}]`;
+            const mainIterationText = `[MAIN] ${iterationText}`;
+            const deltaIterationText = `[DELTA] ${iterationText}`;
             try {
-                await withFailTimeout(
-                    new Promise((resolve) => {
-                        const cleanup = server.addTestDoneListener(() => {
-                            Logger.log(`Test iteration ${i + 1} done!`);
-                            cleanup();
-                            resolve();
-                        });
-                    }),
-                    progressText,
-                );
+                // Run the test on the main app:
+                await runTestIteration(config.MAIN_APP_PACKAGE, mainIterationText, launchArgs);
+
+                // Run the test on the delta app:
+                await runTestIteration(config.DELTA_APP_PACKAGE, deltaIterationText, launchArgs);
             } catch (e) {
                 onError(e);
             }
         }
-        testLog.done();
     }
 
     // Calculate statistics and write them to our work file
-    progressLog = Logger.progressInfo('Calculating statics and writing results');
-
+    Logger.info('Calculating statistics and writing results');
     compare(results.main, results.delta, `${config.OUTPUT_DIR}/output.md`);
-    progressLog.done();
-
     await server.stop();
 };
diff --git a/tests/e2e/utils/execAsync.js b/tests/e2e/utils/execAsync.js
index 9abc41105f7e..b7dd93d8963e 100644
--- a/tests/e2e/utils/execAsync.js
+++ b/tests/e2e/utils/execAsync.js
@@ -16,7 +16,7 @@ export default (command, env = {}) => {
         ...env,
     };
 
-    Logger.important(command);
+    Logger.note(command);
 
     childProcess = exec(
         command,
@@ -33,7 +33,7 @@ export default (command, env = {}) => {
                 reject(error);
             }
         } else {
-            Logger.note(stdout);
+            Logger.writeToLogFile(stdout);
             resolve(stdout);
         }
     },
diff --git a/tests/e2e/utils/logger.js b/tests/e2e/utils/logger.js
index 5a7046d8c8b0..d0770b7aa8e4 100644
--- a/tests/e2e/utils/logger.js
+++ b/tests/e2e/utils/logger.js
@@ -1,28 +1,17 @@
 import fs from 'fs';
 import path from 'path';
+import _ from 'underscore';
 import CONFIG from '../config';
 
-let isVerbose = true;
-const setLogLevelVerbose = (value) => {
-    isVerbose = value;
-};
-
-// On CI systems when using .progressInfo, the current line won't reset but a new line gets added
-// Which can flood the logs. You can increase this rate to mitigate this effect.
-const LOGGER_PROGRESS_REFRESH_RATE = process.env.LOGGER_PROGRESS_REFRESH_RATE || 250;
 const COLOR_DIM = '\x1b[2m';
 const COLOR_RESET = '\x1b[0m';
 const COLOR_YELLOW = '\x1b[33m';
 const COLOR_RED = '\x1b[31m';
-const COLOR_BLUE = '\x1b[34m';
 const COLOR_GREEN = '\x1b[32m';
 
-const log = (...args) => {
-    if (isVerbose) {
-        console.debug(...args);
-    }
+const getDateString = () => `[${Date()}] `;
 
-    // Write to log file
+const writeToLogFile = (...args) => {
     if (!fs.existsSync(CONFIG.LOG_FILE)) {
         // Check that the directory exists
         const logDir = path.dirname(CONFIG.LOG_FILE);
@@ -32,71 +21,57 @@ const log = (...args) => {
         fs.writeFileSync(CONFIG.LOG_FILE, '');
     }
 
-    const time = new Date();
-    const timeStr = `${time.getHours()}:${time.getMinutes()}:${time.getSeconds()} ${time.getMilliseconds()}`;
-    fs.appendFileSync(CONFIG.LOG_FILE, `[${timeStr}] ${args.join(' ')}\n`);
+    fs.appendFileSync(
+        CONFIG.LOG_FILE,
+        `${_.map(args, (arg) => {
+            if (typeof arg === 'string') {
+                // Remove color codes from arg, because they are not supported in log files
+                // eslint-disable-next-line no-control-regex
+                return arg.replace(/\x1b\[\d+m/g, '');
+            }
+            return arg;
+        })
+            .join(' ')
+            .trim()}\n`,
+    );
 };
 
-const info = (...args) => {
-    log('> ', ...args);
+const log = (...args) => {
+    const argsWithTime = [getDateString(), ...args];
+    console.debug(...argsWithTime);
+    writeToLogFile(...argsWithTime);
 };
 
-const important = (...args) => {
-    const lines = [`🟦 ${COLOR_BLUE}`, ...args, `${COLOR_RESET}\n`];
-    log(...lines);
+const info = (...args) => {
+    log('▶️', ...args);
 };
 
 const success = (...args) => {
-    const lines = [`🟦 ${COLOR_GREEN}`, ...args, `${COLOR_RESET}\n`];
+    const lines = ['✅', COLOR_GREEN, ...args, COLOR_RESET];
     log(...lines);
 };
 
 const warn = (...args) => {
-    const lines = [`\n${COLOR_YELLOW}⚠️`, ...args, `${COLOR_RESET}\n`];
+    const lines = ['⚠️', COLOR_YELLOW, ...args, COLOR_RESET];
     log(...lines);
 };
 
 const note = (...args) => {
-    const lines = [`${COLOR_DIM}`, ...args, `${COLOR_RESET}\n`];
+    const lines = [COLOR_DIM, ...args, COLOR_RESET];
     log(...lines);
 };
 
 const error = (...args) => {
-    const lines = [`\n🔴 ${COLOR_RED}`, ...args, `${COLOR_RESET}\n`];
+    const lines = ['🔴', COLOR_RED, ...args, COLOR_RESET];
     log(...lines);
 };
 
-const progressInfo = (textParam) => {
-    let text = textParam || '';
-    const getTexts = () => [`🕛 ${text}`, `🕔 ${text}`, `🕗 ${text}`, `🕙 ${text}`];
-    log(textParam);
-
-    const startTime = Date.now();
-    let i = 0;
-    const timer = setInterval(() => {
-        process.stdout.write(`\r${getTexts()[i++]}`);
-        // eslint-disable-next-line no-bitwise
-        i &= 3;
-    }, Number(LOGGER_PROGRESS_REFRESH_RATE));
-
-    const getTimeText = () => {
-        const timeInSeconds = Math.round((Date.now() - startTime) / 1000).toFixed(0);
-        return `(${COLOR_DIM}took: ${timeInSeconds}s${COLOR_RESET})`;
-    };
-    return {
-        updateText: (newText) => {
-            text = newText;
-            log(newText);
-        },
-        done: () => {
-            clearInterval(timer);
-            success(`\r✅ ${text} ${getTimeText()}\n`);
-        },
-        error: () => {
-            clearInterval(timer);
-            error(`\r❌ ${text} ${getTimeText()}\n`);
-        },
-    };
+module.exports = {
+    log,
+    info,
+    warn,
+    note,
+    error,
+    success,
+    writeToLogFile,
 };
-
-export {log, info, warn, note, error, success, important, progressInfo, setLogLevelVerbose};
diff --git a/tests/e2e/utils/withFailTimeout.js b/tests/e2e/utils/withFailTimeout.js
index 3a314cd23562..d7ac50a64e00 100644
--- a/tests/e2e/utils/withFailTimeout.js
+++ b/tests/e2e/utils/withFailTimeout.js
@@ -5,7 +5,7 @@ const TIMEOUT = process.env.INTERACTION_TIMEOUT || CONFIG.INTERACTION_TIMEOUT;
 const withFailTimeout = (promise, name) =>
     new Promise((resolve, reject) => {
         const timeoutId = setTimeout(() => {
-            reject(new Error(`[${name}] Interaction timed out after ${(TIMEOUT / 1000).toFixed(0)}s`));
+            reject(new Error(`"${name}": Interaction timed out after ${(TIMEOUT / 1000).toFixed(0)}s`));
         }, Number(TIMEOUT));
 
         promise
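
For reference, the runner builds its configuration by merging the file passed via `--config` over the defaults (`config = _.extend(defaultConfig, customConfig)` in `testRunner.js`), so a custom config only needs to list the keys it overrides. Below is a minimal sketch of what such an override might contain, for example the `config.local.ts` referenced by the new `test:e2e` script; the keys mirror `config.dev.js` above, and the values are illustrative assumptions rather than part of this diff:

```js
// Hypothetical local override for tests/e2e/testRunner.js, loaded via the --config flag.
// Any key not listed here keeps its value from the default tests/e2e/config.js.
export default {
    RUNS: 8, // iterations per test (config.dev.js also uses 8)
    BOOT_COOL_DOWN: 5 * 1000, // cool-down before each test, in milliseconds
};
```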