diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml new file mode 100644 index 0000000000000..2e8652a466309 --- /dev/null +++ b/.github/workflows/benchmark.yml @@ -0,0 +1,56 @@ +name: Benchmark + +on: + pull_request: + paths: + - 'packages/cli/**' + - 'packages/core/**' + - 'packages/workflow/**' + workflow_dispatch: + +jobs: + benchmark: + name: Benchmark + runs-on: ubuntu-latest + timeout-minutes: 20 + env: + DB_POSTGRESDB_PASSWORD: password + steps: + - uses: actions/checkout@v4.1.1 + + - name: Start Postgres + uses: isbang/compose-action@v2.0.0 + with: + compose-file: ./.github/docker-compose.yml + services: postgres + + - run: corepack enable + + - uses: actions/setup-node@v4.0.1 + with: + node-version: 18.x + cache: pnpm + + - run: pnpm install --frozen-lockfile + + - name: Build + if: ${{ inputs.cacheKey == '' }} + run: pnpm build:backend + + - name: Restore cached build artifacts + if: ${{ inputs.cacheKey != '' }} + uses: actions/cache/restore@v4.0.0 + with: + path: ./packages/**/dist + key: ${{ inputs.cacheKey }} + + - run: pnpm build:benchmark + working-directory: packages/cli + + - name: Benchmark + uses: CodSpeedHQ/action@v2 + with: + working-directory: packages/cli + run: | + pnpm benchmark:sqlite + pnpm benchmark:postgres diff --git a/package.json b/package.json index c8bb55dea6333..72a594922542d 100644 --- a/package.json +++ b/package.json @@ -10,6 +10,7 @@ "packageManager": "pnpm@8.14.3", "scripts": { "preinstall": "node scripts/block-npm-install.js", + "benchmark": "pnpm --filter=n8n benchmark", "build": "turbo run build", "build:backend": "pnpm --filter=!@n8n/chat --filter=!n8n-design-system --filter=!n8n-editor-ui build", "build:frontend": "pnpm --filter=@n8n/chat --filter=n8n-design-system --filter=n8n-editor-ui build", @@ -95,7 +96,8 @@ "pyodide@0.23.4": "patches/pyodide@0.23.4.patch", "@types/express-serve-static-core@4.17.43": "patches/@types__express-serve-static-core@4.17.43.patch", "@types/ws@8.5.4": "patches/@types__ws@8.5.4.patch", - "vite-plugin-checker@0.6.4": "patches/vite-plugin-checker@0.6.4.patch" + "vite-plugin-checker@0.6.4": "patches/vite-plugin-checker@0.6.4.patch", + "@codspeed/tinybench-plugin@3.1.0": "patches/@codspeed__tinybench-plugin@3.1.0.patch" } } } diff --git a/packages/cli/package.json b/packages/cli/package.json index 6aab47afde086..936b4816b1b91 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -20,9 +20,12 @@ "bin": "n8n" }, "scripts": { + "benchmark:sqlite": "NODE_ENV=benchmark N8N_LOG_LEVEL=silent DB_TYPE=sqlite node dist/benchmark/main.js", + "benchmark:postgres": "NODE_ENV=benchmark N8N_LOG_LEVEL=silent DB_TYPE=postgresdb node dist/benchmark/main.js", "clean": "rimraf dist .turbo", "typecheck": "tsc", "build": "tsc -p tsconfig.build.json && tsc-alias -p tsconfig.build.json && node scripts/build.mjs", + "build:benchmark": "tsc -p tsconfig.benchmark.json && tsc-alias -p tsconfig.benchmark.json && node scripts/build.mjs && node dist/benchmark/scripts/list-suites.js", "buildAndDev": "pnpm run build && pnpm run dev", "dev": "concurrently -k -n \"TypeScript,Node\" -c \"yellow.bold,cyan.bold\" \"npm run watch\" \"nodemon\"", "dev:worker": "concurrently -k -n \"TypeScript,Node\" -c \"yellow.bold,cyan.bold\" \"npm run watch\" \"nodemon worker\"", @@ -60,6 +63,7 @@ "!dist/**/e2e.*" ], "devDependencies": { + "@codspeed/tinybench-plugin": "^3.1.0", "@redocly/cli": "^1.6.0", "@types/aws4": "^1.5.1", "@types/basic-auth": "^1.1.3", @@ -87,6 +91,7 @@ "chokidar": "^3.5.2", "concurrently": "^8.2.0", 
"ioredis-mock": "^8.8.1", + "tinybench": "^2.6.0", "ts-essentials": "^7.0.3" }, "dependencies": { diff --git a/packages/cli/src/AbstractServer.ts b/packages/cli/src/AbstractServer.ts index 9496b1dffc453..1610600e50e88 100644 --- a/packages/cli/src/AbstractServer.ts +++ b/packages/cli/src/AbstractServer.ts @@ -154,7 +154,7 @@ export abstract class AbstractServer { this.server.on('error', (error: Error & { code: string }) => { if (error.code === 'EADDRINUSE') { - console.log( + this.logger.info( `n8n's port ${PORT} is already in use. Do you have another instance of n8n running already?`, ); process.exit(1); @@ -167,7 +167,7 @@ export abstract class AbstractServer { await this.setupHealthCheck(); - console.log(`n8n ready on ${ADDRESS}, port ${PORT}`); + this.logger.info(`n8n ready on ${ADDRESS}, port ${PORT}`); } async start(): Promise { @@ -236,11 +236,11 @@ export abstract class AbstractServer { await this.configure(); if (!inTest) { - console.log(`Version: ${N8N_VERSION}`); + this.logger.info(`Version: ${N8N_VERSION}`); const defaultLocale = config.getEnv('defaultLocale'); if (defaultLocale !== 'en') { - console.log(`Locale: ${defaultLocale}`); + this.logger.info(`Locale: ${defaultLocale}`); } await this.externalHooks.run('n8n.ready', [this, config]); diff --git a/packages/cli/src/benchmark/benchmark.md b/packages/cli/src/benchmark/benchmark.md new file mode 100644 index 0000000000000..1df01b8cbe22a --- /dev/null +++ b/packages/cli/src/benchmark/benchmark.md @@ -0,0 +1,65 @@ +# Benchmark + +This package contains benchmarks to measure the execution time of n8n backend operations in sqlite and Postgres. + +Benchmarks are organized into **suites** for the scenario to benchmark, **tasks** for operations in that scenario, and **hooks** for setup and teardown, implemented on top of [`tinybench`](https://github.com/tinylibs/tinybench). Execution in CI is delegated to [Codspeed](https://codspeed.io/) to keep measurements consistent and to monitor improvements and regressions. + +## Running benchmarks + +To run benchmarks: + +```sh +pnpm build:benchmark +pnpm benchmark:sqlite # or +pnpm benchmark:postgres +``` + +Locally, the benchmarking run can be configured via [environment variables](https://docs.n8n.io/hosting/configuration/environment-variables/benchmarking). In CI, the configuration is set by Codspeed. + +## Creating benchmarks + +To create benchmarks: + +1. Create a file at `suites/**/{suiteId}-{suiteTitle}.ts`. +2. Include a `suite()` call for the scenario to benchmark. +3. Inside the suite, include one or more `task()` calls for operations in that scenario. `task()` must contain only the specific operation whose execution time to measure. Move any per-task setup and teardown to `beforeEachTask()` and `afterEachTask()` in the suite. +4. Include workflows at `suites/workflows/{suiteId}-{ordinalNumber}`. During setup, workflows at this dir are saved in the temp DB and activated in memory. +5. Run `pnpm build:benchmark` to add the suite and its tasks to the index below. + +## Index of benchmarking suites + +> **Note**: All workflows with default settings unless otherwise specified, e.g. `EXECUTIONS_MODE` is `regular` unless `queue` is specified. 
+ + + +### 001 - Production workflow with authless webhook node + +- [using "Respond immediately" mode](./suites/workflows/001-1.json) +- [using "When last node finishes" mode](./suites/workflows/001-2.json) +- [using "Respond to Webhook node" mode](./suites/workflows/001-3.json) + + + +## Reading benchmarks + +In a benchmarking run, a task is repeatedly executed for a duration and for a number of iterations - the run will continue until the number of iterations is reached, even if this exceeds the duration. + +``` +BENCHMARK suites/001-production-webhook-with-authless-webhook-node.suite.ts [sqlite] + + • using "Respond immediately" mode + · Ran 27 iterations in 509.992 ms at a rate of 52.941 op/s + · p75 20.251 ms ··· p99 64.570 ms ··· p999 64.570 ms + · min 8.363 ms ···· max 64.570 ms ··· mean 18.888 ms + · MoE ±4.1% ··· std err 02.037 ms ··· std dev 10.586 ms +``` + +`p{n}` is the percentile, i.e. the percentage of data points in a distribution that are less than or equal to a value. For example, `p75` being 20.251 ms means that 75% of the 27 iterations for the task `using "Respond immediately" mode` took 20.251 ms or less. `p75` is the execution time that the majority of users experience, `p99` captures worst-case scenarios for all but 1% of users, and `p999` includes performance at extreme cases for the slowest 0.1% of users. + +`min` is the shortest execution time recorded across all iterations of the task, `max` is the longest, and `mean` is the average. + +`MoE` (margin of error) reflects how much the sample mean is expected to differ from the true population mean. For example, a margin of error of ±4.1% in the task `using "Respond immediately" mode` suggests that, if the benchmarking run were repeated multiple times, the sample mean would fall within 4.1% of the true population mean in 95% of those runs, assuming a standard confidence level. This range indicates the variability we might see due to the randomness of selecting a sample. + +`std err` (standard error) reflects how closely a sample mean is expected to approximate the true population mean. A smaller standard error indicates that the sample mean is likely to be a more accurate estimate of the population mean because the variation among sample means is less. For example, in the task `using "Respond immediately" mode`, the standard error is 2.037 ms, which suggests that the sample mean is expected to differ from the true population mean by 2.037 ms on average. + +`std dev` (standard deviation) is the amount of dispersion across samples. When low, it indicates that the samples tend to be close to the mean; when high, it indicates that the samples are spread out over a wider range. For example, in the task `using "Respond immediately" mode`, the standard deviation is 10.586 ms, which suggests that the execution times varied significantly across iterations. 
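
For illustration, here is a minimal sketch of what an additional suite file under `suites/` might look like, using the `suite()`, `task()` and `beforeEachTask()` helpers exported from `lib`. The suite ID `002`, its title, the webhook paths and the workflows they assume are hypothetical and not part of this change; only the bodies of the `task()` calls are measured.

```ts
// packages/cli/src/benchmark/suites/002-example.suite.ts (hypothetical)
import { agent, beforeEachTask, suite, task } from '../lib';

suite('Example suite with per-task setup', () => {
	beforeEachTask(async () => {
		// Per-task setup belongs in hooks, not in the task body, so that only
		// the operation inside `task()` is timed. This call is a placeholder.
		await agent.get('/webhook/002-1');
	});

	task('hitting the first workflow of this suite', async () => {
		// assumes suites/workflows/002-1.json exists and was activated during setup
		await agent.get('/webhook/002-1');
	});

	task('hitting the second workflow of this suite', async () => {
		// assumes suites/workflows/002-2.json exists and was activated during setup
		await agent.get('/webhook/002-2');
	});
});
```

After `pnpm build:benchmark` (which also regenerates the index above via `list-suites.js`), such a suite runs locally with `pnpm benchmark:sqlite` or `pnpm benchmark:postgres`, optionally tuned via the new `N8N_BENCHMARK_*` variables, e.g. `N8N_BENCHMARK_TIME=1000 N8N_BENCHMARK_ITERATIONS=25 pnpm benchmark:sqlite`.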
diff --git a/packages/cli/src/benchmark/lib/agent.ts b/packages/cli/src/benchmark/lib/agent.ts new file mode 100644 index 0000000000000..62ffc3c63fda0 --- /dev/null +++ b/packages/cli/src/benchmark/lib/agent.ts @@ -0,0 +1,23 @@ +import axios from 'axios'; +import { BACKEND_BASE_URL, INSTANCE_ONWER } from './constants'; +import { ApplicationError } from 'n8n-workflow'; + +export const agent = axios.create({ baseURL: BACKEND_BASE_URL }); + +export async function authenticateAgent() { + const response = await agent.post('/rest/login', { + email: INSTANCE_ONWER.EMAIL, + password: INSTANCE_ONWER.PASSWORD, + }); + + const cookies = response.headers['set-cookie']; + + if (!cookies || cookies.length !== 1) { + throw new ApplicationError('Expected cookie', { level: 'warning' }); + } + + const [cookie] = cookies; + + agent.defaults.headers.Cookie = cookie; + agent.defaults.headers['x-n8n-api-key'] = INSTANCE_ONWER.API_KEY; +} diff --git a/packages/cli/src/benchmark/lib/api.ts b/packages/cli/src/benchmark/lib/api.ts new file mode 100644 index 0000000000000..b4bbbc83a4551 --- /dev/null +++ b/packages/cli/src/benchmark/lib/api.ts @@ -0,0 +1,98 @@ +import 'reflect-metadata'; +import path from 'node:path'; +import type Bench from 'tinybench'; +import { assert } from 'n8n-workflow'; +import glob from 'fast-glob'; +import callsites from 'callsites'; +import type { Suites, Task, Callback } from './types'; +import { DuplicateHookError } from './errors/duplicate-hook.error'; +import { DuplicateSuiteError } from './errors/duplicate-suite.error'; + +const suites: Suites = {}; + +export async function collectSuites() { + const files = await glob('**/*.suite.js', { + cwd: path.join('dist', 'benchmark'), + absolute: true, + }); + + for (const f of files) { + await import(f); + } + + return suites; +} + +export function registerSuites(bench: Bench) { + for (const { name: suiteName, hooks, tasks } of Object.values(suites)) { + /** + * In tinybench, `beforeAll` and `afterAll` refer to all _iterations_ of + * a single task, while `beforeEach` and `afterEach` refer to each _iteration_. + * + * In jest and vitest, `beforeAll` and `afterAll` refer to all _tests_, + * while `beforeEach` and `afterEach` refer to each _test_. + * + * This API renames tinybench's hooks to prevent confusion from familiarity with jest. + */ + const options: Record = {}; + + if (hooks.beforeEachTask) options.beforeAll = hooks.beforeEachTask; + if (hooks.afterEachTask) options.afterAll = hooks.afterEachTask; + + for (const t of tasks) { + const taskName = process.env.CI === 'true' ? 
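+        // In CI, prefix the task name with its suite name ('suite::task') so the patched
+        // CodSpeed plugin can derive a stable per-task URI instead of using the calling file,
+        // which would always point at benchmark/lib/api.js (see patches/@codspeed__tinybench-plugin@3.1.0.patch).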
[suiteName, t.name].join('::') : t.name; + + bench.add(taskName, t.operation, options); + } + } +} + +function suiteKey() { + const key = callsites() + .map((site) => site.getFileName()) + .filter((site): site is string => site !== null) + .find((site) => site.endsWith('.suite.js')); + + assert(key !== undefined); + + return key.replace(/^.*benchmark\//, '').replace(/\.js$/, '.ts'); +} + +export function suite(suiteName: string, suiteFn: () => void) { + const key = suiteKey(); + + if (suites[key]) throw new DuplicateSuiteError(key); + + suites[key] = { name: suiteName, hooks: {}, tasks: [] }; + + suiteFn(); +} + +export function task(taskName: string, operation: Task['operation']) { + const key = suiteKey(); + + suites[key].tasks.push({ + name: taskName, + operation, + }); +} + +export function beforeEachTask(fn: Callback) { + const key = suiteKey(); + + if (suites[key]?.hooks.beforeEachTask) { + throw new DuplicateHookError('beforeEachTask', key); + } + + suites[key].hooks.beforeEachTask = fn; +} + +export function afterEachTask(fn: Callback) { + const key = suiteKey(); + + if (suites[key]?.hooks.afterEachTask) { + throw new DuplicateHookError('afterEachTask', key); + } + + suites[key].hooks.afterEachTask = fn; +} diff --git a/packages/cli/src/benchmark/lib/constants.ts b/packages/cli/src/benchmark/lib/constants.ts new file mode 100644 index 0000000000000..5cb0e8bdb442a --- /dev/null +++ b/packages/cli/src/benchmark/lib/constants.ts @@ -0,0 +1,9 @@ +export const BACKEND_BASE_URL = 'http://127.0.0.1:5678'; // localhost on GitHub Actions runners refuses connections + +export const INSTANCE_ONWER = { + EMAIL: 'instance@owner.com', + PASSWORD: 'password', + FIRST_NAME: 'Instance', + LAST_NAME: 'Owner', + API_KEY: 'n8n_api_123', +}; diff --git a/packages/cli/src/benchmark/lib/errors/duplicate-hook.error.ts b/packages/cli/src/benchmark/lib/errors/duplicate-hook.error.ts new file mode 100644 index 0000000000000..d9467b9f38987 --- /dev/null +++ b/packages/cli/src/benchmark/lib/errors/duplicate-hook.error.ts @@ -0,0 +1,10 @@ +import { ApplicationError } from 'n8n-workflow'; + +export class DuplicateHookError extends ApplicationError { + constructor(hookName: 'beforeEachTask' | 'afterEachTask', key: string) { + super( + `Duplicate \`${hookName}\` hook found at \`${key}\`. Please define a single \`${hookName}\` hook for this file.`, + { level: 'warning' }, + ); + } +} diff --git a/packages/cli/src/benchmark/lib/errors/duplicate-suite.error.ts b/packages/cli/src/benchmark/lib/errors/duplicate-suite.error.ts new file mode 100644 index 0000000000000..566ac14c58a94 --- /dev/null +++ b/packages/cli/src/benchmark/lib/errors/duplicate-suite.error.ts @@ -0,0 +1,9 @@ +import { ApplicationError } from 'n8n-workflow'; + +export class DuplicateSuiteError extends ApplicationError { + constructor(key: string) { + super(`Duplicate suite found at \`${key}\`. 
Please define a single suite for this file.`, { + level: 'warning', + }); + } +} diff --git a/packages/cli/src/benchmark/lib/errors/postgres-connection.error.ts b/packages/cli/src/benchmark/lib/errors/postgres-connection.error.ts new file mode 100644 index 0000000000000..d280e88bb89e7 --- /dev/null +++ b/packages/cli/src/benchmark/lib/errors/postgres-connection.error.ts @@ -0,0 +1,12 @@ +import { ApplicationError } from 'n8n-workflow'; +import type { DataSourceOptions } from '@n8n/typeorm'; + +export class PostgresConnectionError extends ApplicationError { + constructor(error: unknown, pgOptions: DataSourceOptions) { + super('Failed to connect to Postgres - check your Postgres configuration', { + level: 'warning', + cause: error, + extra: { postgresConfig: { pgOptions } }, + }); + } +} diff --git a/packages/cli/src/benchmark/lib/hooks/n8nDir.ts b/packages/cli/src/benchmark/lib/hooks/n8nDir.ts new file mode 100644 index 0000000000000..21f0acf93392d --- /dev/null +++ b/packages/cli/src/benchmark/lib/hooks/n8nDir.ts @@ -0,0 +1,37 @@ +import { tmpdir } from 'node:os'; +import path from 'node:path'; +import { mkdirSync, mkdtempSync, writeFileSync } from 'node:fs'; +import Container from 'typedi'; +import { InstanceSettings } from 'n8n-core'; +import { log } from '../log'; + +/** + * Create a temp .n8n user dir for benchmarking. + */ +export function n8nDir() { + const tempBaseDir = path.join(tmpdir(), 'n8n-benchmarks/'); + + mkdirSync(tempBaseDir, { recursive: true }); + + const tempUserHomeDir = mkdtempSync(tempBaseDir); + + const tempN8nDir = path.join(tempUserHomeDir, '.n8n'); + + mkdirSync(tempN8nDir); + + writeFileSync( + path.join(tempN8nDir, 'config'), + JSON.stringify({ encryptionKey: 'temp-encryption-key', instanceId: 'temp-123' }), + 'utf-8', + ); + + process.env.N8N_USER_FOLDER = tempUserHomeDir; + + /** + * `typedi` has already instantiated `InstanceSettings` using the default user home, + * so re-instantiate it to ensure it picks up the temp user home dir path. 
+ */ + Container.set(InstanceSettings, new InstanceSettings()); + + log('Created temp dir', tempN8nDir); +} diff --git a/packages/cli/src/benchmark/lib/hooks/repository-extensions.ts b/packages/cli/src/benchmark/lib/hooks/repository-extensions.ts new file mode 100644 index 0000000000000..33cf62668cb83 --- /dev/null +++ b/packages/cli/src/benchmark/lib/hooks/repository-extensions.ts @@ -0,0 +1,26 @@ +import { Service } from 'typedi'; +import { hash } from 'bcryptjs'; +import { UserRepository } from '@/databases/repositories/user.repository'; +import { INSTANCE_ONWER } from '../constants'; + +@Service() +export class UserRepositoryExtension extends UserRepository { + async deleteAll() { + await this.delete({}); + } + + async createInstanceOwner() { + const user = this.create({ + email: INSTANCE_ONWER.EMAIL, + password: await hash(INSTANCE_ONWER.PASSWORD, 10), + firstName: INSTANCE_ONWER.FIRST_NAME, + lastName: INSTANCE_ONWER.LAST_NAME, + role: 'global:owner', + apiKey: INSTANCE_ONWER.API_KEY, + }); + + user.computeIsOwner(); + + return await this.save(user); + } +} diff --git a/packages/cli/src/benchmark/lib/hooks/seed.ts b/packages/cli/src/benchmark/lib/hooks/seed.ts new file mode 100644 index 0000000000000..ac58623ee25c6 --- /dev/null +++ b/packages/cli/src/benchmark/lib/hooks/seed.ts @@ -0,0 +1,44 @@ +import path from 'node:path'; +import glob from 'fast-glob'; +import { jsonParse } from 'n8n-workflow'; +import { readFile } from 'fs/promises'; +import type { WorkflowRequest } from '@/workflows/workflow.request'; +import { agent, authenticateAgent } from '../agent'; +import Container from 'typedi'; +import { UserRepositoryExtension } from './repository-extensions'; +import { log } from '../log'; + +export async function seedInstanceOwner() { + await Container.get(UserRepositoryExtension).deleteAll(); + + const user = await Container.get(UserRepositoryExtension).createInstanceOwner(); + + log('Seeded user in DB', user.email); +} + +export async function seedWorkflows() { + const _files = await glob('suites/workflows/*.json', { + cwd: path.join('dist', 'benchmark'), + absolute: true, + }); + + const payloads: WorkflowRequest.CreateUpdatePayload[] = []; + + for (const f of _files) { + const json = await readFile(f, 'utf8'); + const payload = jsonParse(json); + payloads.push(payload); + } + + await authenticateAgent(); + + for (const p of payloads) { + await agent.post('/rest/workflows', p); + } + + const files = _files.map((f) => f.replace(/.*workflows\//, '')).join(' '); + + log('Seeded workflows in DB', files); + + return files; +} diff --git a/packages/cli/src/benchmark/lib/hooks/setup-and-teardown.ts b/packages/cli/src/benchmark/lib/hooks/setup-and-teardown.ts new file mode 100644 index 0000000000000..90775f844c9f3 --- /dev/null +++ b/packages/cli/src/benchmark/lib/hooks/setup-and-teardown.ts @@ -0,0 +1,42 @@ +import Container from 'typedi'; +import { Config } from '@oclif/core'; +import { Start } from '@/commands/start'; +import { n8nDir } from './n8nDir'; +import { ActiveWorkflowRunner } from '@/ActiveWorkflowRunner'; +import { seedInstanceOwner, seedWorkflows } from './seed'; +import { log } from '../log'; +import { postgresSetup, postgresTeardown } from '../postgres'; +import * as Db from '@/Db'; +import config from '@/config'; + +let main: Start; + +const dbType = config.getEnv('database.type'); + +export async function setup() { + n8nDir(); + + log('Selected DB type', dbType); + + if (dbType === 'postgresdb') await postgresSetup(); + + main = new Start([], new Config({ root: 
__dirname })); + + await main.init(); + await main.run(); + + await seedInstanceOwner(); + const files = await seedWorkflows(); + + await Container.get(ActiveWorkflowRunner).init(); + + log('Activated workflows', files); +} + +export async function teardown() { + await main.stopProcess(); + + await Db.close(); + + if (dbType === 'postgresdb') await postgresTeardown(); +} diff --git a/packages/cli/src/benchmark/lib/index.ts b/packages/cli/src/benchmark/lib/index.ts new file mode 100644 index 0000000000000..5745c94153f7c --- /dev/null +++ b/packages/cli/src/benchmark/lib/index.ts @@ -0,0 +1,8 @@ +import 'reflect-metadata'; + +export { suite, task, beforeEachTask, afterEachTask, collectSuites } from './api'; +export { agent } from './agent'; +export { setup, teardown } from './hooks/setup-and-teardown'; +export type { Suites } from './types'; + +export { log, logResults, toOneLineJson } from './log'; diff --git a/packages/cli/src/benchmark/lib/log.ts b/packages/cli/src/benchmark/lib/log.ts new file mode 100644 index 0000000000000..d5009000de9f5 --- /dev/null +++ b/packages/cli/src/benchmark/lib/log.ts @@ -0,0 +1,135 @@ +import pico from 'picocolors'; +import { assert } from 'n8n-workflow'; +import config from '@/config'; +import type Bench from 'tinybench'; +import type { Suites } from './types'; + +export const toOneLineJson = (obj: object) => + JSON.stringify(obj) + .replace(/:/g, ': ') + .replace(/,/g, ', ') + .replace(/"/g, '') + .replace(/^\{/, '{ ') + .replace(/\}$/, ' }'); + +export function log(message: string, details?: string) { + const parts = [pico.magenta('[benchmarking]'), message]; + + if (details) { + parts[parts.length - 1] += ':'; + parts.push(pico.dim(details)); + } + + console.log(parts.join(' ')); +} + +const indentation = { + first: ' ', + second: ' '.repeat(4), + third: ' '.repeat(6), +}; + +function truncate(n: number, decimalPlaces = 3) { + const nStr = n.toString(); + + const truncated = nStr.slice(0, nStr.indexOf('.') + decimalPlaces + 1); + + if (truncated.length === 5) return '0' + truncated; + + return truncated; +} + +const toDirsAndFileName = (key: string) => { + const segments = key.split('/'); + const dirs = segments.slice(0, -1).join('/') + '/'; + const [fileName] = segments.slice(-1); + + return [dirs, fileName]; +}; + +export function logResults(suites: Suites, results: Bench['results']) { + const dbType = config.getEnv('database.type') === 'postgresdb' ? 
'postgres' : 'sqlite'; + const columnDivider = pico.dim('·'.repeat(3)); + + for (const [key, suite] of Object.entries(suites)) { + const [dirs, fileName] = toDirsAndFileName(key); + + const title = [ + '\n', + pico.bgWhite(pico.black(' BENCHMARK ')), + pico.gray(dirs) + pico.bold(fileName), + pico.dim('[' + dbType + ']'), + '\n', + ].join(' '); + + console.log(title); + + for (const task of suite.tasks) { + console.log(indentation.first, pico.white('•'), task.name); + + const result = results.shift(); + + assert(result !== undefined); + + const { totalTime, samples, sd, hz, moe, sem } = result; + + const zerothLine = [ + indentation.second + pico.dim('·'), + pico.dim('Ran'), + pico.magenta(samples.length), + pico.dim('iterations in'), + pico.magenta(truncate(totalTime) + ' ms'), + pico.dim('at a rate of'), + pico.magenta(truncate(hz) + ' op/s'), + ].join(' '); + + console.log(zerothLine); + + const [p75, p99, p999] = [result.p75, result.p99, result.p999].map((n) => truncate(n)); + + const firstLine = [ + indentation.second + pico.dim('·'), + pico.dim('p75'), + pico.magenta(p75 + ' ms'), + columnDivider, + pico.dim('p99'), + pico.magenta(p99 + ' ms'), + columnDivider, + pico.dim('p999'), + pico.magenta(p999 + ' ms'), + ].join(' '); + + console.log(firstLine); + + const [min, max, mean] = [result.min, result.max, result.mean].map((n) => truncate(n)); + + const secondLine = [ + indentation.second + pico.dim('·'), + pico.dim('min'), + pico.magenta(min + ' ms'), + columnDivider, + pico.dim('max'), + pico.magenta(max + ' ms'), + columnDivider, + pico.dim('mean'), + pico.magenta(mean + ' ms'), + ].join(' '); + + console.log(secondLine); + + const thirdLine = [ + indentation.second + pico.dim('·'), + pico.dim('MoE'), + pico.magenta('±' + truncate(moe, 1) + '%'), + columnDivider, + pico.dim('std err'), + pico.magenta(truncate(sem) + ' ms'), + columnDivider, + pico.dim('std dev'), + pico.magenta(truncate(sd) + ' ms'), + ].join(' '); + + console.log(thirdLine + '\n'); + } + } +} diff --git a/packages/cli/src/benchmark/lib/postgres.ts b/packages/cli/src/benchmark/lib/postgres.ts new file mode 100644 index 0000000000000..23c13dc6658f0 --- /dev/null +++ b/packages/cli/src/benchmark/lib/postgres.ts @@ -0,0 +1,71 @@ +import { DataSource } from '@n8n/typeorm'; +import type { DataSourceOptions } from '@n8n/typeorm'; +import config from '@/config'; +import { log } from './log'; +import { PostgresConnectionError } from './errors/postgres-connection.error'; + +const BENCHMARK_DB_PREFIX = 'n8n_benchmark'; + +const pgOptions: DataSourceOptions = { + type: 'postgres', + database: config.getEnv('database.postgresdb.database'), + host: config.getEnv('database.postgresdb.host'), + port: config.getEnv('database.postgresdb.port'), + username: config.getEnv('database.postgresdb.user'), + password: config.getEnv('database.postgresdb.password'), + schema: config.getEnv('database.postgresdb.schema'), +}; + +function tenRandomChars() { + const ALPHABET = 'abcdefghijklmnopqrstuvwxyz'; + + let result = ''; + + for (let i = 0; i < 10; i++) { + result += ALPHABET[Math.floor(Math.random() * ALPHABET.length)]; + } + + return result; +} + +export async function postgresSetup() { + const dbName = [BENCHMARK_DB_PREFIX, tenRandomChars(), Date.now()].join('_'); + + let bootstrap: DataSource; + + try { + bootstrap = await new DataSource(pgOptions).initialize(); + } catch (error) { + throw new PostgresConnectionError(error, pgOptions); + } + + await bootstrap.query(`CREATE DATABASE ${dbName};`); + await bootstrap.destroy(); + + 
log('Created temp Postgres DB', dbName); + + config.set('database.postgresdb.database', dbName); +} + +export async function postgresTeardown() { + const bootstrap = new DataSource(pgOptions); + await bootstrap.initialize(); + + const results: Array<{ dbName: string }> = await bootstrap.query( + 'SELECT datname AS "dbName" FROM pg_database', + ); + + const dbNames = results + .filter(({ dbName }) => dbName.startsWith(BENCHMARK_DB_PREFIX)) + .map(({ dbName }) => dbName); + + const promises: Array> = dbNames.map( + async (dbName) => await bootstrap.query(`DROP DATABASE ${dbName};`), + ); + + await Promise.all(promises); + + log('Dropped temp Postgres DB', dbNames.at(0)); + + await bootstrap.destroy(); +} diff --git a/packages/cli/src/benchmark/lib/types.ts b/packages/cli/src/benchmark/lib/types.ts new file mode 100644 index 0000000000000..35830b2052ca6 --- /dev/null +++ b/packages/cli/src/benchmark/lib/types.ts @@ -0,0 +1,20 @@ +export type Suites = { + [key: string]: { + name: string; + hooks: { + beforeEachTask?: Callback; + afterEachTask?: Callback; + }; + tasks: Task[]; + }; +}; + +/** + * Single operation whose execution time to measure repeatedly. + */ +export type Task = { + name: string; + operation: Callback; +}; + +export type Callback = () => void | Promise; diff --git a/packages/cli/src/benchmark/main.ts b/packages/cli/src/benchmark/main.ts new file mode 100644 index 0000000000000..93c21fa065f9a --- /dev/null +++ b/packages/cli/src/benchmark/main.ts @@ -0,0 +1,49 @@ +import 'reflect-metadata'; +import { collectSuites, log, logResults, setup, teardown, toOneLineJson } from './lib'; +import { registerSuites } from './lib/api'; +import config from '@/config'; + +/* eslint-disable import/no-extraneous-dependencies */ +import Bench from 'tinybench'; +import { withCodSpeed } from '@codspeed/tinybench-plugin'; +/* eslint-enable import/no-extraneous-dependencies */ + +async function main() { + const suites = await collectSuites(); + + log('Found suites', Object.keys(suites).join(' ')); + + await setup(); + + const benchConfig = { + time: config.getEnv('benchmark.time'), + iterations: config.getEnv('benchmark.iterations'), + throws: config.getEnv('benchmark.stopOnError'), + warmupTime: config.getEnv('benchmark.warmupTime'), + warmupIterations: config.getEnv('benchmark.warmupIterations'), + }; + + const _bench = new Bench(benchConfig); + + const bench = process.env.CI === 'true' ? 
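+	// In CI, wrap the bench with the CodSpeed plugin so measurements are reported to CodSpeed;
+	// locally, the plain tinybench results are printed via logResults() at the end of the run.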
withCodSpeed(_bench) : _bench; + + registerSuites(bench); + + await bench.warmup(); + + log('Set config', toOneLineJson(benchConfig)); + + log('Running iterations, please wait...'); + + await bench.run(); + + log('Iterations completed ✓'); + + await teardown(); + + if (process.env.CI !== 'true') logResults(suites, bench.results); + + process.exit(0); +} + +void main(); diff --git a/packages/cli/src/benchmark/scripts/list-suites.ts b/packages/cli/src/benchmark/scripts/list-suites.ts new file mode 100644 index 0000000000000..672f5fefde6d5 --- /dev/null +++ b/packages/cli/src/benchmark/scripts/list-suites.ts @@ -0,0 +1,59 @@ +import fs from 'node:fs/promises'; +import path from 'node:path'; +import { writeFileSync } from 'node:fs'; +import { collectSuites } from '../lib'; +import type { Suites } from '../lib'; + +async function exists(filePath: string) { + try { + await fs.access(filePath); + return true; + } catch { + return false; + } +} + +async function toSuitesList(suites: Suites) { + let list = ''; + + for (const [fullPath, suite] of Object.entries(suites)) { + const suiteId = fullPath.split('/').pop()?.split('-').shift() ?? ''; + + list += `\n### ${suiteId} - ${suite.name}\n\n`; + + for (let i = 0; i < suite.tasks.length; i++) { + const suiteName = suite.tasks[i].name.replace(suite.name, '').trim(); + const relativeWorkflowPath = `./suites/workflows/${suiteId}-${i + 1}.json`; + const absoluteWorkflowPath = path.resolve('src', 'benchmark', relativeWorkflowPath); + + list += (await exists(absoluteWorkflowPath)) + ? `- [${suiteName}](${relativeWorkflowPath})\n` + : `- ${suiteName}\n`; + } + } + + return list; +} + +/** + * Insert an auto-generated list of benchmarking suites into `benchmark.md`. + */ +async function listSuites() { + const filePath = path.resolve('src', 'benchmark', 'benchmark.md'); + const oldDoc = await fs.readFile(filePath, 'utf8'); + + const MARK_START = ''; + const MARK_END = ''; + + const before = oldDoc.slice(0, oldDoc.indexOf(MARK_START) + MARK_START.length); + const after = oldDoc.slice(oldDoc.indexOf(MARK_END)); + + const suites = await collectSuites(); + const suitesList = await toSuitesList(suites); + + const newDoc = [before, suitesList, after].join('\n'); + + writeFileSync(filePath, newDoc); +} + +void listSuites(); diff --git a/packages/cli/src/benchmark/suites/001-production-webhook-with-authless-webhook-node.suite.ts b/packages/cli/src/benchmark/suites/001-production-webhook-with-authless-webhook-node.suite.ts new file mode 100644 index 0000000000000..a31f681298b53 --- /dev/null +++ b/packages/cli/src/benchmark/suites/001-production-webhook-with-authless-webhook-node.suite.ts @@ -0,0 +1,15 @@ +import { task, suite, agent } from '../lib'; + +suite('Production workflow with authless webhook node', () => { + task('using "Respond immediately" mode', async () => { + await agent.get('/webhook/001-1'); + }); + + task('using "When last node finishes" mode', async () => { + await agent.get('/webhook/001-2'); + }); + + task('using "Respond to Webhook node" mode', async () => { + await agent.get('/webhook/001-3'); + }); +}); diff --git a/packages/cli/src/benchmark/suites/workflows/001-1.json b/packages/cli/src/benchmark/suites/workflows/001-1.json new file mode 100644 index 0000000000000..c6217ed2f2be0 --- /dev/null +++ b/packages/cli/src/benchmark/suites/workflows/001-1.json @@ -0,0 +1,25 @@ +{ + "name": "1.1", + "active": true, + "nodes": [ + { + "parameters": { + "path": "001-1", + "options": {} + }, + "id": "000012bb-d534-4e81-a7e4-e62edf816582", + "name": 
"Webhook", + "type": "n8n-nodes-base.webhook", + "typeVersion": 2, + "position": [580, 280], + "webhookId": "d58b0160-2370-417b-bc0e-f3050b0c7adf" + } + ], + "pinData": {}, + "connections": {}, + "settings": { + "executionOrder": "v1" + }, + "versionId": "0cea62e0-b011-46f1-bd03-0011f648d7e9", + "tags": [] +} diff --git a/packages/cli/src/benchmark/suites/workflows/001-2.json b/packages/cli/src/benchmark/suites/workflows/001-2.json new file mode 100644 index 0000000000000..c40a1cba2b68f --- /dev/null +++ b/packages/cli/src/benchmark/suites/workflows/001-2.json @@ -0,0 +1,48 @@ +{ + "name": "1.2", + "active": true, + "nodes": [ + { + "parameters": { + "jsCode": "// Loop over input items and add a new field called 'myNewField' to the JSON of each one\nfor (const item of $input.all()) {\n item.json.myNewField = 1;\n}\n\nreturn $input.all();" + }, + "id": "81af5f75-b4d4-4a99-8f3c-f86e307d75a3", + "name": "Code", + "type": "n8n-nodes-base.code", + "typeVersion": 2, + "position": [780, 280] + }, + { + "parameters": { + "path": "001-2", + "responseMode": "lastNode", + "options": {} + }, + "id": "5d3512bd-6f6d-4b67-a01e-1e2d9735e4f1", + "name": "Webhook", + "type": "n8n-nodes-base.webhook", + "typeVersion": 2, + "position": [580, 280], + "webhookId": "a669d108-7de3-4e72-b332-919a11b2328b" + } + ], + "pinData": {}, + "connections": { + "Webhook": { + "main": [ + [ + { + "node": "Code", + "type": "main", + "index": 0 + } + ] + ] + } + }, + "settings": { + "executionOrder": "v1" + }, + "versionId": "45e84048-db98-4754-80c2-0be5b141de0f", + "tags": [] +} diff --git a/packages/cli/src/benchmark/suites/workflows/001-3.json b/packages/cli/src/benchmark/suites/workflows/001-3.json new file mode 100644 index 0000000000000..46d75d027c891 --- /dev/null +++ b/packages/cli/src/benchmark/suites/workflows/001-3.json @@ -0,0 +1,50 @@ +{ + "name": "1.3", + "active": true, + "nodes": [ + { + "parameters": { + "respondWith": "text", + "responseBody": "Responding from \"Respond to Webhook\" node", + "options": {} + }, + "id": "5716bed4-c3b1-40f0-b5cb-62a14e622fcb", + "name": "Respond to Webhook", + "type": "n8n-nodes-base.respondToWebhook", + "typeVersion": 1.1, + "position": [700, 280] + }, + { + "parameters": { + "path": "001-3", + "responseMode": "responseNode", + "options": {} + }, + "id": "65892623-8bea-42ba-b7ed-d0298f498bce", + "name": "Webhook", + "type": "n8n-nodes-base.webhook", + "typeVersion": 2, + "position": [500, 280], + "webhookId": "c143e038-b0bd-46ca-9708-33b868499c61" + } + ], + "pinData": {}, + "connections": { + "Webhook": { + "main": [ + [ + { + "node": "Respond to Webhook", + "type": "main", + "index": 0 + } + ] + ] + } + }, + "settings": { + "executionOrder": "v1" + }, + "versionId": "08698a35-a4e4-4630-8b3c-3949e78f8620", + "tags": [] +} diff --git a/packages/cli/src/commands/start.ts b/packages/cli/src/commands/start.ts index 3b7ee763a3244..0f5b286ec3e52 100644 --- a/packages/cli/src/commands/start.ts +++ b/packages/cli/src/commands/start.ts @@ -14,7 +14,7 @@ import config from '@/config'; import { ActiveExecutions } from '@/ActiveExecutions'; import { ActiveWorkflowRunner } from '@/ActiveWorkflowRunner'; import { Server } from '@/Server'; -import { EDITOR_UI_DIST_DIR, LICENSE_FEATURES } from '@/constants'; +import { EDITOR_UI_DIST_DIR, LICENSE_FEATURES, inBenchmark } from '@/constants'; import { MessageEventBus } from '@/eventbus/MessageEventBus/MessageEventBus'; import { InternalHooks } from '@/InternalHooks'; import { License } from '@/License'; @@ -114,6 +114,8 @@ export class Start 
extends BaseCommand { await this.exitWithCrash('There was an error shutting down n8n.', error); } + if (inBenchmark) return; + await this.exitSuccessFully(); } @@ -287,7 +289,9 @@ export class Start extends BaseCommand { await this.activeWorkflowRunner.init(); const editorUrl = Container.get(UrlService).baseUrl; - this.log(`\nEditor is now accessible via:\n${editorUrl}`); + this.logger.info(`\nEditor is now accessible via:\n${editorUrl}`); + + if (inBenchmark) return; // Allow to open n8n editor by pressing "o" if (Boolean(process.stdout.isTTY) && process.stdin.setRawMode) { diff --git a/packages/cli/src/config/schema.ts b/packages/cli/src/config/schema.ts index 15e746f2d9837..032a675d3001a 100644 --- a/packages/cli/src/config/schema.ts +++ b/packages/cli/src/config/schema.ts @@ -1452,4 +1452,37 @@ export const schema = { env: 'N8N_PROXY_HOPS', doc: 'Number of reverse-proxies n8n is running behind', }, + + benchmark: { + time: { + doc: 'Length of time (ms) during which to repeatedly run a benchmarking task', + format: Number, + default: 500, + env: 'N8N_BENCHMARK_TIME', + }, + iterations: { + doc: 'Number of times to run a benchmarking task, even if `N8N_BENCHMARK_TIME` is exceeded', + format: Number, + default: 10, + env: 'N8N_BENCHMARK_ITERATIONS', + }, + stopOnError: { + doc: 'Whether to stop benchmarking if an error occurs in a task', + format: Boolean, + default: true, + env: 'N8N_BENCHMARK_STOP_ON_ERROR', + }, + warmupTime: { + doc: 'Length of time (ms) during which to repeatedly run each benchmarking task for warmup', + format: Number, + default: 100, + env: 'N8N_BENCHMARK_WARMUP_TIME', + }, + warmupIterations: { + doc: 'Number of times to run each benchmarking task for warmup, even if `N8N_BENCHMARK_WARMUP_TIME` is exceeded', + format: Number, + default: 5, + env: 'N8N_BENCHMARK_WARMUP_ITERATIONS', + }, + }, }; diff --git a/packages/cli/src/constants.ts b/packages/cli/src/constants.ts index 2253ae832292c..dbc03d52e5a17 100644 --- a/packages/cli/src/constants.ts +++ b/packages/cli/src/constants.ts @@ -7,6 +7,7 @@ const { NODE_ENV, E2E_TESTS } = process.env; export const inProduction = NODE_ENV === 'production'; export const inDevelopment = !NODE_ENV || NODE_ENV === 'development'; export const inTest = NODE_ENV === 'test'; +export const inBenchmark = NODE_ENV === 'benchmark'; export const inE2ETests = E2E_TESTS === 'true'; export const CUSTOM_API_CALL_NAME = 'Custom API Call'; diff --git a/packages/cli/src/workflows/workflow.request.ts b/packages/cli/src/workflows/workflow.request.ts index 77d653a2a052c..44f837c9b3bcc 100644 --- a/packages/cli/src/workflows/workflow.request.ts +++ b/packages/cli/src/workflows/workflow.request.ts @@ -10,6 +10,8 @@ import type { } from 'n8n-workflow'; export declare namespace WorkflowRequest { + export type CreatePayload = CreateUpdatePayload; + type CreateUpdatePayload = Partial<{ id: string; // delete if sent name: string; diff --git a/packages/cli/tsconfig.benchmark.json b/packages/cli/tsconfig.benchmark.json new file mode 100644 index 0000000000000..1673d60bfddf9 --- /dev/null +++ b/packages/cli/tsconfig.benchmark.json @@ -0,0 +1,10 @@ +{ + "extends": ["./tsconfig.json", "../../tsconfig.build.json"], + "compilerOptions": { + "rootDir": "src", + "outDir": "dist", + "tsBuildInfoFile": "dist/benchmark.tsbuildinfo" + }, + "include": ["src/**/*.ts", "src/benchmark/**/*.ts", "src/benchmark/**/*.json"], + "exclude": ["test/**"] +} diff --git a/packages/cli/tsconfig.build.json b/packages/cli/tsconfig.build.json index 1e8a2ff7fa476..a48bf1fc7f018 100644 --- 
a/packages/cli/tsconfig.build.json +++ b/packages/cli/tsconfig.build.json @@ -6,5 +6,5 @@ "tsBuildInfoFile": "dist/build.tsbuildinfo" }, "include": ["src/**/*.ts"], - "exclude": ["test/**"] + "exclude": ["test/**", "src/benchmarks/**"] } diff --git a/packages/core/src/InstanceSettings.ts b/packages/core/src/InstanceSettings.ts index e8ab9aa553223..7a3494a76e0e0 100644 --- a/packages/core/src/InstanceSettings.ts +++ b/packages/core/src/InstanceSettings.ts @@ -2,7 +2,7 @@ import path from 'path'; import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs'; import { createHash, randomBytes } from 'crypto'; import { Service } from 'typedi'; -import { ApplicationError, jsonParse } from 'n8n-workflow'; +import { ApplicationError, jsonParse, LoggerProxy as Logger } from 'n8n-workflow'; interface ReadOnlySettings { encryptionKey: string; @@ -71,7 +71,7 @@ export class InstanceSettings { errorMessage: `Error parsing n8n-config file "${this.settingsFile}". It does not seem to be valid JSON.`, }); - if (!inTest) console.info(`User settings loaded from: ${this.settingsFile}`); + if (!inTest) Logger.info(`User settings loaded from: ${this.settingsFile}`); const { encryptionKey, tunnelSubdomain } = settings; diff --git a/patches/@codspeed__tinybench-plugin@3.1.0.patch b/patches/@codspeed__tinybench-plugin@3.1.0.patch new file mode 100644 index 0000000000000..ce4f38306e2a3 --- /dev/null +++ b/patches/@codspeed__tinybench-plugin@3.1.0.patch @@ -0,0 +1,18 @@ +diff --git a/dist/index.cjs.js b/dist/index.cjs.js +index a75964e40eaeff15df33fabd3c0bef20e9b1e5dd..42f2bec4950f13675ddccba675b2c207b2789d42 100644 +--- a/dist/index.cjs.js ++++ b/dist/index.cjs.js +@@ -80,7 +80,12 @@ function withCodSpeed(bench) { + const rawAdd = bench.add; + bench.add = (name, fn, opts) => { + const callingFile = getCallingFile(); +- const uri = `${callingFile}::${name}`; ++ let uri = `${callingFile}::${name}`; ++ if (name.includes('::')) { ++ // ignore calling file misdirecting to benchmark/lib/api.js ++ const [suiteName, taskName] = name.split('::'); ++ uri = `${suiteName} - ${taskName}` ++ } + const options = Object.assign({}, opts ?? 
{}, { uri }); + return rawAdd.bind(bench)(name, fn, options); + }; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index c1729788939c1..c00cb03d93f84 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -19,6 +19,9 @@ overrides: typescript: ^5.4.2 patchedDependencies: + '@codspeed/tinybench-plugin@3.1.0': + hash: dtpxhamve6ldymzame6xpj4atq + path: patches/@codspeed__tinybench-plugin@3.1.0.patch '@sentry/cli@2.17.0': hash: nchnoezkq6p37qaiku3vrpwraq path: patches/@sentry__cli@2.17.0.patch @@ -772,6 +775,9 @@ importers: specifier: 3.22.4 version: 3.22.4(zod@3.22.4) devDependencies: + '@codspeed/tinybench-plugin': + specifier: ^3.1.0 + version: 3.1.0(patch_hash=dtpxhamve6ldymzame6xpj4atq)(tinybench@2.6.0) '@redocly/cli': specifier: ^1.6.0 version: 1.6.0 @@ -853,6 +859,9 @@ importers: ioredis-mock: specifier: ^8.8.1 version: 8.8.1(@types/ioredis-mock@8.2.2)(ioredis@5.3.2) + tinybench: + specifier: ^2.6.0 + version: 2.6.0 ts-essentials: specifier: ^7.0.3 version: 7.0.3(typescript@5.4.2) @@ -4724,6 +4733,30 @@ packages: w3c-keyname: 2.2.6 dev: false + /@codspeed/core@3.1.0: + resolution: {integrity: sha512-oYd7X46QhnRkgRbZkqAoX9i3Fwm17FpunK4Ee5RdrvRYR0Xr93ewH8/O5g6uyTPDOOqDEv1v2KRYtWhVgN+2VQ==} + dependencies: + axios: 1.6.7 + find-up: 6.3.0 + form-data: 4.0.0 + node-gyp-build: 4.7.0 + transitivePeerDependencies: + - debug + dev: true + + /@codspeed/tinybench-plugin@3.1.0(patch_hash=dtpxhamve6ldymzame6xpj4atq)(tinybench@2.6.0): + resolution: {integrity: sha512-yl0WzzUGIXkZzWaw7+2U+xGkuIal1Rs9hS09DtlDZGGAcGRoMMU5d2vyCS8nBrna4hrPQZ5Sx/hIKerO+lqWaw==} + peerDependencies: + tinybench: ^2.3.0 + dependencies: + '@codspeed/core': 3.1.0 + stack-trace: 1.0.0-pre2 + tinybench: 2.6.0 + transitivePeerDependencies: + - debug + dev: true + patched: true + /@colors/colors@1.5.0: resolution: {integrity: sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==} engines: {node: '>=0.1.90'} @@ -11725,7 +11758,6 @@ packages: proxy-from-env: 1.1.0 transitivePeerDependencies: - debug - dev: false /axios@1.6.7(debug@3.2.7): resolution: {integrity: sha512-/hDJGff6/c7u0hDkvkGxR/oy6CbCs8ziCsC7SqmhjfozqiJGc8Z11wrv9z9lYfY4K8l+H9TpjcMDX0xOZmx+RA==} @@ -15409,6 +15441,14 @@ packages: path-exists: 4.0.0 dev: true + /find-up@6.3.0: + resolution: {integrity: sha512-v2ZsoEuVHYy8ZIlYqwPe/39Cy+cFDzp4dXPaxNvkEuouymu+2Jbz0PxpKarJHYJTmv2HWT3O382qY8l4jMWthw==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + dependencies: + locate-path: 7.2.0 + path-exists: 5.0.0 + dev: true + /findup-sync@2.0.0: resolution: {integrity: sha512-vs+3unmJT45eczmcAZ6zMJtxN3l/QXeccaXQx5cu/MeJMhewVfoWZqibRkOxPnmoR59+Zy5hjabfQc6JLSah4g==} engines: {node: '>= 0.10'} @@ -15495,7 +15535,6 @@ packages: optional: true dependencies: debug: 3.2.7(supports-color@5.5.0) - dev: false /follow-redirects@1.15.6(debug@4.3.4): resolution: {integrity: sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==} @@ -19064,6 +19103,13 @@ packages: p-locate: 5.0.0 dev: true + /locate-path@7.2.0: + resolution: {integrity: sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + dependencies: + p-locate: 6.0.0 + dev: true + /lodash-es@4.17.21: resolution: {integrity: sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==} dev: false @@ -20251,7 +20297,6 @@ packages: /node-gyp-build@4.7.0: resolution: {integrity: 
sha512-PbZERfeFdrHQOOXiAKOY0VPbykZy90ndPKk0d+CFDegTKmWp1VgOTz2xACVbr1BjCWxrQp68CXtvNsveFhqDJg==} hasBin: true - dev: false /node-gyp@8.4.1: resolution: {integrity: sha512-olTJRgUtAb/hOXG0E93wZDs5YiJlgbXxTwQAFHyNlRsXQnYzUaF2aGgujZbw+hR8aF4ZG/rST57bWMWD16jr9w==} @@ -20860,6 +20905,13 @@ packages: dependencies: yocto-queue: 0.1.0 + /p-limit@4.0.0: + resolution: {integrity: sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + dependencies: + yocto-queue: 1.0.0 + dev: true + /p-limit@5.0.0: resolution: {integrity: sha512-/Eaoq+QyLSiXQ4lyYV23f14mZRQcXnxfHrN0vCai+ak9G0pp9iEQukIIZq5NccEvwRB8PUnZT0KsOoDCINS1qQ==} engines: {node: '>=18'} @@ -20887,6 +20939,13 @@ packages: p-limit: 3.1.0 dev: true + /p-locate@6.0.0: + resolution: {integrity: sha512-wPrq66Llhl7/4AGC6I+cqxT07LhXvWL08LNXz1fENOw0Ap4sRZZ/gZpTTJ5jpurzzzfS2W/Ge9BY3LgLjCShcw==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + dependencies: + p-limit: 4.0.0 + dev: true + /p-map@4.0.0: resolution: {integrity: sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==} engines: {node: '>=10'} @@ -21060,6 +21119,11 @@ packages: resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} engines: {node: '>=8'} + /path-exists@5.0.0: + resolution: {integrity: sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + dev: true + /path-is-absolute@1.0.1: resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} engines: {node: '>=0.10.0'} @@ -23787,6 +23851,11 @@ packages: /stack-trace@0.0.10: resolution: {integrity: sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg==} + /stack-trace@1.0.0-pre2: + resolution: {integrity: sha512-2ztBJRek8IVofG9DBJqdy2N5kulaacX30Nz7xmkYF6ale9WBVmIy6mFBchvGX7Vx/MyjBhx+Rcxqrj+dbOnQ6A==} + engines: {node: '>=16'} + dev: true + /stack-utils@2.0.6: resolution: {integrity: sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==} engines: {node: '>=10'} @@ -24479,8 +24548,8 @@ packages: resolution: {integrity: sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==} dev: true - /tinybench@2.5.1: - resolution: {integrity: sha512-65NKvSuAVDP/n4CqH+a9w2kTlLReS9vhsAP06MWx+/89nMinJyB2icyl58RIcqCmIggpojIGeuJGhjU1aGMBSg==} + /tinybench@2.6.0: + resolution: {integrity: sha512-N8hW3PG/3aOoZAN5V/NSAEDz0ZixDSSt5b/a05iqtpgfLWMSVuCo7w0k2vVvEjdrIoeGqZzweX2WlyioNIHchA==} dev: true /tinypool@0.8.2: @@ -25722,7 +25791,7 @@ packages: picocolors: 1.0.0 std-env: 3.6.0 strip-literal: 2.0.0 - tinybench: 2.5.1 + tinybench: 2.6.0 tinypool: 0.8.2 vite: 5.1.6(sass@1.64.1) vite-node: 1.3.1