diff --git a/observability-test/benchmark.ts b/observability-benchmark/benchmark.ts
similarity index 99%
rename from observability-test/benchmark.ts
rename to observability-benchmark/benchmark.ts
index e8f49d42d..2327c3812
--- a/observability-test/benchmark.ts
+++ b/observability-benchmark/benchmark.ts
@@ -26,7 +26,7 @@ const lessComparator = (a, b) => {
  * a map of functionNames to the percentiles of RAM usage and time spent.
  */
 export async function runBenchmarks(runners: Function[], done: Function) {
-  const nRuns = 10000;
+  const nRuns = 20000;
   const nWarmups = Math.round(nRuns / 8);
   const benchmarkValues = {_totalRuns: nRuns, _warmRuns: nWarmups};
 
diff --git a/observability-test/comparisons.ts b/observability-benchmark/comparisons.ts
similarity index 68%
rename from observability-test/comparisons.ts
rename to observability-benchmark/comparisons.ts
index 45ec32c90..80f27b95f
--- a/observability-test/comparisons.ts
+++ b/observability-benchmark/comparisons.ts
@@ -34,8 +34,15 @@ const {
 } = require('@opentelemetry/sdk-trace-node');
 const {SimpleSpanProcessor} = require('@opentelemetry/sdk-trace-base');
 import {humanizeBytes, humanizeTime, runBenchmarks} from './benchmark';
+const {diag, DiagConsoleLogger, DiagLogLevel} = require('@opentelemetry/api');
+// diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);
 
-const {ObservabilityOptions} = require('../src/instrument');
+const {
+  getTracer,
+  startTrace,
+  traceConfig,
+  ObservabilityOptions,
+} = require('../src/instrument');
 
 const selectSql = 'SELECT 1';
 const updateSql = 'UPDATE FOO SET BAR=1 WHERE BAZ=2';
@@ -124,6 +131,16 @@ async function setup(
   });
 }
 
+interface percentiles {
+  p50: number;
+  p50_s: string;
+}
+
+interface description {
+  ram: percentiles;
+  timeSpent: percentiles;
+}
+
 describe('Benchmarking Database', () => {
   if (!process.env.SPANNER_RUN_BENCHMARKS) {
     console.log(
@@ -207,23 +224,6 @@ describe('Benchmarking Database', () => {
       });
     },
 
-    /*
-    async function databaseGetTransactionAsync() {
-      const tx = await database.getTransction();
-
-      try {
-        await tx!.begin();
-        return await tx!.runUpdate(updateSql);
-      } catch (e) {
-        console.log(e);
-        return null;
-      } finally {
-        console.log('tx.end');
-        tx!.end();
-        console.log('exiting');
-      }
-    },
-    */
     async function databaseRunTransactionAsyncTxRunUpdate() {
       const withTx = async tx => {
         await tx!.begin();
@@ -274,16 +274,6 @@ describe('Benchmarking Database', () => {
     });
   }
 
-  interface percentiles {
-    p50: number;
-    p50_s: string;
-  }
-
-  interface description {
-    ram: percentiles;
-    timeSpent: percentiles;
-  }
-
   it('Database runs compared', async () => {
     const traced = await setItUp(true);
     const untraced = await setItUp(false);
@@ -327,3 +317,151 @@ describe('Benchmarking Database', () => {
 function percentDiff(orig, fresh) {
   return ((Number(fresh) - Number(orig)) * 100.0) / Number(orig);
 }
+
+describe('Benchmark getTracer', () => {
+  it('No tracerProvider/global tracerProvider', async () => {
+    console.log('tracerProvider');
+    const results = await benchmarkStartTrace();
+
+    console.log(
+      `Total Runs: ${results['_totalRuns']}\nWarm up runs: ${results['_warmRuns']}`
+    );
+
+    for (const method in results) {
+      const values = results[method];
+      if (typeof values !== 'object') {
+        continue;
+      }
+      const desc = values as description;
+      const ram = desc.ram;
+      const timeSpent = desc.timeSpent;
+      console.log(`${method}`);
+      console.log(`\tRAM (${ram.p50_s})`);
+      console.log(`\tTimeSpent (${timeSpent.p50_s})`);
+    }
+  });
+});
+
+function benchmarkGetTracer(): Promise<any> {
+  const customTracerProvider = new NodeTracerProvider();
+  let trapDoorCalled = false;
+
+  const runners: Function[] = [
+    function getTracerNullTracerProviderUnsetGlobalTracerProvider() {
+      return getTracer(null);
+    },
+
+    function getTracerDefinedTracerProvider() {
+      return getTracer(customTracerProvider);
+    },
+
+    function getTracerRegisteredGlobally() {
+      if (!trapDoorCalled) {
+        customTracerProvider.register();
+        trapDoorCalled = true;
+      }
+      return getTracer(null);
+    },
+  ];
+
+  return new Promise(resolve => {
+    runBenchmarks(runners, results => {
+      resolve(results);
+    });
+  });
+}
+
+function benchmarkStartSpan(): Promise<any> {
+  const customTracerProvider = new NodeTracerProvider();
+  let trapDoorCalled = false;
+
+  const runners: Function[] = [
+    function withNullTracerProviderUnsetGlobalTracerProvider() {
+      return new Promise(resolve => {
+        getTracer(null).startActiveSpan('aSpan', {}, span => {
+          resolve(span);
+        });
+      });
+    },
+
+    function withTracerDefinedTracerProvider() {
+      return new Promise(resolve => {
+        getTracer(customTracerProvider).startActiveSpan('aSpan', {}, span => {
+          resolve(span);
+        });
+      });
+    },
+
+    function getTracerRegisteredGlobally() {
+      if (!trapDoorCalled) {
+        customTracerProvider.register();
+        trapDoorCalled = true;
+      }
+      return new Promise(resolve => {
+        getTracer(null).startActiveSpan('aSpan', {}, span => {
+          resolve(span);
+        });
+      });
+    },
+  ];
+
+  return new Promise(resolve => {
+    runBenchmarks(runners, results => {
+      resolve(results);
+    });
+  });
+}
+
+function benchmarkStartTrace(): Promise<any> {
+  const customTracerProvider = new NodeTracerProvider();
+  let trapDoorCalled = false;
+
+  const runners: Function[] = [
+    async function withNullTracerProviderUnsetGlobalTracerProvider() {
+      const promise = new Promise(resolve => {
+        const opts: typeof traceConfig = {
+          opts: {tracerProvider: null},
+        };
+        startTrace('aspan', opts, span => {
+          span.end();
+          resolve(span);
+        });
+      });
+      return promise;
+    },
+
+    function withTracerDefinedTracerProvider() {
+      return new Promise(resolve => {
+        const opts: typeof traceConfig = {
+          opts: {tracerProvider: customTracerProvider},
+        };
+        startTrace('aspan', opts, span => {
+          span.end();
+          resolve(span);
+        });
+      });
+    },
+
+    function withTracerRegisteredGlobally() {
+      if (!trapDoorCalled) {
+        customTracerProvider.register();
+        trapDoorCalled = true;
+      }
+      return new Promise(resolve => {
+        const opts: typeof traceConfig = {
+          opts: {tracerProvider: null},
+        };
+        startTrace('aspan', opts, span => {
+          span.end();
+          resolve(span);
+        });
+      });
+    },
+  ];
+
+  return new Promise(resolve => {
+    runBenchmarks(runners, results => {
+      resolve(results);
+    });
+  });
+}
diff --git a/observability-test/spanner.ts b/observability-test/spanner.ts
index 78cab0e3b..0c0c108ac 100644
--- a/observability-test/spanner.ts
+++ b/observability-test/spanner.ts
@@ -45,7 +45,6 @@ const {
   AsyncHooksContextManager,
 } = require('@opentelemetry/context-async-hooks');
 import {promisify} from '@google-cloud/promisify';
-import {runBenchmarks} from './benchmark';
 
 const {ObservabilityOptions} = require('../src/instrument');
 import {SessionPool} from '../src/session-pool';
diff --git a/package.json b/package.json
index e60bba0de..d93a73173 100644
--- a/package.json
+++ b/package.json
@@ -33,6 +33,7 @@
     "samples-test-with-archived": "cd samples/ && npm link ../ && npm test-with-archived && cd ../",
     "samples-test": "cd samples/ && npm link ../ && npm test && cd ../",
     "system-test": "mocha build/system-test --timeout 1600000",
"observability-benchmark": "mocha build/observability-benchmark --timeout 1600000", "observability-test": "mocha build/observability-test --timeout 1600000", "cleanup": "mocha scripts/cleanup.js --timeout 30000", "test": "mocha build/test build/test/common build/observability-test", @@ -43,6 +44,7 @@ "prepare": "npm run compile-protos && npm run compile", "pretest": "npm run compile", "presystem-test": "npm run compile", + "preobservability-benchmark": "npm run compile", "preobservability-test": "npm run compile", "proto": "compileProtos src", "docs-test": "linkinator docs", diff --git a/tsconfig.json b/tsconfig.json index 9b165b9e7..1598feaeb 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -20,6 +20,7 @@ "test/**/*.ts", "system-test/*.ts", "benchmark/*.ts", + "observability-benchmark/*.ts", "observability-test/*.ts" ] }