Skip to content

Commit

Permalink
add frame duration
Browse files — browse the repository at this point in the history
  • Loading branch information
ricokahler committed Sep 26, 2024
1 parent 524bed5 commit eb2cb64
Show file tree
Hide file tree
Showing 2 changed files with 63 additions and 20 deletions.
46 changes: 26 additions & 20 deletions perf/efps/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@ import {exec} from './helpers/exec'
import {runTest} from './runTest'
import article from './tests/article/article'
import recipe from './tests/recipe/recipe'
// import singleString from './tests/singleString/singleString'
import synthetic from './tests/synthetic/synthetic'
import {type EfpsResult} from './types'

Expand Down Expand Up @@ -81,6 +80,16 @@ await exec({
const baseSanityPkgPath = path.join(tmpDir, 'node_modules', 'sanity')
const localSanityPkgPath = path.dirname(fileURLToPath(import.meta.resolve('sanity/package.json')))

// const compareTag = 'v3.57.0'

// await exec({
// command: `pnpm install sanity@${compareTag}`,
// cwd: tmpDir,
// spinner,
// text: [`Downloading sanity@${compareTag} package…`, `Downloaded sanity@${compareTag}`],
// })
// const compareSanityPkgPath = path.join(tmpDir, 'node_modules', 'sanity')

await exec({
text: ['Ensuring playwright is installed…', 'Playwright is installed'],
command: 'npx playwright install',
Expand Down Expand Up @@ -125,7 +134,7 @@ for (let i = 0; i < tests.length; i++) {
const test = tests[i]

const localResults = await runTest({
prefix: `Running test '${test.name}' [${i + 1}/${tests.length}] with local 'sanity'…`,
prefix: `Running test '${test.name}' [${i + 1}/${tests.length}] with local…`,
test,
resultsDir,
client,
Expand All @@ -137,7 +146,7 @@ for (let i = 0; i < tests.length; i++) {

// Run with latest 'sanity' package
const baseResults = await runTest({
prefix: `Running '${test.name}' [${i + 1}/${tests.length}] with 'sanity@latest'…`,
prefix: `Running '${test.name}' [${i + 1}/${tests.length}] with 'sanity@${tag}'…`,
test,
resultsDir,
client,
Expand Down Expand Up @@ -194,23 +203,9 @@ console.log(`
│ The percentage difference of the current branch when compared to \`sanity@${tag}\`.
${chalk.bold('Passed?')}
│ Tests are failed when any of the median eFPS results perform more than 10% worse.
│ Tests are failed when any of the median eFPS results perform more than ${deltaThreshold * 100}% worse.
`)

const markdownRows = testOutput
.flatMap((test) =>
test.results.map((result) => ({
...result,
label: result.label ? `${test.name} (${result.label})` : test.name,
})),
)
.map(
({label, p50, delta, passed}) =>
`| ${label} | ${formatFpsPlain(p50)} | ${formatPercentagePlain(delta)} | ${passed ? '✅' : '🔴'} |`,
)
.join('\n')

// Build the markdown content
const markdown = `
<details>
<summary><strong>⚡️ Editor Performance Report</strong><br/><br/>
Expand All @@ -226,7 +221,18 @@ const markdown = `
| Benchmark | eFPS | vs \`${tag}\` | Passed? |
|-----------| ---: | ------------: | :-----: |
${markdownRows}
${testOutput
.flatMap((test) =>
test.results.map((result) => ({
...result,
label: result.label ? `${test.name} (${result.label})` : test.name,
})),
)
.map(
({label, p50, delta, passed}) =>
`| ${label} | ${formatFpsPlain(p50)} | ${formatPercentagePlain(delta)} | ${passed ? '✅' : '🔴'} |`,
)
.join('\n')}
> **eFPS — Editor "Frames Per Second"**
> The number of renders, aka "frames", that are assumed to be possible within a second.
Expand All @@ -236,7 +242,7 @@ ${markdownRows}
> The percentage difference of the current branch when compared to \`sanity@${tag}\`.
>
> **Passed?**
> Tests are failed when any of the median eFPS results perform more than 10% worse.
> Tests are failed when any of the median eFPS results perform more than ${deltaThreshold * 100}% worse.
`

// Write markdown file to root of results
Expand Down
37 changes: 37 additions & 0 deletions perf/efps/runTest.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,8 @@ import sourcemaps from 'rollup-plugin-sourcemaps'
import handler from 'serve-handler'
import * as vite from 'vite'

import {calculatePercentile} from './helpers/calculatePercentile'
// TODO: add test duration to metrics
import {type EfpsResult, type EfpsTest, type EfpsTestRunnerContext} from './types'

const workspaceDir = path.dirname(fileURLToPath(import.meta.url))
Expand Down Expand Up @@ -123,9 +125,39 @@ export async function runTest({
// await cdp.send('Profiler.start')

log('Benchmarking…')
const durationsPromise = page.evaluate(async () => {
const durations: number[] = []
let last = performance.now()
let done = false

const handleFrame = () => {
const current = performance.now()
durations.push(current - last)
last = current

if (done) return
requestAnimationFrame(handleFrame)
}
requestAnimationFrame(handleFrame)

await new Promise((resolve) => {
window.document.addEventListener('__finish', resolve, {once: true})
})
done = true

return durations
})

const result = await test.run({...runnerContext, document})

await page.evaluate(() => {
window.document.dispatchEvent(new CustomEvent('__finish'))
})

log('Saving results…')
const durations = await durationsPromise
calculatePercentile(durations, 0.5)
console.log({frameDuration: calculatePercentile(durations, 0.5)})
const results = Array.isArray(result) ? result : [result]

// const {profile} = await cdp.send('Profiler.stop')
Expand All @@ -136,6 +168,11 @@ export async function runTest({
path.join(testResultsDir, 'results.json'),
JSON.stringify(results, null, 2),
)
await fs.promises.writeFile(
path.join(testResultsDir, 'durations.json'),
JSON.stringify(durations, null, 2),
)

// await fs.promises.writeFile(
// path.join(testResultsDir, 'raw.cpuprofile'),
// JSON.stringify(profile),
Expand Down

0 comments on commit eb2cb64

Please sign in to comment.