Merge branch 'main' into optimistic-byte-efficiency
adrianaixba committed Dec 5, 2023
2 parents 3b3f4fb + 4c5e1c2 commit 43011b0
Showing 37 changed files with 481 additions and 131 deletions.
6 changes: 2 additions & 4 deletions .github/workflows/ci.yml
@@ -25,6 +25,7 @@ jobs:
           fetch-depth: 100
       - run: bash core/scripts/github-actions-commit-range.sh
         env:
+          CHROME_PATH: ${{ github.workspace }}/lighthouse/.tmp/chrome-tot/chrome
           GITHUB_CONTEXT_PR_BASE_SHA: ${{ github.event.pull_request.base.sha }}
           GITHUB_CONTEXT_BASE_SHA: ${{ github.event.before }}

@@ -38,11 +39,8 @@ jobs:
       - run: yarn build-all

       # Run pptr tests using ToT Chrome instead of stable default.
-      - name: Define ToT chrome path
-        run: echo "CHROME_PATH=/home/runner/chrome-linux-tot/chrome" >> $GITHUB_ENV
       - name: Install Chrome ToT
-        working-directory: /home/runner
-        run: bash $GITHUB_WORKSPACE/core/scripts/download-chrome.sh && mv chrome-linux chrome-linux-tot
+        run: bash $GITHUB_WORKSPACE/core/scripts/download-chrome.sh

       # Run tests that require headfull Chrome.
       - run: sudo apt-get install xvfb

6 changes: 2 additions & 4 deletions .github/workflows/devtools.yml
@@ -127,6 +127,7 @@ jobs:
       fail-fast: false
     runs-on: ubuntu-latest
     env:
+      CHROME_PATH: ${{ github.workspace }}/lighthouse/.tmp/chrome-tot/chrome
       FORCE_COLOR: true
     name: DevTools smoke ${{ matrix.smoke-test-shard }}/${{ strategy.job-total }}

@@ -159,11 +160,8 @@ jobs:
       - run: yarn build-report
        working-directory: ${{ github.workspace }}/lighthouse

-      - name: Define ToT chrome path
-        run: echo "CHROME_PATH=/home/runner/chrome-linux-tot/chrome" >> $GITHUB_ENV
       - name: Install Chrome ToT
-        working-directory: /home/runner
-        run: bash ${{ github.workspace }}/lighthouse/core/scripts/download-chrome.sh && mv chrome-linux chrome-linux-tot
+        run: bash ${{ github.workspace }}/lighthouse/core/scripts/download-chrome.sh

       - run: mkdir latest-run
         working-directory: ${{ github.workspace }}/lighthouse

12 changes: 4 additions & 8 deletions .github/workflows/smoke.yml
@@ -42,13 +42,12 @@ jobs:

       - name: Define ToT chrome path
         if: matrix.chrome-channel == 'ToT'
-        run: echo "CHROME_PATH=/home/runner/chrome-linux-tot/chrome" >> $GITHUB_ENV
+        run: echo "CHROME_PATH=${{ github.workspace }}/lighthouse/.tmp/chrome-tot/chrome" >> $GITHUB_ENV

       # Chrome Stable is already installed by default.
       - name: Install Chrome ToT
         if: matrix.chrome-channel == 'ToT'
-        working-directory: /home/runner
-        run: bash $GITHUB_WORKSPACE/core/scripts/download-chrome.sh && mv chrome-linux chrome-linux-tot
+        run: bash $GITHUB_WORKSPACE/core/scripts/download-chrome.sh

       - run: yarn install --frozen-lockfile --network-timeout 1000000
       - run: yarn build-report
@@ -127,6 +126,7 @@ jobs:
       fail-fast: false
     runs-on: ubuntu-latest
     env:
+      CHROME_PATH: ${{ github.workspace }}/lighthouse/.tmp/chrome-tot/chrome
       # The total number of shards. Set dynamically when length of *single* matrix variable is
       # computable. See https://github.community/t/get-length-of-strategy-matrix-or-get-all-matrix-options/18342
       SHARD_TOTAL: 3
@@ -146,12 +146,8 @@ jobs:
       - run: yarn build-report
       - run: yarn build-devtools

-      - name: Define ToT chrome path
-        run: echo "CHROME_PATH=/home/runner/chrome-linux-tot/chrome" >> $GITHUB_ENV
-
       - name: Install Chrome ToT
-        working-directory: /home/runner
-        run: bash $GITHUB_WORKSPACE/core/scripts/download-chrome.sh && mv chrome-linux chrome-linux-tot
+        run: bash $GITHUB_WORKSPACE/core/scripts/download-chrome.sh

       - name: yarn test-bundle
         run: yarn test-bundle --shard=${{ matrix.smoke-test-shard }}/$SHARD_TOTAL --retries=2

6 changes: 2 additions & 4 deletions .github/workflows/unit.yml
@@ -18,6 +18,7 @@ jobs:
     runs-on: ubuntu-latest
     name: node ${{ matrix.node }}
     env:
+      CHROME_PATH: ${{ github.workspace }}/lighthouse/.tmp/chrome-tot/chrome
       LATEST_NODE: '18'
       FORCE_COLOR: true

@@ -53,11 +54,8 @@ jobs:
       - run: yarn reset-link

       # Run pptr tests using ToT Chrome instead of stable default.
-      - name: Define ToT chrome path
-        run: echo "CHROME_PATH=/home/runner/chrome-linux-tot/chrome" >> $GITHUB_ENV
       - name: Install Chrome ToT
-        working-directory: /home/runner
-        run: bash $GITHUB_WORKSPACE/core/scripts/download-chrome.sh && mv chrome-linux chrome-linux-tot
+        run: bash $GITHUB_WORKSPACE/core/scripts/download-chrome.sh

       - run: yarn test-proto # Run before unit-core because the roundtrip json is needed for proto tests.

60 changes: 60 additions & 0 deletions core/audits/byte-efficiency/byte-efficiency-audit.js
@@ -13,6 +13,7 @@ import {PageDependencyGraph} from '../../computed/page-dependency-graph.js';
 import {LanternLargestContentfulPaint} from '../../computed/metrics/lantern-largest-contentful-paint.js';
 import {LanternFirstContentfulPaint} from '../../computed/metrics/lantern-first-contentful-paint.js';
 import {LCPImageRecord} from '../../computed/lcp-image-record.js';
+import {NetworkRequest} from '../../lib/network-request.js';

 const str_ = i18n.createIcuMessageFn(import.meta.url, {});

@@ -100,6 +101,65 @@ class ByteEfficiencyAudit extends Audit {
     }
   }

+  /**
+   * Estimates the number of bytes the content of this network record would have consumed on the network based on the
+   * uncompressed size (totalBytes). Uses the actual transfer size from the network record if applicable,
+   * minus the size of the response headers.
+   *
+   * This differs from `estimateTransferSize` only in that is subtracts the response headers from the estimate.
+   *
+   * @param {LH.Artifacts.NetworkRequest|undefined} networkRecord
+   * @param {number} totalBytes Uncompressed size of the resource
+   * @param {LH.Crdp.Network.ResourceType=} resourceType
+   * @return {number}
+   */
+  static estimateCompressedContentSize(networkRecord, totalBytes, resourceType) {
+    if (!networkRecord) {
+      // We don't know how many bytes this asset used on the network, but we can guess it was
+      // roughly the size of the content gzipped.
+      // See https://developers.google.com/web/fundamentals/performance/optimizing-content-efficiency/optimize-encoding-and-transfer for specific CSS/Script examples
+      // See https://discuss.httparchive.org/t/file-size-and-compression-savings/145 for fallback multipliers
+      switch (resourceType) {
+        case 'Stylesheet':
+          // Stylesheets tend to compress extremely well.
+          return Math.round(totalBytes * 0.2);
+        case 'Script':
+        case 'Document':
+          // Scripts and HTML compress fairly well too.
+          return Math.round(totalBytes * 0.33);
+        default:
+          // Otherwise we'll just fallback to the average savings in HTTPArchive
+          return Math.round(totalBytes * 0.5);
+      }
+    }
+
+    // Get the size of the response body on the network.
+    let contentTransferSize = networkRecord.transferSize || 0;
+    if (!NetworkRequest.isContentEncoded(networkRecord)) {
+      // This is not encoded, so we can use resourceSize directly.
+      // This would be equivalent to transfer size minus headers transfer size, but transfer size
+      // may also include bytes for SSL connection etc.
+      contentTransferSize = networkRecord.resourceSize;
+    } else if (networkRecord.responseHeadersTransferSize) {
+      // Subtract the size of the encoded headers.
+      contentTransferSize =
+        Math.max(0, contentTransferSize - networkRecord.responseHeadersTransferSize);
+    }
+
+    if (networkRecord.resourceType === resourceType) {
+      // This was a regular standalone asset, just use the transfer size.
+      return contentTransferSize;
+    } else {
+      // This was an asset that was inlined in a different resource type (e.g. HTML document).
+      // Use the compression ratio of the resource to estimate the total transferred bytes.
+      const resourceSize = networkRecord.resourceSize || 0;
+      // Get the compression ratio, if it's an invalid number, assume no compression.
+      const compressionRatio = Number.isFinite(resourceSize) && resourceSize > 0 ?
+        (contentTransferSize / resourceSize) : 1;
+      return Math.round(totalBytes * compressionRatio);
+    }
+  }
+
   /**
    * @param {LH.Artifacts} artifacts
    * @param {LH.Audit.Context} context

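Note on the new estimator above: when no network record is available, it falls back to fixed, type-based compression multipliers. A minimal illustration (not part of this commit; the byte count and the 'Image' type are hypothetical inputs):

// With no network record, the type-based fallback applies:
ByteEfficiencyAudit.estimateCompressedContentSize(undefined, 100_000, 'Stylesheet'); // 20000 (0.2x)
ByteEfficiencyAudit.estimateCompressedContentSize(undefined, 100_000, 'Script');     // 33000 (0.33x)
ByteEfficiencyAudit.estimateCompressedContentSize(undefined, 100_000, 'Image');      // 50000 (0.5x default)
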
39 changes: 21 additions & 18 deletions core/audits/byte-efficiency/legacy-javascript.js
@@ -391,34 +391,37 @@ class LegacyJavascript extends ByteEfficiencyAudit {
   }

   /**
-   * Utility function to estimate transfer size and cache calculation.
+   * Utility function to estimate the ratio of the compression on the resource.
+   * This excludes the size of the response headers.
+   * Also caches the calculation.
    *
    * Note: duplicated-javascript does this exact thing. In the future, consider
-   * making a generic estimator on ByteEfficienyAudit.
-   * @param {Map<string, number>} transferRatioByUrl
+   * making a generic estimator on ByteEfficiencyAudit.
+   * @param {Map<string, number>} compressionRatioByUrl
    * @param {string} url
    * @param {LH.Artifacts} artifacts
    * @param {Array<LH.Artifacts.NetworkRequest>} networkRecords
    */
-  static async estimateTransferRatioForScript(transferRatioByUrl, url, artifacts, networkRecords) {
-    let transferRatio = transferRatioByUrl.get(url);
-    if (transferRatio !== undefined) return transferRatio;
+  static async estimateCompressionRatioForContent(compressionRatioByUrl, url,
+      artifacts, networkRecords) {
+    let compressionRatio = compressionRatioByUrl.get(url);
+    if (compressionRatio !== undefined) return compressionRatio;

     const script = artifacts.Scripts.find(script => script.url === url);

-    if (!script || script.content === null) {
+    if (!script) {
       // Can't find content, so just use 1.
-      transferRatio = 1;
+      compressionRatio = 1;
     } else {
       const networkRecord = getRequestForScript(networkRecords, script);
-      const contentLength = script.length || 0;
-      const transferSize =
-        ByteEfficiencyAudit.estimateTransferSize(networkRecord, contentLength, 'Script');
-      transferRatio = transferSize / contentLength;
+      const contentLength = networkRecord?.resourceSize || script.length || 0;
+      const compressedSize =
+        ByteEfficiencyAudit.estimateCompressedContentSize(networkRecord, contentLength, 'Script');
+      compressionRatio = compressedSize / contentLength;
     }

-    transferRatioByUrl.set(url, transferRatio);
-    return transferRatio;
+    compressionRatioByUrl.set(url, compressionRatio);
+    return compressionRatio;
   }

   /**
@@ -443,14 +446,14 @@ class LegacyJavascript extends ByteEfficiencyAudit {
     ]);

     /** @type {Map<string, number>} */
-    const transferRatioByUrl = new Map();
+    const compressionRatioByUrl = new Map();

     const scriptToMatchResults =
       this.detectAcrossScripts(matcher, artifacts.Scripts, networkRecords, bundles);
     for (const [script, matches] of scriptToMatchResults.entries()) {
-      const transferRatio = await this.estimateTransferRatioForScript(
-        transferRatioByUrl, script.url, artifacts, networkRecords);
-      const wastedBytes = Math.round(this.estimateWastedBytes(matches) * transferRatio);
+      const compressionRatio = await this.estimateCompressionRatioForContent(
+        compressionRatioByUrl, script.url, artifacts, networkRecords);
+      const wastedBytes = Math.round(this.estimateWastedBytes(matches) * compressionRatio);
       /** @type {typeof items[number]} */
       const item = {
         url: script.url,

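The renamed helper above is memoized per URL. A rough usage sketch (assumes `artifacts` and `networkRecords` are already in scope; the URL is hypothetical, not from this commit):

// The ratio is computed once per URL and cached in the map the caller passes in.
const compressionRatioByUrl = new Map();
const ratio = await LegacyJavascript.estimateCompressionRatioForContent(
  compressionRatioByUrl, 'https://example.com/app.js', artifacts, networkRecords);
// A second call for the same URL returns the cached value without recomputing.
const again = await LegacyJavascript.estimateCompressionRatioForContent(
  compressionRatioByUrl, 'https://example.com/app.js', artifacts, networkRecords);
// ratio === again
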
13 changes: 8 additions & 5 deletions core/audits/dobetterweb/uses-http2.js
@@ -152,16 +152,19 @@ class UsesHTTP2Audit extends Audit {
    * @param {LH.Artifacts.EntityClassification} classifiedEntities
    * @return {boolean}
    */
-  static isStaticAsset(networkRequest, classifiedEntities) {
+  static isMultiplexableStaticAsset(networkRequest, classifiedEntities) {
     if (!STATIC_RESOURCE_TYPES.has(networkRequest.resourceType)) return false;

     // Resources from third-parties that are less than 100 bytes are usually tracking pixels, not actual resources.
     // They can masquerade as static types though (gifs, documents, etc)
     if (networkRequest.resourceSize < 100) {
-      // This logic needs to be revisited.
-      // See https://github.com/GoogleChrome/lighthouse/issues/14661
       const entity = classifiedEntities.entityByUrl.get(networkRequest.url);
-      if (entity && !entity.isUnrecognized) return false;
+      if (entity) {
+        // Third-party assets are multiplexable in their first-party context.
+        if (classifiedEntities.firstParty?.name === entity.name) return true;
+        // Skip recognizable third-parties' requests.
+        if (!entity.isUnrecognized) return false;
+      }
     }

     return true;
@@ -199,7 +202,7 @@ class UsesHTTP2Audit extends Audit {
     /** @type {Map<string, Array<LH.Artifacts.NetworkRequest>>} */
     const groupedByOrigin = new Map();
     for (const record of networkRecords) {
-      if (!UsesHTTP2Audit.isStaticAsset(record, classifiedEntities)) continue;
+      if (!UsesHTTP2Audit.isMultiplexableStaticAsset(record, classifiedEntities)) continue;
       if (UrlUtils.isLikeLocalhost(record.parsedURL.host)) continue;
       const existing = groupedByOrigin.get(record.parsedURL.securityOrigin) || [];
       existing.push(record);

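For clarity on the behavior change above, a hand-built sketch with stand-in objects (names and sizes are hypothetical, not from this commit; only the properties the method reads are provided, and it assumes 'Image' is among the audit's static resource types):

// A tiny first-party asset is now kept (multiplexable on the page's own connection),
// while a recognized third party's tracking pixel is still skipped.
const firstParty = {name: 'Example Site', isUnrecognized: false};
const tracker = {name: 'Some Analytics', isUnrecognized: false};
const classifiedEntities = {
  firstParty,
  entityByUrl: new Map([
    ['https://example.com/tiny.png', firstParty],
    ['https://tracker.example/pixel.gif', tracker],
  ]),
};
const tinyFirstPartyImage = {url: 'https://example.com/tiny.png', resourceType: 'Image', resourceSize: 80};
const trackingPixel = {url: 'https://tracker.example/pixel.gif', resourceType: 'Image', resourceSize: 80};
UsesHTTP2Audit.isMultiplexableStaticAsset(tinyFirstPartyImage, classifiedEntities); // true
UsesHTTP2Audit.isMultiplexableStaticAsset(trackingPixel, classifiedEntities);       // false
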
2 changes: 1 addition & 1 deletion core/computed/entity-classification.js
@@ -59,7 +59,7 @@ class EntityClassification {
     // Make up an entity only for valid http/https URLs.
     if (!parsedUrl.protocol.startsWith('http')) return;

-    const rootDomain = Util.getRootDomain(url);
+    const rootDomain = UrlUtils.getRootDomain(url);
     if (!rootDomain) return;
     if (entityCache.has(rootDomain)) return entityCache.get(rootDomain);

4 changes: 2 additions & 2 deletions core/computed/resource-summary.js
@@ -9,7 +9,7 @@ import {makeComputedArtifact} from './computed-artifact.js';
 import {NetworkRecords} from './network-records.js';
 import {NetworkRequest} from '../lib/network-request.js';
 import {Budget} from '../config/budget.js';
-import {Util} from '../../shared/util.js';
+import UrlUtils from '../lib/url-utils.js';

 /** @typedef {{count: number, resourceSize: number, transferSize: number}} ResourceEntry */

@@ -59,7 +59,7 @@ class ResourceSummary {
       firstPartyHosts = budget.options.firstPartyHostnames;
     } else {
       firstPartyHosts = classifiedEntities.firstParty?.domains.map(domain => `*.${domain}`) ||
-        [`*.${Util.getRootDomain(URLArtifact.finalDisplayedUrl)}`];
+        [`*.${UrlUtils.getRootDomain(URLArtifact.finalDisplayedUrl)}`];
     }

     networkRecords.filter(record => {

4 changes: 2 additions & 2 deletions core/config/constants.js
@@ -80,8 +80,8 @@ const screenEmulationMetrics = {
 };


-const MOTOG4_USERAGENT = 'Mozilla/5.0 (Linux; Android 11; moto g power (2022)) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Mobile Safari/537.36'; // eslint-disable-line max-len
-const DESKTOP_USERAGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36'; // eslint-disable-line max-len
+const MOTOG4_USERAGENT = 'Mozilla/5.0 (Linux; Android 11; moto g power (2022)) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Mobile Safari/537.36'; // eslint-disable-line max-len
+const DESKTOP_USERAGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36'; // eslint-disable-line max-len

 const userAgents = {
   mobile: MOTOG4_USERAGENT,

2 changes: 1 addition & 1 deletion core/gather/navigation-runner.js
@@ -220,7 +220,7 @@ async function _navigation(navigationContext) {

   // Every required url is initialized to an empty string in `getBaseArtifacts`.
   // If we haven't set all the required urls yet, set them here.
-  if (!Object.values(phaseState.baseArtifacts).every(Boolean)) {
+  if (!Object.values(phaseState.baseArtifacts.URL).every(Boolean)) {
     phaseState.baseArtifacts.URL = {
       requestedUrl: navigateResult.requestedUrl,
       mainDocumentUrl: navigateResult.mainDocumentUrl,

13 changes: 13 additions & 0 deletions core/lib/network-recorder.js
@@ -138,6 +138,18 @@ class NetworkRecorder extends RequestEventEmitter {
     request.onResponseReceived(data);
   }

+  /**
+   * @param {{params: LH.Crdp.Network.ResponseReceivedExtraInfoEvent, targetType: LH.Protocol.TargetType, sessionId?: string}} event
+   */
+  onResponseReceivedExtraInfo(event) {
+    const data = event.params;
+    const request = this._findRealRequestAndSetSession(
+      data.requestId, event.targetType, event.sessionId);
+    if (!request) return;
+    log.verbose('network', `${request.url} response received extra info`);
+    request.onResponseReceivedExtraInfo(data);
+  }
+
   /**
    * @param {{params: LH.Crdp.Network.DataReceivedEvent, targetType: LH.Protocol.TargetType, sessionId?: string}} event
    */
@@ -196,6 +208,7 @@ class NetworkRecorder extends RequestEventEmitter {
       case 'Network.requestWillBeSent': return this.onRequestWillBeSent(event);
       case 'Network.requestServedFromCache': return this.onRequestServedFromCache(event);
       case 'Network.responseReceived': return this.onResponseReceived(event);
+      case 'Network.responseReceivedExtraInfo': return this.onResponseReceivedExtraInfo(event);
       case 'Network.dataReceived': return this.onDataReceived(event);
       case 'Network.loadingFinished': return this.onLoadingFinished(event);
       case 'Network.loadingFailed': return this.onLoadingFailed(event);