fix: prevent asset conflicts between React and Grid.js versions

Add coexistence checks to all enqueue methods to prevent loading
both React and Grid.js assets simultaneously.

Changes:
- ReactAdmin.php: Only enqueue React assets when ?react=1
- Init.php: Skip Grid.js when React active on admin pages
- Form.php, Coupon.php, Access.php: Restore classic assets when ?react=0
- Customer.php, Product.php, License.php: Add coexistence checks

Now the toggle between Classic and React versions works correctly.

Co-authored-by: Claude Opus 4.7 <noreply@anthropic.com>
This commit is contained in:
dwindown
2026-04-18 17:02:14 +07:00
parent bd9cdac02e
commit e8fbfb14c1
74973 changed files with 6658406 additions and 71 deletions

View File

@@ -0,0 +1,47 @@
/**
* @fileoverview This class is designed to allow maps with arbitrary equality functions.
* It is not meant to be performant and is well-suited to use cases where the number of entries is
* likely to be small (like computed artifacts).
*/
/**
 * A Map-like container keyed by arbitrary values, matched with a configurable
 * equality function (deep equality by default) rather than reference identity.
 * Lookups are linear scans, so keep entry counts small.
 */
export class ArbitraryEqualityMap {
/**
 * Determines whether two objects are deeply equal. Defers to lodash isEqual, but is kept here for
 * easy usage by consumers.
 * See https://lodash.com/docs/4.17.5#isEqual.
 * @param {*} objA
 * @param {*} objB
 * @return {boolean}
 */
static deepEquals(objA: any, objB: any): boolean;
// Equality predicate used by lookups; defaults to deepEquals.
_equalsFn: typeof ArbitraryEqualityMap.deepEquals;
// Backing store; order is insertion order, searched linearly.
/** @type {Array<{key: *, value: *}>} */
_entries: {
key: any;
value: any;
}[];
/**
 * Replaces the equality predicate used to match keys.
 * @param {function(*,*):boolean} equalsFn
 */
setEqualityFn(equalsFn: (arg0: any, arg1: any) => boolean): void;
/**
 * @param {*} key
 * @return {boolean} Whether an entry matching `key` exists.
 */
has(key: any): boolean;
/**
 * @param {*} key
 * @return {*} The stored value, or undefined when absent.
 */
get(key: any): any;
/**
 * Inserts or overwrites the entry for `key`.
 * @param {*} key
 * @param {*} value
 */
set(key: any, value: any): void;
/**
 * @param {*} key
 * @return {number} Index of the matching entry, or -1 when none.
 */
_findIndexOf(key: any): number;
}
//# sourceMappingURL=arbitrary-equality-map.d.ts.map

View File

@@ -0,0 +1,80 @@
/**
* @license Copyright 2018 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import isDeepEqual from 'lodash/isEqual.js';
/**
* @fileoverview This class is designed to allow maps with arbitrary equality functions.
* It is not meant to be performant and is well-suited to use cases where the number of entries is
* likely to be small (like computed artifacts).
*/
/**
 * Map accepting a pluggable equality function for key comparison.
 * Lookups are linear scans, so this is intended for small entry counts
 * (e.g. computed artifacts), not performance-critical paths.
 */
class ArbitraryEqualityMap {
  constructor() {
    // Default to deep equality; callers may override via setEqualityFn().
    this._equalsFn = ArbitraryEqualityMap.deepEquals;
    /** @type {Array<{key: *, value: *}>} */
    this._entries = [];
  }

  /**
   * Replaces the equality predicate used to match keys.
   * @param {function(*,*):boolean} equalsFn
   */
  setEqualityFn(equalsFn) {
    this._equalsFn = equalsFn;
  }

  /**
   * @param {*} key
   * @return {boolean} Whether an entry matching `key` exists.
   */
  has(key) {
    return this._findIndexOf(key) >= 0;
  }

  /**
   * @param {*} key
   * @return {*} The stored value, or undefined when absent.
   */
  get(key) {
    const index = this._findIndexOf(key);
    if (index === -1) return undefined;
    return this._entries[index].value;
  }

  /**
   * Inserts or overwrites the entry for `key`.
   * @param {*} key
   * @param {*} value
   */
  set(key, value) {
    const existing = this._findIndexOf(key);
    const target = existing === -1 ? this._entries.length : existing;
    this._entries[target] = {key, value};
  }

  /**
   * Linear scan for the first entry whose key matches under the current
   * equality function.
   * @param {*} key
   * @return {number} Index of the match, or -1 when none.
   */
  _findIndexOf(key) {
    return this._entries.findIndex(entry => this._equalsFn(key, entry.key));
  }

  /**
   * Determines whether two objects are deeply equal. Defers to lodash isEqual,
   * but is kept here for easy usage by consumers.
   * See https://lodash.com/docs/4.17.5#isEqual.
   * @param {*} objA
   * @param {*} objB
   * @return {boolean}
   */
  static deepEquals(objA, objB) {
    return isDeepEqual(objA, objB);
  }
}
export {ArbitraryEqualityMap};

105
node_modules/lighthouse/core/lib/asset-saver.d.ts generated vendored Normal file
View File

@@ -0,0 +1,105 @@
export type PreparedAssets = {
passName: string;
traceData: LH.Trace;
devtoolsLog: import("../index.js").DevtoolsLog;
};
/**
* Save artifacts object mostly to single file located at basePath/artifacts.json.
* Also save the traces & devtoolsLogs to their own files
* @param {LH.Artifacts} artifacts
* @param {string} basePath
* @return {Promise<void>}
*/
export function saveArtifacts(artifacts: LH.Artifacts, basePath: string): Promise<void>;
/**
* Saves flow artifacts with the following file structure:
* flow/ -- Directory specified by `basePath`.
* options.json -- Flow options (e.g. flow name, flags).
* step0/ -- Directory containing artifacts for the first step.
* options.json -- First step's options (e.g. step flags).
* artifacts.json -- First step's artifacts except the DevTools log and trace.
* defaultPass.devtoolslog.json -- First step's DevTools log.
* defaultPass.trace.json -- First step's trace.
* step1/ -- Directory containing artifacts for the second step.
*
* @param {LH.UserFlow.FlowArtifacts} flowArtifacts
* @param {string} basePath
* @return {Promise<void>}
*/
export function saveFlowArtifacts(flowArtifacts: LH.UserFlow.FlowArtifacts, basePath: string): Promise<void>;
/**
* Save LHR to file located at basePath/lhr.report.json.
* @param {LH.Result} lhr
* @param {string} basePath
*/
export function saveLhr(lhr: LH.Result, basePath: string): void;
/**
* @typedef {object} PreparedAssets
* @property {string} passName
* @property {LH.Trace} traceData
* @property {LH.DevtoolsLog} devtoolsLog
*/
/**
* Load artifacts object from files located within basePath
* Traces & devtools logs are re-attached from their own per-pass files
* @param {string} basePath
* @return {LH.Artifacts}
*/
export function loadArtifacts(basePath: string): LH.Artifacts;
/**
* @param {string} basePath
* @return {LH.UserFlow.FlowArtifacts}
*/
export function loadFlowArtifacts(basePath: string): LH.UserFlow.FlowArtifacts;
/**
* Writes trace(s) and associated asset(s) to disk.
* @param {LH.Artifacts} artifacts
* @param {LH.Result['audits']} audits
* @param {string} pathWithBasename
* @return {Promise<void>}
*/
export function saveAssets(artifacts: LH.Artifacts, audits: LH.Result['audits'], pathWithBasename: string): Promise<void>;
/**
* Filter traces and extract screenshots to prepare for saving.
* @param {LH.Artifacts} artifacts
* @param {LH.Result['audits']} [audits]
* @return {Promise<Array<PreparedAssets>>}
*/
export function prepareAssets(artifacts: LH.Artifacts, audits?: Record<string, import("../../types/lhr/audit-result.js").Result> | undefined): Promise<Array<PreparedAssets>>;
/**
* Save a trace as JSON by streaming to disk at traceFilename.
* @param {LH.Trace} traceData
* @param {string} traceFilename
* @return {Promise<void>}
*/
export function saveTrace(traceData: LH.Trace, traceFilename: string): Promise<void>;
/**
* Save a devtoolsLog as JSON by streaming to disk at devtoolLogFilename.
* @param {LH.DevtoolsLog} devtoolsLog
* @param {string} devtoolLogFilename
* @return {Promise<void>}
*/
export function saveDevtoolsLog(devtoolsLog: import("../index.js").DevtoolsLog, devtoolLogFilename: string): Promise<void>;
/**
* @param {LH.DevtoolsLog} devtoolsLog
* @param {string} outputPath
* @return {Promise<void>}
*/
export function saveLanternNetworkData(devtoolsLog: import("../index.js").DevtoolsLog, outputPath: string): Promise<void>;
/**
* A replacer function for JSON.stringify of the artifacts. Used to serialize objects that
* JSON won't normally handle.
* @param {string} key
* @param {any} value
*/
export function stringifyReplacer(key: string, value: any): any;
/**
* Normalize timing data so it doesn't change every update.
* @param {LH.Result.MeasureEntry[]} timings
*/
export function normalizeTimingEntries(timings: LH.Result.MeasureEntry[]): void;
/**
* @param {LH.Result} lhr
*/
export function elideAuditErrorStacks(lhr: LH.Result): void;
//# sourceMappingURL=asset-saver.d.ts.map

460
node_modules/lighthouse/core/lib/asset-saver.js generated vendored Normal file
View File

@@ -0,0 +1,460 @@
/**
* @license Copyright 2016 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import fs from 'fs';
import path from 'path';
import stream from 'stream';
import url from 'url';
import log from 'lighthouse-logger';
import {Simulator} from './dependency-graph/simulator/simulator.js';
import lanternTraceSaver from './lantern-trace-saver.js';
import {MetricTraceEvents} from './traces/metric-trace-events.js';
import {NetworkAnalysis} from '../computed/network-analysis.js';
import {LoadSimulator} from '../computed/load-simulator.js';
import {LighthouseError} from '../lib/lh-error.js';
import {LH_ROOT} from '../../root.js';
const optionsFilename = 'options.json';
const artifactsFilename = 'artifacts.json';
const traceSuffix = '.trace.json';
const devtoolsLogSuffix = '.devtoolslog.json';
const stepDirectoryRegex = /^step(\d+)$/;
/**
* @typedef {object} PreparedAssets
* @property {string} passName
* @property {LH.Trace} traceData
* @property {LH.DevtoolsLog} devtoolsLog
*/
/**
 * Load artifacts object from files located within basePath.
 * Inverse of `saveArtifacts`: reads artifacts.json (reviving any serialized
 * LighthouseErrors), then re-attaches per-pass traces and devtools logs from
 * their sibling `.trace.json` / `.devtoolslog.json` files.
 * @param {string} basePath
 * @return {LH.Artifacts}
 * @throws {Error} When nothing has been saved at basePath.
 */
function loadArtifacts(basePath) {
log.log('Reading artifacts from disk:', basePath);
if (!fs.existsSync(basePath)) {
throw new Error('No saved artifacts found at ' + basePath);
}
// load artifacts.json using a reviver to deserialize any LighthouseErrors in artifacts.
const artifactsStr = fs.readFileSync(path.join(basePath, artifactsFilename), 'utf8');
/** @type {LH.Artifacts} */
const artifacts = JSON.parse(artifactsStr, LighthouseError.parseReviver);
const filenames = fs.readdirSync(basePath);
// load devtoolsLogs
artifacts.devtoolsLogs = {};
filenames.filter(f => f.endsWith(devtoolsLogSuffix)).forEach(filename => {
// Pass name is the filename minus the '.devtoolslog.json' suffix.
const passName = filename.replace(devtoolsLogSuffix, '');
const devtoolsLog = JSON.parse(fs.readFileSync(path.join(basePath, filename), 'utf8'));
artifacts.devtoolsLogs[passName] = devtoolsLog;
// 'defaultPass' doubles as the canonical top-level DevtoolsLog artifact.
if (passName === 'defaultPass') {
artifacts.DevtoolsLog = devtoolsLog;
}
});
// load traces
artifacts.traces = {};
filenames.filter(f => f.endsWith(traceSuffix)).forEach(filename => {
const file = fs.readFileSync(path.join(basePath, filename), {encoding: 'utf-8'});
const trace = JSON.parse(file);
const passName = filename.replace(traceSuffix, '');
// A saved trace may be a bare event array; normalize to {traceEvents}.
artifacts.traces[passName] = Array.isArray(trace) ? {traceEvents: trace} : trace;
if (passName === 'defaultPass') {
artifacts.Trace = artifacts.traces[passName];
}
});
if (Array.isArray(artifacts.Timing)) {
// Any Timing entries in saved artifacts will have a different timeOrigin than the auditing phase
// The `gather` prop is read later in generate-timing-trace and they're added to a separate track of trace events
artifacts.Timing.forEach(entry => (entry.gather = true));
}
return artifacts;
}
/**
 * Load flow artifacts from disk.
 * Reads the flow-level options.json, then loads each `step<N>` directory via
 * `loadArtifacts`, placing each step at index N of `gatherSteps`.
 * @param {string} basePath
 * @return {LH.UserFlow.FlowArtifacts}
 * @throws {Error} When basePath is missing or a step index has no directory.
 */
function loadFlowArtifacts(basePath) {
log.log('Reading flow artifacts from disk:', basePath);
if (!fs.existsSync(basePath)) {
throw new Error('No saved flow artifacts found at ' + basePath);
}
/** @type {LH.UserFlow.FlowArtifacts} */
const flowArtifacts = JSON.parse(
fs.readFileSync(path.join(basePath, optionsFilename), 'utf-8')
);
const filenames = fs.readdirSync(basePath);
flowArtifacts.gatherSteps = [];
for (const filename of filenames) {
// Only directories named like `step<N>` are treated as flow steps.
const regexResult = stepDirectoryRegex.exec(filename);
if (!regexResult) continue;
const index = Number(regexResult[1]);
if (!Number.isFinite(index)) continue;
const stepPath = path.join(basePath, filename);
if (!fs.lstatSync(stepPath).isDirectory()) continue;
/** @type {LH.UserFlow.GatherStep} */
const gatherStep = JSON.parse(
fs.readFileSync(path.join(stepPath, optionsFilename), 'utf-8')
);
gatherStep.artifacts = loadArtifacts(stepPath);
flowArtifacts.gatherSteps[index] = gatherStep;
}
// Steps may be discovered in any order; a hole in the array means a step
// directory was missing entirely.
const missingStepIndex = flowArtifacts.gatherSteps.findIndex(gatherStep => !gatherStep);
if (missingStepIndex !== -1) {
throw new Error(`Could not find step with index ${missingStepIndex} at ${basePath}`);
}
return flowArtifacts;
}
/**
 * Replacer for JSON.stringify of the artifacts. Serializes values that JSON
 * won't normally handle — currently LighthouseError and other Error types;
 * anything else passes through unchanged.
 * @param {string} key
 * @param {any} value
 */
function stringifyReplacer(key, value) {
  if (!(value instanceof Error)) return value;
  return LighthouseError.stringifyReplacer(value);
}
/**
 * Saves flow artifacts with the following file structure:
 *   flow/              -- Directory specified by `basePath`.
 *     options.json     -- Flow options (e.g. flow name, flags).
 *     step0/           -- Directory containing artifacts for the first step.
 *       options.json   -- First step's options (e.g. step flags).
 *       artifacts.json -- First step's artifacts except the DevTools log and trace.
 *       defaultPass.devtoolslog.json -- First step's DevTools log.
 *       defaultPass.trace.json       -- First step's trace.
 *     step1/           -- Directory containing artifacts for the second step.
 *
 * @param {LH.UserFlow.FlowArtifacts} flowArtifacts
 * @param {string} basePath
 * @return {Promise<void>}
 */
async function saveFlowArtifacts(flowArtifacts, basePath) {
  const status = {msg: 'Saving flow artifacts', id: 'lh:assetSaver:saveArtifacts'};
  log.time(status);
  fs.mkdirSync(basePath, {recursive: true});

  // Delete any previous step directories / flow options in this directory.
  const filenames = fs.readdirSync(basePath);
  for (const filename of filenames) {
    if (stepDirectoryRegex.test(filename) || filename === optionsFilename) {
      // BUG FIX: this previously interpolated a literal `$(unknown)` instead
      // of the matched filename, so stale entries were never removed.
      fs.rmSync(`${basePath}/${filename}`, {recursive: true});
    }
  }

  const {gatherSteps, ...flowOptions} = flowArtifacts;
  for (let i = 0; i < gatherSteps.length; ++i) {
    const {artifacts, ...stepOptions} = gatherSteps[i];
    const stepPath = path.join(basePath, `step${i}`);
    // saveArtifacts creates the step directory and writes traces/logs/artifacts.
    await saveArtifacts(artifacts, stepPath);
    fs.writeFileSync(
      path.join(stepPath, optionsFilename),
      JSON.stringify(stepOptions, stringifyReplacer, 2) + '\n'
    );
  }

  fs.writeFileSync(
    path.join(basePath, optionsFilename),
    JSON.stringify(flowOptions, stringifyReplacer, 2) + '\n'
  );

  log.log('Flow artifacts saved to disk in folder:', basePath);
  log.timeEnd(status);
}
/**
 * Save artifacts object mostly to a single file located at
 * basePath/artifacts.json. Traces & devtoolsLogs are saved to their own
 * per-pass files alongside it.
 * @param {LH.Artifacts} artifacts
 * @param {string} basePath
 * @return {Promise<void>}
 */
async function saveArtifacts(artifacts, basePath) {
  const status = {msg: 'Saving artifacts', id: 'lh:assetSaver:saveArtifacts'};
  log.time(status);
  fs.mkdirSync(basePath, {recursive: true});

  // Delete any previous artifacts in this directory.
  const filenames = fs.readdirSync(basePath);
  for (const filename of filenames) {
    if (filename.endsWith(traceSuffix) || filename.endsWith(devtoolsLogSuffix) ||
        filename === artifactsFilename) {
      // BUG FIX: this previously interpolated a literal `$(unknown)` instead
      // of the matched filename, so stale artifacts were never removed.
      fs.unlinkSync(`${basePath}/${filename}`);
    }
  }

  // `DevtoolsLog` and `Trace` will always be the 'defaultPass' version.
  // We don't need to save them twice, so extract them here.
  // eslint-disable-next-line no-unused-vars
  const {traces, devtoolsLogs, DevtoolsLog, Trace, ...restArtifacts} = artifacts;

  // Save traces (streamed to disk to keep memory bounded).
  for (const [passName, trace] of Object.entries(traces)) {
    await saveTrace(trace, `${basePath}/${passName}${traceSuffix}`);
  }

  // Save devtools logs.
  for (const [passName, devtoolsLog] of Object.entries(devtoolsLogs)) {
    await saveDevtoolsLog(devtoolsLog, `${basePath}/${passName}${devtoolsLogSuffix}`);
  }

  // Save everything else, using a replacer to serialize LighthouseErrors in the artifacts.
  const restArtifactsString = JSON.stringify(restArtifacts, stringifyReplacer, 2) + '\n';
  fs.writeFileSync(`${basePath}/${artifactsFilename}`, restArtifactsString, 'utf8');

  log.log('Artifacts saved to disk in folder:', basePath);
  log.timeEnd(status);
}
/**
 * Writes the LHR as pretty-printed JSON to `basePath/lhr.report.json`.
 * @param {LH.Result} lhr
 * @param {string} basePath
 */
function saveLhr(lhr, basePath) {
  const serialized = JSON.stringify(lhr, null, 2);
  fs.writeFileSync(`${basePath}/lhr.report.json`, serialized);
}
/**
 * Filter traces and extract screenshots to prepare for saving.
 * Each pass produces a shallow-copied trace; when `audits` is provided,
 * fake metric events are appended to the copy's traceEvents.
 * @param {LH.Artifacts} artifacts
 * @param {LH.Result['audits']} [audits]
 * @return {Promise<Array<PreparedAssets>>}
 */
async function prepareAssets(artifacts, audits) {
  /** @type {Array<PreparedAssets>} */
  const assets = [];

  for (const passName of Object.keys(artifacts.traces)) {
    // Shallow copy so the original artifact's traceEvents aren't mutated.
    const traceData = {...artifacts.traces[passName]};
    if (audits) {
      const fakeEvents = new MetricTraceEvents(traceData.traceEvents, audits).generateFakeEvents();
      traceData.traceEvents = traceData.traceEvents.concat(fakeEvents);
    }
    assets.push({
      passName,
      traceData,
      devtoolsLog: artifacts.devtoolsLogs[passName],
    });
  }

  return assets;
}
/**
 * Generates a JSON representation of an array of objects with the objects
 * printed one per line for a more readable (but not too verbose) version.
 * @param {Array<unknown>} arrayOfObjects
 * @return {IterableIterator<string>}
 */
function* arrayOfObjectsJsonGenerator(arrayOfObjects) {
  const ITEMS_PER_ITERATION = 500;

  yield '[\n';
  if (arrayOfObjects.length > 0) {
    const iterator = arrayOfObjects[Symbol.iterator]();
    // The first item is emitted on its own so every later item can prepend
    // its comma, avoiding a trailing comma at the end.
    yield ` ${JSON.stringify(iterator.next().value)}`;

    // Buffer stringified items and flush every ITEMS_PER_ITERATION items to
    // avoid building one giant string in memory.
    let buffered = '';
    let untilFlush = ITEMS_PER_ITERATION;
    for (const item of iterator) {
      buffered += `,\n ${JSON.stringify(item)}`;
      if (--untilFlush === 0) {
        yield buffered;
        buffered = '';
        untilFlush = ITEMS_PER_ITERATION;
      }
    }
    yield buffered;
  }
  yield '\n]';
}
/**
 * Generates a JSON representation of traceData line-by-line for a nicer
 * printed version with one trace event per line.
 * @param {LH.Trace} traceData
 * @return {IterableIterator<string>}
 */
function* traceJsonGenerator(traceData) {
  const {traceEvents, ...otherProps} = traceData;

  yield '{\n';
  yield '"traceEvents": ';
  yield* arrayOfObjectsJsonGenerator(traceEvents);

  // Emit the remaining top-level properties (usually just `metadata`).
  for (const [key, value] of Object.entries(otherProps)) {
    yield `,\n"${key}": ${JSON.stringify(value, null, 2)}`;
  }
  yield '}\n';
}
/**
 * Save a trace as JSON by streaming to disk at traceFilename.
 * Streaming (rather than a single JSON.stringify) keeps memory bounded
 * for large traces.
 * @param {LH.Trace} traceData
 * @param {string} traceFilename
 * @return {Promise<void>} Resolves when the write stream has finished.
 */
async function saveTrace(traceData, traceFilename) {
const traceIter = traceJsonGenerator(traceData);
const writeStream = fs.createWriteStream(traceFilename);
return stream.promises.pipeline(traceIter, writeStream);
}
/**
 * Save a devtoolsLog as JSON by streaming to disk at devtoolLogFilename.
 * Events are emitted one per line with a trailing newline.
 * @param {LH.DevtoolsLog} devtoolsLog
 * @param {string} devtoolLogFilename
 * @return {Promise<void>} Resolves when the write stream has finished.
 */
function saveDevtoolsLog(devtoolsLog, devtoolLogFilename) {
const writeStream = fs.createWriteStream(devtoolLogFilename);
return stream.promises.pipeline(function* () {
yield* arrayOfObjectsJsonGenerator(devtoolsLog);
yield '\n';
}, writeStream);
}
/**
 * Writes one simulation trace per recorded lantern simulation label, next to
 * `pathWithBasename`. No-op unless the LANTERN_DEBUG env var is set.
 * @param {string} pathWithBasename
 * @return {Promise<void>}
 */
async function saveLanternDebugTraces(pathWithBasename) {
if (!process.env.LANTERN_DEBUG) return;
for (const [label, nodeTimings] of Simulator.ALL_NODE_TIMINGS) {
// Skip simulations the trace saver marks as uninteresting.
if (lanternTraceSaver.simulationNamesToIgnore.includes(label)) continue;
const traceFilename = `${pathWithBasename}-${label}${traceSuffix}`;
await saveTrace(lanternTraceSaver.convertNodeTimingsToTrace(nodeTimings), traceFilename);
log.log('saveAssets', `${label} lantern trace file streamed to disk: ${traceFilename}`);
}
}
/**
 * Writes trace(s) and associated asset(s) to disk.
 * For each prepared pass, the devtools log is written synchronously and the
 * trace is streamed; lantern debug traces are appended afterward when the
 * LANTERN_DEBUG env var is set.
 * @param {LH.Artifacts} artifacts
 * @param {LH.Result['audits']} audits
 * @param {string} pathWithBasename Path prefix; per-pass suffixes are appended.
 * @return {Promise<void>}
 */
async function saveAssets(artifacts, audits, pathWithBasename) {
const allAssets = await prepareAssets(artifacts, audits);
// All passes are written concurrently; each index gets its own file pair.
const saveAll = allAssets.map(async (passAssets, index) => {
const devtoolsLogFilename = `${pathWithBasename}-${index}${devtoolsLogSuffix}`;
fs.writeFileSync(devtoolsLogFilename, JSON.stringify(passAssets.devtoolsLog, null, 2));
log.log('saveAssets', 'devtools log saved to disk: ' + devtoolsLogFilename);
const traceFilename = `${pathWithBasename}-${index}${traceSuffix}`;
await saveTrace(passAssets.traceData, traceFilename);
log.log('saveAssets', 'trace file streamed to disk: ' + traceFilename);
});
await Promise.all(saveAll);
await saveLanternDebugTraces(pathWithBasename);
}
/**
 * Runs network analysis over the devtools log and writes the simulator's
 * saveable lantern data to outputPath as JSON.
 * @param {LH.DevtoolsLog} devtoolsLog
 * @param {string} outputPath
 * @return {Promise<void>}
 */
async function saveLanternNetworkData(devtoolsLog, outputPath) {
/** @type {LH.Audit.Context} */
// @ts-expect-error - the full audit context isn't needed for analysis.
const context = {computedCache: new Map()};
const networkAnalysis = await NetworkAnalysis.request(devtoolsLog, context);
const lanternData = LoadSimulator.convertAnalysisToSaveableLanternData(networkAnalysis);
fs.writeFileSync(outputPath, JSON.stringify(lanternData));
}
/**
 * Normalize timing data so it doesn't change every update: start times become
 * sequential integers from 0 and every duration is 1, keeping saved timing
 * entries stable across runs.
 * @param {LH.Result.MeasureEntry[]} timings
 */
function normalizeTimingEntries(timings) {
  timings.forEach((timing, i) => {
    // Values actually are writeable at this point despite the readonly type.
    timing.startTime = i;
    timing.duration = 1;
  });
}
/**
 * Strips noise from audit error stacks so saved LHRs stay stable:
 * repo-root file paths become relative and line/column numbers are removed.
 * Mutates `lhr` in place.
 * @param {LH.Result} lhr
 */
function elideAuditErrorStacks(lhr) {
const baseCallFrameUrl = url.pathToFileURL(LH_ROOT);
for (const auditResult of Object.values(lhr.audits)) {
if (auditResult.errorStack) {
auditResult.errorStack = auditResult.errorStack
// Make paths relative to the repo root.
.replaceAll(baseCallFrameUrl.pathname, '')
// Remove line/col info.
.replaceAll(/:\d+:\d+/g, '');
}
}
}
export {
saveArtifacts,
saveFlowArtifacts,
saveLhr,
loadArtifacts,
loadFlowArtifacts,
saveAssets,
prepareAssets,
saveTrace,
saveDevtoolsLog,
saveLanternNetworkData,
stringifyReplacer,
normalizeTimingEntries,
elideAuditErrorStacks,
};

2
node_modules/lighthouse/core/lib/axe.d.ts generated vendored Normal file
View File

@@ -0,0 +1,2 @@
export const axeSource: string;
//# sourceMappingURL=axe.d.ts.map

17
node_modules/lighthouse/core/lib/axe.js generated vendored Normal file
View File

@@ -0,0 +1,17 @@
/**
* @license Copyright 2020 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import fs from 'fs';
import {createRequire} from 'module';
// This is removed by rollup, because the only usage is to resolve a module path
// but that is replaced by the inline-fs plugin, leaving `require` unused.
const require = /* #__PURE__ */ createRequire(import.meta.url);
// Read the minified axe-core source once at module load so it can later be
// injected into the page as a string.
const axeSource = fs.readFileSync(require.resolve('axe-core/axe.min.js'), 'utf8');
export {
axeSource,
};

124
node_modules/lighthouse/core/lib/bf-cache-strings.d.ts generated vendored Normal file
View File

@@ -0,0 +1,124 @@
/** @type {Record<string, {name: LH.IcuMessage} | undefined>} */
export const NotRestoredReasonDescription: Record<string, {
name: LH.IcuMessage;
} | undefined>;
export namespace UIStrings {
const notMainFrame: string;
const backForwardCacheDisabled: string;
const relatedActiveContentsExist: string;
const HTTPStatusNotOK: string;
const schemeNotHTTPOrHTTPS: string;
const loading: string;
const wasGrantedMediaAccess: string;
const HTTPMethodNotGET: string;
const subframeIsNavigating: string;
const timeout: string;
const cacheLimit: string;
const JavaScriptExecution: string;
const rendererProcessKilled: string;
const rendererProcessCrashed: string;
const grantedMediaStreamAccess: string;
const cacheFlushed: string;
const serviceWorkerVersionActivation: string;
const sessionRestored: string;
const serviceWorkerPostMessage: string;
const enteredBackForwardCacheBeforeServiceWorkerHostAdded: string;
const serviceWorkerClaim: string;
const haveInnerContents: string;
const timeoutPuttingInCache: string;
const backForwardCacheDisabledByLowMemory: string;
const backForwardCacheDisabledByCommandLine: string;
const networkRequestDatapipeDrainedAsBytesConsumer: string;
const networkRequestRedirected: string;
const networkRequestTimeout: string;
const networkExceedsBufferLimit: string;
const navigationCancelledWhileRestoring: string;
const backForwardCacheDisabledForPrerender: string;
const userAgentOverrideDiffers: string;
const foregroundCacheLimit: string;
const backForwardCacheDisabledForDelegate: string;
const unloadHandlerExistsInMainFrame: string;
const unloadHandlerExistsInSubFrame: string;
const serviceWorkerUnregistration: string;
const noResponseHead: string;
const cacheControlNoStore: string;
const ineligibleAPI: string;
const internalError: string;
const webSocket: string;
const webTransport: string;
const webRTC: string;
const mainResourceHasCacheControlNoStore: string;
const mainResourceHasCacheControlNoCache: string;
const subresourceHasCacheControlNoStore: string;
const subresourceHasCacheControlNoCache: string;
const containsPlugins: string;
const documentLoaded: string;
const dedicatedWorkerOrWorklet: string;
const outstandingNetworkRequestOthers: string;
const outstandingIndexedDBTransaction: string;
const requestedNotificationsPermission: string;
const requestedMIDIPermission: string;
const requestedAudioCapturePermission: string;
const requestedVideoCapturePermission: string;
const requestedBackForwardCacheBlockedSensors: string;
const requestedBackgroundWorkPermission: string;
const broadcastChannel: string;
const indexedDBConnection: string;
const webXR: string;
const sharedWorker: string;
const webLocks: string;
const webHID: string;
const webShare: string;
const requestedStorageAccessGrant: string;
const webNfc: string;
const outstandingNetworkRequestFetch: string;
const outstandingNetworkRequestXHR: string;
const appBanner: string;
const printing: string;
const webDatabase: string;
const pictureInPicture: string;
const portal: string;
const speechRecognizer: string;
const idleManager: string;
const paymentManager: string;
const speechSynthesis: string;
const keyboardLock: string;
const webOTPService: string;
const outstandingNetworkRequestDirectSocket: string;
const injectedJavascript: string;
const injectedStyleSheet: string;
const contentSecurityHandler: string;
const contentWebAuthenticationAPI: string;
const contentFileChooser: string;
const contentSerial: string;
const contentFileSystemAccess: string;
const contentMediaDevicesDispatcherHost: string;
const contentWebBluetooth: string;
const contentWebUSB: string;
const contentMediaSession: string;
const contentMediaSessionService: string;
const contentMediaPlay: string;
const contentScreenReader: string;
const embedderPopupBlockerTabHelper: string;
const embedderSafeBrowsingTriggeredPopupBlocker: string;
const embedderSafeBrowsingThreatDetails: string;
const embedderAppBannerManager: string;
const embedderDomDistillerViewerSource: string;
const embedderDomDistillerSelfDeletingRequestDelegate: string;
const embedderOomInterventionTabHelper: string;
const embedderOfflinePage: string;
const embedderChromePasswordManagerClientBindCredentialManager: string;
const embedderPermissionRequestManager: string;
const embedderModalDialog: string;
const embedderExtensions: string;
const embedderExtensionMessaging: string;
const embedderExtensionMessagingForOpenPort: string;
const embedderExtensionSentMessageToCachedFrame: string;
const errorDocument: string;
const fencedFramesEmbedder: string;
const keepaliveRequest: string;
const authorizationHeader: string;
const indexedDBEvent: string;
const cookieDisabled: string;
}
//# sourceMappingURL=bf-cache-strings.d.ts.map

677
node_modules/lighthouse/core/lib/bf-cache-strings.js generated vendored Normal file
View File

@@ -0,0 +1,677 @@
// auto-generated by build/build-cdt-strings.js
/* eslint-disable */
import * as i18n from '../lib/i18n/i18n.js';
const UIStrings = {
  /**
   * @description Description text for not restored reason NotMainFrame.
   */
  notMainFrame: 'Navigation happened in a frame other than the main frame.',
  /**
   * @description Description text for not restored reason BackForwardCacheDisabled.
   */
  backForwardCacheDisabled:
    'Back/forward cache is disabled by flags. Visit chrome://flags/#back-forward-cache to enable it locally on this device.',
  /**
   * @description Description text for not restored reason RelatedActiveContentsExist.
   * Note: "window.open()" is the name of a JavaScript method and should not be translated.
   */
  relatedActiveContentsExist:
    'The page was opened using \'`window.open()`\' and another tab has a reference to it, or the page opened a window.',
  /**
   * @description Description text for not restored reason HTTPStatusNotOK.
   */
  HTTPStatusNotOK: 'Only pages with a status code of 2XX can be cached.',
  /**
   * @description Description text for not restored reason SchemeNotHTTPOrHTTPS.
   */
  schemeNotHTTPOrHTTPS: 'Only pages whose URL scheme is HTTP / HTTPS can be cached.',
  /**
   * @description Description text for not restored reason Loading.
   */
  loading: 'The page did not finish loading before navigating away.',
  /**
   * @description Description text for not restored reason WasGrantedMediaAccess.
   */
  wasGrantedMediaAccess:
    'Pages that have granted access to record video or audio are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason HTTPMethodNotGET.
   */
  HTTPMethodNotGET: 'Only pages loaded via a GET request are eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason SubframeIsNavigating.
   */
  subframeIsNavigating: 'An iframe on the page started a navigation that did not complete.',
  /**
   * @description Description text for not restored reason Timeout.
   */
  timeout: 'The page exceeded the maximum time in back/forward cache and was expired.',
  /**
   * @description Description text for not restored reason CacheLimit.
   */
  cacheLimit: 'The page was evicted from the cache to allow another page to be cached.',
  /**
   * @description Description text for not restored reason JavaScriptExecution.
   */
  JavaScriptExecution: 'Chrome detected an attempt to execute JavaScript while in the cache.',
  /**
   * @description Description text for not restored reason RendererProcessKilled.
   */
  rendererProcessKilled: 'The renderer process for the page in back/forward cache was killed.',
  /**
   * @description Description text for not restored reason RendererProcessCrashed.
   */
  rendererProcessCrashed: 'The renderer process for the page in back/forward cache crashed.',
  /**
   * @description Description text for not restored reason GrantedMediaStreamAccess.
   */
  grantedMediaStreamAccess:
    'Pages that have granted media stream access are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason CacheFlushed.
   */
  cacheFlushed: 'The cache was intentionally cleared.',
  /**
   * @description Description text for not restored reason ServiceWorkerVersionActivation.
   */
  serviceWorkerVersionActivation: 'The page was evicted from back/forward cache due to a service worker activation.',
  /**
   * @description Description text for not restored reason SessionRestored.
   */
  sessionRestored: 'Chrome restarted and cleared the back/forward cache entries.',
  /**
   * @description Description text for not restored reason ServiceWorkerPostMessage.
   * Note: "MessageEvent" should not be translated.
   */
  serviceWorkerPostMessage: 'A service worker attempted to send the page in back/forward cache a `MessageEvent`.',
  /**
   * @description Description text for not restored reason EnteredBackForwardCacheBeforeServiceWorkerHostAdded.
   */
  enteredBackForwardCacheBeforeServiceWorkerHostAdded:
    'A service worker was activated while the page was in back/forward cache.',
  /**
   * @description Description text for not restored reason ServiceWorkerClaim.
   */
  serviceWorkerClaim: 'The page was claimed by a service worker while it is in back/forward cache.',
  /**
   * @description Description text for not restored reason HaveInnerContents.
   */
  haveInnerContents: 'Pages that use portals are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason TimeoutPuttingInCache.
   */
  timeoutPuttingInCache:
    'The page timed out entering back/forward cache (likely due to long-running pagehide handlers).',
  /**
   * @description Description text for not restored reason BackForwardCacheDisabledByLowMemory.
   */
  backForwardCacheDisabledByLowMemory: 'Back/forward cache is disabled due to insufficient memory.',
  /**
   * @description Description text for not restored reason BackForwardCacheDisabledByCommandLine.
   */
  backForwardCacheDisabledByCommandLine: 'Back/forward cache is disabled by the command line.',
  /**
   * @description Description text for not restored reason NetworkRequestDatapipeDrainedAsBytesConsumer.
   */
  networkRequestDatapipeDrainedAsBytesConsumer:
    'Pages that have inflight fetch() or XHR are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason NetworkRequestRedirected.
   */
  networkRequestRedirected:
    'The page was evicted from back/forward cache because an active network request involved a redirect.',
  /**
   * @description Description text for not restored reason NetworkRequestTimeout.
   */
  networkRequestTimeout:
    'The page was evicted from the cache because a network connection was open too long. Chrome limits the amount of time that a page may receive data while cached.',
  /**
   * @description Description text for not restored reason NetworkExceedsBufferLimit.
   */
  networkExceedsBufferLimit:
    'The page was evicted from the cache because an active network connection received too much data. Chrome limits the amount of data that a page may receive while cached.',
  /**
   * @description Description text for not restored reason NavigationCancelledWhileRestoring.
   */
  navigationCancelledWhileRestoring:
    'Navigation was cancelled before the page could be restored from back/forward cache.',
  /**
   * @description Description text for not restored reason BackForwardCacheDisabledForPrerender.
   */
  backForwardCacheDisabledForPrerender: 'Back/forward cache is disabled for prerenderer.',
  /**
   * @description Description text for not restored reason UserAgentOverrideDiffers.
   */
  userAgentOverrideDiffers: 'Browser has changed the user agent override header.',
  /**
   * @description Description text for not restored reason ForegroundCacheLimit.
   */
  foregroundCacheLimit: 'The page was evicted from the cache to allow another page to be cached.',
  /**
   * @description Description text for not restored reason BackForwardCacheDisabledForDelegate.
   */
  backForwardCacheDisabledForDelegate: 'Back/forward cache is not supported by delegate.',
  /**
   * @description Description text for not restored reason UnloadHandlerExistsInMainFrame.
   */
  unloadHandlerExistsInMainFrame: 'The page has an unload handler in the main frame.',
  /**
   * @description Description text for not restored reason UnloadHandlerExistsInSubFrame.
   */
  unloadHandlerExistsInSubFrame: 'The page has an unload handler in a sub frame.',
  /**
   * @description Description text for not restored reason ServiceWorkerUnregistration.
   */
  serviceWorkerUnregistration: 'ServiceWorker was unregistered while a page was in back/forward cache.',
  /**
   * @description Description text for not restored reason NoResponseHead.
   */
  noResponseHead: 'Pages that do not have a valid response head cannot enter back/forward cache.',
  /**
   * @description Description text for not restored reason CacheControlNoStore.
   */
  cacheControlNoStore: 'Pages with cache-control:no-store header cannot enter back/forward cache.',
  /**
   * @description Description text for not restored reason IneligibleAPI.
   */
  ineligibleAPI: 'Ineligible APIs were used.',
  /**
   * @description Description text for not restored reason InternalError.
   */
  internalError: 'Internal error.',
  /**
   * @description Description text for not restored reason WebSocket.
   */
  webSocket: 'Pages with WebSocket cannot enter back/forward cache.',
  /**
   * @description Description text for not restored reason WebTransport.
   */
  webTransport: 'Pages with WebTransport cannot enter back/forward cache.',
  /**
   * @description Description text for not restored reason WebRTC.
   */
  webRTC: 'Pages with WebRTC cannot enter back/forward cache.',
  /**
   * @description Description text for not restored reason MainResourceHasCacheControlNoStore.
   */
  mainResourceHasCacheControlNoStore:
    'Pages whose main resource has cache-control:no-store cannot enter back/forward cache.',
  /**
   * @description Description text for not restored reason MainResourceHasCacheControlNoCache.
   */
  mainResourceHasCacheControlNoCache:
    'Pages whose main resource has cache-control:no-cache cannot enter back/forward cache.',
  /**
   * @description Description text for not restored reason SubresourceHasCacheControlNoStore.
   */
  subresourceHasCacheControlNoStore:
    'Pages whose subresource has cache-control:no-store cannot enter back/forward cache.',
  /**
   * @description Description text for not restored reason SubresourceHasCacheControlNoCache.
   */
  subresourceHasCacheControlNoCache:
    'Pages whose subresource has cache-control:no-cache cannot enter back/forward cache.',
  /**
   * @description Description text for not restored reason ContainsPlugins.
   */
  containsPlugins: 'Pages containing plugins are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason DocumentLoaded.
   */
  documentLoaded: 'The document did not finish loading before navigating away.',
  /**
   * @description Description text for not restored reason DedicatedWorkerOrWorklet.
   */
  dedicatedWorkerOrWorklet:
    'Pages that use a dedicated worker or worklet are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason OutstandingNetworkRequestOthers.
   */
  outstandingNetworkRequestOthers:
    'Pages with an in-flight network request are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason OutstandingIndexedDBTransaction.
   */
  outstandingIndexedDBTransaction:
    'Page with ongoing indexed DB transactions are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason RequestedNotificationsPermission.
   */
  requestedNotificationsPermission:
    'Pages that have requested notifications permissions are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason RequestedMIDIPermission.
   */
  requestedMIDIPermission:
    'Pages that have requested MIDI permissions are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason RequestedAudioCapturePermission.
   */
  requestedAudioCapturePermission:
    'Pages that have requested audio capture permissions are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason RequestedVideoCapturePermission.
   */
  requestedVideoCapturePermission:
    'Pages that have requested video capture permissions are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason RequestedBackForwardCacheBlockedSensors.
   */
  requestedBackForwardCacheBlockedSensors:
    'Pages that have requested sensor permissions are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason RequestedBackgroundWorkPermission.
   */
  requestedBackgroundWorkPermission:
    'Pages that have requested background sync or fetch permissions are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason BroadcastChannel.
   */
  broadcastChannel: 'The page cannot be cached because it has a BroadcastChannel instance with registered listeners.',
  /**
   * @description Description text for not restored reason IndexedDBConnection.
   */
  indexedDBConnection:
    'Pages that have an open IndexedDB connection are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason WebXR.
   */
  webXR: 'Pages that use WebXR are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason SharedWorker.
   */
  sharedWorker: 'Pages that use SharedWorker are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason WebLocks.
   */
  webLocks: 'Pages that use WebLocks are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason WebHID.
   */
  webHID: 'Pages that use WebHID are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason WebShare.
   */
  webShare: 'Pages that use WebShare are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason RequestedStorageAccessGrant.
   */
  requestedStorageAccessGrant:
    'Pages that have requested storage access are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason WebNfc.
   */
  webNfc: 'Pages that use WebNfc are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason OutstandingNetworkRequestFetch.
   */
  outstandingNetworkRequestFetch:
    'Pages with an in-flight fetch network request are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason OutstandingNetworkRequestXHR.
   */
  outstandingNetworkRequestXHR:
    'Pages with an in-flight XHR network request are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason AppBanner.
   */
  appBanner: 'Pages that requested an AppBanner are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason Printing.
   */
  printing: 'Pages that show Printing UI are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason WebDatabase.
   */
  webDatabase: 'Pages that use WebDatabase are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason PictureInPicture.
   */
  pictureInPicture: 'Pages that use Picture-in-Picture are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason Portal.
   */
  portal: 'Pages that use portals are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason SpeechRecognizer.
   */
  speechRecognizer: 'Pages that use SpeechRecognizer are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason IdleManager.
   */
  idleManager: 'Pages that use IdleManager are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason PaymentManager.
   */
  paymentManager: 'Pages that use PaymentManager are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason SpeechSynthesis.
   */
  speechSynthesis: 'Pages that use SpeechSynthesis are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason KeyboardLock.
   */
  keyboardLock: 'Pages that use Keyboard lock are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason WebOTPService.
   */
  webOTPService: 'Pages that use WebOTPService are not currently eligible for bfcache.',
  /**
   * @description Description text for not restored reason OutstandingNetworkRequestDirectSocket.
   */
  outstandingNetworkRequestDirectSocket:
    'Pages with an in-flight network request are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason InjectedJavascript.
   */
  injectedJavascript:
    'Pages that `JavaScript` is injected into by extensions are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason InjectedStyleSheet.
   */
  injectedStyleSheet:
    'Pages that a `StyleSheet` is injected into by extensions are not currently eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason ContentSecurityHandler.
   */
  contentSecurityHandler: 'Pages that use SecurityHandler are not eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason ContentWebAuthenticationAPI.
   */
  contentWebAuthenticationAPI: 'Pages that use WebAuthentication API are not eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason ContentFileChooser.
   */
  contentFileChooser: 'Pages that use FileChooser API are not eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason ContentSerial.
   */
  contentSerial: 'Pages that use Serial API are not eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason ContentFileSystemAccess.
   */
  contentFileSystemAccess: 'Pages that use File System Access API are not eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason ContentMediaDevicesDispatcherHost.
   */
  contentMediaDevicesDispatcherHost: 'Pages that use Media Device Dispatcher are not eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason ContentWebBluetooth.
   */
  contentWebBluetooth: 'Pages that use WebBluetooth API are not eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason ContentWebUSB.
   */
  contentWebUSB: 'Pages that use WebUSB API are not eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason ContentMediaSession.
   */
  contentMediaSession:
    'Pages that use MediaSession API and set a playback state are not eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason ContentMediaSessionService.
   */
  contentMediaSessionService:
    'Pages that use MediaSession API and set action handlers are not eligible for back/forward cache.',
  /**
   * @description Description text for not restored reason ContentMediaPlay.
   */
  contentMediaPlay: 'A media player was playing upon navigating away.',
  /**
   * @description Description text for not restored reason ContentScreenReader.
   */
  contentScreenReader: 'Back/forward cache is disabled due to screen reader.',
  /**
   * @description Description text for not restored reason EmbedderPopupBlockerTabHelper.
   */
  embedderPopupBlockerTabHelper: 'Popup blocker was present upon navigating away.',
  /**
   * @description Description text for not restored reason EmbedderSafeBrowsingTriggeredPopupBlocker.
   */
  embedderSafeBrowsingTriggeredPopupBlocker: 'Safe Browsing considered this page to be abusive and blocked popup.',
  /**
   * @description Description text for not restored reason EmbedderSafeBrowsingThreatDetails.
   */
  embedderSafeBrowsingThreatDetails: 'Safe Browsing details were shown upon navigating away.',
  /**
   * @description Description text for not restored reason EmbedderAppBannerManager.
   */
  embedderAppBannerManager: 'App Banner was present upon navigating away.',
  /**
   * @description Description text for not restored reason EmbedderDomDistillerViewerSource.
   */
  embedderDomDistillerViewerSource: 'DOM Distiller Viewer was present upon navigating away.',
  /**
   * @description Description text for not restored reason EmbedderDomDistillerSelfDeletingRequestDelegate.
   */
  embedderDomDistillerSelfDeletingRequestDelegate: 'DOM distillation was in progress upon navigating away.',
  /**
   * @description Description text for not restored reason EmbedderOomInterventionTabHelper.
   */
  embedderOomInterventionTabHelper: 'Out-Of-Memory Intervention bar was present upon navigating away.',
  /**
   * @description Description text for not restored reason EmbedderOfflinePage.
   */
  embedderOfflinePage: 'The offline page was shown upon navigating away.',
  /**
   * @description Description text for not restored reason EmbedderChromePasswordManagerClientBindCredentialManager.
   */
  embedderChromePasswordManagerClientBindCredentialManager: 'Chrome Password Manager was present upon navigating away.',
  /**
   * @description Description text for not restored reason EmbedderPermissionRequestManager.
   */
  embedderPermissionRequestManager: 'There were permission requests upon navigating away.',
  /**
   * @description Description text for not restored reason EmbedderModalDialog.
   */
  embedderModalDialog:
    'Modal dialog such as form resubmission or http password dialog was shown for the page upon navigating away.',
  /**
   * @description Description text for not restored reason EmbedderExtensions.
   */
  embedderExtensions: 'Back/forward cache is disabled due to extensions.',
  /**
   * @description Description text for not restored reason EmbedderExtensionMessaging.
   */
  embedderExtensionMessaging: 'Back/forward cache is disabled due to extensions using messaging API.',
  /**
   * @description Description text for not restored reason EmbedderExtensionMessagingForOpenPort.
   */
  embedderExtensionMessagingForOpenPort:
    'Extensions with long-lived connection should close the connection before entering back/forward cache.',
  /**
   * @description Description text for not restored reason EmbedderExtensionSentMessageToCachedFrame.
   */
  embedderExtensionSentMessageToCachedFrame:
    'Extensions with long-lived connection attempted to send messages to frames in back/forward cache.',
  /**
   * @description Description text for not restored reason ErrorDocument.
   */
  errorDocument: 'Back/forward cache is disabled due to a document error.',
  /**
   * @description Description text for not restored reason FencedFramesEmbedder.
   */
  fencedFramesEmbedder: 'Pages using FencedFrames cannot be stored in bfcache.',
  /**
   * @description Empty string to roll protocol.
   */
  // NOTE(review): this text duplicates keepaliveRequest; kept as-is pending a
  // proper upstream description for the AuthorizationHeader reason — verify
  // against the CDT source before changing.
  authorizationHeader: 'Back/forward cache is disabled due to a keepalive request.',
  /**
   * @description Description text for not restored reason KeepaliveRequest.
   */
  keepaliveRequest: 'Back/forward cache is disabled due to a keepalive request.',
  /**
   * @description Description text for not restored reason IndexedDBEvent.
   */
  indexedDBEvent: 'Back/forward cache is disabled due to an IndexedDB event.',
  /**
   * @description Description text for not restored reason CookieDisabled.
   */
  cookieDisabled:
    'Back/forward cache is disabled because cookies are disabled on a page that uses `Cache-Control: no-store`.',
};
// Bind the UIStrings above to this module's i18n context so each message can
// be resolved to a localized ICU message at lookup time.
const str_ = i18n.createIcuMessageFn(import.meta.url, UIStrings);
/**
 * Maps Chrome's `NotRestoredReason` protocol enum values (why a page was not
 * served from the back/forward cache) to localized, human-readable
 * explanations. Reasons without a useful user-facing explanation map to a
 * generic internal-error or ineligible-API message.
 * @type {Record<string, {name: LH.IcuMessage} | undefined>}
 */
const NotRestoredReasonDescription = {
  'NotPrimaryMainFrame': {name: str_(UIStrings.notMainFrame)},
  'BackForwardCacheDisabled': {name: str_(UIStrings.backForwardCacheDisabled)},
  'RelatedActiveContentsExist': {name: str_(UIStrings.relatedActiveContentsExist)},
  'HTTPStatusNotOK': {name: str_(UIStrings.HTTPStatusNotOK)},
  'SchemeNotHTTPOrHTTPS': {name: str_(UIStrings.schemeNotHTTPOrHTTPS)},
  'Loading': {name: str_(UIStrings.loading)},
  'WasGrantedMediaAccess': {name: str_(UIStrings.wasGrantedMediaAccess)},
  'HTTPMethodNotGET': {name: str_(UIStrings.HTTPMethodNotGET)},
  'SubframeIsNavigating': {name: str_(UIStrings.subframeIsNavigating)},
  'Timeout': {name: str_(UIStrings.timeout)},
  'CacheLimit': {name: str_(UIStrings.cacheLimit)},
  'JavaScriptExecution': {name: str_(UIStrings.JavaScriptExecution)},
  'RendererProcessKilled': {name: str_(UIStrings.rendererProcessKilled)},
  'RendererProcessCrashed': {name: str_(UIStrings.rendererProcessCrashed)},
  'GrantedMediaStreamAccess': {name: str_(UIStrings.grantedMediaStreamAccess)},
  'CacheFlushed': {name: str_(UIStrings.cacheFlushed)},
  'ServiceWorkerVersionActivation': {name: str_(UIStrings.serviceWorkerVersionActivation)},
  'SessionRestored': {name: str_(UIStrings.sessionRestored)},
  'ServiceWorkerPostMessage': {name: str_(UIStrings.serviceWorkerPostMessage)},
  'EnteredBackForwardCacheBeforeServiceWorkerHostAdded':
    {name: str_(UIStrings.enteredBackForwardCacheBeforeServiceWorkerHostAdded)},
  'ServiceWorkerClaim': {name: str_(UIStrings.serviceWorkerClaim)},
  'HaveInnerContents': {name: str_(UIStrings.haveInnerContents)},
  'TimeoutPuttingInCache': {name: str_(UIStrings.timeoutPuttingInCache)},
  'BackForwardCacheDisabledByLowMemory': {name: str_(UIStrings.backForwardCacheDisabledByLowMemory)},
  'BackForwardCacheDisabledByCommandLine': {name: str_(UIStrings.backForwardCacheDisabledByCommandLine)},
  'NetworkRequestDatapipeDrainedAsBytesConsumer':
    {name: str_(UIStrings.networkRequestDatapipeDrainedAsBytesConsumer)},
  'NetworkRequestRedirected': {name: str_(UIStrings.networkRequestRedirected)},
  'NetworkRequestTimeout': {name: str_(UIStrings.networkRequestTimeout)},
  'NetworkExceedsBufferLimit': {name: str_(UIStrings.networkExceedsBufferLimit)},
  'NavigationCancelledWhileRestoring': {name: str_(UIStrings.navigationCancelledWhileRestoring)},
  'BackForwardCacheDisabledForPrerender': {name: str_(UIStrings.backForwardCacheDisabledForPrerender)},
  'UserAgentOverrideDiffers': {name: str_(UIStrings.userAgentOverrideDiffers)},
  'ForegroundCacheLimit': {name: str_(UIStrings.foregroundCacheLimit)},
  'BackForwardCacheDisabledForDelegate': {name: str_(UIStrings.backForwardCacheDisabledForDelegate)},
  'UnloadHandlerExistsInMainFrame': {name: str_(UIStrings.unloadHandlerExistsInMainFrame)},
  'UnloadHandlerExistsInSubFrame': {name: str_(UIStrings.unloadHandlerExistsInSubFrame)},
  'ServiceWorkerUnregistration': {name: str_(UIStrings.serviceWorkerUnregistration)},
  'NoResponseHead': {name: str_(UIStrings.noResponseHead)},
  'CacheControlNoStore': {name: str_(UIStrings.cacheControlNoStore)},
  // The next two cookie-modified variants intentionally reuse the generic
  // cache-control:no-store explanation.
  'CacheControlNoStoreCookieModified': {name: str_(UIStrings.cacheControlNoStore)},
  'CacheControlNoStoreHTTPOnlyCookieModified': {name: str_(UIStrings.cacheControlNoStore)},
  'DisableForRenderFrameHostCalled': {name: str_(UIStrings.ineligibleAPI)},
  'BlocklistedFeatures': {name: str_(UIStrings.ineligibleAPI)},
  'SchedulerTrackedFeatureUsed': {name: str_(UIStrings.ineligibleAPI)},
  // Browser-internal reasons with no actionable user-facing detail map to a
  // generic internal-error message.
  'DomainNotAllowed': {name: str_(UIStrings.internalError)},
  'ConflictingBrowsingInstance': {name: str_(UIStrings.internalError)},
  'NotMostRecentNavigationEntry': {name: str_(UIStrings.internalError)},
  'IgnoreEventAndEvict': {name: str_(UIStrings.internalError)},
  'BrowsingInstanceNotSwapped': {name: str_(UIStrings.internalError)},
  'ActivationNavigationsDisallowedForBug1234857': {name: str_(UIStrings.internalError)},
  'Unknown': {name: str_(UIStrings.internalError)},
  'RenderFrameHostReused_SameSite': {name: str_(UIStrings.internalError)},
  'RenderFrameHostReused_CrossSite': {name: str_(UIStrings.internalError)},
  'WebSocket': {name: str_(UIStrings.webSocket)},
  'WebTransport': {name: str_(UIStrings.webTransport)},
  'WebRTC': {name: str_(UIStrings.webRTC)},
  'MainResourceHasCacheControlNoStore': {name: str_(UIStrings.mainResourceHasCacheControlNoStore)},
  'MainResourceHasCacheControlNoCache': {name: str_(UIStrings.mainResourceHasCacheControlNoCache)},
  'SubresourceHasCacheControlNoStore': {name: str_(UIStrings.subresourceHasCacheControlNoStore)},
  'SubresourceHasCacheControlNoCache': {name: str_(UIStrings.subresourceHasCacheControlNoCache)},
  'ContainsPlugins': {name: str_(UIStrings.containsPlugins)},
  'DocumentLoaded': {name: str_(UIStrings.documentLoaded)},
  'DedicatedWorkerOrWorklet': {name: str_(UIStrings.dedicatedWorkerOrWorklet)},
  'OutstandingNetworkRequestOthers': {name: str_(UIStrings.outstandingNetworkRequestOthers)},
  'OutstandingIndexedDBTransaction': {name: str_(UIStrings.outstandingIndexedDBTransaction)},
  'RequestedNotificationsPermission': {name: str_(UIStrings.requestedNotificationsPermission)},
  'RequestedMIDIPermission': {name: str_(UIStrings.requestedMIDIPermission)},
  'RequestedAudioCapturePermission': {name: str_(UIStrings.requestedAudioCapturePermission)},
  'RequestedVideoCapturePermission': {name: str_(UIStrings.requestedVideoCapturePermission)},
  'RequestedBackForwardCacheBlockedSensors': {name: str_(UIStrings.requestedBackForwardCacheBlockedSensors)},
  'RequestedBackgroundWorkPermission': {name: str_(UIStrings.requestedBackgroundWorkPermission)},
  'BroadcastChannel': {name: str_(UIStrings.broadcastChannel)},
  'IndexedDBConnection': {name: str_(UIStrings.indexedDBConnection)},
  'WebXR': {name: str_(UIStrings.webXR)},
  'SharedWorker': {name: str_(UIStrings.sharedWorker)},
  'WebLocks': {name: str_(UIStrings.webLocks)},
  'WebHID': {name: str_(UIStrings.webHID)},
  'WebShare': {name: str_(UIStrings.webShare)},
  'RequestedStorageAccessGrant': {name: str_(UIStrings.requestedStorageAccessGrant)},
  'WebNfc': {name: str_(UIStrings.webNfc)},
  'OutstandingNetworkRequestFetch': {name: str_(UIStrings.outstandingNetworkRequestFetch)},
  'OutstandingNetworkRequestXHR': {name: str_(UIStrings.outstandingNetworkRequestXHR)},
  'AppBanner': {name: str_(UIStrings.appBanner)},
  'Printing': {name: str_(UIStrings.printing)},
  'WebDatabase': {name: str_(UIStrings.webDatabase)},
  'PictureInPicture': {name: str_(UIStrings.pictureInPicture)},
  'Portal': {name: str_(UIStrings.portal)},
  'SpeechRecognizer': {name: str_(UIStrings.speechRecognizer)},
  'IdleManager': {name: str_(UIStrings.idleManager)},
  'PaymentManager': {name: str_(UIStrings.paymentManager)},
  'SpeechSynthesis': {name: str_(UIStrings.speechSynthesis)},
  'KeyboardLock': {name: str_(UIStrings.keyboardLock)},
  'WebOTPService': {name: str_(UIStrings.webOTPService)},
  'OutstandingNetworkRequestDirectSocket': {name: str_(UIStrings.outstandingNetworkRequestDirectSocket)},
  'InjectedJavascript': {name: str_(UIStrings.injectedJavascript)},
  'InjectedStyleSheet': {name: str_(UIStrings.injectedStyleSheet)},
  'Dummy': {name: str_(UIStrings.internalError)},
  'ContentSecurityHandler': {name: str_(UIStrings.contentSecurityHandler)},
  'ContentWebAuthenticationAPI': {name: str_(UIStrings.contentWebAuthenticationAPI)},
  'ContentFileChooser': {name: str_(UIStrings.contentFileChooser)},
  'ContentSerial': {name: str_(UIStrings.contentSerial)},
  'ContentFileSystemAccess': {name: str_(UIStrings.contentFileSystemAccess)},
  'ContentMediaDevicesDispatcherHost': {name: str_(UIStrings.contentMediaDevicesDispatcherHost)},
  'ContentWebBluetooth': {name: str_(UIStrings.contentWebBluetooth)},
  'ContentWebUSB': {name: str_(UIStrings.contentWebUSB)},
  'ContentMediaSession': {name: str_(UIStrings.contentMediaSession)},
  'ContentMediaSessionService': {name: str_(UIStrings.contentMediaSessionService)},
  'ContentMediaPlay': {name: str_(UIStrings.contentMediaPlay)},
  'ContentScreenReader': {name: str_(UIStrings.contentScreenReader)},
  'EmbedderPopupBlockerTabHelper': {name: str_(UIStrings.embedderPopupBlockerTabHelper)},
  'EmbedderSafeBrowsingTriggeredPopupBlocker':
    {name: str_(UIStrings.embedderSafeBrowsingTriggeredPopupBlocker)},
  'EmbedderSafeBrowsingThreatDetails': {name: str_(UIStrings.embedderSafeBrowsingThreatDetails)},
  'EmbedderAppBannerManager': {name: str_(UIStrings.embedderAppBannerManager)},
  'EmbedderDomDistillerViewerSource': {name: str_(UIStrings.embedderDomDistillerViewerSource)},
  'EmbedderDomDistillerSelfDeletingRequestDelegate':
    {name: str_(UIStrings.embedderDomDistillerSelfDeletingRequestDelegate)},
  'EmbedderOomInterventionTabHelper': {name: str_(UIStrings.embedderOomInterventionTabHelper)},
  'EmbedderOfflinePage': {name: str_(UIStrings.embedderOfflinePage)},
  'EmbedderChromePasswordManagerClientBindCredentialManager':
    {name: str_(UIStrings.embedderChromePasswordManagerClientBindCredentialManager)},
  'EmbedderPermissionRequestManager': {name: str_(UIStrings.embedderPermissionRequestManager)},
  'EmbedderModalDialog': {name: str_(UIStrings.embedderModalDialog)},
  'EmbedderExtensions': {name: str_(UIStrings.embedderExtensions)},
  'EmbedderExtensionMessaging': {name: str_(UIStrings.embedderExtensionMessaging)},
  'EmbedderExtensionMessagingForOpenPort': {name: str_(UIStrings.embedderExtensionMessagingForOpenPort)},
  'EmbedderExtensionSentMessageToCachedFrame':
    {name: str_(UIStrings.embedderExtensionSentMessageToCachedFrame)},
  'ErrorDocument': {name: str_(UIStrings.errorDocument)},
  'FencedFramesEmbedder': {name: str_(UIStrings.fencedFramesEmbedder)},
  'KeepaliveRequest': {name: str_(UIStrings.keepaliveRequest)},
  'AuthorizationHeader': {name: str_(UIStrings.authorizationHeader)},
  'IndexedDBEvent': {name: str_(UIStrings.indexedDBEvent)},
  'CookieDisabled': {name: str_(UIStrings.cookieDisabled)},
};
export {
  NotRestoredReasonDescription,
  UIStrings,
};

3
node_modules/lighthouse/core/lib/cdt/Common.d.ts generated vendored Normal file
View File

@@ -0,0 +1,3 @@
// Type declarations for Common.js: exposes the generated ParsedURL module
// under the `Common.ParsedURL` name (CommonJS import-equals form).
export { ParsedURL };
import ParsedURL = require("./generated/ParsedURL.js");
//# sourceMappingURL=Common.d.ts.map

12
node_modules/lighthouse/core/lib/cdt/Common.js generated vendored Normal file
View File

@@ -0,0 +1,12 @@
// @ts-nocheck
/**
 * @license Copyright 2022 The Lighthouse Authors. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
 */
// Minimal stand-in for DevTools' `Common` namespace: the vendored SourceMap
// code only needs the generated ParsedURL module.
const ParsedURL = require('./generated/ParsedURL.js');
module.exports = {
  ParsedURL,
};

26
node_modules/lighthouse/core/lib/cdt/Platform.d.ts generated vendored Normal file
View File

@@ -0,0 +1,26 @@
/**
 * @license Copyright 2021 The Lighthouse Authors. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
 */
// Hand-maintained declarations for Platform.js: the binary-search helpers and
// DevToolsPath constants consumed by the vendored SourceMap implementation.
/**
 * First index in [left, right) at which `needle` could be inserted while
 * keeping the ordering imposed by `comparator`.
 * @param {any[]} array
 * @param {any} needle
 * @param {any} comparator
 */
declare function lowerBound(array: any[], needle: any, comparator: any, left: any, right: any): any;
/**
 * First index in [left, right) whose element compares strictly greater than
 * `needle` under `comparator`.
 * @param {any[]} array
 * @param {any} needle
 * @param {any} comparator
 */
declare function upperBound(array: any[], needle: any, comparator: any, left: any, right: any): any;
export namespace ArrayUtilities {
    export { lowerBound };
    export { upperBound };
}
export namespace DevToolsPath {
    const EmptyUrlString: string;
}
export {};
//# sourceMappingURL=Platform.d.ts.map

57
node_modules/lighthouse/core/lib/cdt/Platform.js generated vendored Normal file
View File

@@ -0,0 +1,57 @@
// @ts-nocheck
/**
* @license Copyright 2021 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
// Functions manually copied from:
// https://github.com/ChromeDevTools/devtools-frontend/blob/main/front_end/core/platform/array-utilities.ts#L125
/**
 * Binary search: returns the first index in `array` (restricted to
 * [left, right) when bounds are supplied) whose element does NOT compare
 * strictly less than `needle`, i.e. the leftmost valid insertion point.
 * `comparator(needle, element)` follows the usual negative/zero/positive
 * contract.
 * @param {any[]} array
 * @param {any} needle
 * @param {any} comparator
 */
function lowerBound(array, needle, comparator, left, right) {
  let lo = left || 0;
  let hi = right === undefined ? array.length : right;
  while (lo < hi) {
    const mid = (lo + hi) >> 1;
    if (comparator(needle, array[mid]) > 0) {
      lo = mid + 1;
    }
    else {
      hi = mid;
    }
  }
  return hi;
}
/**
 * Binary search: returns the first index in `array` (restricted to
 * [left, right) when bounds are supplied) whose element compares strictly
 * greater than `needle`, i.e. the rightmost valid insertion point.
 * `comparator(needle, element)` follows the usual negative/zero/positive
 * contract.
 * @param {any[]} array
 * @param {any} needle
 * @param {any} comparator
 */
function upperBound(array, needle, comparator, left, right) {
  let lo = left || 0;
  let hi = right === undefined ? array.length : right;
  while (lo < hi) {
    const mid = (lo + hi) >> 1;
    if (comparator(needle, array[mid]) >= 0) {
      lo = mid + 1;
    }
    else {
      hi = mid;
    }
  }
  return hi;
}
// Minimal stand-in for DevTools' `Platform` namespace: only the pieces the
// vendored SourceMap implementation actually touches.
module.exports = {
  ArrayUtilities: {
    lowerBound,
    upperBound,
  },
  DevToolsPath: {
    EmptyUrlString: '',
  },
};

2
node_modules/lighthouse/core/lib/cdt/SDK.d.ts generated vendored Normal file
View File

@@ -0,0 +1,2 @@
// Type declarations for SDK.js: re-exports the generated SourceMap class
// (after the lastColumnNumber prototype patch applied in SDK.js).
export const SourceMap: typeof import("./generated/SourceMap.js");
//# sourceMappingURL=SDK.d.ts.map

28
node_modules/lighthouse/core/lib/cdt/SDK.js generated vendored Normal file
View File

@@ -0,0 +1,28 @@
/**
* @license Copyright 2020 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
// Minimal stand-in for DevTools' `SDK` namespace; only SourceMap is needed.
const SDK = {
  SourceMap: require('./generated/SourceMap.js'),
};
// Add `lastColumnNumber` to mappings. This will eventually be added to CDT.
// Each entry is assumed to span until the next entry on the same compiled
// line; the last entry on a line keeps `lastColumnNumber` undefined.
// @ts-expect-error
SDK.SourceMap.prototype.computeLastGeneratedColumns = function() {
  const mappings = this.mappings();
  // Cheap idempotency guard: mappings are cached by SourceMap, so a defined
  // lastColumnNumber on the first entry means we already ran. If the first
  // entry is the only one on its line the guard misfires, but recomputation
  // is harmless.
  if (mappings.length && mappings[0].lastColumnNumber !== undefined) return;
  for (let i = 0; i < mappings.length - 1; i++) {
    const mapping = mappings[i];
    const nextMapping = mappings[i + 1];
    if (mapping.lineNumber === nextMapping.lineNumber) {
      mapping.lastColumnNumber = nextMapping.columnNumber;
    }
  }
  // Now, all but the last mapping on each line will have 'lastColumnNumber' set to a number.
};
module.exports = SDK;

View File

@@ -0,0 +1,28 @@
// Type declarations for generated/ParsedURL.js.
// NOTE(review): `displayName` relies on helpers (isDataURL, isBlobURL,
// isAboutBlank, dataURLDisplayName) that are not present in the generated
// implementation — confirm those call paths are never reached.
export const __esModule: boolean;
/**
 * http://tools.ietf.org/html/rfc3986#section-5.2.4
 */
export function normalizePath(path: any): any;
export class ParsedURL {
    static concatenate(devToolsPath: any, ...appendage: any[]): any;
    static beginsWithWindowsDriveLetter(url: any): boolean;
    static beginsWithScheme(url: any): boolean;
    static isRelativeURL(url: any): boolean;
    static urlRegexInstance: null;
    constructor(url: any);
    isValid: boolean;
    url: any;
    scheme: any;
    user: any;
    host: any;
    port: any;
    path: any;
    queryParams: any;
    fragment: any;
    folderPathComponents: any;
    lastPathComponent: any;
    blobInnerScheme: any;
    get displayName(): any;
    #private;
}
//# sourceMappingURL=ParsedURL.d.ts.map

View File

@@ -0,0 +1,178 @@
// @ts-nocheck
// generated by yarn build-cdt-lib
/* eslint-disable */
"use strict";
/*
* Copyright (C) 2012 Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.ParsedURL = exports.normalizePath = void 0;
;
/**
 * Collapses '.' and '..' segments in a URL path, per
 * http://tools.ietf.org/html/rfc3986#section-5.2.4.
 * Preserves a leading '/' and re-adds a trailing '/' when the input ended in
 * '/', '.' or '..'.
 */
function normalizePath(path) {
    // Fast path: nothing to collapse. (Checks for any '.', so paths with file
    // extensions still take the slow path below — same result either way.)
    if (!path.includes('..') && !path.includes('.')) {
        return path;
    }
    // Strip a leading slash (restored later) so every segment — including
    // empty ones — is handled uniformly.
    const hadLeadingSlash = path.startsWith('/');
    const segments = (hadLeadingSlash ? path.substring(1) : path).split('/');
    const stack = [];
    for (const segment of segments) {
        if (segment === '..') {
            stack.pop();
        }
        else if (segment !== '.') {
            stack.push(segment);
        }
    }
    let result = stack.join('/');
    if (hadLeadingSlash && result) {
        result = '/' + result;
    }
    const lastSegment = segments[segments.length - 1];
    if (!result.endsWith('/') &&
        (path.endsWith('/') || lastSegment === '.' || lastSegment === '..')) {
        result += '/';
    }
    return result;
}
exports.normalizePath = normalizePath;
// Splits a URL into its components via regex. Construction never throws: on a
// failed match the instance stays isValid === false and only scheme/path are
// best-effort populated.
// NOTE(review): the constructor calls ParsedURL.urlRegex(), and displayName
// calls isDataURL()/dataURLDisplayName()/isBlobURL()/isAboutBlank(), none of
// which are defined in this generated file. Instantiating this class (or
// reading displayName) would therefore throw; presumably Lighthouse only uses
// the static predicates (e.g. isRelativeURL). TODO confirm upstream.
class ParsedURL {
    isValid;
    url;
    scheme;
    user;
    host;
    port;
    path;
    queryParams;
    fragment;
    folderPathComponents;
    lastPathComponent;
    blobInnerScheme;
    #displayNameInternal;
    #dataURLDisplayNameInternal;
    constructor(url) {
        this.isValid = false;
        this.url = url;
        this.scheme = '';
        this.user = '';
        this.host = '';
        this.port = '';
        this.path = '';
        this.queryParams = '';
        this.fragment = '';
        this.folderPathComponents = '';
        this.lastPathComponent = '';
        // blob: URLs wrap an inner URL; match against the wrapped part.
        const isBlobUrl = this.url.startsWith('blob:');
        const urlToMatch = isBlobUrl ? url.substring(5) : url;
        const match = urlToMatch.match(ParsedURL.urlRegex());
        if (match) {
            this.isValid = true;
            if (isBlobUrl) {
                this.blobInnerScheme = match[2].toLowerCase();
                this.scheme = 'blob';
            }
            else {
                this.scheme = match[2].toLowerCase();
            }
            this.user = match[3] ?? '';
            this.host = match[4] ?? '';
            this.port = match[5] ?? '';
            this.path = match[6] ?? '/';
            this.queryParams = match[7] ?? '';
            this.fragment = match[8] ?? '';
        }
        else {
            // Unmatched URLs: recognize a few schemes by prefix, otherwise
            // treat the whole string as a path.
            if (this.url.startsWith('data:')) {
                this.scheme = 'data';
                return;
            }
            if (this.url.startsWith('blob:')) {
                this.scheme = 'blob';
                return;
            }
            if (this.url === 'about:blank') {
                this.scheme = 'about';
                return;
            }
            this.path = this.url;
        }
        // Split the path into folder part and final component.
        const lastSlashIndex = this.path.lastIndexOf('/');
        if (lastSlashIndex !== -1) {
            this.folderPathComponents = this.path.substring(0, lastSlashIndex);
            this.lastPathComponent = this.path.substring(lastSlashIndex + 1);
        }
        else {
            this.lastPathComponent = this.path;
        }
    }
    static concatenate(devToolsPath, ...appendage) {
        return devToolsPath.concat(...appendage);
    }
    static beginsWithWindowsDriveLetter(url) {
        return /^[A-Za-z]:/.test(url);
    }
    static beginsWithScheme(url) {
        return /^[A-Za-z][A-Za-z0-9+.-]*:/.test(url);
    }
    // A Windows drive letter ("C:...") parses like a scheme, so it is still
    // treated as relative here.
    static isRelativeURL(url) {
        return !this.beginsWithScheme(url) || this.beginsWithWindowsDriveLetter(url);
    }
    // Human-readable short name: last path component, falling back to host or
    // the full URL. Cached after the first computation.
    get displayName() {
        if (this.#displayNameInternal) {
            return this.#displayNameInternal;
        }
        if (this.isDataURL()) {
            return this.dataURLDisplayName();
        }
        if (this.isBlobURL()) {
            return this.url;
        }
        if (this.isAboutBlank()) {
            return this.url;
        }
        this.#displayNameInternal = this.lastPathComponent;
        if (!this.#displayNameInternal) {
            this.#displayNameInternal = (this.host || '') + '/';
        }
        if (this.#displayNameInternal === '/') {
            this.#displayNameInternal = this.url;
        }
        return this.#displayNameInternal;
    }
    static urlRegexInstance = null;
}
exports.ParsedURL = ParsedURL;

View File

@@ -0,0 +1,87 @@
// Type declarations for generated/SourceMap.js (CommonJS `export =` module:
// the class itself is the export, with parseSourceMap/SourceMapEntry merged
// onto it as namespace members).
export = SourceMap;
declare class SourceMap {
    /**
     * Implements Source Map V3 model. See https://github.com/google/closure-compiler/wiki/Source-Maps
     * for format description.
     */
    constructor(compiledURL: any, sourceMappingURL: any, payload: any);
    compiledURL(): any;
    url(): any;
    sourceURLs(): any[];
    embeddedContentByURL(sourceURL: any): any;
    findEntry(lineNumber: any, columnNumber: any): {
        lineNumber: number;
        columnNumber: number;
        sourceURL?: string | undefined;
        sourceLineNumber: number;
        sourceColumnNumber: number;
        name?: string | undefined;
        lastColumnNumber?: number | undefined;
    } | null;
    findEntryRanges(lineNumber: any, columnNumber: any): {
        range: any;
        sourceRange: any;
        sourceURL: string;
    } | null;
    sourceLineMapping(sourceURL: any, lineNumber: any, columnNumber: any): {
        lineNumber: number;
        columnNumber: number;
        sourceURL?: string | undefined;
        sourceLineNumber: number;
        sourceColumnNumber: number;
        name?: string | undefined;
        lastColumnNumber?: number | undefined;
    } | null;
    findReverseIndices(sourceURL: any, lineNumber: any, columnNumber: any): any;
    findReverseEntries(sourceURL: any, lineNumber: any, columnNumber: any): any;
    findReverseRanges(sourceURL: any, lineNumber: any, columnNumber: any): any[];
    /** @return {Array<{lineNumber: number, columnNumber: number, sourceURL?: string, sourceLineNumber: number, sourceColumnNumber: number, name?: string, lastColumnNumber?: number}>} */
    mappings(): {
        lineNumber: number;
        columnNumber: number;
        sourceURL?: string | undefined;
        sourceLineNumber: number;
        sourceColumnNumber: number;
        name?: string | undefined;
        lastColumnNumber?: number | undefined;
    }[];
    reversedMappings(sourceURL: any): any;
    eachSection(callback: any): void;
    parseSources(sourceMap: any): void;
    parseMap(map: any, lineNumber: any, columnNumber: any): void;
    isSeparator(char: any): boolean;
    decodeVLQ(stringCharIterator: any): number;
    mapsOrigin(): boolean;
    hasIgnoreListHint(sourceURL: any): any;
    /**
     * Returns a list of ranges in the generated script for original sources that
     * match a predicate. Each range is a [begin, end) pair, meaning that code at
     * the beginning location, up to but not including the end location, matches
     * the predicate.
     */
    findRanges(predicate: any, options: any): any[];
    #private;
}
declare namespace SourceMap {
    export { parseSourceMap, __esModule, SourceMapEntry, SourceMap };
}
/**
 * Parses the {@link content} as JSON, ignoring BOM markers in the beginning, and
 * also handling the CORB bypass prefix correctly.
 *
 * @param content the string representation of a sourcemap.
 * @returns the {@link SourceMapV3} representation of the {@link content}.
 */
declare function parseSourceMap(content: any): any;
declare const __esModule: boolean;
declare class SourceMapEntry {
    static compare(entry1: any, entry2: any): number;
    constructor(lineNumber: any, columnNumber: any, sourceURL: any, sourceLineNumber: any, sourceColumnNumber: any, name: any);
    lineNumber: any;
    columnNumber: any;
    sourceURL: any;
    sourceLineNumber: any;
    sourceColumnNumber: any;
    name: any;
}
//# sourceMappingURL=SourceMap.d.ts.map

View File

@@ -0,0 +1,477 @@
// @ts-nocheck
// generated by yarn build-cdt-lib
/* eslint-disable */
"use strict";
const Common = require('../Common.js');
const Platform = require('../Platform.js');
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
Object.defineProperty(exports, "__esModule", { value: true });
exports.SourceMap = exports.SourceMapEntry = exports.parseSourceMap = void 0;
/*
* Copyright (C) 2012 Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
 * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
;
;
;
/**
 * Parses the {@link content} as JSON, ignoring BOM markers in the beginning, and
 * also handling the CORB bypass prefix correctly.
 *
 * @param content the string representation of a sourcemap.
 * @returns the {@link SourceMapV3} representation of the {@link content}.
 */
function parseSourceMap(content) {
    let text = content;
    // Drop the ")]}" XSSI/CORB-protection prefix: everything up to the first
    // newline goes (JSON.parse tolerates the leading newline kept here).
    if (text.startsWith(')]}')) {
        text = text.substring(text.indexOf('\n'));
    }
    // Strip a BOM before handing the text to JSON.parse.
    if (text.charCodeAt(0) === 0xFEFF) {
        text = text.slice(1);
    }
    return JSON.parse(text);
}
exports.parseSourceMap = parseSourceMap;
/**
 * One decoded mapping: a position in the compiled script plus (optionally)
 * the original source position and symbol name it maps back to.
 */
class SourceMapEntry {
    lineNumber;
    columnNumber;
    sourceURL;
    sourceLineNumber;
    sourceColumnNumber;
    name;
    constructor(lineNumber, columnNumber, sourceURL, sourceLineNumber, sourceColumnNumber, name) {
        Object.assign(this, {
            lineNumber,
            columnNumber,
            sourceURL,
            sourceLineNumber,
            sourceColumnNumber,
            name,
        });
    }
    /** Orders entries by compiled line number, then by compiled column. */
    static compare(entry1, entry2) {
        const byLine = entry1.lineNumber - entry2.lineNumber;
        return byLine !== 0 ? byLine : entry1.columnNumber - entry2.columnNumber;
    }
}
exports.SourceMapEntry = SourceMapEntry;
// Base64 alphabet used by the VLQ encoding of the `mappings` string.
const base64Digits = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
// Digit character -> 6-bit value, for O(1) lookups in decodeVLQ.
const base64Map = new Map();
for (let i = 0; i < base64Digits.length; ++i) {
    base64Map.set(base64Digits.charAt(i), i);
}
// Caches the resolved source-URL list per map payload; written by
// parseSources() and read back by parseMap().
const sourceMapToSourceList = new WeakMap();
class SourceMap {
#json;
#compiledURLInternal;
#sourceMappingURL;
#baseURL;
#mappingsInternal;
#sourceInfos;
/**
* Implements Source Map V3 model. See https://github.com/google/closure-compiler/wiki/Source-Maps
* for format description.
*/
constructor(compiledURL, sourceMappingURL, payload) {
this.#json = payload;
this.#compiledURLInternal = compiledURL;
this.#sourceMappingURL = sourceMappingURL;
this.#baseURL = (sourceMappingURL.startsWith('data:') ? compiledURL : sourceMappingURL);
this.#mappingsInternal = null;
this.#sourceInfos = new Map();
if ('sections' in this.#json) {
if (this.#json.sections.find(section => 'url' in section)) {
console.warn(`SourceMap "${sourceMappingURL}" contains unsupported "URL" field in one of its sections.`);
}
}
this.eachSection(this.parseSources.bind(this));
}
compiledURL() {
return this.#compiledURLInternal;
}
url() {
return this.#sourceMappingURL;
}
sourceURLs() {
return [...this.#sourceInfos.keys()];
}
embeddedContentByURL(sourceURL) {
const entry = this.#sourceInfos.get(sourceURL);
if (!entry) {
return null;
}
return entry.content;
}
findEntry(lineNumber, columnNumber) {
const mappings = this.mappings();
const index = Platform.ArrayUtilities.upperBound(mappings, undefined, (unused, entry) => lineNumber - entry.lineNumber || columnNumber - entry.columnNumber);
return index ? mappings[index - 1] : null;
}
findEntryRanges(lineNumber, columnNumber) {
const mappings = this.mappings();
const endIndex = Platform.ArrayUtilities.upperBound(mappings, undefined, (unused, entry) => lineNumber - entry.lineNumber || columnNumber - entry.columnNumber);
if (!endIndex) {
// If the line and column are preceding all the entries, then there is nothing to map.
return null;
}
// startIndex must be within mappings range because endIndex must be not falsy
const startIndex = endIndex - 1;
const sourceURL = mappings[startIndex].sourceURL;
if (!sourceURL) {
return null;
}
// Let us compute the range that contains the source position in the compiled code.
const endLine = endIndex < mappings.length ? mappings[endIndex].lineNumber : 2 ** 31 - 1;
const endColumn = endIndex < mappings.length ? mappings[endIndex].columnNumber : 2 ** 31 - 1;
const range = new TextUtils.TextRange.TextRange(mappings[startIndex].lineNumber, mappings[startIndex].columnNumber, endLine, endColumn);
// Now try to find the corresponding token in the original code.
const reverseMappings = this.reversedMappings(sourceURL);
const startSourceLine = mappings[startIndex].sourceLineNumber;
const startSourceColumn = mappings[startIndex].sourceColumnNumber;
const endReverseIndex = Platform.ArrayUtilities.upperBound(reverseMappings, undefined, (unused, i) => startSourceLine - mappings[i].sourceLineNumber || startSourceColumn - mappings[i].sourceColumnNumber);
if (!endReverseIndex) {
return null;
}
const endSourceLine = endReverseIndex < reverseMappings.length ?
mappings[reverseMappings[endReverseIndex]].sourceLineNumber :
2 ** 31 - 1;
const endSourceColumn = endReverseIndex < reverseMappings.length ?
mappings[reverseMappings[endReverseIndex]].sourceColumnNumber :
2 ** 31 - 1;
const sourceRange = new TextUtils.TextRange.TextRange(startSourceLine, startSourceColumn, endSourceLine, endSourceColumn);
return { range, sourceRange, sourceURL };
}
sourceLineMapping(sourceURL, lineNumber, columnNumber) {
const mappings = this.mappings();
const reverseMappings = this.reversedMappings(sourceURL);
const first = Platform.ArrayUtilities.lowerBound(reverseMappings, lineNumber, lineComparator);
const last = Platform.ArrayUtilities.upperBound(reverseMappings, lineNumber, lineComparator);
if (first >= reverseMappings.length || mappings[reverseMappings[first]].sourceLineNumber !== lineNumber) {
return null;
}
const columnMappings = reverseMappings.slice(first, last);
if (!columnMappings.length) {
return null;
}
const index = Platform.ArrayUtilities.lowerBound(columnMappings, columnNumber, (columnNumber, i) => columnNumber - mappings[i].sourceColumnNumber);
return index >= columnMappings.length ? mappings[columnMappings[columnMappings.length - 1]] :
mappings[columnMappings[index]];
function lineComparator(lineNumber, i) {
return lineNumber - mappings[i].sourceLineNumber;
}
}
findReverseIndices(sourceURL, lineNumber, columnNumber) {
const mappings = this.mappings();
const reverseMappings = this.reversedMappings(sourceURL);
const endIndex = Platform.ArrayUtilities.upperBound(reverseMappings, undefined, (unused, i) => lineNumber - mappings[i].sourceLineNumber || columnNumber - mappings[i].sourceColumnNumber);
let startIndex = endIndex;
while (startIndex > 0 &&
mappings[reverseMappings[startIndex - 1]].sourceLineNumber ===
mappings[reverseMappings[endIndex - 1]].sourceLineNumber &&
mappings[reverseMappings[startIndex - 1]].sourceColumnNumber ===
mappings[reverseMappings[endIndex - 1]].sourceColumnNumber) {
--startIndex;
}
return reverseMappings.slice(startIndex, endIndex);
}
findReverseEntries(sourceURL, lineNumber, columnNumber) {
const mappings = this.mappings();
return this.findReverseIndices(sourceURL, lineNumber, columnNumber).map(i => mappings[i]);
}
findReverseRanges(sourceURL, lineNumber, columnNumber) {
const mappings = this.mappings();
const indices = this.findReverseIndices(sourceURL, lineNumber, columnNumber);
const ranges = [];
for (let i = 0; i < indices.length; ++i) {
const startIndex = indices[i];
// Merge adjacent ranges.
let endIndex = startIndex + 1;
while (i + 1 < indices.length && endIndex === indices[i + 1]) {
++endIndex;
++i;
}
// Source maps don't contain end positions for entries, but each entry is assumed to
// span until the following entry. This doesn't work however in case of the last
// entry, where there's no following entry. We also don't know the number of lines
// and columns in the original source code (which might not be available at all), so
// for that case we store the maximum signed 32-bit integer, which is definitely going
// to be larger than any script we can process and can safely be serialized as part of
// the skip list we send to V8 with `Debugger.stepOver` (http://crbug.com/1305956).
const startLine = mappings[startIndex].lineNumber;
const startColumn = mappings[startIndex].columnNumber;
const endLine = endIndex < mappings.length ? mappings[endIndex].lineNumber : 2 ** 31 - 1;
const endColumn = endIndex < mappings.length ? mappings[endIndex].columnNumber : 2 ** 31 - 1;
ranges.push(new TextUtils.TextRange.TextRange(startLine, startColumn, endLine, endColumn));
}
return ranges;
}
/** @return {Array<{lineNumber: number, columnNumber: number, sourceURL?: string, sourceLineNumber: number, sourceColumnNumber: number, name?: string, lastColumnNumber?: number}>} */
mappings() {
this.#ensureMappingsProcessed();
return this.#mappingsInternal ?? [];
}
reversedMappings(sourceURL) {
this.#ensureMappingsProcessed();
return this.#sourceInfos.get(sourceURL)?.reverseMappings ?? [];
}
#ensureMappingsProcessed() {
if (this.#mappingsInternal === null) {
this.#mappingsInternal = [];
this.eachSection(this.parseMap.bind(this));
// As per spec, mappings are not necessarily sorted.
this.mappings().sort(SourceMapEntry.compare);
this.#computeReverseMappings(this.#mappingsInternal);
this.#json = null;
}
}
#computeReverseMappings(mappings) {
const reverseMappingsPerUrl = new Map();
for (let i = 0; i < mappings.length; i++) {
const entryUrl = mappings[i].sourceURL;
if (!entryUrl) {
continue;
}
let reverseMap = reverseMappingsPerUrl.get(entryUrl);
if (!reverseMap) {
reverseMap = [];
reverseMappingsPerUrl.set(entryUrl, reverseMap);
}
reverseMap.push(i);
}
for (const [url, reverseMap] of reverseMappingsPerUrl.entries()) {
const info = this.#sourceInfos.get(url);
if (!info) {
continue;
}
reverseMap.sort(sourceMappingComparator);
info.reverseMappings = reverseMap;
}
function sourceMappingComparator(indexA, indexB) {
const a = mappings[indexA];
const b = mappings[indexB];
return a.sourceLineNumber - b.sourceLineNumber || a.sourceColumnNumber - b.sourceColumnNumber ||
a.lineNumber - b.lineNumber || a.columnNumber - b.columnNumber;
}
}
eachSection(callback) {
if (!this.#json) {
return;
}
if ('sections' in this.#json) {
for (const section of this.#json.sections) {
if ('map' in section) {
callback(section.map, section.offset.line, section.offset.column);
}
}
}
else {
callback(this.#json, 0, 0);
}
}
parseSources(sourceMap) {
const sourcesList = [];
const sourceRoot = sourceMap.sourceRoot ?? '';
const ignoreList = new Set(sourceMap.x_google_ignoreList);
for (let i = 0; i < sourceMap.sources.length; ++i) {
let href = sourceMap.sources[i];
// The source map v3 proposal says to prepend the sourceRoot to the source URL
// and if the resulting URL is not absolute, then resolve the source URL against
// the source map URL. Prepending the sourceRoot (if one exists) is not likely to
// be meaningful or useful if the source URL is already absolute though. In this
// case, use the source URL as is without prepending the sourceRoot.
if (Common.ParsedURL.ParsedURL.isRelativeURL(href)) {
if (sourceRoot && !sourceRoot.endsWith('/') && href && !href.startsWith('/')) {
href = sourceRoot.concat('/', href);
}
else {
href = sourceRoot.concat(href);
}
}
const url = '' || href;
const source = sourceMap.sourcesContent && sourceMap.sourcesContent[i];
sourcesList.push(url);
if (!this.#sourceInfos.has(url)) {
const content = source ?? null;
const ignoreListHint = ignoreList.has(i);
this.#sourceInfos.set(url, { content, ignoreListHint, reverseMappings: null });
}
}
sourceMapToSourceList.set(sourceMap, sourcesList);
}
parseMap(map, lineNumber, columnNumber) {
let sourceIndex = 0;
let sourceLineNumber = 0;
let sourceColumnNumber = 0;
let nameIndex = 0;
// TODO(crbug.com/1011811): refactor away map.
// `sources` can be undefined if it wasn't previously
// processed and added to the list. However, that
// is not WAI and we should make sure that we can
// only reach this point when we are certain
// we have the list available.
const sources = sourceMapToSourceList.get(map);
const names = map.names ?? [];
const stringCharIterator = new SourceMap.StringCharIterator(map.mappings);
let sourceURL = sources && sources[sourceIndex];
while (true) {
if (stringCharIterator.peek() === ',') {
stringCharIterator.next();
}
else {
while (stringCharIterator.peek() === ';') {
lineNumber += 1;
columnNumber = 0;
stringCharIterator.next();
}
if (!stringCharIterator.hasNext()) {
break;
}
}
columnNumber += this.decodeVLQ(stringCharIterator);
if (!stringCharIterator.hasNext() || this.isSeparator(stringCharIterator.peek())) {
this.mappings().push(new SourceMapEntry(lineNumber, columnNumber));
continue;
}
const sourceIndexDelta = this.decodeVLQ(stringCharIterator);
if (sourceIndexDelta) {
sourceIndex += sourceIndexDelta;
if (sources) {
sourceURL = sources[sourceIndex];
}
}
sourceLineNumber += this.decodeVLQ(stringCharIterator);
sourceColumnNumber += this.decodeVLQ(stringCharIterator);
if (!stringCharIterator.hasNext() || this.isSeparator(stringCharIterator.peek())) {
this.mappings().push(new SourceMapEntry(lineNumber, columnNumber, sourceURL, sourceLineNumber, sourceColumnNumber));
continue;
}
nameIndex += this.decodeVLQ(stringCharIterator);
this.mappings().push(new SourceMapEntry(lineNumber, columnNumber, sourceURL, sourceLineNumber, sourceColumnNumber, names[nameIndex]));
}
}
isSeparator(char) {
return char === ',' || char === ';';
}
decodeVLQ(stringCharIterator) {
// Read unsigned value.
let result = 0;
let shift = 0;
let digit = SourceMap._VLQ_CONTINUATION_MASK;
while (digit & SourceMap._VLQ_CONTINUATION_MASK) {
digit = base64Map.get(stringCharIterator.next()) || 0;
result += (digit & SourceMap._VLQ_BASE_MASK) << shift;
shift += SourceMap._VLQ_BASE_SHIFT;
}
// Fix the sign.
const negative = result & 1;
result >>= 1;
return negative ? -result : result;
}
mapsOrigin() {
const mappings = this.mappings();
if (mappings.length > 0) {
const firstEntry = mappings[0];
return firstEntry?.lineNumber === 0 || firstEntry.columnNumber === 0;
}
return false;
}
hasIgnoreListHint(sourceURL) {
return this.#sourceInfos.get(sourceURL)?.ignoreListHint ?? false;
}
/**
* Returns a list of ranges in the generated script for original sources that
* match a predicate. Each range is a [begin, end) pair, meaning that code at
* the beginning location, up to but not including the end location, matches
* the predicate.
*/
findRanges(predicate, options) {
const mappings = this.mappings();
const ranges = [];
if (!mappings.length) {
return [];
}
let current = null;
// If the first mapping isn't at the beginning of the original source, it's
// up to the caller to decide if it should be considered matching the
// predicate or not. By default, it's not.
if ((mappings[0].lineNumber !== 0 || mappings[0].columnNumber !== 0) && options?.isStartMatching) {
current = TextUtils.TextRange.TextRange.createUnboundedFromLocation(0, 0);
ranges.push(current);
}
for (const { sourceURL, lineNumber, columnNumber } of mappings) {
const ignoreListHint = sourceURL && predicate(sourceURL);
if (!current && ignoreListHint) {
current = TextUtils.TextRange.TextRange.createUnboundedFromLocation(lineNumber, columnNumber);
ranges.push(current);
continue;
}
if (current && !ignoreListHint) {
current.endLine = lineNumber;
current.endColumn = columnNumber;
current = null;
}
}
return ranges;
}
}
exports.SourceMap = SourceMap;
// Static helpers attached onto the SourceMap class: the VLQ decoding
// constants and the character iterator used while parsing the mappings field.
(function (SourceMap) {
    // Each VLQ digit carries 5 payload bits.
    // TODO(crbug.com/1172300) Ignored during the jsdoc to ts migration
    // eslint-disable-next-line @typescript-eslint/naming-convention
    SourceMap._VLQ_BASE_SHIFT = 5;
    // Mask selecting the 5 payload bits of a VLQ digit.
    // TODO(crbug.com/1172300) Ignored during the jsdoc to ts migration
    // eslint-disable-next-line @typescript-eslint/naming-convention
    SourceMap._VLQ_BASE_MASK = (1 << 5) - 1;
    // Bit 6 of a VLQ digit: set when another digit follows.
    // TODO(crbug.com/1172300) Ignored during the jsdoc to ts migration
    // eslint-disable-next-line @typescript-eslint/naming-convention
    SourceMap._VLQ_CONTINUATION_MASK = 1 << 5;
    /**
     * Simple forward-only cursor over a string; `next()` consumes one
     * character, `peek()` inspects it without consuming.
     */
    class StringCharIterator {
        string;
        position;
        constructor(string) {
            this.string = string;
            this.position = 0;
        }
        next() {
            return this.string.charAt(this.position++);
        }
        peek() {
            return this.string.charAt(this.position);
        }
        hasNext() {
            return this.position < this.string.length;
        }
    }
    SourceMap.StringCharIterator = StringCharIterator;
})(SourceMap = exports.SourceMap || (exports.SourceMap = {}));
// CommonJS entry point: the SourceMap class itself is the module export,
// with parseSourceMap attached as a property so callers can reach both.
module.exports = SourceMap;
SourceMap.parseSourceMap = parseSourceMap;

4
node_modules/lighthouse/core/lib/cdt/package.json generated vendored Normal file
View File

@@ -0,0 +1,4 @@
{
"type": "commonjs",
"//": "Temporary file until made esm"
}

41
node_modules/lighthouse/core/lib/csp-evaluator.d.ts generated vendored Normal file
View File

@@ -0,0 +1,41 @@
// NOTE(review): generated declaration file for core/lib/csp-evaluator.js —
// edit the .js source rather than these declarations.
export type Finding = import('csp_evaluator/finding').Finding;
/**
 * @param {Finding} finding
 * @return {LH.IcuMessage|string}
 */
export function getTranslatedDescription(finding: Finding): LH.IcuMessage | string;
/**
 * @param {string[]} rawCsps
 * @return {{bypasses: Finding[], warnings: Finding[], syntax: Finding[][]}}
 */
export function evaluateRawCspsForXss(rawCsps: string[]): {
    bypasses: Finding[];
    warnings: Finding[];
    syntax: Finding[][];
};
/**
 * @param {string} rawCsp
 */
export function parseCsp(rawCsp: string): import("csp_evaluator/dist/csp.js").Csp;
export namespace UIStrings {
    const missingBaseUri: string;
    const missingScriptSrc: string;
    const missingObjectSrc: string;
    const strictDynamic: string;
    const unsafeInline: string;
    const unsafeInlineFallback: string;
    const allowlistFallback: string;
    const reportToOnly: string;
    const reportingDestinationMissing: string;
    const nonceLength: string;
    const nonceCharset: string;
    const missingSemicolon: string;
    const unknownDirective: string;
    const unknownKeyword: string;
    const deprecatedReflectedXSS: string;
    const deprecatedReferrer: string;
    const deprecatedDisownOpener: string;
    const plainWildcards: string;
    const plainUrlScheme: string;
}
//# sourceMappingURL=csp-evaluator.d.ts.map

170
node_modules/lighthouse/core/lib/csp-evaluator.js generated vendored Normal file
View File

@@ -0,0 +1,170 @@
/**
* @license Copyright 2021 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
/** @typedef {import('csp_evaluator/finding').Finding} Finding */
import {
evaluateForFailure, evaluateForSyntaxErrors, evaluateForWarnings,
} from 'csp_evaluator/dist/lighthouse/lighthouse_checks.js';
import {Type} from 'csp_evaluator/dist/finding.js';
import {CspParser} from 'csp_evaluator/dist/parser.js';
import {Directive} from 'csp_evaluator/dist/csp.js';
import log from 'lighthouse-logger';
import * as i18n from '../lib/i18n/i18n.js';
import {isIcuMessage} from '../../shared/localization/format.js';
/**
 * Localized UI strings for CSP findings. The comment preceding each entry is
 * consumed by Lighthouse's i18n tooling as the translator description — keep
 * it in sync when editing the string.
 */
const UIStrings = {
    /** Message shown when a CSP does not have a base-uri directive. Shown in a table with a list of other CSP vulnerabilities and suggestions. "CSP" stands for "Content Security Policy". "base-uri", "'none'", and "'self'" do not need to be translated. */
    missingBaseUri: 'Missing base-uri allows injected <base> tags to set the base URL for all ' +
        'relative URLs (e.g. scripts) to an attacker controlled domain. ' +
        'Consider setting base-uri to \'none\' or \'self\'.',
    /** Message shown when a CSP does not have a script-src directive. Shown in a table with a list of other CSP vulnerabilities and suggestions. "CSP" stands for "Content Security Policy". "script-src" does not need to be translated. */
    missingScriptSrc: 'script-src directive is missing. ' +
        'This can allow the execution of unsafe scripts.',
    /** Message shown when a CSP does not have a script-src directive. Shown in a table with a list of other CSP vulnerabilities and suggestions. "CSP" stands for "Content Security Policy". "object-src" and "'none'" do not need to be translated. */
    missingObjectSrc: 'Missing object-src allows the injection of plugins ' +
        'that execute unsafe scripts. Consider setting object-src to \'none\' if you can.',
    /** Message shown when a CSP uses a domain allowlist to filter out malicious scripts. Shown in a table with a list of other CSP vulnerabilities and suggestions. "CSP" stands for "Content Security Policy". "CSP", "'strict-dynamic'", "nonces", and "hashes" do not need to be translated. "allowlists" can be interpreted as "whitelist". */
    strictDynamic: 'Host allowlists can frequently be bypassed. Consider using ' +
        'CSP nonces or hashes instead, along with \'strict-dynamic\' if necessary.',
    /** Message shown when a CSP allows inline scripts to be run in the page. Shown in a table with a list of other CSP vulnerabilities and suggestions. "CSP" stands for "Content Security Policy". "CSP", "'unsafe-inline'", "nonces", and "hashes" do not need to be translated. */
    unsafeInline: '\'unsafe-inline\' allows the execution of unsafe in-page scripts ' +
        'and event handlers. Consider using CSP nonces or hashes to allow scripts individually.',
    /** Message shown when a CSP is not backwards compatible with browsers that do not support CSP nonces/hashes. Shown in a table with a list of other CSP vulnerabilities and suggestions. "CSP" stands for "Content Security Policy". "'unsafe-inline'", "nonces", and "hashes" do not need to be translated. */
    unsafeInlineFallback: 'Consider adding \'unsafe-inline\' (ignored by browsers supporting ' +
        'nonces/hashes) to be backward compatible with older browsers.',
    /** Message shown when a CSP is not backwards compatible with browsers that do not support the 'strict-dynamic' keyword. Shown in a table with a list of other CSP vulnerabilities and suggestions. "CSP" stands for "Content Security Policy". "http:", "https:", and "'strict-dynamic'" do not need to be translated. */
    allowlistFallback: 'Consider adding https: and http: URL schemes (ignored by browsers ' +
        'supporting \'strict-dynamic\') to be backward compatible with older browsers.',
    /** Message shown when a CSP only provides a reporting destination through the report-to directive. Shown in a table with a list of other CSP vulnerabilities and suggestions. "CSP" stands for "Content Security Policy". "report-to", "report-uri", and "Chromium" do not need to be translated. */
    reportToOnly: 'The reporting destination is only configured via the report-to directive. ' +
        'This directive is only supported in Chromium-based browsers so it is ' +
        'recommended to also use a report-uri directive.',
    /** Message shown when a CSP does not provide a reporting destination. Shown in a table with a list of other CSP vulnerabilities and suggestions. "CSP" stands for "Content Security Policy". "CSP" does not need to be translated. */
    reportingDestinationMissing: 'No CSP configures a reporting destination. ' +
        'This makes it difficult to maintain the CSP over time and monitor for any breakages.',
    /** Message shown when a CSP nonce has less than 8 characters. Shown in a table with a list of other CSP vulnerabilities and suggestions. "CSP" stands for "Content Security Policy". "Nonces" does not need to be translated. */
    nonceLength: 'Nonces should be at least 8 characters long.',
    /** Message shown when a CSP nonce does not use teh base64 charset. Shown in a table with a list of other CSP vulnerabilities and suggestions. "CSP" stands for "Content Security Policy". "Nonces" and "base84" do not need to be translated. "charset" can be interpreted as "a set of characters". */
    nonceCharset: 'Nonces should use the base64 charset.',
    /**
     * @description Message shown when a CSP is missing a semicolon. Shown in a table with a list of other CSP vulnerabilities and suggestions. "CSP" stands for "Content Security Policy".
     * @example {'object-src'} keyword
     */
    missingSemicolon: 'Did you forget the semicolon? ' +
        '{keyword} seems to be a directive, not a keyword.',
    /** Message shown when a CSP contains an unknown keyword. Shown in a table with a list of other CSP vulnerabilities and suggestions. "CSP" stands for "Content Security Policy". "CSP" does not need to be translated. */
    unknownDirective: 'Unknown CSP directive.',
    /**
     * @description Message shown when a CSP contains an invalid keyword. Shown in a table with a list of other CSP vulnerabilities and suggestions. "CSP" stands for "Content Security Policy".
     * @example {'invalid-keyword'} keyword
     */
    unknownKeyword: '{keyword} seems to be an invalid keyword.',
    /** Message shown when a CSP uses the deprecated reflected-xss directive. Shown in a table with a list of other CSP vulnerabilities and suggestions. "CSP" stands for "Content Security Policy". "reflected-xss", "CSP2" and "X-XSS-Protection" do not need to be translated. */
    deprecatedReflectedXSS: 'reflected-xss is deprecated since CSP2. ' +
        'Please, use the X-XSS-Protection header instead.',
    /** Message shown when a CSP uses the deprecated referrer directive. Shown in a table with a list of other CSP vulnerabilities and suggestions. "CSP" stands for "Content Security Policy". "referrer", "CSP2" and "Referrer-Policy" do not need to be translated. */
    deprecatedReferrer: 'referrer is deprecated since CSP2. ' +
        'Please, use the Referrer-Policy header instead.',
    /** Message shown when a CSP uses the deprecated disown-opener directive. Shown in a table with a list of other CSP vulnerabilities and suggestions. "CSP" stands for "Content Security Policy". "disown-opener", "CSP3" and "Cross-Origin-Opener-Policy" do not need to be translated. */
    deprecatedDisownOpener: 'disown-opener is deprecated since CSP3. ' +
        'Please, use the Cross-Origin-Opener-Policy header instead.',
    /**
     * @description Message shown when a CSP wildcard allows unsafe scripts to be run in the page. Shown in a table with a list of other CSP vulnerabilities and suggestions. "CSP" stands for "Content Security Policy".
     * @example {*} keyword
     */
    plainWildcards: 'Avoid using plain wildcards ({keyword}) in this directive. ' +
        'Plain wildcards allow scripts to be sourced from an unsafe domain.',
    /**
     * @description Message shown when a CSP URL scheme allows unsafe scripts to be run in the page. Shown in a table with a list of other CSP vulnerabilities and suggestions. "CSP" stands for "Content Security Policy".
     * @example {https:} keyword
     */
    plainUrlScheme: 'Avoid using plain URL schemes ({keyword}) in this directive. ' +
        'Plain URL schemes allow scripts to be sourced from an unsafe domain.',
};
// i18n message factory bound to this file's UIStrings.
const str_ = i18n.createIcuMessageFn(import.meta.url, UIStrings);
// Lookup from evaluator finding type to its message. Values come in three
// shapes: a ready IcuMessage; a raw string still needing {keyword}
// substitution; or a per-directive record of IcuMessages.
/** @type {Record<number, string|LH.IcuMessage|Record<string, LH.IcuMessage>>} */
const FINDING_TO_UI_STRING = {
    [Type.MISSING_SEMICOLON]: UIStrings.missingSemicolon,
    [Type.UNKNOWN_DIRECTIVE]: str_(UIStrings.unknownDirective),
    [Type.INVALID_KEYWORD]: UIStrings.unknownKeyword,
    [Type.MISSING_DIRECTIVES]: {
        [Directive.BASE_URI]: str_(UIStrings.missingBaseUri),
        [Directive.SCRIPT_SRC]: str_(UIStrings.missingScriptSrc),
        [Directive.OBJECT_SRC]: str_(UIStrings.missingObjectSrc),
    },
    [Type.SCRIPT_UNSAFE_INLINE]: str_(UIStrings.unsafeInline),
    [Type.PLAIN_WILDCARD]: UIStrings.plainWildcards,
    [Type.PLAIN_URL_SCHEMES]: UIStrings.plainUrlScheme,
    [Type.NONCE_LENGTH]: str_(UIStrings.nonceLength),
    [Type.NONCE_CHARSET]: str_(UIStrings.nonceCharset),
    [Type.DEPRECATED_DIRECTIVE]: {
        [Directive.REFLECTED_XSS]: str_(UIStrings.deprecatedReflectedXSS),
        [Directive.REFERRER]: str_(UIStrings.deprecatedReferrer),
        [Directive.DISOWN_OPENER]: str_(UIStrings.deprecatedDisownOpener),
    },
    [Type.STRICT_DYNAMIC]: str_(UIStrings.strictDynamic),
    [Type.UNSAFE_INLINE_FALLBACK]: str_(UIStrings.unsafeInlineFallback),
    [Type.ALLOWLIST_FALLBACK]: str_(UIStrings.allowlistFallback),
    [Type.REPORTING_DESTINATION_MISSING]: str_(UIStrings.reportingDestinationMissing),
    [Type.REPORT_TO_ONLY]: str_(UIStrings.reportToOnly),
};
/**
 * Maps a CSP evaluator finding to a localized description. Falls back to the
 * evaluator's own (untranslated) description, with a logged warning, when no
 * translation is registered for the finding's type or directive.
 * @param {Finding} finding
 * @return {LH.IcuMessage|string}
 */
function getTranslatedDescription(finding) {
    const entry = FINDING_TO_UI_STRING[finding.type];
    if (!entry) {
        log.warn('CSP Evaluator', `No translation found for description: ${finding.description}`);
        return finding.description;
    }
    // Already a fully-formed i18n message: nothing left to substitute.
    if (isIcuMessage(entry)) return entry;
    // A raw UI string means `finding.value` must be interpolated into it.
    if (typeof entry === 'string') return str_(entry, {keyword: finding.value || ''});
    // Otherwise the entry is a record keyed by the finding's directive.
    const byDirective = entry[finding.directive];
    if (!byDirective) {
        log.warn('CSP Evaluator', `No translation found for description: ${finding.description}`);
        return finding.description;
    }
    return byDirective;
}
/**
 * Parses a raw Content-Security-Policy header value into a structured policy.
 * @param {string} rawCsp
 */
function parseCsp(rawCsp) {
    const parser = new CspParser(rawCsp);
    return parser.csp;
}
/**
 * Runs the CSP evaluator's XSS checks over every provided policy.
 * @param {string[]} rawCsps
 * @return {{bypasses: Finding[], warnings: Finding[], syntax: Finding[][]}}
 */
function evaluateRawCspsForXss(rawCsps) {
    const csps = rawCsps.map(parseCsp);
    return {
        bypasses: evaluateForFailure(csps),
        warnings: evaluateForWarnings(csps),
        syntax: evaluateForSyntaxErrors(csps),
    };
}
export {
getTranslatedDescription,
evaluateRawCspsForXss,
parseCsp,
UIStrings,
};

View File

@@ -0,0 +1,161 @@
// NOTE(review): generated declaration file for base-node.js — edit the .js
// source rather than these declarations.
/**
 * A union of all types derived from BaseNode, allowing type check discrimination
 * based on `node.type`. If a new node type is created, it should be added here.
 */
export type Node = import('./cpu-node.js').CPUNode | import('./network-node.js').NetworkNode;
/**
 * @license Copyright 2017 The Lighthouse Authors. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
 */
/**
 * A union of all types derived from BaseNode, allowing type check discrimination
 * based on `node.type`. If a new node type is created, it should be added here.
 * @typedef {import('./cpu-node.js').CPUNode | import('./network-node.js').NetworkNode} Node
 */
/**
 * @fileoverview This class encapsulates logic for handling resources and tasks used to model the
 * execution dependency graph of the page. A node has a unique identifier and can depend on other
 * nodes/be depended on. The construction of the graph maintains some important invariants that are
 * inherent to the model:
 *
 * 1. The graph is a DAG, there are no cycles.
 * 2. There is always a root node upon which all other nodes eventually depend.
 *
 * This allows particular optimizations in this class so that we do no need to check for cycles as
 * these methods are called and we can always start traversal at the root node.
 */
export class BaseNode {
    /**
     * Returns whether the given node has a cycle in its dependent graph by performing a DFS.
     * @param {Node} node
     * @param {'dependents'|'dependencies'|'both'} [direction]
     * @return {boolean}
     */
    static hasCycle(node: Node, direction?: "dependents" | "dependencies" | "both" | undefined): boolean;
    /**
     * @param {string} id
     */
    constructor(id: string);
    _id: string;
    _isMainDocument: boolean;
    /** @type {Node[]} */
    _dependents: Node[];
    /** @type {Node[]} */
    _dependencies: Node[];
    /**
     * @return {string}
     */
    get id(): string;
    /**
     * @return {typeof BaseNode.TYPES[keyof typeof BaseNode.TYPES]}
     */
    get type(): "network" | "cpu";
    /**
     * In microseconds
     * @return {number}
     */
    get startTime(): number;
    /**
     * In microseconds
     * @return {number}
     */
    get endTime(): number;
    /**
     * @param {boolean} value
     */
    setIsMainDocument(value: boolean): void;
    /**
     * @return {boolean}
     */
    isMainDocument(): boolean;
    /**
     * @return {Node[]}
     */
    getDependents(): Node[];
    /**
     * @return {number}
     */
    getNumberOfDependents(): number;
    /**
     * @return {Node[]}
     */
    getDependencies(): Node[];
    /**
     * @return {number}
     */
    getNumberOfDependencies(): number;
    /**
     * @return {Node}
     */
    getRootNode(): Node;
    /**
     * @param {Node} node
     */
    addDependent(node: Node): void;
    /**
     * @param {Node} node
     */
    addDependency(node: Node): void;
    /**
     * @param {Node} node
     */
    removeDependent(node: Node): void;
    /**
     * @param {Node} node
     */
    removeDependency(node: Node): void;
    removeAllDependencies(): void;
    /**
     * Computes whether the given node is anywhere in the dependency graph of this node.
     * While this method can prevent cycles, it walks the graph and should be used sparingly.
     * Nodes are always considered dependent on themselves for the purposes of cycle detection.
     * @param {BaseNode} node
     * @return {boolean}
     */
    isDependentOn(node: BaseNode): boolean;
    /**
     * Clones the node's information without adding any dependencies/dependents.
     * @return {Node}
     */
    cloneWithoutRelationships(): Node;
    /**
     * Clones the entire graph connected to this node filtered by the optional predicate. If a node is
     * included by the predicate, all nodes along the paths between the node and the root will be included. If the
     * node this was called on is not included in the resulting filtered graph, the method will throw.
     * @param {function(Node):boolean} [predicate]
     * @return {Node}
     */
    cloneWithRelationships(predicate?: ((arg0: Node) => boolean) | undefined): Node;
    /**
     * Traverses all connected nodes in BFS order, calling `callback` exactly once
     * on each. `traversalPath` is the shortest (though not necessarily unique)
     * path from `node` to the root of the iteration.
     *
     * The `getNextNodes` function takes a visited node and returns which nodes to
     * visit next. It defaults to returning the node's dependents.
     * @param {(node: Node, traversalPath: Node[]) => void} callback
     * @param {function(Node): Node[]} [getNextNodes]
     */
    traverse(callback: (node: Node, traversalPath: Node[]) => void, getNextNodes?: ((arg0: Node) => Node[]) | undefined): void;
    /**
     * @see BaseNode.traverse
     * @param {function(Node): Node[]} [getNextNodes]
     */
    traverseGenerator(getNextNodes?: ((arg0: Node) => Node[]) | undefined): Generator<{
        node: Node;
        traversalPath: Node[];
    }, void, unknown>;
    /**
     * @param {Node} node
     * @return {boolean}
     */
    canDependOn(node: Node): boolean;
}
export namespace BaseNode {
    namespace TYPES {
        const NETWORK: 'network';
        const CPU: 'cpu';
    }
}
//# sourceMappingURL=base-node.d.ts.map

View File

@@ -0,0 +1,364 @@
/**
* @license Copyright 2017 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
/**
* A union of all types derived from BaseNode, allowing type check discrimination
* based on `node.type`. If a new node type is created, it should be added here.
* @typedef {import('./cpu-node.js').CPUNode | import('./network-node.js').NetworkNode} Node
*/
/**
* @fileoverview This class encapsulates logic for handling resources and tasks used to model the
* execution dependency graph of the page. A node has a unique identifier and can depend on other
* nodes/be depended on. The construction of the graph maintains some important invariants that are
* inherent to the model:
*
* 1. The graph is a DAG, there are no cycles.
* 2. There is always a root node upon which all other nodes eventually depend.
*
* This allows particular optimizations in this class so that we do no need to check for cycles as
* these methods are called and we can always start traversal at the root node.
*/
class BaseNode {
    /**
     * @param {string} id
     */
    constructor(id) {
        this._id = id;
        this._isMainDocument = false;
        /** @type {Node[]} */
        this._dependents = [];
        /** @type {Node[]} */
        this._dependencies = [];
    }
    /**
     * @return {string}
     */
    get id() {
        return this._id;
    }
    /**
     * @return {typeof BaseNode.TYPES[keyof typeof BaseNode.TYPES]}
     */
    get type() {
        throw new Error('Unimplemented');
    }
    /**
     * In microseconds
     * @return {number}
     */
    get startTime() {
        throw new Error('Unimplemented');
    }
    /**
     * In microseconds
     * @return {number}
     */
    get endTime() {
        throw new Error('Unimplemented');
    }
    /**
     * @param {boolean} value
     */
    setIsMainDocument(value) {
        this._isMainDocument = value;
    }
    /**
     * @return {boolean}
     */
    isMainDocument() {
        return this._isMainDocument;
    }
    /**
     * @return {Node[]}
     */
    getDependents() {
        return this._dependents.slice();
    }
    /**
     * @return {number}
     */
    getNumberOfDependents() {
        return this._dependents.length;
    }
    /**
     * @return {Node[]}
     */
    getDependencies() {
        return this._dependencies.slice();
    }
    /**
     * @return {number}
     */
    getNumberOfDependencies() {
        return this._dependencies.length;
    }
    /**
     * @return {Node}
     */
    getRootNode() {
        let rootNode = /** @type {Node} */ (/** @type {BaseNode} */ (this));
        while (rootNode._dependencies.length) {
            rootNode = rootNode._dependencies[0];
        }
        return rootNode;
    }
    /**
     * @param {Node} node
     */
    addDependent(node) {
        node.addDependency(/** @type {Node} */ (/** @type {BaseNode} */ (this)));
    }
    /**
     * @param {Node} node
     */
    addDependency(node) {
        // @ts-expect-error - in checkJs, ts doesn't know that CPUNode and NetworkNode *are* BaseNodes.
        if (node === this) throw new Error('Cannot add dependency on itself');
        if (this._dependencies.includes(node)) {
            return;
        }
        node._dependents.push(/** @type {Node} */ (/** @type {BaseNode} */ (this)));
        this._dependencies.push(node);
    }
    /**
     * @param {Node} node
     */
    removeDependent(node) {
        node.removeDependency(/** @type {Node} */ (/** @type {BaseNode} */ (this)));
    }
    /**
     * @param {Node} node
     */
    removeDependency(node) {
        if (!this._dependencies.includes(node)) {
            return;
        }
        const thisIndex = node._dependents.indexOf(/** @type {Node} */ (/** @type {BaseNode} */(this)));
        node._dependents.splice(thisIndex, 1);
        this._dependencies.splice(this._dependencies.indexOf(node), 1);
    }
    removeAllDependencies() {
        for (const node of this._dependencies.slice()) {
            this.removeDependency(node);
        }
    }
    /**
     * Computes whether the given node is anywhere in the dependency graph of this node.
     * While this method can prevent cycles, it walks the graph and should be used sparingly.
     * Nodes are always considered dependent on themselves for the purposes of cycle detection.
     * @param {BaseNode} node
     * @return {boolean}
     */
    isDependentOn(node) {
        let isDependentOnNode = false;
        this.traverse(currentNode => {
            if (isDependentOnNode) return;
            isDependentOnNode = currentNode === node;
        }, currentNode => {
            // If we've already found the dependency, don't traverse further.
            if (isDependentOnNode) return [];
            // Otherwise, traverse the dependencies.
            return currentNode.getDependencies();
        });
        return isDependentOnNode;
    }
    /**
     * Clones the node's information without adding any dependencies/dependents.
     * @return {Node}
     */
    cloneWithoutRelationships() {
        const node = /** @type {Node} */ (new BaseNode(this.id));
        node.setIsMainDocument(this._isMainDocument);
        return node;
    }
    /**
     * Clones the entire graph connected to this node filtered by the optional predicate. If a node is
     * included by the predicate, all nodes along the paths between the node and the root will be included. If the
     * node this was called on is not included in the resulting filtered graph, the method will throw.
     * @param {function(Node):boolean} [predicate]
     * @return {Node}
     */
    cloneWithRelationships(predicate) {
        const rootNode = this.getRootNode();
        /** @type {Map<string, Node>} */
        const idsToIncludedClones = new Map();
        // Walk down dependents.
        rootNode.traverse(node => {
            if (idsToIncludedClones.has(node.id)) return;
            if (predicate === undefined) {
                // No condition for entry, so clone every node.
                idsToIncludedClones.set(node.id, node.cloneWithoutRelationships());
                return;
            }
            if (predicate(node)) {
                // Node included, so walk back up dependencies, cloning nodes from here back to the root.
                node.traverse(
                    node => idsToIncludedClones.set(node.id, node.cloneWithoutRelationships()),
                    // Dependencies already cloned have already cloned ancestors, so no need to visit again.
                    node => node._dependencies.filter(parent => !idsToIncludedClones.has(parent.id))
                );
            }
        });
        // Copy dependencies between nodes.
        rootNode.traverse(originalNode => {
            const clonedNode = idsToIncludedClones.get(originalNode.id);
            if (!clonedNode) return;
            for (const dependency of originalNode._dependencies) {
                const clonedDependency = idsToIncludedClones.get(dependency.id);
                if (!clonedDependency) throw new Error('Dependency somehow not cloned');
                clonedNode.addDependency(clonedDependency);
            }
        });
        const clonedThisNode = idsToIncludedClones.get(this.id);
        if (!clonedThisNode) throw new Error('Cloned graph missing node');
        return clonedThisNode;
    }
    /**
     * Traverses all connected nodes in BFS order, calling `callback` exactly once
     * on each. `traversalPath` is the shortest (though not necessarily unique)
     * path from `node` to the root of the iteration.
     *
     * The `getNextNodes` function takes a visited node and returns which nodes to
     * visit next. It defaults to returning the node's dependents.
     * @param {(node: Node, traversalPath: Node[]) => void} callback
     * @param {function(Node): Node[]} [getNextNodes]
     */
    traverse(callback, getNextNodes) {
        for (const {node, traversalPath} of this.traverseGenerator(getNextNodes)) {
            callback(node, traversalPath);
        }
    }
    /**
     * @see BaseNode.traverse
     * @param {function(Node): Node[]} [getNextNodes]
     */
    * traverseGenerator(getNextNodes) {
        if (!getNextNodes) {
            getNextNodes = node => node.getDependents();
        }
        // BFS worklist. A read cursor is used instead of Array.prototype.shift()
        // because shift() is O(n) per call, which made this traversal
        // accidentally O(n^2) on large graphs; the cursor keeps each dequeue
        // O(1) while preserving the exact FIFO visit order.
        /** @type {Node[][]} */
        // @ts-expect-error - only traverses graphs of Node, so force tsc to treat `this` as one
        const queue = [[this]];
        let readIndex = 0;
        const visited = new Set([this.id]);
        while (readIndex < queue.length) {
            const traversalPath = queue[readIndex++];
            const node = traversalPath[0];
            yield {node, traversalPath};
            for (const nextNode of getNextNodes(node)) {
                if (visited.has(nextNode.id)) continue;
                visited.add(nextNode.id);
                queue.push([nextNode, ...traversalPath]);
            }
        }
    }
    /**
     * Returns whether the given node has a cycle in its dependent graph by performing a DFS.
     * @param {Node} node
     * @param {'dependents'|'dependencies'|'both'} [direction]
     * @return {boolean}
     */
    static hasCycle(node, direction = 'both') {
        // Checking 'both' is the default entrypoint to recursively check both directions
        if (direction === 'both') {
            return BaseNode.hasCycle(node, 'dependents') || BaseNode.hasCycle(node, 'dependencies');
        }
        const visited = new Set();
        /** @type {Node[]} */
        const currentPath = [];
        const toVisit = [node];
        const depthAdded = new Map([[node, 0]]);
        // Keep going while we have nodes to visit in the stack
        while (toVisit.length) {
            // Get the last node in the stack (DFS uses stack, not queue)
            /** @type {Node} */
            // @ts-expect-error - toVisit has length so it's guaranteed to have an item
            const currentNode = toVisit.pop();
            // We've hit a cycle if the node we're visiting is in our current dependency path
            if (currentPath.includes(currentNode)) return true;
            // If we've already visited the node, no need to revisit it
            if (visited.has(currentNode)) continue;
            // Since we're visiting this node, clear out any nodes in our path that we had to backtrack
            // @ts-expect-error
            while (currentPath.length > depthAdded.get(currentNode)) currentPath.pop();
            // Update our data structures to reflect that we're adding this node to our path
            visited.add(currentNode);
            currentPath.push(currentNode);
            // Add all of its dependents to our toVisit stack
            const nodesToExplore = direction === 'dependents' ?
                currentNode._dependents :
                currentNode._dependencies;
            for (const nextNode of nodesToExplore) {
                if (toVisit.includes(nextNode)) continue;
                toVisit.push(nextNode);
                depthAdded.set(nextNode, currentPath.length);
            }
        }
        return false;
    }
    /**
     * A node can only depend on nodes that started at or before it did.
     * @param {Node} node
     * @return {boolean}
     */
    canDependOn(node) {
        return node.startTime <= this.startTime;
    }
}
BaseNode.TYPES = /** @type {{NETWORK: 'network', CPU: 'cpu'}} */({
    NETWORK: 'network',
    CPU: 'cpu',
});
export {BaseNode};

View File

@@ -0,0 +1,34 @@
// NOTE(review): generated declaration file for cpu-node.js — edit the .js
// source rather than these declarations.
export class CPUNode extends BaseNode {
    /**
     * @param {LH.TraceEvent} parentEvent
     * @param {LH.TraceEvent[]=} childEvents
     */
    constructor(parentEvent: LH.TraceEvent, childEvents?: LH.TraceEvent[] | undefined);
    _event: LH.TraceEvent;
    _childEvents: LH.TraceEvent[];
    get type(): "cpu";
    /**
     * @return {LH.TraceEvent}
     */
    get event(): LH.TraceEvent;
    /**
     * @return {LH.TraceEvent[]}
     */
    get childEvents(): LH.TraceEvent[];
    /**
     * Returns true if this node contains a Layout task.
     * @return {boolean}
     */
    didPerformLayout(): boolean;
    /**
     * Returns the script URLs that had their EvaluateScript events occur in this task.
     */
    getEvaluateScriptURLs(): Set<string>;
    /**
     * @return {CPUNode}
     */
    cloneWithoutRelationships(): CPUNode;
}
import { BaseNode } from './base-node.js';
import * as LH from '../../../types/lh.js';
//# sourceMappingURL=cpu-node.d.ts.map

View File

@@ -0,0 +1,86 @@
/**
* @license Copyright 2017 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import * as LH from '../../../types/lh.js';
import {BaseNode} from './base-node.js';
/**
 * Dependency-graph node backed by a main-thread trace event (and, optionally,
 * the trace events nested inside it).
 */
class CPUNode extends BaseNode {
    /**
     * @param {LH.TraceEvent} parentEvent
     * @param {LH.TraceEvent[]=} childEvents
     */
    constructor(parentEvent, childEvents = []) {
        // Thread id + timestamp uniquely identify the task within a trace.
        super(`${parentEvent.tid}.${parentEvent.ts}`);
        this._event = parentEvent;
        this._childEvents = childEvents;
    }
    get type() {
        return BaseNode.TYPES.CPU;
    }
    /**
     * @return {number}
     */
    get startTime() {
        return this._event.ts;
    }
    /**
     * @return {number}
     */
    get endTime() {
        const {ts, dur} = this._event;
        return ts + dur;
    }
    /**
     * @return {LH.TraceEvent}
     */
    get event() {
        return this._event;
    }
    /**
     * @return {LH.TraceEvent[]}
     */
    get childEvents() {
        return this._childEvents;
    }
    /**
     * Returns true if this node contains a Layout task.
     * @return {boolean}
     */
    didPerformLayout() {
        for (const event of this._childEvents) {
            if (event.name === 'Layout') return true;
        }
        return false;
    }
    /**
     * Returns the script URLs that had their EvaluateScript events occur in this task.
     */
    getEvaluateScriptURLs() {
        /** @type {Set<string>} */
        const urls = new Set();
        for (const event of this._childEvents) {
            if (event.name !== 'EvaluateScript') continue;
            const url = event.args.data?.url;
            if (url) urls.add(url);
        }
        return urls;
    }
    /**
     * @return {CPUNode}
     */
    cloneWithoutRelationships() {
        return new CPUNode(this._event, this._childEvents);
    }
}
export {CPUNode};

View File

@@ -0,0 +1,43 @@
// NOTE(review): generated declaration file for network-node.js — edit the .js
// source rather than these declarations.
export class NetworkNode extends BaseNode {
    /**
     * @param {LH.Artifacts.NetworkRequest} networkRecord
     */
    constructor(networkRecord: LH.Artifacts.NetworkRequest);
    /** @private */
    private _record;
    get type(): "network";
    /**
     * @return {LH.Artifacts.NetworkRequest}
     */
    get record(): NetworkRequest;
    /**
     * @return {string}
     */
    get initiatorType(): string;
    /**
     * @return {boolean}
     */
    get fromDiskCache(): boolean;
    /**
     * @return {boolean}
     */
    get isNonNetworkProtocol(): boolean;
    /**
     * Returns whether this network record can be downloaded without a TCP connection.
     * During simulation we treat data coming in over a network connection separately from on-device data.
     * @return {boolean}
     */
    get isConnectionless(): boolean;
    /**
     * @return {boolean}
     */
    hasRenderBlockingPriority(): boolean;
    /**
     * @return {NetworkNode}
     */
    cloneWithoutRelationships(): NetworkNode;
}
import { BaseNode } from './base-node.js';
import { NetworkRequest } from '../network-request.js';
import * as LH from '../../../types/lh.js';
//# sourceMappingURL=network-node.d.ts.map

View File

@@ -0,0 +1,99 @@
/**
* @license Copyright 2017 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import * as LH from '../../../types/lh.js';
import {BaseNode} from './base-node.js';
import {NetworkRequest} from '../network-request.js';
class NetworkNode extends BaseNode {
  /**
   * Dependency-graph node wrapping a single network request.
   * @param {LH.Artifacts.NetworkRequest} networkRecord
   */
  constructor(networkRecord) {
    super(networkRecord.requestId);
    /** @private */
    this._record = networkRecord;
  }

  get type() {
    return BaseNode.TYPES.NETWORK;
  }

  /**
   * @return {number}
   */
  get startTime() {
    return 1000 * this._record.networkRequestTime;
  }

  /**
   * @return {number}
   */
  get endTime() {
    return 1000 * this._record.networkEndTime;
  }

  /**
   * @return {LH.Artifacts.NetworkRequest}
   */
  get record() {
    return this._record;
  }

  /**
   * NOTE(review): yields undefined when the record has no initiator.
   * @return {string}
   */
  get initiatorType() {
    const initiator = this._record.initiator;
    return initiator && initiator.type;
  }

  /**
   * @return {boolean}
   */
  get fromDiskCache() {
    return Boolean(this._record.fromDiskCache);
  }

  /**
   * @return {boolean}
   */
  get isNonNetworkProtocol() {
    return NetworkRequest.isNonNetworkRequest(this._record);
  }

  /**
   * Returns whether this network record can be downloaded without a TCP connection.
   * During simulation we treat data coming in over a network connection separately from on-device data.
   * @return {boolean}
   */
  get isConnectionless() {
    return this.fromDiskCache || this.isNonNetworkProtocol;
  }

  /**
   * @return {boolean}
   */
  hasRenderBlockingPriority() {
    const {priority, resourceType} = this._record;
    if (priority === 'VeryHigh') return true;
    if (priority !== 'High') return false;
    return resourceType === NetworkRequest.TYPES.Script ||
      resourceType === NetworkRequest.TYPES.Document;
  }

  /**
   * Fresh NetworkNode over the same record with no graph edges attached.
   * @return {NetworkNode}
   */
  cloneWithoutRelationships() {
    const copy = new NetworkNode(this._record);
    copy.setIsMainDocument(this._isMainDocument);
    return copy;
  }
}
export {NetworkNode};

View File

@@ -0,0 +1,58 @@
// NOTE(review): tsc-generated declarations for connection-pool.js (see the
// sourceMappingURL comment below) — regenerate rather than hand-edit.
export class ConnectionPool {
    /**
     * @param {LH.Artifacts.NetworkRequest[]} records
     * @param {Required<LH.Gatherer.Simulation.Options>} options
     */
    constructor(records: LH.Artifacts.NetworkRequest[], options: Required<LH.Gatherer.Simulation.Options>);
    _options: Required<LH.Gatherer.Simulation.Options>;
    _records: import("../../network-request.js").NetworkRequest[];
    /** @type {Map<string, TcpConnection[]>} */
    _connectionsByOrigin: Map<string, TcpConnection[]>;
    /** @type {Map<LH.Artifacts.NetworkRequest, TcpConnection>} */
    _connectionsByRecord: Map<LH.Artifacts.NetworkRequest, TcpConnection>;
    _connectionsInUse: Set<any>;
    _connectionReusedByRequestId: Map<string, boolean>;
    /**
     * @return {TcpConnection[]}
     */
    connectionsInUse(): TcpConnection[];
    _initializeConnections(): void;
    /**
     * @param {Array<TcpConnection>} connections
     * @param {{ignoreConnectionReused?: boolean, observedConnectionWasReused: boolean}} options
     */
    _findAvailableConnectionWithLargestCongestionWindow(connections: Array<TcpConnection>, options: {
        ignoreConnectionReused?: boolean | undefined;
        observedConnectionWasReused: boolean;
    }): TcpConnection | null;
    /**
     * This method finds an available connection to the origin specified by the network record or null
     * if no connection was available. If returned, connection will not be available for other network
     * records until release is called.
     *
     * If ignoreConnectionReused is true, acquire will consider all connections not in use as available.
     * Otherwise, only connections that have matching "warmth" are considered available.
     *
     * @param {LH.Artifacts.NetworkRequest} record
     * @param {{ignoreConnectionReused?: boolean}} options
     * @return {?TcpConnection}
     */
    acquire(record: LH.Artifacts.NetworkRequest, options?: {
        ignoreConnectionReused?: boolean;
    }): TcpConnection | null;
    /**
     * Return the connection currently being used to fetch a record. If no connection
     * currently being used for this record, an error will be thrown.
     *
     * @param {LH.Artifacts.NetworkRequest} record
     * @return {TcpConnection}
     */
    acquireActiveConnectionFromRecord(record: LH.Artifacts.NetworkRequest): TcpConnection;
    /**
     * @param {LH.Artifacts.NetworkRequest} record
     */
    release(record: LH.Artifacts.NetworkRequest): void;
}
import * as LH from '../../../../types/lh.js';
import { TcpConnection } from './tcp-connection.js';
//# sourceMappingURL=connection-pool.d.ts.map

View File

@@ -0,0 +1,171 @@
/**
* @license Copyright 2018 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import * as LH from '../../../../types/lh.js';
import {NetworkAnalyzer} from './network-analyzer.js';
import {TcpConnection} from './tcp-connection.js';
const DEFAULT_SERVER_RESPONSE_TIME = 30;
const TLS_SCHEMES = ['https', 'wss'];
// Each origin can have 6 simultaneous connections open
// https://cs.chromium.org/chromium/src/net/socket/client_socket_pool_manager.cc?type=cs&q="int+g_max_sockets_per_group"
const CONNECTIONS_PER_ORIGIN = 6;
export class ConnectionPool {
  /**
   * Models the simulated pool of TCP connections, grouped by origin. Records
   * check connections out via acquire() and hand them back via release().
   * @param {LH.Artifacts.NetworkRequest[]} records
   * @param {Required<LH.Gatherer.Simulation.Options>} options
   */
  constructor(records, options) {
    this._options = options;
    this._records = records;
    /** @type {Map<string, TcpConnection[]>} */
    this._connectionsByOrigin = new Map();
    /** @type {Map<LH.Artifacts.NetworkRequest, TcpConnection>} */
    this._connectionsByRecord = new Map();
    // Connections currently checked out via acquire() and not yet released.
    this._connectionsInUse = new Set();
    // Coarse estimates are forced so reuse data is available even when the
    // protocol-level connection information is untrustworthy.
    this._connectionReusedByRequestId = NetworkAnalyzer.estimateIfConnectionWasReused(records, {
      forceCoarseEstimates: true,
    });
    this._initializeConnections();
  }
  /**
   * Snapshot of the connections currently checked out.
   * @return {TcpConnection[]}
   */
  connectionsInUse() {
    return Array.from(this._connectionsInUse);
  }
  /**
   * Builds the per-origin connection lists: one connection per request that did
   * not reuse an existing connection, padded by cloning up to the per-origin
   * limit (unless the origin is H2, which multiplexes over one connection).
   */
  _initializeConnections() {
    const connectionReused = this._connectionReusedByRequestId;
    const additionalRttByOrigin = this._options.additionalRttByOrigin;
    const serverResponseTimeByOrigin = this._options.serverResponseTimeByOrigin;
    const recordsByOrigin = NetworkAnalyzer.groupByOrigin(this._records);
    for (const [origin, records] of recordsByOrigin.entries()) {
      const connections = [];
      const additionalRtt = additionalRttByOrigin.get(origin) || 0;
      const responseTime = serverResponseTimeByOrigin.get(origin) || DEFAULT_SERVER_RESPONSE_TIME;
      for (const record of records) {
        // Reused connections don't contribute a new connection to the pool.
        if (connectionReused.get(record.requestId)) continue;
        const isTLS = TLS_SCHEMES.includes(record.parsedURL.scheme);
        const isH2 = record.protocol === 'h2';
        const connection = new TcpConnection(
          this._options.rtt + additionalRtt,
          this._options.throughput,
          responseTime,
          isTLS,
          isH2
        );
        connections.push(connection);
      }
      if (!connections.length) {
        throw new Error(`Could not find a connection for origin: ${origin}`);
      }
      // Make sure each origin has minimum number of connections available for max throughput.
      // But only if it's not over H2 which maximizes throughput already.
      const minConnections = connections[0].isH2() ? 1 : CONNECTIONS_PER_ORIGIN;
      while (connections.length < minConnections) connections.push(connections[0].clone());
      this._connectionsByOrigin.set(origin, connections);
    }
  }
  /**
   * Picks the best available connection: largest congestion window among those
   * not in use whose "warmth" matches the observed reuse state (unless
   * ignoreConnectionReused is set).
   * @param {Array<TcpConnection>} connections
   * @param {{ignoreConnectionReused?: boolean, observedConnectionWasReused: boolean}} options
   */
  _findAvailableConnectionWithLargestCongestionWindow(connections, options) {
    const {ignoreConnectionReused, observedConnectionWasReused} = options;
    /** @type {TcpConnection|null} */
    let maxConnection = null;
    for (let i = 0; i < connections.length; i++) {
      const connection = connections[i];
      // Normally, we want to make sure the connection warmth matches the state of the record
      // we're acquiring for. Do this check first since it's the common case and cheaper than our
      // "in use" check below.
      // Use the _warmed property instead of the getter because this is a surprisingly hot code path.
      if (!ignoreConnectionReused && connection._warmed !== observedConnectionWasReused) {
        continue;
      }
      // Connections that are in use are never available.
      if (this._connectionsInUse.has(connection)) {
        continue;
      }
      // This connection is a match and is available! Update our max if it has a larger congestionWindow
      const currentMax = (maxConnection?.congestionWindow) || -Infinity;
      if (connection.congestionWindow > currentMax) maxConnection = connection;
    }
    return maxConnection;
  }
  /**
   * This method finds an available connection to the origin specified by the network record or null
   * if no connection was available. If returned, connection will not be available for other network
   * records until release is called.
   *
   * If ignoreConnectionReused is true, acquire will consider all connections not in use as available.
   * Otherwise, only connections that have matching "warmth" are considered available.
   *
   * @param {LH.Artifacts.NetworkRequest} record
   * @param {{ignoreConnectionReused?: boolean}} options
   * @return {?TcpConnection}
   */
  acquire(record, options = {}) {
    if (this._connectionsByRecord.has(record)) throw new Error('Record already has a connection');
    const origin = record.parsedURL.securityOrigin;
    const observedConnectionWasReused = !!this._connectionReusedByRequestId.get(record.requestId);
    const connections = this._connectionsByOrigin.get(origin) || [];
    const connectionToUse = this._findAvailableConnectionWithLargestCongestionWindow(connections, {
      ignoreConnectionReused: options.ignoreConnectionReused,
      observedConnectionWasReused,
    });
    if (!connectionToUse) return null;
    this._connectionsInUse.add(connectionToUse);
    this._connectionsByRecord.set(record, connectionToUse);
    return connectionToUse;
  }
  /**
   * Return the connection currently being used to fetch a record. If no connection
   * currently being used for this record, an error will be thrown.
   *
   * @param {LH.Artifacts.NetworkRequest} record
   * @return {TcpConnection}
   */
  acquireActiveConnectionFromRecord(record) {
    const activeConnection = this._connectionsByRecord.get(record);
    if (!activeConnection) throw new Error('Could not find an active connection for record');
    return activeConnection;
  }
  /**
   * Returns the record's connection to the pool. Safe to call even if the
   * record never acquired one (Map/Set deletes of missing keys are no-ops).
   * @param {LH.Artifacts.NetworkRequest} record
   */
  release(record) {
    const connection = this._connectionsByRecord.get(record);
    this._connectionsByRecord.delete(record);
    this._connectionsInUse.delete(connection);
  }
}

View File

@@ -0,0 +1,42 @@
// NOTE(review): tsc-generated declarations for dns-cache.js (see the
// sourceMappingURL comment below) — regenerate rather than hand-edit.
export class DNSCache {
    /**
     * @param {{rtt: number}} options
     */
    constructor({ rtt }: {
        rtt: number;
    });
    _rtt: number;
    /** @type {Map<string, {resolvedAt: number}>} */
    _resolvedDomainNames: Map<string, {
        resolvedAt: number;
    }>;
    /**
     * @param {LH.Artifacts.NetworkRequest} request
     * @param {{requestedAt: number, shouldUpdateCache: boolean}=} options
     * @return {number}
     */
    getTimeUntilResolution(request: LH.Artifacts.NetworkRequest, options?: {
        requestedAt: number;
        shouldUpdateCache: boolean;
    } | undefined): number;
    /**
     * @param {LH.Artifacts.NetworkRequest} request
     * @param {number} resolvedAt
     */
    _updateCacheResolvedAtIfNeeded(request: LH.Artifacts.NetworkRequest, resolvedAt: number): void;
    /**
     * Forcefully sets the DNS resolution time for a record.
     * Useful for testing and alternate execution simulations.
     *
     * @param {string} domain
     * @param {number} resolvedAt
     */
    setResolvedAt(domain: string, resolvedAt: number): void;
}
// Re-exports the lookup-cost multiplier so callers can reference DNSCache.RTT_MULTIPLIER.
export namespace DNSCache {
    export { DNS_RESOLUTION_RTT_MULTIPLIER as RTT_MULTIPLIER };
}
import * as LH from '../../../../types/lh.js';
declare const DNS_RESOLUTION_RTT_MULTIPLIER: 2;
export {};
//# sourceMappingURL=dns-cache.d.ts.map

View File

@@ -0,0 +1,74 @@
/**
* @license Copyright 2018 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import * as LH from '../../../../types/lh.js';
// A DNS lookup will usually take ~1-2 roundtrips of connection latency plus the extra DNS routing time.
// Example: https://www.webpagetest.org/result/180703_3A_e33ec79747c002ed4d7bcbfc81462203/1/details/#waterfall_view_step1
// Example: https://www.webpagetest.org/result/180707_1M_89673eb633b5d98386de95dfcf9b33d5/1/details/#waterfall_view_step1
// DNS is highly variable though, many times it's a little more than 1, but can easily be 4-5x RTT.
// We'll use 2 since it seems to give the most accurate results on average, but this can be tweaked.
const DNS_RESOLUTION_RTT_MULTIPLIER = 2;
class DNSCache {
  /**
   * Simulates DNS resolution latency with a per-domain cache of lookups.
   * @param {{rtt: number}} options
   */
  constructor({rtt}) {
    this._rtt = rtt;
    /** @type {Map<string, {resolvedAt: number}>} */
    this._resolvedDomainNames = new Map();
  }

  /**
   * Returns how long the given request must wait on DNS resolution, consulting
   * (and optionally updating) the cache of prior lookups for the same domain.
   * @param {LH.Artifacts.NetworkRequest} request
   * @param {{requestedAt: number, shouldUpdateCache: boolean}=} options
   * @return {number}
   */
  getTimeUntilResolution(request, options) {
    const {requestedAt = 0, shouldUpdateCache = false} = options || {};
    const domain = request.parsedURL.host;
    // A cold lookup costs a fixed multiple of the round-trip time.
    let timeUntilResolved = this._rtt * DNSCache.RTT_MULTIPLIER;
    const cached = this._resolvedDomainNames.get(domain);
    if (cached) {
      // A lookup for this domain already resolved (or is resolving); wait only
      // for whatever remains of it, never longer than a cold lookup.
      const remaining = cached.resolvedAt > requestedAt ? cached.resolvedAt - requestedAt : 0;
      if (remaining < timeUntilResolved) timeUntilResolved = remaining;
    }
    if (shouldUpdateCache) {
      this._updateCacheResolvedAtIfNeeded(request, requestedAt + timeUntilResolved);
    }
    return timeUntilResolved;
  }

  /**
   * Records the resolution time for the request's domain, keeping the earliest
   * known resolution when an entry already exists.
   * @param {LH.Artifacts.NetworkRequest} request
   * @param {number} resolvedAt
   */
  _updateCacheResolvedAtIfNeeded(request, resolvedAt) {
    const domain = request.parsedURL.host;
    const existing = this._resolvedDomainNames.get(domain);
    if (existing) {
      existing.resolvedAt = Math.min(existing.resolvedAt, resolvedAt);
      this._resolvedDomainNames.set(domain, existing);
    } else {
      this._resolvedDomainNames.set(domain, {resolvedAt});
    }
  }

  /**
   * Forcefully sets the DNS resolution time for a record.
   * Useful for testing and alternate execution simulations.
   * @param {string} domain
   * @param {number} resolvedAt
   */
  setResolvedAt(domain, resolvedAt) {
    this._resolvedDomainNames.set(domain, {resolvedAt});
  }
}
DNSCache.RTT_MULTIPLIER = DNS_RESOLUTION_RTT_MULTIPLIER;
export {DNSCache};

View File

@@ -0,0 +1,170 @@
// NOTE(review): tsc-generated declarations for network-analyzer.js (see the
// sourceMappingURL comment below) — regenerate rather than hand-edit.
export type Summary = {
    min: number;
    max: number;
    avg: number;
    median: number;
};
export type RTTEstimateOptions = {
    /**
     * TCP connection handshake information will be used when available, but in some circumstances this data can be unreliable. This flag exposes an option to ignore the handshake data and use the coarse download/TTFB timing data.
     */
    forceCoarseEstimates?: boolean | undefined;
    /**
     * Coarse estimates include lots of extra time and noise; multiply by some factor to deflate the estimates a bit.
     */
    coarseEstimateMultiplier?: number | undefined;
    /**
     * Useful for testing to isolate the different methods of estimation.
     */
    useDownloadEstimates?: boolean | undefined;
    /**
     * Useful for testing to isolate the different methods of estimation.
     */
    useSendStartEstimates?: boolean | undefined;
    /**
     * Useful for testing to isolate the different methods of estimation.
     */
    useHeadersEndEstimates?: boolean | undefined;
};
export class NetworkAnalyzer {
    /**
     * Sentinel map key under which the all-origins summary is stored.
     * @return {string}
     */
    static get SUMMARY(): string;
    /**
     * @param {LH.Artifacts.NetworkRequest[]} records
     * @return {Map<string, LH.Artifacts.NetworkRequest[]>}
     */
    static groupByOrigin(records: LH.Artifacts.NetworkRequest[]): Map<string, LH.Artifacts.NetworkRequest[]>;
    /**
     * @param {number[]} values
     * @return {Summary}
     */
    static getSummary(values: number[]): Summary;
    /**
     * @param {Map<string,number[]>} values
     * @return {Map<string, Summary>}
     */
    static summarize(values: Map<string, number[]>): Map<string, Summary>;
    /** @typedef {{record: LH.Artifacts.NetworkRequest, timing: LH.Crdp.Network.ResourceTiming, connectionReused?: boolean}} RequestInfo */
    /**
     * @param {LH.Artifacts.NetworkRequest[]} records
     * @param {(e: RequestInfo) => number | number[] | undefined} iteratee
     * @return {Map<string, number[]>}
     */
    static _estimateValueByOrigin(records: LH.Artifacts.NetworkRequest[], iteratee: (e: {
        record: LH.Artifacts.NetworkRequest;
        timing: LH.Crdp.Network.ResourceTiming;
        connectionReused?: boolean | undefined;
    }) => number | number[] | undefined): Map<string, number[]>;
    /**
     * Estimates the observed RTT to each origin based on how long the connection setup.
     * For h1 and h2, this could include two estimates - one for the TCP handshake, another for
     * SSL negotiation.
     * For h3, we get only one estimate since QUIC establishes a secure connection in a
     * single handshake.
     * This is the most accurate and preferred method of measurement when the data is available.
     *
     * @param {LH.Artifacts.NetworkRequest[]} records
     * @return {Map<string, number[]>}
     */
    static _estimateRTTByOriginViaConnectionTiming(records: LH.Artifacts.NetworkRequest[]): Map<string, number[]>;
    /**
     * Estimates the observed RTT to each origin based on how long a download took on a fresh connection.
     * NOTE: this will tend to overestimate the actual RTT quite significantly as the download can be
     * slow for other reasons as well such as bandwidth constraints.
     *
     * @param {LH.Artifacts.NetworkRequest[]} records
     * @return {Map<string, number[]>}
     */
    static _estimateRTTByOriginViaDownloadTiming(records: LH.Artifacts.NetworkRequest[]): Map<string, number[]>;
    /**
     * Estimates the observed RTT to each origin based on how long it took until Chrome could
     * start sending the actual request when a new connection was required.
     * NOTE: this will tend to overestimate the actual RTT as the request can be delayed for other
     * reasons as well such as more SSL handshakes if TLS False Start is not enabled.
     *
     * @param {LH.Artifacts.NetworkRequest[]} records
     * @return {Map<string, number[]>}
     */
    static _estimateRTTByOriginViaSendStartTiming(records: LH.Artifacts.NetworkRequest[]): Map<string, number[]>;
    /**
     * Estimates the observed RTT to each origin based on how long it took until Chrome received the
     * headers of the response (~TTFB).
     * NOTE: this is the most inaccurate way to estimate the RTT, but in some environments it's all
     * we have access to :(
     *
     * @param {LH.Artifacts.NetworkRequest[]} records
     * @return {Map<string, number[]>}
     */
    static _estimateRTTByOriginViaHeadersEndTiming(records: LH.Artifacts.NetworkRequest[]): Map<string, number[]>;
    /**
     * Given the RTT to each origin, estimates the observed server response times.
     *
     * @param {LH.Artifacts.NetworkRequest[]} records
     * @param {Map<string, number>} rttByOrigin
     * @return {Map<string, number[]>}
     */
    static _estimateResponseTimeByOrigin(records: LH.Artifacts.NetworkRequest[], rttByOrigin: Map<string, number>): Map<string, number[]>;
    /**
     * @param {LH.Artifacts.NetworkRequest[]} records
     * @return {boolean}
     */
    static canTrustConnectionInformation(records: LH.Artifacts.NetworkRequest[]): boolean;
    /**
     * Returns a map of requestId -> connectionReused, estimating the information if the information
     * available in the records themselves appears untrustworthy.
     *
     * @param {LH.Artifacts.NetworkRequest[]} records
     * @param {{forceCoarseEstimates: boolean}} [options]
     * @return {Map<string, boolean>}
     */
    static estimateIfConnectionWasReused(records: LH.Artifacts.NetworkRequest[], options?: {
        forceCoarseEstimates: boolean;
    } | undefined): Map<string, boolean>;
    /**
     * Estimates the RTT to each origin by examining observed network timing information.
     * Attempts to use the most accurate information first and falls back to coarser estimates when it
     * is unavailable.
     *
     * @param {LH.Artifacts.NetworkRequest[]} records
     * @param {RTTEstimateOptions} [options]
     * @return {Map<string, Summary>}
     */
    static estimateRTTByOrigin(records: LH.Artifacts.NetworkRequest[], options?: RTTEstimateOptions | undefined): Map<string, Summary>;
    /**
     * Estimates the server response time of each origin. RTT times can be passed in or will be
     * estimated automatically if not provided.
     *
     * @param {LH.Artifacts.NetworkRequest[]} records
     * @param {RTTEstimateOptions & {rttByOrigin?: Map<string, number>}} [options]
     * @return {Map<string, Summary>}
     */
    static estimateServerResponseTimeByOrigin(records: LH.Artifacts.NetworkRequest[], options?: (RTTEstimateOptions & {
        rttByOrigin?: Map<string, number> | undefined;
    }) | undefined): Map<string, Summary>;
    /**
     * Computes the average throughput for the given records in bits/second.
     * Excludes data URI, failed or otherwise incomplete, and cached requests.
     * Returns Infinity if there were no analyzable network records.
     *
     * @param {Array<LH.Artifacts.NetworkRequest>} networkRecords
     * @return {number}
     */
    static estimateThroughput(networkRecords: Array<LH.Artifacts.NetworkRequest>): number;
    /**
     * @param {Array<LH.Artifacts.NetworkRequest>} records
     * @param {string} resourceUrl
     * @return {LH.Artifacts.NetworkRequest|undefined}
     */
    static findResourceForUrl(records: Array<LH.Artifacts.NetworkRequest>, resourceUrl: string): LH.Artifacts.NetworkRequest | undefined;
    /**
     * Resolves redirect chain given a main document.
     * See: {@link NetworkAnalyzer.findResourceForUrl}) for how to retrieve main document.
     *
     * @param {LH.Artifacts.NetworkRequest} request
     * @return {LH.Artifacts.NetworkRequest}
     */
    static resolveRedirects(request: LH.Artifacts.NetworkRequest): LH.Artifacts.NetworkRequest;
}
//# sourceMappingURL=network-analyzer.d.ts.map

View File

@@ -0,0 +1,511 @@
/**
* @license Copyright 2018 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import UrlUtils from '../../url-utils.js';
const INITIAL_CWD = 14 * 1024;
// Assume that 40% of TTFB was server response time by default for static assets
const DEFAULT_SERVER_RESPONSE_PERCENTAGE = 0.4;
/**
* For certain resource types, server response time takes up a greater percentage of TTFB (dynamic
* assets like HTML documents, XHR/API calls, etc)
* @type {Partial<Record<LH.Crdp.Network.ResourceType, number>>}
*/
const SERVER_RESPONSE_PERCENTAGE_OF_TTFB = {
Document: 0.9,
XHR: 0.9,
Fetch: 0.9,
};
class NetworkAnalyzer {
/**
 * Sentinel map key under which `summarize` stores the combined all-origins
 * summary, alongside the per-origin entries.
 * @return {string}
 */
static get SUMMARY() {
  return '__SUMMARY__';
}
/**
* @param {LH.Artifacts.NetworkRequest[]} records
* @return {Map<string, LH.Artifacts.NetworkRequest[]>}
*/
static groupByOrigin(records) {
const grouped = new Map();
records.forEach(item => {
const key = item.parsedURL.securityOrigin;
const group = grouped.get(key) || [];
group.push(item);
grouped.set(key, group);
});
return grouped;
}
/**
* @param {number[]} values
* @return {Summary}
*/
static getSummary(values) {
values.sort((a, b) => a - b);
let median;
if (values.length === 0) {
median = values[0];
} else if (values.length % 2 === 0) {
const a = values[Math.floor((values.length - 1) / 2)];
const b = values[Math.floor((values.length - 1) / 2) + 1];
median = (a + b) / 2;
} else {
median = values[Math.floor((values.length - 1) / 2)];
}
return {
min: values[0],
max: values[values.length - 1],
avg: values.reduce((a, b) => a + b, 0) / values.length,
median,
};
}
/**
* @param {Map<string,number[]>} values
* @return {Map<string, Summary>}
*/
static summarize(values) {
const summaryByKey = new Map();
const allEstimates = [];
for (const [key, estimates] of values) {
summaryByKey.set(key, NetworkAnalyzer.getSummary(estimates));
allEstimates.push(...estimates);
}
summaryByKey.set(NetworkAnalyzer.SUMMARY, NetworkAnalyzer.getSummary(allEstimates));
return summaryByKey;
}
/** @typedef {{record: LH.Artifacts.NetworkRequest, timing: LH.Crdp.Network.ResourceTiming, connectionReused?: boolean}} RequestInfo */
/**
* @param {LH.Artifacts.NetworkRequest[]} records
* @param {(e: RequestInfo) => number | number[] | undefined} iteratee
* @return {Map<string, number[]>}
*/
static _estimateValueByOrigin(records, iteratee) {
const connectionWasReused = NetworkAnalyzer.estimateIfConnectionWasReused(records);
const groupedByOrigin = NetworkAnalyzer.groupByOrigin(records);
const estimates = new Map();
for (const [origin, originRecords] of groupedByOrigin.entries()) {
/** @type {number[]} */
let originEstimates = [];
for (const record of originRecords) {
const timing = record.timing;
if (!timing) continue;
const value = iteratee({
record,
timing,
connectionReused: connectionWasReused.get(record.requestId),
});
if (typeof value !== 'undefined') {
originEstimates = originEstimates.concat(value);
}
}
if (!originEstimates.length) continue;
estimates.set(origin, originEstimates);
}
return estimates;
}
/**
 * Estimates the observed RTT to each origin based on how long the connection setup took.
 * For h1 and h2, this could include two estimates - one for the TCP handshake, another for
 * SSL negotiation.
 * For h3, we get only one estimate since QUIC establishes a secure connection in a
 * single handshake.
 * This is the most accurate and preferred method of measurement when the data is available.
 *
 * @param {LH.Artifacts.NetworkRequest[]} records
 * @return {Map<string, number[]>}
 */
static _estimateRTTByOriginViaConnectionTiming(records) {
  return NetworkAnalyzer._estimateValueByOrigin(records, ({timing, connectionReused, record}) => {
    // Reused connections paid no setup cost, so they tell us nothing here.
    if (connectionReused) return;
    // In LR, network records are missing connection timing, but we've smuggled it in via headers.
    if (global.isLightrider && record.lrStatistics) {
      if (record.protocol.startsWith('h3')) {
        return record.lrStatistics.TCPMs;
      } else {
        // Split the combined connection time evenly into two RTT samples.
        return [record.lrStatistics.TCPMs / 2, record.lrStatistics.TCPMs / 2];
      }
    }
    const {connectStart, sslStart, sslEnd, connectEnd} = timing;
    if (connectEnd >= 0 && connectStart >= 0 && record.protocol.startsWith('h3')) {
      // These values are equal to sslStart and sslEnd for h3.
      return connectEnd - connectStart;
    } else if (sslStart >= 0 && sslEnd >= 0 && sslStart !== connectStart) {
      // SSL can also be more than 1 RT but assume False Start was used.
      return [connectEnd - sslStart, sslStart - connectStart];
    } else if (connectStart >= 0 && connectEnd >= 0) {
      return connectEnd - connectStart;
    }
    // No usable timing: implicitly return undefined so the record is skipped.
  });
}
/**
 * Estimates the observed RTT to each origin based on how long a download took on a fresh connection.
 * NOTE: this will tend to overestimate the actual RTT quite significantly as the download can be
 * slow for other reasons as well such as bandwidth constraints.
 *
 * @param {LH.Artifacts.NetworkRequest[]} records
 * @return {Map<string, number[]>}
 */
static _estimateRTTByOriginViaDownloadTiming(records) {
  return NetworkAnalyzer._estimateValueByOrigin(records, ({record, timing, connectionReused}) => {
    if (connectionReused) return;
    // Only look at downloads that went past the initial congestion window
    if (record.transferSize <= INITIAL_CWD) return;
    if (!Number.isFinite(timing.receiveHeadersEnd) || timing.receiveHeadersEnd < 0) return;
    // Compute the amount of time downloading everything after the first congestion window took
    const totalTime = record.networkEndTime - record.networkRequestTime;
    const downloadTimeAfterFirstByte = totalTime - timing.receiveHeadersEnd;
    // log2 of the transfer/window ratio approximates the number of window
    // doublings (~1 RTT each) needed beyond the initial window.
    const numberOfRoundTrips = Math.log2(record.transferSize / INITIAL_CWD);
    // Ignore requests that required a high number of round trips since bandwidth starts to play
    // a larger role than latency
    if (numberOfRoundTrips > 5) return;
    return downloadTimeAfterFirstByte / numberOfRoundTrips;
  });
}
/**
 * Estimates the observed RTT to each origin based on how long it took until Chrome could
 * start sending the actual request when a new connection was required.
 * NOTE: this will tend to overestimate the actual RTT as the request can be delayed for other
 * reasons as well such as more SSL handshakes if TLS False Start is not enabled.
 *
 * @param {LH.Artifacts.NetworkRequest[]} records
 * @return {Map<string, number[]>}
 */
static _estimateRTTByOriginViaSendStartTiming(records) {
  return NetworkAnalyzer._estimateValueByOrigin(records, ({record, timing, connectionReused}) => {
    if (connectionReused) return;
    if (!Number.isFinite(timing.sendStart) || timing.sendStart < 0) return;
    // Assume everything before sendStart was just DNS + (SSL)? + TCP handshake
    // 1 RT for DNS, 1 RT (maybe) for SSL, 1 RT for TCP
    let roundTrips = 1;
    if (!record.protocol.startsWith('h3')) roundTrips += 1; // TCP
    if (record.parsedURL.scheme === 'https') roundTrips += 1; // SSL
    return timing.sendStart / roundTrips;
  });
}
/**
 * Estimates the observed RTT to each origin based on how long it took until Chrome received the
 * headers of the response (~TTFB).
 * NOTE: this is the most inaccurate way to estimate the RTT, but in some environments it's all
 * we have access to :(
 *
 * @param {LH.Artifacts.NetworkRequest[]} records
 * @return {Map<string, number[]>}
 */
static _estimateRTTByOriginViaHeadersEndTiming(records) {
  return NetworkAnalyzer._estimateValueByOrigin(records, ({record, timing, connectionReused}) => {
    if (!Number.isFinite(timing.receiveHeadersEnd) || timing.receiveHeadersEnd < 0) return;
    if (!record.resourceType) return;
    // Dynamic resource types (Document/XHR/Fetch) attribute more of TTFB to
    // server time than static assets do.
    const serverResponseTimePercentage =
      SERVER_RESPONSE_PERCENTAGE_OF_TTFB[record.resourceType] ||
      DEFAULT_SERVER_RESPONSE_PERCENTAGE;
    const estimatedServerResponseTime = timing.receiveHeadersEnd * serverResponseTimePercentage;
    // When connection was reused...
    // TTFB = 1 RT for request + server response time
    let roundTrips = 1;
    // When connection was fresh...
    // TTFB = DNS + (SSL)? + TCP handshake + 1 RT for request + server response time
    if (!connectionReused) {
      roundTrips += 1; // DNS
      if (!record.protocol.startsWith('h3')) roundTrips += 1; // TCP
      if (record.parsedURL.scheme === 'https') roundTrips += 1; // SSL
    }
    // subtract out our estimated server response time, flooring the estimate
    // at 3 so the server-time subtraction can never produce a tiny/negative RTT
    return Math.max((timing.receiveHeadersEnd - estimatedServerResponseTime) / roundTrips, 3);
  });
}
/**
 * Given the RTT to each origin, estimates the observed server response times
 * (TTFB minus one round trip, clamped at zero).
 *
 * @param {LH.Artifacts.NetworkRequest[]} records
 * @param {Map<string, number>} rttByOrigin
 * @return {Map<string, number[]>}
 */
static _estimateResponseTimeByOrigin(records, rttByOrigin) {
  return NetworkAnalyzer._estimateValueByOrigin(records, ({record, timing}) => {
    // Lightrider does not have timings for sendEnd, but we do have this timing which should be
    // close to the response time.
    if (global.isLightrider && record.lrStatistics) return record.lrStatistics.requestMs;
    if (!Number.isFinite(timing.receiveHeadersEnd) || timing.receiveHeadersEnd < 0) return;
    if (!Number.isFinite(timing.sendEnd) || timing.sendEnd < 0) return;
    const ttfb = timing.receiveHeadersEnd - timing.sendEnd;
    const origin = record.parsedURL.securityOrigin;
    // Fall back to the all-origins summary RTT when the origin has no estimate.
    const rtt = rttByOrigin.get(origin) || rttByOrigin.get(NetworkAnalyzer.SUMMARY) || 0;
    return Math.max(ttfb - rtt, 0);
  });
}
/**
* @param {LH.Artifacts.NetworkRequest[]} records
* @return {boolean}
*/
static canTrustConnectionInformation(records) {
const connectionIdWasStarted = new Map();
for (const record of records) {
const started = connectionIdWasStarted.get(record.connectionId) || !record.connectionReused;
connectionIdWasStarted.set(record.connectionId, started);
}
// We probably can't trust the network information if all the connection IDs were the same
if (connectionIdWasStarted.size <= 1) return false;
// Or if there were connections that were always reused (a connection had to have started at some point)
return Array.from(connectionIdWasStarted.values()).every(started => started);
}
/**
* Returns a map of requestId -> connectionReused, estimating the information if the information
* available in the records themselves appears untrustworthy.
*
* @param {LH.Artifacts.NetworkRequest[]} records
* @param {{forceCoarseEstimates: boolean}} [options]
* @return {Map<string, boolean>}
*/
static estimateIfConnectionWasReused(records, options) {
const {forceCoarseEstimates = false} = options || {};
// Check if we can trust the connection information coming from the protocol
if (!forceCoarseEstimates && NetworkAnalyzer.canTrustConnectionInformation(records)) {
return new Map(records.map(record => [record.requestId, !!record.connectionReused]));
}
// Otherwise we're on our own, a record may not have needed a fresh connection if...
// - It was not the first request to the domain
// - It was H2
// - It was after the first request to the domain ended
const connectionWasReused = new Map();
const groupedByOrigin = NetworkAnalyzer.groupByOrigin(records);
for (const [_, originRecords] of groupedByOrigin.entries()) {
const earliestReusePossible = originRecords
.map(record => record.networkEndTime)
.reduce((a, b) => Math.min(a, b), Infinity);
for (const record of originRecords) {
connectionWasReused.set(
record.requestId,
record.networkRequestTime >= earliestReusePossible || record.protocol === 'h2'
);
}
const firstRecord = originRecords.reduce((a, b) => {
return a.networkRequestTime > b.networkRequestTime ? b : a;
});
connectionWasReused.set(firstRecord.requestId, false);
}
return connectionWasReused;
}
/**
* Estimates the RTT to each origin by examining observed network timing information.
* Attempts to use the most accurate information first and falls back to coarser estimates when it
* is unavailable.
*
* @param {LH.Artifacts.NetworkRequest[]} records
* @param {RTTEstimateOptions} [options]
* @return {Map<string, Summary>}
*/
static estimateRTTByOrigin(records, options) {
const {
forceCoarseEstimates = false,
// coarse estimates include lots of extra time and noise
// multiply by some factor to deflate the estimates a bit.
coarseEstimateMultiplier = 0.3,
useDownloadEstimates = true,
useSendStartEstimates = true,
useHeadersEndEstimates = true,
} = options || {};
let estimatesByOrigin = NetworkAnalyzer._estimateRTTByOriginViaConnectionTiming(records);
if (!estimatesByOrigin.size || forceCoarseEstimates) {
estimatesByOrigin = new Map();
const estimatesViaDownload = NetworkAnalyzer._estimateRTTByOriginViaDownloadTiming(records);
const estimatesViaSendStart = NetworkAnalyzer._estimateRTTByOriginViaSendStartTiming(records);
const estimatesViaTTFB = NetworkAnalyzer._estimateRTTByOriginViaHeadersEndTiming(records);
for (const [origin, estimates] of estimatesViaDownload.entries()) {
if (!useDownloadEstimates) continue;
estimatesByOrigin.set(origin, estimates);
}
for (const [origin, estimates] of estimatesViaSendStart.entries()) {
if (!useSendStartEstimates) continue;
const existing = estimatesByOrigin.get(origin) || [];
estimatesByOrigin.set(origin, existing.concat(estimates));
}
for (const [origin, estimates] of estimatesViaTTFB.entries()) {
if (!useHeadersEndEstimates) continue;
const existing = estimatesByOrigin.get(origin) || [];
estimatesByOrigin.set(origin, existing.concat(estimates));
}
for (const estimates of estimatesByOrigin.values()) {
estimates.forEach((x, i) => (estimates[i] = x * coarseEstimateMultiplier));
}
}
if (!estimatesByOrigin.size) throw new Error('No timing information available');
return NetworkAnalyzer.summarize(estimatesByOrigin);
}
/**
* Estimates the server response time of each origin. RTT times can be passed in or will be
* estimated automatically if not provided.
*
* @param {LH.Artifacts.NetworkRequest[]} records
* @param {RTTEstimateOptions & {rttByOrigin?: Map<string, number>}} [options]
* @return {Map<string, Summary>}
*/
static estimateServerResponseTimeByOrigin(records, options) {
let rttByOrigin = (options || {}).rttByOrigin;
if (!rttByOrigin) {
/** @type {Map<string, number>} */
rttByOrigin = new Map();
const rttSummaryByOrigin = NetworkAnalyzer.estimateRTTByOrigin(records, options);
for (const [origin, summary] of rttSummaryByOrigin.entries()) {
rttByOrigin.set(origin, summary.min);
}
}
const estimatesByOrigin = NetworkAnalyzer._estimateResponseTimeByOrigin(records, rttByOrigin);
return NetworkAnalyzer.summarize(estimatesByOrigin);
}
/**
* Computes the average throughput for the given records in bits/second.
* Excludes data URI, failed or otherwise incomplete, and cached requests.
* Returns Infinity if there were no analyzable network records.
*
* @param {Array<LH.Artifacts.NetworkRequest>} networkRecords
* @return {number}
*/
static estimateThroughput(networkRecords) {
let totalBytes = 0;
// We will measure throughput by summing the total bytes downloaded by the total time spent
// downloading those bytes. We slice up all the network records into start/end boundaries, so
// it's easier to deal with the gaps in downloading.
const timeBoundaries = networkRecords.reduce((boundaries, record) => {
const scheme = record.parsedURL?.scheme;
// Requests whose bodies didn't come over the network or didn't completely finish will mess
// with the computation, just skip over them.
if (scheme === 'data' || record.failed || !record.finished ||
record.statusCode > 300 || !record.transferSize) {
return boundaries;
}
// If we've made it this far, all the times we need should be valid (i.e. not undefined/-1).
totalBytes += record.transferSize;
boundaries.push({time: record.responseHeadersEndTime / 1000, isStart: true});
boundaries.push({time: record.networkEndTime / 1000, isStart: false});
return boundaries;
}, /** @type {Array<{time: number, isStart: boolean}>} */([])).sort((a, b) => a.time - b.time);
if (!timeBoundaries.length) {
return Infinity;
}
let inflight = 0;
let currentStart = 0;
let totalDuration = 0;
timeBoundaries.forEach(boundary => {
if (boundary.isStart) {
if (inflight === 0) {
// We just ended a quiet period, keep track of when the download period started
currentStart = boundary.time;
}
inflight++;
} else {
inflight--;
if (inflight === 0) {
// We just entered a quiet period, update our duration with the time we spent downloading
totalDuration += boundary.time - currentStart;
}
}
});
return totalBytes * 8 / totalDuration;
}
/**
* @param {Array<LH.Artifacts.NetworkRequest>} records
* @param {string} resourceUrl
* @return {LH.Artifacts.NetworkRequest|undefined}
*/
static findResourceForUrl(records, resourceUrl) {
// equalWithExcludedFragments is expensive, so check that the resourceUrl starts with the request url first
return records.find(request =>
resourceUrl.startsWith(request.url) &&
UrlUtils.equalWithExcludedFragments(request.url, resourceUrl)
);
}
/**
* Resolves redirect chain given a main document.
* See: {@link NetworkAnalyzer.findResourceForUrl}) for how to retrieve main document.
*
* @param {LH.Artifacts.NetworkRequest} request
* @return {LH.Artifacts.NetworkRequest}
*/
static resolveRedirects(request) {
while (request.redirectDestination) request = request.redirectDestination;
return request;
}
}
export {NetworkAnalyzer};
/**
* @typedef Summary
* @property {number} min
* @property {number} max
* @property {number} avg
* @property {number} median
*/
/**
* @typedef RTTEstimateOptions
* @property {boolean} [forceCoarseEstimates] TCP connection handshake information will be used when available, but in some circumstances this data can be unreliable. This flag exposes an option to ignore the handshake data and use the coarse download/TTFB timing data.
 * @property {number} [coarseEstimateMultiplier] Coarse estimates include lots of extra time and noise; multiply by this factor to deflate the estimates a bit.
* @property {boolean} [useDownloadEstimates] Useful for testing to isolate the different methods of estimation.
* @property {boolean} [useSendStartEstimates] Useful for testing to isolate the different methods of estimation.
* @property {boolean} [useHeadersEndEstimates] Useful for testing to isolate the different methods of estimation.
*/

View File

@@ -0,0 +1,153 @@
// NOTE(review): generated TypeScript declaration file for simulator-timing-map.js
// (see the sourceMappingURL pragma at the bottom) — regenerate it rather than hand-editing.
export type Node = import('../base-node.js').Node;
export type NetworkNode = import('../network-node.js').NetworkNode;
export type CpuNode = import('../cpu-node.js').CPUNode;
export type NodeTimingComplete = {
startTime: number;
endTime: number;
/**
* Helpful for debugging.
*/
queuedTime: number;
estimatedTimeElapsed: number;
timeElapsed: number;
timeElapsedOvershoot: number;
bytesDownloaded: number;
};
export type NodeTimingQueued = Pick<NodeTimingComplete, 'queuedTime'>;
export type CpuNodeTimingStarted = NodeTimingQueued & Pick<NodeTimingComplete, 'startTime' | 'timeElapsed'>;
export type NetworkNodeTimingStarted = CpuNodeTimingStarted & Pick<NodeTimingComplete, 'timeElapsedOvershoot' | 'bytesDownloaded'>;
export type CpuNodeTimingInProgress = CpuNodeTimingStarted & Pick<NodeTimingComplete, 'estimatedTimeElapsed'>;
export type NetworkNodeTimingInProgress = NetworkNodeTimingStarted & Pick<NodeTimingComplete, 'estimatedTimeElapsed'>;
export type CpuNodeTimingComplete = CpuNodeTimingInProgress & Pick<NodeTimingComplete, 'endTime'>;
export type NetworkNodeTimingComplete = NetworkNodeTimingInProgress & Pick<NodeTimingComplete, 'endTime'> & {
connectionTiming: ConnectionTiming;
};
export type NodeTimingData = NodeTimingQueued | CpuNodeTimingStarted | NetworkNodeTimingStarted | CpuNodeTimingInProgress | NetworkNodeTimingInProgress | CpuNodeTimingComplete | NetworkNodeTimingComplete;
/**
* A breakdown of network connection timings.
*/
export type ConnectionTiming = {
dnsResolutionTime?: number | undefined;
connectionTime?: number | undefined;
sslTime?: number | undefined;
timeToFirstByte: number;
};
/**
* @fileoverview
*
* This class encapsulates the type-related validation logic for moving timing information for nodes
* through the different simulation phases. Methods here ensure that the invariants of simulation hold
* as nodes are queued, partially simulated, and completed.
*/
/** @typedef {import('../base-node.js').Node} Node */
/** @typedef {import('../network-node.js').NetworkNode} NetworkNode */
/** @typedef {import('../cpu-node.js').CPUNode} CpuNode */
/**
* @typedef NodeTimingComplete
* @property {number} startTime
* @property {number} endTime
* @property {number} queuedTime Helpful for debugging.
* @property {number} estimatedTimeElapsed
* @property {number} timeElapsed
* @property {number} timeElapsedOvershoot
* @property {number} bytesDownloaded
*/
/** @typedef {Pick<NodeTimingComplete, 'queuedTime'>} NodeTimingQueued */
/** @typedef {NodeTimingQueued & Pick<NodeTimingComplete, 'startTime'|'timeElapsed'>} CpuNodeTimingStarted */
/** @typedef {CpuNodeTimingStarted & Pick<NodeTimingComplete, 'timeElapsedOvershoot'|'bytesDownloaded'>} NetworkNodeTimingStarted */
/** @typedef {CpuNodeTimingStarted & Pick<NodeTimingComplete, 'estimatedTimeElapsed'>} CpuNodeTimingInProgress */
/** @typedef {NetworkNodeTimingStarted & Pick<NodeTimingComplete, 'estimatedTimeElapsed'>} NetworkNodeTimingInProgress */
/** @typedef {CpuNodeTimingInProgress & Pick<NodeTimingComplete, 'endTime'>} CpuNodeTimingComplete */
/** @typedef {NetworkNodeTimingInProgress & Pick<NodeTimingComplete, 'endTime'> & {connectionTiming: ConnectionTiming}} NetworkNodeTimingComplete */
/** @typedef {NodeTimingQueued | CpuNodeTimingStarted | NetworkNodeTimingStarted | CpuNodeTimingInProgress | NetworkNodeTimingInProgress | CpuNodeTimingComplete | NetworkNodeTimingComplete} NodeTimingData */
/**
* @typedef ConnectionTiming A breakdown of network connection timings.
* @property {number} [dnsResolutionTime]
* @property {number} [connectionTime]
* @property {number} [sslTime]
* @property {number} timeToFirstByte
*/
export class SimulatorTimingMap {
/** @type {Map<Node, NodeTimingData>} */
_nodeTimings: Map<Node, NodeTimingData>;
/** @return {Array<Node>} */
getNodes(): Array<Node>;
/**
* @param {Node} node
* @param {{queuedTime: number}} values
*/
setReadyToStart(node: Node, values: {
queuedTime: number;
}): void;
/**
* @param {Node} node
* @param {{startTime: number}} values
*/
setInProgress(node: Node, values: {
startTime: number;
}): void;
/**
* @param {Node} node
* @param {{endTime: number, connectionTiming?: ConnectionTiming}} values
*/
setCompleted(node: Node, values: {
endTime: number;
connectionTiming?: ConnectionTiming;
}): void;
/**
* @param {CpuNode} node
* @param {{timeElapsed: number}} values
*/
setCpu(node: CpuNode, values: {
timeElapsed: number;
}): void;
/**
* @param {CpuNode} node
* @param {{estimatedTimeElapsed: number}} values
*/
setCpuEstimated(node: CpuNode, values: {
estimatedTimeElapsed: number;
}): void;
/**
* @param {NetworkNode} node
* @param {{timeElapsed: number, timeElapsedOvershoot: number, bytesDownloaded: number}} values
*/
setNetwork(node: NetworkNode, values: {
timeElapsed: number;
timeElapsedOvershoot: number;
bytesDownloaded: number;
}): void;
/**
* @param {NetworkNode} node
* @param {{estimatedTimeElapsed: number}} values
*/
setNetworkEstimated(node: NetworkNode, values: {
estimatedTimeElapsed: number;
}): void;
/**
* @param {Node} node
* @return {NodeTimingQueued}
*/
getQueued(node: Node): NodeTimingQueued;
/**
* @param {CpuNode} node
* @return {CpuNodeTimingStarted}
*/
getCpuStarted(node: CpuNode): CpuNodeTimingStarted;
/**
* @param {NetworkNode} node
* @return {NetworkNodeTimingStarted}
*/
getNetworkStarted(node: NetworkNode): NetworkNodeTimingStarted;
/**
* @param {Node} node
* @return {CpuNodeTimingInProgress | NetworkNodeTimingInProgress}
*/
getInProgress(node: Node): CpuNodeTimingInProgress | NetworkNodeTimingInProgress;
/**
* @param {Node} node
* @return {CpuNodeTimingComplete | NetworkNodeTimingComplete}
*/
getCompleted(node: Node): CpuNodeTimingComplete | NetworkNodeTimingComplete;
}
//# sourceMappingURL=simulator-timing-map.d.ts.map

View File

@@ -0,0 +1,220 @@
/**
* @license Copyright 2020 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import {BaseNode} from '../base-node.js';
/**
* @fileoverview
*
* This class encapsulates the type-related validation logic for moving timing information for nodes
* through the different simulation phases. Methods here ensure that the invariants of simulation hold
* as nodes are queued, partially simulated, and completed.
*/
/** @typedef {import('../base-node.js').Node} Node */
/** @typedef {import('../network-node.js').NetworkNode} NetworkNode */
/** @typedef {import('../cpu-node.js').CPUNode} CpuNode */
/**
* @typedef NodeTimingComplete
* @property {number} startTime
* @property {number} endTime
* @property {number} queuedTime Helpful for debugging.
* @property {number} estimatedTimeElapsed
* @property {number} timeElapsed
* @property {number} timeElapsedOvershoot
* @property {number} bytesDownloaded
*/
/** @typedef {Pick<NodeTimingComplete, 'queuedTime'>} NodeTimingQueued */
/** @typedef {NodeTimingQueued & Pick<NodeTimingComplete, 'startTime'|'timeElapsed'>} CpuNodeTimingStarted */
/** @typedef {CpuNodeTimingStarted & Pick<NodeTimingComplete, 'timeElapsedOvershoot'|'bytesDownloaded'>} NetworkNodeTimingStarted */
/** @typedef {CpuNodeTimingStarted & Pick<NodeTimingComplete, 'estimatedTimeElapsed'>} CpuNodeTimingInProgress */
/** @typedef {NetworkNodeTimingStarted & Pick<NodeTimingComplete, 'estimatedTimeElapsed'>} NetworkNodeTimingInProgress */
/** @typedef {CpuNodeTimingInProgress & Pick<NodeTimingComplete, 'endTime'>} CpuNodeTimingComplete */
/** @typedef {NetworkNodeTimingInProgress & Pick<NodeTimingComplete, 'endTime'> & {connectionTiming: ConnectionTiming}} NetworkNodeTimingComplete */
/** @typedef {NodeTimingQueued | CpuNodeTimingStarted | NetworkNodeTimingStarted | CpuNodeTimingInProgress | NetworkNodeTimingInProgress | CpuNodeTimingComplete | NetworkNodeTimingComplete} NodeTimingData */
/**
* @typedef ConnectionTiming A breakdown of network connection timings.
* @property {number} [dnsResolutionTime]
* @property {number} [connectionTime]
* @property {number} [sslTime]
* @property {number} timeToFirstByte
*/
class SimulatorTimingMap {
constructor() {
/** @type {Map<Node, NodeTimingData>} */
this._nodeTimings = new Map();
}
/** @return {Array<Node>} */
getNodes() {
return Array.from(this._nodeTimings.keys());
}
/**
* @param {Node} node
* @param {{queuedTime: number}} values
*/
setReadyToStart(node, values) {
this._nodeTimings.set(node, values);
}
/**
* @param {Node} node
* @param {{startTime: number}} values
*/
setInProgress(node, values) {
const nodeTiming = {
...this.getQueued(node),
startTime: values.startTime,
timeElapsed: 0,
};
this._nodeTimings.set(
node,
node.type === BaseNode.TYPES.NETWORK
? {...nodeTiming, timeElapsedOvershoot: 0, bytesDownloaded: 0}
: nodeTiming
);
}
/**
* @param {Node} node
* @param {{endTime: number, connectionTiming?: ConnectionTiming}} values
*/
setCompleted(node, values) {
const nodeTiming = {
...this.getInProgress(node),
endTime: values.endTime,
connectionTiming: values.connectionTiming,
};
this._nodeTimings.set(node, nodeTiming);
}
/**
* @param {CpuNode} node
* @param {{timeElapsed: number}} values
*/
setCpu(node, values) {
const nodeTiming = {
...this.getCpuStarted(node),
timeElapsed: values.timeElapsed,
};
this._nodeTimings.set(node, nodeTiming);
}
/**
* @param {CpuNode} node
* @param {{estimatedTimeElapsed: number}} values
*/
setCpuEstimated(node, values) {
const nodeTiming = {
...this.getCpuStarted(node),
estimatedTimeElapsed: values.estimatedTimeElapsed,
};
this._nodeTimings.set(node, nodeTiming);
}
/**
* @param {NetworkNode} node
* @param {{timeElapsed: number, timeElapsedOvershoot: number, bytesDownloaded: number}} values
*/
setNetwork(node, values) {
const nodeTiming = {
...this.getNetworkStarted(node),
timeElapsed: values.timeElapsed,
timeElapsedOvershoot: values.timeElapsedOvershoot,
bytesDownloaded: values.bytesDownloaded,
};
this._nodeTimings.set(node, nodeTiming);
}
/**
* @param {NetworkNode} node
* @param {{estimatedTimeElapsed: number}} values
*/
setNetworkEstimated(node, values) {
const nodeTiming = {
...this.getNetworkStarted(node),
estimatedTimeElapsed: values.estimatedTimeElapsed,
};
this._nodeTimings.set(node, nodeTiming);
}
/**
* @param {Node} node
* @return {NodeTimingQueued}
*/
getQueued(node) {
const timing = this._nodeTimings.get(node);
if (!timing) throw new Error(`Node ${node.id} not yet queued`);
return timing;
}
/**
* @param {CpuNode} node
* @return {CpuNodeTimingStarted}
*/
getCpuStarted(node) {
const timing = this._nodeTimings.get(node);
if (!timing) throw new Error(`Node ${node.id} not yet queued`);
if (!('startTime' in timing)) throw new Error(`Node ${node.id} not yet started`);
if ('bytesDownloaded' in timing) throw new Error(`Node ${node.id} timing not valid`);
return timing;
}
/**
* @param {NetworkNode} node
* @return {NetworkNodeTimingStarted}
*/
getNetworkStarted(node) {
const timing = this._nodeTimings.get(node);
if (!timing) throw new Error(`Node ${node.id} not yet queued`);
if (!('startTime' in timing)) throw new Error(`Node ${node.id} not yet started`);
if (!('bytesDownloaded' in timing)) throw new Error(`Node ${node.id} timing not valid`);
return timing;
}
/**
* @param {Node} node
* @return {CpuNodeTimingInProgress | NetworkNodeTimingInProgress}
*/
getInProgress(node) {
const timing = this._nodeTimings.get(node);
if (!timing) throw new Error(`Node ${node.id} not yet queued`);
if (!('startTime' in timing)) throw new Error(`Node ${node.id} not yet started`);
if (!('estimatedTimeElapsed' in timing)) throw new Error(`Node ${node.id} not yet in progress`);
return timing;
}
/**
* @param {Node} node
* @return {CpuNodeTimingComplete | NetworkNodeTimingComplete}
*/
getCompleted(node) {
const timing = this._nodeTimings.get(node);
if (!timing) throw new Error(`Node ${node.id} not yet queued`);
if (!('startTime' in timing)) throw new Error(`Node ${node.id} not yet started`);
if (!('estimatedTimeElapsed' in timing)) throw new Error(`Node ${node.id} not yet in progress`);
if (!('endTime' in timing)) throw new Error(`Node ${node.id} not yet completed`);
return timing;
}
}
export {SimulatorTimingMap};

View File

@@ -0,0 +1,154 @@
// NOTE(review): generated TypeScript declaration file for simulator.js
// (see the sourceMappingURL pragma at the bottom) — regenerate it rather than hand-editing.
export type Node = import('../base-node.js').Node;
export type NetworkNode = import('../network-node.js').NetworkNode;
export type CpuNode = import('../cpu-node.js').CPUNode;
export type CompleteNodeTiming = import('./simulator-timing-map.js').CpuNodeTimingComplete | import('./simulator-timing-map.js').NetworkNodeTimingComplete;
export type ConnectionTiming = import('./simulator-timing-map.js').ConnectionTiming;
export class Simulator {
/** @return {Map<string, Map<Node, CompleteNodeTiming>>} */
static get ALL_NODE_TIMINGS(): Map<string, Map<import("../base-node.js").Node, CompleteNodeTiming>>;
/**
* We attempt to start nodes by their observed start time using the record priority as a tie breaker.
* When simulating, just because a low priority image started 5ms before a high priority image doesn't mean
* it would have happened like that when the network was slower.
* @param {Node} node
*/
static _computeNodeStartPosition(node: Node): number;
/**
* @param {LH.Gatherer.Simulation.Options} [options]
*/
constructor(options?: LH.Gatherer.Simulation.Options | undefined);
/** @type {Required<LH.Gatherer.Simulation.Options>} */
_options: Required<LH.Gatherer.Simulation.Options>;
_rtt: number;
_throughput: number;
_maximumConcurrentRequests: number;
_cpuSlowdownMultiplier: number;
_layoutTaskMultiplier: number;
/** @type {Array<Node>} */
_cachedNodeListByStartPosition: Array<Node>;
_flexibleOrdering: boolean;
_nodeTimings: SimulatorTimingMap;
/** @type {Map<string, number>} */
_numberInProgressByType: Map<string, number>;
/** @type {Record<number, Set<Node>>} */
_nodes: Record<number, Set<Node>>;
_dns: DNSCache;
/** @type {ConnectionPool} */
_connectionPool: ConnectionPool;
/** @return {number} */
get rtt(): number;
/**
* @param {Node} graph
*/
_initializeConnectionPool(graph: Node): void;
/**
* Initializes the various state data structures such _nodeTimings and the _node Sets by state.
*/
_initializeAuxiliaryData(): void;
/**
* @param {string} type
* @return {number}
*/
_numberInProgress(type: string): number;
/**
* @param {Node} node
* @param {number} queuedTime
*/
_markNodeAsReadyToStart(node: Node, queuedTime: number): void;
/**
* @param {Node} node
* @param {number} startTime
*/
_markNodeAsInProgress(node: Node, startTime: number): void;
/**
* @param {Node} node
* @param {number} endTime
* @param {ConnectionTiming} [connectionTiming] Optional network connection information.
*/
_markNodeAsComplete(node: Node, endTime: number, connectionTiming?: import("./simulator-timing-map.js").ConnectionTiming | undefined): void;
/**
* @param {LH.Artifacts.NetworkRequest} record
* @return {?TcpConnection}
*/
_acquireConnection(record: LH.Artifacts.NetworkRequest): TcpConnection | null;
/**
* @return {Node[]}
*/
_getNodesSortedByStartPosition(): Node[];
/**
* @param {Node} node
* @param {number} totalElapsedTime
*/
_startNodeIfPossible(node: Node, totalElapsedTime: number): void;
/**
* Updates each connection in use with the available throughput based on the number of network requests
* currently in flight.
*/
_updateNetworkCapacity(): void;
/**
* Estimates the number of milliseconds remaining given current conditions before the node is complete.
* @param {Node} node
* @return {number}
*/
_estimateTimeRemaining(node: Node): number;
/**
* @param {CpuNode} cpuNode
* @return {number}
*/
_estimateCPUTimeRemaining(cpuNode: CpuNode): number;
/**
* @param {NetworkNode} networkNode
* @return {number}
*/
_estimateNetworkTimeRemaining(networkNode: NetworkNode): number;
/**
* Computes and returns the minimum estimated completion time of the nodes currently in progress.
* @return {number}
*/
_findNextNodeCompletionTime(): number;
/**
* Given a time period, computes the progress toward completion that the node made during that time.
* @param {Node} node
* @param {number} timePeriodLength
* @param {number} totalElapsedTime
*/
_updateProgressMadeInTimePeriod(node: Node, timePeriodLength: number, totalElapsedTime: number): number | void;
/**
* @return {{nodeTimings: Map<Node, LH.Gatherer.Simulation.NodeTiming>, completeNodeTimings: Map<Node, CompleteNodeTiming>}}
*/
_computeFinalNodeTimings(): {
nodeTimings: Map<Node, LH.Gatherer.Simulation.NodeTiming>;
completeNodeTimings: Map<Node, CompleteNodeTiming>;
};
/**
* @return {Required<LH.Gatherer.Simulation.Options>}
*/
getOptions(): Required<LH.Gatherer.Simulation.Options>;
/**
* Estimates the time taken to process all of the graph's nodes, returns the overall time along with
* each node annotated by start/end times.
*
* If flexibleOrdering is set, simulator/connection pool are allowed to deviate from what was
* observed in the trace/devtoolsLog and start requests as soon as they are queued (i.e. do not
* wait around for a warm connection to be available if the original record was fetched on a warm
* connection).
*
* @param {Node} graph
* @param {{flexibleOrdering?: boolean, label?: string}=} options
* @return {LH.Gatherer.Simulation.Result}
*/
simulate(graph: Node, options?: {
flexibleOrdering?: boolean;
label?: string;
} | undefined): LH.Gatherer.Simulation.Result;
/**
* @param {number} wastedBytes
*/
computeWastedMsFromWastedBytes(wastedBytes: number): number;
}
import * as LH from '../../../../types/lh.js';
import { SimulatorTimingMap } from './simulator-timing-map.js';
import { DNSCache } from './dns-cache.js';
import { ConnectionPool } from './connection-pool.js';
import { TcpConnection } from './tcp-connection.js';
//# sourceMappingURL=simulator.d.ts.map

View File

@@ -0,0 +1,542 @@
/**
* @license Copyright 2017 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import * as LH from '../../../../types/lh.js';
import {BaseNode} from '../base-node.js';
import {TcpConnection} from './tcp-connection.js';
import {ConnectionPool} from './connection-pool.js';
import {DNSCache} from './dns-cache.js';
import {SimulatorTimingMap} from './simulator-timing-map.js';
import * as constants from '../../../config/constants.js';
const mobileSlow4G = constants.throttling.mobileSlow4G;
/** @typedef {import('../base-node.js').Node} Node */
/** @typedef {import('../network-node.js').NetworkNode} NetworkNode */
/** @typedef {import('../cpu-node.js').CPUNode} CpuNode */
/** @typedef {import('./simulator-timing-map.js').CpuNodeTimingComplete | import('./simulator-timing-map.js').NetworkNodeTimingComplete} CompleteNodeTiming */
/** @typedef {import('./simulator-timing-map.js').ConnectionTiming} ConnectionTiming */
// see https://cs.chromium.org/search/?q=kDefaultMaxNumDelayableRequestsPerClient&sq=package:chromium&type=cs
const DEFAULT_MAXIMUM_CONCURRENT_REQUESTS = 10;
// layout tasks tend to be less CPU-bound and do not experience the same increase in duration
const DEFAULT_LAYOUT_TASK_MULTIPLIER = 0.5;
// if a task takes more than 10 seconds it's usually a sign it isn't actually CPU bound and we're overestimating
const DEFAULT_MAXIMUM_CPU_TASK_DURATION = 10000;
// Lifecycle states a node moves through during simulation, in order.
const NodeState = {
NotReadyToStart: 0,
ReadyToStart: 1,
InProgress: 2,
Complete: 3,
};
// Start-position penalty per request priority: lower-priority requests are ordered
// later in the simulated start order (used by _computeNodeStartPosition as a tie breaker).
/** @type {Record<NetworkNode['record']['priority'], number>} */
const PriorityStartTimePenalty = {
VeryHigh: 0,
High: 0.25,
Medium: 0.5,
Low: 1,
VeryLow: 2,
};
// Accumulates completed node timings across simulate() runs; presumably keyed by the
// simulation label passed to simulate() — verify against simulate()/ALL_NODE_TIMINGS.
/** @type {Map<string, Map<Node, CompleteNodeTiming>>} */
const ALL_SIMULATION_NODE_TIMINGS = new Map();
class Simulator {
/**
* @param {LH.Gatherer.Simulation.Options} [options]
*/
constructor(options) {
/** @type {Required<LH.Gatherer.Simulation.Options>} */
this._options = Object.assign(
{
rtt: mobileSlow4G.rttMs,
throughput: mobileSlow4G.throughputKbps * 1024,
maximumConcurrentRequests: DEFAULT_MAXIMUM_CONCURRENT_REQUESTS,
cpuSlowdownMultiplier: mobileSlow4G.cpuSlowdownMultiplier,
layoutTaskMultiplier: DEFAULT_LAYOUT_TASK_MULTIPLIER,
additionalRttByOrigin: new Map(),
serverResponseTimeByOrigin: new Map(),
},
options
);
this._rtt = this._options.rtt;
this._throughput = this._options.throughput;
this._maximumConcurrentRequests = Math.max(Math.min(
TcpConnection.maximumSaturatedConnections(this._rtt, this._throughput),
this._options.maximumConcurrentRequests
), 1);
this._cpuSlowdownMultiplier = this._options.cpuSlowdownMultiplier;
this._layoutTaskMultiplier = this._cpuSlowdownMultiplier * this._options.layoutTaskMultiplier;
/** @type {Array<Node>} */
this._cachedNodeListByStartPosition = [];
// Properties reset on every `.simulate` call but duplicated here for type checking
this._flexibleOrdering = false;
this._nodeTimings = new SimulatorTimingMap();
/** @type {Map<string, number>} */
this._numberInProgressByType = new Map();
/** @type {Record<number, Set<Node>>} */
this._nodes = {};
this._dns = new DNSCache({rtt: this._rtt});
/** @type {ConnectionPool} */
// @ts-expect-error
this._connectionPool = null;
if (!Number.isFinite(this._rtt)) throw new Error(`Invalid rtt ${this._rtt}`);
if (!Number.isFinite(this._throughput)) throw new Error(`Invalid rtt ${this._throughput}`);
}
/** @return {number} The simulated round-trip time in milliseconds. */
get rtt() {
return this._rtt;
}
/**
* @param {Node} graph
*/
_initializeConnectionPool(graph) {
/** @type {LH.Artifacts.NetworkRequest[]} */
const records = [];
graph.getRootNode().traverse(node => {
if (node.type === BaseNode.TYPES.NETWORK) {
records.push(node.record);
}
});
this._connectionPool = new ConnectionPool(records, this._options);
}
/**
* Initializes the various state data structures such _nodeTimings and the _node Sets by state.
*/
_initializeAuxiliaryData() {
this._nodeTimings = new SimulatorTimingMap();
this._numberInProgressByType = new Map();
this._nodes = {};
this._cachedNodeListByStartPosition = [];
// NOTE: We don't actually need *all* of these sets, but the clarity that each node progresses
// through the system is quite nice.
for (const state of Object.values(NodeState)) {
this._nodes[state] = new Set();
}
}
/**
* @param {string} type
* @return {number}
*/
_numberInProgress(type) {
return this._numberInProgressByType.get(type) || 0;
}
/**
* @param {Node} node
* @param {number} queuedTime
*/
_markNodeAsReadyToStart(node, queuedTime) {
const nodeStartPosition = Simulator._computeNodeStartPosition(node);
const firstNodeIndexWithGreaterStartPosition = this._cachedNodeListByStartPosition
.findIndex(candidate => Simulator._computeNodeStartPosition(candidate) > nodeStartPosition);
const insertionIndex = firstNodeIndexWithGreaterStartPosition === -1 ?
this._cachedNodeListByStartPosition.length : firstNodeIndexWithGreaterStartPosition;
this._cachedNodeListByStartPosition.splice(insertionIndex, 0, node);
this._nodes[NodeState.ReadyToStart].add(node);
this._nodes[NodeState.NotReadyToStart].delete(node);
this._nodeTimings.setReadyToStart(node, {queuedTime});
}
/**
* @param {Node} node
* @param {number} startTime
*/
_markNodeAsInProgress(node, startTime) {
const indexOfNodeToStart = this._cachedNodeListByStartPosition.indexOf(node);
this._cachedNodeListByStartPosition.splice(indexOfNodeToStart, 1);
this._nodes[NodeState.InProgress].add(node);
this._nodes[NodeState.ReadyToStart].delete(node);
this._numberInProgressByType.set(node.type, this._numberInProgress(node.type) + 1);
this._nodeTimings.setInProgress(node, {startTime});
}
/**
* @param {Node} node
* @param {number} endTime
* @param {ConnectionTiming} [connectionTiming] Optional network connection information.
*/
_markNodeAsComplete(node, endTime, connectionTiming) {
this._nodes[NodeState.Complete].add(node);
this._nodes[NodeState.InProgress].delete(node);
this._numberInProgressByType.set(node.type, this._numberInProgress(node.type) - 1);
this._nodeTimings.setCompleted(node, {endTime, connectionTiming});
// Try to add all its dependents to the queue
for (const dependent of node.getDependents()) {
// Skip dependent node if one of its dependencies hasn't finished yet
const dependencies = dependent.getDependencies();
if (dependencies.some(dep => !this._nodes[NodeState.Complete].has(dep))) continue;
// Otherwise add it to the queue
this._markNodeAsReadyToStart(dependent, endTime);
}
}
/**
* @param {LH.Artifacts.NetworkRequest} record
* @return {?TcpConnection}
*/
_acquireConnection(record) {
return this._connectionPool.acquire(record, {
ignoreConnectionReused: this._flexibleOrdering,
});
}
/**
* @return {Node[]}
*/
_getNodesSortedByStartPosition() {
// Make a copy so we don't skip nodes due to concurrent modification
return Array.from(this._cachedNodeListByStartPosition);
}
/**
* @param {Node} node
* @param {number} totalElapsedTime
*/
_startNodeIfPossible(node, totalElapsedTime) {
if (node.type === BaseNode.TYPES.CPU) {
// Start a CPU task if there's no other CPU task in process
if (this._numberInProgress(node.type) === 0) {
this._markNodeAsInProgress(node, totalElapsedTime);
}
return;
}
if (node.type !== BaseNode.TYPES.NETWORK) throw new Error('Unsupported');
// If a network request is connectionless, we can always start it, so skip the connection checks
if (!node.isConnectionless) {
// Start a network request if we're not at max requests and a connection is available
const numberOfActiveRequests = this._numberInProgress(node.type);
if (numberOfActiveRequests >= this._maximumConcurrentRequests) return;
const connection = this._acquireConnection(node.record);
if (!connection) return;
}
this._markNodeAsInProgress(node, totalElapsedTime);
}
/**
* Updates each connection in use with the available throughput based on the number of network requests
* currently in flight.
*/
_updateNetworkCapacity() {
const inFlight = this._numberInProgress(BaseNode.TYPES.NETWORK);
if (inFlight === 0) return;
for (const connection of this._connectionPool.connectionsInUse()) {
connection.setThroughput(this._throughput / inFlight);
}
}
/**
* Estimates the number of milliseconds remaining given current condidtions before the node is complete.
* @param {Node} node
* @return {number}
*/
_estimateTimeRemaining(node) {
if (node.type === BaseNode.TYPES.CPU) {
return this._estimateCPUTimeRemaining(node);
} else if (node.type === BaseNode.TYPES.NETWORK) {
return this._estimateNetworkTimeRemaining(node);
} else {
throw new Error('Unsupported');
}
}
/**
* @param {CpuNode} cpuNode
* @return {number}
*/
_estimateCPUTimeRemaining(cpuNode) {
const timingData = this._nodeTimings.getCpuStarted(cpuNode);
const multiplier = cpuNode.didPerformLayout()
? this._layoutTaskMultiplier
: this._cpuSlowdownMultiplier;
const totalDuration = Math.min(
Math.round(cpuNode.event.dur / 1000 * multiplier),
DEFAULT_MAXIMUM_CPU_TASK_DURATION
);
const estimatedTimeElapsed = totalDuration - timingData.timeElapsed;
this._nodeTimings.setCpuEstimated(cpuNode, {estimatedTimeElapsed});
return estimatedTimeElapsed;
}
/**
 * Estimates the number of milliseconds remaining before this network request completes,
 * accounting for progress already made (bytes downloaded, time elapsed).
 * @param {NetworkNode} networkNode
 * @return {number}
 */
_estimateNetworkTimeRemaining(networkNode) {
  const record = networkNode.record;
  const timingData = this._nodeTimings.getNetworkStarted(networkNode);
  let timeElapsed = 0;
  if (networkNode.fromDiskCache) {
    // Rough access time for seeking to location on disk and reading sequentially.
    // 8ms per seek + 20ms/MB
    // @see http://norvig.com/21-days.html#answers
    const sizeInMb = (record.resourceSize || 0) / 1024 / 1024;
    timeElapsed = 8 + 20 * sizeInMb - timingData.timeElapsed;
  } else if (networkNode.isNonNetworkProtocol) {
    // Estimates for the overhead of a data URL in Chromium and the decoding time for base64-encoded data.
    // 2ms per request + 10ms/MB
    // @see traces on https://dopiaza.org/tools/datauri/examples/index.php
    const sizeInMb = (record.resourceSize || 0) / 1024 / 1024;
    timeElapsed = 2 + 10 * sizeInMb - timingData.timeElapsed;
  } else {
    // Simulate the remaining bytes downloading over the connection assigned to this record.
    const connection = this._connectionPool.acquireActiveConnectionFromRecord(record);
    const dnsResolutionTime = this._dns.getTimeUntilResolution(record, {
      requestedAt: timingData.startTime,
      shouldUpdateCache: true,
    });
    const timeAlreadyElapsed = timingData.timeElapsed;
    const calculation = connection.simulateDownloadUntil(
      record.transferSize - timingData.bytesDownloaded,
      {timeAlreadyElapsed, dnsResolutionTime, maximumTimeToElapse: Infinity}
    );
    timeElapsed = calculation.timeElapsed;
  }
  // Include the overshoot carried over from previous simulation periods (see
  // _updateProgressMadeInTimePeriod) so estimates stay consistent across periods.
  const estimatedTimeElapsed = timeElapsed + timingData.timeElapsedOvershoot;
  this._nodeTimings.setNetworkEstimated(networkNode, {estimatedTimeElapsed});
  return estimatedTimeElapsed;
}
/**
* Computes and returns the minimum estimated completion time of the nodes currently in progress.
* @return {number}
*/
_findNextNodeCompletionTime() {
let minimumTime = Infinity;
for (const node of this._nodes[NodeState.InProgress]) {
minimumTime = Math.min(minimumTime, this._estimateTimeRemaining(node));
}
return minimumTime;
}
/**
 * Given a time period, computes the progress toward completion that the node made during that time.
 * @param {Node} node
 * @param {number} timePeriodLength
 * @param {number} totalElapsedTime
 */
_updateProgressMadeInTimePeriod(node, timePeriodLength, totalElapsedTime) {
  const timingData = this._nodeTimings.getInProgress(node);
  // The period length is the minimum estimate across in-progress nodes, so the node whose
  // own estimate equals the period length is the one finishing now.
  const isFinished = timingData.estimatedTimeElapsed === timePeriodLength;
  if (node.type === BaseNode.TYPES.CPU || node.isConnectionless) {
    return isFinished
      ? this._markNodeAsComplete(node, totalElapsedTime)
      : (timingData.timeElapsed += timePeriodLength);
  }
  if (node.type !== BaseNode.TYPES.NETWORK) throw new Error('Unsupported');
  if (!('bytesDownloaded' in timingData)) throw new Error('Invalid timing data');
  const record = node.record;
  const connection = this._connectionPool.acquireActiveConnectionFromRecord(record);
  const dnsResolutionTime = this._dns.getTimeUntilResolution(record, {
    requestedAt: timingData.startTime,
    shouldUpdateCache: true,
  });
  // Simulate how many of the remaining bytes download within this period.
  const calculation = connection.simulateDownloadUntil(
    record.transferSize - timingData.bytesDownloaded,
    {
      dnsResolutionTime,
      timeAlreadyElapsed: timingData.timeElapsed,
      maximumTimeToElapse: timePeriodLength - timingData.timeElapsedOvershoot,
    }
  );
  // Persist congestion-window growth and H2 overflow so future periods resume accurately.
  connection.setCongestionWindow(calculation.congestionWindow);
  connection.setH2OverflowBytesDownloaded(calculation.extraBytesDownloaded);
  if (isFinished) {
    connection.setWarmed(true);
    this._connectionPool.release(record);
    this._markNodeAsComplete(node, totalElapsedTime, calculation.connectionTiming);
  } else {
    // Track the overshoot (simulated time beyond the period, from round-trip granularity)
    // so future estimates for this node can compensate.
    timingData.timeElapsed += calculation.timeElapsed;
    timingData.timeElapsedOvershoot += calculation.timeElapsed - timePeriodLength;
    timingData.bytesDownloaded += calculation.bytesDownloaded;
  }
}
/**
* @return {{nodeTimings: Map<Node, LH.Gatherer.Simulation.NodeTiming>, completeNodeTimings: Map<Node, CompleteNodeTiming>}}
*/
_computeFinalNodeTimings() {
/** @type {Array<[Node, CompleteNodeTiming]>} */
const completeNodeTimingEntries = this._nodeTimings.getNodes().map(node => {
return [node, this._nodeTimings.getCompleted(node)];
});
// Most consumers will want the entries sorted by startTime, so insert them in that order
completeNodeTimingEntries.sort((a, b) => a[1].startTime - b[1].startTime);
// Trimmed version of type `LH.Gatherer.Simulation.NodeTiming`.
/** @type {Array<[Node, LH.Gatherer.Simulation.NodeTiming]>} */
const nodeTimingEntries = completeNodeTimingEntries.map(([node, timing]) => {
return [node, {
startTime: timing.startTime,
endTime: timing.endTime,
duration: timing.endTime - timing.startTime,
}];
});
return {
nodeTimings: new Map(nodeTimingEntries),
completeNodeTimings: new Map(completeNodeTimingEntries),
};
}
/**
 * The fully-populated simulation options this instance was constructed with.
 * @return {Required<LH.Gatherer.Simulation.Options>}
 */
getOptions() {
  return this._options;
}
/**
 * Estimates the time taken to process all of the graph's nodes, returns the overall time along with
 * each node annotated by start/end times.
 *
 * If flexibleOrdering is set, simulator/connection pool are allowed to deviate from what was
 * observed in the trace/devtoolsLog and start requests as soon as they are queued (i.e. do not
 * wait around for a warm connection to be available if the original record was fetched on a warm
 * connection).
 *
 * @param {Node} graph
 * @param {{flexibleOrdering?: boolean, label?: string}=} options
 * @return {LH.Gatherer.Simulation.Result}
 */
simulate(graph, options) {
  if (BaseNode.hasCycle(graph)) {
    throw new Error('Cannot simulate graph with cycle');
  }
  options = Object.assign({
    label: undefined,
    flexibleOrdering: false,
  }, options);
  // initialize the necessary data containers
  this._flexibleOrdering = !!options.flexibleOrdering;
  this._dns = new DNSCache({rtt: this._rtt});
  this._initializeConnectionPool(graph);
  this._initializeAuxiliaryData();
  const nodesNotReadyToStart = this._nodes[NodeState.NotReadyToStart];
  const nodesReadyToStart = this._nodes[NodeState.ReadyToStart];
  const nodesInProgress = this._nodes[NodeState.InProgress];
  const rootNode = graph.getRootNode();
  rootNode.traverse(node => nodesNotReadyToStart.add(node));
  let totalElapsedTime = 0;
  let iteration = 0;
  // root node is always ready to start
  this._markNodeAsReadyToStart(rootNode, totalElapsedTime);
  // loop as long as we have nodes in the queue or currently in progress
  while (nodesReadyToStart.size || nodesInProgress.size) {
    // move all possible queued nodes to in progress
    for (const node of this._getNodesSortedByStartPosition()) {
      this._startNodeIfPossible(node, totalElapsedTime);
    }
    if (!nodesInProgress.size) {
      // Nothing could start: the interplay between fromDiskCache and connectionReused can be
      // incorrect, so retry once with flexibleOrdering before giving up entirely.
      if (this._flexibleOrdering) throw new Error('Failed to start a node');
      this._flexibleOrdering = true;
      continue;
    }
    // set the available throughput for all connections based on # inflight
    this._updateNetworkCapacity();
    // find the time that the next node will finish
    const minimumTime = this._findNextNodeCompletionTime();
    totalElapsedTime += minimumTime;
    // While this is no longer strictly necessary, it's always better than LH hanging
    if (!Number.isFinite(minimumTime) || iteration > 100000) {
      throw new Error('Simulation failed, depth exceeded');
    }
    iteration++;
    // update how far each node will progress until that point
    for (const node of nodesInProgress) {
      this._updateProgressMadeInTimePeriod(node, minimumTime, totalElapsedTime);
    }
  }
  // `nodeTimings` are used for simulator consumers, `completeNodeTimings` kept for debugging.
  const {nodeTimings, completeNodeTimings} = this._computeFinalNodeTimings();
  ALL_SIMULATION_NODE_TIMINGS.set(options.label || 'unlabeled', completeNodeTimings);
  return {
    timeInMs: totalElapsedTime,
    nodeTimings,
  };
}
/**
* @param {number} wastedBytes
*/
computeWastedMsFromWastedBytes(wastedBytes) {
const {throughput, observedThroughput} = this._options;
// https://github.com/GoogleChrome/lighthouse/pull/13323#issuecomment-962031709
// 0 throughput means the no (additional) throttling is expected.
// This is common for desktop + devtools throttling where throttling is additive and we don't want any additional.
const bitsPerSecond = throughput === 0 ? observedThroughput : throughput;
if (bitsPerSecond === 0) return 0;
const wastedBits = wastedBytes * 8;
const wastedMs = wastedBits / bitsPerSecond * 1000;
// This is an estimate of wasted time, so we won't be more precise than 10ms.
return Math.round(wastedMs / 10) * 10;
}
/**
 * Debug map of the complete node timings from every labeled `simulate()` call.
 * @return {Map<string, Map<Node, CompleteNodeTiming>>}
 */
static get ALL_NODE_TIMINGS() {
  return ALL_SIMULATION_NODE_TIMINGS;
}
/**
* We attempt to start nodes by their observed start time using the record priority as a tie breaker.
* When simulating, just because a low priority image started 5ms before a high priority image doesn't mean
* it would have happened like that when the network was slower.
* @param {Node} node
*/
static _computeNodeStartPosition(node) {
if (node.type === 'cpu') return node.startTime;
return node.startTime + (PriorityStartTimePenalty[node.record.priority] * 1000 * 1000 || 0);
}
}
export {Simulator};

View File

@@ -0,0 +1,89 @@
/** Connection timing details, re-exported from the simulator timing map module. */
export type ConnectionTiming = import('./simulator-timing-map.js').ConnectionTiming;
/** Optional knobs for `TcpConnection.simulateDownloadUntil`. */
export type DownloadOptions = {
    dnsResolutionTime?: number | undefined;
    timeAlreadyElapsed?: number | undefined;
    maximumTimeToElapse?: number | undefined;
};
/** Result of one simulated (partial) download over a TCP connection. */
export type DownloadResults = {
    roundTrips: number;
    timeElapsed: number;
    bytesDownloaded: number;
    extraBytesDownloaded: number;
    congestionWindow: number;
    connectionTiming: ConnectionTiming;
};
// NOTE(review): this looks like a compiler-generated declaration file (see the
// sourceMappingURL comment below) — prefer editing the .js implementation instead.
export class TcpConnection {
    /**
     * @param {number} rtt
     * @param {number} availableThroughput
     * @return {number}
     */
    static maximumSaturatedConnections(rtt: number, availableThroughput: number): number;
    /**
     * @param {number} rtt
     * @param {number} throughput
     * @param {number=} serverLatency
     * @param {boolean=} ssl
     * @param {boolean=} h2
     */
    constructor(rtt: number, throughput: number, serverLatency?: number | undefined, ssl?: boolean | undefined, h2?: boolean | undefined);
    // Internal state assigned in the constructor.
    _warmed: boolean;
    _ssl: boolean;
    _h2: boolean;
    _rtt: number;
    _throughput: number;
    _serverLatency: number;
    _congestionWindow: number;
    _h2OverflowBytesDownloaded: number;
    /**
     * @return {number}
     */
    _computeMaximumCongestionWindowInSegments(): number;
    /**
     * @param {number} throughput
     */
    setThroughput(throughput: number): void;
    /**
     * @param {number} congestion
     */
    setCongestionWindow(congestion: number): void;
    /**
     * @param {boolean} warmed
     */
    setWarmed(warmed: boolean): void;
    /**
     * @return {boolean}
     */
    isWarm(): boolean;
    /**
     * @return {boolean}
     */
    isH2(): boolean;
    /**
     * @return {number}
     */
    get congestionWindow(): number;
    /**
     * Sets the number of excess bytes that are available to this connection on future downloads, only
     * applies to H2 connections.
     * @param {number} bytes
     */
    setH2OverflowBytesDownloaded(bytes: number): void;
    /**
     * @return {TcpConnection}
     */
    clone(): TcpConnection;
    /**
     * Simulates a network download of a particular number of bytes over an optional maximum amount of time
     * and returns information about the ending state.
     *
     * See https://hpbn.co/building-blocks-of-tcp/#three-way-handshake and
     * https://hpbn.co/transport-layer-security-tls/#tls-handshake for details.
     *
     * @param {number} bytesToDownload
     * @param {DownloadOptions} [options]
     * @return {DownloadResults}
     */
    simulateDownloadUntil(bytesToDownload: number, options?: DownloadOptions | undefined): DownloadResults;
}
//# sourceMappingURL=tcp-connection.d.ts.map

View File

@@ -0,0 +1,230 @@
/**
* @license Copyright 2017 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
/** @typedef {import('./simulator-timing-map.js').ConnectionTiming} ConnectionTiming */
const INITIAL_CONGESTION_WINDOW = 10;
const TCP_SEGMENT_SIZE = 1460;
class TcpConnection {
  /**
   * Models a single TCP connection (optionally TLS and/or H2) for download simulation.
   * @param {number} rtt round-trip time, in ms
   * @param {number} throughput available throughput, in bits per second
   * @param {number=} serverLatency extra server response latency added to TTFB, in ms
   * @param {boolean=} ssl whether a TLS handshake is required on a cold connection
   * @param {boolean=} h2 whether the connection speaks HTTP/2
   */
  constructor(rtt, throughput, serverLatency = 0, ssl = true, h2 = false) {
    this._warmed = false;
    this._ssl = ssl;
    this._h2 = h2;
    this._rtt = rtt;
    this._throughput = throughput;
    this._serverLatency = serverLatency;
    // Congestion window is measured in TCP segments of TCP_SEGMENT_SIZE bytes each.
    this._congestionWindow = INITIAL_CONGESTION_WINDOW;
    // Excess bytes already received on this H2 connection, credited to the next download.
    this._h2OverflowBytesDownloaded = 0;
  }
  /**
   * Number of connections that the given throughput can keep fully saturated at one
   * segment per round trip.
   * @param {number} rtt
   * @param {number} availableThroughput
   * @return {number}
   */
  static maximumSaturatedConnections(rtt, availableThroughput) {
    const roundTripsPerSecond = 1000 / rtt;
    const bytesPerRoundTrip = TCP_SEGMENT_SIZE;
    const bytesPerSecond = roundTripsPerSecond * bytesPerRoundTrip;
    // x8 converts bytes/s to bits/s to match availableThroughput's units.
    const minimumThroughputRequiredPerRequest = bytesPerSecond * 8;
    return Math.floor(availableThroughput / minimumThroughputRequiredPerRequest);
  }
  /**
   * Largest congestion window (in segments) the link's bandwidth-delay product can sustain.
   * @return {number}
   */
  _computeMaximumCongestionWindowInSegments() {
    const bytesPerSecond = this._throughput / 8;
    const secondsPerRoundTrip = this._rtt / 1000;
    const bytesPerRoundTrip = bytesPerSecond * secondsPerRoundTrip;
    return Math.floor(bytesPerRoundTrip / TCP_SEGMENT_SIZE);
  }
  /**
   * @param {number} throughput
   */
  setThroughput(throughput) {
    this._throughput = throughput;
  }
  /**
   * @param {number} congestion
   */
  setCongestionWindow(congestion) {
    this._congestionWindow = congestion;
  }
  /**
   * @param {boolean} warmed
   */
  setWarmed(warmed) {
    this._warmed = warmed;
  }
  /**
   * @return {boolean}
   */
  isWarm() {
    return this._warmed;
  }
  /**
   * @return {boolean}
   */
  isH2() {
    return this._h2;
  }
  /**
   * @return {number}
   */
  get congestionWindow() {
    return this._congestionWindow;
  }
  /**
   * Sets the number of excess bytes that are available to this connection on future downloads, only
   * applies to H2 connections.
   * @param {number} bytes
   */
  setH2OverflowBytesDownloaded(bytes) {
    if (!this._h2) return;
    this._h2OverflowBytesDownloaded = bytes;
  }
  /**
   * Copy of this connection, including its warmed/congestion/overflow state.
   * @return {TcpConnection}
   */
  clone() {
    return Object.assign(new TcpConnection(this._rtt, this._throughput), this);
  }
  /**
   * Simulates a network download of a particular number of bytes over an optional maximum amount of time
   * and returns information about the ending state.
   *
   * See https://hpbn.co/building-blocks-of-tcp/#three-way-handshake and
   * https://hpbn.co/transport-layer-security-tls/#tls-handshake for details.
   *
   * @param {number} bytesToDownload
   * @param {DownloadOptions} [options]
   * @return {DownloadResults}
   */
  simulateDownloadUntil(bytesToDownload, options) {
    const {timeAlreadyElapsed = 0, maximumTimeToElapse = Infinity, dnsResolutionTime = 0} =
      options || {};
    // Credit bytes that overflowed from a previous download on this warm H2 connection.
    if (this._warmed && this._h2) {
      bytesToDownload -= this._h2OverflowBytesDownloaded;
    }
    const twoWayLatency = this._rtt;
    const oneWayLatency = twoWayLatency / 2;
    const maximumCongestionWindow = this._computeMaximumCongestionWindowInSegments();
    let handshakeAndRequest = oneWayLatency;
    if (!this._warmed) {
      handshakeAndRequest =
        // DNS lookup
        dnsResolutionTime +
        // SYN
        oneWayLatency +
        // SYN ACK
        oneWayLatency +
        // ACK + initial request
        oneWayLatency +
        // ClientHello/ServerHello assuming TLS False Start is enabled (https://istlsfastyet.com/#server-performance).
        (this._ssl ? twoWayLatency : 0);
    }
    let roundTrips = Math.ceil(handshakeAndRequest / twoWayLatency);
    let timeToFirstByte = handshakeAndRequest + this._serverLatency + oneWayLatency;
    // A warm H2 connection has no per-request handshake cost.
    if (this._warmed && this._h2) timeToFirstByte = 0;
    const timeElapsedForTTFB = Math.max(timeToFirstByte - timeAlreadyElapsed, 0);
    const maximumDownloadTimeToElapse = maximumTimeToElapse - timeElapsedForTTFB;
    let congestionWindow = Math.min(this._congestionWindow, maximumCongestionWindow);
    let totalBytesDownloaded = 0;
    if (timeElapsedForTTFB > 0) {
      // The first congestion window of data arrives along with the first byte.
      totalBytesDownloaded = congestionWindow * TCP_SEGMENT_SIZE;
    } else {
      roundTrips = 0;
    }
    let downloadTimeElapsed = 0;
    let bytesRemaining = bytesToDownload - totalBytesDownloaded;
    // Grow the congestion window (doubling, capped at the maximum) one round trip at a
    // time until the payload is fully downloaded or the time budget is spent.
    while (bytesRemaining > 0 && downloadTimeElapsed <= maximumDownloadTimeToElapse) {
      roundTrips++;
      downloadTimeElapsed += twoWayLatency;
      congestionWindow = Math.max(Math.min(maximumCongestionWindow, congestionWindow * 2), 1);
      const bytesDownloadedInWindow = congestionWindow * TCP_SEGMENT_SIZE;
      totalBytesDownloaded += bytesDownloadedInWindow;
      bytesRemaining -= bytesDownloadedInWindow;
    }
    const timeElapsed = timeElapsedForTTFB + downloadTimeElapsed;
    // H2 may deliver more than requested in the final window; report the surplus so the
    // caller can credit it to the next request on this connection.
    const extraBytesDownloaded = this._h2 ? Math.max(totalBytesDownloaded - bytesToDownload, 0) : 0;
    const bytesDownloaded = Math.max(Math.min(totalBytesDownloaded, bytesToDownload), 0);
    /** @type {ConnectionTiming} */
    let connectionTiming;
    if (!this._warmed) {
      connectionTiming = {
        dnsResolutionTime,
        connectionTime: handshakeAndRequest - dnsResolutionTime,
        sslTime: this._ssl ? twoWayLatency : undefined,
        timeToFirstByte,
      };
    } else if (this._h2) {
      // TODO: timing information currently difficult to model for warm h2 connections.
      connectionTiming = {
        timeToFirstByte,
      };
    } else {
      connectionTiming = {
        connectionTime: handshakeAndRequest,
        timeToFirstByte,
      };
    }
    return {
      roundTrips,
      timeElapsed,
      bytesDownloaded,
      extraBytesDownloaded,
      congestionWindow,
      connectionTiming,
    };
  }
}
export {TcpConnection};
/**
* @typedef DownloadOptions
* @property {number} [dnsResolutionTime]
* @property {number} [timeAlreadyElapsed]
* @property {number} [maximumTimeToElapse]
*/
/**
* @typedef DownloadResults
* @property {number} roundTrips
* @property {number} timeElapsed
* @property {number} bytesDownloaded
* @property {number} extraBytesDownloaded
* @property {number} congestionWindow
* @property {ConnectionTiming} connectionTiming
*/

View File

@@ -0,0 +1,17 @@
/**
 * Declaration of the helper that builds a deprecation issue's localized message, related
 * links, and placeholder substitutions.
 * @param {LH.Crdp.Audits.DeprecationIssueDetails} issueDetails
 */
export function getIssueDetailDescription(issueDetails: LH.Crdp.Audits.DeprecationIssueDetails): {
    substitutions: Map<string, import("../index.js").IcuMessage | undefined>;
    links: {
        link: string;
        linkTitle: import("../index.js").IcuMessage;
    }[];
    message: import("../index.js").IcuMessage | undefined;
};
// Shared strings used when rendering deprecation issues.
export namespace UIStrings {
    const feature: string;
    const milestone: string;
    const title: string;
}
//# sourceMappingURL=deprecation-description.d.ts.map

View File

@@ -0,0 +1,71 @@
/**
* @license Copyright 2023 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import * as i18n from '../lib/i18n/i18n.js';
import {DEPRECATIONS_METADATA, UIStrings as DeprecationUIStrings} from './deprecations-strings.js';
// Strings owned by this file; the per-deprecation message strings come from
// deprecations-strings.js (imported above as DeprecationUIStrings).
const UIStrings = {
  // Store strings used across messages in this block.
  /**
   * @description This links to the chrome feature status page when one exists.
   */
  feature: 'Check the feature status page for more details.',
  /**
   * @description This links to the chromium dash schedule when a milestone is set.
   * @example {100} milestone
   */
  milestone: 'This change will go into effect with milestone {milestone}.',
  /**
   * @description Title of issue raised when a deprecated feature is used
   */
  title: 'Deprecated Feature Used',
};
// ICU formatter bound to this file's own strings.
const str_ = i18n.createIcuMessageFn(import.meta.url, UIStrings);
// Separate ICU formatter for the shared per-deprecation message strings.
const deprecationsStr_ = i18n.createIcuMessageFn(import.meta.url, DeprecationUIStrings);
/**
 * Assembles the user-facing description for a deprecation issue: the localized message
 * (when one exists for the issue type), links to the Chrome status page and/or milestone
 * schedule, and the placeholder substitution map.
 * @param {LH.Crdp.Audits.DeprecationIssueDetails} issueDetails
 */
function getIssueDetailDescription(issueDetails) {
  const type = /** @type {keyof DEPRECATIONS_METADATA} */ (issueDetails.type);
  const englishMessage = DeprecationUIStrings[type];
  // No entry means the type has no dedicated message; leave it undefined.
  const message = englishMessage ? deprecationsStr_(englishMessage) : undefined;
  /** @type {{chromeStatusFeature?: number, milestone?: number}|undefined} */
  const deprecationMeta = DEPRECATIONS_METADATA[type];
  const links = [];
  const chromeStatusFeature = deprecationMeta?.chromeStatusFeature ?? 0;
  if (chromeStatusFeature !== 0) {
    links.push({
      link: `https://chromestatus.com/feature/${chromeStatusFeature}`,
      linkTitle: str_(UIStrings.feature),
    });
  }
  const milestone = deprecationMeta?.milestone ?? 0;
  if (milestone !== 0) {
    links.push({
      link: 'https://chromiumdash.appspot.com/schedule',
      linkTitle: str_(UIStrings.milestone, {milestone}),
    });
  }
  return ({
    substitutions: new Map([
      ['PLACEHOLDER_title', str_(UIStrings.title)],
      ['PLACEHOLDER_message', message],
    ]),
    links,
    message,
  });
}
export {
getIssueDetailDescription,
UIStrings,
};

View File

@@ -0,0 +1,285 @@
// One message string per deprecation issue type; keys appear to match
// `LH.Crdp.Audits.DeprecationIssueDetails.type` values (looked up by type in
// deprecation-description.js) — confirm against the CDP protocol definitions.
export namespace UIStrings {
    const AuthorizationCoveredByWildcard: string;
    const CanRequestURLHTTPContainingNewline: string;
    const ChromeLoadTimesConnectionInfo: string;
    const ChromeLoadTimesFirstPaintAfterLoadTime: string;
    const ChromeLoadTimesWasAlternateProtocolAvailable: string;
    const CookieWithTruncatingChar: string;
    const CrossOriginAccessBasedOnDocumentDomain: string;
    const CrossOriginWindowAlert: string;
    const CrossOriginWindowConfirm: string;
    const CSSSelectorInternalMediaControlsOverlayCastButton: string;
    const DataUrlInSvgUse: string;
    const DocumentDomainSettingWithoutOriginAgentClusterHeader: string;
    const DOMMutationEvents: string;
    const ExpectCTHeader: string;
    const GeolocationInsecureOrigin: string;
    const GeolocationInsecureOriginDeprecatedNotRemoved: string;
    const GetUserMediaInsecureOrigin: string;
    const HostCandidateAttributeGetter: string;
    const IdentityInCanMakePaymentEvent: string;
    const InsecurePrivateNetworkSubresourceRequest: string;
    const InterestGroupDailyUpdateUrl: string;
    const LocalCSSFileExtensionRejected: string;
    const MediaSourceAbortRemove: string;
    const MediaSourceDurationTruncatingBuffered: string;
    const NonStandardDeclarativeShadowDOM: string;
    const NoSysexWebMIDIWithoutPermission: string;
    const NotificationInsecureOrigin: string;
    const NotificationPermissionRequestedIframe: string;
    const ObsoleteCreateImageBitmapImageOrientationNone: string;
    const ObsoleteWebRtcCipherSuite: string;
    const OverflowVisibleOnReplacedElement: string;
    const PaymentInstruments: string;
    const PaymentRequestCSPViolation: string;
    const PersistentQuotaType: string;
    const PictureSourceSrc: string;
    const PrefixedCancelAnimationFrame: string;
    const PrefixedRequestAnimationFrame: string;
    const PrefixedVideoDisplayingFullscreen: string;
    const PrefixedVideoEnterFullScreen: string;
    const PrefixedVideoEnterFullscreen: string;
    const PrefixedVideoExitFullScreen: string;
    const PrefixedVideoExitFullscreen: string;
    const PrefixedVideoSupportsFullscreen: string;
    const PrivacySandboxExtensionsAPI: string;
    const RangeExpand: string;
    const RequestedSubresourceWithEmbeddedCredentials: string;
    const RTCConstraintEnableDtlsSrtpFalse: string;
    const RTCConstraintEnableDtlsSrtpTrue: string;
    const RTCPeerConnectionGetStatsLegacyNonCompliant: string;
    const RtcpMuxPolicyNegotiate: string;
    const SharedArrayBufferConstructedWithoutIsolation: string;
    const TextToSpeech_DisallowedByAutoplay: string;
    const V8SharedArrayBufferConstructedInExtensionWithoutIsolation: string;
    const WebSQL: string;
    const WindowPlacementPermissionDescriptorUsed: string;
    const WindowPlacementPermissionPolicyParsed: string;
    const XHRJSONEncodingDetection: string;
    const XMLHttpRequestSynchronousInNonWorkerOutsideBeforeUnload: string;
    const XRSupportsSession: string;
}
export namespace DEPRECATIONS_METADATA {
export namespace AuthorizationCoveredByWildcard_1 {
const milestone: number;
}
export { AuthorizationCoveredByWildcard_1 as AuthorizationCoveredByWildcard };
export namespace CSSSelectorInternalMediaControlsOverlayCastButton_1 {
const chromeStatusFeature: number;
}
export { CSSSelectorInternalMediaControlsOverlayCastButton_1 as CSSSelectorInternalMediaControlsOverlayCastButton };
export namespace CanRequestURLHTTPContainingNewline_1 {
const chromeStatusFeature_1: number;
export { chromeStatusFeature_1 as chromeStatusFeature };
}
export { CanRequestURLHTTPContainingNewline_1 as CanRequestURLHTTPContainingNewline };
export namespace ChromeLoadTimesConnectionInfo_1 {
const chromeStatusFeature_2: number;
export { chromeStatusFeature_2 as chromeStatusFeature };
}
export { ChromeLoadTimesConnectionInfo_1 as ChromeLoadTimesConnectionInfo };
export namespace ChromeLoadTimesFirstPaintAfterLoadTime_1 {
const chromeStatusFeature_3: number;
export { chromeStatusFeature_3 as chromeStatusFeature };
}
export { ChromeLoadTimesFirstPaintAfterLoadTime_1 as ChromeLoadTimesFirstPaintAfterLoadTime };
export namespace ChromeLoadTimesWasAlternateProtocolAvailable_1 {
const chromeStatusFeature_4: number;
export { chromeStatusFeature_4 as chromeStatusFeature };
}
export { ChromeLoadTimesWasAlternateProtocolAvailable_1 as ChromeLoadTimesWasAlternateProtocolAvailable };
export namespace CookieWithTruncatingChar_1 {
const milestone_1: number;
export { milestone_1 as milestone };
}
export { CookieWithTruncatingChar_1 as CookieWithTruncatingChar };
export namespace CrossOriginAccessBasedOnDocumentDomain_1 {
const milestone_2: number;
export { milestone_2 as milestone };
}
export { CrossOriginAccessBasedOnDocumentDomain_1 as CrossOriginAccessBasedOnDocumentDomain };
export namespace DOMMutationEvents_1 {
const chromeStatusFeature_5: number;
export { chromeStatusFeature_5 as chromeStatusFeature };
const milestone_3: number;
export { milestone_3 as milestone };
}
export { DOMMutationEvents_1 as DOMMutationEvents };
/**
 * Per-deprecation metadata declarations — one namespace per deprecation name.
 *
 * Each namespace exposes up to two numeric constants:
 * - `chromeStatusFeature`: presumably a chromestatus.com feature-entry ID
 *   (the values in the companion DEPRECATIONS_METADATA table are large IDs
 *   such as 5714245488476160) — TODO confirm.
 * - `milestone`: presumably the Chrome milestone tied to the deprecation or
 *   removal (companion table holds values such as 97, 103) — TODO confirm.
 *
 * NOTE(review): the `_N`-suffixed locals plus `export { x_N as x }` renames
 * are an artifact of the TypeScript declaration emitter (this appears to be
 * compiler-generated output — see the source-map reference at the end of the
 * file). The suffixed names are not part of the public surface; only the
 * renamed exports are. Do not hand-edit beyond comments — regenerate instead.
 */
export namespace DataUrlInSvgUse_1 {
    const chromeStatusFeature_6: number;
    export { chromeStatusFeature_6 as chromeStatusFeature };
    const milestone_4: number;
    export { milestone_4 as milestone };
}
export { DataUrlInSvgUse_1 as DataUrlInSvgUse };
export namespace DocumentDomainSettingWithoutOriginAgentClusterHeader_1 {
    const milestone_5: number;
    export { milestone_5 as milestone };
}
export { DocumentDomainSettingWithoutOriginAgentClusterHeader_1 as DocumentDomainSettingWithoutOriginAgentClusterHeader };
export namespace ExpectCTHeader_1 {
    const chromeStatusFeature_7: number;
    export { chromeStatusFeature_7 as chromeStatusFeature };
    const milestone_6: number;
    export { milestone_6 as milestone };
}
export { ExpectCTHeader_1 as ExpectCTHeader };
export namespace IdentityInCanMakePaymentEvent_1 {
    const chromeStatusFeature_8: number;
    export { chromeStatusFeature_8 as chromeStatusFeature };
}
export { IdentityInCanMakePaymentEvent_1 as IdentityInCanMakePaymentEvent };
export namespace InsecurePrivateNetworkSubresourceRequest_1 {
    const chromeStatusFeature_9: number;
    export { chromeStatusFeature_9 as chromeStatusFeature };
    const milestone_7: number;
    export { milestone_7 as milestone };
}
export { InsecurePrivateNetworkSubresourceRequest_1 as InsecurePrivateNetworkSubresourceRequest };
export namespace LocalCSSFileExtensionRejected_1 {
    const milestone_8: number;
    export { milestone_8 as milestone };
}
export { LocalCSSFileExtensionRejected_1 as LocalCSSFileExtensionRejected };
export namespace MediaSourceAbortRemove_1 {
    const chromeStatusFeature_10: number;
    export { chromeStatusFeature_10 as chromeStatusFeature };
}
export { MediaSourceAbortRemove_1 as MediaSourceAbortRemove };
export namespace MediaSourceDurationTruncatingBuffered_1 {
    const chromeStatusFeature_11: number;
    export { chromeStatusFeature_11 as chromeStatusFeature };
}
export { MediaSourceDurationTruncatingBuffered_1 as MediaSourceDurationTruncatingBuffered };
export namespace NoSysexWebMIDIWithoutPermission_1 {
    const chromeStatusFeature_12: number;
    export { chromeStatusFeature_12 as chromeStatusFeature };
    const milestone_9: number;
    export { milestone_9 as milestone };
}
export { NoSysexWebMIDIWithoutPermission_1 as NoSysexWebMIDIWithoutPermission };
export namespace NonStandardDeclarativeShadowDOM_1 {
    const chromeStatusFeature_13: number;
    export { chromeStatusFeature_13 as chromeStatusFeature };
    const milestone_10: number;
    export { milestone_10 as milestone };
}
export { NonStandardDeclarativeShadowDOM_1 as NonStandardDeclarativeShadowDOM };
export namespace NotificationPermissionRequestedIframe_1 {
    const chromeStatusFeature_14: number;
    export { chromeStatusFeature_14 as chromeStatusFeature };
}
export { NotificationPermissionRequestedIframe_1 as NotificationPermissionRequestedIframe };
export namespace ObsoleteCreateImageBitmapImageOrientationNone_1 {
    const milestone_11: number;
    export { milestone_11 as milestone };
}
export { ObsoleteCreateImageBitmapImageOrientationNone_1 as ObsoleteCreateImageBitmapImageOrientationNone };
export namespace ObsoleteWebRtcCipherSuite_1 {
    const milestone_12: number;
    export { milestone_12 as milestone };
}
export { ObsoleteWebRtcCipherSuite_1 as ObsoleteWebRtcCipherSuite };
export namespace OverflowVisibleOnReplacedElement_1 {
    const chromeStatusFeature_15: number;
    export { chromeStatusFeature_15 as chromeStatusFeature };
    const milestone_13: number;
    export { milestone_13 as milestone };
}
export { OverflowVisibleOnReplacedElement_1 as OverflowVisibleOnReplacedElement };
export namespace PaymentInstruments_1 {
    const chromeStatusFeature_16: number;
    export { chromeStatusFeature_16 as chromeStatusFeature };
}
export { PaymentInstruments_1 as PaymentInstruments };
export namespace PaymentRequestCSPViolation_1 {
    const chromeStatusFeature_17: number;
    export { chromeStatusFeature_17 as chromeStatusFeature };
}
export { PaymentRequestCSPViolation_1 as PaymentRequestCSPViolation };
export namespace PersistentQuotaType_1 {
    const chromeStatusFeature_18: number;
    export { chromeStatusFeature_18 as chromeStatusFeature };
    const milestone_14: number;
    export { milestone_14 as milestone };
}
export { PersistentQuotaType_1 as PersistentQuotaType };
export namespace RTCConstraintEnableDtlsSrtpFalse_1 {
    const milestone_15: number;
    export { milestone_15 as milestone };
}
export { RTCConstraintEnableDtlsSrtpFalse_1 as RTCConstraintEnableDtlsSrtpFalse };
export namespace RTCConstraintEnableDtlsSrtpTrue_1 {
    const milestone_16: number;
    export { milestone_16 as milestone };
}
export { RTCConstraintEnableDtlsSrtpTrue_1 as RTCConstraintEnableDtlsSrtpTrue };
export namespace RTCPeerConnectionGetStatsLegacyNonCompliant_1 {
    const chromeStatusFeature_19: number;
    export { chromeStatusFeature_19 as chromeStatusFeature };
    const milestone_17: number;
    export { milestone_17 as milestone };
}
export { RTCPeerConnectionGetStatsLegacyNonCompliant_1 as RTCPeerConnectionGetStatsLegacyNonCompliant };
export namespace RequestedSubresourceWithEmbeddedCredentials_1 {
    const chromeStatusFeature_20: number;
    export { chromeStatusFeature_20 as chromeStatusFeature };
}
export { RequestedSubresourceWithEmbeddedCredentials_1 as RequestedSubresourceWithEmbeddedCredentials };
export namespace RtcpMuxPolicyNegotiate_1 {
    const chromeStatusFeature_21: number;
    export { chromeStatusFeature_21 as chromeStatusFeature };
    const milestone_18: number;
    export { milestone_18 as milestone };
}
export { RtcpMuxPolicyNegotiate_1 as RtcpMuxPolicyNegotiate };
export namespace SharedArrayBufferConstructedWithoutIsolation_1 {
    const milestone_19: number;
    export { milestone_19 as milestone };
}
export { SharedArrayBufferConstructedWithoutIsolation_1 as SharedArrayBufferConstructedWithoutIsolation };
export namespace TextToSpeech_DisallowedByAutoplay_1 {
    const chromeStatusFeature_22: number;
    export { chromeStatusFeature_22 as chromeStatusFeature };
    const milestone_20: number;
    export { milestone_20 as milestone };
}
export { TextToSpeech_DisallowedByAutoplay_1 as TextToSpeech_DisallowedByAutoplay };
export namespace V8SharedArrayBufferConstructedInExtensionWithoutIsolation_1 {
    const milestone_21: number;
    export { milestone_21 as milestone };
}
export { V8SharedArrayBufferConstructedInExtensionWithoutIsolation_1 as V8SharedArrayBufferConstructedInExtensionWithoutIsolation };
export namespace WebSQL_1 {
    const chromeStatusFeature_23: number;
    export { chromeStatusFeature_23 as chromeStatusFeature };
    const milestone_22: number;
    export { milestone_22 as milestone };
}
export { WebSQL_1 as WebSQL };
export namespace WindowPlacementPermissionDescriptorUsed_1 {
    const chromeStatusFeature_24: number;
    export { chromeStatusFeature_24 as chromeStatusFeature };
    const milestone_23: number;
    export { milestone_23 as milestone };
}
export { WindowPlacementPermissionDescriptorUsed_1 as WindowPlacementPermissionDescriptorUsed };
export namespace WindowPlacementPermissionPolicyParsed_1 {
    const chromeStatusFeature_25: number;
    export { chromeStatusFeature_25 as chromeStatusFeature };
    const milestone_24: number;
    export { milestone_24 as milestone };
}
export { WindowPlacementPermissionPolicyParsed_1 as WindowPlacementPermissionPolicyParsed };
export namespace XHRJSONEncodingDetection_1 {
    const milestone_25: number;
    export { milestone_25 as milestone };
}
export { XHRJSONEncodingDetection_1 as XHRJSONEncodingDetection };
export namespace XRSupportsSession_1 {
    const milestone_26: number;
    export { milestone_26 as milestone };
}
export { XRSupportsSession_1 as XRSupportsSession };
}
//# sourceMappingURL=deprecations-strings.d.ts.map

/* ---------------------------------------------------------------------------
 * File boundary. The lines that stood here ("View File", "@@ -0,0 +1,385 @@")
 * were residue from a diff-viewer export, not source code. The content below
 * is a separate, newly added generated module (385 lines per the original
 * diff hunk header) containing UIStrings and DEPRECATIONS_METADATA.
 * ------------------------------------------------------------------------- */
// auto-generated by build/build-cdt-strings.js
/* eslint-disable */
// Copyright 2023 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This file is auto-generated, do not edit manually.
// Re-generate with: npm run generate-protocol-resources
/**
 * UIStrings: deprecation warning messages shown to developers (DevTools
 * Issues tab / Lighthouse), keyed by deprecation name. The keys largely
 * mirror those of the DEPRECATIONS_METADATA table declared below.
 *
 * NOTE(review): this module is auto-generated (see file header). Keys, key
 * order, and string values are all consumed at runtime and must stay exactly
 * as the generator emits them; the per-entry `@description` JSDoc appears to
 * provide localization context. Edit by re-running the generator, not by
 * hand.
 *
 * NOTE(review): in `CanRequestURLHTTPContainingNewline` and
 * `CookieWithTruncatingChar`, the sequences `(n|r|t)` and `(0|r|n)` look like
 * they lost escape backslashes somewhere in generation/extraction (the
 * upstream Chromium text reads `\(n|r|t\)` / `\(0|r|n\)`). Left untouched
 * here because they are runtime strings in generated output — confirm
 * against the generator before relying on this text.
 */
export const UIStrings = {
  /**
   * @description We show this warning when 1) an 'authorization' header is attached to the request by scripts, 2) there is no 'authorization' in the 'access-control-allow-headers' header in the response, and 3) there is a wildcard symbol ('*') in the 'access-control-allow-header' header in the response. This is allowed now, but we're planning to reject such responses and require responses to have an 'access-control-allow-headers' containing 'authorization'.
   */
  AuthorizationCoveredByWildcard: "Authorization will not be covered by the wildcard symbol (*) in CORS `Access-Control-Allow-Headers` handling.",
  /**
   * @description This warning occurs when a page attempts to request a resource whose URL contained both a newline character (`\n` or `\r`), and a less-than character (`<`). These resources are blocked.
   */
  CanRequestURLHTTPContainingNewline: "Resource requests whose URLs contained both removed whitespace `(n|r|t)` characters and less-than characters (`<`) are blocked. Please remove newlines and encode less-than characters from places like element attribute values in order to load these resources.",
  /**
   * @description This warning occurs when the website attempts to invoke the deprecated `chrome.loadTimes().connectionInfo` API.
   */
  ChromeLoadTimesConnectionInfo: "`chrome.loadTimes()` is deprecated, instead use standardized API: Navigation Timing 2.",
  /**
   * @description This warning occurs when the website attempts to invoke the deprecated `chrome.loadTimes().firstPaintAfterLoadTime` API.
   */
  ChromeLoadTimesFirstPaintAfterLoadTime: "`chrome.loadTimes()` is deprecated, instead use standardized API: Paint Timing.",
  /**
   * @description This warning occurs when the website attempts to invoke the deprecated `chrome.loadTimes().wasAlternateProtocolAvailable` API.
   */
  ChromeLoadTimesWasAlternateProtocolAvailable: "`chrome.loadTimes()` is deprecated, instead use standardized API: `nextHopProtocol` in Navigation Timing 2.",
  /**
   * @description This warning occurs when the browser attempts to store a cookie containing a banned character. Rather than the cookie string being truncated at the banned character, the entire cookie will be rejected now.
   */
  CookieWithTruncatingChar: "Cookies containing a `(0|r|n)` character will be rejected instead of truncated.",
  /**
   * @description This warning occurs when a frame accesses another frame's data after having set `document.domain` without having set the `Origin-Agent-Cluster` http header. This is a companion warning to `documentDomainSettingWithoutOriginAgentClusterHeader`, where that warning occurs when `document.domain` is set, and this warning occurs when an access has been made, based on that previous `document.domain` setting.
   */
  CrossOriginAccessBasedOnDocumentDomain: "Relaxing the same-origin policy by setting `document.domain` is deprecated, and will be disabled by default. This deprecation warning is for a cross-origin access that was enabled by setting `document.domain`.",
  /**
   * @description Issue text shown when the web page uses a deprecated web API. The window.alert is the deprecated web API function.
   */
  CrossOriginWindowAlert: "Triggering window.alert from cross origin iframes has been deprecated and will be removed in the future.",
  /**
   * @description Issue text shown when the web page uses a deprecated web API. The window.confirm is the deprecated web API function.
   */
  CrossOriginWindowConfirm: "Triggering window.confirm from cross origin iframes has been deprecated and will be removed in the future.",
  /**
   * @description Warning displayed to developers when they hide the Cast button on a video element using the deprecated CSS selector instead of using the disableRemotePlayback attribute on the element.
   */
  CSSSelectorInternalMediaControlsOverlayCastButton: "The `disableRemotePlayback` attribute should be used in order to disable the default Cast integration instead of using `-internal-media-controls-overlay-cast-button` selector.",
  /**
   * @description Warning displayed to developers when a data: URL is assigned to SVG <use> to let them know that the support is deprecated.
   */
  DataUrlInSvgUse: "Support for data: URLs in SVG <use> element is deprecated and it will be removed in the future.",
  /**
   * @description This warning occurs when a script modifies `document.domain` without having set on `Origin-Agent-Cluster` http header. In other words, when a script relies on the default behaviour of `Origin-Agent-Cluster` when setting document.domain.
   */
  DocumentDomainSettingWithoutOriginAgentClusterHeader: "Relaxing the same-origin policy by setting `document.domain` is deprecated, and will be disabled by default. To continue using this feature, please opt-out of origin-keyed agent clusters by sending an `Origin-Agent-Cluster: ?0` header along with the HTTP response for the document and frames. See https://developer.chrome.com/blog/immutable-document-domain/ for more details.",
  /**
   * @description Warning displayed to developers when non-standard Mutation Events are used. These are deprecated and will be removed.
   */
  DOMMutationEvents: "DOM Mutation Events, including `DOMSubtreeModified`, `DOMNodeInserted`, `DOMNodeRemoved`, `DOMNodeRemovedFromDocument`, `DOMNodeInsertedIntoDocument`, and `DOMCharacterDataModified` are deprecated (https://w3c.github.io/uievents/#legacy-event-types) and will be removed. Please use `MutationObserver` instead.",
  /**
   * @description This message is shown when the deprecated Expect-CT header is present.
   */
  ExpectCTHeader: "The `Expect-CT` header is deprecated and will be removed. Chrome requires Certificate Transparency for all publicly trusted certificates issued after April 30, 2018.",
  /**
   * @description Warning displayed to developers when the Geolocation API is used from an insecure origin (one that isn't localhost or doesn't use HTTPS) to notify them that this use is no longer supported.
   */
  GeolocationInsecureOrigin: "`getCurrentPosition()` and `watchPosition()` no longer work on insecure origins. To use this feature, you should consider switching your application to a secure origin, such as HTTPS. See https://goo.gle/chrome-insecure-origins for more details.",
  /**
   * @description Warning displayed to developers when the Geolocation API is used from an insecure origin (one that isn't localhost or doesn't use HTTPS) to notify them that this use is deprecated.
   */
  GeolocationInsecureOriginDeprecatedNotRemoved: "`getCurrentPosition()` and `watchPosition()` are deprecated on insecure origins. To use this feature, you should consider switching your application to a secure origin, such as HTTPS. See https://goo.gle/chrome-insecure-origins for more details.",
  /**
   * @description This warning occurs when the `getUserMedia()` API is invoked on an insecure (e.g., HTTP) site. This is only permitted on secure sites (e.g., HTTPS).
   */
  GetUserMediaInsecureOrigin: "`getUserMedia()` no longer works on insecure origins. To use this feature, you should consider switching your application to a secure origin, such as HTTPS. See https://goo.gle/chrome-insecure-origins for more details.",
  /**
   * @description A deprecation warning shown to developers in the DevTools Issues tab when code tries to use the deprecated hostCandidate field, guiding developers to use the the equivalent information in the .address and .port fields instead.
   */
  HostCandidateAttributeGetter: "`RTCPeerConnectionIceErrorEvent.hostCandidate` is deprecated. Please use `RTCPeerConnectionIceErrorEvent.address` or `RTCPeerConnectionIceErrorEvent.port` instead.",
  /**
   * @description A deprecation warning shown in the DevTools Issues tab, when a service worker reads one of the fields from an event named 'canmakepayment'.
   */
  IdentityInCanMakePaymentEvent: "The merchant origin and arbitrary data from the `canmakepayment` service worker event are deprecated and will be removed: `topOrigin`, `paymentRequestOrigin`, `methodData`, `modifiers`.",
  /**
   * @description This warning occurs when an insecure context (e.g., HTTP) requests a private resource (not on open internet). This is done to mitigate the potential for CSRF and other attacks.
   */
  InsecurePrivateNetworkSubresourceRequest: "The website requested a subresource from a network that it could only access because of its users' privileged network position. These requests expose non-public devices and servers to the internet, increasing the risk of a cross-site request forgery (CSRF) attack, and/or information leakage. To mitigate these risks, Chrome deprecates requests to non-public subresources when initiated from non-secure contexts, and will start blocking them.",
  /**
   * @description This is a deprecated warning to developers that a field in a structure has been renamed.
   */
  InterestGroupDailyUpdateUrl: "The `dailyUpdateUrl` field of `InterestGroups` passed to `joinAdInterestGroup()` has been renamed to `updateUrl`, to more accurately reflect its behavior.",
  /**
   * @description This warning occurs when a stylesheet loaded from a local file directive does not end in the file type `.css`.
   */
  LocalCSSFileExtensionRejected: "CSS cannot be loaded from `file:` URLs unless they end in a `.css` file extension.",
  /**
   * @description This is a deprecation warning to developers that occurs when the script attempts to use the Media Source Extensions API in a way that is no longer supported by the specification for the API. The usage that is problematic is when the script calls the `SourceBuffer.abort()` method at a time when there is still processing happening in response to a previous `SourceBuffer.remove()` call for the same SourceBuffer object. More precisely, we show this warning to developers when script calls the SourceBuffer abort() method while the asynchronous processing of a remove() call on that SourceBuffer is not yet complete. Early versions of the Media Source Extensions specification allowed such aborts, but standardization of the specification resulted in disallowing the aborts. The script should instead wait for the asynchronous remove() operation to complete, which is observable by listening for the associated 'updateend' event from the SourceBuffer. A note is also included in the warning, describing when abort() is meaningful and allowed by the specification for purposes other than interrupting a remove() operation's asynchronous steps. Those supported purposes include using abort() to interrupt processing that may still be happening in response to a previous appendBuffer() call on that SourceBuffer, or using abort() to clear the internal of any unprocessed data remaining from previous appendBuffer() calls. See https://www.w3.org/TR/media-source-2/#dom-sourcebuffer-abort for the currently specified behavior, which would throw an exception once the deprecated removal abort is no longer supported. See https://github.com/w3c/media-source/issues/19 for the discussion that led to the specification change.
   */
  MediaSourceAbortRemove: "Using `SourceBuffer.abort()` to abort `remove()`'s asynchronous range removal is deprecated due to specification change. Support will be removed in the future. You should listen to the `updateend` event instead. `abort()` is intended to only abort an asynchronous media append or reset parser state.",
  /**
   * @description This is a deprecation warning to developers that occurs when the script attempts to use the Media Source Extensions API in a way that is no longer supported by the specification for the API. The usage that is problematic is when the script sets the duration attribute of a MediaSource object too low. The duration attribute of a MediaSource must be longer than the actual duration of any media (audio or video) already in the MediaSource. When set too low, the MediaSource must remove audio and video content that is beyond the time indicated by the new duration. Content removal that is caused by setting the duration attribute too low is no longer allowed by the specification. The message describes the minimum allowable duration value as the 'highest presentation timestamp of any buffered coded frames' as a more precise way of describing the duration of content already in the MediaSource: 'coded frames' are the specification's way of describing compressed audio frames or compressed video frames, and they each have a 'presentation timestamp' that describes precisely when that frame's playback occurs in the overall media presentation. Early versions of the Media Source Extensions specification allowed this to happen, but standardization of the specification resulted in disallowing this behavior. The underlying issue leading to this specification change was that setting the duration attribute should be synchronous, but setting it lower than the timestamp of something currently buffered would cause confusing removal of media between that new duration and the previous, larger, duration. The script should instead explicitly remove that range of media first, before lowering the duration. 
   * See https://www.w3.org/TR/media-source-2/#dom-mediasource-duration and https://www.w3.org/TR/media-source-2/#dom-mediasource-duration for the currently specified behavior, which would throw an exception once support is removed for deprecated implicit asynchronous range removal when duration is truncated. See both https://github.com/w3c/media-source/issues/20 and https://github.com/w3c/media-source/issues/26 for the discussion that led to the specification change.
   */
  MediaSourceDurationTruncatingBuffered: "Setting `MediaSource.duration` below the highest presentation timestamp of any buffered coded frames is deprecated due to specification change. Support for implicit removal of truncated buffered media will be removed in the future. You should instead perform explicit `remove(newDuration, oldDuration)` on all `sourceBuffers`, where `newDuration < oldDuration`.",
  /**
   * @description This warning is displayed when a site contains a `<template>` element with the `shadowroot` attribute.
   */
  NonStandardDeclarativeShadowDOM: "The older, non-standardized `shadowroot` attribute is deprecated, and will *no longer function* in M119. Please use the new, standardized `shadowrootmode` attribute instead.",
  /**
   * @description This warning occurs when the browser requests Web MIDI access as sysex (system exclusive messages) can be allowed via prompt even if the browser did not specifically request it.
   */
  NoSysexWebMIDIWithoutPermission: "Web MIDI will ask a permission to use even if the sysex is not specified in the `MIDIOptions`.",
  /**
   * @description Warning displayed to developers when the Notification API is used from an insecure origin (one that isn't localhost or doesn't use HTTPS) to notify them that this use is no longer supported.
   */
  NotificationInsecureOrigin: "The Notification API may no longer be used from insecure origins. You should consider switching your application to a secure origin, such as HTTPS. See https://goo.gle/chrome-insecure-origins for more details.",
  /**
   * @description Warning displayed to developers when permission to use notifications has been requested by a cross-origin iframe, to notify them that this use is no longer supported.
   */
  NotificationPermissionRequestedIframe: "Permission for the Notification API may no longer be requested from a cross-origin iframe. You should consider requesting permission from a top-level frame or opening a new window instead.",
  /**
   * @description Warning displayed to developers when CreateImageBitmap is used with the newly deprecated option imageOrientation: 'none'.
   */
  ObsoleteCreateImageBitmapImageOrientationNone: "Option `imageOrientation: 'none'` in createImageBitmap is deprecated. Please use createImageBitmap with option \\{imageOrientation: 'from-image'\\} instead.",
  /**
   * @description This warning occurs when the WebRTC protocol attempts to negotiate a connection using an obsolete cipher and risks connection security.
   */
  ObsoleteWebRtcCipherSuite: "Your partner is negotiating an obsolete (D)TLS version. Please check with your partner to have this fixed.",
  /**
   * @description Warning displayed to developers that use overflow:visible for replaced elements. This declaration was earlier ignored but will now change the element's painting based on whether the overflow value allows the element to paint outside its bounds.
   */
  OverflowVisibleOnReplacedElement: "Specifying `overflow: visible` on img, video and canvas tags may cause them to produce visual content outside of the element bounds. See https://github.com/WICG/shared-element-transitions/blob/main/debugging_overflow_on_images.md.",
  /**
   * @description Warning displayed to developers when they use the PaymentInstruments API to let them know this API is deprecated.
   */
  PaymentInstruments: "`paymentManager.instruments` is deprecated. Please use just-in-time install for payment handlers instead.",
  /**
   * @description Warning displayed to developers when their Web Payment API usage violates their Content-Security-Policy (CSP) connect-src directive to let them know this CSP bypass has been deprecated.
   */
  PaymentRequestCSPViolation: "Your `PaymentRequest` call bypassed Content-Security-Policy (CSP) `connect-src` directive. This bypass is deprecated. Please add the payment method identifier from the `PaymentRequest` API (in `supportedMethods` field) to your CSP `connect-src` directive.",
  /**
   * @description Warning displayed to developers when persistent storage type is used to notify that storage type is deprecated.
   */
  PersistentQuotaType: "`StorageType.persistent` is deprecated. Please use standardized `navigator.storage` instead.",
  /**
   * @description This issue indicates that a `<source>` element with a `<picture>` parent was using an `src` attribute, which is not valid and is ignored by the browser. The `srcset` attribute should be used instead.
   */
  PictureSourceSrc: "`<source src>` with a `<picture>` parent is invalid and therefore ignored. Please use `<source srcset>` instead.",
  /**
   * @description Warning displayed to developers when the vendor-prefixed method (webkitCancelAnimationFrame) is used rather than the equivalent unprefixed method (cancelAnimationFrame).
   */
  PrefixedCancelAnimationFrame: "webkitCancelAnimationFrame is vendor-specific. Please use the standard cancelAnimationFrame instead.",
  /**
   * @description Warning displayed to developers when the vendor-prefixed method (webkitRequestAnimationFrame) is used rather than the equivalent unprefixed method (requestAnimationFrame).
   */
  PrefixedRequestAnimationFrame: "webkitRequestAnimationFrame is vendor-specific. Please use the standard requestAnimationFrame instead.",
  /**
   * @description Standard message when one web API is deprecated in favor of another.
   */
  PrefixedVideoDisplayingFullscreen: "HTMLVideoElement.webkitDisplayingFullscreen is deprecated. Please use Document.fullscreenElement instead.",
  /**
   * @description Standard message when one web API is deprecated in favor of another.
   */
  PrefixedVideoEnterFullScreen: "HTMLVideoElement.webkitEnterFullScreen() is deprecated. Please use Element.requestFullscreen() instead.",
  /**
   * @description Standard message when one web API is deprecated in favor of another.
   */
  PrefixedVideoEnterFullscreen: "HTMLVideoElement.webkitEnterFullscreen() is deprecated. Please use Element.requestFullscreen() instead.",
  /**
   * @description Standard message when one web API is deprecated in favor of another.
   */
  PrefixedVideoExitFullScreen: "HTMLVideoElement.webkitExitFullScreen() is deprecated. Please use Document.exitFullscreen() instead.",
  /**
   * @description Standard message when one web API is deprecated in favor of another.
   */
  PrefixedVideoExitFullscreen: "HTMLVideoElement.webkitExitFullscreen() is deprecated. Please use Document.exitFullscreen() instead.",
  /**
   * @description Standard message when one web API is deprecated in favor of another.
   */
  PrefixedVideoSupportsFullscreen: "HTMLVideoElement.webkitSupportsFullscreen is deprecated. Please use Document.fullscreenEnabled instead.",
  /**
   * @description Warning displayed to developers that the API `chrome.privacy.websites.privacySandboxEnabled` is being deprecated in favour of three new more granular APIs: topicsEnabled, FledgeEnabled and adMeasurementEnabled. The `privacySandboxEnabled` API allowed extensions to control the homologous Chrome Setting. The existing Chrome Setting for Privacy Sandbox is also going away in favor of more granular settings that are matched by the new extensions APIs- topicsEnabled, FledgeEnabled and adMeasurementEnabled.
   */
  PrivacySandboxExtensionsAPI: "We're deprecating the API `chrome.privacy.websites.privacySandboxEnabled`, though it will remain active for backward compatibility until release M113. Instead, please use `chrome.privacy.websites.topicsEnabled`, `chrome.privacy.websites.fledgeEnabled` and `chrome.privacy.websites.adMeasurementEnabled`. See https://developer.chrome.com/docs/extensions/reference/privacy/#property-websites-privacySandboxEnabled.",
  /**
   * @description Standard message when one web API is deprecated in favor of another.
   */
  RangeExpand: "Range.expand() is deprecated. Please use Selection.modify() instead.",
  /**
   * @description This warning occurs when a subresource loaded by a page has a URL with an authority portion. These are disallowed.
   */
  RequestedSubresourceWithEmbeddedCredentials: "Subresource requests whose URLs contain embedded credentials (e.g. `https://user:pass@host/`) are blocked.",
  /**
   * @description A deprecation warning shown in the DevTools Issues tab. It's shown when a video conferencing website attempts to use a non-standard crypto method when performing a handshake to set up a connection with another endpoint.
   */
  RTCConstraintEnableDtlsSrtpFalse: "The constraint `DtlsSrtpKeyAgreement` is removed. You have specified a `false` value for this constraint, which is interpreted as an attempt to use the removed `SDES key negotiation` method. This functionality is removed; use a service that supports `DTLS key negotiation` instead.",
  /**
   * @description A deprecation warning shown in the DevTools Issues tab. It's shown when a video conferencing website uses a non-standard API for controlling the crypto method used, but is not having an effect because the desired behavior is already enabled-by-default.
   */
  RTCConstraintEnableDtlsSrtpTrue: "The constraint `DtlsSrtpKeyAgreement` is removed. You have specified a `true` value for this constraint, which had no effect, but you can remove this constraint for tidiness.",
  /**
   * @description WebRTC is set of JavaScript APIs for sending and receiving data, audio and video. getStats() is a method used to obtain network and quality metrics. There are two versions of this method, one is being deprecated because it is non-standard.
   */
  RTCPeerConnectionGetStatsLegacyNonCompliant: "The callback-based getStats() is deprecated and will be removed. Use the spec-compliant getStats() instead.",
  /**
   * @description A deprecation warning shown in the DevTools Issues tab. It's shown then a video conferencing website attempts to use the `RTCP MUX` policy.
   */
  RtcpMuxPolicyNegotiate: "The `rtcpMuxPolicy` option is deprecated and will be removed.",
  /**
   * @description A deprecation warning shown in the DevTools Issues tab. The placeholder is always the noun 'SharedArrayBuffer' which refers to a JavaScript construct.
   */
  SharedArrayBufferConstructedWithoutIsolation: "`SharedArrayBuffer` will require cross-origin isolation. See https://developer.chrome.com/blog/enabling-shared-array-buffer/ for more details.",
  /**
   * @description A deprecation warning shown in the DevTools Issues tab. It's shown when the speech synthesis API is called before the page receives a user activation.
   */
  TextToSpeech_DisallowedByAutoplay: "`speechSynthesis.speak()` without user activation is deprecated and will be removed.",
  /**
   * @description A deprecation warning shown in the DevTools Issues tab. The placeholder is always the noun 'SharedArrayBuffer' which refers to a JavaScript construct. 'Extensions' refers to Chrome extensions. The warning is shown when Chrome Extensions attempt to use 'SharedArrayBuffer's under insecure circumstances.
   */
  V8SharedArrayBufferConstructedInExtensionWithoutIsolation: "Extensions should opt into cross-origin isolation to continue using `SharedArrayBuffer`. See https://developer.chrome.com/docs/extensions/mv3/cross-origin-isolation/.",
  /**
   * @description Warning displayed to developers when the Web SQL API is used to let them know this API is deprecated.
   */
  WebSQL: "Web SQL is deprecated. Please use SQLite WebAssembly or Indexed Database",
  /**
   * @description A deprecation warning shown in the DevTools Issues tab. 'window-placement' and 'window-management' are the name of the javascript descriptors (do not translate). The warning is shown when web pages attempt to use 'window-placement' in permission APIs (e.g. navigator.permissions.query(...))
   */
  WindowPlacementPermissionDescriptorUsed: "The permission descriptor `window-placement` is deprecated. Use `window-management` instead. For more help, check https://bit.ly/window-placement-rename.",
  /**
   * @description A deprecation warning shown in the DevTools Issues tab. 'window-placement' and 'window-management' are the name of the policy descriptors (do not translate). The warning is shown when web pages attempt to use 'window-placement' as a permission policy (parsed in iframe or header).
   */
  WindowPlacementPermissionPolicyParsed: "The permission policy `window-placement` is deprecated. Use `window-management` instead. For more help, check https://bit.ly/window-placement-rename.",
  /**
   * @description Warning displayed to developers that they are using `XMLHttpRequest` API in a way that they expect an unsupported character encoding `UTF-16` could be used in the server reply.
   */
  XHRJSONEncodingDetection: "UTF-16 is not supported by response json in `XMLHttpRequest`",
  /**
   * @description Warning displayed to developers. It is shown when the `XMLHttpRequest` API is used in a way that it slows down the page load of the next page. The `main thread` refers to an operating systems thread used to run most of the processing of HTML documents, so please use a consistent wording.
   */
  XMLHttpRequestSynchronousInNonWorkerOutsideBeforeUnload: "Synchronous `XMLHttpRequest` on the main thread is deprecated because of its detrimental effects to the end user's experience. For more help, check https://xhr.spec.whatwg.org/.",
  /**
   * @description Warning displayed to developers that instead of using `supportsSession()`, which returns a promise that resolves if the XR session can be supported and rejects if not, they should use `isSessionSupported()` which will return a promise which resolves to a boolean indicating if the XR session can be supported or not, but may reject to throw an exception.
   */
  XRSupportsSession: "`supportsSession()` is deprecated. Please use `isSessionSupported()` and check the resolved boolean value instead.",
};
// Per-deprecation metadata, keyed by the same identifiers as the deprecation
// message strings declared above. Each entry may carry either or both fields:
//  - milestone: a Chrome milestone number associated with the deprecation —
//    presumably the milestone it was deprecated/removed in; TODO confirm which.
//  - chromeStatusFeature: numeric feature id — presumably an entry id on
//    chromestatus.com; verify before relying on it.
export const DEPRECATIONS_METADATA = {
  "AuthorizationCoveredByWildcard": {
    "milestone": 97
  },
  "CSSSelectorInternalMediaControlsOverlayCastButton": {
    "chromeStatusFeature": 5714245488476160
  },
  "CanRequestURLHTTPContainingNewline": {
    "chromeStatusFeature": 5735596811091968
  },
  "ChromeLoadTimesConnectionInfo": {
    "chromeStatusFeature": 5637885046816768
  },
  "ChromeLoadTimesFirstPaintAfterLoadTime": {
    "chromeStatusFeature": 5637885046816768
  },
  "ChromeLoadTimesWasAlternateProtocolAvailable": {
    "chromeStatusFeature": 5637885046816768
  },
  "CookieWithTruncatingChar": {
    "milestone": 103
  },
  "CrossOriginAccessBasedOnDocumentDomain": {
    "milestone": 115
  },
  "DOMMutationEvents": {
    "chromeStatusFeature": 5083947249172480,
    "milestone": 127
  },
  "DataUrlInSvgUse": {
    "chromeStatusFeature": 5128825141198848,
    "milestone": 119
  },
  "DocumentDomainSettingWithoutOriginAgentClusterHeader": {
    "milestone": 115
  },
  "ExpectCTHeader": {
    "chromeStatusFeature": 6244547273687040,
    "milestone": 107
  },
  "IdentityInCanMakePaymentEvent": {
    "chromeStatusFeature": 5190978431352832
  },
  "InsecurePrivateNetworkSubresourceRequest": {
    "chromeStatusFeature": 5436853517811712,
    "milestone": 92
  },
  "LocalCSSFileExtensionRejected": {
    "milestone": 64
  },
  "MediaSourceAbortRemove": {
    "chromeStatusFeature": 6107495151960064
  },
  "MediaSourceDurationTruncatingBuffered": {
    "chromeStatusFeature": 6107495151960064
  },
  "NoSysexWebMIDIWithoutPermission": {
    "chromeStatusFeature": 5138066234671104,
    "milestone": 82
  },
  "NonStandardDeclarativeShadowDOM": {
    "chromeStatusFeature": 6239658726391808,
    "milestone": 119
  },
  "NotificationPermissionRequestedIframe": {
    "chromeStatusFeature": 6451284559265792
  },
  "ObsoleteCreateImageBitmapImageOrientationNone": {
    "milestone": 111
  },
  "ObsoleteWebRtcCipherSuite": {
    "milestone": 81
  },
  "OverflowVisibleOnReplacedElement": {
    "chromeStatusFeature": 5137515594383360,
    "milestone": 108
  },
  "PaymentInstruments": {
    "chromeStatusFeature": 5099285054488576
  },
  "PaymentRequestCSPViolation": {
    "chromeStatusFeature": 6286595631087616
  },
  "PersistentQuotaType": {
    "chromeStatusFeature": 5176235376246784,
    "milestone": 106
  },
  "RTCConstraintEnableDtlsSrtpFalse": {
    "milestone": 97
  },
  "RTCConstraintEnableDtlsSrtpTrue": {
    "milestone": 97
  },
  "RTCPeerConnectionGetStatsLegacyNonCompliant": {
    "chromeStatusFeature": 4631626228695040,
    "milestone": 117
  },
  "RequestedSubresourceWithEmbeddedCredentials": {
    "chromeStatusFeature": 5669008342777856
  },
  "RtcpMuxPolicyNegotiate": {
    "chromeStatusFeature": 5654810086866944,
    "milestone": 62
  },
  "SharedArrayBufferConstructedWithoutIsolation": {
    "milestone": 106
  },
  "TextToSpeech_DisallowedByAutoplay": {
    "chromeStatusFeature": 5687444770914304,
    "milestone": 71
  },
  "V8SharedArrayBufferConstructedInExtensionWithoutIsolation": {
    "milestone": 96
  },
  "WebSQL": {
    "chromeStatusFeature": 5134293578285056,
    "milestone": 115
  },
  "WindowPlacementPermissionDescriptorUsed": {
    "chromeStatusFeature": 5137018030391296,
    "milestone": 112
  },
  "WindowPlacementPermissionPolicyParsed": {
    "chromeStatusFeature": 5137018030391296,
    "milestone": 112
  },
  "XHRJSONEncodingDetection": {
    "milestone": 93
  },
  "XRSupportsSession": {
    "milestone": 80
  }
};

43
node_modules/lighthouse/core/lib/emulation.d.ts generated vendored Normal file
View File

@@ -0,0 +1,43 @@
/**
 * Applies user-agent override and screen/touch emulation to the session per `settings`.
 * @param {LH.Gatherer.FRProtocolSession} session
 * @param {LH.Config.Settings} settings
 * @return {Promise<void>}
 */
export function emulate(session: LH.Gatherer.FRProtocolSession, settings: LH.Config.Settings): Promise<void>;
/**
 * Sets the throttling options specified in config settings, clearing existing network throttling if
 * throttlingMethod is not `devtools` (but not CPU throttling, suspected requirement of WPT-compat).
 *
 * @param {LH.Gatherer.FRProtocolSession} session
 * @param {LH.Config.Settings} settings
 * @return {Promise<void>}
 */
export function throttle(session: LH.Gatherer.FRProtocolSession, settings: LH.Config.Settings): Promise<void>;
/**
 * Clears both network and CPU throttling from the session.
 * @param {LH.Gatherer.FRProtocolSession} session
 * @return {Promise<void>}
 */
export function clearThrottling(session: LH.Gatherer.FRProtocolSession): Promise<void>;
/**
 * Applies the given network latency/throughput conditions to the session.
 * @param {LH.Gatherer.FRProtocolSession} session
 * @param {Required<LH.ThrottlingSettings>} throttlingSettings
 * @return {Promise<void>}
 */
export function enableNetworkThrottling(session: LH.Gatherer.FRProtocolSession, throttlingSettings: Required<LH.ThrottlingSettings>): Promise<void>;
/**
 * Resets network conditions to the unthrottled defaults.
 * @param {LH.Gatherer.FRProtocolSession} session
 * @return {Promise<void>}
 */
export function clearNetworkThrottling(session: LH.Gatherer.FRProtocolSession): Promise<void>;
/**
 * Applies the configured CPU slowdown multiplier to the session.
 * @param {LH.Gatherer.FRProtocolSession} session
 * @param {Required<LH.ThrottlingSettings>} throttlingSettings
 * @return {Promise<void>}
 */
export function enableCPUThrottling(session: LH.Gatherer.FRProtocolSession, throttlingSettings: Required<LH.ThrottlingSettings>): Promise<void>;
/**
 * Resets the CPU throttling rate to 1 (no slowdown).
 * @param {LH.Gatherer.FRProtocolSession} session
 * @return {Promise<void>}
 */
export function clearCPUThrottling(session: LH.Gatherer.FRProtocolSession): Promise<void>;
//# sourceMappingURL=emulation.d.ts.map

163
node_modules/lighthouse/core/lib/emulation.js generated vendored Normal file
View File

@@ -0,0 +1,163 @@
/**
* @license Copyright 2016 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import {lighthouseVersion} from '../../root.js';
// Network conditions that disable network throttling entirely; used by
// clearNetworkThrottling to reset `Network.emulateNetworkConditions`.
const NO_THROTTLING_METRICS = {
  latency: 0,
  downloadThroughput: 0,
  uploadThroughput: 0,
  offline: false,
};
// A CPU throttling rate of 1 means no slowdown; used by clearCPUThrottling to
// reset `Emulation.setCPUThrottlingRate`.
const NO_CPU_THROTTLE_METRICS = {
  rate: 1,
};
/**
 * Derives structured user-agent client-hints metadata from a raw UA string.
 * @param {string} userAgent
 * @param {LH.Config.Settings['formFactor']} formFactor
 * @return {LH.Crdp.Emulation.SetUserAgentOverrideRequest['userAgentMetadata']}
 */
function parseUseragentIntoMetadata(userAgent, formFactor) {
  // Extract the full Chrome version, e.g. 'Chrome/71.0.3577.0' -> '71.0.3577.0'.
  const versionMatch = /Chrome\/([\d.]+)/.exec(userAgent);
  const fullVersion = versionMatch?.[1] || '99.0.1234.0';
  const majorVersion = fullVersion.split('.', 1)[0];

  const isMobile = formFactor === 'mobile';
  // Since config users can supply a custom useragent, they likely are emulating something
  // other than Moto G Power and MacOS Desktop.
  // TODO: Determine how to thoughtfully expose this metadata/client-hints configurability.
  const deviceDetails = isMobile
    ? {
      platform: 'Android',
      platformVersion: '11.0',
      architecture: '',
      model: 'moto g power (2022)',
    }
    : {
      platform: 'macOS',
      platformVersion: '10.15.7',
      architecture: 'x86',
      model: '',
    };

  return {
    brands: [
      {brand: 'Chromium', version: majorVersion},
      {brand: 'Google Chrome', version: majorVersion},
      {brand: 'Lighthouse', version: lighthouseVersion},
    ],
    fullVersion,
    ...deviceDetails,
    mobile: isMobile,
  };
}
/**
 * Applies user-agent override and screen/touch emulation to the session,
 * honoring the opt-outs in `settings` (`emulatedUserAgent: false` and
 * `screenEmulation.disabled: true`).
 * @param {LH.Gatherer.FRProtocolSession} session
 * @param {LH.Config.Settings} settings
 * @return {Promise<void>}
 */
async function emulate(session, settings) {
  const {emulatedUserAgent, screenEmulation, formFactor} = settings;

  if (emulatedUserAgent !== false) {
    const userAgent = /** @type {string} */ (emulatedUserAgent);
    const userAgentMetadata = parseUseragentIntoMetadata(userAgent, formFactor);
    await session.sendCommand('Network.setUserAgentOverride', {userAgent, userAgentMetadata});
  }

  // See devtools-entry for one usecase for disabling screenEmulation
  if (screenEmulation.disabled !== true) {
    const {width, height, deviceScaleFactor, mobile} = screenEmulation;
    await session.sendCommand('Emulation.setDeviceMetricsOverride',
      {width, height, deviceScaleFactor, mobile});
    await session.sendCommand('Emulation.setTouchEmulationEnabled', {enabled: mobile});
  }
}
/**
 * Sets the throttling options specified in config settings, clearing existing network throttling if
 * throttlingMethod is not `devtools` (but not CPU throttling, suspected requirement of WPT-compat).
 *
 * @param {LH.Gatherer.FRProtocolSession} session
 * @param {LH.Config.Settings} settings
 * @return {Promise<void>}
 */
async function throttle(session, settings) {
  if (settings.throttlingMethod === 'devtools') {
    const tasks = [
      enableNetworkThrottling(session, settings.throttling),
      enableCPUThrottling(session, settings.throttling),
    ];
    await Promise.all(tasks);
  } else {
    await clearNetworkThrottling(session);
  }
}
/**
 * Removes both network and CPU throttling from the session in parallel.
 * @param {LH.Gatherer.FRProtocolSession} session
 * @return {Promise<void>}
 */
async function clearThrottling(session) {
  const networkCleared = clearNetworkThrottling(session);
  const cpuCleared = clearCPUThrottling(session);
  await Promise.all([networkCleared, cpuCleared]);
}
/**
* @param {LH.Gatherer.FRProtocolSession} session
* @param {Required<LH.ThrottlingSettings>} throttlingSettings
* @return {Promise<void>}
*/
function enableNetworkThrottling(session, throttlingSettings) {
/** @type {LH.Crdp.Network.EmulateNetworkConditionsRequest} */
const conditions = {
offline: false,
latency: throttlingSettings.requestLatencyMs || 0,
downloadThroughput: throttlingSettings.downloadThroughputKbps || 0,
uploadThroughput: throttlingSettings.uploadThroughputKbps || 0,
};
// DevTools expects throughput in bytes per second rather than kbps
conditions.downloadThroughput = Math.floor(conditions.downloadThroughput * 1024 / 8);
conditions.uploadThroughput = Math.floor(conditions.uploadThroughput * 1024 / 8);
return session.sendCommand('Network.emulateNetworkConditions', conditions);
}
/**
 * Resets network conditions to the unthrottled defaults.
 * @param {LH.Gatherer.FRProtocolSession} session
 * @return {Promise<void>}
 */
async function clearNetworkThrottling(session) {
  await session.sendCommand('Network.emulateNetworkConditions', NO_THROTTLING_METRICS);
}
/**
* @param {LH.Gatherer.FRProtocolSession} session
* @param {Required<LH.ThrottlingSettings>} throttlingSettings
* @return {Promise<void>}
*/
function enableCPUThrottling(session, throttlingSettings) {
const rate = throttlingSettings.cpuSlowdownMultiplier;
return session.sendCommand('Emulation.setCPUThrottlingRate', {rate});
}
/**
 * Resets the CPU throttling rate to 1 (no slowdown).
 * @param {LH.Gatherer.FRProtocolSession} session
 * @return {Promise<void>}
 */
async function clearCPUThrottling(session) {
  await session.sendCommand('Emulation.setCPUThrottlingRate', NO_CPU_THROTTLE_METRICS);
}
export {
emulate,
throttle,
clearThrottling,
enableNetworkThrottling,
clearNetworkThrottling,
enableCPUThrottling,
clearCPUThrottling,
};

290
node_modules/lighthouse/core/lib/i18n/README.md generated vendored Normal file
View File

@@ -0,0 +1,290 @@
# Terminology
* **CTC format**: The [Chrome extension & Chrome app i18n format](https://developer.chrome.com/extensions/i18n-messages) with some minor changes. JSON with their specified model for declaring placeholders, examples, etc. Used as an interchange data format.
* **LHL syntax** (Lighthouse Localizable syntax): The ICU-friendly string syntax that is used to author `UIStrings` and is seen in the locale files in `shared/localization/locales/*.json`. Lighthouse's custom syntax for these strings combines many ICU message features along with some markdown.
* **ICU**: ICU (International Components for Unicode) is a localization project and standard defined by the Unicode consortium. In general, we refer to "ICU" as the [ICU message formatting](http://userguide.icu-project.org/formatparse/messages) syntax.
# The Lighthouse i18n pipeline
The translation pipeline has 3 distinct stages, the Collection done at build time, the Translation done in the Google TC pipeline, and the Replacement done at runtime.
The collection and translation pipeline:
```
Source files: Locale files:
+---------------------------+ +----------------------------------------------
| ++ | shared/localization/locales/en-US.json |
| const UIStrings = { ... };|-+ +---> | shared/localization/locales/en-XL.json |
| |-| | +----------------------------------------------+
+-----------------------------| | | ||
+----------------------------| | | shared/localization/locales/*.json |-<+
+---------------------------+ | | || |
| | +----------------------------------------------| |
$ yarn | | +---------------------------------------------+ |
i18n:collect-strings +--------------------+ |
| |
v ▐ ▐ +---------------+ |
+------------+------+ ▐ Google TC Pipeline ▐ +->| *.ctc.json |---+
| en-US.ctc.json | +--------------> ▐ (~2 weeks) ▐ +---------------+
+-------------------+ $ g3/import….sh ▐ ▐ $ g3/export….sh
```
#### String Collection workflow (build time)
To a typical developer, the pipeline looks like this:
* LH contributor makes any changes to strings.
```shell
# collect UIStrings and bake the en-US & en-XL locales
$ yarn i18n:collect-strings
# Test to see that the new translations are valid and apply to all strings
$ yarn build-sample-reports && open dist/xl-accented/index.html
```
Note: Why do `en-US` and `en-XL` get baked early? We write all our strings in `en-US` by default, so they do not need to be translated, so it can be immediately baked without going to the translators. Similarly, `en-XL` is a debugging language, it is an automated version of `en-US` that simply adds markers to `en` strings in order to make it obvious that something has or hasn't been translated. So neither of these files need to go to translators to be used, and both can be used at develop-time to help developer i18n workflow.
#### String Translation in Google Translation Console
* Googler is ready to kick off the TC pipeline again.
```shell
# collect UIStrings (to make sure everything is up to date)
$ yarn i18n:collect-strings
# Extract the CTC format files to translation console
$ sh import-source-from-github.sh
# Submit CL. Wait ~2 weeks for translations
# Import the translated CTC format files to locales/ and bake them
$ sh export-tc-dump-to-github.sh
```
#### String Replacement (runtime)
See [Appendix A: How runtime string replacement works](#appendix)
# Writing UIStrings with LHL
We want to keep strings close to the code in which they are used so that developers can easily understand their context. We use `i18n.js` to extract the `UIStrings` strings from individual js files.
LHL strings in each module are defined in a `UIStrings` object with the strings as its properties. JSDoc is used to provide additional information about each string.
The LHL syntax is based primarily around the standardized [ICU message formatting](http://userguide.icu-project.org/formatparse/messages) syntax.
### Basic example
A simple string.
```javascript
const UIStrings = {
/** Imperative title of a Lighthouse audit that ... */
title: 'Minify CSS',
};
```
For proper translation, **all** strings must be accompanied by a description, written as a preceding comment.
### Replacements and primitive formatting
Replacements (aka substitutions) include string replacements like `{some_name}` and number formatting like `{timeInMs, number, milliseconds}`.
#### Direct ICU replacement
`{some_name}` is called _Direct ICU_ since the replacement is a direct substitution of ICU with a variable and uses no custom formatting. This is simply a direct replacement of text into a string. Often used for proper nouns, code, or other text that is dynamic and added at runtime.
ICU replacements must use a JSDoc-like syntax to specify an example for direct ICU replacements:
* To specify the description, use `@description …`:
* `@description Label string used to…`
* To specify an example for an ICU replacement, use `@example {…} …`:
* `@example {This is an example ICU replacement} variableName`
```javascript
const UIStrings = {
/**
* @description Error message explaining ...
* @example {NO_SPEEDLINE_FRAMES} errorCode
*/
didntCollectScreenshots: `Chrome didn't .... ({errorCode})`,
};
```
#### Complex ICU replacement
`{timeInMs, number, milliseconds}` is called _Complex ICU_ since the replacement is for numbers and other complex replacements that use the custom formatters in Lighthouse. The supported complex ICU formats are: `milliseconds`, `seconds`, `bytes`, `percent`, and `extendedPercent`.
These complex ICU formats are automatically given @example values during `yarn i18n:collect-strings`. Therefore, a normal description string can be used:
```javascript
const UIStrings = {
/** Description of display value. */
displayValueText: 'Interactive at {timeInMs, number, seconds} s',
};
```
### Ordinals (Numeric Selects), Plurals
An ordinal ICU message is used when the message contains "plurals", wherein a sub-message would need to be selected from a list of messages depending on the value of `itemCount` (in this example). They are a flavor of "Selects" that have a unique syntax.
```javascript
displayValue: `{itemCount, plural,
=1 {1 link found}
other {# links found}
}`,
```
Note: Why are direct ICU and complex ICU placeholdered out, but Ordinals are not? Direct and complex ICU should not contain elements that need to be translated (Direct ICU replaces universal proper nouns, and Complex ICU replaces number formatting), while ordinals do need to be translated. Ordinals and selects are therefore handled specially, and do not need to be placeholdered out.
### Selects
A select ICU message is used when the message should select a sub-message based on the value of a variable `pronoun` in this case. This is often used for gender based selections, but can be used for any enum. Lighthouse does not use selects very often.
```javascript
displayValue: `{pronoun, select,
male {He programmed the link.}
female {She programmed the link.}
other {They programmed the link.}
}`,
```
### Markdown
Some strings, like audit descriptions, can also contain a subset of markdown. See [`audit.d.ts`](https://github.com/GoogleChrome/lighthouse/blob/5e52dcca72b35943d14cc7c27613517c425250b9/types/audit.d.ts) for which properties support markdown rendering and will be rendered in the report.
**Inline code blocks**
To format some text as code it should be contained in `backticks`. Any text within the backticks will not be translated. This should be used whenever code is non-translatable. Such as HTML tags or snippets of code. Also note that there is no escape character for using backticks as part of the string, so ONLY use backticks to define code blocks.
```javascript
const UIStrings = {
title: 'Document has a `<title>` element',
};
```
**Links**
To convert a section of text into a link to another URL, enclose the text itself in [brackets] and then immediately include a link after it in (parentheses). Note that `[link text] (https://...)` is NOT VALID because of the space and will not be converted to a link.
```javascript
const UIStrings = {
description: 'The value of ... [Learn More](https://google.com/)',
};
```
### Why do we call it LHL?
LHL is a name that is distinct and identifies this as the LightHouse Locale format. Since both LHL and CTC use `.json` files it is ambiguous, so LHL is the given name for the string format that `UIStrings` objects and `locale/*.json` files that are consumed by the Lighthouse i18n engine.
# CTC file format (CTC)
### Why do we use CTC as our i18n messages interchange format?
There are a few data formats used for holding messages for internationalization, including XMB and XLIFF. We needed a JS-friendly format supported by Google's Translation Console (TC). This format is [somewhat well-specified](https://developer.chrome.com/extensions/i18n-messages) and defined in JSON rather than XML. ;)
### Why do we call it CTC?
CTC is a name that is distinct and identifies this as the Chrome translation format. `messages.json` is ambiguous in our opinion and so throughout the docs we will refer to files that follow the `messages.json` format as being CTC files with a `.ctc.json` suffix.
### Parts of a CTC message
```json
{
"name": {
"message": "Message text, with optional placeholders, which can be $PLACEHOLDER_TEXT$",
"description": "Translator-aimed description of the message.",
"meaning": "Description given when a message is duplicated, in order to give context to the message. Lighthouse uses a copy of the description for this.",
"placeholders": {
"PLACEHOLDER_TEXT": {
"content": "A string to be placed within the message.",
"example": "Translator-aimed example of the placeholder string."
},
}
}
}
```
### Collisions
Collisions happen when two CTC messages have the same `message`. For Lighthouse, there are two relevant collision types in TC:
- Allowed: the CTC `message`, `description`, and `placeholders` are exactly the same. These collisions are deduped on the TC side and the translation cost is the same as for a single string.
- Disallowed: `message` is the same but one or more of the other properties differ.
When the `message` needs to be the same as another string but another property must differ, that disallowed collision can be fixed by adding a unique `meaning` property to each colliding CTC message. TC will then consider those as separate strings and not a collision.
In Lighthouse, this is done by having a different `description` for the strings, which is then copied to `meaning` in `resolveMessageCollisions()`. `meaning` cannot be manually set.
For instance, the string "Potential Savings" currently refers to both saved KiB and saved milliseconds in different audits. The string is defined twice, each with a different `description` describing the units being saved, in case some locales' translations will use a different word choice depending on the unit.
Internally, TC uses a message ID that's a hash of `message` and `meaning` to check for collisions. Somewhat confusingly, if two messages do have colliding IDs, then `message`, `meaning`, `description`, and `placeholders` are all required to match or an error is thrown. This is why all message properties could cause a collision but `meaning` is the only way to dedupe them.
We treat it as an error if `placeholders` differ between messages in a collision: if there is a need for placeholders to differ, then the strings aren't really the same, and at least the `description` should be changed to explain that context. Placeholders must match in user-controlled data (e.g. if a placeholder has an `@example`, it must be the same example in all instances) and in Lighthouse-controlled data (e.g. the token used to replace it in the CTC `message`, like `$PLACEHOLDER_TEXT$` in the example above).
Finally, identical messages made to not collide by Lighthouse with a `meaning` cost real money and shouldn't be confused with allowed collisions which cost nothing for each additional collision. Fixed collisions are checked against a known list to add a little friction and motivate keeping them few in number. An error is thrown if a collision is fixed that hasn't yet been added to that list.
# Appendix
## Appendix A: How runtime string replacement works
1. String called in `.js` file, converted to `LH.IcuMessage` object.
1. Message object is replaced with the localized string via
`format.replaceIcuMessages` and `format.getFormatted`.
#### Example:
1. string in `core/lib/file_with_uistrings.js`
```javascript
// Declare UIStrings
const UIStrings = {
/** Used to summarize the total byte size of the page and.... */
totalSize: 'Total size was {totalBytes, number, bytes} KiB',
};
// Init the strings in this file with the i18n system.
const str_ = i18n.createIcuMessageFn(import.meta.url, UIStrings);
// Create an IcuMessage instance with a replacement value for our localizable string.
const icuMessage = str_(UIStrings.totalSize, {totalBytes: 10240});
```
2. `icuMessage` contains information to localize a message into available locales, including a default fallback created from the original (non-localized) string.
```javascript
// icuMessage
{
i18nId: 'core/lib/file_with_uistrings.js | totalSize',
values: {
totalBytes: 10240
},
formattedDefault: 'Total size was 10 KiB'
}
```
3. Lookup in `format.replaceIcuMessages` and `format.getFormatted` will attempt to find the message in this order:
1. `locales/{locale}.json` The best result. `icuMessage.i18nId` is found in the target locale and the resulting string should appear correct.
2. `locales/en-US.json` _Okay_ result. `icuMessage.i18nId` was not found in the target locale, but it was in `en-US`, so the English string is used.
3. The fallback message in `icuMessage.formattedDefault`. This string will be in English, but the lookup is subtly different than the `en-US` lookup. An `IcuMessage` whose `i18nId` is not in `en-US` may be part of an old set of artifacts (or an old LHR passed into `swap-locale`) that contains a string that has since been removed from Lighthouse. The `formattedDefault` is the only option in that case.
This is also the point at which ICU is replaced by values. So this...
```javascript
totalSize = "Total size was {totalBytes, number, bytes} KiB"
values = {totalBytes: 10240}
```
Becomes...
```javascript
message = "Total size was 10 KiB"
```

81
node_modules/lighthouse/core/lib/i18n/i18n.d.ts generated vendored Normal file
View File

@@ -0,0 +1,81 @@
export type LhlMessages = import('../../../shared/localization/locales').LhlMessages;
/**
 * Shared UI strings used across core: duration units, display values,
 * data-table column headers, resource-type labels, metric names, and severity
 * labels. Each property holds the corresponding LHL message string declared in
 * i18n.js.
 */
export namespace UIStrings {
    const ms: string;
    const seconds: string;
    const displayValueByteSavings: string;
    const displayValueMsSavings: string;
    const displayValueElementsFound: string;
    const columnURL: string;
    const columnSize: string;
    const columnResourceSize: string;
    const columnTransferSize: string;
    const columnCacheTTL: string;
    const columnWastedBytes: string;
    const columnWastedMs: string;
    const columnBlockingTime: string;
    const columnTimeSpent: string;
    const columnLocation: string;
    const columnResourceType: string;
    const columnRequests: string;
    const columnName: string;
    const columnSource: string;
    const columnOverBudget: string;
    const columnElement: string;
    const columnStartTime: string;
    const columnDuration: string;
    const columnFailingElem: string;
    const columnDescription: string;
    const totalResourceType: string;
    const documentResourceType: string;
    const scriptResourceType: string;
    const stylesheetResourceType: string;
    const imageResourceType: string;
    const mediaResourceType: string;
    const fontResourceType: string;
    const otherResourceType: string;
    const thirdPartyResourceType: string;
    const otherResourcesLabel: string;
    const firstContentfulPaintMetric: string;
    const interactiveMetric: string;
    const firstMeaningfulPaintMetric: string;
    const totalBlockingTimeMetric: string;
    const maxPotentialFIDMetric: string;
    const speedIndexMetric: string;
    const largestContentfulPaintMetric: string;
    const cumulativeLayoutShiftMetric: string;
    const interactionToNextPaint: string;
    const itemSeverityLow: string;
    const itemSeverityMedium: string;
    const itemSeverityHigh: string;
}
/**
 * Look up the best available locale for the requested language through these fall backs:
 * - exact match
 * - progressively shorter prefixes (`de-CH-1996` -> `de-CH` -> `de`)
 * - supported locales in Intl formatters
 *
 * If `locales` isn't provided or no match could be found, DEFAULT_LOCALE is returned.
 *
 * By default any of the locales Lighthouse has strings for can be returned, but this
 * can be overridden with `possibleLocales`, useful e.g. when Lighthouse is bundled and
 * only DEFAULT_LOCALE is available, but `possibleLocales` can be used to select a
 * locale available to be downloaded on demand.
 * @param {string|string[]=} locales
 * @param {Array<string>=} possibleLocales
 * @return {LH.Locale}
 */
export function lookupLocale(locales?: (string | string[]) | undefined, possibleLocales?: Array<string> | undefined): LH.Locale;
/**
 * Returns a function that generates `LH.IcuMessage` objects to localize the
 * messages in `fileStrings` and the shared `i18n.UIStrings`.
 * `filename` identifies the module the strings belong to; callers typically
 * pass `import.meta.url`.
 * @param {string} filename
 * @param {Record<string, string>} fileStrings
 */
export function createIcuMessageFn(filename: string, fileStrings: Record<string, string>): (message: string, values?: Record<string, string | number> | undefined) => LH.IcuMessage;
/**
 * Returns true if the given value is a string or an LH.IcuMessage.
 * @param {unknown} value
 * @return {value is string|LH.IcuMessage}
 */
export function isStringOrIcuMessage(value: unknown): value is string | import("../../index.js").IcuMessage;
//# sourceMappingURL=i18n.d.ts.map

229
node_modules/lighthouse/core/lib/i18n/i18n.js generated vendored Normal file
View File

@@ -0,0 +1,229 @@
/**
* @license Copyright 2018 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
/** @typedef {import('../../../shared/localization/locales').LhlMessages} LhlMessages */
import path from 'path';
import url from 'url';
import lookupClosestLocale from 'lookup-closest-locale';
import log from 'lighthouse-logger';
import {getAvailableLocales} from '../../../shared/localization/format.js';
import {LH_ROOT} from '../../../root.js';
import {isIcuMessage, formatMessage, DEFAULT_LOCALE} from '../../../shared/localization/format.js';
import {getModulePath} from '../../../esm-utils.js';
/**
 * Shared, translator-annotated UI strings used across Lighthouse audits.
 * Each entry's leading comment is a description consumed by the translation
 * pipeline; placeholders in `{braces}` are ICU message-format arguments.
 */
const UIStrings = {
  /** Used to show the duration in milliseconds that something lasted. The `{timeInMs}` placeholder will be replaced with the time duration, shown in milliseconds (e.g. 63 ms) */
  ms: '{timeInMs, number, milliseconds}\xa0ms',
  /** Used to show the duration in seconds that something lasted. The {timeInMs} placeholder will be replaced with the time duration, shown in seconds (e.g. 5.2 s) */
  seconds: '{timeInMs, number, seconds}\xa0s',
  /** Label shown per-audit to show how many bytes smaller the page could be if the user implemented the suggestions. The `{wastedBytes}` placeholder will be replaced with the number of bytes, shown in kibibytes (e.g. 148 KiB) */
  displayValueByteSavings: 'Potential savings of {wastedBytes, number, bytes}\xa0KiB',
  /** Label shown per-audit to show how many milliseconds faster the page load could be if the user implemented the suggestions. The `{wastedMs}` placeholder will be replaced with the time duration, shown in milliseconds (e.g. 140 ms) */
  displayValueMsSavings: 'Potential savings of {wastedMs, number, milliseconds}\xa0ms',
  /** Label shown per-audit to show how many HTML elements did not pass the audit. The `{# elements found}` placeholder will be replaced with the number of failing HTML elements. */
  displayValueElementsFound: `{nodeCount, plural, =1 {1 element found} other {# elements found}}`,
  /** Label for a column in a data table; entries will be the URL of a web resource */
  columnURL: 'URL',
  /** Label for a column in a data table; entries will be the size or quantity of some resource, e.g. the width and height dimensions of an image or the number of images in a web page. */
  columnSize: 'Size',
  /** Label for a column in a data table; entries will be the file size of a web resource in kilobytes. */
  columnResourceSize: 'Resource Size',
  /** Label for a column in a data table; entries will be the download size of a web resource in kilobytes. */
  columnTransferSize: 'Transfer Size',
  /** Label for a column in a data table; entries will be the time to live value of the cache header on a web resource. */
  columnCacheTTL: 'Cache TTL',
  /** Label for a column in a data table; entries will be the number of kilobytes the user could reduce their page by if they implemented the suggestions. */
  columnWastedBytes: 'Potential Savings',
  /** Label for a column in a data table; entries will be the number of milliseconds the user could reduce page load by if they implemented the suggestions. */
  columnWastedMs: 'Potential Savings',
  /** Label for a table column that displays how much time each row spent blocking other work on the main thread, entries will be the number of milliseconds spent. */
  columnBlockingTime: 'Main-Thread Blocking Time',
  /** Label for a column in a data table; entries will be the number of milliseconds spent during a particular activity. */
  columnTimeSpent: 'Time Spent',
  /** Label for a column in a data table; entries will be the location of a specific line of code in a file, in the format "line: 102". */
  columnLocation: 'Location',
  /** Label for a column in a data table; entries will be types of resources loaded over the network, e.g. "Scripts", "Third-Party", "Stylesheet". */
  columnResourceType: 'Resource Type',
  /** Label for a column in a data table; entries will be the number of network requests done by a webpage. */
  columnRequests: 'Requests',
  /** Label for a column in a data table; entries will be the names of arbitrary objects, e.g. the name of a Javascript library, or the name of a user defined timing event. */
  columnName: 'Name',
  /** Label for a column in a data table; entries will be the locations of JavaScript or CSS code, e.g. the name of a Javascript package or module. */
  columnSource: 'Source',
  /** Label for a column in a data table; entries will be how much a predetermined budget has been exeeded by. Depending on the context, this number could represent an excess in quantity or size of network requests, or, an excess in the duration of time that it takes for the page to load.*/
  columnOverBudget: 'Over Budget',
  /** Label for a column in a data table; entries will be a representation of a DOM element. */
  columnElement: 'Element',
  /** Label for a column in a data table; entries will be the number of milliseconds since the page started loading. */
  columnStartTime: 'Start Time',
  /** Label for a column in a data table; entries will be the total number of milliseconds from the start time until the end time. */
  columnDuration: 'Duration',
  /** Label for a column in a data table; entries will be a representation of a DOM element that did not meet certain suggestions. */
  columnFailingElem: 'Failing Elements',
  /** Label for a column in a data table; entries will be a description of the table item. */
  columnDescription: 'Description',
  /** Label for a row in a data table; entries will be the total number and byte size of all resources loaded by a web page. */
  totalResourceType: 'Total',
  /** Label for a row in a data table; entries will be the total number and byte size of all 'Document' resources loaded by a web page. */
  documentResourceType: 'Document',
  /** Label for a row in a data table; entries will be the total number and byte size of all 'Script' resources loaded by a web page. 'Script' refers to JavaScript or other files that are executable by a browser. */
  scriptResourceType: 'Script',
  /** Label for a row in a data table; entries will be the total number and byte size of all 'Stylesheet' resources loaded by a web page. 'Stylesheet' refers to CSS stylesheets. */
  stylesheetResourceType: 'Stylesheet',
  /** Label for a row in a data table; entries will be the total number and byte size of all 'Image' resources loaded by a web page. */
  imageResourceType: 'Image',
  /** Label for a row in a data table; entries will be the total number and byte size of all 'Media' resources loaded by a web page. 'Media' refers to audio and video files. */
  mediaResourceType: 'Media',
  /** Label for a row in a data table; entries will be the total number and byte size of all 'Font' resources loaded by a web page. */
  fontResourceType: 'Font',
  /** Label for a row in a data table; entries will be the total number and byte size of all resources loaded by a web page that don't fit into the categories of Document, Script, Stylesheet, Image, Media, & Font.*/
  otherResourceType: 'Other',
  /** Label for a row in a data table; entries will be the total number and byte size of all third-party resources loaded by a web page. 'Third-party resources are items loaded from URLs that aren't controlled by the owner of the web page. */
  thirdPartyResourceType: 'Third-party',
  /** Label used to identify a value in a table where many individual values are aggregated to a single value, for brevity. "Other resources" could also be read as "the rest of the resources". Resource refers to network resources requested by the browser. */
  otherResourcesLabel: 'Other resources',
  /** The name of the metric that marks the time at which the first text or image is painted by the browser. Shown to users as the label for the numeric metric value. Ideally fits within a ~40 character limit. */
  firstContentfulPaintMetric: 'First Contentful Paint',
  /** The name of the metric that marks the time at which the page is fully loaded and is able to quickly respond to user input (clicks, taps, and keypresses feel responsive). Shown to users as the label for the numeric metric value. Ideally fits within a ~40 character limit. */
  interactiveMetric: 'Time to Interactive',
  /** The name of the metric that marks the time at which a majority of the content has been painted by the browser. Shown to users as the label for the numeric metric value. Ideally fits within a ~40 character limit. */
  firstMeaningfulPaintMetric: 'First Meaningful Paint',
  /** The name of a metric that calculates the total duration of blocking time for a web page. Blocking times are time periods when the page would be blocked (prevented) from responding to user input (clicks, taps, and keypresses will feel slow to respond). Shown to users as the label for the numeric metric value. Ideally fits within a ~40 character limit. */
  totalBlockingTimeMetric: 'Total Blocking Time',
  /** The name of the metric "Maximum Potential First Input Delay" that marks the maximum estimated time between the page receiving input (a user clicking, tapping, or typing) and the page responding. Shown to users as the label for the numeric metric value. Ideally fits within a ~40 character limit. */
  maxPotentialFIDMetric: 'Max Potential First Input Delay',
  /** The name of the metric that summarizes how quickly the page looked visually complete. The name of this metric is largely abstract and can be loosely translated. Shown to users as the label for the numeric metric value. Ideally fits within a ~40 character limit. */
  speedIndexMetric: 'Speed Index',
  /** The name of the metric that marks the time at which the largest text or image is painted by the browser. Shown to users as the label for the numeric metric value. Ideally fits within a ~40 character limit. */
  largestContentfulPaintMetric: 'Largest Contentful Paint',
  /** The name of the metric "Cumulative Layout Shift" that indicates how much the page changes its layout while it loads. If big segments of the page shift their location during load, the Cumulative Layout Shift will be higher. Shown to users as the label for the numeric metric value. Ideally fits within a ~40 character limit. */
  cumulativeLayoutShiftMetric: 'Cumulative Layout Shift',
  /** The name of the "Interaction to Next Paint" metric that measures the time between a user interaction and when the browser displays a response on screen. Shown to users as the label for the numeric metric value. Ideally fits within a ~40 character limit. */
  interactionToNextPaint: 'Interaction to Next Paint',
  /** Table item value for the severity of a small, or low impact vulnerability. Part of a ranking scale in the form: low, medium, high. */
  itemSeverityLow: 'Low',
  /** Table item value for the severity of a vulnerability. Part of a ranking scale in the form: low, medium, high. */
  itemSeverityMedium: 'Medium',
  /** Table item value for the severity of a high impact, or dangerous vulnerability. Part of a ranking scale in the form: low, medium, high. */
  itemSeverityHigh: 'High',
};
/**
* Look up the best available locale for the requested language through these fall backs:
* - exact match
* - progressively shorter prefixes (`de-CH-1996` -> `de-CH` -> `de`)
* - supported locales in Intl formatters
*
* If `locale` isn't provided or one could not be found, DEFAULT_LOCALE is returned.
*
* By default any of the locales Lighthouse has strings for can be returned, but this
* can be overridden with `possibleLocales`, useful e.g. when Lighthouse is bundled and
* only DEFAULT_LOCALE is available, but `possibleLocales` can be used to select a
* locale available to be downloaded on demand.
* @param {string|string[]=} locales
* @param {Array<string>=} possibleLocales
* @return {LH.Locale}
*/
function lookupLocale(locales, possibleLocales) {
  // TODO: lookupLocale may need to be split into two functions, one that canonicalizes
  // locales and one that looks up the best locale filename for a given locale.
  // e.g. `en-IE` is canonical, but uses `en-GB.json`. See TODO in locales.js
  //
  // `Intl` is missing when Node was built with `--with-intl=none`.
  if (typeof Intl !== 'object') {
    throw new Error('Lighthouse must be run in Node with `Intl` support. See https://nodejs.org/api/intl.html for help');
  }

  // Canonicalize the requested tag(s), then keep only those this runtime's
  // Intl formatters actually support.
  const requested = Intl.NumberFormat.supportedLocalesOf(Intl.getCanonicalLocales(locales));

  // Candidate locales we have message bundles for, reshaped into the
  // `{locale: messages}` record that `lookupClosestLocale` expects.
  const candidates = possibleLocales || getAvailableLocales();
  const candidateRecord = /** @type {Record<LH.Locale, LhlMessages>} */ (
    Object.fromEntries(candidates.map(l => [l, {}])));

  const match = lookupClosestLocale(requested, candidateRecord);
  if (!match) {
    // Log extra info if we're pretty sure this version of Node was built with `--with-intl=small-icu`:
    // a missing 'es' is a strong signal of a small-icu build.
    if (Intl.NumberFormat.supportedLocalesOf('es').length === 0) {
      log.warn('i18n', 'Requested locale not available in this version of node. The `full-icu` npm module can provide additional locales. For help, see https://github.com/GoogleChrome/lighthouse/blob/main/readme.md#how-do-i-get-localized-lighthouse-results-via-the-cli');
    }
    // eslint-disable-next-line max-len
    log.warn('i18n', `locale(s) '${locales}' not available. Falling back to default '${DEFAULT_LOCALE}'`);
  }
  return match || DEFAULT_LOCALE;
}
/**
* Returns a function that generates `LH.IcuMessage` objects to localize the
* messages in `fileStrings` and the shared `i18n.UIStrings`.
* @param {string} filename
* @param {Record<string, string>} fileStrings
*/
function createIcuMessageFn(filename, fileStrings) {
  // Accept `import.meta.url`-style callers by converting the file URL to a path.
  if (filename.startsWith('file://')) filename = url.fileURLToPath(filename);
  // In the common case, `filename` is an absolute path that needs to be transformed
  // to be relative to LH_ROOT. In other cases, `filename` might be the exact i18n identifier
  // already (see: stack-packs.js, or bundled lighthouse).
  if (path.isAbsolute(filename)) filename = path.relative(LH_ROOT, filename);
  /**
   * Combined so fn can access both caller's strings and i18n.UIStrings shared across LH.
   * @type {Record<string, string>}
   */
  const mergedStrings = {...UIStrings, ...fileStrings};
  /**
   * Convert a message string and replacement values into an `LH.IcuMessage`.
   * @param {string} message
   * @param {Record<string, string | number>} [values]
   * @return {LH.IcuMessage}
   */
  const getIcuMessageFn = (message, values) => {
    // Reverse-lookup the UIStrings key from the message text; the first key
    // whose value matches wins if two entries share identical text.
    const keyname = Object.keys(mergedStrings).find(key => mergedStrings[key] === message);
    if (!keyname) throw new Error(`Could not locate: ${message}`);
    // `message` can be a UIString defined within the provided `fileStrings`, or it could be
    // one of the common strings found in `i18n.UIStrings`.
    const filenameToLookup = keyname in fileStrings ?
      filename :
      path.relative(LH_ROOT, getModulePath(import.meta));
    // i18n ids always use unix-style path separators, even on Windows.
    const unixStyleFilename = filenameToLookup.replace(/\\/g, '/');
    const i18nId = `${unixStyleFilename} | ${keyname}`;
    return {
      i18nId,
      values,
      // Pre-render the default-locale string so consumers always have a fallback.
      formattedDefault: formatMessage(message, values, DEFAULT_LOCALE),
    };
  };
  return getIcuMessageFn;
}
/**
* Returns true if the given value is a string or an LH.IcuMessage.
* @param {unknown} value
* @return {value is string|LH.IcuMessage}
*/
function isStringOrIcuMessage(value) {
  // Plain strings pass straight through; anything else must satisfy the
  // shared IcuMessage shape check.
  if (typeof value === 'string') return true;
  return isIcuMessage(value);
}
export {
UIStrings,
lookupLocale,
createIcuMessageFn,
isStringOrIcuMessage,
};

22
node_modules/lighthouse/core/lib/icons.d.ts generated vendored Normal file
View File

@@ -0,0 +1,22 @@
/**
* @license Copyright 2016 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
/**
* @param {NonNullable<LH.Artifacts.Manifest['value']>} manifest
* @return {boolean} Does the manifest have any icons?
*/
export function doExist(manifest: NonNullable<LH.Artifacts.Manifest['value']>): boolean;
/**
* @param {number} sizeRequirement
* @param {NonNullable<LH.Artifacts.Manifest['value']>} manifest
* @return {Array<string>} Value of satisfactory sizes (eg. ['192x192', '256x256'])
*/
export function pngSizedAtLeast(sizeRequirement: number, manifest: NonNullable<LH.Artifacts.Manifest['value']>): Array<string>;
/**
* @param {NonNullable<LH.Artifacts.Manifest['value']>} manifest
* @return {boolean} Does the manifest icons value contain at least one icon with purpose including "maskable"
*/
export function containsMaskableIcon(manifest: NonNullable<LH.Artifacts.Manifest['value']>): boolean;
//# sourceMappingURL=icons.d.ts.map

82
node_modules/lighthouse/core/lib/icons.js generated vendored Normal file
View File

@@ -0,0 +1,82 @@
/**
* @license Copyright 2016 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
/**
* @param {NonNullable<LH.Artifacts.Manifest['value']>} manifest
* @return {boolean} Does the manifest have any icons?
*/
function doExist(manifest) {
  // No parsed manifest, or no `icons` member at all → treat as "no icons".
  if (!manifest || !manifest.icons) {
    return false;
  }
  // An `icons` member whose parsed value is an empty list also counts as none.
  return manifest.icons.value.length > 0;
}
/**
* @param {number} sizeRequirement
* @param {NonNullable<LH.Artifacts.Manifest['value']>} manifest
* @return {Array<string>} Value of satisfactory sizes (eg. ['192x192', '256x256'])
*/
function pngSizedAtLeast(sizeRequirement, manifest) {
  // An icon can declare one size or several, so first gather every size
  // declared by a PNG icon into one flat list of 'WxH' strings.
  /** @type {Array<string>} */
  const declaredSizes = [];
  for (const icon of manifest.icons.value) {
    let isPng;
    const typeHint = icon.value.type.value;
    if (typeHint) {
      // An explicit MIME hint is authoritative: only 'image/png' qualifies.
      isPng = typeHint === 'image/png';
    } else {
      // No hint: fall back to the icon URL's extension.
      const src = icon.value.src.value;
      isPng = Boolean(src && new URL(src).pathname.endsWith('.png'));
    }
    // Only icons that actually declare sizes contribute.
    if (isPng && icon.value.sizes.value) {
      declaredSizes.push(...icon.value.sizes.value);
    }
  }
  return declaredSizes
    // discard sizes that are not AAxBB (eg. "any")
    .filter(size => /\d+x\d+/.test(size))
    .filter(size => {
      // Parse 'WxH' into numeric [W, H].
      const parts = size.split(/x/i);
      const width = parseFloat(parts[0]);
      const height = parseFloat(parts[1]);
      // Square is required: https://code.google.com/p/chromium/codesearch#chromium/src/chrome/browser/manifest/manifest_icon_selector.cc&q=ManifestIconSelector::IconSizesContainsBiggerThanMinimumSize&sq=package:chromium
      // and both dimensions must meet the requirement.
      return width === height && width >= sizeRequirement && height >= sizeRequirement;
    });
}
/**
* @param {NonNullable<LH.Artifacts.Manifest['value']>} manifest
* @return {boolean} Does the manifest icons value contain at least one icon with purpose including "maskable"
*/
function containsMaskableIcon(manifest) {
  // An icon qualifies when its (optional) parsed `purpose` list mentions
  // 'maskable'; one such icon is enough.
  for (const icon of manifest.icons.value) {
    const purpose = icon.value.purpose?.value;
    if (purpose && purpose.includes('maskable')) {
      return true;
    }
  }
  return false;
}
export {
doExist,
pngSizedAtLeast,
containsMaskableIcon,
};

View File

@@ -0,0 +1,20 @@
declare namespace _default {
export const simulationNamesToIgnore: string[];
export { convertNodeTimingsToTrace };
}
export default _default;
export type Node = import('./dependency-graph/base-node.js').Node;
export type CompleteNodeTiming = import('./dependency-graph/simulator/simulator.js').CompleteNodeTiming;
/**
* @license Copyright 2018 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
/** @typedef {import('./dependency-graph/base-node.js').Node} Node */
/** @typedef {import('./dependency-graph/simulator/simulator.js').CompleteNodeTiming} CompleteNodeTiming */
/**
* @param {Map<Node, CompleteNodeTiming>} nodeTimings
* @return {LH.Trace}
*/
declare function convertNodeTimingsToTrace(nodeTimings: Map<Node, CompleteNodeTiming>): LH.Trace;
//# sourceMappingURL=lantern-trace-saver.d.ts.map

260
node_modules/lighthouse/core/lib/lantern-trace-saver.js generated vendored Normal file
View File

@@ -0,0 +1,260 @@
/**
* @license Copyright 2018 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
/** @typedef {import('./dependency-graph/base-node.js').Node} Node */
/** @typedef {import('./dependency-graph/simulator/simulator.js').CompleteNodeTiming} CompleteNodeTiming */
/**
* @param {Map<Node, CompleteNodeTiming>} nodeTimings
* @return {LH.Trace}
*/
function convertNodeTimingsToTrace(nodeTimings) {
  // Synthesizes a DevTools-loadable trace from Lantern's simulated node
  // timings: two TracingStarted markers, then Task events for CPU nodes and
  // fake network lifecycle events for network nodes.
  /** @type {LH.TraceEvent[]} */
  const traceEvents = [];
  // Arbitrary base timestamp (µs); all synthesized events are offset from it.
  const baseTs = 1e9;
  // Every event claims the same fake process/thread so DevTools groups them together.
  const baseEvent = {pid: 1, tid: 1, cat: 'devtools.timeline'};
  const frame = 'A00001';
  /** @param {number} ms */
  const toMicroseconds = ms => baseTs + ms * 1000;
  traceEvents.push(createFakeTracingStartedEvent());
  traceEvents.push({...createFakeTracingStartedEvent(), name: 'TracingStartedInBrowser'});
  // Create a fake requestId counter
  let requestId = 1;
  let lastEventEndTime = 0;
  for (const [node, timing] of nodeTimings.entries()) {
    lastEventEndTime = Math.max(lastEventEndTime, timing.endTime);
    if (node.type === 'cpu') {
      // Represent all CPU work that was bundled in a task as an EvaluateScript event
      traceEvents.push(...createFakeTaskEvents(node, timing));
    } else {
      // Ignore data URIs as they don't really add much value
      if (/^data/.test(node.record.url)) continue;
      traceEvents.push(...createFakeNetworkEvents(requestId, node.record, timing));
      requestId++;
    }
  }
  // Create a fake task event ~1s after the trace ends for a sane default bounds in DT
  traceEvents.push(
    ...createFakeTaskEvents(
      // @ts-expect-error
      {childEvents: [], event: {}},
      {
        startTime: lastEventEndTime + 1000,
        endTime: lastEventEndTime + 1001,
      }
    )
  );
  return {traceEvents};
  /**
   * Builds the trace-start marker event, placed slightly before `baseTs` so it
   * precedes every synthesized event.
   * @return {LH.TraceEvent}
   */
  function createFakeTracingStartedEvent() {
    const argsData = {
      frameTreeNodeId: 1,
      sessionId: '1.1',
      page: frame,
      persistentIds: true,
      frames: [{frame, url: 'about:blank', name: '', processId: 1}],
    };
    return {
      ...baseEvent,
      ts: baseTs - 1e5,
      ph: 'I',
      s: 't',
      cat: 'disabled-by-default-devtools.timeline',
      name: 'TracingStartedInPage',
      args: {data: argsData},
      dur: 0,
    };
  }
  /**
   * Builds one Task event for a CPU node, plus rescaled copies of its child
   * events so they fit inside the simulated task's duration.
   * @param {LH.Gatherer.Simulation.GraphCPUNode} cpuNode
   * @param {{startTime: number, endTime: number}} timing
   * @return {LH.TraceEvent[]}
   */
  function createFakeTaskEvents(cpuNode, timing) {
    const argsData = {
      url: '',
      frame,
      lineNumber: 0,
      columnNumber: 0,
    };
    const eventTs = toMicroseconds(timing.startTime);
    /** @type {LH.TraceEvent[]} */
    const events = [
      {
        ...baseEvent,
        ph: 'X',
        name: 'Task',
        ts: eventTs,
        dur: (timing.endTime - timing.startTime) * 1000,
        args: {data: argsData},
      },
    ];
    const nestedBaseTs = cpuNode.event.ts || 0;
    // Scale factor mapping each child event's real offset/duration onto the
    // simulated task's duration.
    const multiplier = (timing.endTime - timing.startTime) * 1000 / cpuNode.event.dur;
    // https://github.com/ChromeDevTools/devtools-frontend/blob/5429ac8a61ad4fa/front_end/timeline_model/TimelineModel.js#L1129-L1130
    const netReqEvents = new Set(['ResourceSendRequest', 'ResourceFinish',
      'ResourceReceiveResponse', 'ResourceReceivedData']);
    for (const event of cpuNode.childEvents) {
      // Network lifecycle events are synthesized separately by
      // createFakeNetworkEvents, so skip the originals here.
      if (netReqEvents.has(event.name)) continue;
      const ts = eventTs + (event.ts - nestedBaseTs) * multiplier;
      const newEvent = {...event, ...{pid: baseEvent.pid, tid: baseEvent.tid}, ts};
      if (event.dur) newEvent.dur = event.dur * multiplier;
      events.push(newEvent);
    }
    return events;
  }
  /**
   * Builds the ResourceWillSendRequest instant event emitted only for the
   * navigation request. NOTE(review): `record` is currently unused here.
   * @param {number} requestId
   * @param {LH.Artifacts.NetworkRequest} record
   * @param {CompleteNodeTiming} timing
   * @return {LH.TraceEvent}
   */
  function createWillSendRequestEvent(requestId, record, timing) {
    return {
      ...baseEvent,
      ph: 'I',
      s: 't',
      // No `dur` on network instant events but add to keep types happy.
      dur: 0,
      name: 'ResourceWillSendRequest',
      ts: toMicroseconds(timing.startTime),
      args: {data: {requestId: String(requestId)}},
    };
  }
  /**
   * Builds the fake send/receive/finish lifecycle events for one network node.
   * @param {number} requestId
   * @param {LH.Artifacts.NetworkRequest} record
   * @param {CompleteNodeTiming} timing
   * @return {LH.TraceEvent[]}
   */
  function createFakeNetworkEvents(requestId, record, timing) {
    if (!('connectionTiming' in timing)) {
      throw new Error('Network node timing incomplete');
    }
    // 0ms requests get super-messed up rendering
    // Use 0.3ms instead so they're still hoverable, https://github.com/GoogleChrome/lighthouse/pull/5350#discussion_r194563201
    let {startTime, endTime} = timing; // eslint-disable-line prefer-const
    if (startTime === endTime) endTime += 0.3;
    const requestData = {requestId: requestId.toString(), frame};
    // No `dur` on network instant events but add to keep types happy.
    /** @type {LH.Util.StrictOmit<LH.TraceEvent, 'name'|'ts'|'args'>} */
    const baseRequestEvent = {...baseEvent, ph: 'I', s: 't', dur: 0};
    const sendRequestData = {
      ...requestData,
      requestMethod: record.requestMethod,
      url: record.url,
      priority: record.priority,
    };
    const {dnsResolutionTime, connectionTime, sslTime, timeToFirstByte} = timing.connectionTiming;
    // SSL phase is only reportable when both the connection and SSL durations
    // were simulated; -1 means "not applicable" in the trace format.
    let sslStart = -1;
    let sslEnd = -1;
    if (connectionTime !== undefined && sslTime !== undefined) {
      sslStart = connectionTime - sslTime;
      sslEnd = connectionTime;
    }
    const receiveResponseData = {
      ...requestData,
      statusCode: record.statusCode,
      mimeType: record.mimeType,
      encodedDataLength: record.transferSize,
      fromCache: record.fromDiskCache,
      fromServiceWorker: record.fetchedViaServiceWorker,
      timing: {
        // `requestTime` is in seconds.
        requestTime: toMicroseconds(startTime) / (1000 * 1000),
        // Remaining values are milliseconds after `requestTime`.
        dnsStart: dnsResolutionTime === undefined ? -1 : 0,
        dnsEnd: dnsResolutionTime ?? -1,
        connectStart: dnsResolutionTime ?? -1,
        connectEnd: connectionTime ?? -1,
        sslStart,
        sslEnd,
        sendStart: connectionTime ?? 0,
        sendEnd: connectionTime ?? 0,
        receiveHeadersEnd: timeToFirstByte,
        workerStart: -1,
        workerReady: -1,
        proxyStart: -1,
        proxyEnd: -1,
        pushStart: 0,
        pushEnd: 0,
      },
    };
    const resourceFinishData = {
      requestId: requestId.toString(),
      encodedDataLength: record.transferSize,
      decodedBodyLength: record.resourceSize,
      didFail: !!record.failed,
      finishTime: toMicroseconds(endTime) / (1000 * 1000),
    };
    /** @type {LH.TraceEvent[]} */
    const events = [];
    // Navigation request needs an additional ResourceWillSendRequest event.
    if (requestId === 1) {
      events.push(createWillSendRequestEvent(requestId, record, timing));
    }
    events.push(
      {
        ...baseRequestEvent,
        name: 'ResourceSendRequest',
        ts: toMicroseconds(startTime),
        args: {data: sendRequestData},
      },
      {
        ...baseRequestEvent,
        name: 'ResourceFinish',
        ts: toMicroseconds(endTime),
        args: {data: resourceFinishData},
      }
    );
    if (!record.failed) {
      events.push({
        ...baseRequestEvent,
        name: 'ResourceReceiveResponse',
        // Event `ts` isn't meaningful, so just pick a time.
        ts: toMicroseconds((startTime + endTime) / 2),
        args: {data: receiveResponseData},
      });
    }
    return events;
  }
}
export default {
simulationNamesToIgnore: [
'unlabeled',
// These node timings should be nearly identical to the ones produced for Interactive
'optimisticSpeedIndex',
'optimisticFlexSpeedIndex',
'pessimisticSpeedIndex',
],
convertNodeTimingsToTrace,
};

2
node_modules/lighthouse/core/lib/lh-env.d.ts generated vendored Normal file
View File

@@ -0,0 +1,2 @@
export const isUnderTest: boolean;
//# sourceMappingURL=lh-env.d.ts.map

15
node_modules/lighthouse/core/lib/lh-env.js generated vendored Normal file
View File

@@ -0,0 +1,15 @@
/**
* @license Copyright 2020 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import process from 'process';
// NODE_ENV is set to test by mocha-setup.js and the smokehouse CLI runner
// CI as a catchall for everything we do in GitHub Actions
// True when either the CI env var is set or NODE_ENV === 'test'.
const isUnderTest = !!process.env.CI || process.env.NODE_ENV === 'test';
export {
  isUnderTest,
};

345
node_modules/lighthouse/core/lib/lh-error.d.ts generated vendored Normal file
View File

@@ -0,0 +1,345 @@
export type LighthouseErrorDefinition = {
code: string;
message: string;
pattern?: RegExp | undefined;
/**
* True if it should appear in the top-level LHR.runtimeError property.
*/
lhrRuntimeError?: boolean | undefined;
};
export type SerializedLighthouseError = {
sentinel: '__LighthouseErrorSentinel';
code: string;
stack?: string | undefined;
cause?: unknown;
properties?: {
[p: string]: string | undefined;
} | undefined;
};
export type SerializedBaseError = {
sentinel: '__ErrorSentinel';
message: string;
code?: string;
stack?: string;
cause?: unknown;
};
/**
* The {@link ErrorOptions } type wasn't added until es2022 (Node 16), so we recreate it here to support ts targets before es2022.
* TODO: Just use `ErrorOptions` if we can't support targets before es2022 in the docs test.
*/
export type LHErrorOptions = {
cause: unknown;
};
/**
* @typedef {{sentinel: '__LighthouseErrorSentinel', code: string, stack?: string, cause?: unknown, properties?: {[p: string]: string|undefined}}} SerializedLighthouseError
* @typedef {{sentinel: '__ErrorSentinel', message: string, code?: string, stack?: string, cause?: unknown}} SerializedBaseError
*/
/**
* The {@link ErrorOptions} type wasn't added until es2022 (Node 16), so we recreate it here to support ts targets before es2022.
* TODO: Just use `ErrorOptions` if we can't support targets before es2022 in the docs test.
* @typedef {{cause: unknown}} LHErrorOptions
*/
/**
 * Error subclass carrying a stable error code plus a localized, user-facing
 * message; supports lossless JSON round-tripping via the replacer/reviver pair.
 */
export class LighthouseError extends Error {
    /**
     * @param {string} method
     * @param {{message: string, data?: string|undefined}} protocolError
     * @return {Error|LighthouseError}
     */
    static fromProtocolMessage(method: string, protocolError: {
        message: string;
        data?: string | undefined;
    }): Error | LighthouseError;
    /**
     * A JSON.stringify replacer to serialize LighthouseErrors and (as a fallback) Errors.
     * Returns a simplified version of the error object that can be reconstituted
     * as a copy of the original error at parse time.
     * @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#The_replacer_parameter
     * @param {Error|LighthouseError} err
     * @return {SerializedBaseError|SerializedLighthouseError}
     */
    static stringifyReplacer(err: Error | LighthouseError): SerializedBaseError | SerializedLighthouseError;
    /**
     * A JSON.parse reviver. If any value passed in is a serialized Error or
     * LighthouseError, the error is recreated as the original object. Otherwise, the
     * value is passed through unchanged.
     * @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#Using_the_reviver_parameter
     * @param {string} key
     * @param {any} possibleError
     * @return {any}
     */
    static parseReviver(key: string, possibleError: any): any;
    /**
     * @param {LighthouseErrorDefinition} errorDefinition
     * @param {Record<string, string|undefined>=} properties
     * @param {LHErrorOptions=} options
     */
    constructor(errorDefinition: LighthouseErrorDefinition, properties?: Record<string, string | undefined> | undefined, options?: LHErrorOptions | undefined);
    /** Stable error code (e.g. 'NO_FCP'); also used as the base Error message. */
    code: string;
    /** Localized, user-facing message with ICU replacements applied. */
    friendlyMessage: import("../index.js").IcuMessage;
    /** True if this error should surface as the top-level LHR.runtimeError. */
    lhrRuntimeError: boolean;
}
/** Static members: the error-definition table plus two sentinel code strings. */
export namespace LighthouseError {
    export { ERRORS as errors };
    /** Sentinel code meaning "no runtime error occurred". */
    export const NO_ERROR: string;
    /** Sentinel code used for unrecognized/unclassified runtime errors. */
    export const UNKNOWN_ERROR: string;
}
/** Localizable message templates referenced by the error definitions below. */
export namespace UIStrings {
    const didntCollectScreenshots: string;
    const badTraceRecording: string;
    const noFcp: string;
    const noLcp: string;
    const pageLoadTookTooLong: string;
    const pageLoadFailed: string;
    const pageLoadFailedWithStatusCode: string;
    const pageLoadFailedWithDetails: string;
    const pageLoadFailedInsecure: string;
    const pageLoadFailedInterstitial: string;
    const internalChromeError: string;
    const requestContentTimeout: string;
    const notHtml: string;
    const urlInvalid: string;
    const protocolTimeout: string;
    const dnsFailure: string;
    const pageLoadFailedHung: string;
    const criTimeout: string;
    const missingRequiredArtifact: string;
    const erroredRequiredArtifact: string;
    const oldChromeDoesNotSupportFeature: string;
}
/**
 * Generated declarations for the error-definition table in lh-error.js.
 * The numbered aliases (code_1, message_1, …) are a tsc emit artifact for
 * re-exporting identically named members across sibling namespaces; the
 * `import x = UIStrings.y` lines alias each definition's message template.
 */
declare namespace ERRORS {
    namespace NO_SPEEDLINE_FRAMES {
        export const code: string;
        import message = UIStrings.didntCollectScreenshots;
        export { message };
        export const lhrRuntimeError: boolean;
    }
    namespace SPEEDINDEX_OF_ZERO {
        const code_1: string;
        export { code_1 as code };
        import message_1 = UIStrings.didntCollectScreenshots;
        export { message_1 as message };
        const lhrRuntimeError_1: boolean;
        export { lhrRuntimeError_1 as lhrRuntimeError };
    }
    namespace NO_SCREENSHOTS {
        const code_2: string;
        export { code_2 as code };
        import message_2 = UIStrings.didntCollectScreenshots;
        export { message_2 as message };
        const lhrRuntimeError_2: boolean;
        export { lhrRuntimeError_2 as lhrRuntimeError };
    }
    namespace INVALID_SPEEDLINE {
        const code_3: string;
        export { code_3 as code };
        import message_3 = UIStrings.didntCollectScreenshots;
        export { message_3 as message };
        const lhrRuntimeError_3: boolean;
        export { lhrRuntimeError_3 as lhrRuntimeError };
    }
    namespace NO_TRACING_STARTED {
        const code_4: string;
        export { code_4 as code };
        import message_4 = UIStrings.badTraceRecording;
        export { message_4 as message };
        const lhrRuntimeError_4: boolean;
        export { lhrRuntimeError_4 as lhrRuntimeError };
    }
    namespace NO_RESOURCE_REQUEST {
        const code_5: string;
        export { code_5 as code };
        import message_5 = UIStrings.badTraceRecording;
        export { message_5 as message };
        const lhrRuntimeError_5: boolean;
        export { lhrRuntimeError_5 as lhrRuntimeError };
    }
    namespace NO_NAVSTART {
        const code_6: string;
        export { code_6 as code };
        import message_6 = UIStrings.badTraceRecording;
        export { message_6 as message };
        const lhrRuntimeError_6: boolean;
        export { lhrRuntimeError_6 as lhrRuntimeError };
    }
    namespace NO_FCP {
        const code_7: string;
        export { code_7 as code };
        import message_7 = UIStrings.noFcp;
        export { message_7 as message };
        const lhrRuntimeError_7: boolean;
        export { lhrRuntimeError_7 as lhrRuntimeError };
    }
    namespace NO_DCL {
        const code_8: string;
        export { code_8 as code };
        import message_8 = UIStrings.badTraceRecording;
        export { message_8 as message };
        const lhrRuntimeError_8: boolean;
        export { lhrRuntimeError_8 as lhrRuntimeError };
    }
    namespace NO_FMP {
        const code_9: string;
        export { code_9 as code };
        import message_9 = UIStrings.badTraceRecording;
        export { message_9 as message };
    }
    namespace NO_LCP {
        const code_10: string;
        export { code_10 as code };
        import message_10 = UIStrings.noLcp;
        export { message_10 as message };
    }
    namespace NO_LCP_ALL_FRAMES {
        const code_11: string;
        export { code_11 as code };
        import message_11 = UIStrings.noLcp;
        export { message_11 as message };
    }
    namespace UNSUPPORTED_OLD_CHROME {
        const code_12: string;
        export { code_12 as code };
        import message_12 = UIStrings.oldChromeDoesNotSupportFeature;
        export { message_12 as message };
    }
    namespace NO_TTI_CPU_IDLE_PERIOD {
        const code_13: string;
        export { code_13 as code };
        import message_13 = UIStrings.pageLoadTookTooLong;
        export { message_13 as message };
    }
    namespace NO_TTI_NETWORK_IDLE_PERIOD {
        const code_14: string;
        export { code_14 as code };
        import message_14 = UIStrings.pageLoadTookTooLong;
        export { message_14 as message };
    }
    namespace NO_DOCUMENT_REQUEST {
        const code_15: string;
        export { code_15 as code };
        import message_15 = UIStrings.pageLoadFailed;
        export { message_15 as message };
        const lhrRuntimeError_9: boolean;
        export { lhrRuntimeError_9 as lhrRuntimeError };
    }
    namespace FAILED_DOCUMENT_REQUEST {
        const code_16: string;
        export { code_16 as code };
        import message_16 = UIStrings.pageLoadFailedWithDetails;
        export { message_16 as message };
        const lhrRuntimeError_10: boolean;
        export { lhrRuntimeError_10 as lhrRuntimeError };
    }
    namespace ERRORED_DOCUMENT_REQUEST {
        const code_17: string;
        export { code_17 as code };
        import message_17 = UIStrings.pageLoadFailedWithStatusCode;
        export { message_17 as message };
        const lhrRuntimeError_11: boolean;
        export { lhrRuntimeError_11 as lhrRuntimeError };
    }
    namespace INSECURE_DOCUMENT_REQUEST {
        const code_18: string;
        export { code_18 as code };
        import message_18 = UIStrings.pageLoadFailedInsecure;
        export { message_18 as message };
        const lhrRuntimeError_12: boolean;
        export { lhrRuntimeError_12 as lhrRuntimeError };
    }
    namespace CHROME_INTERSTITIAL_ERROR {
        const code_19: string;
        export { code_19 as code };
        import message_19 = UIStrings.pageLoadFailedInterstitial;
        export { message_19 as message };
        const lhrRuntimeError_13: boolean;
        export { lhrRuntimeError_13 as lhrRuntimeError };
    }
    namespace PAGE_HUNG {
        const code_20: string;
        export { code_20 as code };
        import message_20 = UIStrings.pageLoadFailedHung;
        export { message_20 as message };
        const lhrRuntimeError_14: boolean;
        export { lhrRuntimeError_14 as lhrRuntimeError };
    }
    namespace NOT_HTML {
        const code_21: string;
        export { code_21 as code };
        import message_21 = UIStrings.notHtml;
        export { message_21 as message };
        const lhrRuntimeError_15: boolean;
        export { lhrRuntimeError_15 as lhrRuntimeError };
    }
    namespace TRACING_ALREADY_STARTED {
        const code_22: string;
        export { code_22 as code };
        import message_22 = UIStrings.internalChromeError;
        export { message_22 as message };
        export const pattern: RegExp;
        const lhrRuntimeError_16: boolean;
        export { lhrRuntimeError_16 as lhrRuntimeError };
    }
    namespace PARSING_PROBLEM {
        const code_23: string;
        export { code_23 as code };
        import message_23 = UIStrings.internalChromeError;
        export { message_23 as message };
        const pattern_1: RegExp;
        export { pattern_1 as pattern };
        const lhrRuntimeError_17: boolean;
        export { lhrRuntimeError_17 as lhrRuntimeError };
    }
    namespace READ_FAILED {
        const code_24: string;
        export { code_24 as code };
        import message_24 = UIStrings.internalChromeError;
        export { message_24 as message };
        const pattern_2: RegExp;
        export { pattern_2 as pattern };
        const lhrRuntimeError_18: boolean;
        export { lhrRuntimeError_18 as lhrRuntimeError };
    }
    namespace INVALID_URL {
        const code_25: string;
        export { code_25 as code };
        import message_25 = UIStrings.urlInvalid;
        export { message_25 as message };
    }
    namespace PROTOCOL_TIMEOUT {
        const code_26: string;
        export { code_26 as code };
        import message_26 = UIStrings.protocolTimeout;
        export { message_26 as message };
        const lhrRuntimeError_19: boolean;
        export { lhrRuntimeError_19 as lhrRuntimeError };
    }
    namespace DNS_FAILURE {
        const code_27: string;
        export { code_27 as code };
        import message_27 = UIStrings.dnsFailure;
        export { message_27 as message };
        const lhrRuntimeError_20: boolean;
        export { lhrRuntimeError_20 as lhrRuntimeError };
    }
    namespace CRI_TIMEOUT {
        const code_28: string;
        export { code_28 as code };
        import message_28 = UIStrings.criTimeout;
        export { message_28 as message };
        const lhrRuntimeError_21: boolean;
        export { lhrRuntimeError_21 as lhrRuntimeError };
    }
    namespace MISSING_REQUIRED_ARTIFACT {
        const code_29: string;
        export { code_29 as code };
        import message_29 = UIStrings.missingRequiredArtifact;
        export { message_29 as message };
    }
    namespace ERRORED_REQUIRED_ARTIFACT {
        const code_30: string;
        export { code_30 as code };
        import message_30 = UIStrings.erroredRequiredArtifact;
        export { message_30 as message };
    }
}
export {};
//# sourceMappingURL=lh-error.d.ts.map

435
node_modules/lighthouse/core/lib/lh-error.js generated vendored Normal file
View File

@@ -0,0 +1,435 @@
/**
* @license Copyright 2018 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import * as i18n from './i18n/i18n.js';
/* eslint-disable max-len */
// Localizable message templates for every error definition in this file.
// Placeholders in {braces} are ICU replacements filled in by str_()/the
// LighthouseError constructor (e.g. {errorCode}); keep text and placeholder
// names in sync with the @example annotations used by the i18n tooling.
const UIStrings = {
  /**
   * @description Error message explaining that the Lighthouse run was not able to collect screenshots through Chrome.
   * @example {NO_SPEEDLINE_FRAMES} errorCode
   * */
  didntCollectScreenshots: `Chrome didn't collect any screenshots during the page load. Please make sure there is content visible on the page, and then try re-running Lighthouse. ({errorCode})`,
  /**
   * @description Error message explaining that the performance trace was not able to be recorded for the Lighthouse run.
   * @example {NO_TRACING_STARTED} errorCode
   * */
  badTraceRecording: 'Something went wrong with recording the trace over your page load. Please run Lighthouse again. ({errorCode})',
  /**
   * @description Error message explaining that the First Contentful Paint metric was not seen during the page load.
   * @example {NO_FCP} errorCode
   * */
  noFcp: 'The page did not paint any content. Please ensure you keep the browser window in the foreground during the load and try again. ({errorCode})',
  /**
   * @description Error message explaining that the Largest Contentful Paint metric was not seen during the page load.
   * @example {NO_LCP} errorCode
   * */
  noLcp: 'The page did not display content that qualifies as a Largest Contentful Paint (LCP). Ensure the page has a valid LCP element and then try again. ({errorCode})',
  /**
   * @description Error message explaining that the page loaded too slowly to perform a Lighthouse run.
   * @example {NO_TTI_CPU_IDLE_PERIOD} errorCode
   * */
  pageLoadTookTooLong: 'Your page took too long to load. Please follow the opportunities in the report to reduce your page load time, and then try re-running Lighthouse. ({errorCode})',
  /** Error message explaining that Lighthouse could not load the requested URL and the steps that might be taken to fix the unreliability. */
  pageLoadFailed: 'Lighthouse was unable to reliably load the page you requested. Make sure you are testing the correct URL and that the server is properly responding to all requests.',
  /**
   * @description Error message explaining that Lighthouse could not load the requested URL and the steps that might be taken to fix the unreliability.
   * @example {404} statusCode
   * */
  pageLoadFailedWithStatusCode: 'Lighthouse was unable to reliably load the page you requested. Make sure you are testing the correct URL and that the server is properly responding to all requests. (Status code: {statusCode})',
  /**
   * @description Error message explaining that Lighthouse could not load the requested URL and the steps that might be taken to fix the unreliability.
   * @example {FAILED_DOCUMENT_REQUEST} errorDetails
   * */
  pageLoadFailedWithDetails: 'Lighthouse was unable to reliably load the page you requested. Make sure you are testing the correct URL and that the server is properly responding to all requests. (Details: {errorDetails})',
  /**
   * @description Error message explaining that the security certificate of the page Lighthouse observed was invalid, so the URL cannot be accessed. securityMessages will be replaced with one or more strings from the browser explaining what was insecure about the page load.
   * @example {net::ERR_CERT_DATE_INVALID} securityMessages
   * */
  pageLoadFailedInsecure: 'The URL you have provided does not have a valid security certificate. {securityMessages}',
  /** Error message explaining that Chrome prevented the page from loading and displayed an interstitial screen instead, so the URL cannot be accessed. */
  pageLoadFailedInterstitial: 'Chrome prevented page load with an interstitial. Make sure you are testing the correct URL and that the server is properly responding to all requests.',
  /** Error message explaining that Chrome has encountered an error during the Lighthouse run, and that Chrome should be restarted. */
  internalChromeError: 'An internal Chrome error occurred. Please restart Chrome and try re-running Lighthouse.',
  /** Error message explaining that fetching the resources of the webpage has taken longer than the maximum time. */
  requestContentTimeout: 'Fetching resource content has exceeded the allotted time',
  /**
   * @description Error message explaining that the webpage is non-HTML, so audits are ill-defined.
   * @example {application/xml} mimeType
   * */
  notHtml: 'The page provided is not HTML (served as MIME type {mimeType}).',
  /** Error message explaining that the provided URL Lighthouse points to is not valid, and cannot be loaded. */
  urlInvalid: 'The URL you have provided appears to be invalid.',
  /**
   * @description Error message explaining that the Chrome Devtools protocol has exceeded the maximum timeout allowed.
   * @example {Network.enable} protocolMethod
   * */
  protocolTimeout: 'Waiting for DevTools protocol response has exceeded the allotted time. (Method: {protocolMethod})',
  /** Error message explaining that the requested page could not be resolved by the DNS server. */
  dnsFailure: 'DNS servers could not resolve the provided domain.',
  /** Error message explaining that Lighthouse couldn't complete because the page has stopped responding to its instructions. */
  pageLoadFailedHung: 'Lighthouse was unable to reliably load the URL you requested because the page stopped responding.',
  /** Error message explaining that Lighthouse timed out while waiting for the initial connection to the Chrome Devtools protocol. */
  criTimeout: 'Timeout waiting for initial Debugger Protocol connection.',
  /**
   * @description Error message explaining that a resource that was required for testing was never collected. "artifactName" will be replaced with the name of the resource that wasn't collected.
   * @example {WebAppManifest} artifactName
   * */
  missingRequiredArtifact: 'Required {artifactName} gatherer did not run.',
  /**
   * @description Error message explaining that there was an error while trying to collect a resource that was required for testing. "artifactName" will be replaced with the name of the resource that wasn't collected; "errorMessage" will be replaced with a string description of the error that occurred.
   * @example {WebAppManifest} artifactName
   * @example {Manifest invalid} errorMessage
   * */
  erroredRequiredArtifact: 'Required {artifactName} gatherer encountered an error: {errorMessage}',
  /**
   * @description Error message explaining that a feature is unavailable due to an old version of Chrome. "featureName" will be replaced by the name of the feature which is not supported.
   * @example {Largest Contentful Paint} featureName
   * */
  oldChromeDoesNotSupportFeature: 'This version of Chrome is too old to support \'{featureName}\'. Use a newer version to see full results.',
};
// Bind the ICU message formatter for this file's UIStrings table.
const str_ = i18n.createIcuMessageFn(import.meta.url, UIStrings);
/**
 * @typedef LighthouseErrorDefinition
 * @property {string} code
 * @property {string} message
 * @property {RegExp} [pattern]
 * @property {boolean} [lhrRuntimeError] True if it should appear in the top-level LHR.runtimeError property.
 */
// Sentinel tags embedded in serialized errors so parseReviver can recognize
// which kind of error to reconstitute.
const LHERROR_SENTINEL = '__LighthouseErrorSentinel';
const ERROR_SENTINEL = '__ErrorSentinel';
/**
 * @typedef {{sentinel: '__LighthouseErrorSentinel', code: string, stack?: string, cause?: unknown, properties?: {[p: string]: string|undefined}}} SerializedLighthouseError
 * @typedef {{sentinel: '__ErrorSentinel', message: string, code?: string, stack?: string, cause?: unknown}} SerializedBaseError
 */
/**
 * The {@link ErrorOptions} type wasn't added until es2022 (Node 16), so we recreate it here to support ts targets before es2022.
 * TODO: Just use `ErrorOptions` if we can't support targets before es2022 in the docs test.
 * @typedef {{cause: unknown}} LHErrorOptions
 */
/**
 * Error subclass carrying a stable `code`, a localized `friendlyMessage`, and
 * an `lhrRuntimeError` flag; supports lossless JSON round-tripping via the
 * `stringifyReplacer`/`parseReviver` pair below.
 */
class LighthouseError extends Error {
  /**
   * @param {LighthouseErrorDefinition} errorDefinition
   * @param {Record<string, string|undefined>=} properties extra ICU replacement values; also copied onto the instance
   * @param {LHErrorOptions=} options
   */
  constructor(errorDefinition, properties, options) {
    // The error code doubles as the base Error `message`.
    super(errorDefinition.code, options);
    this.name = 'LighthouseError';
    this.code = errorDefinition.code;
    // Add additional properties to be ICU replacements in the error string.
    // `code` is always added as `errorCode` so callers don't need to specify the code multiple times.
    this.friendlyMessage = str_(errorDefinition.message, {errorCode: this.code, ...properties});
    this.lhrRuntimeError = !!errorDefinition.lhrRuntimeError;
    // Copy properties onto the instance so stringifyReplacer can recover them.
    if (properties) Object.assign(this, properties);
    Error.captureStackTrace(this, LighthouseError);
  }
  /**
   * Converts a raw DevTools protocol error into a LighthouseError (when its
   * message matches a known definition's `pattern`) or a generic Error.
   * @param {string} method
   * @param {{message: string, data?: string|undefined}} protocolError
   * @return {Error|LighthouseError}
   */
  static fromProtocolMessage(method, protocolError) {
    // extract all errors with a regex pattern to match against.
    // if we find one, use the friendly LighthouseError definition
    const matchedErrorDefinition = Object.values(LighthouseError.errors)
      .filter(e => e.pattern)
      .find(e => e.pattern && e.pattern.test(protocolError.message));
    if (matchedErrorDefinition) {
      return new LighthouseError(matchedErrorDefinition);
    }
    // otherwise fallback to building a generic Error
    let errMsg = `(${method}): ${protocolError.message}`;
    if (protocolError.data) errMsg += ` (${protocolError.data})`;
    const error = new Error(`Protocol error ${errMsg}`);
    return Object.assign(error, {protocolMethod: method, protocolError: protocolError.message});
  }
  /**
   * A JSON.stringify replacer to serialize LighthouseErrors and (as a fallback) Errors.
   * Returns a simplified version of the error object that can be reconstituted
   * as a copy of the original error at parse time.
   * @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#The_replacer_parameter
   * @param {Error|LighthouseError} err
   * @return {SerializedBaseError|SerializedLighthouseError}
   */
  static stringifyReplacer(err) {
    if (err instanceof LighthouseError) {
      // Remove class props so that remaining values are what was passed in as `properties`.
      // eslint-disable-next-line no-unused-vars
      const {name, code, message, friendlyMessage, lhrRuntimeError, stack, cause, ...properties} = err;
      return {
        sentinel: LHERROR_SENTINEL,
        code,
        stack,
        cause,
        properties: /** @type {{ [p: string]: string | undefined }} */ (properties),
      };
    }
    // Unexpected errors won't be LighthouseErrors, but we want them serialized as well.
    if (err instanceof Error) {
      const {message, stack, cause} = err;
      // @ts-expect-error - code can be helpful for e.g. node errors, so preserve it if it's present.
      const code = err.code;
      return {
        sentinel: ERROR_SENTINEL,
        message,
        code,
        stack,
        cause,
      };
    }
    throw new Error('Invalid value for LighthouseError stringification');
  }
  /**
   * A JSON.parse reviver. If any value passed in is a serialized Error or
   * LighthouseError, the error is recreated as the original object. Otherwise, the
   * value is passed through unchanged.
   * @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#Using_the_reviver_parameter
   * @param {string} key
   * @param {any} possibleError
   * @return {any}
   */
  static parseReviver(key, possibleError) {
    if (typeof possibleError === 'object' && possibleError !== null) {
      if (possibleError.sentinel === LHERROR_SENTINEL) {
        // The sentinel field is intentionally dropped; only the fields needed to
        // rebuild the error are extracted.
        // eslint-disable-next-line no-unused-vars
        const {code, stack, cause, properties} = /** @type {SerializedLighthouseError} */ (possibleError);
        // NOTE(review): assumes `code` is always a key of ERRORS; an unknown code
        // would make errorDefinition undefined and the constructor would throw.
        // Confirm inputs are always round-tripped via stringifyReplacer.
        const errorDefinition = LighthouseError.errors[/** @type {keyof typeof ERRORS} */ (code)];
        const lhError = new LighthouseError(errorDefinition, properties, {cause});
        // Restore the original stack rather than the one captured just now.
        lhError.stack = stack;
        return lhError;
      }
      if (possibleError.sentinel === ERROR_SENTINEL) {
        const {message, code, stack, cause} = /** @type {SerializedBaseError} */ (possibleError);
        const opts = cause ? {cause} : undefined;
        const error = new Error(message, opts);
        Object.assign(error, {code, stack});
        return error;
      }
    }
    return possibleError;
  }
}
// Table of all known error definitions, keyed by code. Each entry's `code`
// matches its key; `message` is a UIStrings template; optional `pattern` is
// matched against protocol errors by fromProtocolMessage; `lhrRuntimeError`
// marks codes eligible for the top-level LHR.runtimeError property.
const ERRORS = {
  // Screenshot/speedline errors
  NO_SPEEDLINE_FRAMES: {
    code: 'NO_SPEEDLINE_FRAMES',
    message: UIStrings.didntCollectScreenshots,
    lhrRuntimeError: true,
  },
  SPEEDINDEX_OF_ZERO: {
    code: 'SPEEDINDEX_OF_ZERO',
    message: UIStrings.didntCollectScreenshots,
    lhrRuntimeError: true,
  },
  NO_SCREENSHOTS: {
    code: 'NO_SCREENSHOTS',
    message: UIStrings.didntCollectScreenshots,
    lhrRuntimeError: true,
  },
  INVALID_SPEEDLINE: {
    code: 'INVALID_SPEEDLINE',
    message: UIStrings.didntCollectScreenshots,
    lhrRuntimeError: true,
  },
  // Trace parsing errors
  NO_TRACING_STARTED: {
    code: 'NO_TRACING_STARTED',
    message: UIStrings.badTraceRecording,
    lhrRuntimeError: true,
  },
  NO_RESOURCE_REQUEST: {
    code: 'NO_RESOURCE_REQUEST',
    message: UIStrings.badTraceRecording,
    lhrRuntimeError: true,
  },
  NO_NAVSTART: {
    code: 'NO_NAVSTART',
    message: UIStrings.badTraceRecording,
    lhrRuntimeError: true,
  },
  NO_FCP: {
    code: 'NO_FCP',
    message: UIStrings.noFcp,
    lhrRuntimeError: true,
  },
  NO_DCL: {
    code: 'NO_DCL',
    message: UIStrings.badTraceRecording,
    lhrRuntimeError: true,
  },
  NO_FMP: {
    code: 'NO_FMP',
    message: UIStrings.badTraceRecording,
  },
  NO_LCP: {
    code: 'NO_LCP',
    message: UIStrings.noLcp,
  },
  NO_LCP_ALL_FRAMES: {
    code: 'NO_LCP_ALL_FRAMES',
    message: UIStrings.noLcp,
  },
  UNSUPPORTED_OLD_CHROME: {
    code: 'UNSUPPORTED_OLD_CHROME',
    message: UIStrings.oldChromeDoesNotSupportFeature,
  },
  // TTI calculation failures
  NO_TTI_CPU_IDLE_PERIOD: {code: 'NO_TTI_CPU_IDLE_PERIOD', message: UIStrings.pageLoadTookTooLong},
  NO_TTI_NETWORK_IDLE_PERIOD: {
    code: 'NO_TTI_NETWORK_IDLE_PERIOD',
    message: UIStrings.pageLoadTookTooLong,
  },
  // Page load failures
  NO_DOCUMENT_REQUEST: {
    code: 'NO_DOCUMENT_REQUEST',
    message: UIStrings.pageLoadFailed,
    lhrRuntimeError: true,
  },
  /* Used when DevTools reports loading failed. Usually an internal (Chrome) issue.
   * Requires an additional `errorDetails` field for translation.
   */
  FAILED_DOCUMENT_REQUEST: {
    code: 'FAILED_DOCUMENT_REQUEST',
    message: UIStrings.pageLoadFailedWithDetails,
    lhrRuntimeError: true,
  },
  /* Used when status code is 4xx or 5xx.
   * Requires an additional `statusCode` field for translation.
   */
  ERRORED_DOCUMENT_REQUEST: {
    code: 'ERRORED_DOCUMENT_REQUEST',
    message: UIStrings.pageLoadFailedWithStatusCode,
    lhrRuntimeError: true,
  },
  /* Used when security error prevents page load.
   * Requires an additional `securityMessages` field for translation.
   */
  INSECURE_DOCUMENT_REQUEST: {
    code: 'INSECURE_DOCUMENT_REQUEST',
    message: UIStrings.pageLoadFailedInsecure,
    lhrRuntimeError: true,
  },
  /* Used when any Chrome interstitial error prevents page load.
   */
  CHROME_INTERSTITIAL_ERROR: {
    code: 'CHROME_INTERSTITIAL_ERROR',
    message: UIStrings.pageLoadFailedInterstitial,
    lhrRuntimeError: true,
  },
  /* Used when the page stopped responding and did not finish loading. */
  PAGE_HUNG: {
    code: 'PAGE_HUNG',
    message: UIStrings.pageLoadFailedHung,
    lhrRuntimeError: true,
  },
  /* Used when the page is non-HTML. */
  NOT_HTML: {
    code: 'NOT_HTML',
    message: UIStrings.notHtml,
    lhrRuntimeError: true,
  },
  // Protocol internal failures
  TRACING_ALREADY_STARTED: {
    code: 'TRACING_ALREADY_STARTED',
    message: UIStrings.internalChromeError,
    pattern: /Tracing.*started/,
    lhrRuntimeError: true,
  },
  PARSING_PROBLEM: {
    code: 'PARSING_PROBLEM',
    message: UIStrings.internalChromeError,
    pattern: /Parsing problem/,
    lhrRuntimeError: true,
  },
  READ_FAILED: {
    code: 'READ_FAILED',
    message: UIStrings.internalChromeError,
    pattern: /Read failed/,
    lhrRuntimeError: true,
  },
  // URL parsing failures
  INVALID_URL: {
    code: 'INVALID_URL',
    message: UIStrings.urlInvalid,
  },
  /* Protocol timeout failures
   * Requires an additional `protocolMethod` field for translation.
   */
  PROTOCOL_TIMEOUT: {
    code: 'PROTOCOL_TIMEOUT',
    message: UIStrings.protocolTimeout,
    lhrRuntimeError: true,
  },
  // DNS failure on main document (no resolution, timed out, etc)
  DNS_FAILURE: {
    code: 'DNS_FAILURE',
    message: UIStrings.dnsFailure,
    lhrRuntimeError: true,
  },
  /** A timeout in the initial connection to the debugger protocol. */
  CRI_TIMEOUT: {
    code: 'CRI_TIMEOUT',
    message: UIStrings.criTimeout,
    lhrRuntimeError: true,
  },
  /**
   * Error internal to Runner used when an artifact required for an audit is missing.
   * Requires an additional `artifactName` field for translation.
   */
  MISSING_REQUIRED_ARTIFACT: {
    code: 'MISSING_REQUIRED_ARTIFACT',
    message: UIStrings.missingRequiredArtifact,
  },
  /**
   * Error internal to Runner used when an artifact required for an audit was an error.
   * Requires additional `artifactName` and `errorMessage` fields for translation.
   */
  ERRORED_REQUIRED_ARTIFACT: {
    code: 'ERRORED_REQUIRED_ARTIFACT',
    message: UIStrings.erroredRequiredArtifact,
  },
  // Hey! When adding a new error type, update lighthouse-result.proto too.
  // Only necessary for runtime errors, which come from artifacts or pageLoadErrors.
};
/** @type {Record<keyof typeof ERRORS, LighthouseErrorDefinition>} */
LighthouseError.errors = ERRORS;
// Sentinel codes for LHR.runtimeError when no specific definition applies.
LighthouseError.NO_ERROR = 'NO_ERROR';
LighthouseError.UNKNOWN_ERROR = 'UNKNOWN_ERROR';
export {LighthouseError, UIStrings};

View File

@@ -0,0 +1,5 @@
export default LHTraceProcessor;
/** TraceProcessor subclass whose error factories produce LighthouseErrors. */
declare class LHTraceProcessor extends TraceProcessor {
}
import { TraceProcessor } from '../lib/tracehouse/trace-processor.js';
//# sourceMappingURL=lh-trace-processor.d.ts.map

45
node_modules/lighthouse/core/lib/lh-trace-processor.js generated vendored Normal file
View File

@@ -0,0 +1,45 @@
/**
* @license Copyright 2021 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import {LighthouseError} from '../lib/lh-error.js';
import {TraceProcessor} from '../lib/tracehouse/trace-processor.js';
// TraceProcessor throws generic errors, but we'd like our special localized and code-specific LighthouseError
// objects to be thrown instead.
/**
 * TraceProcessor variant whose error factory hooks produce localized,
 * code-specific LighthouseError instances instead of plain generic Errors.
 */
class LHTraceProcessor extends TraceProcessor {
  /**
   * Built when the trace contains no navigationStart event.
   * @return {Error}
   */
  static createNoNavstartError() {
    const definition = LighthouseError.errors.NO_NAVSTART;
    return new LighthouseError(definition);
  }
  /**
   * Built when no ResourceSendRequest event is found. Not exercised yet, but
   * will be once the time origin of trace processing changes.
   * @see {TraceProcessor.computeTimeOrigin}
   * @see https://github.com/GoogleChrome/lighthouse/pull/11253#discussion_r507985527
   * @return {Error}
   */
  static createNoResourceSendRequestError() {
    const definition = LighthouseError.errors.NO_RESOURCE_REQUEST;
    return new LighthouseError(definition);
  }
  /**
   * Built when the trace contains no TracingStartedInBrowser event.
   * @return {Error}
   */
  static createNoTracingStartedError() {
    const definition = LighthouseError.errors.NO_TRACING_STARTED;
    return new LighthouseError(definition);
  }
  /**
   * Built when no First Contentful Paint event is found in the trace.
   * @return {Error}
   */
  static createNoFirstContentfulPaintError() {
    const definition = LighthouseError.errors.NO_FCP;
    return new LighthouseError(definition);
  }
}
export default LHTraceProcessor;

View File

@@ -0,0 +1,8 @@
/** Value type for table-style audit detail headings (replaces pre-10.0 `itemType`). */
export type ItemValueType = import('../../types/lhr/audit-details').default.ItemValueType;
/** @typedef {import('../../types/lhr/audit-details').default.ItemValueType} ItemValueType */
/**
 * Upgrades an lhr object in-place to account for changes in the data structure over major versions.
 * @param {LH.Result} lhr
 */
export function upgradeLhrForCompatibility(lhr: LH.Result): void;
//# sourceMappingURL=lighthouse-compatibility.d.ts.map

View File

@@ -0,0 +1,164 @@
/**
* @license Copyright 2023 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import {Util} from '../../shared/util.js';
const SCREENSHOT_PREFIX = 'data:image/jpeg;base64,';
/** @typedef {import('../../types/lhr/audit-details').default.ItemValueType} ItemValueType */
/**
* Upgrades an lhr object in-place to account for changes in the data structure over major versions.
* @param {LH.Result} lhr
*/
function upgradeLhrForCompatibility(lhr) {
// If LHR is older (≤3.0.3), it has no locale setting. Set default.
if (!lhr.configSettings.locale) {
lhr.configSettings.locale = 'en';
}
if (!lhr.configSettings.formFactor) {
// @ts-expect-error fallback handling for emulatedFormFactor
lhr.configSettings.formFactor = lhr.configSettings.emulatedFormFactor;
}
lhr.finalDisplayedUrl = Util.getFinalDisplayedUrl(lhr);
lhr.mainDocumentUrl = Util.getMainDocumentUrl(lhr);
for (const audit of Object.values(lhr.audits)) {
// Turn 'not-applicable' (LHR <4.0) and 'not_applicable' (older proto versions)
// into 'notApplicable' (LHR ≥4.0).
// @ts-expect-error tsc rightly flags that these values shouldn't occur.
// eslint-disable-next-line max-len
if (audit.scoreDisplayMode === 'not_applicable' || audit.scoreDisplayMode === 'not-applicable') {
audit.scoreDisplayMode = 'notApplicable';
}
if (audit.details) {
// Turn `auditDetails.type` of undefined (LHR <4.2) and 'diagnostic' (LHR <5.0)
// into 'debugdata' (LHR ≥5.0).
// @ts-expect-error tsc rightly flags that these values shouldn't occur.
if (audit.details.type === undefined || audit.details.type === 'diagnostic') {
// @ts-expect-error details is of type never.
audit.details.type = 'debugdata';
}
// Add the jpg data URL prefix to filmstrip screenshots without them (LHR <5.0).
if (audit.details.type === 'filmstrip') {
for (const screenshot of audit.details.items) {
if (!screenshot.data.startsWith(SCREENSHOT_PREFIX)) {
screenshot.data = SCREENSHOT_PREFIX + screenshot.data;
}
}
}
// Circa 10.0, table items were refactored.
if (audit.details.type === 'table') {
for (const heading of audit.details.headings) {
/** @type {{itemType: ItemValueType|undefined, text: string|undefined}} */
// @ts-expect-error
const {itemType, text} = heading;
if (itemType !== undefined) {
heading.valueType = itemType;
// @ts-expect-error
delete heading.itemType;
}
if (text !== undefined) {
heading.label = text;
// @ts-expect-error
delete heading.text;
}
// @ts-expect-error
const subItemsItemType = heading.subItemsHeading?.itemType;
if (heading.subItemsHeading && subItemsItemType !== undefined) {
heading.subItemsHeading.valueType = subItemsItemType;
// @ts-expect-error
delete heading.subItemsHeading.itemType;
}
}
}
// In 10.0, third-party-summary deprecated entity: LinkValue and switched to entity name string
if (audit.id === 'third-party-summary') {
if (audit.details.type === 'opportunity' || audit.details.type === 'table') {
const {headings, items} = audit.details;
if (headings[0].valueType === 'link') {
// Apply upgrade only if we are dealing with an older version (valueType=link marker).
headings[0].valueType = 'text';
for (const item of items) {
if (typeof item.entity === 'object' && item.entity.type === 'link') {
item.entity = item.entity.text;
}
}
audit.details.isEntityGrouped = true;
}
}
}
// TODO: convert printf-style displayValue.
// Added: #5099, v3
// Removed: #6767, v4
}
}
// This backcompat converts old LHRs (<9.0.0) to use the new "hidden" group.
// Old LHRs used "no group" to identify audits that should be hidden in performance instead of the "hidden" group.
// Newer LHRs use "no group" to identify opportunities and diagnostics whose groups are assigned by details type.
const [majorVersion] = lhr.lighthouseVersion.split('.').map(Number);
const perfCategory = lhr.categories['performance'];
if (majorVersion < 9 && perfCategory) {
if (!lhr.categoryGroups) lhr.categoryGroups = {};
lhr.categoryGroups['hidden'] = {title: ''};
for (const auditRef of perfCategory.auditRefs) {
if (!auditRef.group) {
auditRef.group = 'hidden';
} else if (['load-opportunities', 'diagnostics'].includes(auditRef.group)) {
delete auditRef.group;
}
}
}
// Add some minimal stuff so older reports still work.
if (!lhr.environment) {
lhr.environment = {
benchmarkIndex: 0,
networkUserAgent: lhr.userAgent,
hostUserAgent: lhr.userAgent,
};
}
if (!lhr.configSettings.screenEmulation) {
lhr.configSettings.screenEmulation = {
width: -1,
height: -1,
deviceScaleFactor: -1,
mobile: /mobile/i.test(lhr.environment.hostUserAgent),
disabled: false,
};
}
if (!lhr.i18n) {
// @ts-expect-error
lhr.i18n = {};
}
// In 10.0, full-page-screenshot became a top-level property on the LHR.
if (lhr.audits['full-page-screenshot']) {
const details = /** @type {LH.Result.FullPageScreenshot=} */ (
lhr.audits['full-page-screenshot'].details);
if (details) {
lhr.fullPageScreenshot = {
screenshot: details.screenshot,
nodes: details.nodes,
};
} else {
lhr.fullPageScreenshot = null;
}
delete lhr.audits['full-page-screenshot'];
}
}
export {
upgradeLhrForCompatibility,
};

186
node_modules/lighthouse/core/lib/manifest-parser.d.ts generated vendored Normal file
View File

@@ -0,0 +1,186 @@
/**
* Parse a manifest from the given inputs.
* @param {string} string Manifest JSON string.
* @param {string} manifestUrl URL of manifest file.
* @param {string} documentUrl URL of document containing manifest link element.
*/
export function parseManifest(string: string, manifestUrl: string, documentUrl: string): {
raw: string;
value: undefined;
warning: string;
url: string;
} | {
raw: string;
value: {
name: {
raw: any;
value: string | undefined;
warning: string | undefined;
};
short_name: {
raw: any;
value: string | undefined;
warning: string | undefined;
};
start_url: {
raw: any;
value: string;
warning?: string | undefined;
};
display: {
raw: any;
value: string;
warning: string | undefined;
};
orientation: {
raw: any;
value: string | undefined;
warning: string | undefined;
};
icons: {
raw: any;
/** @type {Array<ReturnType<typeof parseIcon>>} */
value: Array<ReturnType<typeof parseIcon>>;
warning: undefined;
} | {
raw: any;
/** @type {Array<ReturnType<typeof parseIcon>>} */
value: Array<ReturnType<typeof parseIcon>>;
warning: string;
} | {
raw: any[];
value: {
raw: any;
value: {
src: {
raw: any;
value: string | undefined;
warning: string | undefined;
};
type: {
raw: any;
value: string | undefined;
warning: string | undefined;
};
density: {
raw: any;
value: number;
/** @type {string|undefined} */
warning: string | undefined;
};
sizes: {
raw: any;
value: string[] | undefined;
warning: undefined;
} | {
value: undefined;
raw: any;
warning: string | undefined;
};
purpose: {
raw: any;
value: string[];
/** @type {string|undefined} */
warning: string | undefined;
};
};
warning: undefined;
}[];
warning: string | undefined;
};
related_applications: {
raw: any;
value: undefined;
warning: undefined;
} | {
raw: any;
value: undefined;
warning: string;
} | {
raw: any[];
value: {
raw: any;
value: {
platform: {
raw: any;
value: string | undefined;
warning: string | undefined;
};
id: {
raw: any;
value: string | undefined;
warning: string | undefined;
};
url: {
raw: any;
value: string | undefined;
warning: string | undefined;
};
};
warning: undefined;
}[];
warning: undefined;
};
prefer_related_applications: {
raw: any;
value: boolean | undefined;
warning: string | undefined;
};
theme_color: {
raw: any;
value: string | undefined;
warning: string | undefined;
};
background_color: {
raw: any;
value: string | undefined;
warning: string | undefined;
};
};
warning: string | undefined;
url: string;
};
/**
* @see https://www.w3.org/TR/2016/WD-appmanifest-20160825/#src-member
* @param {*} raw
* @param {string} manifestUrl
*/
declare function parseIcon(raw: any, manifestUrl: string): {
raw: any;
value: {
src: {
raw: any;
value: string | undefined;
warning: string | undefined;
};
type: {
raw: any;
value: string | undefined;
warning: string | undefined;
};
density: {
raw: any;
value: number;
/** @type {string|undefined} */
warning: string | undefined;
};
sizes: {
raw: any;
value: string[] | undefined;
warning: undefined;
} | {
value: undefined;
raw: any;
warning: string | undefined;
};
purpose: {
raw: any;
value: string[];
/** @type {string|undefined} */
warning: string | undefined;
};
};
warning: undefined;
};
export {};
//# sourceMappingURL=manifest-parser.d.ts.map

495
node_modules/lighthouse/core/lib/manifest-parser.js generated vendored Normal file
View File

@@ -0,0 +1,495 @@
/**
* @license Copyright 2016 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
// Display modes permitted by the spec; anything else falls back to
// DEFAULT_DISPLAY_MODE (see parseDisplay).
const ALLOWED_DISPLAY_VALUES = [
  'fullscreen',
  'standalone',
  'minimal-ui',
  'browser',
];
/**
 * All display-mode fallbacks, including when unset, lead to default display mode 'browser'.
 * @see https://www.w3.org/TR/2016/WD-appmanifest-20160825/#dfn-default-display-mode
 */
const DEFAULT_DISPLAY_MODE = 'browser';
// Orientations permitted by the spec; unknown values are dropped with a
// warning (see parseOrientation).
const ALLOWED_ORIENTATION_VALUES = [
  'any',
  'natural',
  'landscape',
  'portrait',
  'portrait-primary',
  'portrait-secondary',
  'landscape-primary',
  'landscape-secondary',
];
/**
 * Normalize an arbitrary manifest member into a `{raw, value, warning}`
 * string result. Non-string, non-undefined input yields a warning.
 * @param {*} raw
 * @param {boolean=} trim Whether to trim surrounding whitespace.
 */
function parseString(raw, trim) {
  // Anything other than a string (or an absent member) is rejected.
  if (typeof raw !== 'string') {
    const warning = raw === undefined ? undefined : 'ERROR: expected a string.';
    return {raw, value: undefined, warning};
  }
  const value = trim ? raw.trim() : raw;
  return {raw, value, warning: undefined};
}
/**
 * Parse a CSS-color manifest member (theme_color / background_color).
 * Only string-ness is validated here; no actual CSS color validation is done.
 * @param {*} raw
 */
function parseColor(raw) {
  // The previous `if (color.value === undefined) return color; return color;`
  // was dead code — both branches returned the same object, so we simply
  // forward parseString's result.
  return parseString(raw);
}
/**
 * Parse the manifest `name` member (trimmed).
 * @param {*} jsonInput
 */
function parseName(jsonInput) {
  const rawName = jsonInput.name;
  return parseString(rawName, /* trim= */ true);
}
/**
 * Parse the manifest `short_name` member (trimmed).
 * @param {*} jsonInput
 */
function parseShortName(jsonInput) {
  const rawShortName = jsonInput.short_name;
  return parseString(rawShortName, /* trim= */ true);
}
/**
 * Returns whether the urls are of the same origin. See https://html.spec.whatwg.org/#same-origin
 * @param {string} url1
 * @param {string} url2
 * @return {boolean}
 */
function checkSameOrigin(url1, url2) {
  // Origin comparison (scheme + host + port) via the URL API.
  return new URL(url1).origin === new URL(url2).origin;
}
/**
 * Parse and resolve the manifest `start_url` member. Every invalid case falls
 * back to `documentUrl` with a descriptive warning.
 * https://www.w3.org/TR/2016/WD-appmanifest-20160825/#start_url-member
 * @param {*} jsonInput
 * @param {string} manifestUrl
 * @param {string} documentUrl
 * @return {{raw: any, value: string, warning?: string}}
 */
function parseStartUrl(jsonInput, manifestUrl, documentUrl) {
  const raw = jsonInput.start_url;

  // An absent member falls back to the document URL without a warning.
  if (raw === undefined) {
    return {raw, value: documentUrl};
  }
  // 8.10(3) - discard the empty string and non-strings.
  if (raw === '') {
    return {raw, value: documentUrl, warning: 'ERROR: start_url string empty'};
  }
  if (typeof raw !== 'string') {
    return {raw, value: documentUrl, warning: 'ERROR: expected a string.'};
  }

  // 8.10(4)/(5) - resolve against the manifest URL; unparseable URLs are discarded.
  let resolved;
  try {
    resolved = new URL(raw, manifestUrl).href;
  } catch (e) {
    return {
      raw,
      value: documentUrl,
      warning: `ERROR: invalid start_url relative to ${manifestUrl}`,
    };
  }

  // 8.10(6) - discard start_urls that are not same origin as documentUrl.
  if (new URL(resolved).origin !== new URL(documentUrl).origin) {
    return {
      raw,
      value: documentUrl,
      warning: 'ERROR: start_url must be same-origin as document',
    };
  }

  return {raw, value: resolved};
}
/**
 * Parse the manifest `display` member; unknown or missing values fall back to
 * the spec default display mode.
 * @param {*} jsonInput
 */
function parseDisplay(jsonInput) {
  const parsed = parseString(jsonInput.display, true);

  // Missing / non-string / empty display -> spec default, propagating any
  // type warning from parseString.
  if (!parsed.value) {
    return {raw: jsonInput, value: DEFAULT_DISPLAY_MODE, warning: parsed.warning};
  }

  const normalized = parsed.value.toLowerCase();
  if (ALLOWED_DISPLAY_VALUES.includes(normalized)) {
    return {raw: jsonInput, value: normalized, warning: undefined};
  }

  // Present but not an allowed mode: warn and use the default.
  return {
    raw: jsonInput,
    value: DEFAULT_DISPLAY_MODE,
    warning: 'ERROR: \'display\' has invalid value ' + normalized +
      `. will fall back to ${DEFAULT_DISPLAY_MODE}.`,
  };
}
/**
 * Parse the manifest `orientation` member; values outside the allowed set are
 * dropped with a warning.
 * @param {*} jsonInput
 */
function parseOrientation(jsonInput) {
  const parsed = parseString(jsonInput.orientation, true);
  const value = parsed.value;
  // A present-but-unrecognized orientation loses its value and gains a warning.
  if (value && !ALLOWED_ORIENTATION_VALUES.includes(value.toLowerCase())) {
    return {
      raw: parsed.raw,
      value: undefined,
      warning: 'ERROR: \'orientation\' has an invalid value, will be ignored.',
    };
  }
  return parsed;
}
/**
 * Parse a single manifest icon entry into `{raw, value, warning}` form, where
 * `value` holds per-field parse results for src/type/density/sizes/purpose.
 * `src` is resolved against `manifestUrl`; invalid URLs drop the value and
 * record a warning on the `src` field.
 * @see https://www.w3.org/TR/2016/WD-appmanifest-20160825/#src-member
 * @param {*} raw
 * @param {string} manifestUrl
 */
function parseIcon(raw, manifestUrl) {
  // 9.4(3)
  const src = parseString(raw.src, true);
  // 9.4(4) - discard if trimmed value is the empty string.
  if (src.value === '') {
    src.value = undefined;
  }
  if (src.value) {
    try {
      // 9.4(4) - construct URL with manifest URL as the base
      src.value = new URL(src.value, manifestUrl).href;
    } catch (_) {
      // 9.4 "This algorithm will return a URL or undefined."
      src.warning = `ERROR: invalid icon url will be ignored: '${raw.src}'`;
      src.value = undefined;
    }
  }
  const type = parseString(raw.type, true);
  const parsedPurpose = parseString(raw.purpose);
  // Purpose defaults to ['any'] when absent or non-string; otherwise it is a
  // whitespace-separated, lowercased token list.
  const purpose = {
    raw: raw.purpose,
    value: ['any'],
    /** @type {string|undefined} */
    warning: undefined,
  };
  if (parsedPurpose.value !== undefined) {
    purpose.value = parsedPurpose.value.split(/\s+/).map(value => value.toLowerCase());
  }
  // Density defaults to 1; non-positive or non-finite values are rejected.
  const density = {
    raw: raw.density,
    value: 1,
    /** @type {string|undefined} */
    warning: undefined,
  };
  if (density.raw !== undefined) {
    density.value = parseFloat(density.raw);
    if (isNaN(density.value) || !isFinite(density.value) || density.value <= 0) {
      density.value = 1;
      density.warning = 'ERROR: icon density cannot be NaN, +∞, or less than or equal to +0.';
    }
  }
  // Sizes become a deduplicated, lowercased list, or undefined when absent.
  let sizes;
  const parsedSizes = parseString(raw.sizes);
  if (parsedSizes.value !== undefined) {
    /** @type {Set<string>} */
    const set = new Set();
    parsedSizes.value.trim().split(/\s+/).forEach(size => set.add(size.toLowerCase()));
    sizes = {
      raw: raw.sizes,
      value: set.size > 0 ? Array.from(set) : undefined,
      warning: undefined,
    };
  } else {
    sizes = {...parsedSizes, value: undefined};
  }
  return {
    raw,
    value: {
      src,
      type,
      density,
      sizes,
      purpose,
    },
    warning: undefined,
  };
}
/**
 * Parse the manifest `icons` member. Icons without a `src` are skipped, each
 * remaining entry is parsed via parseIcon, and entries whose src failed to
 * parse are filtered out of `value`. A single aggregate warning is emitted
 * when any icon was dropped due to per-field warnings.
 * @param {*} jsonInput
 * @param {string} manifestUrl
 */
function parseIcons(jsonInput, manifestUrl) {
  const raw = jsonInput.icons;
  if (raw === undefined) {
    return {
      raw,
      /** @type {Array<ReturnType<typeof parseIcon>>} */
      value: [],
      warning: undefined,
    };
  }
  if (!Array.isArray(raw)) {
    return {
      raw,
      /** @type {Array<ReturnType<typeof parseIcon>>} */
      value: [],
      warning: 'ERROR: \'icons\' expected to be an array but is not.',
    };
  }
  const parsedIcons = raw
    // 9.6(3)(1)
    .filter(icon => icon.src !== undefined)
    // 9.6(3)(2)(1)
    .map(icon => parseIcon(icon, manifestUrl));
  // NOTE: we still lose the specific message on these icons, but it's not possible to surface them
  // without a massive change to the structure and paradigms of `manifest-parser`.
  const ignoredIconsWithWarnings = parsedIcons
    .filter(icon => {
      const possibleWarnings = [icon.warning, icon.value.type.warning, icon.value.src.warning,
        icon.value.sizes.warning, icon.value.density.warning].filter(Boolean);
      const hasSrc = !!icon.value.src.value;
      // Dropped == had at least one warning AND no usable resolved src.
      return !!possibleWarnings.length && !hasSrc;
    });
  const value = parsedIcons
    // 9.6(3)(2)(2)
    .filter(parsedIcon => parsedIcon.value.src.value !== undefined);
  return {
    raw,
    value,
    warning: ignoredIconsWithWarnings.length ?
      'WARNING: Some icons were ignored due to warnings.' : undefined,
  };
}
/**
 * Parse one `related_applications` entry (platform/id/url fields).
 * @param {*} raw
 */
function parseApplication(raw) {
  // 10.2.(2) and 10.2.(3)
  const appUrl = parseString(raw.url, true);
  if (appUrl.value) {
    try {
      // 10.2.(4) - attempt to construct URL (must be absolute).
      appUrl.value = new URL(appUrl.value).href;
    } catch (e) {
      appUrl.warning = `ERROR: invalid application URL ${raw.url}`;
      appUrl.value = undefined;
    }
  }
  return {
    raw,
    value: {
      platform: parseString(raw.platform, true),
      id: parseString(raw.id, true),
      url: appUrl,
    },
    warning: undefined,
  };
}
/**
 * Parse the manifest `related_applications` member. Entries without a
 * `platform` are skipped; parsed entries are kept only when they carry an id
 * or a valid URL.
 * @param {*} jsonInput
 */
function parseRelatedApplications(jsonInput) {
  const raw = jsonInput.related_applications;
  if (raw === undefined) {
    return {raw, value: undefined, warning: undefined};
  }
  if (!Array.isArray(raw)) {
    return {
      raw,
      value: undefined,
      warning: 'ERROR: \'related_applications\' expected to be an array but is not.',
    };
  }
  // TODO(bckenny): spec says to skip apps missing `platform`, so debug messages
  // on individual apps are lost. Warn instead?
  const value = [];
  for (const application of raw) {
    if (!application.platform) continue;
    const parsedApp = parseApplication(application);
    if (parsedApp.value.id.value || parsedApp.value.url.value) {
      value.push(parsedApp);
    }
  }
  return {raw, value, warning: undefined};
}
/**
 * Parse the manifest `prefer_related_applications` member (boolean or
 * undefined; any other type produces a warning).
 * @param {*} jsonInput
 */
function parsePreferRelatedApplications(jsonInput) {
  const raw = jsonInput.prefer_related_applications;
  if (typeof raw === 'boolean') {
    return {raw, value: raw, warning: undefined};
  }
  const warning = raw === undefined ?
    undefined :
    'ERROR: \'prefer_related_applications\' expected to be a boolean.';
  return {raw, value: undefined, warning};
}
/**
 * Parse the manifest `theme_color` member.
 * @param {*} jsonInput
 */
function parseThemeColor(jsonInput) {
  const rawColor = jsonInput.theme_color;
  return parseColor(rawColor);
}
/**
 * Parse the manifest `background_color` member.
 * @param {*} jsonInput
 */
function parseBackgroundColor(jsonInput) {
  const rawColor = jsonInput.background_color;
  return parseColor(rawColor);
}
/**
 * Parse a manifest from the given inputs.
 * Parse problems never throw for bad manifest *content*: invalid JSON yields
 * `value: undefined` with a top-level warning, and per-member problems are
 * reported via each member's own `warning`. Only missing URLs throw.
 * @param {string} string Manifest JSON string.
 * @param {string} manifestUrl URL of manifest file.
 * @param {string} documentUrl URL of document containing manifest link element.
 */
function parseManifest(string, manifestUrl, documentUrl) {
  // Both URLs are needed for start_url resolution and same-origin checks.
  if (manifestUrl === undefined || documentUrl === undefined) {
    throw new Error('Manifest and document URLs required for manifest parsing.');
  }
  let jsonInput;
  try {
    jsonInput = JSON.parse(string);
  } catch (e) {
    return {
      raw: string,
      value: undefined,
      warning: 'ERROR: file isn\'t valid JSON: ' + e,
      url: manifestUrl,
    };
  }
  /* eslint-disable camelcase */
  const manifest = {
    name: parseName(jsonInput),
    short_name: parseShortName(jsonInput),
    start_url: parseStartUrl(jsonInput, manifestUrl, documentUrl),
    display: parseDisplay(jsonInput),
    orientation: parseOrientation(jsonInput),
    icons: parseIcons(jsonInput, manifestUrl),
    related_applications: parseRelatedApplications(jsonInput),
    prefer_related_applications: parsePreferRelatedApplications(jsonInput),
    theme_color: parseThemeColor(jsonInput),
    background_color: parseBackgroundColor(jsonInput),
  };
  /* eslint-enable camelcase */
  // Warn if the manifest itself wasn't served over http(s) (or is unparseable).
  /** @type {string|undefined} */
  let manifestUrlWarning;
  try {
    const manifestUrlParsed = new URL(manifestUrl);
    if (!manifestUrlParsed.protocol.startsWith('http')) {
      manifestUrlWarning = `WARNING: manifest URL not available over a valid network protocol`;
    }
  } catch (_) {
    manifestUrlWarning = `ERROR: invalid manifest URL: '${manifestUrl}'`;
  }
  return {
    raw: string,
    value: manifest,
    warning: manifestUrlWarning,
    url: manifestUrl,
  };
}
export {parseManifest};

17
node_modules/lighthouse/core/lib/median-run.d.ts generated vendored Normal file
View File

@@ -0,0 +1,17 @@
/**
* We want the run that's closest to the median of the FCP and the median of the TTI.
* We're using the Euclidean distance for that (https://en.wikipedia.org/wiki/Euclidean_distance).
* We use FCP and TTI because they represent the earliest and latest moments in the page lifecycle.
* We avoid the median of single measures like the performance score because they can still exhibit
* outlier behavior at the beginning or end of load.
*
* @param {Array<LH.Result>} runs
* @return {LH.Result}
*/
export function computeMedianRun(runs: Array<LH.Result>): LH.Result;
/**
* @param {Array<LH.Result>} runs
* @return {Array<LH.Result>}
*/
export function filterToValidRuns(runs: Array<LH.Result>): Array<LH.Result>;
//# sourceMappingURL=median-run.d.ts.map

92
node_modules/lighthouse/core/lib/median-run.js generated vendored Normal file
View File

@@ -0,0 +1,92 @@
/**
* @license Copyright 2020 Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
/**
 * Numeric value of the named audit, or NaN when the audit is absent or its
 * numericValue is missing/falsy. (Note: a numericValue of 0 also maps to NaN,
 * matching the original `|| NaN` semantics.)
 * @param {LH.Result} lhr @param {string} auditName
 */
const getNumericValue = (lhr, auditName) => {
  const audit = lhr.audits[auditName];
  return (audit && audit.numericValue) || NaN;
};
/**
 * Median of a list of numbers: the middle element for odd lengths, the mean of
 * the two middle elements for even lengths. The input array is not mutated.
 * @param {Array<number>} numbers
 * @return {number}
 */
function getMedianValue(numbers) {
  const sorted = [...numbers].sort((a, b) => a - b);
  const half = sorted.length / 2;
  if (sorted.length % 2 === 1) {
    return sorted[Math.floor(half)];
  }
  return (sorted[half - 1] + sorted[half]) / 2;
}
/**
 * Squared Euclidean distance of this run's (FCP, TTI) point from the medians.
 * Smaller means closer to the "typical" run.
 * @param {LH.Result} lhr
 * @param {number} medianFcp
 * @param {number} medianInteractive
 */
function getMedianSortValue(lhr, medianFcp, medianInteractive) {
  const deltaFcp = medianFcp - getNumericValue(lhr, 'first-contentful-paint');
  const deltaTti = medianInteractive - getNumericValue(lhr, 'interactive');
  return deltaFcp ** 2 + deltaTti ** 2;
}
/**
 * We want the run that's closest to the median of the FCP and the median of the TTI.
 * We're using the Euclidean distance for that (https://en.wikipedia.org/wiki/Euclidean_distance).
 * We use FCP and TTI because they represent the earliest and latest moments in the page lifecycle.
 * We avoid the median of single measures like the performance score because they can still exhibit
 * outlier behavior at the beginning or end of load.
 *
 * Throws when `runs` is empty or when any run lacks a numeric FCP or TTI
 * (callers can pre-filter with `filterToValidRuns`).
 *
 * @param {Array<LH.Result>} runs
 * @return {LH.Result}
 */
function computeMedianRun(runs) {
  // NaN marks a missing/invalid metric (see getNumericValue).
  const missingFcp = runs.some(run =>
    Number.isNaN(getNumericValue(run, 'first-contentful-paint'))
  );
  const missingTti = runs.some(run =>
    Number.isNaN(getNumericValue(run, 'interactive'))
  );
  if (!runs.length) throw new Error('No runs provided');
  if (missingFcp) throw new Error(`Some runs were missing an FCP value`);
  if (missingTti) throw new Error(`Some runs were missing a TTI value`);
  const medianFcp = getMedianValue(
    runs.map(run => getNumericValue(run, 'first-contentful-paint'))
  );
  const medianInteractive = getMedianValue(
    runs.map(run => getNumericValue(run, 'interactive'))
  );
  // Sort by proximity to the medians, breaking ties with the minimum TTI.
  const sortedByProximityToMedian = runs
    .slice()
    .sort(
      (a, b) =>
        getMedianSortValue(a, medianFcp, medianInteractive) -
        getMedianSortValue(b, medianFcp, medianInteractive) ||
        getNumericValue(a, 'interactive') - getNumericValue(b, 'interactive')
    );
  return sortedByProximityToMedian[0];
}
/**
 * Keep only runs that have finite FCP and TTI metric values, i.e. runs that
 * computeMedianRun can safely consume.
 * @param {Array<LH.Result>} runs
 * @return {Array<LH.Result>}
 */
function filterToValidRuns(runs) {
  return runs.filter(run => {
    const fcp = getNumericValue(run, 'first-contentful-paint');
    const tti = getNumericValue(run, 'interactive');
    return Number.isFinite(fcp) && Number.isFinite(tti);
  });
}
export {computeMedianRun, filterToValidRuns};

View File

@@ -0,0 +1,9 @@
/**
* @param {string} content
*/
export function computeJSTokenLength(content: string): number;
/**
* @param {string} content
*/
export function computeCSSTokenLength(content: string): number;
//# sourceMappingURL=minification-estimator.d.ts.map

View File

@@ -0,0 +1,192 @@
/**
* @license Copyright 2018 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
// https://www.ecma-international.org/ecma-262/9.0/index.html#sec-punctuators
// Matches when the string ENDS with a punctuator (or `return`/`case`), which
// is how we decide a following `/` starts a RegExp rather than a division.
// eslint-disable-next-line max-len
const PUNCTUATOR_REGEX = /(return|case|{|\(|\[|\.\.\.|;|,|<|>|<=|>=|==|!=|===|!==|\+|-|\*|%|\*\*|\+\+|--|<<|>>|>>>|&|\||\^|!|~|&&|\|\||\?|:|=|\+=|-=|\*=|%=|\*\*=|<<=|>>=|>>>=|&=|\|=|\^=|=>|\/|\/=|\})$/;
// Matches a string ending in whitespace (used to step backwards over it).
const WHITESPACE_REGEX = /( |\n|\t)+$/;
/**
 * Look backwards from `startPosition` in `content` for an ECMAScript punctuator.
 * This is used to differentiate a RegExp from a divide statement.
 * If a punctuator immediately precedes a lone `/`, the `/` must be the start of a RegExp.
 *
 * @param {string} content
 * @param {number} startPosition
 */
function hasPunctuatorBefore(content, startPosition) {
  // Walk backwards one character at a time; each `continue` skips one
  // whitespace character until a non-whitespace tail is found.
  for (let i = startPosition; i > 0; i--) {
    // Try to grab at least 6 characters so we can check for `return`
    const sliceStart = Math.max(0, i - 6);
    const precedingCharacters = content.slice(sliceStart, i);
    // Skip over any ending whitespace
    if (WHITESPACE_REGEX.test(precedingCharacters)) continue;
    // Check if it's a punctuator
    return PUNCTUATOR_REGEX.test(precedingCharacters);
  }
  // The beginning of the content counts too for our purposes.
  // i.e. a script can't start with a divide symbol
  return true;
}
/**
 * Estimate the minified token length of `content` by scanning it character by
 * character with a small state machine that tracks comments, strings,
 * template literals, and (optionally) regex literals. Whitespace and
 * non-license comments are excluded from the count; license comments
 * (starting `/*!`) are included. If comments/strings are left unbalanced at
 * the end (parse confusion), the full content length is returned so callers
 * effectively ignore the estimate.
 * @param {string} content
 * @param {{singlelineComments: boolean, regex: boolean}} features
 */
function computeTokenLength(content, features) {
  let totalTokenLength = 0;
  let isInSinglelineComment = false;
  let isInMultilineComment = false;
  let isInLicenseComment = false;
  let isInString = false;
  let isInRegex = false;
  let isInRegexCharacterClass = false;
  let stringOpenChar = null;
  /**
   * Acts as stack for brace tracking.
   * @type {('templateBrace'|'normalBrace')[]}
   */
  const templateLiteralDepth = [];
  for (let i = 0; i < content.length; i++) {
    const twoChars = content.substr(i, 2);
    const char = twoChars.charAt(0);
    const isWhitespace = char === ' ' || char === '\n' || char === '\t';
    const isAStringOpenChar = char === `'` || char === '"' || char === '`';
    if (isInSinglelineComment) {
      if (char === '\n') {
        // End the comment when you hit a newline
        isInSinglelineComment = false;
      }
    } else if (isInMultilineComment) {
      // License comments count
      if (isInLicenseComment) totalTokenLength++;
      if (twoChars === '*/') {
        // License comments count, account for the '/' character we're skipping over
        if (isInLicenseComment) totalTokenLength++;
        // End the comment when we hit the closing sequence
        isInMultilineComment = false;
        // Skip over the '/' character since we've already processed it
        i++;
      }
    } else if (isInString) {
      // String characters count
      totalTokenLength++;
      if (stringOpenChar === '`' && twoChars === '${') {
        // Start new template literal
        templateLiteralDepth.push('templateBrace');
        isInString = false;
        totalTokenLength++;
        i++;
      } else if (char === '\\') {
        // Skip over any escaped characters
        totalTokenLength++;
        i++;
      } else if (char === stringOpenChar) {
        // End the string when we hit the same stringOpenCharacter
        isInString = false;
        // console.log(i, 'exiting string', stringOpenChar)
      }
    } else if (isInRegex) {
      // Regex characters count
      totalTokenLength++;
      if (char === '\\') {
        // Skip over any escaped characters
        totalTokenLength++;
        i++;
      } else if (char === '[') {
        // Register that we're entering a character class so we don't leave the regex prematurely
        isInRegexCharacterClass = true;
      } else if (char === ']' && isInRegexCharacterClass) {
        // Register that we're exiting the character class
        isInRegexCharacterClass = false;
      } else if (char === '/' && !isInRegexCharacterClass) {
        // End the string when we hit the regex close character
        isInRegex = false;
        // console.log(i, 'leaving regex', char)
      }
    } else {
      // We're not in any particular token mode, look for the start of different
      if (twoChars === '/*') {
        // Start the multi-line comment
        isInMultilineComment = true;
        // Check if it's a license comment so we know whether to count it
        isInLicenseComment = content.charAt(i + 2) === '!';
        // += 2 because we are processing 2 characters, not just 1
        if (isInLicenseComment) totalTokenLength += 2;
        // Skip over the '*' character since we've already processed it
        i++;
      } else if (twoChars === '//' && features.singlelineComments) {
        // Start the single-line comment
        isInSinglelineComment = true;
        isInMultilineComment = false;
        isInLicenseComment = false;
        // Skip over the second '/' character since we've already processed it
        i++;
      } else if (char === '/' && features.regex && hasPunctuatorBefore(content, i)) {
        // Start the regex
        isInRegex = true;
        // Regex characters count
        totalTokenLength++;
      } else if (char === '{' && templateLiteralDepth.length) {
        // Start normal code brace if inside a template literal
        templateLiteralDepth.push('normalBrace');
        totalTokenLength++;
      } else if (char === '}' && templateLiteralDepth.length) {
        // End one template literal if closing brace is for a template literal
        if (templateLiteralDepth[templateLiteralDepth.length - 1] === 'templateBrace') {
          isInString = true;
          stringOpenChar = '`';
        }
        templateLiteralDepth.pop();
        totalTokenLength++;
      } else if (isAStringOpenChar) {
        // Start the string
        isInString = true;
        // Save the open character for later so we know when to close it
        stringOpenChar = char;
        // String characters count
        totalTokenLength++;
      } else if (!isWhitespace) {
        // All non-whitespace characters count
        totalTokenLength++;
      }
    }
  }
  // If the content contained unbalanced comments, it's either invalid or we had a parsing error.
  // Report the token length as the entire string so it will be ignored.
  if (isInMultilineComment || isInString) {
    return content.length;
  }
  return totalTokenLength;
}
/**
 * Estimated minified token length of JavaScript source: strips whitespace and
 * comments (single- and multi-line) and is regex-literal aware.
 * @param {string} content
 */
function computeJSTokenLength(content) {
  return computeTokenLength(content, {singlelineComments: true, regex: true});
}
/**
 * Estimated minified token length of CSS source: strips whitespace and
 * multi-line comments; `//` and regex literals don't exist in CSS, so those
 * features are disabled.
 * @param {string} content
 */
function computeCSSTokenLength(content) {
  return computeTokenLength(content, {singlelineComments: false, regex: false});
}
export {computeJSTokenLength, computeCSSTokenLength};

View File

@@ -0,0 +1,6 @@
/**
* @param {LH.DevtoolsLog} log
* @return {LH.DevtoolsLog}
*/
export function minifyDevtoolsLog(log: import("..").DevtoolsLog): import("..").DevtoolsLog;
//# sourceMappingURL=minify-devtoolslog.d.ts.map

86
node_modules/lighthouse/core/lib/minify-devtoolslog.js generated vendored Normal file
View File

@@ -0,0 +1,86 @@
/**
* @license Copyright 2019 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
/* eslint-disable no-console */
/**
* @fileoverview Minifies a devtools log by removing noisy header values, eliminating data URIs, etc.
*/
// Allow-list of HTTP headers preserved by cleanHeaders. Entries are
// lowercase; cleanHeaders lowercases keys before comparing.
const headersToKeep = new Set([
  // Request headers
  'accept',
  'accept-encoding',
  'accept-ranges',
  // Response headers
  'status',
  'content-length',
  'content-type',
  'content-encoding',
  'content-range',
  'etag',
  'cache-control',
  'last-modified',
  'link',
  'x-robots-tag',
]);
/**
 * Delete (in place) every header whose lowercased name is not in the
 * keep-list. No-op when headers are absent.
 * @param {LH.Crdp.Network.Headers} [headers]
 */
function cleanHeaders(headers) {
  if (!headers) return;
  Object.keys(headers)
    .filter(key => !headersToKeep.has(key.toLowerCase()))
    .forEach(key => delete headers[key]);
}
/**
 * Replace (in place) the payload of a base64 `data:` URL with 'FILLER',
 * keeping the `data:...base64,` prefix. Non-matching URLs are untouched.
 * @param {{url: string}} obj
 */
function cleanDataURI(obj) {
  const {url} = obj;
  obj.url = url.replace(/^(data:.*?base64,).*/, '$1FILLER');
}
/**
 * Strip (in place) the noisy parts of a protocol Response: data-URI payload,
 * non-allow-listed headers, security details, raw header text, and timing
 * fields that are -1 (the protocol's "unavailable" marker).
 * @param {LH.Crdp.Network.Response} [response]
 */
function cleanResponse(response) {
  if (!response) return;
  cleanDataURI(response);
  cleanHeaders(response.requestHeaders);
  cleanHeaders(response.headers);
  // Drop bulky fields that aren't needed for analysis.
  response.securityDetails = undefined;
  response.headersText = undefined;
  response.requestHeadersText = undefined;
  /** @type {any} */
  const timing = response.timing || {};
  // -1 means "no data" in CDP timing; normalize to undefined.
  for (const [k, v] of Object.entries(timing)) {
    if (v === -1) timing[k] = undefined;
  }
}
/**
 * Return a minified deep copy of a devtools log: data URIs are replaced with
 * filler and noisy headers/response fields are stripped from network events.
 * The input log is not mutated (each entry is deep-copied via JSON round-trip).
 * @param {LH.DevtoolsLog} log
 * @return {LH.DevtoolsLog}
 */
function minifyDevtoolsLog(log) {
  return log.map(original => {
    /** @type {LH.Protocol.RawEventMessage} */
    const entry = JSON.parse(JSON.stringify(original));
    // Only network request/response events carry the bulky payloads we trim.
    switch (entry.method) {
      case 'Network.requestWillBeSent':
        cleanDataURI(entry.params.request);
        cleanHeaders(entry.params.request.headers);
        cleanResponse(entry.params.redirectResponse);
        break;
      case 'Network.responseReceived':
        cleanResponse(entry.params.response);
        break;
    }
    return entry;
  });
}
export {minifyDevtoolsLog};

39
node_modules/lighthouse/core/lib/navigation-error.d.ts generated vendored Normal file
View File

@@ -0,0 +1,39 @@
/**
 * @fileoverview Generated type declarations for navigation-error.js: helpers
 * that classify why a page load failed (network failure, Chrome interstitial,
 * non-HTML document) and choose the most specific error to surface.
 */
/**
 * Returns an error if the original network request failed or wasn't found.
 * @param {LH.Artifacts.NetworkRequest|undefined} mainRecord
 * @return {LH.LighthouseError|undefined}
 */
export function getNetworkError(mainRecord: LH.Artifacts.NetworkRequest | undefined): LH.LighthouseError | undefined;
/**
 * Returns an error if we ended up on the `chrome-error` page and all other requests failed.
 * @param {LH.Artifacts.NetworkRequest|undefined} mainRecord
 * @param {Array<LH.Artifacts.NetworkRequest>} networkRecords
 * @return {LH.LighthouseError|undefined}
 */
export function getInterstitialError(mainRecord: LH.Artifacts.NetworkRequest | undefined, networkRecords: Array<LH.Artifacts.NetworkRequest>): LH.LighthouseError | undefined;
/**
 * Returns an error if the page load should be considered failed, e.g. from a
 * main document request failure, a security issue, etc.
 * @param {LH.LighthouseError|undefined} navigationError
 * @param {{url: string, loadFailureMode: LH.Config.SharedPassNavigationJson['loadFailureMode'], networkRecords: Array<LH.Artifacts.NetworkRequest>, warnings: Array<string | LH.IcuMessage>}} context
 * @return {LH.LighthouseError|undefined}
 */
export function getPageLoadError(navigationError: LH.LighthouseError | undefined, context: {
    url: string;
    loadFailureMode: LH.Config.SharedPassNavigationJson['loadFailureMode'];
    networkRecords: Array<LH.Artifacts.NetworkRequest>;
    warnings: Array<string | LH.IcuMessage>;
}): LH.LighthouseError | undefined;
/**
 * Returns an error if we try to load a non-HTML page.
 * Expects a network request with all redirects resolved, otherwise the MIME type may be incorrect.
 * @param {LH.Artifacts.NetworkRequest|undefined} finalRecord
 * @return {LH.LighthouseError|undefined}
 */
export function getNonHtmlError(finalRecord: LH.Artifacts.NetworkRequest | undefined): LH.LighthouseError | undefined;
export namespace UIStrings {
    const warningXhtml: string;
}
// NOTE(review): imports below are emitted by the declaration generator;
// presumably needed by the types referenced above — confirm before removing.
import { NetworkRequest } from './network-request.js';
import { LighthouseError } from './lh-error.js';
//# sourceMappingURL=navigation-error.d.ts.map

175
node_modules/lighthouse/core/lib/navigation-error.js generated vendored Normal file
View File

@@ -0,0 +1,175 @@
/**
* @license Copyright 2021 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import {LighthouseError} from './lh-error.js';
import {NetworkAnalyzer} from './dependency-graph/simulator/network-analyzer.js';
import {NetworkRequest} from './network-request.js';
import * as i18n from '../lib/i18n/i18n.js';
const UIStrings = {
  /**
   * Warning shown in report when the page under test is an XHTML document, which Lighthouse does not directly support
   * so we display a warning.
   */
  warningXhtml:
    'The page MIME type is XHTML: Lighthouse does not explicitly support this document type',
};
// Localized-string formatter bound to this module's UIStrings table.
const str_ = i18n.createIcuMessageFn(import.meta.url, UIStrings);
// MIME types are case-insensitive but Chrome normalizes MIME types to be lowercase.
// The comparisons below can therefore use lowercase constants directly.
const HTML_MIME_TYPE = 'text/html';
const XHTML_MIME_TYPE = 'application/xhtml+xml';
/**
 * Returns an error if the original network request failed or wasn't found.
 * @param {LH.Artifacts.NetworkRequest|undefined} mainRecord
 * @return {LH.LighthouseError|undefined}
 */
function getNetworkError(mainRecord) {
  if (!mainRecord) {
    return new LighthouseError(LighthouseError.errors.NO_DOCUMENT_REQUEST);
  }
  if (mainRecord.failed) {
    const failureText = mainRecord.localizedFailDescription;
    // Match all resolution and DNS failures
    // https://cs.chromium.org/chromium/src/net/base/net_error_list.h?rcl=cd62979b
    const isDnsFailure =
      failureText === 'net::ERR_NAME_NOT_RESOLVED' ||
      failureText === 'net::ERR_NAME_RESOLUTION_FAILED' ||
      failureText.startsWith('net::ERR_DNS_');
    if (isDnsFailure) {
      return new LighthouseError(LighthouseError.errors.DNS_FAILURE);
    }
    return new LighthouseError(
      LighthouseError.errors.FAILED_DOCUMENT_REQUEST, {errorDetails: failureText});
  }
  if (mainRecord.hasErrorStatusCode()) {
    return new LighthouseError(LighthouseError.errors.ERRORED_DOCUMENT_REQUEST, {
      statusCode: `${mainRecord.statusCode}`,
    });
  }
  // Request completed successfully: no network error.
  return undefined;
}
/**
 * Returns an error if we ended up on the `chrome-error` page and all other requests failed.
 * @param {LH.Artifacts.NetworkRequest|undefined} mainRecord
 * @param {Array<LH.Artifacts.NetworkRequest>} networkRecords
 * @return {LH.LighthouseError|undefined}
 */
function getInterstitialError(mainRecord, networkRecords) {
  // No document request at all: let other error paths classify this.
  if (!mainRecord) return undefined;
  // Without any chrome-error:// request, the page never hit an interstitial.
  const sawInterstitial = networkRecords.some(
    record => record.documentURL.startsWith('chrome-error://'));
  if (!sawInterstitial) return undefined;
  // A successful main document means we didn't actually land on the interstitial.
  // FIXME: This doesn't handle client-side redirects.
  // None of our error-handling deals with this case either because passContext.url doesn't handle non-network redirects.
  if (!mainRecord.failed) return undefined;
  // net::ERR_CERT_* failures indicate a certificate/security interstitial.
  if (mainRecord.localizedFailDescription.startsWith('net::ERR_CERT')) {
    return new LighthouseError(LighthouseError.errors.INSECURE_DOCUMENT_REQUEST, {
      securityMessages: mainRecord.localizedFailDescription,
    });
  }
  // Otherwise it's a generic Chrome interstitial error.
  return new LighthouseError(LighthouseError.errors.CHROME_INTERSTITIAL_ERROR);
}
/**
 * Returns an error if we try to load a non-HTML page.
 * Expects a network request with all redirects resolved, otherwise the MIME type may be incorrect.
 * @param {LH.Artifacts.NetworkRequest|undefined} finalRecord
 * @return {LH.LighthouseError|undefined}
 */
function getNonHtmlError(finalRecord) {
  // If we never requested a document, there's no doctype error, let other cases handle it.
  if (!finalRecord) return undefined;
  // mimeType is determined by the browser, we assume Chrome is determining mimeType correctly,
  // independently of 'Content-Type' response headers, and always sending mimeType if well-formed.
  const isHtmlLike =
    finalRecord.mimeType === HTML_MIME_TYPE ||
    finalRecord.mimeType === XHTML_MIME_TYPE;
  if (isHtmlLike) return undefined;
  return new LighthouseError(LighthouseError.errors.NOT_HTML, {
    mimeType: finalRecord.mimeType,
  });
}
/**
* Returns an error if the page load should be considered failed, e.g. from a
* main document request failure, a security issue, etc.
* @param {LH.LighthouseError|undefined} navigationError
* @param {{url: string, loadFailureMode: LH.Config.SharedPassNavigationJson['loadFailureMode'], networkRecords: Array<LH.Artifacts.NetworkRequest>, warnings: Array<string | LH.IcuMessage>}} context
* @return {LH.LighthouseError|undefined}
*/
function getPageLoadError(navigationError, context) {
const {url, loadFailureMode, networkRecords} = context;
/** @type {LH.Artifacts.NetworkRequest|undefined} */
let mainRecord = NetworkAnalyzer.findResourceForUrl(networkRecords, url);
// If the url doesn't give us a network request, it's possible we landed on a chrome-error:// page
// In this case, just get the first document request.
if (!mainRecord) {
const documentRequests = networkRecords.filter(record =>
record.resourceType === NetworkRequest.TYPES.Document
);
if (documentRequests.length) {
mainRecord = documentRequests.reduce((min, r) => {
return r.networkRequestTime < min.networkRequestTime ? r : min;
});
}
}
// MIME Type is only set on the final redirected document request. Use this for the HTML check instead of root.
let finalRecord;
if (mainRecord) {
finalRecord = NetworkAnalyzer.resolveRedirects(mainRecord);
}
if (finalRecord?.mimeType === XHTML_MIME_TYPE) {
context.warnings.push(str_(UIStrings.warningXhtml));
}
const networkError = getNetworkError(mainRecord);
const interstitialError = getInterstitialError(mainRecord, networkRecords);
const nonHtmlError = getNonHtmlError(finalRecord);
// Check to see if we need to ignore the page load failure.
// e.g. When the driver is offline, the load will fail without page offline support.
if (loadFailureMode === 'ignore') return;
// We want to special-case the interstitial beyond FAILED_DOCUMENT_REQUEST. See https://github.com/GoogleChrome/lighthouse/pull/8865#issuecomment-497507618
if (interstitialError) return interstitialError;
// Network errors are usually the most specific and provide the best reason for why the page failed to load.
// Prefer networkError over navigationError.
// Example: `DNS_FAILURE` is better than `NO_FCP`.
if (networkError) return networkError;
// Error if page is not HTML.
if (nonHtmlError) return nonHtmlError;
// Navigation errors are rather generic and express some failure of the page to render properly.
// Use `navigationError` as the last resort.
// Example: `NO_FCP`, the page never painted content for some unknown reason.
return navigationError;
}
export {
getNetworkError,
getInterstitialError,
getPageLoadError,
getNonHtmlError,
UIStrings,
};

121
node_modules/lighthouse/core/lib/network-recorder.d.ts generated vendored Normal file
View File

@@ -0,0 +1,121 @@
/**
 * @fileoverview Generated type declarations for network-recorder.js, which
 * replays DevTools protocol network events to reconstruct NetworkRequest
 * objects (mimicking the DevTools SDK NetworkManager).
 */
export type NetworkRecorderEventMap = {
    requeststarted: [NetworkRequest];
    requestfinished: [NetworkRequest];
};
export type RequestEmitter = LH.Protocol.StrictEventEmitterClass<NetworkRecorderEventMap>;
// Synthetic base class: tsc emits this pattern so NetworkRecorder can extend a
// strongly-typed EventEmitter without exporting the intermediate class.
declare const NetworkRecorder_base: RequestEmitter;
export class NetworkRecorder extends NetworkRecorder_base {
    /**
     * @param {NetworkRequest} record The record to find the initiator of
     * @param {Map<string, NetworkRequest[]>} recordsByURL
     * @return {NetworkRequest|null}
     * @private
     */
    private static _chooseInitiatorRequest;
    /**
     * Construct network records from a log of devtools protocol messages.
     * @param {LH.DevtoolsLog} devtoolsLog
     * @return {Array<LH.Artifacts.NetworkRequest>}
     */
    static recordsFromLogs(devtoolsLog: import("../index.js").DevtoolsLog): Array<LH.Artifacts.NetworkRequest>;
    /** @type {NetworkRequest[]} */
    _records: NetworkRequest[];
    /** @type {Map<string, NetworkRequest>} */
    _recordsById: Map<string, NetworkRequest>;
    /**
     * Returns the array of raw network request data without finalizing the initiator and
     * redirect chain.
     * @return {Array<NetworkRequest>}
     */
    getRawRecords(): Array<NetworkRequest>;
    /**
     * Listener for the DevTools SDK NetworkManager's RequestStarted event, which includes both
     * web socket and normal request creation.
     * @param {NetworkRequest} request
     * @private
     */
    private onRequestStarted;
    /**
     * Listener for the DevTools SDK NetworkManager's RequestFinished event, which includes
     * request finish, failure, and redirect, as well as the closing of web sockets.
     * @param {NetworkRequest} request
     * @private
     */
    private onRequestFinished;
    /**
     * @param {{params: LH.Crdp.Network.RequestWillBeSentEvent, targetType: LH.Protocol.TargetType, sessionId?: string}} event
     */
    onRequestWillBeSent(event: {
        params: LH.Crdp.Network.RequestWillBeSentEvent;
        targetType: LH.Protocol.TargetType;
        sessionId?: string;
    }): void;
    /**
     * @param {{params: LH.Crdp.Network.RequestServedFromCacheEvent, targetType: LH.Protocol.TargetType, sessionId?: string}} event
     */
    onRequestServedFromCache(event: {
        params: LH.Crdp.Network.RequestServedFromCacheEvent;
        targetType: LH.Protocol.TargetType;
        sessionId?: string;
    }): void;
    /**
     * @param {{params: LH.Crdp.Network.ResponseReceivedEvent, targetType: LH.Protocol.TargetType, sessionId?: string}} event
     */
    onResponseReceived(event: {
        params: LH.Crdp.Network.ResponseReceivedEvent;
        targetType: LH.Protocol.TargetType;
        sessionId?: string;
    }): void;
    /**
     * @param {{params: LH.Crdp.Network.DataReceivedEvent, targetType: LH.Protocol.TargetType, sessionId?: string}} event
     */
    onDataReceived(event: {
        params: LH.Crdp.Network.DataReceivedEvent;
        targetType: LH.Protocol.TargetType;
        sessionId?: string;
    }): void;
    /**
     * @param {{params: LH.Crdp.Network.LoadingFinishedEvent, targetType: LH.Protocol.TargetType, sessionId?: string}} event
     */
    onLoadingFinished(event: {
        params: LH.Crdp.Network.LoadingFinishedEvent;
        targetType: LH.Protocol.TargetType;
        sessionId?: string;
    }): void;
    /**
     * @param {{params: LH.Crdp.Network.LoadingFailedEvent, targetType: LH.Protocol.TargetType, sessionId?: string}} event
     */
    onLoadingFailed(event: {
        params: LH.Crdp.Network.LoadingFailedEvent;
        targetType: LH.Protocol.TargetType;
        sessionId?: string;
    }): void;
    /**
     * @param {{params: LH.Crdp.Network.ResourceChangedPriorityEvent, targetType: LH.Protocol.TargetType, sessionId?: string}} event
     */
    onResourceChangedPriority(event: {
        params: LH.Crdp.Network.ResourceChangedPriorityEvent;
        targetType: LH.Protocol.TargetType;
        sessionId?: string;
    }): void;
    /**
     * Routes network events to their handlers, so we can construct networkRecords
     * @param {LH.Protocol.RawEventMessage} event
     */
    dispatch(event: LH.Protocol.RawEventMessage): void;
    /**
     * Redirected requests all have identical requestIds over the protocol. Once a request has been
     * redirected all future messages referencing that requestId are about the new destination, not
     * the original. This method is a helper for finding the real request object to which the current
     * message is referring.
     *
     * @param {string} requestId
     * @param {LH.Protocol.TargetType} targetType
     * @param {string|undefined} sessionId
     * @return {NetworkRequest|undefined}
     */
    _findRealRequestAndSetSession(requestId: string, targetType: LH.Protocol.TargetType, sessionId: string | undefined): NetworkRequest | undefined;
}
import { NetworkRequest } from './network-request.js';
export {};
//# sourceMappingURL=network-recorder.d.ts.map

338
node_modules/lighthouse/core/lib/network-recorder.js generated vendored Normal file
View File

@@ -0,0 +1,338 @@
/**
* @license Copyright 2016 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import {EventEmitter} from 'events';
import log from 'lighthouse-logger';
import {NetworkRequest} from './network-request.js';
import {PageDependencyGraph} from '../computed/page-dependency-graph.js';
/**
* @typedef {{
* requeststarted: [NetworkRequest],
* requestfinished: [NetworkRequest],
* }} NetworkRecorderEventMap
*/
/** @typedef {LH.Protocol.StrictEventEmitterClass<NetworkRecorderEventMap>} RequestEmitter */
// Cast EventEmitter so 'requeststarted'/'requestfinished' events carry
// NetworkRequest payloads (see the NetworkRecorderEventMap typedef above).
const RequestEventEmitter = /** @type {RequestEmitter} */ (EventEmitter);
/**
 * Replays raw DevTools protocol network messages and reconstructs
 * NetworkRequest objects, including redirect chains and (in recordsFromLogs)
 * initiator relationships. Fills the role of the DevTools SDK NetworkManager.
 */
class NetworkRecorder extends RequestEventEmitter {
  /**
   * Creates an instance of NetworkRecorder.
   */
  constructor() {
    super();
    /** @type {NetworkRequest[]} */
    this._records = [];
    /** @type {Map<string, NetworkRequest>} */
    this._recordsById = new Map();
  }
  /**
   * Returns the array of raw network request data without finalizing the initiator and
   * redirect chain.
   * @return {Array<NetworkRequest>}
   */
  getRawRecords() {
    return Array.from(this._records);
  }
  /**
   * Listener for the DevTools SDK NetworkManager's RequestStarted event, which includes both
   * web socket and normal request creation.
   * @param {NetworkRequest} request
   * @private
   */
  onRequestStarted(request) {
    this._records.push(request);
    this._recordsById.set(request.requestId, request);
    this.emit('requeststarted', request);
  }
  /**
   * Listener for the DevTools SDK NetworkManager's RequestFinished event, which includes
   * request finish, failure, and redirect, as well as the closing of web sockets.
   * @param {NetworkRequest} request
   * @private
   */
  onRequestFinished(request) {
    this.emit('requestfinished', request);
  }
  // The below methods proxy network data into the NetworkRequest object which mimics the
  // DevTools SDK network layer.
  /**
   * @param {{params: LH.Crdp.Network.RequestWillBeSentEvent, targetType: LH.Protocol.TargetType, sessionId?: string}} event
   */
  onRequestWillBeSent(event) {
    const data = event.params;
    const originalRequest = this._findRealRequestAndSetSession(
      data.requestId, event.targetType, event.sessionId);
    // This is a simple new request, create the NetworkRequest object and finish.
    if (!originalRequest) {
      const request = new NetworkRequest();
      request.onRequestWillBeSent(data);
      request.sessionId = event.sessionId;
      request.sessionTargetType = event.targetType;
      this.onRequestStarted(request);
      log.verbose('network', `request will be sent to ${request.url}`);
      return;
    }
    // A repeated requestWillBeSent without a redirectResponse is unexpected; ignore it.
    // TODO: beacon to Sentry, https://github.com/GoogleChrome/lighthouse/issues/7041
    if (!data.redirectResponse) {
      return;
    }
    // On redirect, another requestWillBeSent message is fired for the same requestId.
    // Update/finish the previous network request and create a new one for the redirect.
    const modifiedData = {
      ...data,
      // Copy over the initiator as well to match DevTools behavior
      initiator: originalRequest.initiator,
      requestId: `${originalRequest.requestId}:redirect`,
    };
    const redirectedRequest = new NetworkRequest();
    redirectedRequest.onRequestWillBeSent(modifiedData);
    originalRequest.onRedirectResponse(data);
    log.verbose('network', `${originalRequest.url} redirected to ${redirectedRequest.url}`);
    originalRequest.redirectDestination = redirectedRequest;
    redirectedRequest.redirectSource = originalRequest;
    // Start the redirect request before finishing the original so we don't get erroneous quiet periods
    this.onRequestStarted(redirectedRequest);
    this.onRequestFinished(originalRequest);
  }
  /**
   * @param {{params: LH.Crdp.Network.RequestServedFromCacheEvent, targetType: LH.Protocol.TargetType, sessionId?: string}} event
   */
  onRequestServedFromCache(event) {
    const data = event.params;
    const request = this._findRealRequestAndSetSession(
      data.requestId, event.targetType, event.sessionId);
    if (!request) return;
    log.verbose('network', `${request.url} served from cache`);
    request.onRequestServedFromCache();
  }
  /**
   * @param {{params: LH.Crdp.Network.ResponseReceivedEvent, targetType: LH.Protocol.TargetType, sessionId?: string}} event
   */
  onResponseReceived(event) {
    const data = event.params;
    const request = this._findRealRequestAndSetSession(
      data.requestId, event.targetType, event.sessionId);
    if (!request) return;
    log.verbose('network', `${request.url} response received`);
    request.onResponseReceived(data);
  }
  /**
   * @param {{params: LH.Crdp.Network.DataReceivedEvent, targetType: LH.Protocol.TargetType, sessionId?: string}} event
   */
  onDataReceived(event) {
    const data = event.params;
    const request = this._findRealRequestAndSetSession(
      data.requestId, event.targetType, event.sessionId);
    if (!request) return;
    log.verbose('network', `${request.url} data received`);
    request.onDataReceived(data);
  }
  /**
   * @param {{params: LH.Crdp.Network.LoadingFinishedEvent, targetType: LH.Protocol.TargetType, sessionId?: string}} event
   */
  onLoadingFinished(event) {
    const data = event.params;
    const request = this._findRealRequestAndSetSession(
      data.requestId, event.targetType, event.sessionId);
    if (!request) return;
    log.verbose('network', `${request.url} loading finished`);
    request.onLoadingFinished(data);
    this.onRequestFinished(request);
  }
  /**
   * @param {{params: LH.Crdp.Network.LoadingFailedEvent, targetType: LH.Protocol.TargetType, sessionId?: string}} event
   */
  onLoadingFailed(event) {
    const data = event.params;
    const request = this._findRealRequestAndSetSession(
      data.requestId, event.targetType, event.sessionId);
    if (!request) return;
    log.verbose('network', `${request.url} loading failed`);
    request.onLoadingFailed(data);
    this.onRequestFinished(request);
  }
  /**
   * @param {{params: LH.Crdp.Network.ResourceChangedPriorityEvent, targetType: LH.Protocol.TargetType, sessionId?: string}} event
   */
  onResourceChangedPriority(event) {
    const data = event.params;
    const request = this._findRealRequestAndSetSession(
      data.requestId, event.targetType, event.sessionId);
    if (!request) return;
    request.onResourceChangedPriority(data);
  }
  /**
   * Routes network events to their handlers, so we can construct networkRecords
   * @param {LH.Protocol.RawEventMessage} event
   */
  dispatch(event) {
    switch (event.method) {
      case 'Network.requestWillBeSent': return this.onRequestWillBeSent(event);
      case 'Network.requestServedFromCache': return this.onRequestServedFromCache(event);
      case 'Network.responseReceived': return this.onResponseReceived(event);
      case 'Network.dataReceived': return this.onDataReceived(event);
      case 'Network.loadingFinished': return this.onLoadingFinished(event);
      case 'Network.loadingFailed': return this.onLoadingFailed(event);
      case 'Network.resourceChangedPriority': return this.onResourceChangedPriority(event);
      default: return;
    }
  }
  /**
   * Redirected requests all have identical requestIds over the protocol. Once a request has been
   * redirected all future messages referencing that requestId are about the new destination, not
   * the original. This method is a helper for finding the real request object to which the current
   * message is referring.
   *
   * @param {string} requestId
   * @param {LH.Protocol.TargetType} targetType
   * @param {string|undefined} sessionId
   * @return {NetworkRequest|undefined}
   */
  _findRealRequestAndSetSession(requestId, targetType, sessionId) {
    let request = this._recordsById.get(requestId);
    if (!request || !request.isValid) return undefined;
    // Walk the redirect chain to its tail; that's the request this message is about.
    while (request.redirectDestination) {
      request = request.redirectDestination;
    }
    request.setSession(sessionId);
    request.sessionTargetType = targetType;
    return request;
  }
  /**
   * Heuristically chooses the request that initiated `record` by progressively
   * filtering the candidate set; returns null when the answer is ambiguous.
   * @param {NetworkRequest} record The record to find the initiator of
   * @param {Map<string, NetworkRequest[]>} recordsByURL
   * @return {NetworkRequest|null}
   * @private
   */
  static _chooseInitiatorRequest(record, recordsByURL) {
    if (record.redirectSource) {
      return record.redirectSource;
    }
    const initiatorURL = PageDependencyGraph.getNetworkInitiators(record)[0];
    let candidates = recordsByURL.get(initiatorURL) || [];
    // The (valid) initiator must come before the initiated request.
    candidates = candidates.filter(c => {
      return c.responseHeadersEndTime <= record.networkRequestTime &&
        c.finished && !c.failed;
    });
    if (candidates.length > 1) {
      // Disambiguate based on prefetch. Prefetch requests have type 'Other' and cannot
      // initiate requests, so we drop them here.
      const nonPrefetchCandidates = candidates.filter(
        cand => cand.resourceType !== NetworkRequest.TYPES.Other);
      if (nonPrefetchCandidates.length) {
        candidates = nonPrefetchCandidates;
      }
    }
    if (candidates.length > 1) {
      // Disambiguate based on frame. It's likely that the initiator comes from the same frame.
      const sameFrameCandidates = candidates.filter(cand => cand.frameId === record.frameId);
      if (sameFrameCandidates.length) {
        candidates = sameFrameCandidates;
      }
    }
    if (candidates.length > 1 && record.initiator.type === 'parser') {
      // Filter to just Documents when initiator type is parser.
      const documentCandidates = candidates.filter(cand =>
        cand.resourceType === NetworkRequest.TYPES.Document);
      if (documentCandidates.length) {
        candidates = documentCandidates;
      }
    }
    if (candidates.length > 1) {
      // If all real loads came from successful preloads (url preloaded and
      // loads came from the cache), filter to link rel=preload request(s).
      const linkPreloadCandidates = candidates.filter(c => c.isLinkPreload);
      if (linkPreloadCandidates.length) {
        const nonPreloadCandidates = candidates.filter(c => !c.isLinkPreload);
        const allPreloaded = nonPreloadCandidates.every(c => c.fromDiskCache || c.fromMemoryCache);
        if (nonPreloadCandidates.length && allPreloaded) {
          candidates = linkPreloadCandidates;
        }
      }
    }
    // Only return an initiator if the result is unambiguous.
    return candidates.length === 1 ? candidates[0] : null;
  }
  /**
   * Construct network records from a log of devtools protocol messages.
   * @param {LH.DevtoolsLog} devtoolsLog
   * @return {Array<LH.Artifacts.NetworkRequest>}
   */
  static recordsFromLogs(devtoolsLog) {
    const networkRecorder = new NetworkRecorder();
    // playback all the devtools messages to recreate network records
    devtoolsLog.forEach(message => networkRecorder.dispatch(message));
    // get out the list of records & filter out invalid records
    const records = networkRecorder.getRawRecords().filter(record => record.isValid);
    /** @type {Map<string, NetworkRequest[]>} */
    const recordsByURL = new Map();
    for (const record of records) {
      // (intentional shadow) the per-URL bucket this record belongs to.
      const records = recordsByURL.get(record.url) || [];
      records.push(record);
      recordsByURL.set(record.url, records);
    }
    // set the initiatorRequest and redirects array
    for (const record of records) {
      const initiatorRequest = NetworkRecorder._chooseInitiatorRequest(record, recordsByURL);
      if (initiatorRequest) {
        record.setInitiatorRequest(initiatorRequest);
      }
      let finalRecord = record;
      while (finalRecord.redirectDestination) finalRecord = finalRecord.redirectDestination;
      // Only build the redirects array once, on the last request in the chain.
      if (finalRecord === record || finalRecord.redirects) continue;
      const redirects = [];
      for (
        let redirect = finalRecord.redirectSource;
        redirect;
        redirect = redirect.redirectSource
      ) {
        redirects.unshift(redirect);
      }
      finalRecord.redirects = redirects;
    }
    return records;
  }
}
export {NetworkRecorder};

253
node_modules/lighthouse/core/lib/network-request.d.ts generated vendored Normal file
View File

@@ -0,0 +1,253 @@
/**
 * @fileoverview Generated type declarations for network-request.js, the
 * Lighthouse stand-in for the DevTools SDK NetworkRequest/NetworkManager
 * network model.
 */
export type HeaderEntry = {
    name: string;
    value: string;
};
export type ParsedURL = {
    /**
     * Equivalent to a `new URL(url).protocol` BUT w/o the trailing colon (:)
     */
    scheme: string;
    /**
     * Equivalent to a `new URL(url).hostname`
     */
    host: string;
    securityOrigin: string;
};
export type LightriderStatistics = {
    /**
     * The difference in networkEndTime between the observed Lighthouse networkEndTime and Lightrider's derived networkEndTime.
     */
    endTimeDeltaMs: number;
    /**
     * The time spent making a TCP connection (connect + SSL). Note: this is poorly named.
     */
    TCPMs: number;
    /**
     * The time spent requesting a resource from a remote server, we use this to approx RTT. Note: this is poorly named, it really should be "server response time".
     */
    requestMs: number;
    /**
     * Time to receive the entire response payload starting the clock on receiving the first fragment (first non-header byte).
     */
    responseMs: number;
};
export class NetworkRequest {
    /**
     * Convert the requestId to backend-version by removing the `:redirect` portion
     *
     * @param {string} requestId
     * @return {string}
     */
    static getRequestIdForBackend(requestId: string): string;
    /**
     * Based on DevTools NetworkManager.
     * @see https://github.com/ChromeDevTools/devtools-frontend/blob/3415ee28e86a3f4bcc2e15b652d22069938df3a6/front_end/sdk/NetworkManager.js#L285-L297
     * @param {LH.Crdp.Network.Headers} headersDict
     * @return {Array<HeaderEntry>}
     */
    static _headersDictToHeadersArray(headersDict: LH.Crdp.Network.Headers): Array<HeaderEntry>;
    static get TYPES(): LH.Util.SelfMap<LH.Crdp.Network.ResourceType>;
    /**
     * @param {NetworkRequest} record
     * @return {boolean}
     */
    static isNonNetworkRequest(record: NetworkRequest): boolean;
    /**
     * Technically there's not alignment on URLs that create "secure connections" vs "secure contexts"
     * https://github.com/GoogleChrome/lighthouse/pull/11766#discussion_r582340683
     * But for our purposes, we don't need to worry too much.
     * @param {NetworkRequest} record
     * @return {boolean}
     */
    static isSecureRequest(record: NetworkRequest): boolean;
    /**
     * Returns whether the network request was an HSTS redirect request.
     * @param {NetworkRequest} record
     * @return {boolean}
     */
    static isHstsRequest(record: NetworkRequest): boolean;
    /**
     * Resource size is almost always the right one to be using because of the below:
     * `transferSize = resourceSize + headers.length`.
     * HOWEVER, there are some cases where an image is compressed again over the network and transfer size
     * is smaller (see https://github.com/GoogleChrome/lighthouse/pull/4968).
     * Use the min of the two numbers to be safe.
     * `transferSize` of cached records is 0
     * @param {NetworkRequest} networkRecord
     * @return {number}
     */
    static getResourceSizeOnNetwork(networkRecord: NetworkRequest): number;
    requestId: string;
    connectionId: string;
    connectionReused: boolean;
    url: string;
    protocol: string;
    isSecure: boolean;
    isValid: boolean;
    parsedURL: ParsedURL;
    documentURL: string;
    /** When the renderer process initially discovers a network request, in milliseconds. */
    rendererStartTime: number;
    /**
     * When the network service is about to handle a request, ie. just before going to the
     * HTTP cache or going to the network for DNS/connection setup, in milliseconds.
     */
    networkRequestTime: number;
    /** When the last byte of the response headers is received, in milliseconds. */
    responseHeadersEndTime: number;
    /** When the last byte of the response body is received, in milliseconds. */
    networkEndTime: number;
    transferSize: number;
    resourceSize: number;
    fromDiskCache: boolean;
    fromMemoryCache: boolean;
    fromPrefetchCache: boolean;
    /** @type {LightriderStatistics|undefined} Extra timing information available only when run in Lightrider. */
    lrStatistics: LightriderStatistics | undefined;
    finished: boolean;
    requestMethod: string;
    statusCode: number;
    /** @type {NetworkRequest|undefined} The network request that redirected to this one */
    redirectSource: NetworkRequest | undefined;
    /** @type {NetworkRequest|undefined} The network request that this one redirected to */
    redirectDestination: NetworkRequest | undefined;
    /** @type {NetworkRequest[]|undefined} The chain of network requests that redirected to this one */
    redirects: NetworkRequest[] | undefined;
    failed: boolean;
    localizedFailDescription: string;
    /** @type {LH.Crdp.Network.Initiator} */
    initiator: LH.Crdp.Network.Initiator;
    /** @type {LH.Crdp.Network.ResourceTiming|undefined} */
    timing: LH.Crdp.Network.ResourceTiming | undefined;
    /** @type {LH.Crdp.Network.ResourceType|undefined} */
    resourceType: LH.Crdp.Network.ResourceType | undefined;
    mimeType: string;
    /** @type {LH.Crdp.Network.ResourcePriority} */
    priority: LH.Crdp.Network.ResourcePriority;
    /** @type {NetworkRequest|undefined} */
    initiatorRequest: NetworkRequest | undefined;
    /** @type {HeaderEntry[]} */
    responseHeaders: HeaderEntry[];
    /** @type {string} */
    responseHeadersText: string;
    fetchedViaServiceWorker: boolean;
    /** @type {string|undefined} */
    frameId: string | undefined;
    /** @type {string|undefined} */
    sessionId: string | undefined;
    /** @type {LH.Protocol.TargetType|undefined} */
    sessionTargetType: LH.Protocol.TargetType | undefined;
    isLinkPreload: boolean;
    /**
     * @return {boolean}
     */
    hasErrorStatusCode(): boolean;
    /**
     * @param {NetworkRequest} initiatorRequest
     */
    setInitiatorRequest(initiatorRequest: NetworkRequest): void;
    /**
     * @param {LH.Crdp.Network.RequestWillBeSentEvent} data
     */
    onRequestWillBeSent(data: LH.Crdp.Network.RequestWillBeSentEvent): void;
    onRequestServedFromCache(): void;
    /**
     * @param {LH.Crdp.Network.ResponseReceivedEvent} data
     */
    onResponseReceived(data: LH.Crdp.Network.ResponseReceivedEvent): void;
    /**
     * @param {LH.Crdp.Network.DataReceivedEvent} data
     */
    onDataReceived(data: LH.Crdp.Network.DataReceivedEvent): void;
    /**
     * @param {LH.Crdp.Network.LoadingFinishedEvent} data
     */
    onLoadingFinished(data: LH.Crdp.Network.LoadingFinishedEvent): void;
    /**
     * @param {LH.Crdp.Network.LoadingFailedEvent} data
     */
    onLoadingFailed(data: LH.Crdp.Network.LoadingFailedEvent): void;
    /**
     * @param {LH.Crdp.Network.ResourceChangedPriorityEvent} data
     */
    onResourceChangedPriority(data: LH.Crdp.Network.ResourceChangedPriorityEvent): void;
    /**
     * @param {LH.Crdp.Network.RequestWillBeSentEvent} data
     */
    onRedirectResponse(data: LH.Crdp.Network.RequestWillBeSentEvent): void;
    /**
     * @param {string|undefined} sessionId
     */
    setSession(sessionId: string | undefined): void;
    get isOutOfProcessIframe(): boolean;
    /**
     * @param {LH.Crdp.Network.Response} response
     * @param {number} timestamp in seconds
     * @param {LH.Crdp.Network.ResponseReceivedEvent['type']=} resourceType
     */
    _onResponse(response: LH.Crdp.Network.Response, timestamp: number, resourceType?: LH.Crdp.Network.ResponseReceivedEvent['type'] | undefined): void;
    /**
     * Resolve differences between conflicting timing signals. Based on the property setters in DevTools.
     * @see https://github.com/ChromeDevTools/devtools-frontend/blob/56a99365197b85c24b732ac92b0ac70feed80179/front_end/sdk/NetworkRequest.js#L485-L502
     * @param {LH.Crdp.Network.ResourceTiming} timing
     */
    _recomputeTimesWithResourceTiming(timing: LH.Crdp.Network.ResourceTiming): void;
    /**
     * Update responseHeadersEndTime to the networkEndTime if networkEndTime is earlier.
     * A response can't be received after the entire request finished.
     */
    _updateResponseHeadersEndTimeIfNecessary(): void;
    /**
     * LR loses transfer size information, but passes it in the 'X-TotalFetchedSize' header.
     * 'X-TotalFetchedSize' is the canonical transfer size in LR. Nothing should supersede it.
     *
     * The total length of the encoded data is spread out among multiple events. The sum of the
     * values in onResponseReceived and all the onDataReceived events typically equals the value
     * seen on the onLoadingFinished event. In <1% of cases we see the values differ. As we process
     * onResponseReceived and onDataReceived we accumulate the total encodedDataLength. When we
     * process onLoadingFinished, we override the accumulated total. We do this so that if the
     * request is aborted or fails, we still get a value via the accumulation.
     *
     * In Lightrider, due to instrumentation limitations, our values for encodedDataLength are bogus
     * and not valid. However the resource's true encodedDataLength/transferSize is shared via a
     * special response header, X-TotalFetchedSize. In this situation, we read this value from
     * responseReceived, use it for the transferSize and ignore the encodedDataLength values in
     * both dataReceived and loadingFinished.
     */
    _updateTransferSizeForLightrider(): void;
    /**
     * LR loses protocol information.
     */
    _updateProtocolForLightrider(): void;
    /**
     * TODO(compat): remove M116.
     * `timing.receiveHeadersStart` was added recently, and will be in M116. Until then,
     * set it to receiveHeadersEnd, which is close enough, to allow consumers of NetworkRequest
     * to use the new field without accounting for this backcompat.
     */
    _backfillReceiveHeaderStartTiming(): void;
    /**
     * LR gets additional, accurate timing information from its underlying fetch infrastructure. This
     * is passed in via X-Headers similar to 'X-TotalFetchedSize'.
     */
    _updateTimingsForLightrider(): void;
}
export namespace NetworkRequest {
    export { HEADER_TCP };
    export { HEADER_SSL };
    export { HEADER_REQ };
    export { HEADER_RES };
    export { HEADER_TOTAL };
    export { HEADER_FETCHED_SIZE };
    export { HEADER_PROTOCOL_IS_H2 };
}
import * as LH from '../../types/lh.js';
// Lightrider passes its out-of-band timing/size measurements via these X-Headers.
declare const HEADER_TCP: "X-TCPMs";
declare const HEADER_SSL: "X-SSLMs";
declare const HEADER_REQ: "X-RequestMs";
declare const HEADER_RES: "X-ResponseMs";
declare const HEADER_TOTAL: "X-TotalMs";
declare const HEADER_FETCHED_SIZE: "X-TotalFetchedSize";
declare const HEADER_PROTOCOL_IS_H2: "X-ProtocolIsH2";
export {};
//# sourceMappingURL=network-request.d.ts.map

617
node_modules/lighthouse/core/lib/network-request.js generated vendored Normal file
View File

@@ -0,0 +1,617 @@
/**
* @license Copyright 2018 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
/**
* @fileoverview Fills most of the role of NetworkManager and NetworkRequest classes from DevTools.
* @see https://cs.chromium.org/chromium/src/third_party/blink/renderer/devtools/front_end/sdk/NetworkRequest.js
* @see https://cs.chromium.org/chromium/src/third_party/blink/renderer/devtools/front_end/sdk/NetworkManager.js
A detailed overview of the Chromium networking layer can be found here:
https://raw.githubusercontent.com/GoogleChrome/lighthouse/main/docs/Network-Timings.svg
Below is a simplified model.
DevTools box-whisker
|-------[xxxxxXXXXXX]-|
(1) (2) (3) (4)
(1) leading whisker
Covers various stages:
- Queuing (delta between renderer knowing about request and network manager knowing about it)
- DNS
- Connection setup cost (TCP, TLS, SSL, etc.)
CDP: left whisker edge is Network.requestWillBeSent timestamp
(2) light shaded region
browser network manager has initiated the request, hasn't received any bytes back yet
Note: even with early-hint response, only the "real" response is considered here
CDP: Network.requestWillBeSentExtraInfo timing.requestTime + timing.sendStart
(3) dark shaded region
browser network manager has received the very first header byte
CDP: Network.requestWillBeSentExtraInfo timing.requestTime + timing.receiveHeadersEnd
CDP: (right edge of box) Network.finished/Network.failed timestamp
Trace: ResourceFinish.finishedTime
(4) trailing whisker
Marks time when render process main thread is available to use the resource. Could be long
if main thread is busy. Currently don't use this anywhere.
Trace: ResourceFinish.ts
*/
import * as LH from '../../types/lh.js';
import UrlUtils from './url-utils.js';
// Lightrider X-Header names for timing information.
// See: _updateTransferSizeForLightrider and _updateTimingsForLightrider.
const HEADER_TCP = 'X-TCPMs'; // Note: this should have been called something like ConnectMs, as it includes SSL.
const HEADER_SSL = 'X-SSLMs';
const HEADER_REQ = 'X-RequestMs';
const HEADER_RES = 'X-ResponseMs';
const HEADER_TOTAL = 'X-TotalMs';
const HEADER_FETCHED_SIZE = 'X-TotalFetchedSize';
const HEADER_PROTOCOL_IS_H2 = 'X-ProtocolIsH2';
/**
* @typedef HeaderEntry
* @property {string} name
* @property {string} value
*/
/**
* @typedef ParsedURL
* @property {string} scheme Equivalent to a `new URL(url).protocol` BUT w/o the trailing colon (:)
* @property {string} host Equivalent to a `new URL(url).hostname`
* @property {string} securityOrigin
*/
/**
* @typedef LightriderStatistics
* @property {number} endTimeDeltaMs The difference in networkEndTime between the observed Lighthouse networkEndTime and Lightrider's derived networkEndTime.
* @property {number} TCPMs The time spent making a TCP connection (connect + SSL). Note: this is poorly named.
* @property {number} requestMs The time spent requesting a resource from a remote server, we use this to approx RTT. Note: this is poorly named, it really should be "server response time".
* @property {number} responseMs Time to receive the entire response payload starting the clock on receiving the first fragment (first non-header byte).
*/
/** @type {LH.Util.SelfMap<LH.Crdp.Network.ResourceType>} */
const RESOURCE_TYPES = {
XHR: 'XHR',
Fetch: 'Fetch',
EventSource: 'EventSource',
Script: 'Script',
Stylesheet: 'Stylesheet',
Image: 'Image',
Media: 'Media',
Font: 'Font',
Document: 'Document',
TextTrack: 'TextTrack',
WebSocket: 'WebSocket',
Other: 'Other',
Manifest: 'Manifest',
SignedExchange: 'SignedExchange',
Ping: 'Ping',
Preflight: 'Preflight',
CSPViolationReport: 'CSPViolationReport',
Prefetch: 'Prefetch',
};
/**
 * Mutable record of a single network request, populated incrementally from
 * Chrome DevTools Protocol `Network.*` events (see the file overview above).
 * All `*Time` fields are in milliseconds since an arbitrary epoch; -1 means
 * "not yet observed".
 */
class NetworkRequest {
  constructor() {
    this.requestId = '';
    this.connectionId = '0';
    this.connectionReused = false;
    this.url = '';
    this.protocol = '';
    this.isSecure = false;
    // Stays false unless onRequestWillBeSent sees a parseable URL.
    this.isValid = false;
    this.parsedURL = /** @type {ParsedURL} */ ({scheme: ''});
    this.documentURL = '';
    /** When the renderer process initially discovers a network request, in milliseconds. */
    this.rendererStartTime = -1;
    /**
     * When the network service is about to handle a request, ie. just before going to the
     * HTTP cache or going to the network for DNS/connection setup, in milliseconds.
     */
    this.networkRequestTime = -1;
    /** When the last byte of the response headers is received, in milliseconds. */
    this.responseHeadersEndTime = -1;
    /** When the last byte of the response body is received, in milliseconds. */
    this.networkEndTime = -1;
    // Go read the comment on _updateTransferSizeForLightrider.
    this.transferSize = 0;
    this.resourceSize = 0;
    this.fromDiskCache = false;
    this.fromMemoryCache = false;
    this.fromPrefetchCache = false;
    /** @type {LightriderStatistics|undefined} Extra timing information available only when run in Lightrider. */
    this.lrStatistics = undefined;
    this.finished = false;
    this.requestMethod = '';
    this.statusCode = -1;
    /** @type {NetworkRequest|undefined} The network request that redirected to this one */
    this.redirectSource = undefined;
    /** @type {NetworkRequest|undefined} The network request that this one redirected to */
    this.redirectDestination = undefined;
    /** @type {NetworkRequest[]|undefined} The chain of network requests that redirected to this one */
    this.redirects = undefined;
    this.failed = false;
    this.localizedFailDescription = '';
    /** @type {LH.Crdp.Network.Initiator} */
    this.initiator = {type: 'other'};
    /** @type {LH.Crdp.Network.ResourceTiming|undefined} */
    this.timing = undefined;
    /** @type {LH.Crdp.Network.ResourceType|undefined} */
    this.resourceType = undefined;
    this.mimeType = '';
    /** @type {LH.Crdp.Network.ResourcePriority} */
    this.priority = 'Low';
    /** @type {NetworkRequest|undefined} */
    this.initiatorRequest = undefined;
    /** @type {HeaderEntry[]} */
    this.responseHeaders = [];
    /** @type {string} */
    this.responseHeadersText = '';
    this.fetchedViaServiceWorker = false;
    /** @type {string|undefined} */
    this.frameId = '';
    /** @type {string|undefined} */
    this.sessionId = undefined;
    /** @type {LH.Protocol.TargetType|undefined} */
    this.sessionTargetType = undefined;
    this.isLinkPreload = false;
  }
  /**
   * @return {boolean}
   */
  hasErrorStatusCode() {
    return this.statusCode >= 400;
  }
  /**
   * @param {NetworkRequest} initiatorRequest
   */
  setInitiatorRequest(initiatorRequest) {
    this.initiatorRequest = initiatorRequest;
  }
  /**
   * Handler for the CDP Network.requestWillBeSent event. If the URL does not
   * parse, the record is left with `isValid === false` and no fields set.
   * @param {LH.Crdp.Network.RequestWillBeSentEvent} data
   */
  onRequestWillBeSent(data) {
    this.requestId = data.requestId;
    let url;
    try {
      // try to construct the url and fill in request
      url = new URL(data.request.url);
    } catch (e) {
      // isValid left false, all other data is blank
      return;
    }
    this.url = data.request.url;
    this.documentURL = data.documentURL;
    this.parsedURL = {
      scheme: url.protocol.split(':')[0],
      // Intentional, DevTools uses different terminology
      host: url.hostname,
      securityOrigin: url.origin,
    };
    this.isSecure = UrlUtils.isSecureScheme(this.parsedURL.scheme);
    this.rendererStartTime = data.timestamp * 1000;
    // Expected to be overridden with better value in `_recomputeTimesWithResourceTiming`.
    this.networkRequestTime = this.rendererStartTime;
    this.requestMethod = data.request.method;
    this.initiator = data.initiator;
    this.resourceType = data.type && RESOURCE_TYPES[data.type];
    this.priority = data.request.initialPriority;
    this.frameId = data.frameId;
    this.isLinkPreload = data.initiator.type === 'preload' || !!data.request.isLinkPreload;
    this.isValid = true;
  }
  // Memory-cache hits produce no further network traffic; just record the source.
  onRequestServedFromCache() {
    this.fromMemoryCache = true;
  }
  /**
   * Handler for the CDP Network.responseReceived event.
   * @param {LH.Crdp.Network.ResponseReceivedEvent} data
   */
  onResponseReceived(data) {
    this._onResponse(data.response, data.timestamp, data.type);
    this._updateProtocolForLightrider();
    this.frameId = data.frameId;
  }
  /**
   * Handler for the CDP Network.dataReceived event. Accumulates sizes; see the
   * comment on _updateTransferSizeForLightrider for why transferSize accumulates.
   * @param {LH.Crdp.Network.DataReceivedEvent} data
   */
  onDataReceived(data) {
    this.resourceSize += data.dataLength;
    if (data.encodedDataLength !== -1) {
      this.transferSize += data.encodedDataLength;
    }
  }
  /**
   * Handler for the CDP Network.loadingFinished event.
   * @param {LH.Crdp.Network.LoadingFinishedEvent} data
   */
  onLoadingFinished(data) {
    // On some requests DevTools can send duplicate events, prefer the first one for best timing data
    if (this.finished) return;
    this.finished = true;
    this.networkEndTime = data.timestamp * 1000;
    if (data.encodedDataLength >= 0) {
      this.transferSize = data.encodedDataLength;
    }
    this._updateResponseHeadersEndTimeIfNecessary();
    this._backfillReceiveHeaderStartTiming();
    this._updateTransferSizeForLightrider();
    this._updateTimingsForLightrider();
  }
  /**
   * Handler for the CDP Network.loadingFailed event.
   * @param {LH.Crdp.Network.LoadingFailedEvent} data
   */
  onLoadingFailed(data) {
    // On some requests DevTools can send duplicate events, prefer the first one for best timing data
    if (this.finished) return;
    this.finished = true;
    this.networkEndTime = data.timestamp * 1000;
    this.failed = true;
    this.resourceType = data.type && RESOURCE_TYPES[data.type];
    this.localizedFailDescription = data.errorText;
    this._updateResponseHeadersEndTimeIfNecessary();
    this._backfillReceiveHeaderStartTiming();
    this._updateTransferSizeForLightrider();
    this._updateTimingsForLightrider();
  }
  /**
   * Handler for the CDP Network.resourceChangedPriority event.
   * @param {LH.Crdp.Network.ResourceChangedPriorityEvent} data
   */
  onResourceChangedPriority(data) {
    this.priority = data.newPriority;
  }
  /**
   * Called with the requestWillBeSent event of the request that this one
   * redirected to; this record is finalized using the redirectResponse payload.
   * @param {LH.Crdp.Network.RequestWillBeSentEvent} data
   */
  onRedirectResponse(data) {
    if (!data.redirectResponse) throw new Error('Missing redirectResponse data');
    this._onResponse(data.redirectResponse, data.timestamp, data.type);
    this.resourceType = undefined;
    this.finished = true;
    this.networkEndTime = data.timestamp * 1000;
    this._updateResponseHeadersEndTimeIfNecessary();
    this._backfillReceiveHeaderStartTiming();
  }
  /**
   * @param {string|undefined} sessionId
   */
  setSession(sessionId) {
    this.sessionId = sessionId;
  }
  // True when the request was issued from an out-of-process iframe target.
  get isOutOfProcessIframe() {
    return this.sessionTargetType === 'iframe';
  }
  /**
   * Shared bookkeeping for both responseReceived and redirect responses.
   * @param {LH.Crdp.Network.Response} response
   * @param {number} timestamp in seconds
   * @param {LH.Crdp.Network.ResponseReceivedEvent['type']=} resourceType
   */
  _onResponse(response, timestamp, resourceType) {
    this.url = response.url;
    this.connectionId = String(response.connectionId);
    this.connectionReused = response.connectionReused;
    if (response.protocol) this.protocol = response.protocol;
    // This is updated in _recomputeTimesWithResourceTiming, if timings are present.
    this.responseHeadersEndTime = timestamp * 1000;
    this.transferSize = response.encodedDataLength;
    if (typeof response.fromDiskCache === 'boolean') this.fromDiskCache = response.fromDiskCache;
    if (typeof response.fromPrefetchCache === 'boolean') {
      this.fromPrefetchCache = response.fromPrefetchCache;
    }
    this.statusCode = response.status;
    this.timing = response.timing;
    if (resourceType) this.resourceType = RESOURCE_TYPES[resourceType];
    this.mimeType = response.mimeType;
    this.responseHeadersText = response.headersText || '';
    this.responseHeaders = NetworkRequest._headersDictToHeadersArray(response.headers);
    this.fetchedViaServiceWorker = !!response.fromServiceWorker;
    // Requests served from the memory cache have no meaningful netstack timing.
    if (this.fromMemoryCache) this.timing = undefined;
    if (this.timing) this._recomputeTimesWithResourceTiming(this.timing);
  }
  /**
   * Resolve differences between conflicting timing signals. Based on the property setters in DevTools.
   * @see https://github.com/ChromeDevTools/devtools-frontend/blob/56a99365197b85c24b732ac92b0ac70feed80179/front_end/sdk/NetworkRequest.js#L485-L502
   * @param {LH.Crdp.Network.ResourceTiming} timing
   */
  _recomputeTimesWithResourceTiming(timing) {
    // Don't recompute times if the data is invalid. RequestTime should always be a thread timestamp.
    // If we don't have receiveHeadersEnd, we really don't have more accurate data.
    if (timing.requestTime === 0 || timing.receiveHeadersEnd === -1) return;
    // Take networkRequestTime and responseHeadersEndTime from timing data for better accuracy.
    // Before this, networkRequestTime and responseHeadersEndTime were set to bogus values based on
    // CDP event timestamps, though they should be somewhat close to the network timings.
    // Note: requests served from cache never run this function, so they use the "bogus" values.
    // Timing's requestTime is a baseline in seconds, rest of the numbers there are ticks in millis.
    // See https://raw.githubusercontent.com/GoogleChrome/lighthouse/main/docs/Network-Timings.svg
    this.networkRequestTime = timing.requestTime * 1000;
    const headersReceivedTime = this.networkRequestTime + timing.receiveHeadersEnd;
    // This was set in `_onResponse` as that event's timestamp.
    const responseTimestamp = this.responseHeadersEndTime;
    // Update this.responseHeadersEndTime. All timing values from the netstack (timing) are well-ordered, and
    // so are the timestamps from CDP (which this.responseHeadersEndTime belongs to). It shouldn't be possible
    // that this timing from the netstack is greater than the onResponse timestamp, but just to ensure proper order
    // is maintained we bound the new timing by the network request time and the response timestamp.
    this.responseHeadersEndTime = headersReceivedTime;
    this.responseHeadersEndTime = Math.min(this.responseHeadersEndTime, responseTimestamp);
    this.responseHeadersEndTime = Math.max(this.responseHeadersEndTime, this.networkRequestTime);
    // We're only at responseReceived (_onResponse) at this point.
    // This networkEndTime may be redefined again after onLoading is done.
    this.networkEndTime = Math.max(this.networkEndTime, this.responseHeadersEndTime);
  }
  /**
   * Update responseHeadersEndTime to the networkEndTime if networkEndTime is earlier.
   * A response can't be received after the entire request finished.
   */
  _updateResponseHeadersEndTimeIfNecessary() {
    this.responseHeadersEndTime = Math.min(this.networkEndTime, this.responseHeadersEndTime);
  }
  /**
   * LR loses transfer size information, but passes it in the 'X-TotalFetchedSize' header.
   * 'X-TotalFetchedSize' is the canonical transfer size in LR. Nothing should supersede it.
   *
   * The total length of the encoded data is spread out among multiple events. The sum of the
   * values in onResponseReceived and all the onDataReceived events typically equals the value
   * seen on the onLoadingFinished event. In <1% of cases we see the values differ. As we process
   * onResponseReceived and onDataReceived we accumulate the total encodedDataLength. When we
   * process onLoadingFinished, we override the accumulated total. We do this so that if the
   * request is aborted or fails, we still get a value via the accumulation.
   *
   * In Lightrider, due to instrumentation limitations, our values for encodedDataLength are bogus
   * and not valid. However the resource's true encodedDataLength/transferSize is shared via a
   * special response header, X-TotalFetchedSize. In this situation, we read this value from
   * responseReceived, use it for the transferSize and ignore the encodedDataLength values in
   * both dataReceived and loadingFinished.
   */
  _updateTransferSizeForLightrider() {
    // Bail if we aren't in Lightrider.
    if (!global.isLightrider) return;
    const totalFetchedSize = this.responseHeaders.find(item => item.name === HEADER_FETCHED_SIZE);
    // Bail if the header was missing.
    if (!totalFetchedSize) return;
    const floatValue = parseFloat(totalFetchedSize.value);
    // Bail if the header cannot be parsed.
    if (isNaN(floatValue)) return;
    this.transferSize = floatValue;
  }
  /**
   * LR loses protocol information.
   */
  _updateProtocolForLightrider() {
    // Bail if we aren't in Lightrider.
    if (!global.isLightrider) return;
    if (this.responseHeaders.some(item => item.name === HEADER_PROTOCOL_IS_H2)) {
      this.protocol = 'h2';
    }
  }
  /**
   * TODO(compat): remove M116.
   * `timing.receiveHeadersStart` was added recently, and will be in M116. Until then,
   * set it to receiveHeadersEnd, which is close enough, to allow consumers of NetworkRequest
   * to use the new field without accounting for this backcompat.
   */
  _backfillReceiveHeaderStartTiming() {
    // Do nothing if a value is already present!
    if (!this.timing || this.timing.receiveHeadersStart !== undefined) return;
    this.timing.receiveHeadersStart = this.timing.receiveHeadersEnd;
  }
  /**
   * LR gets additional, accurate timing information from its underlying fetch infrastructure. This
   * is passed in via X-Headers similar to 'X-TotalFetchedSize'.
   */
  _updateTimingsForLightrider() {
    // Bail if we aren't in Lightrider.
    if (!global.isLightrider) return;
    // For more info on timing nomenclature: https://www.w3.org/TR/resource-timing-2/#processing-model
    // StartTime
    // |   ConnectStart
    // |   |   SSLStart  SSLEnd
    // |   |   |         |   ConnectEnd
    // |   |   |         |   |   SendStart/End     ReceiveHeadersEnd
    // |   |   |         |   |   |                 |             EndTime
    // ▼   ▼   ▼         ▼   ▼   ▼                 ▼             ▼
    // [   [TCP [  SSL ] ]       [    Request ]    [  Response ] ]
    // ▲   ▲   ▲         ▲   ▲   ▲             ▲   ▲           ▲ ▲
    // |   |   '-SSLMs---'   |   '-requestMs---'   '-responseMs-'|
    // |   '----TCPMs--------'                                   |
    // '------------------------TotalMs--------------------------'
    const totalHeader = this.responseHeaders.find(item => item.name === HEADER_TOTAL);
    // Bail if there was no totalTime.
    if (!totalHeader) return;
    const totalMs = parseInt(totalHeader.value);
    const TCPMsHeader = this.responseHeaders.find(item => item.name === HEADER_TCP);
    const SSLMsHeader = this.responseHeaders.find(item => item.name === HEADER_SSL);
    const requestMsHeader = this.responseHeaders.find(item => item.name === HEADER_REQ);
    const responseMsHeader = this.responseHeaders.find(item => item.name === HEADER_RES);
    // Make sure all times are initialized and are non-negative.
    const TCPMs = TCPMsHeader ? Math.max(0, parseInt(TCPMsHeader.value)) : 0;
    // This is missing for h2 requests, but present for h1. See b/283843975
    const SSLMs = SSLMsHeader ? Math.max(0, parseInt(SSLMsHeader.value)) : 0;
    const requestMs = requestMsHeader ? Math.max(0, parseInt(requestMsHeader.value)) : 0;
    const responseMs = responseMsHeader ? Math.max(0, parseInt(responseMsHeader.value)) : 0;
    // Bail if the timings don't add up.
    if (TCPMs + requestMs + responseMs !== totalMs) {
      return;
    }
    // Bail if SSL time is > TCP time.
    if (SSLMs > TCPMs) {
      return;
    }
    this.lrStatistics = {
      endTimeDeltaMs: this.networkEndTime - (this.networkRequestTime + totalMs),
      TCPMs: TCPMs,
      requestMs: requestMs,
      responseMs: responseMs,
    };
  }
  /**
   * Convert the requestId to backend-version by removing the `:redirect` portion
   *
   * @param {string} requestId
   * @return {string}
   */
  static getRequestIdForBackend(requestId) {
    return requestId.replace(/(:redirect)+$/, '');
  }
  /**
   * Based on DevTools NetworkManager.
   * @see https://github.com/ChromeDevTools/devtools-frontend/blob/3415ee28e86a3f4bcc2e15b652d22069938df3a6/front_end/sdk/NetworkManager.js#L285-L297
   * @param {LH.Crdp.Network.Headers} headersDict
   * @return {Array<HeaderEntry>}
   */
  static _headersDictToHeadersArray(headersDict) {
    const result = [];
    for (const name of Object.keys(headersDict)) {
      // A single CDP header value may carry multiple values joined by newlines.
      const values = headersDict[name].split('\n');
      for (let i = 0; i < values.length; ++i) {
        result.push({name: name, value: values[i]});
      }
    }
    return result;
  }
  // The full set of CDP resource types, keyed by themselves.
  static get TYPES() {
    return RESOURCE_TYPES;
  }
  /**
   * @param {NetworkRequest} record
   * @return {boolean}
   */
  static isNonNetworkRequest(record) {
    // The 'protocol' field in devtools is a string more like a `scheme`
    return UrlUtils.isNonNetworkProtocol(record.protocol) ||
      // But `protocol` can fail to be populated if the request fails, so fallback to scheme.
      UrlUtils.isNonNetworkProtocol(record.parsedURL.scheme);
  }
  /**
   * Technically there's not alignment on URLs that create "secure connections" vs "secure contexts"
   * https://github.com/GoogleChrome/lighthouse/pull/11766#discussion_r582340683
   * But for our purposes, we don't need to worry too much.
   * @param {NetworkRequest} record
   * @return {boolean}
   */
  static isSecureRequest(record) {
    return UrlUtils.isSecureScheme(record.parsedURL.scheme) ||
      UrlUtils.isSecureScheme(record.protocol) ||
      UrlUtils.isLikeLocalhost(record.parsedURL.host) ||
      NetworkRequest.isHstsRequest(record);
  }
  /**
   * Returns whether the network request was an HSTS redirect request.
   * @param {NetworkRequest} record
   * @return {boolean}
   */
  static isHstsRequest(record) {
    const destination = record.redirectDestination;
    if (!destination) return false;
    const reasonHeader = record.responseHeaders
      .find(header => header.name === 'Non-Authoritative-Reason');
    const reason = reasonHeader?.value;
    return reason === 'HSTS' && NetworkRequest.isSecureRequest(destination);
  }
  /**
   * Resource size is almost always the right one to be using because of the below:
   * `transferSize = resourceSize + headers.length`.
   * HOWEVER, there are some cases where an image is compressed again over the network and transfer size
   * is smaller (see https://github.com/GoogleChrome/lighthouse/pull/4968).
   * Use the min of the two numbers to be safe.
   * `transferSize` of cached records is 0
   * @param {NetworkRequest} networkRecord
   * @return {number}
   */
  static getResourceSizeOnNetwork(networkRecord) {
    return Math.min(networkRecord.resourceSize || 0, networkRecord.transferSize || Infinity);
  }
}
// Expose the Lightrider X-Header names as static properties so consumers can
// reference them without importing the module-level constants.
NetworkRequest.HEADER_TCP = HEADER_TCP;
NetworkRequest.HEADER_SSL = HEADER_SSL;
NetworkRequest.HEADER_REQ = HEADER_REQ;
NetworkRequest.HEADER_RES = HEADER_RES;
NetworkRequest.HEADER_TOTAL = HEADER_TOTAL;
NetworkRequest.HEADER_FETCHED_SIZE = HEADER_FETCHED_SIZE;
NetworkRequest.HEADER_PROTOCOL_IS_H2 = HEADER_PROTOCOL_IS_H2;
export {NetworkRequest};

161
node_modules/lighthouse/core/lib/page-functions.d.ts generated vendored Normal file
View File

@@ -0,0 +1,161 @@
export namespace pageFunctions {
export { wrapRuntimeEvalErrorInBrowser };
export { getElementsInDocument };
export { getOuterHTMLSnippet };
export { computeBenchmarkIndex };
export { getNodeDetails };
export { getNodePath };
export { getNodeSelector };
export { getNodeLabel };
export { isPositionFixed };
export { wrapRequestIdleCallback };
export { getBoundingClientRect };
export { truncate };
}
/**
* `typed-query-selector`'s CSS selector parser.
*/
export type ParseSelector<T extends string> = import('typed-query-selector/parser').ParseSelector<T>;
/**
* @fileoverview
* Helper functions that are passed by `toString()` by Driver to be evaluated in target page.
*
* Every function in this module only runs in the browser, so it is ignored from
* the c8 code coverage tool. See c8.sh
*
* Important: this module should only be imported like this:
* const pageFunctions = require('...');
* Never like this:
* const {justWhatINeed} = require('...');
* Otherwise, minification will mangle the variable names and break usage.
*/
/**
* `typed-query-selector`'s CSS selector parser.
* @template {string} T
* @typedef {import('typed-query-selector/parser').ParseSelector<T>} ParseSelector
*/
/**
* The `exceptionDetails` provided by the debugger protocol does not contain the useful
* information such as name, message, and stack trace of the error when it's wrapped in a
* promise. Instead, map to a successful object that contains this information.
* @param {string|Error} [err] The error to convert
* @return {{__failedInBrowser: boolean, name: string, message: string, stack: string|undefined}}
*/
declare function wrapRuntimeEvalErrorInBrowser(err?: string | Error | undefined): {
__failedInBrowser: boolean;
name: string;
message: string;
stack: string | undefined;
};
/**
* @template {string} T
* @param {T} selector Optional simple CSS selector to filter nodes on.
* Combinators are not supported.
* @return {Array<ParseSelector<T>>}
*/
declare function getElementsInDocument<T extends string>(selector: T): import("typed-query-selector/parser").ParseSelector<T, Element>[];
/**
* Gets the opening tag text of the given node.
* @param {Element|ShadowRoot} element
* @param {Array<string>=} ignoreAttrs An optional array of attribute tags to not include in the HTML snippet.
* @return {string}
*/
declare function getOuterHTMLSnippet(element: Element | ShadowRoot, ignoreAttrs?: Array<string> | undefined, snippetCharacterLimit?: number): string;
declare namespace getOuterHTMLSnippet {
function toString(): string;
}
/**
* Computes a memory/CPU performance benchmark index to determine rough device class.
* @see https://github.com/GoogleChrome/lighthouse/issues/9085
* @see https://docs.google.com/spreadsheets/d/1E0gZwKsxegudkjJl8Fki_sOwHKpqgXwt8aBAfuUaB8A/edit?usp=sharing
*
* Historically (until LH 6.3), this benchmark created a string of length 100,000 in a loop, and returned
* the number of times per second the string can be created.
*
* Changes to v8 in 8.6.106 changed this number and also made Chrome more variable w.r.t GC interrupts.
* This benchmark now is a hybrid of a similar GC-heavy approach to the original benchmark and an array
* copy benchmark.
*
* As of Chrome m86...
*
* - 1000+ is a desktop-class device, Core i3 PC, iPhone X, etc
* - 800+ is a high-end Android phone, Galaxy S8, low-end Chromebook, etc
* - 125+ is a mid-tier Android phone, Moto G4, etc
* - <125 is a budget Android phone, Alcatel Ideal, Galaxy J2, etc
* @return {number}
*/
declare function computeBenchmarkIndex(): number;
/**
* @param {Element|ShadowRoot} element
* @return {LH.Artifacts.NodeDetails}
*/
declare function getNodeDetails(element: Element | ShadowRoot): LH.Artifacts.NodeDetails;
declare namespace getNodeDetails {
function toString(): string;
}
/**
* Adapted from DevTools' SDK.DOMNode.prototype.path
* https://github.com/ChromeDevTools/devtools-frontend/blob/4fff931bb/front_end/sdk/DOMModel.js#L625-L647
* Backend: https://source.chromium.org/search?q=f:node.cc%20symbol:PrintNodePathTo&sq=&ss=chromium%2Fchromium%2Fsrc
*
* TODO: DevTools nodePath handling doesn't support iframes, but probably could. https://crbug.com/1127635
* @param {Node} node
* @return {string}
*/
declare function getNodePath(node: Node): string;
/**
* @param {Element} element
* @return {string}
*
* Note: CSS Selectors having no standard mechanism to describe shadow DOM piercing. So we can't.
*
* If the node resides within shadow DOM, the selector *only* starts from the shadow root.
* For example, consider this img within a <section> within a shadow root..
* - DOM: <html> <body> <div> #shadow-root <section> <img/>
* - nodePath: 0,HTML,1,BODY,1,DIV,a,#document-fragment,0,SECTION,0,IMG
* - nodeSelector: section > img
*/
declare function getNodeSelector(element: Element): string;
/**
* Generate a human-readable label for the given element, based on end-user facing
* strings like the innerText or alt attribute.
* Returns label string or null if no useful label is found.
* @param {Element} element
* @return {string | null}
*/
declare function getNodeLabel(element: Element): string | null;
declare namespace getNodeLabel {
function toString(): string;
}
/**
* This function checks if an element or an ancestor of an element is `position:fixed`.
* In addition we ensure that the element is capable of behaving as a `position:fixed`
* element, checking that it lives within a scrollable ancestor.
* @param {HTMLElement} element
* @return {boolean}
*/
declare function isPositionFixed(element: HTMLElement): boolean;
/**
* RequestIdleCallback shim that calculates the remaining deadline time in order to avoid a potential lighthouse
* penalty for tests run with simulated throttling. Reduces the deadline time to (50 - safetyAllowance) / cpuSlowdownMultiplier to
* ensure a long task is very unlikely if using the API correctly.
* @param {number} cpuSlowdownMultiplier
*/
declare function wrapRequestIdleCallback(cpuSlowdownMultiplier: number): void;
/**
* @param {Element} element
* @return {LH.Artifacts.Rect}
*/
declare function getBoundingClientRect(element: Element): LH.Artifacts.Rect;
/**
*
* @param {string} string
* @param {number} characterLimit
* @return {string}
*/
declare function truncate(string: string, characterLimit: number): string;
declare namespace truncate {
function toString(): string;
}
export {};
//# sourceMappingURL=page-functions.d.ts.map

563
node_modules/lighthouse/core/lib/page-functions.js generated vendored Normal file
View File

@@ -0,0 +1,563 @@
/**
* @license Copyright 2018 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import {Util} from '../../shared/util.js';
/**
* @fileoverview
* Helper functions that are passed by `toString()` by Driver to be evaluated in target page.
*
* Every function in this module only runs in the browser, so it is ignored from
* the c8 code coverage tool. See c8.sh
*
* Important: this module should only be imported like this:
* const pageFunctions = require('...');
* Never like this:
* const {justWhatINeed} = require('...');
* Otherwise, minification will mangle the variable names and break usage.
*/
/**
* `typed-query-selector`'s CSS selector parser.
* @template {string} T
* @typedef {import('typed-query-selector/parser').ParseSelector<T>} ParseSelector
*/
/* global window document Node ShadowRoot HTMLElement */
/**
* The `exceptionDetails` provided by the debugger protocol does not contain the useful
* information such as name, message, and stack trace of the error when it's wrapped in a
* promise. Instead, map to a successful object that contains this information.
* @param {string|Error} [err] The error to convert
* @return {{__failedInBrowser: boolean, name: string, message: string, stack: string|undefined}}
*/
/**
 * The `exceptionDetails` provided by the debugger protocol does not contain the useful
 * information such as name, message, and stack trace of the error when it's wrapped in a
 * promise. Instead, map to a successful plain object that carries this information.
 * @param {string|Error} [err] The error (or error string) to convert
 * @return {{__failedInBrowser: boolean, name: string, message: string, stack: string|undefined}}
 */
function wrapRuntimeEvalErrorInBrowser(err) {
  // Normalize: strings and falsy values get wrapped into a real Error first.
  const error = (err && typeof err !== 'string') ? err : new Error(err);
  return {
    __failedInBrowser: true,
    name: error.name || 'Error',
    message: error.message || 'unknown error',
    stack: error.stack,
  };
}
/**
* @template {string} T
* @param {T} selector Optional simple CSS selector to filter nodes on.
* Combinators are not supported.
* @return {Array<ParseSelector<T>>}
*/
/**
 * Collects all elements in the document matching `selector`, piercing into
 * open shadow roots: each shadow subtree is visited depth-first immediately
 * after its host, so results are in document/traversal order.
 * @template {string} T
 * @param {T} selector Optional simple CSS selector to filter nodes on.
 *         Combinators are not supported.
 * @return {Array<ParseSelector<T>>}
 */
function getElementsInDocument(selector) {
  // Use the pristine copy of Element.prototype.matches stashed on
  // window.__ElementMatches (if present) in case the page overrode it.
  const realMatchesFn = window.__ElementMatches || window.Element.prototype.matches;
  /** @type {Array<ParseSelector<T>>} */
  const results = [];
  /** @param {NodeListOf<Element>} nodes */
  const _findAllElements = nodes => {
    for (const el of nodes) {
      // An empty/missing selector matches everything.
      if (!selector || realMatchesFn.call(el, selector)) {
        /** @type {ParseSelector<T>} */
        // @ts-expect-error - el is verified as matching above, tsc just can't verify it through the .call().
        const matchedEl = el;
        results.push(matchedEl);
      }
      // If the element has a shadow root, dig deeper.
      if (el.shadowRoot) {
        _findAllElements(el.shadowRoot.querySelectorAll('*'));
      }
    }
  };
  _findAllElements(document.querySelectorAll('*'));
  return results;
}
/**
 * Gets the opening tag text of the given node, with overly long attribute values
 * elided and the whole snippet capped near `snippetCharacterLimit` characters.
 * @param {Element|ShadowRoot} element
 * @param {Array<string>=} ignoreAttrs An optional array of attribute tags to not include in the HTML snippet.
 * @param {number=} snippetCharacterLimit Approximate cap on total attribute characters; once
 * exceeded, remaining attributes are dropped and the snippet ends with ' …>'.
 * @return {string}
 */
function getOuterHTMLSnippet(element, ignoreAttrs = [], snippetCharacterLimit = 500) {
// Per-attribute-value cap, applied via truncate() below.
const ATTRIBUTE_CHAR_LIMIT = 75;
// Autofill information that is injected into the snippet via AutofillShowTypePredictions
// TODO(paulirish): Don't clean title attribute from all elements if it's unnecessary
const autoFillIgnoreAttrs = ['autofill-information', 'autofill-prediction', 'title'];
// ShadowRoots are sometimes passed in; use their hosts' outerHTML.
if (element instanceof ShadowRoot) {
element = element.host;
}
try {
/** @type {Element} */
// @ts-expect-error - clone will be same type as element - see https://github.com/microsoft/TypeScript/issues/283
const clone = element.cloneNode();
// Prevent any potential side-effects by appending to a template element.
// See https://github.com/GoogleChrome/lighthouse/issues/11465
const template = element.ownerDocument.createElement('template');
template.content.append(clone);
// Strip both caller-supplied and autofill-injected attributes from the clone.
ignoreAttrs.concat(autoFillIgnoreAttrs).forEach(attribute =>{
clone.removeAttribute(attribute);
});
let charCount = 0;
for (const attributeName of clone.getAttributeNames()) {
// Over budget: drop the remaining attributes entirely.
if (charCount > snippetCharacterLimit) {
clone.removeAttribute(attributeName);
continue;
}
let attributeValue = clone.getAttribute(attributeName);
if (attributeValue === null) continue; // Can't happen.
// `dirty` tracks whether we must write the (possibly rewritten) value back.
let dirty = false;
// Replace img.src with img.currentSrc. Same for audio and video.
if (attributeName === 'src' && 'currentSrc' in element) {
const elementWithSrc = /** @type {HTMLImageElement|HTMLMediaElement} */ (element);
const currentSrc = elementWithSrc.currentSrc;
// Only replace if the two URLs do not resolve to the same location.
const documentHref = elementWithSrc.ownerDocument.location.href;
if (new URL(attributeValue, documentHref).toString() !== currentSrc) {
attributeValue = currentSrc;
dirty = true;
}
}
// Elide attribute value if too long.
const truncatedString = truncate(attributeValue, ATTRIBUTE_CHAR_LIMIT);
if (truncatedString !== attributeValue) dirty = true;
attributeValue = truncatedString;
if (dirty) {
// Style attributes can be blocked by the CSP if they are set via `setAttribute`.
// If we are trying to set the style attribute, use `el.style.cssText` instead.
// https://github.com/GoogleChrome/lighthouse/issues/13630
if (attributeName === 'style') {
const elementWithStyle = /** @type {HTMLElement} */ (clone);
elementWithStyle.style.cssText = attributeValue;
} else {
clone.setAttribute(attributeName, attributeValue);
}
}
charCount += attributeName.length + attributeValue.length;
}
// Non-greedy match up to the first '>' captures just the opening tag.
const reOpeningTag = /^[\s\S]*?>/;
const [match] = clone.outerHTML.match(reOpeningTag) || [];
// If attributes were dropped above, signal the elision with ' …>'.
if (match && charCount > snippetCharacterLimit) {
return match.slice(0, match.length - 1) + ' …>';
}
return match || '';
} catch (_) {
// As a last resort, fall back to localName.
return `<${element.localName}>`;
}
}
/**
 * Computes a memory/CPU performance benchmark index to determine rough device class.
 * @see https://github.com/GoogleChrome/lighthouse/issues/9085
 * @see https://docs.google.com/spreadsheets/d/1E0gZwKsxegudkjJl8Fki_sOwHKpqgXwt8aBAfuUaB8A/edit?usp=sharing
 *
 * Historically (until LH 6.3), this benchmark created a string of length 100,000 in a loop, and returned
 * the number of times per second the string can be created.
 *
 * Changes to v8 in 8.6.106 changed this number and also made Chrome more variable w.r.t GC interupts.
 * This benchmark now is a hybrid of a similar GC-heavy approach to the original benchmark and an array
 * copy benchmark.
 *
 * As of Chrome m86...
 *
 *  - 1000+ is a desktop-class device, Core i3 PC, iPhone X, etc
 *  - 800+ is a high-end Android phone, Galaxy S8, low-end Chromebook, etc
 *  - 125+ is a mid-tier Android phone, Moto G4, etc
 *  - <125 is a budget Android phone, Alcatel Ideal, Galaxy J2, etc
 *
 * Note: each sub-benchmark below runs for ~500ms of wall time, so calling this
 * function blocks for roughly one second.
 * @return {number}
 */
function computeBenchmarkIndex() {
/**
 * The GC-heavy benchmark that creates a string of length 10000 in a loop.
 * The returned index is the number of times per second the string can be created divided by 10.
 * The division by 10 is to keep similar magnitudes to an earlier version of BenchmarkIndex that
 * used a string length of 100000 instead of 10000.
 */
function benchmarkIndexGC() {
const start = Date.now();
let iterations = 0;
while (Date.now() - start < 500) {
let s = '';
for (let j = 0; j < 10000; j++) s += 'a';
// The check keeps `s` observably live so the engine can't elide the loop.
if (s.length === 1) throw new Error('will never happen, but prevents compiler optimizations');
iterations++;
}
const durationInSeconds = (Date.now() - start) / 1000;
return Math.round(iterations / 10 / durationInSeconds);
}
/**
 * The non-GC-dependent benchmark that copies integers back and forth between two arrays of length 100000.
 * The returned index is the number of times per second a copy can be made, divided by 10.
 * The division by 10 is to keep similar magnitudes to the GC-dependent version.
 */
function benchmarkIndexNoGC() {
const arrA = [];
const arrB = [];
for (let i = 0; i < 100000; i++) arrA[i] = arrB[i] = i;
const start = Date.now();
let iterations = 0;
// Some Intel CPUs have a performance cliff due to unlucky JCC instruction alignment.
// Two possible fixes: call Date.now less often, or manually unroll the inner loop a bit.
// We'll call Date.now less and only check the duration on every 10th iteration for simplicity.
// See https://bugs.chromium.org/p/v8/issues/detail?id=10954#c1.
while (iterations % 10 !== 0 || Date.now() - start < 500) {
// Alternate copy direction each iteration so neither array goes stale.
const src = iterations % 2 === 0 ? arrA : arrB;
const tgt = iterations % 2 === 0 ? arrB : arrA;
for (let j = 0; j < src.length; j++) tgt[j] = src[j];
iterations++;
}
const durationInSeconds = (Date.now() - start) / 1000;
return Math.round(iterations / 10 / durationInSeconds);
}
// The final BenchmarkIndex is a simple average of the two components.
return (benchmarkIndexGC() + benchmarkIndexNoGC()) / 2;
}
/**
 * Adapted from DevTools' SDK.DOMNode.prototype.path
 *   https://github.com/ChromeDevTools/devtools-frontend/blob/4fff931bb/front_end/sdk/DOMModel.js#L625-L647
 * Backend: https://source.chromium.org/search?q=f:node.cc%20symbol:PrintNodePathTo&sq=&ss=chromium%2Fchromium%2Fsrc
 *
 * Produces a comma-separated path of (sibling index, node name) pairs from the
 * root down to `node`, e.g. '0,HTML,1,BODY,0,DIV'.
 *
 * TODO: DevTools nodePath handling doesn't support iframes, but probably could. https://crbug.com/1127635
 * @param {Node} node
 * @return {string}
 */
function getNodePath(node) {
// For our purposes, there's no worthwhile difference between shadow root and document fragment
// We can consider them entirely synonymous.
/** @param {Node} node @return {node is ShadowRoot} */
const isShadowRoot = node => node.nodeType === Node.DOCUMENT_FRAGMENT_NODE;
// Shadow roots have no parentNode; hop to their host element instead.
/** @param {Node} node */
const getNodeParent = node => isShadowRoot(node) ? node.host : node.parentNode;
// Index of `node` among its siblings, not counting whitespace-only text nodes.
/** @param {Node} node @return {number|'a'} */
function getNodeIndex(node) {
if (isShadowRoot(node)) {
// User-agent shadow roots get 'u'. Non-UA shadow roots get 'a'.
return 'a';
}
let index = 0;
let prevNode;
while (prevNode = node.previousSibling) { // eslint-disable-line no-cond-assign
node = prevNode;
// skip empty text nodes
if (node.nodeType === Node.TEXT_NODE && (node.nodeValue || '').trim().length === 0) continue;
index++;
}
return index;
}
/** @type {Node|null} */
let currentNode = node;
const path = [];
// Walk up to the root, recording [index, name] for each ancestor (root itself excluded).
while (currentNode && getNodeParent(currentNode)) {
const index = getNodeIndex(currentNode);
path.push([index, currentNode.nodeName]);
currentNode = getNodeParent(currentNode);
}
path.reverse();
// Array#join flattens the nested [index, name] pairs into 'i,NAME,i,NAME,…'.
return path.join(',');
}
/**
 * Builds a short descendant selector for `element`: up to four ' > '-joined parts,
 * each 'tag', 'tag#id', or 'tag.firstClass', stopping at (and excluding) <html>.
 * @param {Element} element
 * @return {string}
 *
 * Note: CSS Selectors having no standard mechanism to describe shadow DOM piercing. So we can't.
 *
 * If the node resides within shadow DOM, the selector *only* starts from the shadow root.
 * For example, consider this img within a <section> within a shadow root..
 *  - DOM: <html> <body> <div> #shadow-root <section> <img/>
 *  - nodePath: 0,HTML,1,BODY,1,DIV,a,#document-fragment,0,SECTION,0,IMG
 *  - nodeSelector: section > img
 */
function getNodeSelector(element) {
/**
 * One selector segment: tag name plus #id if present, else the first class.
 * @param {Element} element
 */
function getSelectorPart(element) {
let part = element.tagName.toLowerCase();
if (element.id) {
part += '#' + element.id;
} else if (element.classList.length > 0) {
part += '.' + element.classList[0];
}
return part;
}
const parts = [];
// Collect at most 4 parts, walking up through parents until <html> or the top.
while (parts.length < 4) {
parts.unshift(getSelectorPart(element));
if (!element.parentElement) {
break;
}
element = element.parentElement;
if (element.tagName === 'HTML') {
break;
}
}
return parts.join(' > ');
}
/**
 * This function checks if an element or an ancestor of an element is `position:fixed`.
 * In addition we ensure that the element is capable of behaving as a `position:fixed`
 * element, checking that it lives within a scrollable ancestor.
 * Also treats `position:sticky` the same as fixed.
 * @param {HTMLElement} element
 * @return {boolean}
 */
function isPositionFixed(element) {
/**
 * Reads a style property, preferring the inline style over computed style.
 * @param {HTMLElement} element
 * @param {'overflowY'|'position'} attr
 * @return {string}
 */
function getStyleAttrValue(element, attr) {
// Check style before computedStyle as computedStyle is expensive.
return element.style[attr] || window.getComputedStyle(element)[attr];
}
// Position fixed/sticky has no effect in case when document does not scroll.
const htmlEl = document.querySelector('html');
if (!htmlEl) throw new Error('html element not found in document');
if (htmlEl.scrollHeight <= htmlEl.clientHeight ||
!['scroll', 'auto', 'visible'].includes(getStyleAttrValue(htmlEl, 'overflowY'))) {
return false;
}
// Walk up from the element looking for any fixed/sticky ancestor (or itself).
/** @type {HTMLElement | null} */
let currentEl = element;
while (currentEl) {
const position = getStyleAttrValue(currentEl, 'position');
if ((position === 'fixed' || position === 'sticky')) {
return true;
}
currentEl = currentEl.parentElement;
}
return false;
}
/**
 * Generate a human-readable label for the given element, based on end-user facing
 * strings like the innerText or alt attribute.
 * Returns label string (truncated to 80 chars) or null if no useful label is found.
 * @param {Element} element
 * @return {string | null}
 */
function getNodeLabel(element) {
const tagName = element.tagName.toLowerCase();
// html and body content is too broad to be useful, since they contain all page content
if (tagName !== 'html' && tagName !== 'body') {
// Prefer visible text, then alt, then aria-label.
const nodeLabel = element instanceof HTMLElement && element.innerText ||
element.getAttribute('alt') || element.getAttribute('aria-label');
if (nodeLabel) {
return truncate(nodeLabel, 80);
} else {
// If no useful label was found then try to get one from a child.
// E.g. if an a tag contains an image but no text we want the image alt/aria-label attribute.
const nodeToUseForLabel = element.querySelector('[alt], [aria-label]');
if (nodeToUseForLabel) {
return getNodeLabel(nodeToUseForLabel);
}
}
}
return null;
}
/**
 * Returns the element's bounding client rect as a plain serializable object
 * with integer-rounded values.
 * @param {Element} element
 * @return {LH.Artifacts.Rect}
 */
function getBoundingClientRect(element) {
// Prefer a saved reference to the original getBoundingClientRect
// (window.__HTMLElementBoundingClientRect) to dodge page monkey-patching.
const realBoundingClientRect = window.__HTMLElementBoundingClientRect ||
window.HTMLElement.prototype.getBoundingClientRect;
// The protocol does not serialize getters, so extract the values explicitly.
const rect = realBoundingClientRect.call(element);
return {
top: Math.round(rect.top),
bottom: Math.round(rect.bottom),
left: Math.round(rect.left),
right: Math.round(rect.right),
width: Math.round(rect.width),
height: Math.round(rect.height),
};
}
/**
 * RequestIdleCallback shim that calculates the remaining deadline time in order to avoid a potential lighthouse
 * penalty for tests run with simulated throttling. Reduces the deadline time to (50 - safetyAllowance) / cpuSlowdownMultiplier to
 * ensure a long task is very unlikely if using the API correctly.
 * Replaces window.requestIdleCallback in place; the wrapper (and its patched
 * timeRemaining) masquerade as native code via toString overrides.
 * @param {number} cpuSlowdownMultiplier
 */
function wrapRequestIdleCallback(cpuSlowdownMultiplier) {
const safetyAllowanceMs = 10;
// Scale the usual 50ms idle budget down by the simulated CPU slowdown.
const maxExecutionTimeMs = Math.floor((50 - safetyAllowanceMs) / cpuSlowdownMultiplier);
const nativeRequestIdleCallback = window.requestIdleCallback;
window.requestIdleCallback = (cb, options) => {
/**
 * @type {Parameters<typeof window['requestIdleCallback']>[0]}
 */
const cbWrap = (deadline) => {
const start = Date.now();
// @ts-expect-error - save original on non-standard property.
deadline.__timeRemaining = deadline.timeRemaining;
// Patched timeRemaining: never report more budget than maxExecutionTimeMs
// minus the time already spent in this callback.
deadline.timeRemaining = () => {
// @ts-expect-error - access non-standard property.
const timeRemaining = deadline.__timeRemaining();
return Math.min(timeRemaining, Math.max(0, maxExecutionTimeMs - (Date.now() - start))
);
};
deadline.timeRemaining.toString = () => {
return 'function timeRemaining() { [native code] }';
};
cb(deadline);
};
return nativeRequestIdleCallback(cbWrap, options);
};
window.requestIdleCallback.toString = () => {
return 'function requestIdleCallback() { [native code] }';
};
}
/**
 * Collects the standard set of identifying details for an element: a stable
 * unique id, devtools node path, short selector, bounding rect, outerHTML
 * snippet, and a human-readable label.
 * @param {Element|ShadowRoot} element
 * @return {LH.Artifacts.NodeDetails}
 */
function getNodeDetails(element) {
// This bookkeeping is for the FullPageScreenshot gatherer.
if (!window.__lighthouseNodesDontTouchOrAllVarianceGoesAway) {
window.__lighthouseNodesDontTouchOrAllVarianceGoesAway = new Map();
}
// Normalize shadow roots to their host element.
element = element instanceof ShadowRoot ? element.host : element;
const selector = getNodeSelector(element);
// Create an id that will be unique across all execution contexts.
//
// Made up of 3 components:
//   - prefix unique to specific execution context
//   - nth unique node seen by this function for this execution context
//   - node tagName
//
// Every page load only has up to two associated contexts - the page context
// (denoted as `__lighthouseExecutionContextUniqueIdentifier` being undefined)
// and the isolated context. The id must be unique to distinguish gatherers running
// on different page loads that identify the same logical element, for purposes
// of the full page screenshot node lookup; hence the prefix.
//
// The id could be any arbitrary string, the exact value is not important.
// For example, tagName is added only because it might be useful for debugging.
// But execution id and map size are added to ensure uniqueness.
// We also dedupe this id so that details collected for an element within the same
// pass and execution context will share the same id. Not technically important, but
// cuts down on some duplication.
let lhId = window.__lighthouseNodesDontTouchOrAllVarianceGoesAway.get(element);
if (!lhId) {
lhId = [
window.__lighthouseExecutionContextUniqueIdentifier === undefined ?
'page' :
window.__lighthouseExecutionContextUniqueIdentifier,
window.__lighthouseNodesDontTouchOrAllVarianceGoesAway.size,
element.tagName,
].join('-');
window.__lighthouseNodesDontTouchOrAllVarianceGoesAway.set(element, lhId);
}
const details = {
lhId,
devtoolsNodePath: getNodePath(element),
selector: selector,
boundingRect: getBoundingClientRect(element),
snippet: getOuterHTMLSnippet(element),
// Fall back to the selector when no user-facing label exists.
nodeLabel: getNodeLabel(element) || selector,
};
return details;
}
/**
 * Truncates `string` to at most `characterLimit` characters.
 * Delegates to Util.truncate; the toString override below inlines Util so the
 * serialized form still works inside the page.
 * @param {string} string
 * @param {number} characterLimit
 * @return {string}
 */
function truncate(string, characterLimit) {
return Util.truncate(string, characterLimit);
}
// These page functions are injected into the page by serializing them with
// Function.prototype.toString(), where module-scope dependencies (Util, sibling
// functions) don't exist. Each toString override below rewrites the serialized
// source so that every dependency is inlined into the emitted string.
/** @type {string} */
const truncateRawString = truncate.toString();
truncate.toString = () => `function truncate(string, characterLimit) {
const Util = { ${Util.truncate} };
return (${truncateRawString})(string, characterLimit);
}`;
/** @type {string} */
const getNodeLabelRawString = getNodeLabel.toString();
getNodeLabel.toString = () => `function getNodeLabel(element) {
${truncate};
return (${getNodeLabelRawString})(element);
}`;
/** @type {string} */
const getOuterHTMLSnippetRawString = getOuterHTMLSnippet.toString();
// eslint-disable-next-line max-len
getOuterHTMLSnippet.toString = () => `function getOuterHTMLSnippet(element, ignoreAttrs = [], snippetCharacterLimit = 500) {
${truncate};
return (${getOuterHTMLSnippetRawString})(element, ignoreAttrs, snippetCharacterLimit);
}`;
// getNodeDetails pulls in every helper it calls, using the raw (pre-override)
// strings where an override exists to avoid double-wrapping.
/** @type {string} */
const getNodeDetailsRawString = getNodeDetails.toString();
getNodeDetails.toString = () => `function getNodeDetails(element) {
${truncate};
${getNodePath};
${getNodeSelector};
${getBoundingClientRect};
${getOuterHTMLSnippetRawString};
${getNodeLabelRawString};
return (${getNodeDetailsRawString})(element);
}`;
// Public surface: functions meant to be serialized and evaluated in the page
// context (see the toString overrides above).
export const pageFunctions = {
wrapRuntimeEvalErrorInBrowser,
getElementsInDocument,
getOuterHTMLSnippet,
computeBenchmarkIndex,
getNodeDetails,
getNodePath,
getNodeSelector,
getNodeLabel,
isPositionFixed,
wrapRequestIdleCallback,
getBoundingClientRect,
truncate,
};

View File

@@ -0,0 +1,13 @@
/**
* @fileoverview Helper functions to transform an LHR into a proto-ready LHR.
*
* FIXME: This file is 100% technical debt. Our eventual goal is for the
* roundtrip JSON to match the Golden LHR 1:1.
*/
/**
* Transform an LHR into a proto-friendly, mostly-compatible LHR.
* @param {LH.Result} lhr
* @return {LH.Result}
*/
export function processForProto(lhr: LH.Result): LH.Result;
//# sourceMappingURL=proto-preprocessor.d.ts.map

135
node_modules/lighthouse/core/lib/proto-preprocessor.js generated vendored Normal file
View File

@@ -0,0 +1,135 @@
/**
* @license Copyright 2018 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import fs from 'fs';
import esMain from 'es-main';
/**
* @fileoverview Helper functions to transform an LHR into a proto-ready LHR.
*
* FIXME: This file is 100% technical debt. Our eventual goal is for the
* roundtrip JSON to match the Golden LHR 1:1.
*/
/**
 * Transform an LHR into a proto-friendly, mostly-compatible LHR.
 * Works on a deep copy — the input `lhr` is never mutated.
 * @param {LH.Result} lhr
 * @return {LH.Result}
 */
function processForProto(lhr) {
  /** @type {LH.Result} */
  const reportJson = JSON.parse(JSON.stringify(lhr));

  // Clean up the configSettings: keep only the fields present in both the
  // proto definition and the LHR. Not strictly required if the proto parser
  // ignores unknown fields, but kept for safety.
  if (reportJson.configSettings) {
    const settings = reportJson.configSettings;
    // @ts-expect-error - intentionally only a subset of settings.
    reportJson.configSettings = {
      formFactor: settings.formFactor,
      locale: settings.locale,
      onlyCategories: settings.onlyCategories,
      channel: settings.channel,
      throttling: settings.throttling,
      screenEmulation: settings.screenEmulation,
      throttlingMethod: settings.throttlingMethod,
    };
  }

  // NO_ERROR is the implicit default; drop the runtimeError entirely.
  if (reportJson.runtimeError?.code === 'NO_ERROR') {
    delete reportJson.runtimeError;
  }

  // Clean up actions that require 'audits' to exist.
  if (reportJson.audits) {
    for (const audit of Object.values(reportJson.audits)) {
      // Rewrite 'not-applicable' and 'not_applicable' scoreDisplayMode to 'notApplicable'. #6201, #6783.
      // @ts-expect-error ts properly flags this as invalid as it should not happen,
      // but remains in preprocessor to protect from proto translation errors from
      // old LHRs.
      // eslint-disable-next-line max-len
      if (audit.scoreDisplayMode === 'not-applicable' || audit.scoreDisplayMode === 'not_applicable') {
        audit.scoreDisplayMode = 'notApplicable';
      }
      // Normalize displayValue to always be a string, not an array. #6200
      if (Array.isArray(audit.displayValue)) {
        audit.displayValue = audit.displayValue.join(' | ');
      }
    }
  }

  /**
   * Recursively delete empty-string properties, since they are dropped after
   * round-tripping anyway. (Empty strings inside arrays are left alone.)
   * @param {any} obj
   */
  const removeStrings = (obj) => {
    if (Array.isArray(obj)) {
      for (const item of obj) {
        if (item && typeof item === 'object') removeStrings(item);
      }
    } else if (obj && typeof obj === 'object') {
      for (const key of Object.keys(obj)) {
        if (obj[key] === '') {
          delete obj[key];
        } else if (obj[key] && typeof obj[key] === 'object') {
          removeStrings(obj[key]);
        }
      }
    }
  };
  removeStrings(reportJson);

  return reportJson;
}
// Test if called from the CLI or as a module.
// CLI usage: node proto-preprocessor.js --in=<path-to-lhr.json> --out=<path>
if (esMain(import.meta)) {
// read in the argv for the input & output
const args = process.argv.slice(2);
let input;
let output;
if (args.length) {
// find can return undefined, so default it to '' with OR
// NOTE: '--in' is a prefix match, so it also matches '--in=...'.
input = (args.find(flag => flag.startsWith('--in')) || '').replace('--in=', '');
output = (args.find(flag => flag.startsWith('--out')) || '').replace('--out=', '');
}
// Silently does nothing unless both --in and --out were provided.
if (input && output) {
// process the file
const report = processForProto(JSON.parse(fs.readFileSync(input, 'utf-8')));
// write to output from argv
fs.writeFileSync(output, JSON.stringify(report), 'utf-8');
}
}
export {
processForProto,
};

100
node_modules/lighthouse/core/lib/rect-helpers.d.ts generated vendored Normal file
View File

@@ -0,0 +1,100 @@
/**
* @license Copyright 2018 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
/**
* @param {LH.Artifacts.Rect} rect
* @param {{x:number, y:number}} point
*/
export function rectContainsPoint(rect: LH.Artifacts.Rect, { x, y }: {
x: number;
y: number;
}): boolean;
/**
* Returns whether rect2 is contained entirely within rect1;
* @param {LH.Artifacts.Rect} rect1
* @param {LH.Artifacts.Rect} rect2
* @return {boolean}
*/
export function rectContains(rect1: LH.Artifacts.Rect, rect2: LH.Artifacts.Rect): boolean;
/**
* @param {{left:number, top:number, right:number, bottom: number}} rect
* @return {LH.Artifacts.Rect}
*/
export function addRectWidthAndHeight({ left, top, right, bottom }: {
left: number;
top: number;
right: number;
bottom: number;
}): LH.Artifacts.Rect;
/**
* @param {{x:number, y:number, width:number, height: number}} rect
* @return {LH.Artifacts.Rect}
*/
export function addRectTopAndBottom({ x, y, width, height }: {
x: number;
y: number;
width: number;
height: number;
}): LH.Artifacts.Rect;
/**
* @param {LH.Artifacts.Rect} rect1
* @param {LH.Artifacts.Rect} rect2
*/
export function getRectOverlapArea(rect1: LH.Artifacts.Rect, rect2: LH.Artifacts.Rect): number;
/**
* @param {LH.Artifacts.Rect} rect
* @param {number} centerRectSize
*/
export function getRectAtCenter(rect: LH.Artifacts.Rect, centerRectSize: number): import("../../types/lhr/audit-details").default.Rect;
/**
* @param {LH.Artifacts.Rect[]} rects
*/
export function getLargestRect(rects: LH.Artifacts.Rect[]): import("../../types/lhr/audit-details").default.Rect;
/**
* @param {LH.Artifacts.Rect} rect
*/
export function getRectArea(rect: LH.Artifacts.Rect): number;
/**
* @param {LH.Artifacts.Rect} rect
*/
export function getRectCenterPoint(rect: LH.Artifacts.Rect): {
x: number;
y: number;
};
/**
* @param {LH.Artifacts.Rect[]} rects
*/
export function getBoundingRect(rects: LH.Artifacts.Rect[]): import("../../types/lhr/audit-details").default.Rect;
/**
* Returns a bounding rect for all the passed in rects, with padded with half of
* `padding` on all sides.
* @param {LH.Artifacts.Rect[]} rects
* @param {number} padding
* @return {LH.Artifacts.Rect}
*/
export function getBoundingRectWithPadding(rects: LH.Artifacts.Rect[], padding: number): LH.Artifacts.Rect;
/**
* @param {LH.Artifacts.Rect} rectA
* @param {LH.Artifacts.Rect} rectB
* @return {boolean}
*/
export function rectsTouchOrOverlap(rectA: LH.Artifacts.Rect, rectB: LH.Artifacts.Rect): boolean;
/**
*
* @param {LH.Artifacts.Rect[]} rectListA
* @param {LH.Artifacts.Rect[]} rectListB
*/
export function allRectsContainedWithinEachOther(rectListA: LH.Artifacts.Rect[], rectListB: LH.Artifacts.Rect[]): boolean;
/**
* @param {LH.Artifacts.Rect[]} rects
* @return {LH.Artifacts.Rect[]}
*/
export function filterOutRectsContainedByOthers(rects: LH.Artifacts.Rect[]): LH.Artifacts.Rect[];
/**
* @param {LH.Artifacts.Rect[]} rects
* @return {LH.Artifacts.Rect[]}
*/
export function filterOutTinyRects(rects: LH.Artifacts.Rect[]): LH.Artifacts.Rect[];
//# sourceMappingURL=rect-helpers.d.ts.map

251
node_modules/lighthouse/core/lib/rect-helpers.js generated vendored Normal file
View File

@@ -0,0 +1,251 @@
/**
* @license Copyright 2018 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
/**
 * Whether the given point lies inside the rect (edges inclusive).
 * @param {LH.Artifacts.Rect} rect
 * @param {{x:number, y:number}} point
 */
function rectContainsPoint(rect, {x, y}) {
  const withinHorizontally = x >= rect.left && x <= rect.right;
  const withinVertically = y >= rect.top && y <= rect.bottom;
  return withinHorizontally && withinVertically;
}
/**
 * Returns whether rect2 is contained entirely within rect1 (edges inclusive).
 * @param {LH.Artifacts.Rect} rect1
 * @param {LH.Artifacts.Rect} rect2
 * @return {boolean}
 */
// We sometimes run this as a part of a gatherer script injected into the page, so prevent
// renaming the function for code coverage.
/* c8 ignore start */
function rectContains(rect1, rect2) {
  const containedVertically = rect2.top >= rect1.top && rect2.bottom <= rect1.bottom;
  const containedHorizontally = rect2.left >= rect1.left && rect2.right <= rect1.right;
  return containedVertically && containedHorizontally;
}
/* c8 ignore stop */
/**
 * Drops rects whose width or height is 1px or less.
 * @param {LH.Artifacts.Rect[]} rects
 * @return {LH.Artifacts.Rect[]}
 */
function filterOutTinyRects(rects) {
  const isBigEnough = (/** @type {LH.Artifacts.Rect} */ rect) =>
    rect.width > 1 && rect.height > 1;
  return rects.filter(isBigEnough);
}
/**
 * Removes every rect that is fully contained by some other surviving rect.
 * A rect removed earlier no longer counts as a container for later rects.
 * @param {LH.Artifacts.Rect[]} rects
 * @return {LH.Artifacts.Rect[]}
 */
function filterOutRectsContainedByOthers(rects) {
  const keep = new Set(rects);
  for (const candidate of rects) {
    // First still-kept rect (other than the candidate itself) that contains it.
    const container = rects.find(other =>
      other !== candidate && keep.has(other) && rectContains(other, candidate));
    if (container) keep.delete(candidate);
  }
  return [...keep];
}
/**
 * Center point of a rect.
 * @param {LH.Artifacts.Rect} rect
 */
/* c8 ignore start */
function getRectCenterPoint(rect) {
  const x = rect.left + rect.width / 2;
  const y = rect.top + rect.height / 2;
  return {x, y};
}
/* c8 ignore stop */
/**
 * Whether two rects overlap or share at least an edge/corner.
 * @param {LH.Artifacts.Rect} rectA
 * @param {LH.Artifacts.Rect} rectB
 * @return {boolean}
 */
function rectsTouchOrOverlap(rectA, rectB) {
  // https://stackoverflow.com/questions/2752349/fast-rectangle-to-rectangle-intersection
  // Equivalent (by De Morgan) to: not separated on either axis.
  const separatedHorizontally = rectA.left > rectB.right || rectB.left > rectA.right;
  const separatedVertically = rectA.top > rectB.bottom || rectB.top > rectA.bottom;
  return !separatedHorizontally && !separatedVertically;
}
/**
 * Returns a bounding rect for all the passed in rects, padded with half of
 * `padding` on all sides.
 * @param {LH.Artifacts.Rect[]} rects
 * @param {number} padding
 * @return {LH.Artifacts.Rect}
 * @throws {Error} When `rects` is empty.
 */
function getBoundingRectWithPadding(rects, padding) {
  if (rects.length === 0) {
    throw new Error('No rects to take bounds of');
  }
  // Fold all rects into one set of extreme edges.
  const bounds = rects.reduce((acc, rect) => ({
    left: Math.min(acc.left, rect.left),
    right: Math.max(acc.right, rect.right),
    top: Math.min(acc.top, rect.top),
    bottom: Math.max(acc.bottom, rect.bottom),
  }), {
    left: Number.MAX_VALUE,
    right: -Number.MAX_VALUE,
    top: Number.MAX_VALUE,
    bottom: -Number.MAX_VALUE,
  });
  // Pad each side by half of `padding`.
  const halfPadding = padding / 2;
  const left = bounds.left - halfPadding;
  const right = bounds.right + halfPadding;
  const top = bounds.top - halfPadding;
  const bottom = bounds.bottom + halfPadding;
  return {
    left,
    right,
    top,
    bottom,
    width: right - left,
    height: bottom - top,
  };
}
/**
 * Convenience wrapper: bounding rect of all `rects` with no padding.
 * Throws (via getBoundingRectWithPadding) when `rects` is empty.
 * @param {LH.Artifacts.Rect[]} rects
 */
function getBoundingRect(rects) {
return getBoundingRectWithPadding(rects, 0);
}
/**
 * Completes a rect given as edges by deriving its width and height.
 * @param {{left:number, top:number, right:number, bottom: number}} rect
 * @return {LH.Artifacts.Rect}
 */
function addRectWidthAndHeight({left, top, right, bottom}) {
  const width = right - left;
  const height = bottom - top;
  return {left, top, right, bottom, width, height};
}
/**
 * Completes a rect given as origin + size by deriving its edges.
 * @param {{x:number, y:number, width:number, height: number}} rect
 * @return {LH.Artifacts.Rect}
 */
function addRectTopAndBottom({x, y, width, height}) {
  const left = x;
  const top = y;
  return {
    left,
    top,
    right: left + width,
    bottom: top + height,
    width,
    height,
  };
}
/**
 * Area of the intersection of two rects; 0 when they don't overlap.
 * @param {LH.Artifacts.Rect} rect1
 * @param {LH.Artifacts.Rect} rect2
 */
function getRectOverlapArea(rect1, rect2) {
  // https://stackoverflow.com/a/9325084/1290545
  const yOverlap = Math.min(rect1.bottom, rect2.bottom) - Math.max(rect1.top, rect2.top);
  const xOverlap = Math.min(rect1.right, rect2.right) - Math.max(rect1.left, rect2.left);
  if (yOverlap <= 0 || xOverlap <= 0) return 0;
  return xOverlap * yOverlap;
}
/**
 * Returns a square of side `centerRectSize` centered on the center of `rect`
 * (width/height derived via addRectWidthAndHeight).
 * @param {LH.Artifacts.Rect} rect
 * @param {number} centerRectSize
 */
function getRectAtCenter(rect, centerRectSize) {
return addRectWidthAndHeight({
left: rect.left + rect.width / 2 - centerRectSize / 2,
top: rect.top + rect.height / 2 - centerRectSize / 2,
right: rect.right - rect.width / 2 + centerRectSize / 2,
bottom: rect.bottom - rect.height / 2 + centerRectSize / 2,
});
}
/**
 * Area of a rect.
 * @param {LH.Artifacts.Rect} rect
 */
/* c8 ignore start */
function getRectArea(rect) {
  const {width, height} = rect;
  return width * height;
}
/* c8 ignore stop */
/**
 * Returns the rect with the largest area; ties keep the earlier rect.
 * Returns undefined for an empty list.
 * @param {LH.Artifacts.Rect[]} rects
 */
/* c8 ignore start */
function getLargestRect(rects) {
let largestRect = rects[0];
for (const rect of rects) {
if (getRectArea(rect) > getRectArea(largestRect)) {
largestRect = rect;
}
}
return largestRect;
}
/* c8 ignore stop */
/**
 * True when, for every cross-list pair, one rect fully contains the other
 * (in either direction). Vacuously true if either list is empty.
 * @param {LH.Artifacts.Rect[]} rectListA
 * @param {LH.Artifacts.Rect[]} rectListB
 */
function allRectsContainedWithinEachOther(rectListA, rectListB) {
  return rectListA.every(rectA =>
    rectListB.every(rectB =>
      rectContains(rectA, rectB) || rectContains(rectB, rectA)));
}
export {
rectContainsPoint,
rectContains,
addRectWidthAndHeight,
addRectTopAndBottom,
getRectOverlapArea,
getRectAtCenter,
getLargestRect,
getRectArea,
getRectCenterPoint,
getBoundingRect,
getBoundingRectWithPadding,
rectsTouchOrOverlap,
allRectsContainedWithinEachOther,
filterOutRectsContainedByOthers,
filterOutTinyRects,
};

17
node_modules/lighthouse/core/lib/script-helpers.d.ts generated vendored Normal file
View File

@@ -0,0 +1,17 @@
/**
* @param {LH.Artifacts.NetworkRequest[]} networkRecords
* @param {LH.Artifacts.Script} script
* @return {LH.Artifacts.NetworkRequest|undefined}
*/
export function getRequestForScript(networkRecords: LH.Artifacts.NetworkRequest[], script: LH.Artifacts.Script): LH.Artifacts.NetworkRequest | undefined;
/**
* @license Copyright 2022 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
/**
* @param {LH.Artifacts.Script} script
* @return {boolean}
*/
export function isInline(script: LH.Artifacts.Script): boolean;
//# sourceMappingURL=script-helpers.d.ts.map

31
node_modules/lighthouse/core/lib/script-helpers.js generated vendored Normal file
View File

@@ -0,0 +1,31 @@
/**
* @license Copyright 2022 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
/**
 * Whether the script was declared inline in its document rather than fetched
 * from its own URL. Inline scripts are the only ones with a nonzero source
 * position within the containing document.
 * @param {LH.Artifacts.Script} script
 * @return {boolean}
 */
function isInline(script) {
  const {startLine, startColumn} = script;
  return Boolean(startLine || startColumn);
}
/**
 * Find the network request whose URL matches the script's URL, then walk the
 * redirect chain (if any) so the returned record is the final destination.
 * @param {LH.Artifacts.NetworkRequest[]} networkRecords
 * @param {LH.Artifacts.Script} script
 * @return {LH.Artifacts.NetworkRequest|undefined}
 */
function getRequestForScript(networkRecords, script) {
  let request = networkRecords.find(record => record.url === script.url);
  // Follow redirects until we reach the terminal request.
  while (request && request.redirectDestination) {
    request = request.redirectDestination;
  }
  return request;
}
export {
  getRequestForScript,
  isInline,
};

32
node_modules/lighthouse/core/lib/sentry.d.ts generated vendored Normal file
View File

@@ -0,0 +1,32 @@
// Declarations for sentry.js: a delegate object whose members are no-ops
// until init() swaps in the real @sentry/node implementation.
export namespace Sentry {
  export { init };
  // No-op placeholders until init() is called and sampling selects this run.
  export { noop as captureMessage };
  export { noop as captureBreadcrumb };
  export { noop as getContext };
  export const captureException: (error: Error, options: {
    level?: string | undefined;
    tags?: {
      [key: string]: any;
    } | undefined;
    extra?: {
      [key: string]: any;
    } | undefined;
  }) => Promise<void>;
  // True when this run is selected for error reporting.
  export function _shouldSample(): boolean;
}
export type Breadcrumb = import('@sentry/node').Breadcrumb;
export type NodeClient = import('@sentry/node').NodeClient;
export type NodeOptions = import('@sentry/node').NodeOptions;
export type Severity = import('@sentry/node').Severity;
/**
 * When called, replaces noops with actual Sentry implementation.
 * @param {{url: string, flags: LH.CliFlags, environmentData: NodeOptions}} opts
 */
declare function init(opts: {
  url: string;
  flags: LH.CliFlags;
  environmentData: NodeOptions;
}): Promise<void>;
declare function noop(): void;
export {};
//# sourceMappingURL=sentry.d.ts.map

143
node_modules/lighthouse/core/lib/sentry.js generated vendored Normal file
View File

@@ -0,0 +1,143 @@
/**
* @license Copyright 2017 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import log from 'lighthouse-logger';
/** @typedef {import('@sentry/node').Breadcrumb} Breadcrumb */
/** @typedef {import('@sentry/node').NodeClient} NodeClient */
/** @typedef {import('@sentry/node').NodeOptions} NodeOptions */
/** @typedef {import('@sentry/node').Severity} Severity */
// DSN identifying Lighthouse's Sentry project (present in shipped source).
const SENTRY_URL = 'https://a6bb0da87ee048cc9ae2a345fc09ab2e:63a7029f46f74265981b7e005e0f69f8@sentry.io/174697';
// Per-run chance of capturing errors (if enabled).
const SAMPLE_RATE = 0.01;
/** @type {Array<{pattern: RegExp, rate: number}>} */
const SAMPLED_ERRORS = [
  // Error code based sampling. Delete if still unused after 2019-01-01.
  // e.g.: {pattern: /No.*node with given id/, rate: 0.01},
];
// Shared do-nothing placeholder used until (and unless) init() swaps in Sentry.
const noop = () => { };
/**
 * A delegate for sentry so that environments without error reporting enabled will use
 * noop functions and environments with error reporting will call the actual Sentry methods.
 */
const sentryDelegate = {
  init,
  /** @type {(message: string, level?: Severity) => void} */
  captureMessage: noop,
  /** @type {(breadcrumb: Breadcrumb) => void} */
  captureBreadcrumb: noop,
  /** @type {() => any} */
  getContext: noop,
  /** @type {(error: Error, options: {level?: string, tags?: {[key: string]: any}, extra?: {[key: string]: any}}) => Promise<void>} */
  captureException: async () => { },
  // Returns true when this run is selected for error reporting (SAMPLE_RATE odds).
  _shouldSample() {
    return SAMPLE_RATE >= Math.random();
  },
};
/**
 * When called, replaces noops with actual Sentry implementation.
 * Resolves once Sentry is wired up (or immediately when reporting is disabled
 * or this run is not sampled); initialization failures are logged, not thrown.
 * @param {{url: string, flags: LH.CliFlags, environmentData: NodeOptions}} opts
 */
async function init(opts) {
  // If error reporting is disabled, leave the functions as a noop
  if (!opts.flags.enableErrorReporting) {
    return;
  }
  // If not selected for sampling, leave the functions as a noop.
  if (!sentryDelegate._shouldSample()) {
    return;
  }
  try {
    // Lazy-import so the dependency is only loaded for sampled runs.
    const Sentry = await import('@sentry/node');
    Sentry.init({
      ...opts.environmentData,
      dsn: SENTRY_URL,
    });
    // Context attached to every event: throttling settings plus run metadata.
    const extras = {
      ...opts.flags.throttling,
      channel: opts.flags.channel || 'cli',
      url: opts.url,
      formFactor: opts.flags.formFactor,
      throttlingMethod: opts.flags.throttlingMethod,
    };
    Sentry.setExtras(extras);
    // Have each delegate function call the corresponding sentry function by default
    sentryDelegate.captureMessage = (...args) => Sentry.captureMessage(...args);
    sentryDelegate.captureBreadcrumb = (...args) => Sentry.addBreadcrumb(...args);
    sentryDelegate.getContext = () => extras;
    // Keep a record of exceptions per audit/gatherer so we can just report once
    const sentryExceptionCache = new Map();
    // Special case captureException to return a Promise so we don't process.exit too early
    sentryDelegate.captureException = async (err, opts = {}) => {
      // Ignore if there wasn't an error
      if (!err) return;
      // Ignore expected errors
      // @ts-expect-error Non-standard property added to flag error as not needing capturing.
      if (err.expected) return;
      const tags = opts.tags || {};
      // Deduplicate: report only the first occurrence per audit/gatherer + message.
      if (tags.audit) {
        const key = `audit-${tags.audit}-${err.message}`;
        if (sentryExceptionCache.has(key)) return;
        sentryExceptionCache.set(key, true);
      }
      if (tags.gatherer) {
        const key = `gatherer-${tags.gatherer}-${err.message}`;
        if (sentryExceptionCache.has(key)) return;
        sentryExceptionCache.set(key, true);
      }
      // Sample known errors that occur at a high frequency.
      const sampledErrorMatch = SAMPLED_ERRORS.find(sample => sample.pattern.test(err.message));
      if (sampledErrorMatch && sampledErrorMatch.rate <= Math.random()) return;
      // @ts-expect-error - properties added to protocol method LighthouseErrors.
      if (err.protocolMethod) {
        // Protocol errors all share same stack trace, so add more to fingerprint
        // @ts-expect-error - properties added to protocol method LighthouseErrors.
        opts.fingerprint = ['{{ default }}', err.protocolMethod, err.protocolError];
        opts.tags = opts.tags || {};
        // @ts-expect-error - properties added to protocol method LighthouseErrors.
        opts.tags.protocolMethod = err.protocolMethod;
      }
      Sentry.withScope(scope => {
        if (opts.level) {
          // @ts-expect-error - allow any string.
          scope.setLevel(opts.level);
        }
        if (opts.tags) {
          scope.setTags(opts.tags);
        }
        if (opts.extra) {
          scope.setExtras(opts.extra);
        }
        Sentry.captureException(err);
      });
    };
  } catch (e) {
    // Best-effort: Sentry is optional, so a failed import/init only warns.
    log.warn(
      'sentry',
      'Could not load Sentry, errors will not be reported.'
    );
  }
}
export const Sentry = sentryDelegate;

16
node_modules/lighthouse/core/lib/stack-packs.d.ts generated vendored Normal file
View File

@@ -0,0 +1,16 @@
/**
 * Returns all packs that match the stacks found in the page.
 * @param {LH.Artifacts['Stacks']|undefined} pageStacks
 * @return {LH.RawIcu<Array<LH.Result.StackPack>>}
 */
export function getStackPacks(pageStacks: LH.Artifacts['Stacks'] | undefined): LH.RawIcu<Array<LH.Result.StackPack>>;
/**
 * Pairs consisting of a stack pack's ID and the set of stacks needed to be
 * detected in a page to display that pack's advice.
 * @type {Array<{packId: string, requiredStacks: Array<string>}>}
 */
export const stackPacksToInclude: Array<{
  packId: string;
  // Stack identifiers in the form '<detector>:<id>', e.g. 'js:react'.
  requiredStacks: Array<string>;
}>;
//# sourceMappingURL=stack-packs.d.ts.map

138
node_modules/lighthouse/core/lib/stack-packs.js generated vendored Normal file
View File

@@ -0,0 +1,138 @@
/**
* @license Copyright 2019 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import log from 'lighthouse-logger';
import stackPacks from 'lighthouse-stack-packs';
import * as i18n from './i18n/i18n.js';
/**
 * Pairs consisting of a stack pack's ID and the set of stacks needed to be
 * detected in a page to display that pack's advice. Order here determines the
 * display order of matched packs.
 * @type {Array<{packId: string, requiredStacks: Array<string>}>}
 */
const stackPacksToInclude = [
  {packId: 'gatsby', requiredStacks: ['js:gatsby']},
  {packId: 'wordpress', requiredStacks: ['js:wordpress']},
  {packId: 'wix', requiredStacks: ['js:wix']},
  {packId: 'wp-rocket', requiredStacks: ['js:wp-rocket']},
  {packId: 'ezoic', requiredStacks: ['js:ezoic']},
  {packId: 'drupal', requiredStacks: ['js:drupal']},
  {packId: 'amp', requiredStacks: ['js:amp']},
  {packId: 'magento', requiredStacks: ['js:magento']},
  {packId: 'octobercms', requiredStacks: ['js:octobercms']},
  {packId: 'joomla', requiredStacks: ['js:joomla']},
  {packId: 'next.js', requiredStacks: ['js:next']},
  {packId: 'nuxt', requiredStacks: ['js:nuxt']},
  {packId: 'angular', requiredStacks: ['js:@angular/core']},
  {packId: 'react', requiredStacks: ['js:react']},
];
/**
 * Returns all packs that match the stacks found in the page.
 * Unmatched detections are skipped; matched packs whose definition is missing
 * from the lighthouse-stack-packs library are skipped with a warning.
 * @param {LH.Artifacts['Stacks']|undefined} pageStacks
 * @return {LH.RawIcu<Array<LH.Result.StackPack>>}
 */
function getStackPacks(pageStacks) {
  if (!pageStacks) return [];
  /** @type {LH.RawIcu<Array<LH.Result.StackPack>>} */
  const packs = [];
  for (const pageStack of pageStacks) {
    // A detection matches a pack when its '<detector>:<id>' pair is required by it.
    const stackPackToIncl = stackPacksToInclude.find(stackPackToIncl =>
      stackPackToIncl.requiredStacks.includes(`${pageStack.detector}:${pageStack.id}`));
    if (!stackPackToIncl) {
      continue;
    }
    // Grab the full pack definition.
    const matchedPack = stackPacks.find(pack => pack.id === stackPackToIncl.packId);
    if (!matchedPack) {
      log.warn('StackPacks',
        `'${stackPackToIncl.packId}' stack pack was matched but is not found in stack-packs lib`);
      continue;
    }
    // Create i18n handler to get translated strings.
    const str_ = i18n.createIcuMessageFn(
      `node_modules/lighthouse-stack-packs/packs/${matchedPack.id}.js`,
      matchedPack.UIStrings
    );
    /** @type {Record<string, LH.IcuMessage>} */
    const descriptions = {};
    /** @type {Record<string, string>} */
    const UIStrings = matchedPack.UIStrings;
    // Convert all strings into the correct translation.
    for (const key in UIStrings) {
      if (UIStrings[key]) {
        descriptions[key] = str_(UIStrings[key]);
      }
    }
    packs.push({
      id: matchedPack.id,
      title: matchedPack.title,
      iconDataURL: matchedPack.icon,
      descriptions,
    });
  }
  // Present packs in the declaration order of stackPacksToInclude.
  return packs.sort((a, b) => {
    const aVal = stackPacksToInclude.findIndex(p => p.packId === a.id);
    const bVal = stackPacksToInclude.findIndex(p => p.packId === b.id);
    return aVal - bVal;
  });
}
export {
  getStackPacks,
  stackPacksToInclude,
};

28
node_modules/lighthouse/core/lib/statistics.d.ts generated vendored Normal file
View File

@@ -0,0 +1,28 @@
// Declarations for statistics.js: interpolation and log-normal score curves.
/**
 * Interpolates the y value at a point x on the line defined by (x0, y0) and (x1, y1)
 * @param {number} x0
 * @param {number} y0
 * @param {number} x1
 * @param {number} y1
 * @param {number} x
 * @return {number}
 */
export function linearInterpolation(x0: number, y0: number, x1: number, y1: number, x: number): number;
/**
 * Returns the score (1 - percentile) of `value` in a log-normal distribution
 * specified by the `median` value, at which the score will be 0.5, and a 10th
 * percentile value, at which the score will be 0.9. The score represents the
 * amount of the distribution greater than `value`. All values should be in the
 * same units (e.g. milliseconds). See
 * https://www.desmos.com/calculator/o98tbeyt1t
 * for an interactive view of the relationship between these parameters and the
 * typical parameterization (location and shape) of the log-normal distribution.
 * The returned score is always in (0, 1].
 * @param {{median: number, p10: number}} parameters
 * @param {number} value
 * @return {number}
 */
export function getLogNormalScore({ median, p10 }: {
  median: number;
  p10: number;
}, value: number): number;
//# sourceMappingURL=statistics.d.ts.map

103
node_modules/lighthouse/core/lib/statistics.js generated vendored Normal file
View File

@@ -0,0 +1,103 @@
/**
* @license Copyright 2017 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
// The exact double values for the max and min scores possible in each range.
const MIN_PASSING_SCORE = 0.90000000000000002220446049250313080847263336181640625;
const MAX_AVERAGE_SCORE = 0.899999999999999911182158029987476766109466552734375;
const MIN_AVERAGE_SCORE = 0.5;
const MAX_FAILING_SCORE = 0.499999999999999944488848768742172978818416595458984375;

/**
 * Approximates the Gauss error function, the probability that a random variable
 * from the standard normal distribution lies within [-x, x]. Based on
 * Abramowitz and Stegun, formula 7.1.26.
 * @param {number} value
 * @return {number}
 */
function erf(value) {
  // erf is odd — erf(-x) = -erf(x) — so evaluate on |x| and restore the sign.
  const sign = Math.sign(value);
  const ax = Math.abs(value);
  // Coefficients of the rational approximation (7.1.26).
  const P = 0.3275911;
  const t = 1 / (1 + P * ax);
  const poly = t * (0.254829592 +
    t * (-0.284496736 +
      t * (1.421413741 +
        t * (-1.453152027 +
          t * 1.061405429))));
  return sign * (1 - poly * Math.exp(-ax * ax));
}

/**
 * Returns the score (1 - percentile) of `value` in a log-normal distribution
 * specified by the `median` value, at which the score will be 0.5, and a 10th
 * percentile value, at which the score will be 0.9. The score represents the
 * amount of the distribution greater than `value`. All values should be in the
 * same units (e.g. milliseconds). See
 * https://www.desmos.com/calculator/o98tbeyt1t
 * for an interactive view of the relationship between these parameters and the
 * typical parameterization (location and shape) of the log-normal distribution.
 * @param {{median: number, p10: number}} parameters
 * @param {number} value
 * @return {number} score in (0, 1], clamped into the pass/average/fail bands.
 * @throws {Error} when median/p10 are non-positive or p10 >= median.
 */
function getLogNormalScore({median, p10}, value) {
  // Required for the log-normal distribution.
  if (median <= 0) throw new Error('median must be greater than zero');
  if (p10 <= 0) throw new Error('p10 must be greater than zero');
  // Not strictly required, but if p10 > median, it flips around and becomes the p90 point.
  if (p10 >= median) throw new Error('p10 must be less than the median');
  // Non-positive values aren't in the distribution, so always 1.
  if (value <= 0) return 1;

  // Closest double to `erfc-1(1/5)`.
  const INVERSE_ERFC_ONE_FIFTH = 0.9061938024368232;

  // Shape (σ) is `|log(p10/median) / (sqrt(2)*erfc^-1(1/5))|` and
  // standardizedX is `1/2 erfc(log(value/median) / (sqrt(2)*σ))`, so simplify a bit.
  // Both ratios are of positive numbers, so clamp only guards Number.MIN_VALUE underflow.
  const logValueRatio = Math.log(Math.max(Number.MIN_VALUE, value / median));
  const negLogP10Ratio = -Math.log(Math.max(Number.MIN_VALUE, p10 / median)); // negate to keep σ positive.
  const standardizedX = logValueRatio * INVERSE_ERFC_ONE_FIFTH / negLogP10Ratio;
  const complementaryPercentile = (1 - erf(standardizedX)) / 2;

  // Clamp to avoid floating-point out-of-bounds issues and keep score in expected range.
  if (value <= p10) {
    // Passing. Clamp to [0.9, 1].
    return Math.max(MIN_PASSING_SCORE, Math.min(1, complementaryPercentile));
  }
  if (value <= median) {
    // Average. Clamp to [0.5, 0.9).
    return Math.max(MIN_AVERAGE_SCORE, Math.min(MAX_AVERAGE_SCORE, complementaryPercentile));
  }
  // Failing. Clamp to [0, 0.5).
  return Math.max(0, Math.min(MAX_FAILING_SCORE, complementaryPercentile));
}

/**
 * Interpolates the y value at a point x on the line defined by (x0, y0) and (x1, y1)
 * @param {number} x0
 * @param {number} y0
 * @param {number} x1
 * @param {number} y1
 * @param {number} x
 * @return {number}
 */
function linearInterpolation(x0, y0, x1, y1, x) {
  return y0 + (x - x0) * ((y1 - y0) / (x1 - x0));
}
export {
  linearInterpolation,
  getLogNormalScore,
};

8
node_modules/lighthouse/core/lib/tappable-rects.d.ts generated vendored Normal file
View File

@@ -0,0 +1,8 @@
// Declarations for tappable-rects.js.
/**
 * Merge client rects together and remove small ones. This may result in a larger overall
 * size than that of the individual client rects.
 * We use this to simulate a finger tap on those targets later on.
 * @param {LH.Artifacts.Rect[]} clientRects
 */
export function getTappableRectsFromClientRects(clientRects: LH.Artifacts.Rect[]): import("../../types/lhr/audit-details.js").default.Rect[];
//# sourceMappingURL=tappable-rects.d.ts.map

98
node_modules/lighthouse/core/lib/tappable-rects.js generated vendored Normal file
View File

@@ -0,0 +1,98 @@
/**
* @license Copyright 2018 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import * as RectHelpers from './rect-helpers.js';
/**
 * Merge client rects together and remove small ones. This may result in a larger overall
 * size than that of the individual client rects.
 * We use this to simulate a finger tap on those targets later on.
 * @param {LH.Artifacts.Rect[]} clientRects
 */
function getTappableRectsFromClientRects(clientRects) {
  // 1x1px rects aren't meaningful tap targets; they're often visually hidden
  // and may exist only for accessibility purposes, so drop them first.
  const sizableRects = RectHelpers.filterOutTinyRects(clientRects);
  // Rects fully inside another rect contribute no tappable area of their own.
  const uncontainedRects = RectHelpers.filterOutRectsContainedByOthers(sizableRects);
  // Finally, fuse rects that read as a single tap target to the user.
  return mergeTouchingClientRects(uncontainedRects);
}
/**
 * Sometimes a child will reach out of the parent by a few px, but still
 * clearly belong to the same tap area in the user's eyes.
 * We can be quite generous here, since merging too much tends to cause false
 * passes instead of false failures (because there are more fingers)
 * @param {number} a
 * @param {number} b
 */
function almostEqual(a, b) {
  // Coordinates within 10px of each other count as the "same" edge.
  const TOLERANCE_PX = 10;
  return Math.abs(a - b) <= TOLERANCE_PX;
}
/**
 * Merge touching rects based on what appears as one tappable area to the user.
 * Restarts (via recursion) after every successful merge so iteration never
 * observes a stale array.
 * @param {LH.Artifacts.Rect[]} clientRects
 * @return {LH.Artifacts.Rect[]}
 */
function mergeTouchingClientRects(clientRects) {
  // Consider every unordered pair of rects exactly once.
  for (let i = 0; i < clientRects.length; i++) {
    for (let j = i + 1; j < clientRects.length; j++) {
      const crA = clientRects[i];
      const crB = clientRects[j];
      /**
       * We try to determine whether the rects appear as a single tappable
       * area to the user, so that they'd tap in the middle of the merged rect.
       * Examples of what we want to merge:
       *
       * AAABBB
       *
       * AAA
       * AAA
       * BBBBB
       */
      const rectsLineUpHorizontally =
        almostEqual(crA.top, crB.top) || almostEqual(crA.bottom, crB.bottom);
      const rectsLineUpVertically =
        almostEqual(crA.left, crB.left) || almostEqual(crA.right, crB.right);
      const canMerge =
        RectHelpers.rectsTouchOrOverlap(crA, crB) &&
        (rectsLineUpHorizontally || rectsLineUpVertically);
      if (canMerge) {
        const replacementClientRect = RectHelpers.getBoundingRect([crA, crB]);
        const mergedRectCenter = RectHelpers.getRectCenterPoint(replacementClientRect);
        if (
          !(
            RectHelpers.rectContainsPoint(crA, mergedRectCenter) ||
            RectHelpers.rectContainsPoint(crB, mergedRectCenter)
          )
        ) {
          // Don't merge because the new shape is too different from the
          // merged rects, and tapping in the middle wouldn't actually hit
          // either rect
          continue;
        }
        // Replace client rects with merged version
        clientRects = clientRects.filter(cr => cr !== crA && cr !== crB);
        clientRects.push(replacementClientRect);
        // Start over so we don't have to handle complexity introduced by array mutation.
        // Client rect arrays rarely contain more than 5 rects, so starting again doesn't cause perf issues.
        return mergeTouchingClientRects(clientRects);
      }
    }
  }
  return clientRects;
}
export {
  getTappableRectsFromClientRects,
};

32
node_modules/lighthouse/core/lib/third-party-web.d.ts generated vendored Normal file
View File

@@ -0,0 +1,32 @@
// Declarations for third-party-web.js, a thin wrapper around the
// third-party-web entity/product dataset.
declare namespace _default {
  export { getEntity };
  export { getProduct };
  export { isThirdParty };
  export { isFirstParty };
}
export default _default;
export type ThirdPartyEntity = import("third-party-web").IEntity;
export type ThirdPartyProduct = import("third-party-web").IProduct;
/** @typedef {import("third-party-web").IEntity} ThirdPartyEntity */
/** @typedef {import("third-party-web").IProduct} ThirdPartyProduct */
/**
 * @param {string} url
 * @return {ThirdPartyEntity|undefined}
 */
declare function getEntity(url: string): ThirdPartyEntity | undefined;
/**
 * @param {string} url
 * @return {ThirdPartyProduct|undefined}
 */
declare function getProduct(url: string): ThirdPartyProduct | undefined;
/**
 * True when the URL maps to a known entity different from the main document's.
 * @param {string} url
 * @param {ThirdPartyEntity | undefined} mainDocumentEntity
 */
declare function isThirdParty(url: string, mainDocumentEntity: ThirdPartyEntity | undefined): boolean;
/**
 * Negation of isThirdParty (so unrecognized URLs count as first-party).
 * @param {string} url
 * @param {ThirdPartyEntity | undefined} mainDocumentEntity
 */
declare function isFirstParty(url: string, mainDocumentEntity: ThirdPartyEntity | undefined): boolean;
//# sourceMappingURL=third-party-web.d.ts.map

52
node_modules/lighthouse/core/lib/third-party-web.js generated vendored Normal file
View File

@@ -0,0 +1,52 @@
/**
* @license Copyright 2020 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import thirdPartyWeb from 'third-party-web/nostats-subset.js';
/** @typedef {import("third-party-web").IEntity} ThirdPartyEntity */
/** @typedef {import("third-party-web").IProduct} ThirdPartyProduct */

/**
 * Look up the third-party-web entity that owns the given URL.
 * @param {string} url
 * @return {ThirdPartyEntity|undefined}
 */
function getEntity(url) {
  return thirdPartyWeb.getEntity(url);
}

/**
 * Look up the third-party-web product associated with the given URL.
 * @param {string} url
 * @return {ThirdPartyProduct|undefined}
 */
function getProduct(url) {
  return thirdPartyWeb.getProduct(url);
}

/**
 * A URL is third-party when it belongs to a known entity other than the one
 * that served the main document. URLs with no known entity are not third-party.
 * @param {string} url
 * @param {ThirdPartyEntity | undefined} mainDocumentEntity
 */
function isThirdParty(url, mainDocumentEntity) {
  const entity = getEntity(url);
  return Boolean(entity) && entity !== mainDocumentEntity;
}

/**
 * Negation of isThirdParty, so unrecognized URLs count as first-party.
 * @param {string} url
 * @param {ThirdPartyEntity | undefined} mainDocumentEntity
 */
function isFirstParty(url, mainDocumentEntity) {
  return !isThirdParty(url, mainDocumentEntity);
}

export default {
  getEntity,
  getProduct,
  isThirdParty,
  isFirstParty,
};

View File

@@ -0,0 +1,21 @@
/**
 * @license Copyright 2018 The Lighthouse Authors. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
 */
/**
 * Generates a chromium trace file from user timing measures
 * `threadId` can be provided to separate a series of trace events into another thread, useful
 * if timings do not share the same timeOrigin, but should both be "left-aligned".
 * Adapted from https://github.com/tdresser/performance-observer-tracing
 * @param {LH.Artifacts.MeasureEntry[]} entries user timing entries
 * @param {number=} threadId
 */
export function generateTraceEvents(entries: LH.Artifacts.MeasureEntry[], threadId?: number | undefined): import("..").TraceEvent[];
/**
 * Serializes the LHR's timing entries into a trace-file JSON string.
 * (Despite the historical name, this returns the string; the caller is
 * responsible for writing it to disk.)
 * @param {LH.Result} lhr
 * @return {string}
 */
export function createTraceString(lhr: LH.Result): string;
//# sourceMappingURL=timing-trace-saver.d.ts.map

96
node_modules/lighthouse/core/lib/timing-trace-saver.js generated vendored Normal file
View File

@@ -0,0 +1,96 @@
/**
* @license Copyright 2018 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
/**
 * Generates a chromium trace file from user timing measures
 * `threadId` can be provided to separate a series of trace events into another thread, useful
 * if timings do not share the same timeOrigin, but should both be "left-aligned".
 * Adapted from https://github.com/tdresser/performance-observer-tracing
 * @param {LH.Artifacts.MeasureEntry[]} entries user timing entries
 * @param {number=} threadId
 * @return {LH.TraceEvent[]} b/e-phase event pairs plus trace metadata events.
 */
function generateTraceEvents(entries, threadId = 0) {
  if (!Array.isArray(entries)) return [];

  /** @type {LH.TraceEvent[]} */
  const currentTrace = [];
  // Sort a copy so the caller's array is left untouched
  // (previously this sorted `entries` in place, mutating the argument).
  const sortedEntries = [...entries].sort((a, b) => a.startTime - b.startTime);
  sortedEntries.forEach((entry, i) => {
    /** @type {LH.TraceEvent} */
    const startEvt = {
      // 1) Remove 'lh:' for readability
      // 2) Colons in user_timing names get special handling in traceviewer we don't want. https://goo.gl/m23Vz7
      //    Replace with a 'Modifier letter colon' ;)
      name: entry.name.replace('lh:', '').replace(/:/g, '\ua789'),
      cat: 'blink.user_timing',
      ts: entry.startTime * 1000,
      args: {},
      dur: 0,
      pid: 0,
      tid: threadId,
      ph: 'b',
      // Event id is just the entry's index. (The previous `(i++)` postfix
      // increment was a no-op on the callback parameter and used `i` anyway.)
      id: '0x' + i.toString(16),
    };
    // Deep-clone the start event to build the matching 'e' (end) event.
    const endEvt = JSON.parse(JSON.stringify(startEvt));
    endEvt.ph = 'e';
    endEvt.ts = startEvt.ts + (entry.duration * 1000);
    currentTrace.push(startEvt);
    currentTrace.push(endEvt);
  });
  // Add labels
  /** @type {LH.TraceEvent} */
  const metaEvtBase = {
    pid: 0,
    tid: threadId,
    ts: 0,
    dur: 0,
    ph: 'M',
    cat: '__metadata',
    name: 'process_labels',
    args: {labels: 'Default'},
  };
  currentTrace.push(Object.assign({}, metaEvtBase, {args: {labels: 'Lighthouse Timing'}}));
  // Only inject TracingStartedInBrowser once
  if (threadId === 0) {
    currentTrace.push(Object.assign({}, metaEvtBase, {
      'cat': 'disabled-by-default-devtools.timeline',
      'name': 'TracingStartedInBrowser',
      'ph': 'I',
      'args': {'data': {
        'frameTreeNodeId': 1,
        'persistentIds': true,
        'frames': [],
      }},
    }));
  }
  return currentTrace;
}
/**
 * Serializes the LHR's timing entries into a trace-file JSON string.
 * Gatherer-phase entries are split onto a separate trace thread (id 10) so both
 * phases render "left-aligned". The caller is responsible for writing the
 * returned string to disk.
 * @param {LH.Result} lhr
 * @return {string}
 */
function createTraceString(lhr) {
  const gatherEntries = lhr.timing.entries.filter(entry => entry.gather);
  const entries = lhr.timing.entries.filter(entry => !gatherEntries.includes(entry));
  const auditEvents = generateTraceEvents(entries);
  const gatherEvents = generateTraceEvents(gatherEntries, 10);
  const events = [...auditEvents, ...gatherEvents];
  const jsonStr = `{"traceEvents":[
${events.map(evt => JSON.stringify(evt)).join(',\n')}
]}`;
  return jsonStr;
}
export {generateTraceEvents, createTraceString};

View File

@@ -0,0 +1,228 @@
// In-memory form of V8 sampling-profiler data: node table plus parallel
// samples/timeDeltas arrays (see the fileoverview below for the layout).
export type CpuProfile = {
  id: string;
  pid: number;
  tid: number;
  startTime: number;
  nodes: Required<LH.TraceCpuProfile>['nodes'];
  samples: Array<number>;
  timeDeltas: Array<number>;
};
// Range metadata attached to synthetic events derived from profiler samples.
export type ProfilerRange = Required<Required<LH.TraceEvent['args']>['data']>['_syntheticProfilerRange'];
// A trace event guaranteed to carry `_syntheticProfilerRange` data.
export type SynthethicEvent = LH.TraceEvent & {
  args: {
    data: {
      _syntheticProfilerRange: ProfilerRange;
    };
  };
};
// A task node whose start and end events are both synthetic profiler events.
export type SynthethicTaskNode = Omit<LH.Artifacts.TaskNode, 'event'> & {
  event: SynthethicEvent;
  endEvent: SynthethicEvent;
};
/**
* @fileoverview
*
* This model converts the `Profile` and `ProfileChunk` mega trace events from the `disabled-by-default-v8.cpu_profiler`
* category into B/E-style trace events that main-thread-tasks.js already knows how to parse into a task tree.
*
* The V8 CPU profiler measures where time is being spent by sampling the stack (See https://www.jetbrains.com/help/profiler/Profiling_Guidelines__Choosing_the_Right_Profiling_Mode.html
* for a generic description of the differences between tracing and sampling).
*
* A `Profile` event is a record of the stack that was being executed at different sample points in time.
* It has a structure like this:
*
* nodes: [function A, function B, function C]
* samples: [node with id 2, node with id 1, ...]
* timeDeltas: [4125μs since last sample, 121μs since last sample, ...]
*
* Note that this is subtly different from the protocol-based Crdp.Profiler.Profile type.
*
* Helpful prior art:
* @see https://cs.chromium.org/chromium/src/third_party/devtools-frontend/src/front_end/sdk/CPUProfileDataModel.js?sq=package:chromium&g=0&l=42
* @see https://github.com/v8/v8/blob/99ca333b0efba3236954b823101315aefeac51ab/tools/profile.js
* @see https://github.com/jlfwong/speedscope/blob/9ed1eb192cb7e9dac43a5f25bd101af169dc654a/src/import/chrome.ts#L200
*/
/**
* @typedef CpuProfile
* @property {string} id
* @property {number} pid
* @property {number} tid
* @property {number} startTime
* @property {Required<LH.TraceCpuProfile>['nodes']} nodes
* @property {Array<number>} samples
* @property {Array<number>} timeDeltas
*/
/** @typedef {Required<Required<LH.TraceEvent['args']>['data']>['_syntheticProfilerRange']} ProfilerRange */
/** @typedef {LH.TraceEvent & {args: {data: {_syntheticProfilerRange: ProfilerRange}}}} SynthethicEvent */
/** @typedef {Omit<LH.Artifacts.TaskNode, 'event'> & {event: SynthethicEvent, endEvent: SynthethicEvent}} SynthethicTaskNode */
/**
 * Model that converts a sampled CPU profile (merged `Profile`/`ProfileChunk`
 * trace events — see `collectProfileEvents`) into synthetic B/E-style trace
 * events that main-thread-tasks.js can parse into a task tree. See the
 * fileoverview comment above for background on the sampling profiler.
 */
export class CpuProfileModel {
    /**
     * @param {LH.TraceEvent | undefined} event
     * @return {event is SynthethicEvent}
     */
    static isSyntheticEvent(event: LH.TraceEvent | undefined): event is SynthethicEvent;
    /**
     * @param {LH.Artifacts.TaskNode} task
     * @return {task is SynthethicTaskNode}
     */
    static isSyntheticTask(task: LH.Artifacts.TaskNode): task is SynthethicTaskNode;
    /**
     * Finds all the tasks that started or ended (depending on `type`) within the provided time range.
     * Uses a memory index to remember the place in the array the last invocation left off to avoid
     * re-traversing the entire array, but note that this index might still be slightly off from the
     * true start position.
     *
     * @param {Array<{startTime: number, endTime: number}>} knownTasks
     * @param {{type: 'startTime'|'endTime', initialIndex: number, earliestPossibleTimestamp: number, latestPossibleTimestamp: number}} options
     */
    static _getTasksInRange(knownTasks: Array<{
        startTime: number;
        endTime: number;
    }>, options: {
        type: 'startTime' | 'endTime';
        initialIndex: number;
        earliestPossibleTimestamp: number;
        latestPossibleTimestamp: number;
    }): {
        tasks: {
            startTime: number;
            endTime: number;
        }[];
        lastIndex: number;
    };
    /**
     * Given a particular time range and a set of known true tasks, find the correct timestamp to use
     * for a transition between tasks.
     *
     * Because the sampling profiler only provides a *range* of start/stop function boundaries, this
     * method uses knowledge of a known set of tasks to find the most accurate timestamp for a particular
     * range. For example, if we know that a function ended between 800ms and 810ms, we can use the
     * knowledge that a toplevel task ended at 807ms to use 807ms as the correct endtime for this function.
     *
     * @param {{syntheticTask: SynthethicTaskNode, eventType: 'start'|'end', allEventsAtTs: {naive: Array<SynthethicEvent>, refined: Array<SynthethicEvent>}, knownTaskStartTimeIndex: number, knownTaskEndTimeIndex: number, knownTasksByStartTime: Array<{startTime: number, endTime: number}>, knownTasksByEndTime: Array<{startTime: number, endTime: number}>}} data
     * @return {{timestamp: number, lastStartTimeIndex: number, lastEndTimeIndex: number}}
     */
    static _findEffectiveTimestamp(data: {
        syntheticTask: SynthethicTaskNode;
        eventType: 'start' | 'end';
        allEventsAtTs: {
            naive: Array<SynthethicEvent>;
            refined: Array<SynthethicEvent>;
        };
        knownTaskStartTimeIndex: number;
        knownTaskEndTimeIndex: number;
        knownTasksByStartTime: Array<{
            startTime: number;
            endTime: number;
        }>;
        knownTasksByEndTime: Array<{
            startTime: number;
            endTime: number;
        }>;
    }): {
        timestamp: number;
        lastStartTimeIndex: number;
        lastEndTimeIndex: number;
    };
    /**
     * Creates B/E-style trace events from a CpuProfile object created by `collectProfileEvents()`
     *
     * @param {CpuProfile} profile
     * @param {Array<LH.Artifacts.TaskNode>} tasks
     * @return {Array<LH.TraceEvent>}
     */
    static synthesizeTraceEvents(profile: CpuProfile, tasks: Array<LH.Artifacts.TaskNode>): Array<LH.TraceEvent>;
    /**
     * Merges the data of all the `ProfileChunk` trace events into a single CpuProfile object for consumption
     * by `synthesizeTraceEvents()`.
     *
     * @param {Array<LH.TraceEvent>} traceEvents
     * @return {Array<CpuProfile>}
     */
    static collectProfileEvents(traceEvents: Array<LH.TraceEvent>): Array<CpuProfile>;
    /**
     * @param {CpuProfile} profile
     */
    constructor(profile: CpuProfile);
    /** The CPU profile this model wraps (assigned in the constructor). */
    _profile: CpuProfile;
    /** Profile nodes indexed by node ID for O(1) lookup (see `_createNodeMap`). */
    _nodesById: Map<number, {
        id: number;
        callFrame: {
            functionName: string;
            url?: string | undefined;
        };
        parent?: number | undefined;
    }>;
    /** For each node ID, the IDs of every node in its stack — ancestors plus itself (see `_createActiveNodeArrays`). */
    _activeNodeArraysById: Map<number, number[]>;
    /**
     * Initialization function to enable O(1) access to nodes by node ID.
     * @return {Map<number, CpuProfile['nodes'][0]>}
     */
    _createNodeMap(): Map<number, CpuProfile['nodes'][0]>;
    /**
     * Initialization function to enable O(1) access to the set of active nodes in the stack by node ID.
     * @return {Map<number, Array<number>>}
     */
    _createActiveNodeArrays(): Map<number, Array<number>>;
    /**
     * Returns all the node IDs in a stack when a specific nodeId is at the top of the stack
     * (i.e. a stack's node ID and the node ID of all of its parents).
     *
     * @param {number} nodeId
     * @return {Array<number>}
     */
    _getActiveNodeIds(nodeId: number): Array<number>;
    /**
     * Generates the necessary B/E-style trace events for a single transition from stack A to stack B
     * at the given latest timestamp (includes possible range in event.args.data).
     *
     * Example:
     *
     *    latestPossibleTimestamp 1234
     *    previousNodeIds 1,2,3
     *    currentNodeIds 1,2,4
     *
     *    yields [end 3 at ts 1234, begin 4 at ts 1234]
     *
     * @param {number} earliestPossibleTimestamp
     * @param {number} latestPossibleTimestamp
     * @param {Array<number>} previousNodeIds
     * @param {Array<number>} currentNodeIds
     * @return {Array<SynthethicEvent>}
     */
    _synthesizeTraceEventsForTransition(earliestPossibleTimestamp: number, latestPossibleTimestamp: number, previousNodeIds: Array<number>, currentNodeIds: Array<number>): Array<SynthethicEvent>;
    /**
     * Creates the B/E-style trace events using only data from the profile itself. Each B/E event will
     * include the actual _range_ the timestamp could have been in its metadata that is used for
     * refinement later.
     *
     * @return {Array<SynthethicEvent>}
     */
    _synthesizeNaiveTraceEvents(): Array<SynthethicEvent>;
    /**
     * Creates a copy of B/E-style trace events with refined timestamps using knowledge from the
     * tasks that have definitive timestamps.
     *
     * With the sampling profiler we know that a function started/ended _sometime between_ two points,
     * but not exactly when. Using the information from other tasks gives us more information to be
     * more precise with timings and allows us to create a valid task tree later on.
     *
     * @param {Array<{startTime: number, endTime: number}>} knownTasks
     * @param {Array<SynthethicTaskNode>} syntheticTasks
     * @param {Array<SynthethicEvent>} syntheticEvents
     * @return {Array<SynthethicEvent>}
     */
    _refineTraceEventsWithTasks(knownTasks: Array<{
        startTime: number;
        endTime: number;
    }>, syntheticTasks: Array<SynthethicTaskNode>, syntheticEvents: Array<SynthethicEvent>): Array<SynthethicEvent>;
    /**
     * Creates B/E-style trace events from a CpuProfile object created by `collectProfileEvents()`.
     * An optional set of tasks can be passed in to refine the start/end times.
     *
     * @param {Array<LH.Artifacts.TaskNode>} [knownTaskNodes]
     * @return {Array<LH.TraceEvent>}
     */
    synthesizeTraceEvents(knownTaskNodes?: import("./main-thread-tasks.js").TaskNode[] | undefined): Array<LH.TraceEvent>;
}
//# sourceMappingURL=cpu-profile-model.d.ts.map

View File

@@ -0,0 +1,591 @@
/**
* @license Copyright 2020 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import {MainThreadTasks} from './main-thread-tasks.js';
// Name given to the synthetic B/E events produced by this model. It is
// Lighthouse-specific and never appears in a real trace; attribution logic in
// main-thread-tasks.js special-cases events with this name.
const SAMPLER_TRACE_EVENT_NAME = 'FunctionCall-SynthesizedByProfilerModel';
/**
* @fileoverview
*
* This model converts the `Profile` and `ProfileChunk` mega trace events from the `disabled-by-default-v8.cpu_profiler`
* category into B/E-style trace events that main-thread-tasks.js already knows how to parse into a task tree.
*
* The V8 CPU profiler measures where time is being spent by sampling the stack (See https://www.jetbrains.com/help/profiler/Profiling_Guidelines__Choosing_the_Right_Profiling_Mode.html
* for a generic description of the differences between tracing and sampling).
*
* A `Profile` event is a record of the stack that was being executed at different sample points in time.
* It has a structure like this:
*
* nodes: [function A, function B, function C]
* samples: [node with id 2, node with id 1, ...]
* timeDeltas: [4125μs since last sample, 121μs since last sample, ...]
*
* Note that this is subtly different from the protocol-based Crdp.Profiler.Profile type.
*
* Helpful prior art:
* @see https://cs.chromium.org/chromium/src/third_party/devtools-frontend/src/front_end/sdk/CPUProfileDataModel.js?sq=package:chromium&g=0&l=42
* @see https://github.com/v8/v8/blob/99ca333b0efba3236954b823101315aefeac51ab/tools/profile.js
* @see https://github.com/jlfwong/speedscope/blob/9ed1eb192cb7e9dac43a5f25bd101af169dc654a/src/import/chrome.ts#L200
*/
/**
* @typedef CpuProfile
* @property {string} id
* @property {number} pid
* @property {number} tid
* @property {number} startTime
* @property {Required<LH.TraceCpuProfile>['nodes']} nodes
* @property {Array<number>} samples
* @property {Array<number>} timeDeltas
*/
/** @typedef {Required<Required<LH.TraceEvent['args']>['data']>['_syntheticProfilerRange']} ProfilerRange */
/** @typedef {LH.TraceEvent & {args: {data: {_syntheticProfilerRange: ProfilerRange}}}} SynthethicEvent */
/** @typedef {Omit<LH.Artifacts.TaskNode, 'event'> & {event: SynthethicEvent, endEvent: SynthethicEvent}} SynthethicTaskNode */
/**
 * Model that converts one merged `CpuProfile` (see `collectProfileEvents`) into
 * synthetic B/E-style trace events that main-thread-tasks.js can parse into a
 * task tree. See the fileoverview comment above for background on sampling.
 */
class CpuProfileModel {
  /**
   * @param {CpuProfile} profile
   */
  constructor(profile) {
    this._profile = profile;
    this._nodesById = this._createNodeMap();
    this._activeNodeArraysById = this._createActiveNodeArrays();
  }
  /**
   * Initialization function to enable O(1) access to nodes by node ID.
   * @return {Map<number, CpuProfile['nodes'][0]>}
   */
  _createNodeMap() {
    /** @type {Map<number, CpuProfile['nodes'][0]>} */
    const map = new Map();
    for (const node of this._profile.nodes) {
      map.set(node.id, node);
    }
    return map;
  }
  /**
   * Initialization function to enable O(1) access to the set of active nodes in the stack by node ID.
   * @return {Map<number, Array<number>>}
   */
  _createActiveNodeArrays() {
    /** @type {Map<number, Array<number>>} */
    const map = new Map();
    // Recursively builds (and memoizes in `map`) the chain of node IDs from the
    // root of the stack down to—and including—the requested node.
    /** @param {number} id @return {Array<number>} */
    const getActiveNodes = id => {
      if (map.has(id)) return map.get(id) || [];
      const node = this._nodesById.get(id);
      if (!node) throw new Error(`No such node ${id}`);
      if (typeof node.parent === 'number') {
        const array = getActiveNodes(node.parent).concat([id]);
        map.set(id, array);
        return array;
      } else {
        // No parent: this node is the root of its stack.
        return [id];
      }
    };
    for (const node of this._profile.nodes) {
      map.set(node.id, getActiveNodes(node.id));
    }
    return map;
  }
  /**
   * Returns all the node IDs in a stack when a specific nodeId is at the top of the stack
   * (i.e. a stack's node ID and the node ID of all of its parents).
   *
   * @param {number} nodeId
   * @return {Array<number>}
   */
  _getActiveNodeIds(nodeId) {
    const activeNodeIds = this._activeNodeArraysById.get(nodeId);
    if (!activeNodeIds) throw new Error(`No such node ID ${nodeId}`);
    return activeNodeIds;
  }
  /**
   * Generates the necessary B/E-style trace events for a single transition from stack A to stack B
   * at the given latest timestamp (includes possible range in event.args.data).
   *
   * Example:
   *
   *    latestPossibleTimestamp 1234
   *    previousNodeIds 1,2,3
   *    currentNodeIds 1,2,4
   *
   *    yields [end 3 at ts 1234, begin 4 at ts 1234]
   *
   * @param {number} earliestPossibleTimestamp
   * @param {number} latestPossibleTimestamp
   * @param {Array<number>} previousNodeIds
   * @param {Array<number>} currentNodeIds
   * @return {Array<SynthethicEvent>}
   */
  _synthesizeTraceEventsForTransition(
    earliestPossibleTimestamp,
    latestPossibleTimestamp,
    previousNodeIds,
    currentNodeIds
  ) {
    // Nodes present in the new stack but not the old one have just started.
    const startNodes = currentNodeIds
      .filter(id => !previousNodeIds.includes(id))
      .map(id => this._nodesById.get(id))
      .filter(/** @return {node is CpuProfile['nodes'][0]} */ node => !!node);
    // Nodes present in the old stack but not the new one have just ended.
    const endNodes = previousNodeIds
      .filter(id => !currentNodeIds.includes(id))
      .map(id => this._nodesById.get(id))
      .filter(/** @return {node is CpuProfile['nodes'][0]} */ node => !!node);
    /** @param {CpuProfile['nodes'][0]} node @return {SynthethicEvent} */
    const createSyntheticEvent = node => ({
      // The final transition passes Infinity as the upper bound (see
      // `_synthesizeNaiveTraceEvents`); fall back to the lower bound then.
      ts: Number.isFinite(latestPossibleTimestamp)
        ? latestPossibleTimestamp
        : earliestPossibleTimestamp,
      pid: this._profile.pid,
      tid: this._profile.tid,
      dur: 0,
      ph: 'I',
      // This trace event name is Lighthouse-specific and wouldn't be found in a real trace.
      // Attribution logic in main-thread-tasks.js special cases this event.
      name: SAMPLER_TRACE_EVENT_NAME,
      cat: 'lighthouse',
      args: {
        data: {
          callFrame: node.callFrame,
          _syntheticProfilerRange: {earliestPossibleTimestamp, latestPossibleTimestamp},
        },
      },
    });
    /** @type {Array<SynthethicEvent>} */
    const startEvents = startNodes.map(createSyntheticEvent).map(evt => ({...evt, ph: 'B'}));
    /** @type {Array<SynthethicEvent>} */
    const endEvents = endNodes.map(createSyntheticEvent).map(evt => ({...evt, ph: 'E'}));
    // Ensure we put end events in first to finish prior tasks before starting new ones.
    return [...endEvents.reverse(), ...startEvents];
  }
  /**
   * True when `event` is one of the synthetic B/E events produced by this model
   * (identified by the sentinel name plus the attached profiler range).
   * @param {LH.TraceEvent | undefined} event
   * @return {event is SynthethicEvent}
   */
  static isSyntheticEvent(event) {
    if (!event) return false;
    return Boolean(
      event.name === SAMPLER_TRACE_EVENT_NAME &&
      event.args.data?._syntheticProfilerRange
    );
  }
  /**
   * True when both of a task's events are synthetic profiler events.
   * @param {LH.Artifacts.TaskNode} task
   * @return {task is SynthethicTaskNode}
   */
  static isSyntheticTask(task) {
    return CpuProfileModel.isSyntheticEvent(task.event) &&
      CpuProfileModel.isSyntheticEvent(task.endEvent);
  }
  /**
   * Finds all the tasks that started or ended (depending on `type`) within the provided time range.
   * Uses a memory index to remember the place in the array the last invocation left off to avoid
   * re-traversing the entire array, but note that this index might still be slightly off from the
   * true start position.
   *
   * @param {Array<{startTime: number, endTime: number}>} knownTasks
   * @param {{type: 'startTime'|'endTime', initialIndex: number, earliestPossibleTimestamp: number, latestPossibleTimestamp: number}} options
   */
  static _getTasksInRange(knownTasks, options) {
    const {type, initialIndex, earliestPossibleTimestamp, latestPossibleTimestamp} = options;
    // We may have overshot a little from last time, so back up to find the real starting index.
    let startIndex = initialIndex;
    while (startIndex > 0) {
      const task = knownTasks[startIndex];
      if (task && task[type] < earliestPossibleTimestamp) break;
      startIndex--;
    }
    /** @type {Array<{startTime: number, endTime: number}>} */
    const matchingTasks = [];
    for (let i = startIndex; i < knownTasks.length; i++) {
      const task = knownTasks[i];
      // Task is before our range of interest, keep looping.
      if (task[type] < earliestPossibleTimestamp) continue;
      // Task is after our range of interest, we're done.
      if (task[type] > latestPossibleTimestamp) {
        return {tasks: matchingTasks, lastIndex: i};
      }
      // Task is in our range of interest, add it to our list.
      matchingTasks.push(task);
    }
    // We went through all tasks before reaching the end of our range.
    return {tasks: matchingTasks, lastIndex: knownTasks.length};
  }
  /**
   * Given a particular time range and a set of known true tasks, find the correct timestamp to use
   * for a transition between tasks.
   *
   * Because the sampling profiler only provides a *range* of start/stop function boundaries, this
   * method uses knowledge of a known set of tasks to find the most accurate timestamp for a particular
   * range. For example, if we know that a function ended between 800ms and 810ms, we can use the
   * knowledge that a toplevel task ended at 807ms to use 807ms as the correct endtime for this function.
   *
   * @param {{syntheticTask: SynthethicTaskNode, eventType: 'start'|'end', allEventsAtTs: {naive: Array<SynthethicEvent>, refined: Array<SynthethicEvent>}, knownTaskStartTimeIndex: number, knownTaskEndTimeIndex: number, knownTasksByStartTime: Array<{startTime: number, endTime: number}>, knownTasksByEndTime: Array<{startTime: number, endTime: number}>}} data
   * @return {{timestamp: number, lastStartTimeIndex: number, lastEndTimeIndex: number}}
   */
  static _findEffectiveTimestamp(data) {
    const {
      eventType,
      syntheticTask,
      allEventsAtTs,
      knownTasksByStartTime,
      knownTaskStartTimeIndex,
      knownTasksByEndTime,
      knownTaskEndTimeIndex,
    } = data;
    // `targetEvent` is the event being refined; `pairEvent` is its B/E partner.
    const targetEvent = eventType === 'start' ? syntheticTask.event : syntheticTask.endEvent;
    const pairEvent = eventType === 'start' ? syntheticTask.endEvent : syntheticTask.event;
    const timeRange = targetEvent.args.data._syntheticProfilerRange;
    const pairTimeRange = pairEvent.args.data._syntheticProfilerRange;
    const {tasks: knownTasksStarting, lastIndex: lastStartTimeIndex} = this._getTasksInRange(
      knownTasksByStartTime,
      {
        type: 'startTime',
        initialIndex: knownTaskStartTimeIndex,
        earliestPossibleTimestamp: timeRange.earliestPossibleTimestamp,
        latestPossibleTimestamp: timeRange.latestPossibleTimestamp,
      }
    );
    const {tasks: knownTasksEnding, lastIndex: lastEndTimeIndex} = this._getTasksInRange(
      knownTasksByEndTime,
      {
        type: 'endTime',
        initialIndex: knownTaskEndTimeIndex,
        earliestPossibleTimestamp: timeRange.earliestPossibleTimestamp,
        latestPossibleTimestamp: timeRange.latestPossibleTimestamp,
      }
    );
    // First, find all the tasks that span *across* (not fully contained within) our ambiguous range.
    const knownTasksStartingNotContained = knownTasksStarting
      .filter(t => !knownTasksEnding.includes(t));
    const knownTasksEndingNotContained = knownTasksEnding
      .filter(t => !knownTasksStarting.includes(t));
    // Each one of these spanning tasks can be in one of three situations:
    //    - Task is a parent of the sample.
    //    - Task is a child of the sample.
    //    - Task has no overlap with the sample.
    // Parent tasks must satisfy...
    //    parentTask.startTime <= syntheticTask.startTime
    //      AND
    //    syntheticTask.endTime <= parentTask.endTime
    const parentTasks =
      eventType === 'start'
        ? knownTasksStartingNotContained.filter(
            t => t.endTime >= pairTimeRange.earliestPossibleTimestamp
          )
        : knownTasksEndingNotContained.filter(
            t => t.startTime <= pairTimeRange.latestPossibleTimestamp
          );
    // Child tasks must satisfy...
    //    syntheticTask.startTime <= childTask.startTime
    //      AND
    //    childTask.endTime <= syntheticTask.endTime
    const childTasks =
      eventType === 'start'
        ? knownTasksStartingNotContained.filter(
            t => t.endTime < pairTimeRange.earliestPossibleTimestamp
          )
        : knownTasksEndingNotContained.filter(
            t => t.startTime > pairTimeRange.latestPossibleTimestamp
          );
    // Unrelated tasks must satisfy...
    //    unrelatedTask.endTime <= syntheticTask.startTime
    //      OR
    //    syntheticTask.endTime <= unrelatedTask.startTime
    const unrelatedTasks =
      eventType === 'start' ? knownTasksEndingNotContained : knownTasksStartingNotContained;
    // Now we narrow our allowable range using the three types of tasks and the other events
    // that we've already refined.
    const minimumTs = Math.max(
      // Sampled event couldn't be earlier than this to begin with.
      timeRange.earliestPossibleTimestamp,
      // Sampled start event can't be before its parent started.
      // Sampled end event can't be before its child ended.
      ...(eventType === 'start'
        ? parentTasks.map(t => t.startTime)
        : childTasks.map(t => t.endTime)),
      // Sampled start event can't be before unrelated tasks ended.
      ...(eventType === 'start' ? unrelatedTasks.map(t => t.endTime) : []),
      // Sampled start event can't be before the other `E` events at its same timestamp.
      ...(eventType === 'start'
        ? allEventsAtTs.refined.filter(e => e.ph === 'E').map(e => e.ts)
        : [])
    );
    const maximumTs = Math.min(
      // Sampled event couldn't be later than this to begin with.
      timeRange.latestPossibleTimestamp,
      // Sampled start event can't be after its child started.
      // Sampled end event can't be after its parent ended.
      ...(eventType === 'start'
        ? childTasks.map(t => t.startTime)
        : parentTasks.map(t => t.endTime)),
      // Sampled end event can't be after unrelated tasks started.
      ...(eventType === 'start' ? [] : unrelatedTasks.map(t => t.startTime)),
      // Sampled end event can't be after the other `B` events at its same timestamp.
      // This is _currently_ only possible in contrived scenarios due to the sorted order of processing,
      // but it's a non-obvious observation and case to account for.
      ...(eventType === 'start'
        ? []
        : allEventsAtTs.refined.filter(e => e.ph === 'B').map(e => e.ts))
    );
    // We want to maximize the size of the sampling tasks within our constraints, so we'll pick
    // the _earliest_ possible time for start events and the _latest_ possible time for end events.
    const effectiveTimestamp =
      (eventType === 'start' && Number.isFinite(minimumTs)) || !Number.isFinite(maximumTs)
        ? minimumTs
        : maximumTs;
    return {timestamp: effectiveTimestamp, lastStartTimeIndex, lastEndTimeIndex};
  }
  /**
   * Creates the B/E-style trace events using only data from the profile itself. Each B/E event will
   * include the actual _range_ the timestamp could have been in its metadata that is used for
   * refinement later.
   *
   * @return {Array<SynthethicEvent>}
   */
  _synthesizeNaiveTraceEvents() {
    const profile = this._profile;
    const length = profile.samples.length;
    if (profile.timeDeltas.length !== length) throw new Error(`Invalid CPU profile length`);
    /** @type {Array<SynthethicEvent>} */
    const events = [];
    let currentProfilerTimestamp = profile.startTime;
    let earliestPossibleTimestamp = -Infinity;
    /** @type {Array<number>} */
    let lastActiveNodeIds = [];
    for (let i = 0; i < profile.samples.length; i++) {
      const nodeId = profile.samples[i];
      // Clamp deltas to at least 1μs so the running timestamp strictly increases.
      const timeDelta = Math.max(profile.timeDeltas[i], 1);
      const node = this._nodesById.get(nodeId);
      if (!node) throw new Error(`Missing node ${nodeId}`);
      currentProfilerTimestamp += timeDelta;
      const activeNodeIds = this._getActiveNodeIds(nodeId);
      events.push(
        ...this._synthesizeTraceEventsForTransition(
          earliestPossibleTimestamp,
          currentProfilerTimestamp,
          lastActiveNodeIds,
          activeNodeIds
        )
      );
      earliestPossibleTimestamp = currentProfilerTimestamp;
      lastActiveNodeIds = activeNodeIds;
    }
    // Close out any stacks still open after the final sample (transition to an
    // empty stack, with an unbounded upper timestamp).
    events.push(
      ...this._synthesizeTraceEventsForTransition(
        currentProfilerTimestamp,
        Infinity,
        lastActiveNodeIds,
        []
      )
    );
    return events;
  }
  /**
   * Creates a copy of B/E-style trace events with refined timestamps using knowledge from the
   * tasks that have definitive timestamps.
   *
   * With the sampling profiler we know that a function started/ended _sometime between_ two points,
   * but not exactly when. Using the information from other tasks gives us more information to be
   * more precise with timings and allows us to create a valid task tree later on.
   *
   * @param {Array<{startTime: number, endTime: number}>} knownTasks
   * @param {Array<SynthethicTaskNode>} syntheticTasks
   * @param {Array<SynthethicEvent>} syntheticEvents
   * @return {Array<SynthethicEvent>}
   */
  _refineTraceEventsWithTasks(knownTasks, syntheticTasks, syntheticEvents) {
    /** @type {Array<SynthethicEvent>} */
    const refinedEvents = [];
    // Group events by timestamp so refinement can consider the other B/E events
    // sharing a timestamp (consumed via `allEventsAtTs` in `_findEffectiveTimestamp`).
    /** @type {Map<number, {naive: Array<SynthethicEvent>, refined: Array<SynthethicEvent>}>} */
    const syntheticEventsByTs = new Map();
    for (const event of syntheticEvents) {
      const group = syntheticEventsByTs.get(event.ts) || {naive: [], refined: []};
      group.naive.push(event);
      syntheticEventsByTs.set(event.ts, group);
    }
    // Map each B and E event back to the task it belongs to.
    /** @type {Map<SynthethicEvent, SynthethicTaskNode>} */
    const syntheticTasksByEvent = new Map();
    for (const task of syntheticTasks) {
      syntheticTasksByEvent.set(task.event, task);
      syntheticTasksByEvent.set(task.endEvent, task);
    }
    const knownTasksByStartTime = knownTasks.slice().sort((a, b) => a.startTime - b.startTime);
    const knownTasksByEndTime = knownTasks.slice().sort((a, b) => a.endTime - b.endTime);
    // Memoized scan positions handed back by `_findEffectiveTimestamp` each iteration.
    let knownTaskStartTimeIndex = 0;
    let knownTaskEndTimeIndex = 0;
    for (const event of syntheticEvents) {
      const syntheticTask = syntheticTasksByEvent.get(event);
      if (!syntheticTask) throw new Error('Impossible - all events have a task');
      const allEventsAtTs = syntheticEventsByTs.get(event.ts);
      if (!allEventsAtTs) throw new Error('Impossible - we just mapped every event');
      const effectiveTimestampData = CpuProfileModel._findEffectiveTimestamp({
        eventType: event.ph === 'B' ? 'start' : 'end',
        syntheticTask,
        allEventsAtTs,
        knownTaskStartTimeIndex,
        knownTaskEndTimeIndex,
        knownTasksByStartTime,
        knownTasksByEndTime,
      });
      knownTaskStartTimeIndex = effectiveTimestampData.lastStartTimeIndex;
      knownTaskEndTimeIndex = effectiveTimestampData.lastEndTimeIndex;
      const refinedEvent = {...event, ts: effectiveTimestampData.timestamp};
      refinedEvents.push(refinedEvent);
      allEventsAtTs.refined.push(refinedEvent);
    }
    return refinedEvents;
  }
  /**
   * Creates B/E-style trace events from a CpuProfile object created by `collectProfileEvents()`.
   * An optional set of tasks can be passed in to refine the start/end times.
   *
   * @param {Array<LH.Artifacts.TaskNode>} [knownTaskNodes]
   * @return {Array<LH.TraceEvent>}
   */
  synthesizeTraceEvents(knownTaskNodes = []) {
    const naiveEvents = this._synthesizeNaiveTraceEvents();
    if (!naiveEvents.length) return [];
    let finalEvents = naiveEvents;
    if (knownTaskNodes.length) {
      // If we have task information, put the times back into raw trace event ts scale.
      /** @type {(baseTs: number) => (node: LH.Artifacts.TaskNode) => LH.Artifacts.TaskNode} */
      const rebaseTaskTime = baseTs => node => ({
        ...node,
        startTime: baseTs + node.startTime * 1000,
        endTime: baseTs + node.endTime * 1000,
        duration: node.duration * 1000,
      });
      // The first task node might not be time 0, so recompute the baseTs.
      const baseTs = knownTaskNodes[0].event.ts - knownTaskNodes[0].startTime * 1000;
      const knownTasks = knownTaskNodes.map(rebaseTaskTime(baseTs));
      // We'll also create tasks for our naive events so we have the B/E pairs readily available.
      const naiveProfilerTasks = MainThreadTasks.getMainThreadTasks(naiveEvents, [], Infinity)
        .map(rebaseTaskTime(naiveEvents[0].ts))
        .filter(CpuProfileModel.isSyntheticTask);
      if (!naiveProfilerTasks.length) throw new Error('Failed to create naive profiler tasks');
      finalEvents = this._refineTraceEventsWithTasks(knownTasks, naiveProfilerTasks, naiveEvents);
    }
    return finalEvents;
  }
  /**
   * Creates B/E-style trace events from a CpuProfile object created by `collectProfileEvents()`
   *
   * @param {CpuProfile} profile
   * @param {Array<LH.Artifacts.TaskNode>} tasks
   * @return {Array<LH.TraceEvent>}
   */
  static synthesizeTraceEvents(profile, tasks) {
    const model = new CpuProfileModel(profile);
    return model.synthesizeTraceEvents(tasks);
  }
  /**
   * Merges the data of all the `ProfileChunk` trace events into a single CpuProfile object for consumption
   * by `synthesizeTraceEvents()`.
   *
   * @param {Array<LH.TraceEvent>} traceEvents
   * @return {Array<CpuProfile>}
   */
  static collectProfileEvents(traceEvents) {
    /** @type {Map<string, CpuProfile>} */
    const profiles = new Map();
    for (const event of traceEvents) {
      if (event.name !== 'Profile' && event.name !== 'ProfileChunk') continue;
      if (typeof event.id !== 'string') continue;
      // `Profile` or `ProfileChunk` can partially define these across multiple events.
      // We'll fallback to empty values and worry about validation in the `synthesizeTraceEvents` phase.
      const cpuProfileArg = event.args.data?.cpuProfile || {};
      const timeDeltas = event.args.data?.timeDeltas || cpuProfileArg.timeDeltas;
      let profile = profiles.get(event.id);
      if (event.name === 'Profile') {
        // A `Profile` event starts a fresh profile for this id.
        profile = {
          id: event.id,
          pid: event.pid,
          tid: event.tid,
          startTime: event.args.data?.startTime || event.ts,
          nodes: cpuProfileArg.nodes || [],
          samples: cpuProfileArg.samples || [],
          timeDeltas: timeDeltas || [],
        };
      } else {
        // A `ProfileChunk` appends to the previously-seen `Profile` with the
        // same id; chunks with no matching `Profile` are dropped.
        if (!profile) continue;
        profile.nodes.push(...(cpuProfileArg.nodes || []));
        profile.samples.push(...(cpuProfileArg.samples || []));
        profile.timeDeltas.push(...(timeDeltas || []));
      }
      profiles.set(profile.id, profile);
    }
    return Array.from(profiles.values());
  }
}
export {CpuProfileModel};

View File

@@ -0,0 +1,162 @@
/** Task classification/grouping type, re-exported from task-groups.js. */
export type TaskGroup = import('./task-groups.js').TaskGroup;
/**
 * A node in the hierarchical main-thread task tree: the underlying trace event
 * pair plus computed timing, parent/child links, group classification, and
 * attributable script URLs (see the fileoverview below).
 */
export type TaskNode = {
    event: LH.TraceEvent;
    endEvent: LH.TraceEvent | undefined;
    children: TaskNode[];
    parent: TaskNode | undefined;
    /**
     * Indicates that the task had an endTime that was inferred rather than specified in the trace. i.e. in the source trace this task was unbounded.
     */
    unbounded: boolean;
    startTime: number;
    endTime: number;
    duration: number;
    selfTime: number;
    attributableURLs: string[];
    group: TaskGroup;
};
/**
 * Lookup state (timer/XHR tasks, frame URLs by id, the previous task's URLs)
 * threaded through task creation and URL attribution.
 */
export type PriorTaskData = {
    timers: Map<string, TaskNode>;
    xhrs: Map<string, TaskNode>;
    frameURLsById: Map<string, string>;
    lastTaskURLs: string[];
};
/**
* @fileoverview
*
* This artifact converts the array of raw trace events into an array of hierarchical
* tasks for easier consumption and bottom-up analysis.
*
* Events are easily produced but difficult to consume. They're a mixture of start/end markers, "complete" events, etc.
* @see https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/preview
*
* LH's TaskNode is an artifact that fills in the gaps a trace event leaves behind.
* i.e. when did it end? which events are children/parents of this one?
*
* Each task will have its group/classification, start time, end time,
* duration, and self time computed. Each task will potentially have a parent, children, and an
* attributableURL for the script that was executing/forced this execution.
*/
/** @typedef {import('./task-groups.js').TaskGroup} TaskGroup */
/**
* @typedef TaskNode
* @prop {LH.TraceEvent} event
* @prop {LH.TraceEvent|undefined} endEvent
* @prop {TaskNode[]} children
* @prop {TaskNode|undefined} parent
* @prop {boolean} unbounded Indicates that the task had an endTime that was inferred rather than specified in the trace. i.e. in the source trace this task was unbounded.
* @prop {number} startTime
* @prop {number} endTime
* @prop {number} duration
* @prop {number} selfTime
* @prop {string[]} attributableURLs
* @prop {TaskGroup} group
*/
/** @typedef {{timers: Map<string, TaskNode>, xhrs: Map<string, TaskNode>, frameURLsById: Map<string, string>, lastTaskURLs: string[]}} PriorTaskData */
/**
 * Converts an array of raw trace events into an array of hierarchical
 * `TaskNode`s with timing, parent/child, group, and URL-attribution data
 * filled in. See the fileoverview comment above.
 */
export class MainThreadTasks {
    /**
     * @param {LH.TraceEvent} event
     * @param {LH.TraceEvent} [endEvent]
     * @return {TaskNode}
     */
    static _createNewTaskNode(event: LH.TraceEvent, endEvent?: LH.TraceEvent | undefined): TaskNode;
    /**
     *
     * @param {TaskNode} currentTask
     * @param {number} stopTs
     * @param {PriorTaskData} priorTaskData
     * @param {Array<LH.TraceEvent>} reverseEventsQueue
     */
    static _assignAllTimersUntilTs(currentTask: TaskNode, stopTs: number, priorTaskData: PriorTaskData, reverseEventsQueue: Array<LH.TraceEvent>): void;
    /**
     * This function takes the start and end events from a thread and creates tasks from them.
     * We do this by iterating through the start and end event arrays simultaneously. For each start
     * event we attempt to find its end event.
     *
     * Because of this matching of start/end events and the need to be mutating our end events queue,
     * we reverse the array to more efficiently `.pop()` them off rather than `.shift()` them off.
     * While it's true the worst case runtime here is O(n^2), ~99.999% of the time the reverse loop is O(1)
     * because the overwhelmingly common case is that end event for a given start event is simply the very next event in our queue.
     *
     * @param {LH.TraceEvent[]} taskStartEvents
     * @param {LH.TraceEvent[]} taskEndEvents
     * @param {number} traceEndTs
     * @return {TaskNode[]}
     */
    static _createTasksFromStartAndEndEvents(taskStartEvents: LH.TraceEvent[], taskEndEvents: LH.TraceEvent[], traceEndTs: number): TaskNode[];
    /**
     * This function iterates through the tasks to set the `.parent`/`.children` properties of tasks
     * according to their implied nesting structure. If any of these relationships seem impossible based on
     * the timestamps, this method will throw.
     *
     * @param {TaskNode[]} sortedTasks
     * @param {LH.TraceEvent[]} timerInstallEvents
     * @param {PriorTaskData} priorTaskData
     */
    static _createTaskRelationships(sortedTasks: TaskNode[], timerInstallEvents: LH.TraceEvent[], priorTaskData: PriorTaskData): void;
    /**
     * This function takes the raw trace events sorted in increasing timestamp order and outputs connected task nodes.
     * To create the task hierarchy we make several passes over the events.
     *
     *    1. Create three arrays of X/B events, E events, and TimerInstall events.
     *    2. Create tasks for each X/B event, throwing if a matching E event cannot be found for a given B.
     *    3. Sort the tasks by ↑ startTime, ↓ duration.
     *    4. Match each task to its parent, throwing if there is any invalid overlap between tasks.
     *    5. Sort the tasks once more by ↑ startTime, ↓ duration in case they changed during relationship creation.
     *
     * @param {LH.TraceEvent[]} mainThreadEvents
     * @param {PriorTaskData} priorTaskData
     * @param {number} traceEndTs
     * @return {TaskNode[]}
     */
    static _createTasksFromEvents(mainThreadEvents: LH.TraceEvent[], priorTaskData: PriorTaskData, traceEndTs: number): TaskNode[];
    /**
     * @param {TaskNode} task
     * @param {TaskNode|undefined} parent
     * @return {number}
     */
    static _computeRecursiveSelfTime(task: TaskNode, parent: TaskNode | undefined): number;
    /**
     * @param {TaskNode} task
     * @param {string[]} parentURLs
     * @param {string[]} allURLsInTree
     * @param {PriorTaskData} priorTaskData
     */
    static _computeRecursiveAttributableURLs(task: TaskNode, parentURLs: string[], allURLsInTree: string[], priorTaskData: PriorTaskData): void;
    /**
     * @param {TaskNode} task
     * @param {Array<string>} urls
     */
    static _setRecursiveEmptyAttributableURLs(task: TaskNode, urls: Array<string>): void;
    /**
     * @param {TaskNode} task
     * @param {TaskGroup} [parentGroup]
     */
    static _computeRecursiveTaskGroup(task: TaskNode, parentGroup?: import("./task-groups.js").TaskGroup | undefined): void;
    /**
     * @param {LH.TraceEvent[]} mainThreadEvents
     * @param {Array<{id: string, url: string}>} frames
     * @param {number} traceEndTs
     * @param {number} [traceStartTs] Optional time-0 ts for tasks. Tasks before this point will have negative start/end times. Defaults to the first task found.
     * @return {TaskNode[]}
     */
    static getMainThreadTasks(mainThreadEvents: LH.TraceEvent[], frames: Array<{
        id: string;
        url: string;
    }>, traceEndTs: number, traceStartTs?: number | undefined): TaskNode[];
    /**
     * Prints an artistic rendering of the task tree for easier debuggability.
     *
     * @param {TaskNode[]} tasks
     * @param {{printWidth?: number, startTime?: number, endTime?: number, taskLabelFn?: (node: TaskNode) => string}} options
     * @return {string}
     */
    static printTaskTreeToDebugString(tasks: TaskNode[], options?: {
        printWidth?: number | undefined;
        startTime?: number | undefined;
        endTime?: number | undefined;
        taskLabelFn?: ((node: TaskNode) => string) | undefined;
    }): string;
}
import * as LH from '../../../types/lh.js';
//# sourceMappingURL=main-thread-tasks.d.ts.map

View File

@@ -0,0 +1,688 @@
/**
* @license Copyright 2017 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
import * as LH from '../../../types/lh.js';
import {taskGroups, taskNameToGroup} from './task-groups.js';
/**
* @fileoverview
*
* This artifact converts the array of raw trace events into an array of hierarchical
* tasks for easier consumption and bottom-up analysis.
*
* Events are easily produced but difficult to consume. They're a mixture of start/end markers, "complete" events, etc.
* @see https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/preview
*
* LH's TaskNode is an artifact that fills in the gaps a trace event leaves behind.
* i.e. when did it end? which events are children/parents of this one?
*
* Each task will have its group/classification, start time, end time,
* duration, and self time computed. Each task will potentially have a parent, children, and an
* attributableURL for the script that was executing/forced this execution.
*/
/** @typedef {import('./task-groups.js').TaskGroup} TaskGroup */
/**
* @typedef TaskNode
* @prop {LH.TraceEvent} event
* @prop {LH.TraceEvent|undefined} endEvent
* @prop {TaskNode[]} children
* @prop {TaskNode|undefined} parent
* @prop {boolean} unbounded Indicates that the task had an endTime that was inferred rather than specified in the trace. i.e. in the source trace this task was unbounded.
* @prop {number} startTime
* @prop {number} endTime
* @prop {number} duration
* @prop {number} selfTime
* @prop {string[]} attributableURLs
* @prop {TaskGroup} group
*/
/** @typedef {{timers: Map<string, TaskNode>, xhrs: Map<string, TaskNode>, frameURLsById: Map<string, string>, lastTaskURLs: string[]}} PriorTaskData */
class MainThreadTasks {
/**
* @param {LH.TraceEvent} event
* @param {LH.TraceEvent} [endEvent]
* @return {TaskNode}
*/
static _createNewTaskNode(event, endEvent) {
const isCompleteEvent = event.ph === 'X' && !endEvent;
const isStartEndEventPair = event.ph === 'B' && endEvent && endEvent.ph === 'E';
if (!isCompleteEvent && !isStartEndEventPair) {
throw new Error('Invalid parameters for _createNewTaskNode');
}
const startTime = event.ts;
const endTime = endEvent ? endEvent.ts : event.ts + Number(event.dur || 0);
const newTask = {
event,
endEvent,
startTime,
endTime,
duration: endTime - startTime,
// These properties will be filled in later
unbounded: false,
parent: undefined,
children: [],
attributableURLs: [],
group: taskGroups.other,
selfTime: NaN,
};
return newTask;
}
/**
*
* @param {TaskNode} currentTask
* @param {number} stopTs
* @param {PriorTaskData} priorTaskData
* @param {Array<LH.TraceEvent>} reverseEventsQueue
*/
static _assignAllTimersUntilTs(
currentTask,
stopTs,
priorTaskData,
reverseEventsQueue
) {
while (reverseEventsQueue.length) {
const nextTimerInstallEvent = reverseEventsQueue.pop();
// We're out of events to look at; we're done.
if (!nextTimerInstallEvent) break;
// Timer event is after our current task; push it back on for next time, and we're done.
if (nextTimerInstallEvent.ts > stopTs) {
reverseEventsQueue.push(nextTimerInstallEvent);
break;
}
// Timer event is before the current task, just skip it.
if (nextTimerInstallEvent.ts < currentTask.startTime) {
continue;
}
// We're right where we need to be, point the timerId to our `currentTask`
/** @type {string} */
// @ts-expect-error - timerId exists on `TimerInstall` events.
const timerId = nextTimerInstallEvent.args.data.timerId;
priorTaskData.timers.set(timerId, currentTask);
}
}
/**
* This function takes the start and end events from a thread and creates tasks from them.
* We do this by iterating through the start and end event arrays simultaneously. For each start
* event we attempt to find its end event.
*
* Because of this matching of start/end events and the need to be mutating our end events queue,
* we reverse the array to more efficiently `.pop()` them off rather than `.shift()` them off.
* While it's true the worst case runtime here is O(n^2), ~99.999% of the time the reverse loop is O(1)
* because the overwhelmingly common case is that end event for a given start event is simply the very next event in our queue.
*
* @param {LH.TraceEvent[]} taskStartEvents
* @param {LH.TraceEvent[]} taskEndEvents
* @param {number} traceEndTs
* @return {TaskNode[]}
*/
static _createTasksFromStartAndEndEvents(taskStartEvents, taskEndEvents, traceEndTs) {
/** @type {TaskNode[]} */
const tasks = [];
// Create a reversed copy of the array to avoid copying the rest of the queue on every mutation.
// i.e. pop() is O(1) while shift() is O(n), we take the earliest ts element off the queue *a lot*
// so we'll optimize for having the earliest timestamp events at the end of the array.
const taskEndEventsReverseQueue = taskEndEvents.slice().reverse();
for (let i = 0; i < taskStartEvents.length; i++) {
const taskStartEvent = taskStartEvents[i];
if (taskStartEvent.ph === 'X') {
// Task is a complete X event, we have all the information we need already.
tasks.push(MainThreadTasks._createNewTaskNode(taskStartEvent));
continue;
}
// Task is a B/E event pair, we need to find the matching E event.
let matchedEventIndex = -1;
let matchingNestedEventCount = 0;
let matchingNestedEventIndex = i + 1;
// We loop through the reversed end events queue from back to front because we still want to
// see end events in increasing timestamp order.
// While worst case we will loop through all events, the overwhelmingly common case is that
// the immediate next event is our event of interest which makes this loop typically O(1).
for (let j = taskEndEventsReverseQueue.length - 1; j >= 0; j--) {
const endEvent = taskEndEventsReverseQueue[j];
// We are considering an end event, so we'll count how many nested events we saw along the way.
for (; matchingNestedEventIndex < taskStartEvents.length; matchingNestedEventIndex++) {
if (taskStartEvents[matchingNestedEventIndex].ts >= endEvent.ts) break;
if (taskStartEvents[matchingNestedEventIndex].name === taskStartEvent.name) {
matchingNestedEventCount++;
}
}
// The event doesn't have a matching name, skip it.
if (endEvent.name !== taskStartEvent.name) continue;
// The event has a timestamp that is too early, skip it.
if (endEvent.ts < taskStartEvent.ts) continue;
// The event matches our name and happened after start, the last thing to check is if it was for a nested event.
if (matchingNestedEventCount > 0) {
// If it was for a nested event, decrement our counter and move on.
matchingNestedEventCount--;
continue;
}
// If it wasn't, we found our matching E event! Mark the index and stop the loop.
matchedEventIndex = j;
break;
}
/** @type {LH.TraceEvent} */
let taskEndEvent;
let unbounded = false;
if (matchedEventIndex === -1) {
// If we couldn't find an end event, we'll assume it's the end of the trace.
// If this creates invalid parent/child relationships it will be caught in the next step.
taskEndEvent = {...taskStartEvent, ph: 'E', ts: traceEndTs};
unbounded = true;
} else if (matchedEventIndex === taskEndEventsReverseQueue.length - 1) {
// Use .pop() in the common case where the immediately next event is needed.
// It's ~25x faster, https://jsperf.com/pop-vs-splice.
taskEndEvent = /** @type {LH.TraceEvent} */ (taskEndEventsReverseQueue.pop());
} else {
taskEndEvent = taskEndEventsReverseQueue.splice(matchedEventIndex, 1)[0];
}
const task = MainThreadTasks._createNewTaskNode(taskStartEvent, taskEndEvent);
task.unbounded = unbounded;
tasks.push(task);
}
if (taskEndEventsReverseQueue.length) {
throw new Error(
`Fatal trace logic error - ${taskEndEventsReverseQueue.length} unmatched end events`
);
}
return tasks;
}
/**
* This function iterates through the tasks to set the `.parent`/`.children` properties of tasks
* according to their implied nesting structure. If any of these relationships seem impossible based on
* the timestamps, this method will throw.
*
* @param {TaskNode[]} sortedTasks
* @param {LH.TraceEvent[]} timerInstallEvents
* @param {PriorTaskData} priorTaskData
*/
static _createTaskRelationships(sortedTasks, timerInstallEvents, priorTaskData) {
/** @type {TaskNode|undefined} */
let currentTask;
// Create a reversed copy of the array to avoid copying the rest of the queue on every mutation.
const timerInstallEventsReverseQueue = timerInstallEvents.slice().reverse();
for (let i = 0; i < sortedTasks.length; i++) {
let nextTask = sortedTasks[i];
// Do bookkeeping on XHR requester data.
if (nextTask.event.name === 'XHRReadyStateChange') {
const data = nextTask.event.args.data;
const url = data?.url;
if (data && url && data.readyState === 1) priorTaskData.xhrs.set(url, nextTask);
}
// This inner loop updates what our `currentTask` is at `nextTask.startTime - ε`.
// While `nextTask` starts after our `currentTask`, close out the task, popup to the parent, and repeat.
// If at the end `currentTask` is undefined, then `nextTask` is a toplevel task.
// Otherwise, `nextTask` is a child of `currentTask`.
while (
currentTask &&
Number.isFinite(currentTask.endTime) &&
currentTask.endTime <= nextTask.startTime
) {
MainThreadTasks._assignAllTimersUntilTs(
currentTask,
currentTask.endTime,
priorTaskData,
timerInstallEventsReverseQueue
);
currentTask = currentTask.parent;
}
// If there's a `currentTask`, `nextTask` must be a child.
// Set the `.parent`/`.children` relationships and timer bookkeeping accordingly.
if (currentTask) {
if (nextTask.endTime > currentTask.endTime) {
const timeDelta = nextTask.endTime - currentTask.endTime;
// The child task is taking longer than the parent task, which should be impossible.
// In reality these situations happen, so we allow for some flexibility in trace event times.
if (timeDelta < 1000) {
// It's less than 1ms, we'll let it slide by increasing the duration of the parent.
currentTask.endTime = nextTask.endTime;
currentTask.duration += timeDelta;
} else if (nextTask.unbounded) {
// It's ending at traceEndTs, it means we were missing the end event. We'll truncate it to the parent.
nextTask.endTime = currentTask.endTime;
nextTask.duration = nextTask.endTime - nextTask.startTime;
} else if (
nextTask.startTime - currentTask.startTime < 1000 &&
!currentTask.children.length
) {
// The true parent started less than 1ms before the true child, so we're looking at the relationship backwards.
// We'll let it slide and fix the situation by swapping the two tasks into their correct positions
// and increasing the duration of the parent.
// Below is an artistic rendition of the heirarchy we are trying to create.
// ████████████currentTask.parent██████████████████
// █████████nextTask██████████████
// ███████currentTask███████
const actualParentTask = nextTask;
const actualChildTask = currentTask;
// We'll grab the grandparent task to see if we need to fix it.
// We'll reassign it to be the parent of `actualParentTask` in a bit.
const grandparentTask = currentTask.parent;
if (grandparentTask) {
const lastGrandparentChildIndex = grandparentTask.children.length - 1;
if (grandparentTask.children[lastGrandparentChildIndex] !== actualChildTask) {
// The child we need to swap should always be the most recently added child.
// But if not then there's a serious bug in this code, so double-check.
throw new Error('Fatal trace logic error - impossible children');
}
grandparentTask.children.pop();
grandparentTask.children.push(actualParentTask);
}
actualParentTask.parent = grandparentTask;
actualParentTask.startTime = actualChildTask.startTime;
actualParentTask.duration = actualParentTask.endTime - actualParentTask.startTime;
currentTask = actualParentTask;
nextTask = actualChildTask;
} else {
// None of our workarounds matched. It's time to throw an error.
// When we fall into this error, it's usually because of one of two reasons.
// - There was slop in the opposite direction (child started 1ms before parent),
// the child was assumed to be parent instead, and another task already started.
// - The child timestamp ended more than 1ms after the parent.
// Two unrelated tasks where the first hangs over the second by >1ms is also this case.
// These have more complicated fixes, so handling separately https://github.com/GoogleChrome/lighthouse/pull/9491#discussion_r327331204.
/** @type {any} */
const error = new Error('Fatal trace logic error - child cannot end after parent');
error.timeDelta = timeDelta;
error.nextTaskEvent = nextTask.event;
error.nextTaskEndEvent = nextTask.endEvent;
error.nextTaskEndTime = nextTask.endTime;
error.currentTaskEvent = currentTask.event;
error.currentTaskEndEvent = currentTask.endEvent;
error.currentTaskEndTime = currentTask.endTime;
throw error;
}
}
nextTask.parent = currentTask;
currentTask.children.push(nextTask);
MainThreadTasks._assignAllTimersUntilTs(
currentTask,
nextTask.startTime,
priorTaskData,
timerInstallEventsReverseQueue
);
}
currentTask = nextTask;
}
if (currentTask) {
MainThreadTasks._assignAllTimersUntilTs(
currentTask,
currentTask.endTime,
priorTaskData,
timerInstallEventsReverseQueue
);
}
}
/**
* This function takes the raw trace events sorted in increasing timestamp order and outputs connected task nodes.
* To create the task heirarchy we make several passes over the events.
*
* 1. Create three arrays of X/B events, E events, and TimerInstall events.
* 2. Create tasks for each X/B event, throwing if a matching E event cannot be found for a given B.
* 3. Sort the tasks by ↑ startTime, ↓ duration.
* 4. Match each task to its parent, throwing if there is any invalid overlap between tasks.
* 5. Sort the tasks once more by ↑ startTime, ↓ duration in case they changed during relationship creation.
*
* @param {LH.TraceEvent[]} mainThreadEvents
* @param {PriorTaskData} priorTaskData
* @param {number} traceEndTs
* @return {TaskNode[]}
*/
static _createTasksFromEvents(mainThreadEvents, priorTaskData, traceEndTs) {
/** @type {Array<LH.TraceEvent>} */
const taskStartEvents = [];
/** @type {Array<LH.TraceEvent>} */
const taskEndEvents = [];
/** @type {Array<LH.TraceEvent>} */
const timerInstallEvents = [];
// Phase 1 - Create three arrays of X/B events, E events, and TimerInstall events.
for (const event of mainThreadEvents) {
if (event.ph === 'X' || event.ph === 'B') taskStartEvents.push(event);
if (event.ph === 'E') taskEndEvents.push(event);
if (event.name === 'TimerInstall') timerInstallEvents.push(event);
}
// Phase 2 - Create tasks for each taskStartEvent.
const tasks = MainThreadTasks._createTasksFromStartAndEndEvents(
taskStartEvents,
taskEndEvents,
traceEndTs
);
// Phase 3 - Sort the tasks by increasing startTime, decreasing duration.
const sortedTasks = tasks.sort(
(taskA, taskB) => taskA.startTime - taskB.startTime || taskB.duration - taskA.duration
);
// Phase 4 - Match each task to its parent.
MainThreadTasks._createTaskRelationships(sortedTasks, timerInstallEvents, priorTaskData);
// Phase 5 - Sort once more in case the order changed after wiring up relationships.
return sortedTasks.sort(
(taskA, taskB) => taskA.startTime - taskB.startTime || taskB.duration - taskA.duration
);
}
/**
* @param {TaskNode} task
* @param {TaskNode|undefined} parent
* @return {number}
*/
static _computeRecursiveSelfTime(task, parent) {
if (parent && task.endTime > parent.endTime) {
throw new Error('Fatal trace logic error - child cannot end after parent');
}
const childTime = task.children
.map(child => MainThreadTasks._computeRecursiveSelfTime(child, task))
.reduce((sum, child) => sum + child, 0);
task.selfTime = task.duration - childTime;
return task.duration;
}
/**
* @param {TaskNode} task
* @param {string[]} parentURLs
* @param {string[]} allURLsInTree
* @param {PriorTaskData} priorTaskData
*/
static _computeRecursiveAttributableURLs(task, parentURLs, allURLsInTree, priorTaskData) {
const args = task.event.args;
const argsData = {...(args.beginData || {}), ...(args.data || {})};
const frame = argsData.frame || '';
let frameURL = priorTaskData.frameURLsById.get(frame);
const stackFrameURLs = (argsData.stackTrace || []).map(entry => entry.url);
// If the frame was an `about:blank` style ad frame, the first real URL will be more relevant to the frame's URL.
const potentialFrameURL = stackFrameURLs[0];
if (frame && frameURL && frameURL.startsWith('about:') && potentialFrameURL) {
priorTaskData.frameURLsById.set(frame, potentialFrameURL);
frameURL = potentialFrameURL;
}
/** @type {Array<string|undefined>} */
let taskURLs = [];
switch (task.event.name) {
/**
* Some trace events reference a specific script URL that triggered them.
* Use this URL as the higher precedence attributable URL.
* @see https://cs.chromium.org/chromium/src/third_party/blink/renderer/devtools/front_end/timeline/TimelineUIUtils.js?type=cs&q=_initEventStyles+-f:out+f:devtools&sq=package:chromium&g=0&l=678-744
*/
case 'v8.compile':
case 'EvaluateScript':
case 'FunctionCall':
taskURLs = [argsData.url, frameURL];
break;
case 'v8.compileModule':
taskURLs = [task.event.args.fileName];
break;
case 'TimerFire': {
/** @type {string} */
// @ts-expect-error - timerId exists when name is TimerFire
const timerId = task.event.args.data.timerId;
const timerInstallerTaskNode = priorTaskData.timers.get(timerId);
if (!timerInstallerTaskNode) break;
taskURLs = timerInstallerTaskNode.attributableURLs;
break;
}
case 'ParseHTML':
taskURLs = [argsData.url, frameURL];
break;
case 'ParseAuthorStyleSheet':
taskURLs = [argsData.styleSheetUrl, frameURL];
break;
case 'UpdateLayoutTree':
case 'Layout':
case 'Paint':
// If we had a specific frame we were updating, just attribute it to that frame.
if (frameURL) {
taskURLs = [frameURL];
break;
}
// Otherwise, sometimes Chrome will split layout into separate toplevel task after things have settled.
// In this case we want to attribute the work to the prior task.
// Inherit from previous task only if we don't have any task data set already.
if (allURLsInTree.length) break;
taskURLs = priorTaskData.lastTaskURLs;
break;
case 'XHRReadyStateChange':
case 'XHRLoad': {
// Inherit from task that issued the XHR
const xhrUrl = argsData.url;
const readyState = argsData.readyState;
if (!xhrUrl || (typeof readyState === 'number' && readyState !== 4)) break;
const xhrRequesterTaskNode = priorTaskData.xhrs.get(xhrUrl);
if (!xhrRequesterTaskNode) break;
taskURLs = xhrRequesterTaskNode.attributableURLs;
break;
}
default:
taskURLs = [];
break;
}
/** @type {string[]} */
const attributableURLs = Array.from(parentURLs);
for (const url of [...taskURLs, ...stackFrameURLs]) {
// Don't add empty URLs
if (!url) continue;
// Add unique URLs to our overall tree.
if (!allURLsInTree.includes(url)) allURLsInTree.push(url);
// Don't add consecutive, duplicate URLs
if (attributableURLs[attributableURLs.length - 1] === url) continue;
attributableURLs.push(url);
}
task.attributableURLs = attributableURLs;
task.children.forEach(child =>
MainThreadTasks._computeRecursiveAttributableURLs(
child,
attributableURLs,
allURLsInTree,
priorTaskData
)
);
// After we've traversed the entire tree, set all the empty URLs to the set that we found in the task.
// This attributes the overhead of browser task management to the scripts that created the work rather than
// have it fall into the blackhole of "Other".
if (!attributableURLs.length && !task.parent && allURLsInTree.length) {
MainThreadTasks._setRecursiveEmptyAttributableURLs(task, allURLsInTree);
}
}
/**
* @param {TaskNode} task
* @param {Array<string>} urls
*/
static _setRecursiveEmptyAttributableURLs(task, urls) {
// If this task had any attributableURLs, its children will too, so we can stop here.
if (task.attributableURLs.length) return;
task.attributableURLs = urls.slice();
task.children.forEach(child =>
MainThreadTasks._setRecursiveEmptyAttributableURLs(
child,
urls
)
);
}
/**
* @param {TaskNode} task
* @param {TaskGroup} [parentGroup]
*/
static _computeRecursiveTaskGroup(task, parentGroup) {
const group = taskNameToGroup[task.event.name];
task.group = group || parentGroup || taskGroups.other;
task.children.forEach(child => MainThreadTasks._computeRecursiveTaskGroup(child, task.group));
}
/**
* @param {LH.TraceEvent[]} mainThreadEvents
* @param {Array<{id: string, url: string}>} frames
* @param {number} traceEndTs
* @param {number} [traceStartTs] Optional time-0 ts for tasks. Tasks before this point will have negative start/end times. Defaults to the first task found.
* @return {TaskNode[]}
*/
static getMainThreadTasks(mainThreadEvents, frames, traceEndTs, traceStartTs) {
const timers = new Map();
const xhrs = new Map();
const frameURLsById = new Map();
frames.forEach(({id, url}) => frameURLsById.set(id, url));
/** @type {Array<string>} */
const lastTaskURLs = [];
const priorTaskData = {timers, xhrs, frameURLsById, lastTaskURLs};
const tasks = MainThreadTasks._createTasksFromEvents(
mainThreadEvents,
priorTaskData,
traceEndTs
);
// Compute the recursive properties we couldn't compute earlier, starting at the toplevel tasks
for (const task of tasks) {
if (task.parent) continue;
MainThreadTasks._computeRecursiveSelfTime(task, undefined);
MainThreadTasks._computeRecursiveAttributableURLs(task, [], [], priorTaskData);
MainThreadTasks._computeRecursiveTaskGroup(task);
priorTaskData.lastTaskURLs = task.attributableURLs;
}
// Rebase all the times to be relative to start of trace and covert to ms.
const firstTs = traceStartTs ?? tasks[0].startTime;
for (const task of tasks) {
task.startTime = (task.startTime - firstTs) / 1000;
task.endTime = (task.endTime - firstTs) / 1000;
task.duration /= 1000;
task.selfTime /= 1000;
// Check that we have selfTime which captures all other timing data.
if (!Number.isFinite(task.selfTime)) {
throw new Error('Invalid task timing data');
}
}
return tasks;
}
/**
* Prints an artistic rendering of the task tree for easier debugability.
*
* @param {TaskNode[]} tasks
* @param {{printWidth?: number, startTime?: number, endTime?: number, taskLabelFn?: (node: TaskNode) => string}} options
* @return {string}
*/
static printTaskTreeToDebugString(tasks, options = {}) {
const traceEndMs = Math.max(...tasks.map(t => t.endTime), 0);
const {
printWidth = 100,
startTime = 0,
endTime = traceEndMs,
taskLabelFn = node => node.event.name,
} = options;
/** @param {TaskNode} task */
function computeTaskDepth(task) {
let depth = 0;
for (; task.parent; task = task.parent) depth++;
return depth;
}
const traceRange = endTime - startTime;
const characterInMs = traceRange / printWidth;
/** @type {Map<TaskNode, {id: string, task: TaskNode}>} */
const taskLegend = new Map();
/** @type {Map<number, TaskNode[]>} */
const tasksByDepth = new Map();
for (const task of tasks) {
if (task.startTime > endTime || task.endTime < startTime) continue;
const depth = computeTaskDepth(task);
const tasksAtDepth = tasksByDepth.get(depth) || [];
tasksAtDepth.push(task);
tasksByDepth.set(depth, tasksAtDepth);
// Create a user-friendly ID for new tasks using a capital letter.
// 65 is the ASCII code for 'A' and there are 26 letters in the english alphabet.
const id = String.fromCharCode(65 + (taskLegend.size % 26));
taskLegend.set(task, {id, task});
}
const debugStringLines = [
`Trace Duration: ${traceEndMs.toFixed(0)}ms`,
`Range: [${startTime}, ${endTime}]`,
`█ = ${characterInMs.toFixed(2)}ms`,
'',
];
const increasingDepth = Array.from(tasksByDepth.entries()).sort((a, b) => a[0] - b[0]);
for (const [, tasks] of increasingDepth) {
const taskRow = Array.from({length: printWidth}).map(() => ' ');
for (const task of tasks) {
const taskStart = Math.max(task.startTime, startTime);
const taskEnd = Math.min(task.endTime, endTime);
const {id} = taskLegend.get(task) || {id: '?'};
const startIndex = Math.floor(taskStart / characterInMs);
const endIndex = Math.floor(taskEnd / characterInMs);
const idIndex = Math.floor((startIndex + endIndex) / 2);
for (let i = startIndex; i <= endIndex; i++) taskRow[i] = '█';
for (let i = 0; i < id.length; i++) taskRow[idIndex] = id;
}
debugStringLines.push(taskRow.join(''));
}
debugStringLines.push('');
for (const {id, task} of taskLegend.values()) {
debugStringLines.push(`${id} = ${taskLabelFn(task)}`);
}
return debugStringLines.join('\n');
}
}
export {MainThreadTasks};

View File

@@ -0,0 +1,66 @@
// NOTE(review): generated declaration file (see the sourceMappingURL reference at the
// bottom) — presumably emitted from task-groups.js; edit the source, not this file. TODO confirm.
export type TaskGroupIds = 'parseHTML' | 'styleLayout' | 'paintCompositeRender' | 'scriptParseCompile' | 'scriptEvaluation' | 'garbageCollection' | 'other';
export type TaskGroup = {
    id: TaskGroupIds;
    label: string;
    traceEventNames: string[];
};
/**
 * @license Copyright 2017 The Lighthouse Authors. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
 */
/** @typedef {'parseHTML'|'styleLayout'|'paintCompositeRender'|'scriptParseCompile'|'scriptEvaluation'|'garbageCollection'|'other'} TaskGroupIds */
/**
 * @typedef TaskGroup
 * @property {TaskGroupIds} id
 * @property {string} label
 * @property {string[]} traceEventNames
 */
/**
 * Make sure the traceEventNames keep up with the ones in DevTools
 * @see https://cs.chromium.org/chromium/src/third_party/blink/renderer/devtools/front_end/timeline_model/TimelineModel.js?type=cs&q=TimelineModel.TimelineModel.RecordType+%3D&g=0&l=1156
 * @see https://cs.chromium.org/chromium/src/third_party/blink/renderer/devtools/front_end/timeline/TimelineUIUtils.js?type=cs&q=_initEventStyles+-f:out+f:devtools&sq=package:chromium&g=0&l=39
 * @type {{[P in TaskGroupIds]: {id: P, label: string, traceEventNames: Array<string>}}}
 */
export const taskGroups: {
    other: {
        id: "other";
        label: string;
        traceEventNames: Array<string>;
    };
    parseHTML: {
        id: "parseHTML";
        label: string;
        traceEventNames: Array<string>;
    };
    styleLayout: {
        id: "styleLayout";
        label: string;
        traceEventNames: Array<string>;
    };
    paintCompositeRender: {
        id: "paintCompositeRender";
        label: string;
        traceEventNames: Array<string>;
    };
    scriptParseCompile: {
        id: "scriptParseCompile";
        label: string;
        traceEventNames: Array<string>;
    };
    scriptEvaluation: {
        id: "scriptEvaluation";
        label: string;
        traceEventNames: Array<string>;
    };
    garbageCollection: {
        id: "garbageCollection";
        label: string;
        traceEventNames: Array<string>;
    };
};
/** @type {Object<string, TaskGroup>} */
export const taskNameToGroup: {
    [x: string]: TaskGroup;
};
//# sourceMappingURL=task-groups.d.ts.map

View File

@@ -0,0 +1,111 @@
/**
* @license Copyright 2017 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
/** @typedef {'parseHTML'|'styleLayout'|'paintCompositeRender'|'scriptParseCompile'|'scriptEvaluation'|'garbageCollection'|'other'} TaskGroupIds */
/**
* @typedef TaskGroup
* @property {TaskGroupIds} id
* @property {string} label
* @property {string[]} traceEventNames
*/
/**
 * Make sure the traceEventNames keep up with the ones in DevTools
 * @see https://cs.chromium.org/chromium/src/third_party/blink/renderer/devtools/front_end/timeline_model/TimelineModel.js?type=cs&q=TimelineModel.TimelineModel.RecordType+%3D&g=0&l=1156
 * @see https://cs.chromium.org/chromium/src/third_party/blink/renderer/devtools/front_end/timeline/TimelineUIUtils.js?type=cs&q=_initEventStyles+-f:out+f:devtools&sq=package:chromium&g=0&l=39
 * @type {{[P in TaskGroupIds]: {id: P, label: string, traceEventNames: Array<string>}}}
 */
// Each group's `id` mirrors its object key, and the key order here determines the order in
// which `taskNameToGroup` (below) is populated.
const taskGroups = {
  parseHTML: {
    id: 'parseHTML',
    label: 'Parse HTML & CSS',
    traceEventNames: ['ParseHTML', 'ParseAuthorStyleSheet'],
  },
  styleLayout: {
    id: 'styleLayout',
    label: 'Style & Layout',
    traceEventNames: [
      'ScheduleStyleRecalculation',
      'UpdateLayoutTree', // previously RecalculateStyles
      'InvalidateLayout',
      'Layout',
    ],
  },
  paintCompositeRender: {
    id: 'paintCompositeRender',
    label: 'Rendering',
    traceEventNames: [
      'Animation',
      'HitTest',
      'PaintSetup',
      'Paint',
      'PaintImage',
      'RasterTask', // Previously Rasterize
      'ScrollLayer',
      'UpdateLayer',
      'UpdateLayerTree',
      'CompositeLayers',
      'PrePaint', // New name for UpdateLayerTree: https://crrev.com/c/3519012
    ],
  },
  scriptParseCompile: {
    id: 'scriptParseCompile',
    label: 'Script Parsing & Compilation',
    traceEventNames: ['v8.compile', 'v8.compileModule', 'v8.parseOnBackground'],
  },
  scriptEvaluation: {
    id: 'scriptEvaluation',
    label: 'Script Evaluation',
    traceEventNames: [
      'EventDispatch',
      'EvaluateScript',
      'v8.evaluateModule',
      'FunctionCall',
      'TimerFire',
      'FireIdleCallback',
      'FireAnimationFrame',
      'RunMicrotasks',
      'V8.Execute',
    ],
  },
  garbageCollection: {
    id: 'garbageCollection',
    label: 'Garbage Collection',
    traceEventNames: [
      'MinorGC', // Previously GCEvent
      'MajorGC',
      'BlinkGC.AtomicPhase', // Previously ThreadState::performIdleLazySweep, ThreadState::completeSweep, BlinkGCMarking
      // Kept for compatibility on older traces
      'ThreadState::performIdleLazySweep',
      'ThreadState::completeSweep',
      'BlinkGCMarking',
    ],
  },
  // Catch-all bucket; main-thread-tasks.js also uses `taskGroups.other` as the fallback
  // group for events with no mapping.
  other: {
    id: 'other',
    label: 'Other',
    traceEventNames: [
      'MessageLoop::RunTask',
      'TaskQueueManager::ProcessTaskFromWorkQueue',
      'ThreadControllerImpl::DoWork',
    ],
  },
};
/**
 * Reverse index derived from `taskGroups`: trace event name (e.g. 'ParseHTML') to the
 * TaskGroup that owns it. If a name appeared in more than one group, the last one
 * enumerated would win — matching the original loop's overwrite semantics.
 * @type {Object<string, TaskGroup>}
 */
const taskNameToGroup = Object.fromEntries(
  Object.values(taskGroups).flatMap(group =>
    group.traceEventNames.map(traceEventName => [traceEventName, group])
  )
);
export {
taskGroups,
taskNameToGroup,
};

Some files were not shown because too many files have changed in this diff Show More