
Commit 08b6cc4

Merge pull request #170 from Polymer/performance
Support measurements from the browser performance timeline API
2 parents: ff284b0 + f788b36

14 files changed: +361 -97 lines

CHANGELOG.md (+15)

@@ -7,6 +7,21 @@ project adheres to [Semantic Versioning](http://semver.org/).
 
 ## Unreleased
 
+- Add ability to pull measurements from the browser performance measurement API,
+  e.g.:
+
+  ```
+  "benchmarks": [
+    {
+      "measurement": {
+        "performanceEntry": {
+          "name": "foo"
+        }
+      }
+    }
+  ]
+  ```
+
 - Fix `main` entry in package.json to point to `lib/cli.js`.
 
 - Added more fields to JSON output file to more closely match table printed to the console

README.md (+43 -2)

@@ -83,8 +83,49 @@ confidence in them.
 
 ## Measurement modes
 
-Tachometer currently supports three kinds of time interval measurements,
-controlled with the `--measure` flag.
+Tachometer supports four kinds of time interval measurements, controlled with
+the `measurement` config file property, or the `--measure` flag.
+
+#### Performance API
+
+Retrieve a measure, mark, or paint timing from the
+[`performance.getEntriesByName`](https://developer.mozilla.org/en-US/docs/Web/API/Performance/getEntriesByName)
+API. Note this mode can only be used with a config file.
+
+For example, in your benchmark:
+
+```javascript
+performance.mark('foo-start');
+// Do some work ...
+performance.mark('foo-stop');
+performance.measure('foo', 'foo-start', 'foo-stop');
+```
+
+And in your config file:
+
+```json
+"benchmarks": [
+  {
+    "measurement": {
+      "performanceEntry": {
+        "name": "foo"
+      }
+    }
+  }
+]
+```
+
+The following performance entry types are supported:
+
+- [`measure`](https://developer.mozilla.org/en-US/docs/Web/API/PerformanceMeasure):
+  Retrieve the `duration` of a user-defined interval between two marks. Use for
+  measuring the timing of a specific chunk of your code.
+- [`mark`](https://developer.mozilla.org/en-US/docs/Web/API/User_Timing_API/Using_the_User_Timing_API#Performance_measures):
+  Retrieve the `startTime` of a user-defined instant. Use for measuring the time
+  between initial page navigation and a specific point in your code.
+- [`paint`](https://developer.mozilla.org/en-US/docs/Web/API/PerformancePaintTiming):
+  Retrieve the `startTime` of a built-in paint measurement (e.g.
+  `first-contentful-paint`).
 
 #### Callback
 
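As a supplementary illustration (not part of this diff), the `mark` and `paint` entry types are selected the same way, purely by the entry's `name`; the mark name below is made up for the example:

```typescript
// Illustrative only, not from this commit. A single User Timing mark: with
// "performanceEntry": {"name": "hydration-done"} in the config, tachometer
// would report this mark's startTime (milliseconds since navigation start).
performance.mark('hydration-done');

// Built-in paint timings need no page code at all; a config whose
// performanceEntry name is 'first-contentful-paint' selects the browser's
// own paint entry, per the list above.
```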

config.schema.json (+35 -6)

@@ -88,13 +88,29 @@
       "type": "array"
     },
     "measurement": {
-      "description": "Which time interval to measure.\n\nOptions:\n - callback: bench.start() to bench.stop() (default for fully qualified\n URLs.\n - fcp: first contentful paint (default for local paths)\n - global: result returned from window.tachometerResult (or custom\n expression set via measurementExpression)",
-      "enum": [
-        "callback",
-        "fcp",
-        "global"
+      "anyOf": [
+        {
+          "additionalProperties": false,
+          "properties": {
+            "performanceEntry": {
+              "$ref": "#/definitions/PerformanceEntryCriteria"
+            }
+          },
+          "required": [
+            "performanceEntry"
+          ],
+          "type": "object"
+        },
+        {
+          "enum": [
+            "callback",
+            "fcp",
+            "global"
+          ],
+          "type": "string"
+        }
       ],
-      "type": "string"
+      "description": "Which time interval to measure.\n\nOptions:\n - callback: bench.start() to bench.stop() (default for local paths)\n - fcp: first contentful paint (default for fully qualified URLs)\n - global: result returned from window.tachometerResult (or custom\n expression set via measurementExpression)\n - {\n performanceEntry: {\n //\nhttps://developer.mozilla.org/en-US/docs/Web/API/PerformanceEntry/name\n name: string;\n }\n }"
     },
     "measurementExpression": {
       "description": "Expression to use to retrieve global result. Defaults to\n`window.tachometerResult`.",

@@ -238,6 +254,19 @@
       "description": "A mapping from NPM package name to version specifier, as used in a\npackage.json's \"dependencies\" and \"devDependencies\".",
       "type": "object"
     },
+    "PerformanceEntryCriteria": {
+      "additionalProperties": false,
+      "description": "Criteria for matching a Performance Entry.",
+      "properties": {
+        "name": {
+          "type": "string"
+        }
+      },
+      "required": [
+        "name"
+      ],
+      "type": "object"
+    },
     "SafariConfig": {
       "additionalProperties": false,
       "properties": {

src/automatic.ts (+8 -13)

@@ -16,7 +16,8 @@ import ProgressBar = require('progress');
 import ansi = require('ansi-escape-sequences');
 
 import {jsonOutput, legacyJsonOutput} from './json-output';
-import {browserSignature, makeDriver, openAndSwitchToNewTab, pollForGlobalResult, pollForFirstContentfulPaint} from './browser';
+import {browserSignature, makeDriver, openAndSwitchToNewTab} from './browser';
+import {measure} from './measure';
 import {BenchmarkResult, BenchmarkSpec} from './types';
 import {formatCsvStats, formatCsvRaw} from './csv';
 import {ResultStats, ResultStatsWithDifferences, horizonsResolved, summaryStats, computeDifferences} from './stats';

@@ -25,6 +26,7 @@ import {Config} from './config';
 import * as github from './github';
 import {Server} from './server';
 import {specUrl} from './specs';
+import {wait} from './util';
 
 function combineResults(results: BenchmarkResult[]): BenchmarkResult {
   const combined: BenchmarkResult = {

@@ -92,25 +94,18 @@ export async function automaticMode(
     const {driver, initialTabHandle} =
         browsers.get(browserSignature(spec.browser))!;
 
-    let millis;
+    let millis: number|undefined;
     let bytesSent = 0;
     let userAgent = '';
     // TODO(aomarks) Make maxAttempts and timeouts configurable.
     const maxAttempts = 3;
     for (let attempt = 1;; attempt++) {
       await openAndSwitchToNewTab(driver, spec.browser);
       await driver.get(url);
-
-      if (spec.measurement === 'fcp') {
-        millis = await pollForFirstContentfulPaint(driver);
-      } else if (spec.measurement === 'global') {
-        millis = await pollForGlobalResult(
-            driver, spec.measurementExpression || 'undefined');
-      } else { // bench.start() and bench.stop() callback
-        if (server === undefined) {
-          throw new Error('Internal error: no server for spec');
-        }
-        millis = (await server.nextResults()).millis;
+      for (let waited = 0; millis === undefined && waited <= 10000;
+           waited += 50) {
+        await wait(50);
+        millis = await measure(driver, spec, server);
       }
 
       // Close the active tab (but not the whole browser, since the
src/browser.ts

+1-67
Original file line numberDiff line numberDiff line change
@@ -275,70 +275,4 @@ export async function openAndSwitchToNewTab(
275275
await driverWithSendDevToolsCommand.sendDevToolsCommand(
276276
'Emulation.setCPUThrottlingRate', {rate: config.cpuThrottlingRate});
277277
}
278-
}
279-
280-
/**
281-
* Return the First Contentful Paint (FCP) time (millisecond interval since
282-
* navigation) for the given driver. Polls every 100 milliseconds, and returns
283-
* undefined if no FCP was found after 10 seconds.
284-
*
285-
* https://w3c.github.io/paint-timing/#first-contentful-paint
286-
* https://developers.google.com/web/tools/lighthouse/audits/first-contentful-paint
287-
*/
288-
export async function pollForFirstContentfulPaint(driver: webdriver.WebDriver):
289-
Promise<number|undefined> {
290-
for (let waited = 0; waited <= 10000; waited += 100) {
291-
await wait(100);
292-
const entries = await driver.executeScript(
293-
'return window.performance.getEntriesByName(' +
294-
'"first-contentful-paint");') as PerformanceEntry[];
295-
if (entries.length > 0) {
296-
return entries[0].startTime;
297-
}
298-
}
299-
}
300-
301-
/**
302-
* Poll for the `window.tachometerResult` global and return it once it is set.
303-
* Polls every 50 milliseconds, and returns undefined if no result was found
304-
* after 10 seconds. Throws if a value was found, but it was not a number, or it
305-
* was a negative number.
306-
*/
307-
export async function pollForGlobalResult(
308-
driver: webdriver.WebDriver,
309-
expression: string): Promise<number|undefined> {
310-
// Both here and for FCP above, we could automatically tune the poll time
311-
// after we get our first result, so that when the script is fast we spend
312-
// less time waiting, and so that when the script is slow we interfere it
313-
// less frequently.
314-
for (let waited = 0; waited <= 10000; waited += 50) {
315-
await wait(50);
316-
const result =
317-
await driver.executeScript(`return (${expression});`) as unknown;
318-
if (result !== undefined && result !== null) {
319-
if (typeof result !== 'number') {
320-
throw new Error(
321-
`'${expression}' was type ` +
322-
`${typeof result}, expected number.`);
323-
}
324-
if (result < 0) {
325-
throw new Error(`'${expression}' was negative: ${result}`);
326-
}
327-
return result;
328-
}
329-
}
330-
}
331-
332-
/**
333-
* https://developer.mozilla.org/en-US/docs/Web/API/PerformanceEntry
334-
*
335-
* Note a more complete interface for this is defined in the standard
336-
* lib.dom.d.ts, but we don't want to depend on that since it would make all
337-
* DOM types ambiently defined.
338-
*/
339-
interface PerformanceEntry {
340-
name: string;
341-
startTime: number;
342-
}
343-
344-
const wait = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));
278+
}
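The `wait` helper deleted at the bottom of this block is what `automatic.ts` now imports from `./util` (see the `import {wait} from './util'` line above). Presumably it moved there unchanged; for reference:

```typescript
// Presumably now exported from src/util.ts (not shown in this view), matching
// the one-liner deleted from browser.ts above.
export const wait = (ms: number) =>
    new Promise((resolve) => setTimeout(resolve, ms));
```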

src/configfile.ts (+25 -5)

@@ -15,7 +15,7 @@ import * as jsonschema from 'jsonschema';
 import {BrowserConfig, BrowserName, parseBrowserConfigString, validateBrowserConfig} from './browser';
 import {Config, parseHorizons, urlFromLocalPath} from './config';
 import * as defaults from './defaults';
-import {BenchmarkSpec, Measurement, PackageDependencyMap} from './types';
+import {BenchmarkSpec, Measurement, measurements, PackageDependencyMap} from './types';
 import {isHttpUrl} from './util';
 
 /**

@@ -102,11 +102,17 @@ interface ConfigFileBenchmark {
    * Which time interval to measure.
    *
    * Options:
-   *   - callback: bench.start() to bench.stop() (default for fully qualified
-   *     URLs.
-   *   - fcp: first contentful paint (default for local paths)
+   *   - callback: bench.start() to bench.stop() (default for local paths)
+   *   - fcp: first contentful paint (default for fully qualified URLs)
    *   - global: result returned from window.tachometerResult (or custom
    *     expression set via measurementExpression)
+   *   - {
+   *       performanceEntry: {
+   *         //
+   * https://developer.mozilla.org/en-US/docs/Web/API/PerformanceEntry/name
+   *         name: string;
+   *       }
+   *     }
    */
   measurement?: Measurement;
 
@@ -270,7 +276,8 @@ export async function parseConfigFile(parsedJson: unknown):
   const result =
       jsonschema.validate(parsedJson, schema, {propertyName: 'config'});
   if (result.errors.length > 0) {
-    throw new Error(result.errors[0].toString());
+    throw new Error(
+        [...new Set(result.errors.map(customizeJsonSchemaError))].join('\n'));
   }
   const validated = parsedJson as ConfigFile;
   const root = validated.root || '.';

@@ -293,6 +300,19 @@
   };
 }
 
+/**
+ * Some of the automatically generated jsonschema errors are unclear, e.g. when
+ * there is a union of complex types they are reported as "[schema1],
+ * [schema2]" etc.
+ */
+function customizeJsonSchemaError(error: jsonschema.ValidationError): string {
+  if (error.property.match(/^config\.benchmarks\[\d+\]\.measurement$/)) {
+    return `${error.property} is not one of: ${[...measurements].join(', ')}` +
+        ' or an object like `performanceEntry: string`';
+  }
+  return error.toString();
+}
+
 async function parseBenchmark(benchmark: ConfigFileBenchmark, root: string):
     Promise<Partial<BenchmarkSpec>> {
   const spec: Partial<BenchmarkSpec> = {};
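With `customizeJsonSchemaError` in place, an invalid value such as `"measurement": "bogus"` should now surface as a single readable message along the lines of "config.benchmarks[0].measurement is not one of: callback, fcp, global or an object like performanceEntry: string", rather than one raw error per `anyOf` sub-schema (wording inferred from the code above).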

src/flags.ts (+1 -1)

@@ -146,7 +146,7 @@ export const optDefs: commandLineUsage.OptionDefinition[] = [
         '* global: set window.tachometerResult = <milliseconds>\n' +
         '* fcp: first contentful paint',
     type: (str: string): string => {
-      if (!measurements.has(str as Measurement)) {
+      if (!measurements.has(str)) {
        throw new Error(
            `Expected --measure flag to be one of: ` +
            `${[...measurements.values()].join(', ')} ` +
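The `as Measurement` cast is presumably no longer needed here because, with `Measurement` widened to include the `performanceEntry` object form, the `measurements` set now holds only the plain string modes, so `.has()` accepts an ordinary string.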
