node/lib/internal/perf/usertiming.js
legendecas 062f8e3730
perf_hooks: web performance timeline compliance
All API introduced in this PR are compliant with web
[performance-timeline](https://w3c.github.io/performance-timeline)
spec. "performance-timeline" is listed as supported web spec in the doc
https://nodejs.org/docs/latest/api/perf_hooks.html#perf_hooks_performance_measurement_apis.

Changes summary:
1. Add new supported wpt test subsets: user-timing and
  performance-timeline.
2. Add support for `Performance.getEntries`,
  `Performance.getEntriesByName` and `Performance.getEntriesByType`
  to synchronously fetch buffered performance entries. This means
  the user should invoke `Performance.clearMarks` and
  `Performance.clearMeasures` to clear buffered entries and prevent
  those entries from being kept alive forever.
3. Add support (again after https://github.com/nodejs/node/pull/37136)
  for `buffered` flags for `PerformanceObserver`.
4. Fixes `PerformanceMark` and `PerformanceMeasure` wpt compliance
  issues.
5. Only user-created performance entries will be buffered globally. This
  behavior should be compliant with
  https://w3c.github.io/timing-entrytypes-registry/#registry.

With the new ability to fetch user-created performance entries
synchronously, the issues raised in
https://github.com/nodejs/diagnostics/issues/464#issuecomment-861920116
could also be fixed.

PR-URL: https://github.com/nodejs/node/pull/39297
Reviewed-By: James M Snell <jasnell@gmail.com>
2021-07-25 23:43:31 +08:00

181 lines
4.7 KiB
JavaScript

'use strict';
const {
SafeMap,
SafeSet,
SafeArrayIterator,
SymbolToStringTag,
} = primordials;
const { InternalPerformanceEntry } = require('internal/perf/performance_entry');
const { now } = require('internal/perf/utils');
const { enqueue } = require('internal/perf/observe');
const nodeTiming = require('internal/perf/nodetiming');
const {
validateNumber,
validateObject,
validateString,
} = require('internal/validators');
const {
codes: {
ERR_INVALID_ARG_VALUE,
ERR_PERFORMANCE_INVALID_TIMESTAMP,
ERR_PERFORMANCE_MEASURE_INVALID_OPTIONS,
},
} = require('internal/errors');
const { structuredClone, lazyDOMException } = require('internal/util');
const markTimings = new SafeMap();
const nodeTimingReadOnlyAttributes = new SafeSet(new SafeArrayIterator([
'nodeStart',
'v8Start',
'environment',
'loopStart',
'loopExit',
'bootstrapComplete',
]));
/**
 * Resolves a mark reference into a numeric timestamp.
 * Accepts: undefined (passed through), a non-negative number (used
 * verbatim), a nodeTiming milestone name, or the name of a previously
 * recorded user mark.
 * @throws {ERR_PERFORMANCE_INVALID_TIMESTAMP} for negative numbers.
 * @throws {DOMException} SyntaxError when the named mark was never set.
 */
function getMark(name) {
  if (name === undefined)
    return undefined;

  // Raw timestamps pass straight through, but must not be negative.
  if (typeof name === 'number') {
    if (name < 0)
      throw new ERR_PERFORMANCE_INVALID_TIMESTAMP(name);
    return name;
  }

  const markName = `${name}`;

  // Milestone names resolve against the Node.js timing object.
  if (nodeTimingReadOnlyAttributes.has(markName))
    return nodeTiming[markName];

  const timestamp = markTimings.get(markName);
  if (timestamp === undefined) {
    throw lazyDOMException(
      `The "${markName}" performance mark has not been set`, 'SyntaxError');
  }
  return timestamp;
}
class PerformanceMark extends InternalPerformanceEntry {
  /**
   * Creates a user-defined 'mark' entry and records its timestamp so a
   * later measure() call can reference it by name via getMark().
   * @param {*} name - Coerced to string; must not collide with a
   *   nodeTiming milestone name.
   * @param {object} [options] - Optional `startTime` (non-negative
   *   number, defaults to now()) and structured-cloneable `detail`.
   */
  constructor(name, options) {
    name = `${name}`;
    // Milestone names belong to nodeTiming and cannot be shadowed.
    if (nodeTimingReadOnlyAttributes.has(name))
      throw new ERR_INVALID_ARG_VALUE('name', name);

    options ??= {};
    validateObject(options, 'options');

    const startTime = options.startTime ?? now();
    validateNumber(startTime, 'startTime');
    if (startTime < 0)
      throw new ERR_PERFORMANCE_INVALID_TIMESTAMP(startTime);

    // Remember the timestamp for later lookup by measure().
    markTimings.set(name, startTime);

    const rawDetail = options.detail;
    const detail = rawDetail == null ? null : structuredClone(rawDetail);
    super(name, 'mark', startTime, 0, detail);
  }

  get [SymbolToStringTag]() {
    return 'PerformanceMark';
  }
}
// A 'measure' performance entry. Instances are created by measure()
// below with an already-resolved start timestamp and duration.
class PerformanceMeasure extends InternalPerformanceEntry {
  constructor(name, start, duration, detail) {
    super(name, 'measure', start, duration, detail);
  }
  get [SymbolToStringTag]() {
    return 'PerformanceMeasure';
  }
}
/**
 * Implements performance.mark(): creates a PerformanceMark, publishes
 * it to observers via enqueue(), and returns it.
 */
function mark(name, options = {}) {
  // Named `entry` to avoid shadowing this function.
  const entry = new PerformanceMark(name, options);
  enqueue(entry);
  return entry;
}
// Derives { start, duration } for performance.measure() from its
// overloaded arguments: `startOrMeasureOptions` may be a mark
// name/timestamp or an options object carrying some combination of
// start/end/duration. The two resolution chains below are
// order-sensitive; keep the branch order intact.
function calculateStartDuration(startOrMeasureOptions, endMark) {
  startOrMeasureOptions ??= 0;
  let start;
  let end;
  let duration;
  let optionsValid = false;
  if (typeof startOrMeasureOptions === 'object') {
    ({ start, end, duration } = startOrMeasureOptions);
    // The object only counts as measure options when it supplies a
    // start or an end; otherwise it is ignored below.
    optionsValid = start !== undefined || end !== undefined;
  }
  if (optionsValid) {
    // A measure-options object and an endMark argument are mutually
    // exclusive.
    if (endMark !== undefined) {
      throw new ERR_PERFORMANCE_MEASURE_INVALID_OPTIONS(
        'endMark must not be specified');
    }
    if (start === undefined && end === undefined) {
      throw new ERR_PERFORMANCE_MEASURE_INVALID_OPTIONS(
        'One of options.start or options.end is required');
    }
    // start, end, and duration together over-determine the interval.
    if (start !== undefined && end !== undefined && duration !== undefined) {
      throw new ERR_PERFORMANCE_MEASURE_INVALID_OPTIONS(
        'Must not have options.start, options.end, and ' +
        'options.duration specified');
    }
  }
  // Resolve the end timestamp: explicit endMark, then options.end,
  // then options.start + options.duration, falling back to now().
  if (endMark !== undefined) {
    end = getMark(endMark);
  } else if (optionsValid && end !== undefined) {
    end = getMark(end);
  } else if (optionsValid && start !== undefined && duration !== undefined) {
    end = getMark(start) + getMark(duration);
  } else {
    end = now();
  }
  // Resolve the start timestamp: string startMark, then options.start,
  // then end - options.duration, falling back to the time origin (0).
  if (typeof startOrMeasureOptions === 'string') {
    start = getMark(startOrMeasureOptions);
  } else if (optionsValid && start !== undefined) {
    start = getMark(start);
  } else if (optionsValid && duration !== undefined && end !== undefined) {
    start = end - getMark(duration);
  } else {
    start = 0;
  }
  duration = end - start;
  return { start, duration };
}
/**
 * Implements performance.measure(): resolves the interval from the
 * overloaded arguments, creates a PerformanceMeasure, publishes it to
 * observers via enqueue(), and returns it.
 */
function measure(name, startOrMeasureOptions, endMark) {
  validateString(name, 'name');
  const { start, duration } =
    calculateStartDuration(startOrMeasureOptions, endMark);

  // `detail` only exists on the measure-options form; clone it so the
  // entry does not alias caller-owned state.
  const rawDetail = startOrMeasureOptions?.detail;
  const detail = rawDetail == null ? null : structuredClone(rawDetail);

  const entry = new PerformanceMeasure(name, start, duration, detail);
  enqueue(entry);
  return entry;
}
/**
 * Removes a recorded mark timestamp by name, or all of them when no
 * name is given. Milestone names are rejected since they were never
 * user marks to begin with.
 */
function clearMarkTimings(name) {
  if (name === undefined) {
    markTimings.clear();
    return;
  }
  name = `${name}`;
  if (nodeTimingReadOnlyAttributes.has(name))
    throw new ERR_INVALID_ARG_VALUE('name', name);
  markTimings.delete(name);
}
// Public surface consumed by internal/perf/performance.js.
module.exports = {
  PerformanceMark,
  PerformanceMeasure,
  clearMarkTimings,
  mark,
  measure,
};