From 062f8e37307336d7f0a3e12f4ca252fe4b44a49d Mon Sep 17 00:00:00 2001
From: legendecas
Date: Thu, 24 Jun 2021 23:33:09 +0800
Subject: [PATCH] perf_hooks: web performance timeline compliance

All APIs introduced in this PR are compliant with the web
[performance-timeline](https://w3c.github.io/performance-timeline) spec.
"performance-timeline" is listed as a supported web spec in the docs:
https://nodejs.org/docs/latest/api/perf_hooks.html#perf_hooks_performance_measurement_apis.

Summary of changes:
1. Add new supported wpt test subsets: user-timing and performance-timeline.
2. Add support for `Performance.getEntries`, `Performance.getEntriesByName` and
   `Performance.getEntriesByType` to synchronously fetch buffered performance
   entries. This means users should invoke `Performance.clearMarks` and
   `Performance.clearMeasures` to clear buffered entries and prevent them from
   being kept alive forever.
3. Add support (again, after https://github.com/nodejs/node/pull/37136) for the
   `buffered` flag on `PerformanceObserver`.
4. Fix `PerformanceMark` and `PerformanceMeasure` wpt compliance issues.
5. Only user-created performance entries are buffered globally. This behavior
   complies with https://w3c.github.io/timing-entrytypes-registry/#registry.

With the new ability to fetch user-created performance entries synchronously,
the issues raised in
https://github.com/nodejs/diagnostics/issues/464#issuecomment-861920116 can
also be fixed.

PR-URL: https://github.com/nodejs/node/pull/39297
Reviewed-By: James M Snell
--- benchmark/perf_hooks/usertiming.js | 19 +- lib/internal/errors.js | 2 - lib/internal/perf/observe.js | 131 +++++++ lib/internal/perf/performance.js | 60 ++- lib/internal/perf/performance_entry.js | 3 + lib/internal/perf/usertiming.js | 103 +++-- lib/internal/util.js | 16 + lib/perf_hooks.js | 6 +- test/common/index.js | 6 + test/fixtures/wpt/LICENSE.md | 2 +- test/fixtures/wpt/README.md | 4 +- .../wpt/interfaces/performance-timeline.idl | 49 +++ test/fixtures/wpt/interfaces/user-timing.idl | 34 ++ .../wpt/performance-timeline/META.yml | 4 + .../buffered-flag-after-timeout.any.js | 11 + .../buffered-flag-observer.any.js | 15 + .../case-sensitivity.any.js | 64 ++++ .../get-invalid-entries.html | 27 ++ .../performance-timeline/idlharness.any.js | 25 ++ .../multiple-buffered-flag-observers.any.js | 32 ++ .../performance-timeline/not-clonable.html | 10 + .../observer-buffered-false.any.js | 12 + .../performanceentry-tojson.any.js | 21 + .../performanceobservers.js | 44 +++ .../po-callback-mutate.any.js | 66 ++++ ...o-disconnect-removes-observed-types.any.js | 19 + .../performance-timeline/po-disconnect.any.js | 37 ++ .../po-entries-sort.any.js | 64 ++++ .../performance-timeline/po-getentries.any.js | 38 ++ .../po-mark-measure.any.js | 61 +++ .../po-observe-repeated-type.any.js | 17 + .../po-observe-type.any.js | 64 ++++ .../performance-timeline/po-observe.any.js | 63 +++ .../wpt/performance-timeline/po-observe.html | 86 +++++ .../wpt/performance-timeline/po-resource.html | 48 +++ .../po-takeRecords.any.js | 34 ++ .../resources/postmessage-entry.html | 17 + .../performance-timeline/resources/square.png | Bin 0 -> 249 bytes .../resources/worker-invalid-entries.js | 6 + .../worker-with-performance-observer.js | 6 + .../supportedEntryTypes.any.js | 19 + .../webtiming-resolution.any.js | 25 ++ .../worker-with-performance-observer.html | 18 + test/fixtures/wpt/user-timing/META.yml | 4 + .../wpt/user-timing/buffered-flag.any.js | 27 ++ .../wpt/user-timing/case-sensitivity.any.js | 25 ++ 
test/fixtures/wpt/user-timing/clearMarks.html | 84 ++++ .../wpt/user-timing/clearMeasures.html | 77 ++++ .../wpt/user-timing/clear_all_marks.any.js | 17 + .../wpt/user-timing/clear_all_measures.any.js | 21 + .../clear_non_existent_mark.any.js | 26 ++ .../clear_non_existent_measure.any.js | 29 ++ .../wpt/user-timing/clear_one_mark.any.js | 26 ++ .../wpt/user-timing/clear_one_measure.any.js | 29 ++ .../wpt/user-timing/entry_type.any.js | 13 + .../wpt/user-timing/idlharness.any.js | 33 ++ .../invoke_with_timing_attributes.html | 35 ++ .../invoke_with_timing_attributes.worker.js | 25 ++ .../user-timing/invoke_without_parameter.html | 26 ++ .../user-timing/mark-entry-constructor.any.js | 40 ++ .../wpt/user-timing/mark-errors.any.js | 15 + test/fixtures/wpt/user-timing/mark-l3.any.js | 39 ++ .../mark-measure-feature-detection.html | 36 ++ .../mark-measure-return-objects.any.js | 37 ++ test/fixtures/wpt/user-timing/mark.any.js | 118 ++++++ test/fixtures/wpt/user-timing/mark.html | 58 +++ .../wpt/user-timing/mark_exceptions.html | 41 ++ .../wpt/user-timing/measure-exceptions.html | 49 +++ .../wpt/user-timing/measure-l3.any.js | 35 ++ .../wpt/user-timing/measure-with-dict.any.js | 112 ++++++ test/fixtures/wpt/user-timing/measure.html | 362 ++++++++++++++++++ ...ure_associated_with_navigation_timing.html | 57 +++ .../wpt/user-timing/measure_exception.html | 34 ++ .../measure_exceptions_navigation_timing.html | 70 ++++ .../measure_navigation_timing.html | 205 ++++++++++ .../wpt/user-timing/measure_syntax_err.any.js | 33 ++ test/fixtures/wpt/user-timing/measures.html | 66 ++++ .../performance-measure-invalid.worker.js | 9 + .../resources/user-timing-helper.js | 30 ++ .../resources/webperftestharness.js | 124 ++++++ .../resources/webperftestharnessextension.js | 202 ++++++++++ .../structured-serialize-detail.any.js | 64 ++++ .../supported-usertiming-types.any.js | 37 ++ .../wpt/user-timing/user-timing-tojson.html | 44 +++ .../wpt/user-timing/user_timing_exists.any.js | 12 + test/fixtures/wpt/versions.json | 10 +- test/parallel/test-perf-hooks-usertiming.js | 13 +- test/wpt/status/performance-timeline.json | 1 + test/wpt/status/user-timing.json | 11 + test/wpt/test-performance-timeline.js | 27 ++ test/wpt/test-user-timing.js | 27 ++ 91 files changed, 3844 insertions(+), 59 deletions(-) create mode 100644 test/fixtures/wpt/interfaces/performance-timeline.idl create mode 100644 test/fixtures/wpt/interfaces/user-timing.idl create mode 100644 test/fixtures/wpt/performance-timeline/META.yml create mode 100644 test/fixtures/wpt/performance-timeline/buffered-flag-after-timeout.any.js create mode 100644 test/fixtures/wpt/performance-timeline/buffered-flag-observer.any.js create mode 100644 test/fixtures/wpt/performance-timeline/case-sensitivity.any.js create mode 100644 test/fixtures/wpt/performance-timeline/get-invalid-entries.html create mode 100644 test/fixtures/wpt/performance-timeline/idlharness.any.js create mode 100644 test/fixtures/wpt/performance-timeline/multiple-buffered-flag-observers.any.js create mode 100644 test/fixtures/wpt/performance-timeline/not-clonable.html create mode 100644 test/fixtures/wpt/performance-timeline/observer-buffered-false.any.js create mode 100644 test/fixtures/wpt/performance-timeline/performanceentry-tojson.any.js create mode 100644 test/fixtures/wpt/performance-timeline/performanceobservers.js create mode 100644 test/fixtures/wpt/performance-timeline/po-callback-mutate.any.js create mode 100644 
test/fixtures/wpt/performance-timeline/po-disconnect-removes-observed-types.any.js create mode 100644 test/fixtures/wpt/performance-timeline/po-disconnect.any.js create mode 100644 test/fixtures/wpt/performance-timeline/po-entries-sort.any.js create mode 100644 test/fixtures/wpt/performance-timeline/po-getentries.any.js create mode 100644 test/fixtures/wpt/performance-timeline/po-mark-measure.any.js create mode 100644 test/fixtures/wpt/performance-timeline/po-observe-repeated-type.any.js create mode 100644 test/fixtures/wpt/performance-timeline/po-observe-type.any.js create mode 100644 test/fixtures/wpt/performance-timeline/po-observe.any.js create mode 100644 test/fixtures/wpt/performance-timeline/po-observe.html create mode 100644 test/fixtures/wpt/performance-timeline/po-resource.html create mode 100644 test/fixtures/wpt/performance-timeline/po-takeRecords.any.js create mode 100644 test/fixtures/wpt/performance-timeline/resources/postmessage-entry.html create mode 100644 test/fixtures/wpt/performance-timeline/resources/square.png create mode 100644 test/fixtures/wpt/performance-timeline/resources/worker-invalid-entries.js create mode 100644 test/fixtures/wpt/performance-timeline/resources/worker-with-performance-observer.js create mode 100644 test/fixtures/wpt/performance-timeline/supportedEntryTypes.any.js create mode 100644 test/fixtures/wpt/performance-timeline/webtiming-resolution.any.js create mode 100644 test/fixtures/wpt/performance-timeline/worker-with-performance-observer.html create mode 100644 test/fixtures/wpt/user-timing/META.yml create mode 100644 test/fixtures/wpt/user-timing/buffered-flag.any.js create mode 100644 test/fixtures/wpt/user-timing/case-sensitivity.any.js create mode 100644 test/fixtures/wpt/user-timing/clearMarks.html create mode 100644 test/fixtures/wpt/user-timing/clearMeasures.html create mode 100644 test/fixtures/wpt/user-timing/clear_all_marks.any.js create mode 100644 test/fixtures/wpt/user-timing/clear_all_measures.any.js create mode 100644 test/fixtures/wpt/user-timing/clear_non_existent_mark.any.js create mode 100644 test/fixtures/wpt/user-timing/clear_non_existent_measure.any.js create mode 100644 test/fixtures/wpt/user-timing/clear_one_mark.any.js create mode 100644 test/fixtures/wpt/user-timing/clear_one_measure.any.js create mode 100644 test/fixtures/wpt/user-timing/entry_type.any.js create mode 100644 test/fixtures/wpt/user-timing/idlharness.any.js create mode 100644 test/fixtures/wpt/user-timing/invoke_with_timing_attributes.html create mode 100644 test/fixtures/wpt/user-timing/invoke_with_timing_attributes.worker.js create mode 100644 test/fixtures/wpt/user-timing/invoke_without_parameter.html create mode 100644 test/fixtures/wpt/user-timing/mark-entry-constructor.any.js create mode 100644 test/fixtures/wpt/user-timing/mark-errors.any.js create mode 100644 test/fixtures/wpt/user-timing/mark-l3.any.js create mode 100644 test/fixtures/wpt/user-timing/mark-measure-feature-detection.html create mode 100644 test/fixtures/wpt/user-timing/mark-measure-return-objects.any.js create mode 100644 test/fixtures/wpt/user-timing/mark.any.js create mode 100644 test/fixtures/wpt/user-timing/mark.html create mode 100644 test/fixtures/wpt/user-timing/mark_exceptions.html create mode 100644 test/fixtures/wpt/user-timing/measure-exceptions.html create mode 100644 test/fixtures/wpt/user-timing/measure-l3.any.js create mode 100644 test/fixtures/wpt/user-timing/measure-with-dict.any.js create mode 100644 test/fixtures/wpt/user-timing/measure.html create mode 100644 
test/fixtures/wpt/user-timing/measure_associated_with_navigation_timing.html create mode 100644 test/fixtures/wpt/user-timing/measure_exception.html create mode 100644 test/fixtures/wpt/user-timing/measure_exceptions_navigation_timing.html create mode 100644 test/fixtures/wpt/user-timing/measure_navigation_timing.html create mode 100644 test/fixtures/wpt/user-timing/measure_syntax_err.any.js create mode 100644 test/fixtures/wpt/user-timing/measures.html create mode 100644 test/fixtures/wpt/user-timing/performance-measure-invalid.worker.js create mode 100644 test/fixtures/wpt/user-timing/resources/user-timing-helper.js create mode 100644 test/fixtures/wpt/user-timing/resources/webperftestharness.js create mode 100644 test/fixtures/wpt/user-timing/resources/webperftestharnessextension.js create mode 100644 test/fixtures/wpt/user-timing/structured-serialize-detail.any.js create mode 100644 test/fixtures/wpt/user-timing/supported-usertiming-types.any.js create mode 100644 test/fixtures/wpt/user-timing/user-timing-tojson.html create mode 100644 test/fixtures/wpt/user-timing/user_timing_exists.any.js create mode 100644 test/wpt/status/performance-timeline.json create mode 100644 test/wpt/status/user-timing.json create mode 100644 test/wpt/test-performance-timeline.js create mode 100644 test/wpt/test-user-timing.js diff --git a/benchmark/perf_hooks/usertiming.js b/benchmark/perf_hooks/usertiming.js index ae797351ad78cc..24a53a116785df 100644 --- a/benchmark/perf_hooks/usertiming.js +++ b/benchmark/perf_hooks/usertiming.js @@ -8,24 +8,27 @@ const { } = require('perf_hooks'); const bench = common.createBenchmark(main, { - n: [1e5] + n: [1e5], + observe: ['all', 'measure'], }); function test() { performance.mark('a'); - setImmediate(() => { - performance.mark('b'); - performance.measure('a to b', 'a', 'b'); - }); + performance.mark('b'); + performance.measure('a to b', 'a', 'b'); } -function main({ n }) { +function main({ n, observe }) { + const entryTypes = observe === 'all' ? + [ 'mark', 'measure' ] : + [ observe ]; const obs = new PerformanceObserver(() => { bench.end(n); }); - obs.observe({ entryTypes: ['measure'], buffered: true }); + obs.observe({ entryTypes, buffered: true }); bench.start(); - for (let i = 0; i < n; i++) + performance.mark('start'); + for (let i = 0; i < 1e5; i++) test(); } diff --git a/lib/internal/errors.js b/lib/internal/errors.js index 56a0ef8fb06a14..701a531e8cc6c5 100644 --- a/lib/internal/errors.js +++ b/lib/internal/errors.js @@ -1266,8 +1266,6 @@ E('ERR_INVALID_PACKAGE_TARGET', pkgPath}package.json${base ? ` imported from ${base}` : ''}${relError ? '; targets must start with "./"' : ''}`; }, Error); -E('ERR_INVALID_PERFORMANCE_MARK', - 'The "%s" performance mark has not been set', Error); E('ERR_INVALID_PROTOCOL', 'Protocol "%s" not supported. 
Expected "%s"', TypeError); diff --git a/lib/internal/perf/observe.js b/lib/internal/perf/observe.js index 8ec8512434510b..af09d33c5405f3 100644 --- a/lib/internal/perf/observe.js +++ b/lib/internal/perf/observe.js @@ -4,10 +4,13 @@ const { ArrayFrom, ArrayIsArray, ArrayPrototypeFilter, + ArrayPrototypeFlatMap, ArrayPrototypeIncludes, ArrayPrototypePush, + ArrayPrototypePushApply, ArrayPrototypeSlice, ArrayPrototypeSort, + Error, ObjectDefineProperties, ObjectFreeze, ObjectKeys, @@ -31,6 +34,7 @@ const { const { InternalPerformanceEntry, isPerformanceEntry, + kBufferNext, } = require('internal/perf/performance_entry'); const { @@ -83,6 +87,16 @@ const kSupportedEntryTypes = ObjectFreeze([ 'measure', ]); +// Performance timeline entry Buffers +const markEntryBuffer = createBuffer(); +const measureEntryBuffer = createBuffer(); +const kMaxPerformanceEntryBuffers = 1e6; +const kClearPerformanceEntryBuffers = ObjectFreeze({ + 'mark': 'performance.clearMarks', + 'measure': 'performance.clearMeasures', +}); +const kWarnedEntryTypes = new SafeMap(); + const kObservers = new SafeSet(); const kPending = new SafeSet(); let isPending = false; @@ -190,6 +204,7 @@ class PerformanceObserver { const { entryTypes, type, + buffered, } = { ...options }; if (entryTypes === undefined && type === undefined) throw new ERR_MISSING_ARGS('options.entryTypes', 'options.type'); @@ -229,6 +244,13 @@ class PerformanceObserver { return; this[kEntryTypes].add(type); maybeIncrementObserverCount(type); + if (buffered) { + const entries = filterBufferMapByNameAndType(undefined, type); + ArrayPrototypePushApply(this[kBuffer], entries); + kPending.add(this); + if (kPending.size) + queuePending(); + } } if (this[kEntryTypes].size) @@ -291,6 +313,99 @@ function enqueue(entry) { for (const obs of kObservers) { obs[kMaybeBuffer](entry); } + + const entryType = entry.entryType; + let buffer; + if (entryType === 'mark') { + buffer = markEntryBuffer; + } else if (entryType === 'measure') { + buffer = measureEntryBuffer; + } else { + return; + } + + const count = buffer.count + 1; + buffer.count = count; + if (count === 1) { + buffer.head = entry; + buffer.tail = entry; + return; + } + buffer.tail[kBufferNext] = entry; + buffer.tail = entry; + + if (count > kMaxPerformanceEntryBuffers && + !kWarnedEntryTypes.has(entryType)) { + kWarnedEntryTypes.set(entryType, true); + // No error code for this since it is a Warning + // eslint-disable-next-line no-restricted-syntax + const w = new Error('Possible perf_hooks memory leak detected. ' + + `${count} ${entryType} entries added to the global ` + + 'performance entry buffer. Use ' + + `${kClearPerformanceEntryBuffers[entryType]} to ` + + 'clear the buffer.'); + w.name = 'MaxPerformanceEntryBufferExceededWarning'; + w.entryType = entryType; + w.count = count; + process.emitWarning(w); + } +} + +function clearEntriesFromBuffer(type, name) { + let buffer; + if (type === 'mark') { + buffer = markEntryBuffer; + } else if (type === 'measure') { + buffer = measureEntryBuffer; + } else { + return; + } + if (name === undefined) { + resetBuffer(buffer); + return; + } + + let head = null; + let tail = null; + for (let entry = buffer.head; entry !== null; entry = entry[kBufferNext]) { + if (entry.name !== name) { + head = head ?? 
entry; + tail = entry; + continue; + } + if (tail === null) { + continue; + } + tail[kBufferNext] = entry[kBufferNext]; + } + buffer.head = head; + buffer.tail = tail; +} + +function filterBufferMapByNameAndType(name, type) { + let bufferList; + if (type === 'mark') { + bufferList = [markEntryBuffer]; + } else if (type === 'measure') { + bufferList = [measureEntryBuffer]; + } else if (type !== undefined) { + // Unrecognized type; + return []; + } else { + bufferList = [markEntryBuffer, measureEntryBuffer]; + } + return ArrayPrototypeFlatMap(bufferList, + (buffer) => filterBufferByName(buffer, name)); +} + +function filterBufferByName(buffer, name) { + const arr = []; + for (let entry = buffer.head; entry !== null; entry = entry[kBufferNext]) { + if (name === undefined || entry.name === name) { + ArrayPrototypePush(arr, entry); + } + } + return arr; } function observerCallback(name, type, startTime, duration, details) { @@ -338,8 +453,24 @@ function hasObserver(type) { return observerCounts[observerType] > 0; } +function createBuffer() { + return { + head: null, + tail: null, + count: 0, + }; +} + +function resetBuffer(buffer) { + buffer.head = null; + buffer.tail = null; + buffer.count = 0; +} + module.exports = { PerformanceObserver, enqueue, hasObserver, + clearEntriesFromBuffer, + filterBufferMapByNameAndType, }; diff --git a/lib/internal/perf/performance.js b/lib/internal/perf/performance.js index ca4aed90e4e270..2f75eb143a6ffe 100644 --- a/lib/internal/perf/performance.js +++ b/lib/internal/perf/performance.js @@ -16,8 +16,12 @@ const { now } = require('internal/perf/utils'); const { mark, measure, - clearMarks, + clearMarkTimings, } = require('internal/perf/usertiming'); +const { + clearEntriesFromBuffer, + filterBufferMapByNameAndType, +} = require('internal/perf/observe'); const eventLoopUtilization = require('internal/perf/event_loop_utilization'); const nodeTiming = require('internal/perf/nodetiming'); @@ -48,7 +52,6 @@ class Performance extends EventTarget { timeOrigin: this.timeOrigin, }, opts)}`; } - } function toJSON() { @@ -59,6 +62,39 @@ function toJSON() { }; } +function clearMarks(name) { + if (name !== undefined) { + name = `${name}`; + } + clearMarkTimings(name); + clearEntriesFromBuffer('mark', name); +} + +function clearMeasures(name) { + if (name !== undefined) { + name = `${name}`; + } + clearEntriesFromBuffer('measure', name); +} + +function getEntries() { + return filterBufferMapByNameAndType(); +} + +function getEntriesByName(name) { + if (name !== undefined) { + name = `${name}`; + } + return filterBufferMapByNameAndType(name, undefined); +} + +function getEntriesByType(type) { + if (type !== undefined) { + type = `${type}`; + } + return filterBufferMapByNameAndType(undefined, type); +} + class InternalPerformance extends EventTarget {} InternalPerformance.prototype.constructor = Performance.prototype.constructor; ObjectSetPrototypeOf(InternalPerformance.prototype, Performance.prototype); @@ -69,11 +105,31 @@ ObjectDefineProperties(Performance.prototype, { enumerable: false, value: clearMarks, }, + clearMeasures: { + configurable: true, + enumerable: false, + value: clearMeasures, + }, eventLoopUtilization: { configurable: true, enumerable: false, value: eventLoopUtilization, }, + getEntries: { + configurable: true, + enumerable: false, + value: getEntries, + }, + getEntriesByName: { + configurable: true, + enumerable: false, + value: getEntriesByName, + }, + getEntriesByType: { + configurable: true, + enumerable: false, + value: getEntriesByType, + }, mark: { 
configurable: true, enumerable: false, diff --git a/lib/internal/perf/performance_entry.js b/lib/internal/perf/performance_entry.js index f9f1c9e8966e2d..8fcb0ca3fcdc0c 100644 --- a/lib/internal/perf/performance_entry.js +++ b/lib/internal/perf/performance_entry.js @@ -17,6 +17,7 @@ const kType = Symbol('kType'); const kStart = Symbol('kStart'); const kDuration = Symbol('kDuration'); const kDetail = Symbol('kDetail'); +const kBufferNext = Symbol('kBufferNext'); function isPerformanceEntry(obj) { return obj?.[kName] !== undefined; @@ -67,6 +68,7 @@ class InternalPerformanceEntry { this[kStart] = start; this[kDuration] = duration; this[kDetail] = detail; + this[kBufferNext] = null; } } @@ -79,4 +81,5 @@ module.exports = { InternalPerformanceEntry, PerformanceEntry, isPerformanceEntry, + kBufferNext, }; diff --git a/lib/internal/perf/usertiming.js b/lib/internal/perf/usertiming.js index f83091de1919a8..496c75deb3b78f 100644 --- a/lib/internal/perf/usertiming.js +++ b/lib/internal/perf/usertiming.js @@ -1,10 +1,10 @@ 'use strict'; const { - ObjectKeys, SafeMap, SafeSet, SafeArrayIterator, + SymbolToStringTag, } = primordials; const { InternalPerformanceEntry } = require('internal/perf/performance_entry'); @@ -21,13 +21,14 @@ const { const { codes: { ERR_INVALID_ARG_VALUE, - ERR_INVALID_PERFORMANCE_MARK, ERR_PERFORMANCE_INVALID_TIMESTAMP, ERR_PERFORMANCE_MEASURE_INVALID_OPTIONS, }, } = require('internal/errors'); -const marks = new SafeMap(); +const { structuredClone, lazyDOMException } = require('internal/util'); + +const markTimings = new SafeMap(); const nodeTimingReadOnlyAttributes = new SafeSet(new SafeArrayIterator([ 'nodeStart', @@ -48,60 +49,69 @@ function getMark(name) { name = `${name}`; if (nodeTimingReadOnlyAttributes.has(name)) return nodeTiming[name]; - const ts = marks.get(name); + const ts = markTimings.get(name); if (ts === undefined) - throw new ERR_INVALID_PERFORMANCE_MARK(name); + throw lazyDOMException(`The "${name}" performance mark has not been set`, 'SyntaxError'); return ts; } class PerformanceMark extends InternalPerformanceEntry { - constructor(name, options = {}) { + constructor(name, options) { name = `${name}`; if (nodeTimingReadOnlyAttributes.has(name)) throw new ERR_INVALID_ARG_VALUE('name', name); + options ??= {}; validateObject(options, 'options'); - const { - detail, - startTime = now(), - } = options; + const startTime = options.startTime ?? now(); validateNumber(startTime, 'startTime'); if (startTime < 0) throw new ERR_PERFORMANCE_INVALID_TIMESTAMP(startTime); - marks.set(name, startTime); + markTimings.set(name, startTime); + + let detail = options.detail; + detail = detail != null ? 
+ structuredClone(detail) : + null; super(name, 'mark', startTime, 0, detail); - enqueue(this); + } + + get [SymbolToStringTag]() { + return 'PerformanceMark'; } } class PerformanceMeasure extends InternalPerformanceEntry { constructor(name, start, duration, detail) { super(name, 'measure', start, duration, detail); - enqueue(this); + } + + get [SymbolToStringTag]() { + return 'PerformanceMeasure'; } } function mark(name, options = {}) { - return new PerformanceMark(name, options); + const mark = new PerformanceMark(name, options); + enqueue(mark); + return mark; } function calculateStartDuration(startOrMeasureOptions, endMark) { startOrMeasureOptions ??= 0; - let detail; let start; let end; let duration; - if (typeof startOrMeasureOptions === 'object' && - ObjectKeys(startOrMeasureOptions).length) { - ({ - start, - end, - duration, - detail, - } = startOrMeasureOptions); + let optionsValid = false; + if (typeof startOrMeasureOptions === 'object') { + ({ start, end, duration } = startOrMeasureOptions); + optionsValid = start !== undefined || end !== undefined; + } + if (optionsValid) { if (endMark !== undefined) { throw new ERR_PERFORMANCE_MEASURE_INVALID_OPTIONS( 'endMark must not be specified'); } + if (start === undefined && end === undefined) { throw new ERR_PERFORMANCE_MEASURE_INVALID_OPTIONS( 'One of options.start or options.end is required'); @@ -111,21 +121,30 @@ function calculateStartDuration(startOrMeasureOptions, endMark) { 'Must not have options.start, options.end, and ' + 'options.duration specified'); } - start = getMark(start); - duration = getMark(duration); - } else { - start = getMark(startOrMeasureOptions); } - end = getMark(endMark || end) ?? - ((start !== undefined && duration !== undefined) ? - start + duration : now()); - - start ??= (duration !== undefined) ? end - duration : 0; + if (endMark !== undefined) { + end = getMark(endMark); + } else if (optionsValid && end !== undefined) { + end = getMark(end); + } else if (optionsValid && start !== undefined && duration !== undefined) { + end = getMark(start) + getMark(duration); + } else { + end = now(); + } - duration ??= end - start; + if (typeof startOrMeasureOptions === 'string') { + start = getMark(startOrMeasureOptions); + } else if (optionsValid && start !== undefined) { + start = getMark(start); + } else if (optionsValid && duration !== undefined && end !== undefined) { + start = end - getMark(duration); + } else { + start = 0; + } - return { start, duration, detail }; + duration = end - start; + return { start, duration }; } function measure(name, startOrMeasureOptions, endMark) { @@ -133,25 +152,29 @@ function measure(name, startOrMeasureOptions, endMark) { const { start, duration, - detail } = calculateStartDuration(startOrMeasureOptions, endMark); - return new PerformanceMeasure(name, start, duration, detail); + let detail = startOrMeasureOptions?.detail; + detail = detail != null ? 
structuredClone(detail) : null; + const measure = new PerformanceMeasure(name, start, duration, detail); + enqueue(measure); + return measure; } -function clearMarks(name) { +function clearMarkTimings(name) { if (name !== undefined) { name = `${name}`; if (nodeTimingReadOnlyAttributes.has(name)) throw new ERR_INVALID_ARG_VALUE('name', name); - marks.delete(name); + markTimings.delete(name); return; } - marks.clear(); + markTimings.clear(); } module.exports = { PerformanceMark, - clearMarks, + PerformanceMeasure, + clearMarkTimings, mark, measure, }; diff --git a/lib/internal/util.js b/lib/internal/util.js index 101fbec67775b0..9158fc8e52431e 100644 --- a/lib/internal/util.js +++ b/lib/internal/util.js @@ -449,6 +449,21 @@ const lazyDOMException = hideStackFrames((message, name) => { return new DOMException(message, name); }); +function structuredClone(value) { + const { + DefaultSerializer, + DefaultDeserializer, + } = require('v8'); + const ser = new DefaultSerializer(); + ser._getDataCloneError = hideStackFrames((message) => + lazyDOMException(message, 'DataCloneError')); + ser.writeValue(value); + const serialized = ser.releaseBuffer(); + + const des = new DefaultDeserializer(serialized); + return des.readValue(); +} + module.exports = { assertCrypto, cachedResult, @@ -471,6 +486,7 @@ module.exports = { promisify, sleep, spliceOne, + structuredClone, removeColors, // Symbol used to customize promisify conversion diff --git a/lib/perf_hooks.js b/lib/perf_hooks.js index 339d3ca4ff0ab4..9916fd5ce40c35 100644 --- a/lib/perf_hooks.js +++ b/lib/perf_hooks.js @@ -10,7 +10,10 @@ const { const { PerformanceEntry } = require('internal/perf/performance_entry'); const { PerformanceObserver } = require('internal/perf/observe'); -const { PerformanceMark } = require('internal/perf/usertiming'); +const { + PerformanceMark, + PerformanceMeasure, +} = require('internal/perf/usertiming'); const { InternalPerformance } = require('internal/perf/performance'); const { @@ -22,6 +25,7 @@ const monitorEventLoopDelay = require('internal/perf/event_loop_delay'); module.exports = { PerformanceEntry, PerformanceMark, + PerformanceMeasure, PerformanceObserver, monitorEventLoopDelay, createHistogram, diff --git a/test/common/index.js b/test/common/index.js index 0f0885997ce9a1..2ac4538cbea804 100644 --- a/test/common/index.js +++ b/test/common/index.js @@ -286,6 +286,12 @@ if (global.gc) { if (global.performance) { knownGlobals.push(global.performance); } +if (global.PerformanceMark) { + knownGlobals.push(global.PerformanceMark); +} +if (global.PerformanceMeasure) { + knownGlobals.push(global.PerformanceMeasure); +} function allowGlobals(...allowlist) { knownGlobals = knownGlobals.concat(allowlist); diff --git a/test/fixtures/wpt/LICENSE.md b/test/fixtures/wpt/LICENSE.md index 39c46d03ac2988..ad4858c8745cfa 100644 --- a/test/fixtures/wpt/LICENSE.md +++ b/test/fixtures/wpt/LICENSE.md @@ -1,6 +1,6 @@ # The 3-Clause BSD License -Copyright © web-platform-tests contributors +Copyright 2019 web-platform-tests contributors Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: diff --git a/test/fixtures/wpt/README.md b/test/fixtures/wpt/README.md index 6019abd8aa8769..27261f56e46467 100644 --- a/test/fixtures/wpt/README.md +++ b/test/fixtures/wpt/README.md @@ -19,10 +19,12 @@ Last update: - html/webappapis/atob: https://github.com/web-platform-tests/wpt/tree/f267e1dca6/html/webappapis/atob - html/webappapis/microtask-queuing: 
https://github.com/web-platform-tests/wpt/tree/2c5c3c4c27/html/webappapis/microtask-queuing - html/webappapis/timers: https://github.com/web-platform-tests/wpt/tree/5873f2d8f1/html/webappapis/timers -- interfaces: https://github.com/web-platform-tests/wpt/tree/fcb671ed8b/interfaces +- interfaces: https://github.com/web-platform-tests/wpt/tree/80a4176623/interfaces +- performance-timeline: https://github.com/web-platform-tests/wpt/tree/17ebc3aea0/performance-timeline - resources: https://github.com/web-platform-tests/wpt/tree/972ca5b669/resources - streams: https://github.com/web-platform-tests/wpt/tree/8f60d94439/streams - url: https://github.com/web-platform-tests/wpt/tree/77d54aa9e0/url +- user-timing: https://github.com/web-platform-tests/wpt/tree/df24fb604e/user-timing [Web Platform Tests]: https://github.com/web-platform-tests/wpt [`git node wpt`]: https://github.com/nodejs/node-core-utils/blob/main/docs/git-node.md#git-node-wpt diff --git a/test/fixtures/wpt/interfaces/performance-timeline.idl b/test/fixtures/wpt/interfaces/performance-timeline.idl new file mode 100644 index 00000000000000..d3a5a278055eba --- /dev/null +++ b/test/fixtures/wpt/interfaces/performance-timeline.idl @@ -0,0 +1,49 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Performance Timeline Level 2 (https://w3c.github.io/performance-timeline/) + +partial interface Performance { + PerformanceEntryList getEntries (); + PerformanceEntryList getEntriesByType (DOMString type); + PerformanceEntryList getEntriesByName (DOMString name, optional DOMString type); +}; +typedef sequence PerformanceEntryList; + +[Exposed=(Window,Worker)] +interface PerformanceEntry { + readonly attribute DOMString name; + readonly attribute DOMString entryType; + readonly attribute DOMHighResTimeStamp startTime; + readonly attribute DOMHighResTimeStamp duration; + [Default] object toJSON(); +}; + +callback PerformanceObserverCallback = undefined (PerformanceObserverEntryList entries, + PerformanceObserver observer, + optional PerformanceObserverCallbackOptions options = {}); +[Exposed=(Window,Worker)] +interface PerformanceObserver { + constructor(PerformanceObserverCallback callback); + undefined observe (optional PerformanceObserverInit options = {}); + undefined disconnect (); + PerformanceEntryList takeRecords(); + [SameObject] static readonly attribute FrozenArray supportedEntryTypes; +}; + +dictionary PerformanceObserverCallbackOptions { + unsigned long long droppedEntriesCount; +}; + +dictionary PerformanceObserverInit { + sequence entryTypes; + DOMString type; + boolean buffered; +}; + +[Exposed=(Window,Worker)] +interface PerformanceObserverEntryList { + PerformanceEntryList getEntries(); + PerformanceEntryList getEntriesByType (DOMString type); + PerformanceEntryList getEntriesByName (DOMString name, optional DOMString type); +}; diff --git a/test/fixtures/wpt/interfaces/user-timing.idl b/test/fixtures/wpt/interfaces/user-timing.idl new file mode 100644 index 00000000000000..28ee8aac2b19a6 --- /dev/null +++ b/test/fixtures/wpt/interfaces/user-timing.idl @@ -0,0 +1,34 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: User Timing Level 3 (https://w3c.github.io/user-timing/) + +dictionary PerformanceMarkOptions { + any detail; + DOMHighResTimeStamp startTime; +}; + +dictionary PerformanceMeasureOptions { + any detail; + (DOMString or 
DOMHighResTimeStamp) start; + DOMHighResTimeStamp duration; + (DOMString or DOMHighResTimeStamp) end; +}; + +partial interface Performance { + PerformanceMark mark(DOMString markName, optional PerformanceMarkOptions markOptions = {}); + undefined clearMarks(optional DOMString markName); + PerformanceMeasure measure(DOMString measureName, optional (DOMString or PerformanceMeasureOptions) startOrMeasureOptions = {}, optional DOMString endMark); + undefined clearMeasures(optional DOMString measureName); +}; + +[Exposed=(Window,Worker)] +interface PerformanceMark : PerformanceEntry { + constructor(DOMString markName, optional PerformanceMarkOptions markOptions = {}); + readonly attribute any detail; +}; + +[Exposed=(Window,Worker)] +interface PerformanceMeasure : PerformanceEntry { + readonly attribute any detail; +}; diff --git a/test/fixtures/wpt/performance-timeline/META.yml b/test/fixtures/wpt/performance-timeline/META.yml new file mode 100644 index 00000000000000..89fae1db0d9b7a --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/META.yml @@ -0,0 +1,4 @@ +spec: https://w3c.github.io/performance-timeline/ +suggested_reviewers: + - plehegar + - igrigorik diff --git a/test/fixtures/wpt/performance-timeline/buffered-flag-after-timeout.any.js b/test/fixtures/wpt/performance-timeline/buffered-flag-after-timeout.any.js new file mode 100644 index 00000000000000..08b3e323146585 --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/buffered-flag-after-timeout.any.js @@ -0,0 +1,11 @@ +async_test(t => { + performance.mark('foo'); + t.step_timeout(() => { + // After a timeout, PerformanceObserver should still receive entry if using the buffered flag. + new PerformanceObserver(t.step_func_done(list => { + const entries = list.getEntries(); + assert_equals(entries.length, 1, 'There should be 1 mark entry.'); + assert_equals(entries[0].entryType, 'mark'); + })).observe({type: 'mark', buffered: true}); + }, 100); +}, 'PerformanceObserver with buffered flag sees entry after timeout'); diff --git a/test/fixtures/wpt/performance-timeline/buffered-flag-observer.any.js b/test/fixtures/wpt/performance-timeline/buffered-flag-observer.any.js new file mode 100644 index 00000000000000..31dc39c128ad55 --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/buffered-flag-observer.any.js @@ -0,0 +1,15 @@ +async_test( t=> { + for (let i = 0; i < 50; i++) + performance.mark('foo' + i); + let marksCreated = 50; + let marksReceived = 0; + new PerformanceObserver(list => { + marksReceived += list.getEntries().length; + if (marksCreated < 100) { + performance.mark('bar' + marksCreated); + marksCreated++; + } + if (marksReceived == 100) + t.done(); + }).observe({type: 'mark', buffered: true}); +}, 'PerformanceObserver with buffered flag should see past and future entries.'); diff --git a/test/fixtures/wpt/performance-timeline/case-sensitivity.any.js b/test/fixtures/wpt/performance-timeline/case-sensitivity.any.js new file mode 100644 index 00000000000000..3a98505ae67f7d --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/case-sensitivity.any.js @@ -0,0 +1,64 @@ + test(function () { + assert_equals(typeof self.performance, "object"); + assert_equals(typeof self.performance.getEntriesByType, "function"); + var lowerList = self.performance.getEntriesByType("resource"); + var upperList = self.performance.getEntriesByType("RESOURCE"); + var mixedList = self.performance.getEntriesByType("ReSoUrCe"); + + assert_not_equals(lowerList.length, 0, "Resource entries exist"); + assert_equals(upperList.length, 0, 
"getEntriesByType('RESOURCE').length"); + assert_equals(mixedList.length, 0, "getEntriesByType('ReSoUrCe').length"); + + }, "getEntriesByType values are case sensitive"); + + test(function () { + assert_equals(typeof self.performance, "object"); + assert_equals(typeof self.performance.getEntriesByName, "function"); + var origin = self.location.protocol + "//" + self.location.host; + var location1 = origin.toUpperCase() + "/resources/testharness.js"; + var location2 = self.location.protocol + "//" + + self.location.host.toUpperCase() + "/resources/testharness.js"; + var lowerList = self.performance.getEntriesByName(origin + "/resources/testharness.js"); + var upperList = self.performance.getEntriesByName(location1); + var mixedList = self.performance.getEntriesByName(location2); + + assert_equals(lowerList.length, 1, "Resource entry exist"); + assert_equals(upperList.length, 0, "getEntriesByName('" + location1 + "').length"); + assert_equals(mixedList.length, 0, "getEntriesByName('" + location2 + "').length"); + + }, "getEntriesByName values are case sensitive"); + + async_test(function (t) { + // Test type/buffered case sensitivity. + observer = new PerformanceObserver( + t.step_func(function (entryList, obs) { + assert_unreached("Observer(type) should not be called."); + }) + ); + observer.observe({type: "Mark"}); + observer.observe({type: "Measure"}); + observer.observe({type: "MARK"}); + observer.observe({type: "MEASURE"}); + observer.observe({type: "Mark", buffered: true}); + observer.observe({type: "Measure", buffered: true}); + observer.observe({type: "MARK", buffered: true}); + observer.observe({type: "MEASURE", buffered: true}); + self.performance.mark("mark1"); + self.performance.measure("measure1"); + + // Test entryTypes case sensitivity. 
+ observer = new PerformanceObserver( + t.step_func(function (entryList, obs) { + assert_unreached("Observer(entryTypes) should not be called."); + }) + ); + observer.observe({entryTypes: ["Mark", "Measure"]}); + observer.observe({entryTypes: ["MARK", "MEASURE"]}); + self.performance.mark("mark1"); + self.performance.measure("measure1"); + + t.step_timeout(function() { + t.done(); + }, 1000); + + }, "observe() and case sensitivity for types/entryTypes and buffered."); diff --git a/test/fixtures/wpt/performance-timeline/get-invalid-entries.html b/test/fixtures/wpt/performance-timeline/get-invalid-entries.html new file mode 100644 index 00000000000000..33d6589e275e26 --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/get-invalid-entries.html @@ -0,0 +1,27 @@ + + + + + + + + + + diff --git a/test/fixtures/wpt/performance-timeline/idlharness.any.js b/test/fixtures/wpt/performance-timeline/idlharness.any.js new file mode 100644 index 00000000000000..32efebe98ffd47 --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/idlharness.any.js @@ -0,0 +1,25 @@ +// META: global=window,worker +// META: script=/resources/WebIDLParser.js +// META: script=/resources/idlharness.js + +// https://w3c.github.io/performance-timeline/ + +'use strict'; + +idl_test( + ['performance-timeline'], + ['hr-time', 'dom'], + async idl_array => { + idl_array.add_objects({ + Performance: ['performance'], + PerformanceObserver: ['observer'], + PerformanceObserverEntryList: ['entryList'], + }); + + self.entryList = await new Promise((resolve, reject) => { + self.observer = new PerformanceObserver(resolve); + observer.observe({ entryTypes: ['mark'] }); + performance.mark('test'); + }); + } +); diff --git a/test/fixtures/wpt/performance-timeline/multiple-buffered-flag-observers.any.js b/test/fixtures/wpt/performance-timeline/multiple-buffered-flag-observers.any.js new file mode 100644 index 00000000000000..5dd44fb18fbdb9 --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/multiple-buffered-flag-observers.any.js @@ -0,0 +1,32 @@ +promise_test(() => { + // The first promise waits for one buffered flag observer to receive 3 entries. + const promise1 = new Promise(resolve1 => { + let numObserved1 = 0; + new PerformanceObserver((entryList, obs) => { + // This buffered flag observer is constructed after a regular observer detects a mark. + new PerformanceObserver(list => { + numObserved1 += list.getEntries().length; + if (numObserved1 == 3) + resolve1(); + }).observe({type: 'mark', buffered: true}); + obs.disconnect(); + }).observe({entryTypes: ['mark']}); + performance.mark('foo'); + }); + // The second promise waits for another buffered flag observer to receive 3 entries. + const promise2 = new Promise(resolve2 => { + step_timeout(() => { + let numObserved2 = 0; + // This buffered flag observer is constructed after a delay of 100ms. + new PerformanceObserver(list => { + numObserved2 += list.getEntries().length; + if (numObserved2 == 3) + resolve2(); + }).observe({type: 'mark', buffered: true}); + }, 100); + performance.mark('bar'); + }); + performance.mark('meow'); + // Pass if and only if both buffered observers received all 3 mark entries. 
+ return Promise.all([promise1, promise2]); +}, 'Multiple PerformanceObservers with buffered flag see all entries'); diff --git a/test/fixtures/wpt/performance-timeline/not-clonable.html b/test/fixtures/wpt/performance-timeline/not-clonable.html new file mode 100644 index 00000000000000..d651776e5f4b94 --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/not-clonable.html @@ -0,0 +1,10 @@ + + + + + diff --git a/test/fixtures/wpt/performance-timeline/observer-buffered-false.any.js b/test/fixtures/wpt/performance-timeline/observer-buffered-false.any.js new file mode 100644 index 00000000000000..a28100b0fdba2e --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/observer-buffered-false.any.js @@ -0,0 +1,12 @@ +async_test(t => { + performance.mark('foo'); + // Use a timeout to ensure the remainder of the test runs after the entry is created. + t.step_timeout(() => { + // Observer with buffered flag set to false should not see entry. + new PerformanceObserver(() => { + assert_unreached('Should not have observed any entry!'); + }).observe({type: 'mark', buffered: false}); + // Use a timeout to give time to the observer. + t.step_timeout(t.step_func_done(() => {}), 100); + }, 0); +}, 'PerformanceObserver without buffered flag set to false cannot see past entries.'); diff --git a/test/fixtures/wpt/performance-timeline/performanceentry-tojson.any.js b/test/fixtures/wpt/performance-timeline/performanceentry-tojson.any.js new file mode 100644 index 00000000000000..44f0156eec1924 --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/performanceentry-tojson.any.js @@ -0,0 +1,21 @@ +test(() => { + performance.mark('markName'); + performance.measure('measureName'); + + const entries = performance.getEntries(); + const performanceEntryKeys = [ + 'name', + 'entryType', + 'startTime', + 'duration' + ]; + for (let i = 0; i < entries.length; ++i) { + assert_equals(typeof(entries[i].toJSON), 'function'); + const json = entries[i].toJSON(); + assert_equals(typeof(json), 'object'); + for (const key of performanceEntryKeys) { + assert_equals(json[key], entries[i][key], + `entries[${i}].toJSON().${key} should match entries[${i}].${key}`); + } + } +}, 'Test toJSON() in PerformanceEntry'); diff --git a/test/fixtures/wpt/performance-timeline/performanceobservers.js b/test/fixtures/wpt/performance-timeline/performanceobservers.js new file mode 100644 index 00000000000000..3f357374efdfc2 --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/performanceobservers.js @@ -0,0 +1,44 @@ +// Compares a performance entry to a predefined one +// perfEntriesToCheck is an array of performance entries from the user agent +// expectedEntries is an array of performance entries minted by the test +function checkEntries(perfEntriesToCheck, expectedEntries) { + function findMatch(pe) { + // we match based on entryType and name + for (var i = expectedEntries.length - 1; i >= 0; i--) { + var ex = expectedEntries[i]; + if (ex.entryType === pe.entryType && ex.name === pe.name) { + return ex; + } + } + return null; + } + + assert_equals(perfEntriesToCheck.length, expectedEntries.length, "performance entries must match"); + + perfEntriesToCheck.forEach(function (pe1) { + assert_not_equals(findMatch(pe1), null, "Entry matches"); + }); +} + +// Waits for performance.now to advance. Since precision reduction might +// cause it to return the same value across multiple calls. 
+function wait() { + var now = performance.now(); + while (now === performance.now()) + continue; +} + +// Ensure the entries list is sorted by startTime. +function checkSorted(entries) { + assert_not_equals(entries.length, 0, "entries list must not be empty"); + if (!entries.length) + return; + + var sorted = false; + var lastStartTime = entries[0].startTime; + for (var i = 1; i < entries.length; ++i) { + var currStartTime = entries[i].startTime; + assert_less_than_equal(lastStartTime, currStartTime, "entry list must be sorted by startTime"); + lastStartTime = currStartTime; + } +} diff --git a/test/fixtures/wpt/performance-timeline/po-callback-mutate.any.js b/test/fixtures/wpt/performance-timeline/po-callback-mutate.any.js new file mode 100644 index 00000000000000..8f1b09bc377120 --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/po-callback-mutate.any.js @@ -0,0 +1,66 @@ +// META: script=performanceobservers.js + + async_test(function (t) { + var callbackCount = 0; + var observer = new PerformanceObserver( + t.step_func(function (entryList, obs) { + callbackCount++; + + if (callbackCount === 1) { + checkEntries(entryList.getEntries(), [ + {entryType: "measure", name: "measure1"}, + ]); + observer.observe({entryTypes: ["mark"]}); + self.performance.mark("mark2"); + self.performance.measure("measure2"); + return; + } + + if (callbackCount === 2) { + checkEntries(entryList.getEntries(), [ + {entryType: "mark", name: "mark2"}, + ]); + self.performance.mark("mark-before-change-observe-state-to-measure"); + self.performance.measure("measure-before-change-observe-state-to-measure"); + observer.observe({entryTypes: ["measure"]}); + self.performance.mark("mark3"); + self.performance.measure("measure3"); + return; + } + + if (callbackCount === 3) { + checkEntries(entryList.getEntries(), [ + {entryType: "measure", name: "measure3"}, + {entryType: "mark", name: "mark-before-change-observe-state-to-measure"}, + ]); + self.performance.mark("mark-before-change-observe-state-to-both"); + self.performance.measure("measure-before-change-observe-state-to-both"); + observer.observe({entryTypes: ["mark", "measure"]}); + self.performance.mark("mark4"); + self.performance.measure("measure4"); + return; + } + + if (callbackCount === 4) { + checkEntries(entryList.getEntries(), [ + {entryType: "measure", name: "measure-before-change-observe-state-to-both"}, + {entryType: "measure", name: "measure4"}, + {entryType: "mark", name: "mark4"}, + ]); + self.performance.mark("mark-before-disconnect"); + self.performance.measure("measure-before-disconnect"); + observer.disconnect(); + self.performance.mark("mark-after-disconnect"); + self.performance.measure("measure-after-disconnect"); + t.done(); + return; + } + + assert_unreached("The callback must not be invoked after disconnecting"); + }) + ); + + observer.observe({entryTypes: ["measure"]}); + self.performance.mark("mark1"); + self.performance.measure("measure1"); + }, "PerformanceObserver modifications inside callback should update filtering and not clear buffer"); diff --git a/test/fixtures/wpt/performance-timeline/po-disconnect-removes-observed-types.any.js b/test/fixtures/wpt/performance-timeline/po-disconnect-removes-observed-types.any.js new file mode 100644 index 00000000000000..cac97bea0755c1 --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/po-disconnect-removes-observed-types.any.js @@ -0,0 +1,19 @@ +// META: script=performanceobservers.js + +async_test(function (t) { + const observer = new PerformanceObserver( + t.step_func(function 
(entryList) { + // There should be no mark entry. + checkEntries(entryList.getEntries(), + [{ entryType: "measure", name: "b"}]); + t.done(); + }) + ); + observer.observe({type: "mark"}); + // Disconnect the observer. + observer.disconnect(); + // Now, only observe measure. + observer.observe({type: "measure"}); + performance.mark("a"); + performance.measure("b"); +}, "Types observed are forgotten when disconnect() is called."); diff --git a/test/fixtures/wpt/performance-timeline/po-disconnect.any.js b/test/fixtures/wpt/performance-timeline/po-disconnect.any.js new file mode 100644 index 00000000000000..5f5fb5aa43ba46 --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/po-disconnect.any.js @@ -0,0 +1,37 @@ +// META: script=performanceobservers.js + + async_test(function (t) { + var observer = new PerformanceObserver( + t.step_func(function (entryList, obs) { + assert_unreached("This callback must not be invoked"); + }) + ); + observer.observe({entryTypes: ["mark", "measure", "navigation"]}); + observer.disconnect(); + self.performance.mark("mark1"); + self.performance.measure("measure1"); + t.step_timeout(function () { + t.done(); + }, 2000); + }, "disconnected callbacks must not be invoked"); + + test(function () { + var obs = new PerformanceObserver(function () { return true; }); + obs.disconnect(); + obs.disconnect(); + }, "disconnecting an unconnected observer is a no-op"); + + async_test(function (t) { + var observer = new PerformanceObserver( + t.step_func(function (entryList, obs) { + assert_unreached("This callback must not be invoked"); + }) + ); + observer.observe({entryTypes: ["mark"]}); + self.performance.mark("mark1"); + observer.disconnect(); + self.performance.mark("mark2"); + t.step_timeout(function () { + t.done(); + }, 2000); + }, "An observer disconnected after a mark must not have its callback invoked"); diff --git a/test/fixtures/wpt/performance-timeline/po-entries-sort.any.js b/test/fixtures/wpt/performance-timeline/po-entries-sort.any.js new file mode 100644 index 00000000000000..b0c781a3c0c03b --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/po-entries-sort.any.js @@ -0,0 +1,64 @@ +// META: script=performanceobservers.js + + async_test(function (t) { + var stored_entries = []; + var stored_entries_by_type = []; + var observer = new PerformanceObserver( + t.step_func(function (entryList, obs) { + + stored_entries = entryList.getEntries(); + stored_entries_by_type = entryList.getEntriesByType("mark"); + stored_entries_by_name = entryList.getEntriesByName("name-repeat"); + var startTimeOfMark2 = entryList.getEntriesByName("mark2")[0].startTime; + + checkSorted(stored_entries); + checkEntries(stored_entries, [ + {entryType: "measure", name: "measure1"}, + {entryType: "measure", name: "measure2"}, + {entryType: "measure", name: "measure3"}, + {entryType: "measure", name: "name-repeat"}, + {entryType: "mark", name: "mark1"}, + {entryType: "mark", name: "mark2"}, + {entryType: "measure", name: "measure-matching-mark2-1"}, + {entryType: "measure", name: "measure-matching-mark2-2"}, + {entryType: "mark", name: "name-repeat"}, + {entryType: "mark", name: "name-repeat"}, + ]); + + checkSorted(stored_entries_by_type); + checkEntries(stored_entries_by_type, [ + {entryType: "mark", name: "mark1"}, + {entryType: "mark", name: "mark2"}, + {entryType: "mark", name: "name-repeat"}, + {entryType: "mark", name: "name-repeat"}, + ]); + + checkSorted(stored_entries_by_name); + checkEntries(stored_entries_by_name, [ + {entryType: "measure", name: "name-repeat"}, + 
{entryType: "mark", name: "name-repeat"}, + {entryType: "mark", name: "name-repeat"}, + ]); + + observer.disconnect(); + t.done(); + }) + ); + + observer.observe({entryTypes: ["mark", "measure"]}); + + self.performance.mark("mark1"); + self.performance.measure("measure1"); + wait(); // Ensure mark1 !== mark2 startTime by making sure performance.now advances. + self.performance.mark("mark2"); + self.performance.measure("measure2"); + self.performance.measure("measure-matching-mark2-1", "mark2"); + wait(); // Ensure mark2 !== mark3 startTime by making sure performance.now advances. + self.performance.mark("name-repeat"); + self.performance.measure("measure3"); + self.performance.measure("measure-matching-mark2-2", "mark2"); + wait(); // Ensure name-repeat startTime will differ. + self.performance.mark("name-repeat"); + wait(); // Ensure name-repeat startTime will differ. + self.performance.measure("name-repeat"); + }, "getEntries, getEntriesByType, getEntriesByName sort order"); diff --git a/test/fixtures/wpt/performance-timeline/po-getentries.any.js b/test/fixtures/wpt/performance-timeline/po-getentries.any.js new file mode 100644 index 00000000000000..36169d5dbd6e0e --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/po-getentries.any.js @@ -0,0 +1,38 @@ +// META: script=performanceobservers.js + + async_test(function (t) { + var observer = new PerformanceObserver( + t.step_func(function (entryList, obs) { + checkEntries(entryList.getEntries(), + [{ entryType: "mark", name: "mark1"}], "getEntries"); + + checkEntries(entryList.getEntriesByType("mark"), + [{ entryType: "mark", name: "mark1"}], "getEntriesByType"); + assert_equals(entryList.getEntriesByType("measure").length, 0, + "getEntriesByType with no expected entry"); + assert_equals(entryList.getEntriesByType("234567").length, 0, + "getEntriesByType with no expected entry"); + + checkEntries(entryList.getEntriesByName("mark1"), + [{ entryType: "mark", name: "mark1"}], "getEntriesByName"); + assert_equals(entryList.getEntriesByName("mark2").length, 0, + "getEntriesByName with no expected entry"); + assert_equals(entryList.getEntriesByName("234567").length, 0, + "getEntriesByName with no expected entry"); + + checkEntries(entryList.getEntriesByName("mark1", "mark"), + [{ entryType: "mark", name: "mark1"}], "getEntriesByName with a type"); + assert_equals(entryList.getEntriesByName("mark1", "measure").length, 0, + "getEntriesByName with a type with no expected entry"); + assert_equals(entryList.getEntriesByName("mark2", "measure").length, 0, + "getEntriesByName with a type with no expected entry"); + assert_equals(entryList.getEntriesByName("mark1", "234567").length, 0, + "getEntriesByName with a type with no expected entry"); + + observer.disconnect(); + t.done(); + }) + ); + observer.observe({entryTypes: ["mark"]}); + self.performance.mark("mark1"); + }, "getEntries, getEntriesByType and getEntriesByName work"); diff --git a/test/fixtures/wpt/performance-timeline/po-mark-measure.any.js b/test/fixtures/wpt/performance-timeline/po-mark-measure.any.js new file mode 100644 index 00000000000000..0b205e094c75e1 --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/po-mark-measure.any.js @@ -0,0 +1,61 @@ +// META: script=performanceobservers.js + + async_test(function (t) { + var stored_entries = []; + var observer = new PerformanceObserver( + t.step_func(function (entryList, obs) { + stored_entries = + stored_entries.concat(entryList.getEntries()); + if (stored_entries.length >= 4) { + checkEntries(stored_entries, + [{ entryType: 
"mark", name: "mark1"}, + { entryType: "mark", name: "mark2"}, + { entryType: "measure", name: "measure1"}, + { entryType: "measure", name: "measure2"}]); + observer.disconnect(); + t.done(); + } + }) + ); + observer.observe({entryTypes: ["mark", "measure"]}); + }, "entries are observable"); + + async_test(function (t) { + var mark_entries = []; + var observer = new PerformanceObserver( + t.step_func(function (entryList, obs) { + mark_entries = + mark_entries.concat(entryList.getEntries()); + if (mark_entries.length >= 2) { + checkEntries(mark_entries, + [{ entryType: "mark", name: "mark1"}, + { entryType: "mark", name: "mark2"}]); + observer.disconnect(); + t.done(); + } + }) + ); + observer.observe({entryTypes: ["mark"]}); + self.performance.mark("mark1"); + self.performance.mark("mark2"); + }, "mark entries are observable"); + + async_test(function (t) { + var measure_entries = []; + var observer = new PerformanceObserver( + t.step_func(function (entryList, obs) { + measure_entries = + measure_entries.concat(entryList.getEntries()); + if (measure_entries.length >= 2) { + checkEntries(measure_entries, + [{ entryType: "measure", name: "measure1"}, + { entryType: "measure", name: "measure2"}]); + observer.disconnect(); + t.done(); + } + }) + ); + observer.observe({entryTypes: ["measure"]}); + self.performance.measure("measure1"); + self.performance.measure("measure2"); + }, "measure entries are observable"); diff --git a/test/fixtures/wpt/performance-timeline/po-observe-repeated-type.any.js b/test/fixtures/wpt/performance-timeline/po-observe-repeated-type.any.js new file mode 100644 index 00000000000000..2bba396a6b69eb --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/po-observe-repeated-type.any.js @@ -0,0 +1,17 @@ +// META: script=performanceobservers.js + +async_test(function (t) { + const observer = new PerformanceObserver( + t.step_func(function (entryList) { + checkEntries(entryList.getEntries(), + [{ entryType: "mark", name: "early"}]); + observer.disconnect(); + t.done(); + }) + ); + performance.mark("early"); + // This call will not trigger anything. + observer.observe({type: "mark"}); + // This call should override the previous call and detect the early mark. 
+ observer.observe({type: "mark", buffered: true}); +}, "Two calls of observe() with the same 'type' cause override."); diff --git a/test/fixtures/wpt/performance-timeline/po-observe-type.any.js b/test/fixtures/wpt/performance-timeline/po-observe-type.any.js new file mode 100644 index 00000000000000..b9854cc1466fa7 --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/po-observe-type.any.js @@ -0,0 +1,64 @@ +// META: script=performanceobservers.js + +test(function () { + const obs = new PerformanceObserver(() => {}); + assert_throws_js(TypeError, function () { + obs.observe({}); + }); + assert_throws_js(TypeError, function () { + obs.observe({entryType: ['mark', 'measure']}); + }); +}, "Calling observe() without 'type' or 'entryTypes' throws a TypeError"); + +test(() => { + const obs = new PerformanceObserver(() =>{}); + obs.observe({entryTypes: ["mark"]}); + assert_throws_dom('InvalidModificationError', function () { + obs.observe({type: "measure"}); + }); +}, "Calling observe() with entryTypes and then type should throw an InvalidModificationError"); + +test(() => { + const obs = new PerformanceObserver(() =>{}); + obs.observe({type: "mark"}); + assert_throws_dom('InvalidModificationError', function () { + obs.observe({entryTypes: ["measure"]}); + }); +}, "Calling observe() with type and then entryTypes should throw an InvalidModificationError"); + +test(() => { + const obs = new PerformanceObserver(() =>{}); + assert_throws_js(TypeError, function () { + obs.observe({type: "mark", entryTypes: ["measure"]}); + }); +}, "Calling observe() with type and entryTypes should throw a TypeError"); + +test(function () { + const obs = new PerformanceObserver(() =>{}); + // Definitely not an entry type. + obs.observe({type: "this-cannot-match-an-entryType"}); + // Close to an entry type, but not quite. + obs.observe({type: "marks"}); +}, "Passing in unknown values to type does throw an exception."); + +async_test(function (t) { + let observedMark = false; + let observedMeasure = false; + const observer = new PerformanceObserver( + t.step_func(function (entryList, obs) { + observedMark |= entryList.getEntries().filter( + entry => entry.entryType === 'mark').length; + observedMeasure |= entryList.getEntries().filter( + entry => entry.entryType === 'measure').length + // Only conclude the test once we receive both entries! 
+ if (observedMark && observedMeasure) { + observer.disconnect(); + t.done(); + } + }) + ); + observer.observe({type: "mark"}); + observer.observe({type: "measure"}); + self.performance.mark("mark1"); + self.performance.measure("measure1"); +}, "observe() with different type values stacks."); diff --git a/test/fixtures/wpt/performance-timeline/po-observe.any.js b/test/fixtures/wpt/performance-timeline/po-observe.any.js new file mode 100644 index 00000000000000..5b593374baf157 --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/po-observe.any.js @@ -0,0 +1,63 @@ +// META: script=performanceobservers.js + + test(function () { + const obs = new PerformanceObserver(() => {}); + assert_throws_js(TypeError, function () { + obs.observe({entryTypes: "mark"}); + }); + }, "entryTypes must be a sequence or throw a TypeError"); + + test(function () { + const obs = new PerformanceObserver(() => {}); + obs.observe({entryTypes: []}); + }, "Empty sequence entryTypes does not throw an exception."); + + test(function () { + const obs = new PerformanceObserver(() => {}); + obs.observe({entryTypes: ["this-cannot-match-an-entryType"]}); + obs.observe({entryTypes: ["marks","navigate", "resources"]}); + }, "Unknown entryTypes do not throw an exception."); + + test(function () { + const obs = new PerformanceObserver(() => {}); + obs.observe({entryTypes: ["mark","this-cannot-match-an-entryType"]}); + obs.observe({entryTypes: ["this-cannot-match-an-entryType","mark"]}); + obs.observe({entryTypes: ["mark"], others: true}); + }, "Filter unsupported entryType entryType names within the entryTypes sequence"); + + async_test(function (t) { + var finish = t.step_func(function () { t.done(); }); + var observer = new PerformanceObserver( + function (entryList, obs) { + var self = this; + t.step(function () { + assert_true(entryList instanceof PerformanceObserverEntryList, "first callback parameter must be a PerformanceObserverEntryList instance"); + assert_true(obs instanceof PerformanceObserver, "second callback parameter must be a PerformanceObserver instance"); + assert_equals(observer, self, "observer is the this value"); + assert_equals(observer, obs, "observer is second parameter"); + assert_equals(self, obs, "this and second parameter are the same"); + observer.disconnect(); + finish(); + }); + } + ); + self.performance.clearMarks(); + observer.observe({entryTypes: ["mark"]}); + self.performance.mark("mark1"); + }, "Check observer callback parameter and this values"); + + async_test(function (t) { + var observer = new PerformanceObserver( + t.step_func(function (entryList, obs) { + checkEntries(entryList.getEntries(), + [{ entryType: "measure", name: "measure1"}]); + observer.disconnect(); + t.done(); + }) + ); + self.performance.clearMarks(); + observer.observe({entryTypes: ["mark"]}); + observer.observe({entryTypes: ["measure"]}); + self.performance.mark("mark1"); + self.performance.measure("measure1"); + }, "replace observer if already present"); diff --git a/test/fixtures/wpt/performance-timeline/po-observe.html b/test/fixtures/wpt/performance-timeline/po-observe.html new file mode 100644 index 00000000000000..a48f0f3764bda4 --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/po-observe.html @@ -0,0 +1,86 @@ + + +PerformanceObservers: PerformanceObserverInit.buffered + + + + +

PerformanceObserverInit.buffered should retrieve previously buffered entries
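For reference, a minimal Node.js sketch of the observer pattern the po-observe fixtures above exercise, using only perf_hooks; the entry names are illustrative:

    'use strict';
    const { performance, PerformanceObserver } = require('perf_hooks');

    const observer = new PerformanceObserver((list, obs) => {
      // The callback receives a PerformanceObserverEntryList and the observer itself.
      for (const entry of list.getEntriesByType('mark')) {
        console.log(entry.name, entry.startTime);
      }
      obs.disconnect();
    });

    observer.observe({ entryTypes: ['mark', 'measure'] });
    performance.mark('mark1');
    performance.measure('measure1');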
+ diff --git a/test/fixtures/wpt/performance-timeline/po-resource.html b/test/fixtures/wpt/performance-timeline/po-resource.html new file mode 100644 index 00000000000000..00c173eeae921f --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/po-resource.html @@ -0,0 +1,48 @@ + + +PerformanceObservers: resource + + + +

New resources will queue a PerformanceEntry.
+ diff --git a/test/fixtures/wpt/performance-timeline/po-takeRecords.any.js b/test/fixtures/wpt/performance-timeline/po-takeRecords.any.js new file mode 100644 index 00000000000000..86ad397b0a5c37 --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/po-takeRecords.any.js @@ -0,0 +1,34 @@ +// META: title=PerformanceObserver: takeRecords +// META: script=performanceobservers.js + +async_test(function (t) { + const observer = new PerformanceObserver(function (entryList, observer) { + assert_unreached('This callback should not have been called.') + }); + let entries = observer.takeRecords(); + checkEntries(entries, [], 'No records before observe'); + observer.observe({entryTypes: ['mark']}); + assert_equals(typeof(observer.takeRecords), 'function'); + entries = observer.takeRecords(); + checkEntries(entries, [], 'No records just from observe'); + performance.mark('a'); + performance.mark('b'); + entries = observer.takeRecords(); + checkEntries(entries, [ + {entryType: 'mark', name: 'a'}, + {entryType: 'mark', name: 'b'} + ]); + performance.mark('c'); + performance.mark('d'); + performance.mark('e'); + entries = observer.takeRecords(); + checkEntries(entries, [ + {entryType: 'mark', name: 'c'}, + {entryType: 'mark', name: 'd'}, + {entryType: 'mark', name: 'e'} + ]); + entries = observer.takeRecords(); + checkEntries(entries, [], 'No entries right after takeRecords'); + observer.disconnect(); + t.done(); + }, "Test PerformanceObserver's takeRecords()"); diff --git a/test/fixtures/wpt/performance-timeline/resources/postmessage-entry.html b/test/fixtures/wpt/performance-timeline/resources/postmessage-entry.html new file mode 100644 index 00000000000000..ef5be73395b49d --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/resources/postmessage-entry.html @@ -0,0 +1,17 @@ + + diff --git a/test/fixtures/wpt/performance-timeline/resources/square.png b/test/fixtures/wpt/performance-timeline/resources/square.png new file mode 100644 index 0000000000000000000000000000000000000000..be211bc377180386eec7e891485e083a2c45841e GIT binary patch literal 249 zcmeAS@N?(olHy`uVBq!ia0vp^&Oq$I!2~4F*~E4MDaPU;cPEB*=VV?2IV|apzK#qG z8~eHcB(eheoCO|{#S9F5he4R}c>anMpkSY;i(^QJ^V@3&IS(i>usHtKSF4Sf>dC%O z>l*WB#=Z#>-@S`%S8CZt@7Oo5h)`6&={0M#tksJI@9JEj%NRUe L{an^LB{Ts5 true); + postMessage("SUCCESS"); +} catch (ex) { + postMessage("FAILURE"); +} diff --git a/test/fixtures/wpt/performance-timeline/supportedEntryTypes.any.js b/test/fixtures/wpt/performance-timeline/supportedEntryTypes.any.js new file mode 100644 index 00000000000000..25f195939e7b69 --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/supportedEntryTypes.any.js @@ -0,0 +1,19 @@ +test(() => { + if (typeof PerformanceObserver.supportedEntryTypes === "undefined") + assert_unreached("supportedEntryTypes is not supported."); + const types = PerformanceObserver.supportedEntryTypes; + assert_greater_than(types.length, 0, + "There should be at least one entry in supportedEntryTypes."); + for (let i = 1; i < types.length; i++) { + assert_true(types[i-1] < types[i], + "The strings '" + types[i-1] + "' and '" + types[i] + + "' are repeated or they are not in alphabetical order.") + } +}, "supportedEntryTypes exists and returns entries in alphabetical order"); + +test(() => { + if (typeof PerformanceObserver.supportedEntryTypes === "undefined") + assert_unreached("supportedEntryTypes is not supported."); + assert_true(PerformanceObserver.supportedEntryTypes === + PerformanceObserver.supportedEntryTypes); +}, "supportedEntryTypes caches result"); diff --git 
a/test/fixtures/wpt/performance-timeline/webtiming-resolution.any.js b/test/fixtures/wpt/performance-timeline/webtiming-resolution.any.js new file mode 100644 index 00000000000000..d869c7c52d55d6 --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/webtiming-resolution.any.js @@ -0,0 +1,25 @@ +function testTimeResolution(highResTimeFunc, funcString) { + test(() => { + const t0 = highResTimeFunc(); + let t1 = highResTimeFunc(); + while (t0 == t1) { + t1 = highResTimeFunc(); + } + const epsilon = 1e-5; + assert_greater_than_equal(t1 - t0, 0.005 - epsilon, 'The second ' + funcString + ' should be much greater than the first'); + }, 'Verifies the resolution of ' + funcString + ' is at least 5 microseconds.'); +} + +function timeByPerformanceNow() { + return performance.now(); +} + +function timeByUserTiming() { + performance.mark('timer'); + const time = performance.getEntriesByName('timer')[0].startTime; + performance.clearMarks('timer'); + return time; +} + +testTimeResolution(timeByPerformanceNow, 'performance.now()'); +testTimeResolution(timeByUserTiming, 'entry.startTime'); diff --git a/test/fixtures/wpt/performance-timeline/worker-with-performance-observer.html b/test/fixtures/wpt/performance-timeline/worker-with-performance-observer.html new file mode 100644 index 00000000000000..fc92bc971003f2 --- /dev/null +++ b/test/fixtures/wpt/performance-timeline/worker-with-performance-observer.html @@ -0,0 +1,18 @@ + + + + + + + + + + diff --git a/test/fixtures/wpt/user-timing/META.yml b/test/fixtures/wpt/user-timing/META.yml new file mode 100644 index 00000000000000..5cb2a789c09c89 --- /dev/null +++ b/test/fixtures/wpt/user-timing/META.yml @@ -0,0 +1,4 @@ +spec: https://w3c.github.io/user-timing/ +suggested_reviewers: + - plehegar + - igrigorik diff --git a/test/fixtures/wpt/user-timing/buffered-flag.any.js b/test/fixtures/wpt/user-timing/buffered-flag.any.js new file mode 100644 index 00000000000000..f938c8522d829a --- /dev/null +++ b/test/fixtures/wpt/user-timing/buffered-flag.any.js @@ -0,0 +1,27 @@ +async_test(t => { + // First observer creates second in callback to ensure the entry has been dispatched by the time + // the second observer begins observing. + new PerformanceObserver(() => { + // Second observer requires 'buffered: true' to see an entry. + new PerformanceObserver(t.step_func_done(list => { + const entries = list.getEntries(); + assert_equals(entries.length, 1, 'There should be 1 mark entry.'); + assert_equals(entries[0].entryType, 'mark'); + })).observe({type: 'mark', buffered: true}); + }).observe({entryTypes: ['mark']}); + performance.mark('foo'); +}, 'PerformanceObserver with buffered flag sees previous marks'); + +async_test(t => { + // First observer creates second in callback to ensure the entry has been dispatched by the time + // the second observer begins observing. + new PerformanceObserver(() => { + // Second observer requires 'buffered: true' to see an entry. 
+ new PerformanceObserver(t.step_func_done(list => { + const entries = list.getEntries(); + assert_equals(entries.length, 1, 'There should be 1 measure entry.'); + assert_equals(entries[0].entryType, 'measure'); + })).observe({type: 'measure', buffered: true}); + }).observe({entryTypes: ['measure']}); + performance.measure('bar'); +}, 'PerformanceObserver with buffered flag sees previous measures'); diff --git a/test/fixtures/wpt/user-timing/case-sensitivity.any.js b/test/fixtures/wpt/user-timing/case-sensitivity.any.js new file mode 100644 index 00000000000000..1c0b0dcac361fe --- /dev/null +++ b/test/fixtures/wpt/user-timing/case-sensitivity.any.js @@ -0,0 +1,25 @@ + test(function () { + assert_equals(typeof self.performance, "object"); + assert_equals(typeof self.performance.getEntriesByType, "function"); + + self.performance.mark("mark1"); + self.performance.measure("measure1"); + + const type = [ + 'mark', + 'measure', + ]; + type.forEach(function(entryType) { + if (PerformanceObserver.supportedEntryTypes.includes(entryType)) { + const entryTypeUpperCased = entryType.toUpperCase(); + const entryTypeCapitalized = entryType[0].toUpperCase() + entryType.substring(1); + const lowerList = self.performance.getEntriesByType(entryType); + const upperList = self.performance.getEntriesByType(entryTypeUpperCased); + const mixedList = self.performance.getEntriesByType(entryTypeCapitalized); + + assert_greater_than(lowerList.length, 0, "Entries exist"); + assert_equals(upperList.length, 0, "getEntriesByType('" + entryTypeCapitalized + "').length"); + assert_equals(mixedList.length, 0, "getEntriesByType('" + entryTypeCapitalized + "').length"); + } + }); + }, "getEntriesByType values are case sensitive"); diff --git a/test/fixtures/wpt/user-timing/clearMarks.html b/test/fixtures/wpt/user-timing/clearMarks.html new file mode 100644 index 00000000000000..92c60a3bbb856b --- /dev/null +++ b/test/fixtures/wpt/user-timing/clearMarks.html @@ -0,0 +1,84 @@ + + + + +functionality test of window.performance.clearMarks + + + + + + + + + + +

Description
This test validates functionality of the interface window.performance.clearMarks.
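A rough Node.js counterpart of the supportedEntryTypes and buffered-flag behaviour checked above; a sketch only, assuming the flag replays already-buffered user-timing entries into the first callback:

    'use strict';
    const { performance, PerformanceObserver } = require('perf_hooks');

    // Feature detection, in the spirit of supportedEntryTypes.any.js.
    console.log(PerformanceObserver.supportedEntryTypes.includes('mark')); // true

    performance.mark('early'); // created before any observer exists

    new PerformanceObserver((list, obs) => {
      // With buffered: true the buffered 'early' mark is replayed here.
      console.log(list.getEntriesByName('early').length); // expected: 1
      obs.disconnect();
    }).observe({ type: 'mark', buffered: true });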
+ + diff --git a/test/fixtures/wpt/user-timing/clearMeasures.html b/test/fixtures/wpt/user-timing/clearMeasures.html new file mode 100644 index 00000000000000..54d41005698305 --- /dev/null +++ b/test/fixtures/wpt/user-timing/clearMeasures.html @@ -0,0 +1,77 @@ + + + + +functionality test of window.performance.clearMeasures + + + + + + + + + + +

Description
This test validates functionality of the interface window.performance.clearMeasures.
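The clearMarks/clearMeasures fixtures matter for Node.js because user-created entries stay in the global buffer until cleared; a small sketch with illustrative names:

    'use strict';
    const { performance } = require('perf_hooks');

    for (let i = 0; i < 3; i++) {
      performance.mark(`tick-${i}`);
      performance.measure(`tick-${i}-to-now`, `tick-${i}`);
    }
    console.log(performance.getEntriesByType('mark').length);    // 3
    console.log(performance.getEntriesByType('measure').length); // 3

    // Release the buffered user-timing entries.
    performance.clearMarks();
    performance.clearMeasures();
    console.log(performance.getEntries().length); // 0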
+ + diff --git a/test/fixtures/wpt/user-timing/clear_all_marks.any.js b/test/fixtures/wpt/user-timing/clear_all_marks.any.js new file mode 100644 index 00000000000000..35cd2a04f61036 --- /dev/null +++ b/test/fixtures/wpt/user-timing/clear_all_marks.any.js @@ -0,0 +1,17 @@ +test(function() { + self.performance.mark("mark1"); + self.performance.mark("mark2"); + + // test that two marks have been created + var entries = self.performance.getEntriesByType("mark"); + assert_equals(entries.length, 2, "Two marks have been created for this test."); + + // clear all marks + self.performance.clearMarks(); + + // test that all marks were cleared + entries = self.performance.getEntriesByType("mark"); + + assert_equals(entries.length, 0, "All marks have been cleared."); + +}, "Clearing all marks remove all of them."); diff --git a/test/fixtures/wpt/user-timing/clear_all_measures.any.js b/test/fixtures/wpt/user-timing/clear_all_measures.any.js new file mode 100644 index 00000000000000..32c993f2827a30 --- /dev/null +++ b/test/fixtures/wpt/user-timing/clear_all_measures.any.js @@ -0,0 +1,21 @@ +test(function() +{ + self.performance.mark("mark1"); + self.performance.measure("measure1", "mark1"); + self.performance.mark("mark2"); + self.performance.measure("measure2", "mark2"); + + // test that two measures have been created + var entries = self.performance.getEntriesByType("measure"); + assert_equals(entries.length, 2, "Two measures have been created for this test."); + + // clear all measures + self.performance.clearMeasures(); + + // test that all measures were cleared + entries = self.performance.getEntriesByType("measure"); + assert_equals(entries.length, 0, + "After a call to self.performance.clearMeasures(), " + + "self.performance.getEntriesByType(\"measure\") returns an empty object."); + +}, "Clearing all marks remove all of them."); diff --git a/test/fixtures/wpt/user-timing/clear_non_existent_mark.any.js b/test/fixtures/wpt/user-timing/clear_non_existent_mark.any.js new file mode 100644 index 00000000000000..c7d8b478613401 --- /dev/null +++ b/test/fixtures/wpt/user-timing/clear_non_existent_mark.any.js @@ -0,0 +1,26 @@ +test(function() { + self.performance.mark("mark1"); + self.performance.mark("mark2"); + + // test that two marks have been created + var entries = self.performance.getEntriesByType("mark"); + assert_equals(entries.length, 2, "Two marks have been created for this test."); + + // clear non-existent mark + self.performance.clearMarks("mark3"); + + // test that "mark1" still exists + entries = self.performance.getEntriesByName("mark1"); + assert_equals(entries[0].name, "mark1", + "After a call to self.performance.clearMarks(\"mark3\"), where \"mark3" + + "\" is a non-existent mark, self.performance.getEntriesByName(\"mark1\") " + + "returns an object containing the \"mark1\" mark."); + + // test that "mark2" still exists + entries = self.performance.getEntriesByName("mark2"); + assert_equals(entries[0].name, "mark2", + "After a call to self.performance.clearMarks(\"mark3\"), where \"mark3" + + "\" is a non-existent mark, self.performance.getEntriesByName(\"mark2\") " + + "returns an object containing the \"mark2\" mark."); + +}, "Clearing a non-existent mark doesn't affect existing marks"); diff --git a/test/fixtures/wpt/user-timing/clear_non_existent_measure.any.js b/test/fixtures/wpt/user-timing/clear_non_existent_measure.any.js new file mode 100644 index 00000000000000..9de0b5f266d4e2 --- /dev/null +++ b/test/fixtures/wpt/user-timing/clear_non_existent_measure.any.js @@ -0,0 +1,29 
@@ +test(function() +{ + self.performance.mark("mark1"); + self.performance.measure("measure1", "mark1"); + self.performance.mark("mark2"); + self.performance.measure("measure2", "mark2"); + + // test that two measures have been created + var entries = self.performance.getEntriesByType("measure"); + assert_equals(entries.length, 2, "Two measures have been created for this test."); + + // clear non-existent measure + self.performance.clearMeasures("measure3"); + + // test that "measure1" still exists + entries = self.performance.getEntriesByName("measure1"); + assert_equals(entries[0].name, "measure1", + "After a call to self.performance.clearMeasures(\"measure3\"), where \"measure3" + + "\" is a non-existent measure, self.performance.getEntriesByName(\"measure1\") " + + "returns an object containing the \"measure1\" measure."); + + // test that "measure2" still exists + entries = self.performance.getEntriesByName("measure2"); + assert_equals(entries[0].name, "measure2", + "After a call to self.performance.clearMeasures(\"measure3\"), where \"measure3" + + "\" is a non-existent measure, self.performance.getEntriesByName(\"measure2\") " + + "returns an object containing the \"measure2\" measure."); + +}, "Clearing a non-existent measure doesn't affect existing measures"); diff --git a/test/fixtures/wpt/user-timing/clear_one_mark.any.js b/test/fixtures/wpt/user-timing/clear_one_mark.any.js new file mode 100644 index 00000000000000..c180199d8c9f92 --- /dev/null +++ b/test/fixtures/wpt/user-timing/clear_one_mark.any.js @@ -0,0 +1,26 @@ +test(function() { + self.performance.mark("mark1"); + self.performance.mark("mark2"); + + // test that two marks have been created + var entries = self.performance.getEntriesByType("mark"); + assert_equals(entries.length, 2, "Two marks have been created for this test."); + + // clear existent mark + self.performance.clearMarks("mark1"); + + // test that "mark1" was cleared + entries = self.performance.getEntriesByName("mark1"); + + assert_equals(entries.length, 0, + "After a call to self.performance.clearMarks(\"mark1\"), " + + "window.performance.getEntriesByName(\"mark1\") returns an empty object."); + + // test that "mark2" still exists + entries = self.performance.getEntriesByName("mark2"); + assert_equals(entries[0].name, "mark2", + "After a call to self.performance.clearMarks(\"mark1\"), " + + "window.performance.getEntriesByName(\"mark2\") returns an object containing the " + + "\"mark2\" mark."); + +}, "Clearing an existent mark doesn't affect other existing marks"); diff --git a/test/fixtures/wpt/user-timing/clear_one_measure.any.js b/test/fixtures/wpt/user-timing/clear_one_measure.any.js new file mode 100644 index 00000000000000..a5e663772c8bbe --- /dev/null +++ b/test/fixtures/wpt/user-timing/clear_one_measure.any.js @@ -0,0 +1,29 @@ +test(function() +{ + self.performance.mark("mark1"); + self.performance.measure("measure1", "mark1"); + self.performance.mark("mark2"); + self.performance.measure("measure2", "mark2"); + + // test that two measures have been created + var entries = self.performance.getEntriesByType("measure"); + assert_equals(entries.length, 2, "Two measures have been created for this test."); + + // clear existent measure + self.performance.clearMeasures("measure1"); + + // test that "measure1" was cleared + entries = self.performance.getEntriesByName("measure1"); + + assert_equals(entries.length, 0, + "After a call to self.performance.clearMeasures(\"measure1\"), " + + "self.performance.getEntriesByName(\"measure1\") returns an empty 
object."); + + // test that "measure2" still exists + entries = self.performance.getEntriesByName("measure2"); + assert_equals(entries[0].name, "measure2", + "After a call to self.performance.clearMeasures(\"measure1\"), " + + "self.performance.getEntriesByName(\"measure2\") returns an object containing the " + + "\"measure2\" measure."); + +}, "Clearing an existent measure doesn't affect other existing measures"); diff --git a/test/fixtures/wpt/user-timing/entry_type.any.js b/test/fixtures/wpt/user-timing/entry_type.any.js new file mode 100644 index 00000000000000..1e37453d09d42e --- /dev/null +++ b/test/fixtures/wpt/user-timing/entry_type.any.js @@ -0,0 +1,13 @@ +test(function () { + self.performance.mark('mark'); + var mark_entry = self.performance.getEntriesByName('mark')[0]; + + assert_equals(Object.prototype.toString.call(mark_entry), '[object PerformanceMark]', 'Class name of mark entry should be PerformanceMark.'); +}, "Validate the user timing entry type PerformanceMark"); + +test(function () { + self.performance.measure('measure'); + var measure_entry = self.performance.getEntriesByName('measure')[0]; + + assert_equals(Object.prototype.toString.call(measure_entry), '[object PerformanceMeasure]', 'Class name of measure entry should be PerformanceMeasure.'); +}, "Validate the user timing entry type PerformanceMeasure"); diff --git a/test/fixtures/wpt/user-timing/idlharness.any.js b/test/fixtures/wpt/user-timing/idlharness.any.js new file mode 100644 index 00000000000000..511f2d0455b833 --- /dev/null +++ b/test/fixtures/wpt/user-timing/idlharness.any.js @@ -0,0 +1,33 @@ +// META: global=window,worker +// META: script=/resources/WebIDLParser.js +// META: script=/resources/idlharness.js +// META: timeout=long + +// https://w3c.github.io/user-timing/ + +'use strict'; + +idl_test( + ['user-timing'], + ['hr-time', 'performance-timeline', 'dom'], + idl_array => { + try { + performance.mark('test'); + performance.measure('test'); + for (const m of performance.getEntriesByType('mark')) { + self.mark = m; + } + for (const m of performance.getEntriesByType('measure')) { + self.measure = m; + } + } catch (e) { + // Will be surfaced when mark is undefined below. + } + + idl_array.add_objects({ + Performance: ['performance'], + PerformanceMark: ['mark'], + PerformanceMeasure: ['measure'], + }); + } +); diff --git a/test/fixtures/wpt/user-timing/invoke_with_timing_attributes.html b/test/fixtures/wpt/user-timing/invoke_with_timing_attributes.html new file mode 100644 index 00000000000000..1df94a3006d7fb --- /dev/null +++ b/test/fixtures/wpt/user-timing/invoke_with_timing_attributes.html @@ -0,0 +1,35 @@ + + + + + exception test of performance.mark and performance.measure + + + + + + +

Description
This test validates exception scenarios of invoking mark() and measure() with timing attributes as value.
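A Node.js sketch of the selective clearing that the clear_one_* and clear_non_existent_* fixtures assert; names are illustrative:

    'use strict';
    const { performance } = require('perf_hooks');

    performance.mark('mark1');
    performance.mark('mark2');

    // entry_type.any.js expects the '[object PerformanceMark]' brand here.
    console.log(Object.prototype.toString.call(performance.getEntriesByName('mark1')[0]));

    performance.clearMarks('mark1');        // removes only 'mark1'
    performance.clearMarks('no-such-mark'); // clearing an unknown name is a no-op
    console.log(performance.getEntriesByName('mark1').length); // 0
    console.log(performance.getEntriesByName('mark2').length); // 1
    performance.clearMarks();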
+ + + diff --git a/test/fixtures/wpt/user-timing/invoke_with_timing_attributes.worker.js b/test/fixtures/wpt/user-timing/invoke_with_timing_attributes.worker.js new file mode 100644 index 00000000000000..32677c64d3bd5f --- /dev/null +++ b/test/fixtures/wpt/user-timing/invoke_with_timing_attributes.worker.js @@ -0,0 +1,25 @@ +importScripts("/resources/testharness.js"); +importScripts("resources/webperftestharness.js"); + +function emit_test(attrName) { + test(function() { + performance.mark(attrName); + performance.clearMarks(attrName); + }, "performance.mark should not throw if used with timing attribute " + attrName + + " in workers"); +} +for (var i in timingAttributes) { + emit_test(timingAttributes[i]); +} + +function emit_test2(attrName) { + test(function() { + performance.measure(attrName); + performance.clearMeasures(attrName); + }, "performance.measure should not throw if used with timing attribute " + attrName + + " in workers"); +} +for (var i in timingAttributes) { + emit_test2(timingAttributes[i]); +} +done(); diff --git a/test/fixtures/wpt/user-timing/invoke_without_parameter.html b/test/fixtures/wpt/user-timing/invoke_without_parameter.html new file mode 100644 index 00000000000000..114435e59befbb --- /dev/null +++ b/test/fixtures/wpt/user-timing/invoke_without_parameter.html @@ -0,0 +1,26 @@ + + + + + exception test of performance.mark and performance.measure + + + + + + + +

Description
This test validates exception scenarios of invoking mark() and measure() without parameter.
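The worker variant above relies on worker scopes having no navigation-timing attributes; under the same assumption for Node.js, those names act as ordinary user-timing names. A sketch:

    'use strict';
    const { performance } = require('perf_hooks');

    performance.mark('navigationStart');              // expected not to throw outside a Window
    performance.measure('to-now', 'navigationStart'); // measures from that mark to now
    console.log(performance.getEntriesByName('navigationStart')[0].entryType); // 'mark'

    performance.clearMarks('navigationStart');
    performance.clearMeasures('to-now');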
+ + + diff --git a/test/fixtures/wpt/user-timing/mark-entry-constructor.any.js b/test/fixtures/wpt/user-timing/mark-entry-constructor.any.js new file mode 100644 index 00000000000000..ef9c403dda6723 --- /dev/null +++ b/test/fixtures/wpt/user-timing/mark-entry-constructor.any.js @@ -0,0 +1,40 @@ +// META: script=resources/user-timing-helper.js + +test(()=>{ + const entry = new PerformanceMark("name"); + assert_true(entry instanceof PerformanceMark); + checkEntry(entry, {name: "name", entryType: "mark"}); +}, "Mark entry can be created by 'new PerformanceMark(string)'."); + +test(()=>{ + const entry = new PerformanceMark("name", {}); + assert_true(entry instanceof PerformanceMark); + checkEntry(entry, {name: "name", entryType: "mark"}); +}, "Mark entry can be created by 'new PerformanceMark(string, {})'."); + +test(()=>{ + const entry = new PerformanceMark("name", {startTime: 1}); + assert_true(entry instanceof PerformanceMark); + checkEntry(entry, {name: "name", entryType: "mark", startTime: 1}); +}, "Mark entry can be created by 'new PerformanceMark(string, {startTime})'."); + +test(()=>{ + const entry = new PerformanceMark("name", {detail: {info: "abc"}}); + assert_true(entry instanceof PerformanceMark); + checkEntry(entry, {name: "name", entryType: "mark", detail: {info: "abc"}}); +}, "Mark entry can be created by 'new PerformanceMark(string, {detail})'."); + +test(()=>{ + const entry = + new PerformanceMark("name", {startTime: 1, detail: {info: "abc"}}); + assert_true(entry instanceof PerformanceMark); + checkEntry(entry, {name: "name", entryType: "mark", startTime: 1, detail: {info: "abc"}}); +}, "Mark entry can be created by " + + "'new PerformanceMark(string, {startTime, detail})'."); + +test(()=>{ + const entry = new PerformanceMark("name"); + assert_true(entry instanceof PerformanceMark); + checkEntry(entry, {name: "name", entryType: "mark"}); + assert_equals(performance.getEntriesByName("name").length, 0); +}, "Using new PerformanceMark() shouldn't add the entry to performance timeline."); diff --git a/test/fixtures/wpt/user-timing/mark-errors.any.js b/test/fixtures/wpt/user-timing/mark-errors.any.js new file mode 100644 index 00000000000000..dcd36695e22f2c --- /dev/null +++ b/test/fixtures/wpt/user-timing/mark-errors.any.js @@ -0,0 +1,15 @@ +test(function() { + assert_throws_js(TypeError, function() { self.performance.mark("mark1", 123); }, "Number passed as a dict argument should cause type-error.") +}, "Number should be rejected as the mark-options.") + +test(function() { + assert_throws_js(TypeError, function() { self.performance.mark("mark1", NaN); }, "NaN passed as a dict argument should cause type-error.") +}, "NaN should be rejected as the mark-options.") + +test(function() { + assert_throws_js(TypeError, function() { self.performance.mark("mark1", Infinity); }, "Infinity passed as a dict argument should cause type-error.") +}, "Infinity should be rejected as the mark-options.") + +test(function() { + assert_throws_js(TypeError, function() { self.performance.mark("mark1", "string"); }, "String passed as a dict argument should cause type-error.") +}, "String should be rejected as the mark-options.") diff --git a/test/fixtures/wpt/user-timing/mark-l3.any.js b/test/fixtures/wpt/user-timing/mark-l3.any.js new file mode 100644 index 00000000000000..407a5c8bba6a3c --- /dev/null +++ b/test/fixtures/wpt/user-timing/mark-l3.any.js @@ -0,0 +1,39 @@ +// META: script=resources/user-timing-helper.js + +async_test(function (t) { + let mark_entries = []; + const expected_entries = + [{ 
entryType: "mark", name: "mark1", detail: null}, + { entryType: "mark", name: "mark2", detail: null}, + { entryType: "mark", name: "mark3", detail: null}, + { entryType: "mark", name: "mark4", detail: null}, + { entryType: "mark", name: "mark5", detail: null}, + { entryType: "mark", name: "mark6", detail: {}}, + { entryType: "mark", name: "mark7", detail: {info: 'abc'}}, + { entryType: "mark", name: "mark8", detail: null, startTime: 234.56}, + { entryType: "mark", name: "mark9", detail: {count: 3}, startTime: 345.67}]; + const observer = new PerformanceObserver( + t.step_func(function (entryList, obs) { + mark_entries = + mark_entries.concat(entryList.getEntries()); + if (mark_entries.length >= expected_entries.length) { + checkEntries(mark_entries, expected_entries); + observer.disconnect(); + t.done(); + } + }) + ); + self.performance.clearMarks(); + observer.observe({entryTypes: ["mark"]}); + const returned_entries = []; + returned_entries.push(self.performance.mark("mark1")); + returned_entries.push(self.performance.mark("mark2", undefined)); + returned_entries.push(self.performance.mark("mark3", null)); + returned_entries.push(self.performance.mark("mark4", {})); + returned_entries.push(self.performance.mark("mark5", {detail: null})); + returned_entries.push(self.performance.mark("mark6", {detail: {}})); + returned_entries.push(self.performance.mark("mark7", {detail: {info: 'abc'}})); + returned_entries.push(self.performance.mark("mark8", {startTime: 234.56})); + returned_entries.push(self.performance.mark("mark9", {detail: {count: 3}, startTime: 345.67})); + checkEntries(returned_entries, expected_entries); +}, "mark entries' detail and startTime are customizable."); diff --git a/test/fixtures/wpt/user-timing/mark-measure-feature-detection.html b/test/fixtures/wpt/user-timing/mark-measure-feature-detection.html new file mode 100644 index 00000000000000..6f1ad489e95680 --- /dev/null +++ b/test/fixtures/wpt/user-timing/mark-measure-feature-detection.html @@ -0,0 +1,36 @@ + + +User Timing: L2 vs L3 feature detection + + + diff --git a/test/fixtures/wpt/user-timing/mark-measure-return-objects.any.js b/test/fixtures/wpt/user-timing/mark-measure-return-objects.any.js new file mode 100644 index 00000000000000..bb15c5839818ba --- /dev/null +++ b/test/fixtures/wpt/user-timing/mark-measure-return-objects.any.js @@ -0,0 +1,37 @@ +async_test(function (t) { + self.performance.clearMeasures(); + const measure = self.performance.measure("measure1"); + assert_true(measure instanceof PerformanceMeasure); + t.done(); +}, "L3: performance.measure(name) should return an entry."); + +async_test(function (t) { + self.performance.clearMeasures(); + const measure = self.performance.measure("measure2", + { start: 12, end: 23 }); + assert_true(measure instanceof PerformanceMeasure); + t.done(); +}, "L3: performance.measure(name, param1) should return an entry."); + +async_test(function (t) { + self.performance.clearMeasures(); + self.performance.mark("1"); + self.performance.mark("2"); + const measure = self.performance.measure("measure3", "1", "2"); + assert_true(measure instanceof PerformanceMeasure); + t.done(); +}, "L3: performance.measure(name, param1, param2) should return an entry."); + +async_test(function (t) { + self.performance.clearMarks(); + const mark = self.performance.mark("mark1"); + assert_true(mark instanceof PerformanceMark); + t.done(); +}, "L3: performance.mark(name) should return an entry."); + +async_test(function (t) { + self.performance.clearMarks(); + const mark = 
self.performance.mark("mark2", { startTime: 34 }); + assert_true(mark instanceof PerformanceMark); + t.done(); +}, "L3: performance.mark(name, param) should return an entry."); diff --git a/test/fixtures/wpt/user-timing/mark.any.js b/test/fixtures/wpt/user-timing/mark.any.js new file mode 100644 index 00000000000000..7e814d2074ca8b --- /dev/null +++ b/test/fixtures/wpt/user-timing/mark.any.js @@ -0,0 +1,118 @@ +// test data +var testThreshold = 20; + +var expectedTimes = new Array(); + +function match_entries(entries, index) +{ + var entry = entries[index]; + var match = self.performance.getEntriesByName("mark")[index]; + assert_equals(entry.name, match.name, "entry.name"); + assert_equals(entry.startTime, match.startTime, "entry.startTime"); + assert_equals(entry.entryType, match.entryType, "entry.entryType"); + assert_equals(entry.duration, match.duration, "entry.duration"); +} + +function filter_entries_by_type(entryList, entryType) +{ + var testEntries = new Array(); + + // filter entryList + for (var i in entryList) + { + if (entryList[i].entryType == entryType) + { + testEntries.push(entryList[i]); + } + } + + return testEntries; +} + +test(function () { + // create first mark + self.performance.mark("mark"); + + expectedTimes[0] = self.performance.now(); + + const entries = self.performance.getEntriesByName("mark"); + assert_equals(entries.length, 1); +}, "Entry 0 is properly created"); + +test(function () { + // create second, duplicate mark + self.performance.mark("mark"); + + expectedTimes[1] = self.performance.now(); + + const entries = self.performance.getEntriesByName("mark"); + assert_equals(entries.length, 2); + +}, "Entry 1 is properly created"); + +function test_mark(index) { + test(function () { + const entries = self.performance.getEntriesByName("mark"); + assert_equals(entries[index].name, "mark", "Entry has the proper name"); + }, "Entry " + index + " has the proper name"); + + test(function () { + const entries = self.performance.getEntriesByName("mark"); + assert_approx_equals(entries[index].startTime, expectedTimes[index], testThreshold); + }, "Entry " + index + " startTime is approximately correct (up to " + testThreshold + + "ms difference allowed)"); + + test(function () { + const entries = self.performance.getEntriesByName("mark"); + assert_equals(entries[index].entryType, "mark"); + }, "Entry " + index + " has the proper entryType"); + + test(function () { + const entries = self.performance.getEntriesByName("mark"); + assert_equals(entries[index].duration, 0); + }, "Entry " + index + " duration == 0"); + + test(function () { + const entries = self.performance.getEntriesByName("mark", "mark"); + assert_equals(entries[index].name, "mark"); + }, "getEntriesByName(\"mark\", \"mark\")[" + index + "] returns an " + + "object containing a \"mark\" mark"); + + test(function () { + const entries = self.performance.getEntriesByName("mark", "mark"); + match_entries(entries, index); + }, "The mark returned by getEntriesByName(\"mark\", \"mark\")[" + index + + "] matches the mark returned by " + + "getEntriesByName(\"mark\")[" + index + "]"); + + test(function () { + const entries = filter_entries_by_type(self.performance.getEntries(), "mark"); + assert_equals(entries[index].name, "mark"); + }, "getEntries()[" + index + "] returns an " + + "object containing a \"mark\" mark"); + + test(function () { + const entries = filter_entries_by_type(self.performance.getEntries(), "mark"); + match_entries(entries, index); + }, "The mark returned by getEntries()[" + index + + "] 
matches the mark returned by " + + "getEntriesByName(\"mark\")[" + index + "]"); + + test(function () { + const entries = self.performance.getEntriesByType("mark"); + assert_equals(entries[index].name, "mark"); + }, "getEntriesByType(\"mark\")[" + index + "] returns an " + + "object containing a \"mark\" mark"); + + test(function () { + const entries = self.performance.getEntriesByType("mark"); + match_entries(entries, index); + }, "The mark returned by getEntriesByType(\"mark\")[" + index + + "] matches the mark returned by " + + "getEntriesByName(\"mark\")[" + index + "]"); + +} + +for (var i = 0; i < expectedTimes.length; i++) { + test_mark(i); +} diff --git a/test/fixtures/wpt/user-timing/mark.html b/test/fixtures/wpt/user-timing/mark.html new file mode 100644 index 00000000000000..e03e9e6247adab --- /dev/null +++ b/test/fixtures/wpt/user-timing/mark.html @@ -0,0 +1,58 @@ + + + + +functionality test of window.performance.mark + + + + + + + + + + +

Description
This test validates functionality of the interface window.performance.mark.
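A Node.js sketch of the User Timing Level 3 mark options exercised by mark-l3.any.js and mark-measure-return-objects.any.js; the values are illustrative:

    'use strict';
    const { performance } = require('perf_hooks');

    const plain = performance.mark('step');      // returns the created PerformanceMark
    const custom = performance.mark('step-detailed', {
      startTime: 123.45,                   // caller-supplied timestamp
      detail: { phase: 'init', count: 3 }, // structured, serializable detail
    });

    console.log(plain.entryType, plain.duration);       // 'mark' 0
    console.log(custom.startTime, custom.detail.phase); // 123.45 'init'
    performance.clearMarks();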
+ + diff --git a/test/fixtures/wpt/user-timing/mark_exceptions.html b/test/fixtures/wpt/user-timing/mark_exceptions.html new file mode 100644 index 00000000000000..b445c6b8778ae7 --- /dev/null +++ b/test/fixtures/wpt/user-timing/mark_exceptions.html @@ -0,0 +1,41 @@ + + + + + window.performance User Timing mark() method is throwing the proper exceptions + + + + + + + + + +

Description
This test validates that the performance.mark() method throws a SYNTAX_ERR exception whenever a navigation timing attribute is provided for the name parameter.
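The mark-errors.any.js cases above expect non-dictionary mark options to be rejected; a quick check against Node.js, assuming it reports these as TypeErrors like the fixture does:

    'use strict';
    const assert = require('assert');
    const { performance } = require('perf_hooks');

    for (const badOptions of [123, NaN, Infinity, 'string']) {
      // A primitive markOptions value should be rejected with a TypeError.
      assert.throws(() => performance.mark('m', badOptions), TypeError);
    }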
+ + diff --git a/test/fixtures/wpt/user-timing/measure-exceptions.html b/test/fixtures/wpt/user-timing/measure-exceptions.html new file mode 100644 index 00000000000000..2836eaee2a86c1 --- /dev/null +++ b/test/fixtures/wpt/user-timing/measure-exceptions.html @@ -0,0 +1,49 @@ + + + + This tests that 'performance.measure' throws exceptions with reasonable messages. + + + + + + + \ No newline at end of file diff --git a/test/fixtures/wpt/user-timing/measure-l3.any.js b/test/fixtures/wpt/user-timing/measure-l3.any.js new file mode 100644 index 00000000000000..24c27c483515ed --- /dev/null +++ b/test/fixtures/wpt/user-timing/measure-l3.any.js @@ -0,0 +1,35 @@ +// META: script=resources/user-timing-helper.js + +function endTime(entry) { + return entry.startTime + entry.duration; +} + +test(function() { + performance.clearMarks(); + performance.clearMeasures(); + const markEntry = performance.mark("mark", {startTime: 123}); + const measureEntry = performance.measure("A", undefined, "mark"); + assert_equals(measureEntry.startTime, 0); + assert_equals(endTime(measureEntry), markEntry.startTime); +}, "When the end mark is given and the start is unprovided, the end time of the measure entry should be the end mark's time, the start time should be 0."); + +test(function() { + performance.clearMarks(); + performance.clearMeasures(); + const markEntry = performance.mark("mark", {startTime: 123}); + const endMin = performance.now(); + const measureEntry = performance.measure("A", "mark", undefined); + const endMax = performance.now(); + assert_equals(measureEntry.startTime, markEntry.startTime); + assert_greater_than_equal(endTime(measureEntry), endMin); + assert_greater_than_equal(endMax, endTime(measureEntry)); +}, "When the start mark is given and the end is unprovided, the start time of the measure entry should be the start mark's time, the end should be now."); + +test(function() { + performance.clearMarks(); + performance.clearMeasures(); + const markEntry = performance.mark("mark", {startTime: 123}); + const measureEntry = performance.measure("A", "mark", "mark"); + assert_equals(endTime(measureEntry), markEntry.startTime); + assert_equals(measureEntry.startTime, markEntry.startTime); +}, "When start and end mark are both given, the start time and end time of the measure entry should be the the marks' time, repectively"); diff --git a/test/fixtures/wpt/user-timing/measure-with-dict.any.js b/test/fixtures/wpt/user-timing/measure-with-dict.any.js new file mode 100644 index 00000000000000..b452feb0de6fbb --- /dev/null +++ b/test/fixtures/wpt/user-timing/measure-with-dict.any.js @@ -0,0 +1,112 @@ +// META: script=resources/user-timing-helper.js + +function cleanupPerformanceTimeline() { + performance.clearMarks(); + performance.clearMeasures(); +} + +async_test(function (t) { + this.add_cleanup(cleanupPerformanceTimeline); + let measureEntries = []; + const timeStamp1 = 784.4; + const timeStamp2 = 1234.5; + const timeStamp3 = 66.6; + const timeStamp4 = 5566; + const expectedEntries = + [{ entryType: "measure", name: "measure1", detail: null, startTime: 0 }, + { entryType: "measure", name: "measure2", detail: null, startTime: 0 }, + { entryType: "measure", name: "measure3", detail: null, startTime: 0 }, + { entryType: "measure", name: "measure4", detail: null }, + { entryType: "measure", name: "measure5", detail: null, startTime: 0 }, + { entryType: "measure", name: "measure6", detail: null, startTime: timeStamp1 }, + { entryType: "measure", name: "measure7", detail: null, startTime: timeStamp1, 
duration: timeStamp2 - timeStamp1 }, + { entryType: "measure", name: "measure8", detail: null, startTime: 0 }, + { entryType: "measure", name: "measure9", detail: null, startTime: 0 }, + { entryType: "measure", name: "measure10", detail: null, startTime: timeStamp1 }, + { entryType: "measure", name: "measure11", detail: null, startTime: timeStamp3 }, + { entryType: "measure", name: "measure12", detail: null, startTime: 0 }, + { entryType: "measure", name: "measure13", detail: null, startTime: 0 }, + { entryType: "measure", name: "measure14", detail: null, startTime: timeStamp3, duration: timeStamp1 - timeStamp3 }, + { entryType: "measure", name: "measure15", detail: null, startTime: timeStamp1, duration: timeStamp2 - timeStamp1 }, + { entryType: "measure", name: "measure16", detail: null, startTime: timeStamp1 }, + { entryType: "measure", name: "measure17", detail: { customInfo: 159 }, startTime: timeStamp3, duration: timeStamp2 - timeStamp3 }, + { entryType: "measure", name: "measure18", detail: null, startTime: timeStamp1, duration: timeStamp2 - timeStamp1 }, + { entryType: "measure", name: "measure19", detail: null, startTime: timeStamp1, duration: timeStamp2 - timeStamp1 }, + { entryType: "measure", name: "measure20", detail: null, startTime: 0 }, + { entryType: "measure", name: "measure21", detail: null, startTime: 0 }, + { entryType: "measure", name: "measure22", detail: null, startTime: 0 }, + { entryType: "measure", name: "measure23", detail: null, startTime: 0 }]; + const observer = new PerformanceObserver( + t.step_func(function (entryList, obs) { + measureEntries = + measureEntries.concat(entryList.getEntries()); + if (measureEntries.length >= expectedEntries.length) { + checkEntries(measureEntries, expectedEntries); + observer.disconnect(); + t.done(); + } + }) + ); + observer.observe({ entryTypes: ["measure"] }); + self.performance.mark("mark1", { detail: { randomInfo: 3 }, startTime: timeStamp1 }); + self.performance.mark("mark2", { startTime: timeStamp2 }); + + const returnedEntries = []; + returnedEntries.push(self.performance.measure("measure1")); + returnedEntries.push(self.performance.measure("measure2", undefined)); + returnedEntries.push(self.performance.measure("measure3", null)); + returnedEntries.push(self.performance.measure("measure4", 'mark1')); + returnedEntries.push( + self.performance.measure("measure5", null, 'mark1')); + returnedEntries.push( + self.performance.measure("measure6", 'mark1', undefined)); + returnedEntries.push( + self.performance.measure("measure7", 'mark1', 'mark2')); + returnedEntries.push( + self.performance.measure("measure8", {})); + returnedEntries.push( + self.performance.measure("measure9", { start: undefined })); + returnedEntries.push( + self.performance.measure("measure10", { start: 'mark1' })); + returnedEntries.push( + self.performance.measure("measure11", { start: timeStamp3 })); + returnedEntries.push( + self.performance.measure("measure12", { end: undefined })); + returnedEntries.push( + self.performance.measure("measure13", { end: 'mark1' })); + returnedEntries.push( + self.performance.measure("measure14", { start: timeStamp3, end: 'mark1' })); + returnedEntries.push( + self.performance.measure("measure15", { start: timeStamp1, end: timeStamp2, detail: undefined })); + returnedEntries.push( + self.performance.measure("measure16", { start: 'mark1', end: undefined, detail: null })); + returnedEntries.push( + self.performance.measure("measure17", { start: timeStamp3, end: 'mark2', detail: { customInfo: 159 }})); + 
returnedEntries.push( + self.performance.measure("measure18", { start: timeStamp1, duration: timeStamp2 - timeStamp1 })); + returnedEntries.push( + self.performance.measure("measure19", { duration: timeStamp2 - timeStamp1, end: timeStamp2 })); + // {}, null, undefined, invalid-dict passed to startOrOptions are interpreted as start time being 0. + returnedEntries.push(self.performance.measure("measure20", {}, 'mark1')); + returnedEntries.push(self.performance.measure("measure21", null, 'mark1')); + returnedEntries.push(self.performance.measure("measure22", undefined, 'mark1')); + returnedEntries.push(self.performance.measure("measure23", { invalidDict:1 }, 'mark1')); + checkEntries(returnedEntries, expectedEntries); +}, "measure entries' detail and start/end are customizable"); + +test(function() { + this.add_cleanup(cleanupPerformanceTimeline); + assert_throws_js(TypeError, function() { + self.performance.measure("optionsAndNumberEnd", {'start': 2}, 12); + }, "measure should throw a TypeError when passed an options object and an end time"); + assert_throws_js(TypeError, function() { + self.performance.measure("optionsAndMarkEnd", {'start': 2}, 'mark1'); + }, "measure should throw a TypeError when passed an options object and an end mark"); + assert_throws_js(TypeError, function() { + self.performance.measure("negativeStartInOptions", {'start': -1}); + }, "measure cannot have a negative time stamp."); + assert_throws_js(TypeError, function() { + self.performance.measure("negativeEndInOptions", {'end': -1}); + }, "measure cannot have a negative time stamp for end."); +}, "measure should throw a TypeError when passed an invalid argument combination"); + diff --git a/test/fixtures/wpt/user-timing/measure.html b/test/fixtures/wpt/user-timing/measure.html new file mode 100644 index 00000000000000..40f71a3362b581 --- /dev/null +++ b/test/fixtures/wpt/user-timing/measure.html @@ -0,0 +1,362 @@ + + + + + + window.performance User Timing measure() method is working properly + + + + + + + + + + +

Description
This test validates that the performance.measure() method is working properly. This test creates the following measures to test this method:
• "measure_no_start_no_end": created using a measure() call without a startMark or endMark provided
• "measure_start_no_end": created using a measure() call with only the startMark provided
• "measure_start_end": created using a measure() call with both a startMark and endMark provided
• "measure_no_start_end": created using a measure() call with only the endMark provided
• "measure_no_start_no_end": duplicate of the first measure, used to confirm names can be re-used
After creating each measure, the existence of these measures is validated by calling performance.getEntriesByName() (both with and without the entryType parameter provided), performance.getEntriesByType(), and performance.getEntries().
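For reference, the measure() call shapes that measure.html walks through, reduced to a Node.js sketch with illustrative names:

    'use strict';
    const { performance } = require('perf_hooks');

    performance.mark('start');
    performance.mark('end');

    performance.measure('no_start_no_end');           // time origin .. now
    performance.measure('start_no_end', 'start');     // 'start' mark .. now
    performance.measure('start_end', 'start', 'end'); // 'start' mark .. 'end' mark

    for (const m of performance.getEntriesByType('measure')) {
      console.log(m.name, m.startTime.toFixed(2), m.duration.toFixed(2));
    }
    performance.clearMarks();
    performance.clearMeasures();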
+ + diff --git a/test/fixtures/wpt/user-timing/measure_associated_with_navigation_timing.html b/test/fixtures/wpt/user-timing/measure_associated_with_navigation_timing.html new file mode 100644 index 00000000000000..702e3d5f87f39e --- /dev/null +++ b/test/fixtures/wpt/user-timing/measure_associated_with_navigation_timing.html @@ -0,0 +1,57 @@ + + + + +functionality test of window.performance.measure + + + + + + + + + + +

Description
This test validates functionality of the interface window.performance.measure using keywords from the Navigation Timing spec.
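A sketch of the PerformanceMeasureOptions combinations covered by measure-with-dict.any.js above, using explicit (illustrative) timestamps:

    'use strict';
    const { performance } = require('perf_hooks');

    const a = performance.measure('span-start-end', { start: 50, end: 130 });
    const b = performance.measure('span-start-duration', { start: 50, duration: 80 });
    const c = performance.measure('span-duration-end', { duration: 80, end: 130 });
    const d = performance.measure('span-detailed', { start: 50, end: 130, detail: { requestId: 7 } });

    console.log(a.duration, b.duration, c.duration); // 80 80 80
    console.log(d.detail.requestId);                 // 7
    performance.clearMeasures();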
+ + diff --git a/test/fixtures/wpt/user-timing/measure_exception.html b/test/fixtures/wpt/user-timing/measure_exception.html new file mode 100644 index 00000000000000..5c1aa086c0fc88 --- /dev/null +++ b/test/fixtures/wpt/user-timing/measure_exception.html @@ -0,0 +1,34 @@ + + + + +exception test of window.performance.measure + + + + + + + + + + +

Description
This test validates all exception scenarios of method window.performance.measure in User Timing API.
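A Node.js sketch of the unknown-mark error path that measure_exception.html and measure_syntax_err.any.js (further down) both assert, assuming it surfaces as the spec's 'SyntaxError' DOMException:

    'use strict';
    const assert = require('assert');
    const { performance } = require('perf_hooks');

    performance.mark('existing_mark');
    performance.measure('ok', 'existing_mark');  // referencing an existing mark works

    assert.throws(
      () => performance.measure('bad', 'missing_mark'), // startMark was never created
      { name: 'SyntaxError' });

    performance.clearMarks();
    performance.clearMeasures();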
+ + + diff --git a/test/fixtures/wpt/user-timing/measure_exceptions_navigation_timing.html b/test/fixtures/wpt/user-timing/measure_exceptions_navigation_timing.html new file mode 100644 index 00000000000000..b1868b2cb6b3cb --- /dev/null +++ b/test/fixtures/wpt/user-timing/measure_exceptions_navigation_timing.html @@ -0,0 +1,70 @@ + + + + + window.performance User Timing measure() method is throwing the proper exceptions + + + + + + + + + +

Description
window.performance.measure() method throws an InvalidAccessError whenever a navigation timing attribute with a value of zero is provided as the startMark or endMark.
+ + diff --git a/test/fixtures/wpt/user-timing/measure_navigation_timing.html b/test/fixtures/wpt/user-timing/measure_navigation_timing.html new file mode 100644 index 00000000000000..d6480d27a261c9 --- /dev/null +++ b/test/fixtures/wpt/user-timing/measure_navigation_timing.html @@ -0,0 +1,205 @@ + + + + + + window.performance User Timing clearMeasures() method is working properly with navigation timing + attributes + + + + + + + + + + +

Description
This test validates that the performance.measure() method is working properly when navigation timing attributes are used in place of mark names. This test creates the following measures to test this method:
• "measure_nav_start_no_end": created using a measure() call with a navigation timing attribute provided as the startMark and nothing provided as the endMark
• "measure_nav_start_mark_end": created using a measure() call with a navigation timing attribute provided as the startMark and a mark name provided as the endMark
• "measure_mark_start_nav_end": created using a measure() call with a mark name provided as the startMark and a navigation timing attribute provided as the endMark
• "measure_nav_start_nav_end": created using a measure() call with a navigation timing attribute provided as both the startMark and endMark
After creating each measure, the existence of these measures is validated by calling performance.getEntriesByName() with each measure name.
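The navigation-timing keywords above only exist in a Window; the nearest Node.js equivalent is measuring between explicit timestamps, sketched here with illustrative names:

    'use strict';
    const { performance } = require('perf_hooks');

    const fetchStartLike = performance.now();
    for (let i = 0; i < 1e6; i++);  // stand-in for real work
    const responseEndLike = performance.now();

    performance.measure('request-like', { start: fetchStartLike, end: responseEndLike });
    console.log(performance.getEntriesByName('request-like')[0].duration >= 0); // true
    performance.clearMeasures();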
+ + diff --git a/test/fixtures/wpt/user-timing/measure_syntax_err.any.js b/test/fixtures/wpt/user-timing/measure_syntax_err.any.js new file mode 100644 index 00000000000000..9b762a40906351 --- /dev/null +++ b/test/fixtures/wpt/user-timing/measure_syntax_err.any.js @@ -0,0 +1,33 @@ +test(function () { + self.performance.mark("existing_mark"); + var entries = self.performance.getEntriesByName("existing_mark"); + assert_equals(entries.length, 1); + self.performance.measure("measure", "existing_mark"); +}, "Create a mark \"existing_mark\""); +test(function () { + assert_throws_dom("SyntaxError", function () { + self.performance.measure("measure", "mark"); + }); +}, "self.performance.measure(\"measure\", \"mark\"), where \"mark\" is a non-existent mark, " + + "throws a SyntaxError exception."); + +test(function () { + assert_throws_dom("SyntaxError", function () { + self.performance.measure("measure", "mark", "existing_mark"); + }); +}, "self.performance.measure(\"measure\", \"mark\", \"existing_mark\"), where \"mark\" is a " + + "non-existent mark, throws a SyntaxError exception."); + +test(function () { + assert_throws_dom("SyntaxError", function () { + self.performance.measure("measure", "existing_mark", "mark"); + }); +}, "self.performance.measure(\"measure\", \"existing_mark\", \"mark\"), where \"mark\" " + + "is a non-existent mark, throws a SyntaxError exception."); + +test(function () { + assert_throws_dom("SyntaxError", function () { + self.performance.measure("measure", "mark", "mark"); + }); +}, "self.performance.measure(\"measure\", \"mark\", \"mark\"), where \"mark\" is a " + + "non-existent mark, throws a SyntaxError exception."); diff --git a/test/fixtures/wpt/user-timing/measures.html b/test/fixtures/wpt/user-timing/measures.html new file mode 100644 index 00000000000000..0de68965ddb9c7 --- /dev/null +++ b/test/fixtures/wpt/user-timing/measures.html @@ -0,0 +1,66 @@ + + + + +functionality test of window.performance.measure + + + + + + + + + + +

Description
This test validates functionality of the interface window.performance.measure.
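User-timing names can be reused, as the measure fixtures above demonstrate; a Node.js sketch of that, plus the startTime ordering getEntriesByName is expected to preserve:

    'use strict';
    const { performance } = require('perf_hooks');

    performance.mark('reused');
    performance.mark('reused'); // the same name may be used again

    const entries = performance.getEntriesByName('reused', 'mark');
    console.log(entries.length);                               // 2
    console.log(entries[0].startTime <= entries[1].startTime); // true
    performance.clearMarks('reused');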
+ + diff --git a/test/fixtures/wpt/user-timing/performance-measure-invalid.worker.js b/test/fixtures/wpt/user-timing/performance-measure-invalid.worker.js new file mode 100644 index 00000000000000..29efb729992cc6 --- /dev/null +++ b/test/fixtures/wpt/user-timing/performance-measure-invalid.worker.js @@ -0,0 +1,9 @@ +importScripts("/resources/testharness.js"); + +test(() => { + assert_throws_js(TypeError, () => { + performance.measure('name', 'navigationStart', 'navigationStart'); + }); +}, "When converting 'navigationStart' to a timestamp, the global object has to be a Window object."); + +done(); diff --git a/test/fixtures/wpt/user-timing/resources/user-timing-helper.js b/test/fixtures/wpt/user-timing/resources/user-timing-helper.js new file mode 100644 index 00000000000000..8d43768ec28196 --- /dev/null +++ b/test/fixtures/wpt/user-timing/resources/user-timing-helper.js @@ -0,0 +1,30 @@ +// Compares a list of performance entries to a predefined one. +// actualEntries is an array of performance entries from the user agent, +// and expectedEntries is an array of performance entries minted by the test. +// The comparison doesn't assert the order of the entries. +function checkEntries(actualEntries, expectedEntries) { + assert_equals(actualEntries.length, expectedEntries.length, + `The length of actual and expected entries should match. + actual: ${JSON.stringify(actualEntries)}, + expected: ${JSON.stringify(expectedEntries)}`); + const actualEntrySet = new Set(actualEntries.map(ae=>ae.name)); + assert_equals(actualEntrySet.size, actualEntries.length, `Actual entry names are not unique: ${JSON.stringify(actualEntries)}`); + const expectedEntrySet = new Set(expectedEntries.map(ee=>ee.name)); + assert_equals(expectedEntrySet.size, expectedEntries.length, `Expected entry names are not unique: ${JSON.stringify(expectedEntries)}`); + actualEntries.forEach(ae=>{ + const expectedEntry = expectedEntries.find(e=>e.name === ae.name); + assert_true(!!expectedEntry, `Entry name '${ae.name}' was not found.`); + checkEntry(ae, expectedEntry); + }); +} + +function checkEntry(entry, {name, entryType, startTime, detail, duration}) { + assert_equals(entry.name, name); + assert_equals(entry.entryType, entryType); + if (startTime !== undefined) + assert_equals(entry.startTime, startTime); + if (detail !== undefined) + assert_equals(JSON.stringify(entry.detail), JSON.stringify(detail)); + if (duration !== undefined) + assert_equals(entry.duration, duration); +} diff --git a/test/fixtures/wpt/user-timing/resources/webperftestharness.js b/test/fixtures/wpt/user-timing/resources/webperftestharness.js new file mode 100644 index 00000000000000..2fbd0210de906d --- /dev/null +++ b/test/fixtures/wpt/user-timing/resources/webperftestharness.js @@ -0,0 +1,124 @@ +// +// Helper functions for User Timing tests +// + +var timingAttributes = [ + "navigationStart", + "unloadEventStart", + "unloadEventEnd", + "redirectStart", + "redirectEnd", + "fetchStart", + "domainLookupStart", + "domainLookupEnd", + "connectStart", + "connectEnd", + "secureConnectionStart", + "requestStart", + "responseStart", + "responseEnd", + "domLoading", + "domInteractive", + "domContentLoadedEventStart", + "domContentLoadedEventEnd", + "domComplete", + "loadEventStart", + "loadEventEnd" +]; + +function has_required_interfaces() +{ + if (window.performance.mark == undefined || + window.performance.clearMarks == undefined || + window.performance.measure == undefined || + window.performance.clearMeasures == undefined || + 
diff --git a/test/fixtures/wpt/user-timing/resources/webperftestharness.js b/test/fixtures/wpt/user-timing/resources/webperftestharness.js
new file mode 100644
index 00000000000000..2fbd0210de906d
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/resources/webperftestharness.js
@@ -0,0 +1,124 @@
+//
+// Helper functions for User Timing tests
+//
+
+var timingAttributes = [
+  "navigationStart",
+  "unloadEventStart",
+  "unloadEventEnd",
+  "redirectStart",
+  "redirectEnd",
+  "fetchStart",
+  "domainLookupStart",
+  "domainLookupEnd",
+  "connectStart",
+  "connectEnd",
+  "secureConnectionStart",
+  "requestStart",
+  "responseStart",
+  "responseEnd",
+  "domLoading",
+  "domInteractive",
+  "domContentLoadedEventStart",
+  "domContentLoadedEventEnd",
+  "domComplete",
+  "loadEventStart",
+  "loadEventEnd"
+];
+
+function has_required_interfaces()
+{
+  if (window.performance.mark == undefined ||
+      window.performance.clearMarks == undefined ||
+      window.performance.measure == undefined ||
+      window.performance.clearMeasures == undefined ||
+      window.performance.getEntriesByName == undefined ||
+      window.performance.getEntriesByType == undefined ||
+      window.performance.getEntries == undefined) {
+    return false;
+  }
+
+  return true;
+}
+
+function test_namespace(child_name, skip_root)
+{
+  if (skip_root === undefined) {
+    var msg = 'window.performance is defined';
+    wp_test(function () { assert_not_equals(performanceNamespace, undefined, msg); }, msg);
+  }
+
+  if (child_name !== undefined) {
+    var msg2 = 'window.performance.' + child_name + ' is defined';
+    wp_test(function() { assert_not_equals(performanceNamespace[child_name], undefined, msg2); }, msg2);
+  }
+}
+
+function test_attribute_exists(parent_name, attribute_name, properties)
+{
+  var msg = 'window.performance.' + parent_name + '.' + attribute_name + ' is defined.';
+  wp_test(function() { assert_not_equals(performanceNamespace[parent_name][attribute_name], undefined, msg); }, msg, properties);
+}
+
+function test_enum(parent_name, enum_name, value, properties)
+{
+  var msg = 'window.performance.' + parent_name + '.' + enum_name + ' is defined.';
+  wp_test(function() { assert_not_equals(performanceNamespace[parent_name][enum_name], undefined, msg); }, msg, properties);
+
+  msg = 'window.performance.' + parent_name + '.' + enum_name + ' = ' + value;
+  wp_test(function() { assert_equals(performanceNamespace[parent_name][enum_name], value, msg); }, msg, properties);
+}
+
+function test_timing_order(attribute_name, greater_than_attribute, properties)
+{
+  // ensure it's not 0 first
+  var msg = "window.performance.timing." + attribute_name + " > 0";
+  wp_test(function() { assert_true(performanceNamespace.timing[attribute_name] > 0, msg); }, msg, properties);
+
+  // ensure it's in the right order
+  msg = "window.performance.timing." + attribute_name + " >= window.performance.timing." + greater_than_attribute;
+  wp_test(function() { assert_true(performanceNamespace.timing[attribute_name] >= performanceNamespace.timing[greater_than_attribute], msg); }, msg, properties);
+}
+
+function test_timing_greater_than(attribute_name, greater_than, properties)
+{
+  var msg = "window.performance.timing." + attribute_name + " > " + greater_than;
+  test_greater_than(performanceNamespace.timing[attribute_name], greater_than, msg, properties);
+}
+
+function test_timing_equals(attribute_name, equals, msg, properties)
+{
+ attribute_name + " == " + equals; + test_equals(performanceNamespace.timing[attribute_name], equals, test_msg, properties); +} + +// +// Non-test related helper functions +// + +function sleep_milliseconds(n) +{ + var start = new Date().getTime(); + while (true) { + if ((new Date().getTime() - start) >= n) break; + } +} + +// +// Common helper functions +// + +function test_greater_than(value, greater_than, msg, properties) +{ + wp_test(function () { assert_true(value > greater_than, msg); }, msg, properties); +} + +function test_greater_or_equals(value, greater_than, msg, properties) +{ + wp_test(function () { assert_true(value >= greater_than, msg); }, msg, properties); +} + +function test_not_equals(value, notequals, msg, properties) +{ + wp_test(function() { assert_not_equals(value, notequals, msg); }, msg, properties); +} diff --git a/test/fixtures/wpt/user-timing/resources/webperftestharnessextension.js b/test/fixtures/wpt/user-timing/resources/webperftestharnessextension.js new file mode 100644 index 00000000000000..8640918d4f255e --- /dev/null +++ b/test/fixtures/wpt/user-timing/resources/webperftestharnessextension.js @@ -0,0 +1,202 @@ +// +// Helper functions for User Timing tests +// + +var mark_names = [ + '', + '1', + 'abc', +]; + +var measures = [ + [''], + ['2', 1], + ['aaa', 'navigationStart', ''], +]; + +function test_method_exists(method, method_name, properties) +{ + var msg; + if (typeof method === 'function') + msg = 'performance.' + method.name + ' is supported!'; + else + msg = 'performance.' + method_name + ' is supported!'; + wp_test(function() { assert_equals(typeof method, 'function', msg); }, msg, properties); +} + +function test_method_throw_exception(func_str, exception, msg) +{ + let exception_name; + let test_func; + if (typeof exception == "function") { + exception_name = exception.name; + test_func = assert_throws_js; + } else { + exception_name = exception; + test_func = assert_throws_dom; + } + var msg = 'Invocation of ' + func_str + ' should throw ' + exception_name + ' Exception.'; + wp_test(function() { test_func(exception, function() {eval(func_str)}, msg); }, msg); +} + +function test_noless_than(value, greater_than, msg, properties) +{ + wp_test(function () { assert_true(value >= greater_than, msg); }, msg, properties); +} + +function test_fail(msg, properties) +{ + wp_test(function() { assert_unreached(); }, msg, properties); +} + +function test_resource_entries(entries, expected_entries) +{ + // This is slightly convoluted so that we can sort the output. 
diff --git a/test/fixtures/wpt/user-timing/resources/webperftestharnessextension.js b/test/fixtures/wpt/user-timing/resources/webperftestharnessextension.js
new file mode 100644
index 00000000000000..8640918d4f255e
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/resources/webperftestharnessextension.js
@@ -0,0 +1,202 @@
+//
+// Helper functions for User Timing tests
+//
+
+var mark_names = [
+  '',
+  '1',
+  'abc',
+];
+
+var measures = [
+  [''],
+  ['2', 1],
+  ['aaa', 'navigationStart', ''],
+];
+
+function test_method_exists(method, method_name, properties)
+{
+  var msg;
+  if (typeof method === 'function')
+    msg = 'performance.' + method.name + ' is supported!';
+  else
+    msg = 'performance.' + method_name + ' is supported!';
+  wp_test(function() { assert_equals(typeof method, 'function', msg); }, msg, properties);
+}
+
+function test_method_throw_exception(func_str, exception, msg)
+{
+  let exception_name;
+  let test_func;
+  if (typeof exception == "function") {
+    exception_name = exception.name;
+    test_func = assert_throws_js;
+  } else {
+    exception_name = exception;
+    test_func = assert_throws_dom;
+  }
+  var msg = 'Invocation of ' + func_str + ' should throw ' + exception_name + ' Exception.';
+  wp_test(function() { test_func(exception, function() {eval(func_str)}, msg); }, msg);
+}
+
+function test_noless_than(value, greater_than, msg, properties)
+{
+  wp_test(function () { assert_true(value >= greater_than, msg); }, msg, properties);
+}
+
+function test_fail(msg, properties)
+{
+  wp_test(function() { assert_unreached(); }, msg, properties);
+}
+
+function test_resource_entries(entries, expected_entries)
+{
+  // This is slightly convoluted so that we can sort the output.
+  var actual_entries = {};
+  var origin = window.location.protocol + "//" + window.location.host;
+
+  for (var i = 0; i < entries.length; ++i) {
+    var entry = entries[i];
+    var found = false;
+    for (var expected_entry in expected_entries) {
+      if (entry.name == origin + expected_entry) {
+        found = true;
+        if (expected_entry in actual_entries) {
+          test_fail(expected_entry + ' is not expected to have duplicate entries');
+        }
+        actual_entries[expected_entry] = entry;
+        break;
+      }
+    }
+    if (!found) {
+      test_fail(entries[i].name + ' is not expected to be in the Resource Timing buffer');
+    }
+  }
+
+  sorted_urls = [];
+  for (var i in actual_entries) {
+    sorted_urls.push(i);
+  }
+  sorted_urls.sort();
+  for (var i in sorted_urls) {
+    var url = sorted_urls[i];
+    test_equals(actual_entries[url].initiatorType,
+                expected_entries[url],
+                origin + url + ' is expected to have initiatorType ' + expected_entries[url]);
+  }
+  for (var j in expected_entries) {
+    if (!(j in actual_entries)) {
+      test_fail(origin + j + ' is expected to be in the Resource Timing buffer');
+    }
+  }
+}
+
+function performance_entrylist_checker(type)
+{
+  const entryType = type;
+
+  function entry_check(entry, expectedNames, testDescription = '')
+  {
+    const msg = testDescription + 'Entry \"' + entry.name + '\" should be one that we have set.';
+    wp_test(function() { assert_in_array(entry.name, expectedNames, msg); }, msg);
+    test_equals(entry.entryType, entryType, testDescription + 'entryType should be \"' + entryType + '\".');
+    if (type === "measure") {
+      test_true(isFinite(entry.startTime), testDescription + 'startTime should be a number.');
+      test_true(isFinite(entry.duration), testDescription + 'duration should be a number.');
+    } else if (type === "mark") {
+      test_greater_than(entry.startTime, 0, testDescription + 'startTime should be greater than 0.');
+      test_equals(entry.duration, 0, testDescription + 'duration of mark should be 0.');
+    }
+  }
+
+  function entrylist_order_check(entryList)
+  {
+    let inOrder = true;
+    for (let i = 0; i < entryList.length - 1; ++i)
+    {
+      if (entryList[i + 1].startTime < entryList[i].startTime) {
+        inOrder = false;
+        break;
+      }
+    }
+    return inOrder;
+  }
+
+  function entrylist_check(entryList, expectedLength, expectedNames, testDescription = '')
+  {
+    test_equals(entryList.length, expectedLength, testDescription + 'There should be ' + expectedLength + ' entries.');
+    test_true(entrylist_order_check(entryList), testDescription + 'Entries in entrylist should be in order.');
+    for (let i = 0; i < entryList.length; ++i)
+    {
+      entry_check(entryList[i], expectedNames, testDescription + 'Entry_list ' + i + '. ');
+    }
+  }
+
+  return { "entrylist_check": entrylist_check };
+}
+
+function PerformanceContext(context)
+{
+  this.performanceContext = context;
+}
+
+PerformanceContext.prototype =
+{
+
+  initialMeasures: function(item, index, array)
+  {
+    this.performanceContext.measure.apply(this.performanceContext, item);
+  },
+
+  mark: function()
+  {
+    this.performanceContext.mark.apply(this.performanceContext, arguments);
+  },
+
+  measure: function()
+  {
+    this.performanceContext.measure.apply(this.performanceContext, arguments);
+  },
+
+  clearMarks: function()
+  {
+    this.performanceContext.clearMarks.apply(this.performanceContext, arguments);
+  },
+
+  clearMeasures: function()
+  {
+    this.performanceContext.clearMeasures.apply(this.performanceContext, arguments);
+  },
+
+  getEntries: function()
+  {
+    return this.performanceContext.getEntries.apply(this.performanceContext, arguments);
+  },
+
+  getEntriesByType: function()
+  {
+    return this.performanceContext.getEntriesByType.apply(this.performanceContext, arguments);
+  },
+
+  getEntriesByName: function()
+  {
+    return this.performanceContext.getEntriesByName.apply(this.performanceContext, arguments);
+  },
+
+  setResourceTimingBufferSize: function()
+  {
+    return this.performanceContext.setResourceTimingBufferSize.apply(this.performanceContext, arguments);
+  },
+
+  registerResourceTimingBufferFullCallback: function(func)
+  {
+    this.performanceContext.onresourcetimingbufferfull = func;
+  },
+
+  clearResourceTimings: function()
+  {
+    this.performanceContext.clearResourceTimings.apply(this.performanceContext, arguments);
+  }
+
+};
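performance_entrylist_checker() returns type-specific checks for a list of entries, and PerformanceContext forwards every call to whichever performance object it wraps, so the same test body can drive window.performance or a worker's performance. A sketch of how the pieces are usually combined (illustrative only, not part of the patch):

    // Create the predefined marks and measures, then validate the mark list.
    const context = new PerformanceContext(window.performance);
    mark_names.forEach(function(name) { context.mark(name); });
    measures.forEach(context.initialMeasures, context);
    const mark_checker = performance_entrylist_checker('mark');
    mark_checker.entrylist_check(context.getEntriesByType('mark'),
                                 mark_names.length, mark_names, 'Mark test: ');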
diff --git a/test/fixtures/wpt/user-timing/structured-serialize-detail.any.js b/test/fixtures/wpt/user-timing/structured-serialize-detail.any.js
new file mode 100644
index 00000000000000..78771b2f7663d4
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/structured-serialize-detail.any.js
@@ -0,0 +1,64 @@
+test(function() {
+  performance.clearMarks();
+  const detail = { randomInfo: 123 }
+  const markEntry = new PerformanceMark("A", { detail });
+  assert_equals(markEntry.detail.randomInfo, detail.randomInfo);
+  assert_not_equals(markEntry.detail, detail);
+}, "The detail property in the mark constructor should be structured-clone.");
+
+test(function() {
+  performance.clearMarks();
+  const detail = { randomInfo: 123 }
+  const markEntry = performance.mark("A", { detail });
+  assert_not_equals(markEntry.detail, detail);
+}, "The detail property in the mark method should be structured-clone.");
+
+test(function() {
+  performance.clearMarks();
+  const markEntry = performance.mark("A");
+  assert_equals(markEntry.detail, null);
+}, "When accessing detail from a mark entry and the detail is not provided, just return a null value.");
+
+test(function() {
+  performance.clearMarks();
+  const detail = { unserializable: Symbol() };
+  assert_throws_dom("DataCloneError", ()=>{
+    new PerformanceMark("A", { detail });
+  }, "Trying to structured-serialize a Symbol.");
+}, "Mark: Throw an exception when the detail property cannot be structured-serialized.");
+
+test(function() {
+  performance.clearMeasures();
+  const detail = { randomInfo: 123 }
+  const measureEntry = performance.measure("A", { start: 0, detail });
+  assert_not_equals(measureEntry.detail, detail);
+}, "The detail property in the measure method should be structured-clone.");
+
+test(function() {
+  performance.clearMeasures();
+  const detail = { randomInfo: 123 }
+  const measureEntry = performance.measure("A", { start: 0, detail });
+  assert_equals(measureEntry.detail, measureEntry.detail);
+}, "The detail property in the measure method should be the same reference.");
+
+test(function() {
+  performance.clearMeasures();
+  const measureEntry = performance.measure("A");
+  assert_equals(measureEntry.detail, null);
+}, "When accessing detail from a measure entry and the detail is not provided, just return a null value.");
+
+test(function() {
+  performance.clearMeasures();
+  const detail = { unserializable: Symbol() };
+  assert_throws_dom("DataCloneError", ()=>{
+    performance.measure("A", { start: 0, detail });
+  }, "Trying to structured-serialize a Symbol.");
+}, "Measure: Throw an exception when the detail property cannot be structured-serialized.");
+
+test(function() {
+  const bar = { 1: 2 };
+  const detail = { foo: 1, bar };
+  const mark = performance.mark("m", { detail });
+  detail.foo = 2;
+  assert_equals(mark.detail.foo, 1);
+}, "The detail object is cloned when passed to mark API.");
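The same clone semantics asserted above can be observed directly through Node's perf_hooks: detail is structured-cloned when the entry is created, so later mutation of the caller's object does not change the entry, and values that cannot be serialized are rejected. A minimal Node sketch (illustrative only; the names are made up):

    'use strict';
    const { performance } = require('perf_hooks');

    const detail = { foo: 1 };
    const mark = performance.mark('example', { detail });
    detail.foo = 2;
    console.log(mark.detail.foo);  // 1 -- the entry holds a clone, not the original

    try {
      performance.mark('bad', { detail: { sym: Symbol() } });
    } catch (err) {
      console.log(err.name);       // expected to be 'DataCloneError'
    }
    performance.clearMarks();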
diff --git a/test/fixtures/wpt/user-timing/supported-usertiming-types.any.js b/test/fixtures/wpt/user-timing/supported-usertiming-types.any.js
new file mode 100644
index 00000000000000..ea3b2fe9dc90f7
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/supported-usertiming-types.any.js
@@ -0,0 +1,37 @@
+test(() => {
+  if (typeof PerformanceObserver.supportedEntryTypes === "undefined")
+    assert_unreached("supportedEntryTypes is not supported.");
+  const types = PerformanceObserver.supportedEntryTypes;
+  assert_true(types.includes("mark"),
+    "There should be 'mark' in PerformanceObserver.supportedEntryTypes");
+  assert_true(types.includes("measure"),
+    "There should be 'measure' in PerformanceObserver.supportedEntryTypes");
+  assert_greater_than(types.indexOf("measure"), types.indexOf('mark'),
+    "The 'measure' entry should appear after the 'mark' entry");
+}, "supportedEntryTypes contains 'mark' and 'measure'.");
+
+if (typeof PerformanceObserver.supportedEntryTypes !== "undefined") {
+  const entryTypes = {
+    "mark": () => {
+      performance.mark('foo');
+    },
+    "measure": () => {
+      performance.measure('bar');
+    }
+  }
+  for (let entryType in entryTypes) {
+    if (PerformanceObserver.supportedEntryTypes.includes(entryType)) {
+      promise_test(async() => {
+        await new Promise((resolve) => {
+          new PerformanceObserver(function (list, observer) {
+            observer.disconnect();
+            resolve();
+          }).observe({entryTypes: [entryType]});
+
+          // Force the PerformanceEntry.
+          entryTypes[entryType]();
+        })
+      }, `'${entryType}' entries should be observable.`)
+    }
+  }
+}
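The equivalent check in Node goes through perf_hooks, where PerformanceObserver.supportedEntryTypes and observe() follow the same shape as in the browser. A minimal sketch (illustrative only, not part of the patch):

    'use strict';
    const { performance, PerformanceObserver } = require('perf_hooks');

    console.log(PerformanceObserver.supportedEntryTypes);  // includes 'mark' and 'measure'

    const obs = new PerformanceObserver((list, observer) => {
      for (const entry of list.getEntries())
        console.log(entry.entryType, entry.name);          // e.g. "mark foo", "measure bar"
      observer.disconnect();
    });
    obs.observe({ entryTypes: ['mark', 'measure'] });

    performance.mark('foo');
    performance.measure('bar');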
diff --git a/test/fixtures/wpt/user-timing/user-timing-tojson.html b/test/fixtures/wpt/user-timing/user-timing-tojson.html
new file mode 100644
index 00000000000000..6aef7fa904ab95
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/user-timing-tojson.html
@@ -0,0 +1,44 @@
+
+
+
+
+
+
+
+
+
+
diff --git a/test/fixtures/wpt/user-timing/user_timing_exists.any.js b/test/fixtures/wpt/user-timing/user_timing_exists.any.js
new file mode 100644
index 00000000000000..adf9052ebd58d8
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/user_timing_exists.any.js
@@ -0,0 +1,12 @@
+test(function() {
+  assert_not_equals(self.performance.mark, undefined);
+}, "self.performance.mark is defined.");
+test(function() {
+  assert_not_equals(self.performance.clearMarks, undefined);
+}, "self.performance.clearMarks is defined.");
+test(function() {
+  assert_not_equals(self.performance.measure, undefined);
+}, "self.performance.measure is defined.");
+test(function() {
+  assert_not_equals(self.performance.clearMeasures, undefined);
+}, "self.performance.clearMeasures is defined.");
diff --git a/test/fixtures/wpt/versions.json b/test/fixtures/wpt/versions.json
index fb10c7d403d730..fb49e36cc07747 100644
--- a/test/fixtures/wpt/versions.json
+++ b/test/fixtures/wpt/versions.json
@@ -36,9 +36,13 @@
     "path": "html/webappapis/timers"
   },
   "interfaces": {
-    "commit": "fcb671ed8b068b25cee87429d803833777f35c2c",
+    "commit": "80a417662387b6eda904607d78ad246c5d8bf191",
     "path": "interfaces"
   },
+  "performance-timeline": {
+    "commit": "17ebc3aea0d6321e69554067c39ab5855e6fb67e",
+    "path": "performance-timeline"
+  },
   "resources": {
     "commit": "972ca5b6693bffebebc5805e1b9da68a6876e1f6",
     "path": "resources"
@@ -50,5 +54,9 @@
   "url": {
     "commit": "77d54aa9e0405f737987b59331f3584e3e1c26f9",
     "path": "url"
-  }
+  },
+  "user-timing": {
+    "commit": "df24fb604e2d40528ac1d1b5dd970e32fc5c2978",
+    "path": "user-timing"
+  }
 }
\ No newline at end of file
diff --git a/test/parallel/test-perf-hooks-usertiming.js b/test/parallel/test-perf-hooks-usertiming.js
index 401d0a6816481a..e7ef26889eae0f 100644
--- a/test/parallel/test-perf-hooks-usertiming.js
+++ b/test/parallel/test-perf-hooks-usertiming.js
@@ -29,7 +29,7 @@ assert(measure);
   assert.strictEqual(m.entryType, 'mark');
   assert.strictEqual(typeof m.startTime, 'number');
   assert.strictEqual(m.duration, 0);
-  assert.strictEqual(m.details, undefined);
+  assert.strictEqual(m.detail, null);
 });
 
 clearMarks();
@@ -38,11 +38,18 @@ assert.throws(() => mark(Symbol('a')), {
   message: /Cannot convert a Symbol value to a string/
 });
 
-[undefined, null, 1, 'any', {}, []].forEach((detail) => {
+[undefined, null].forEach((detail) => {
   const m = mark('a', { detail });
   assert.strictEqual(m.name, 'a');
   assert.strictEqual(m.entryType, 'mark');
-  assert.strictEqual(m.detail, detail);
+  assert.deepStrictEqual(m.detail, null);
+});
+[1, 'any', {}, []].forEach((detail) => {
+  const m = mark('a', { detail });
+  assert.strictEqual(m.name, 'a');
+  assert.strictEqual(m.entryType, 'mark');
+  // Value of detail is structured-cloned.
+  assert.deepStrictEqual(m.detail, detail);
 });
 
 clearMarks();
diff --git a/test/wpt/status/performance-timeline.json b/test/wpt/status/performance-timeline.json
new file mode 100644
index 00000000000000..0967ef424bce67
--- /dev/null
+++ b/test/wpt/status/performance-timeline.json
@@ -0,0 +1 @@
+{}
diff --git a/test/wpt/status/user-timing.json b/test/wpt/status/user-timing.json
new file mode 100644
index 00000000000000..b1110e6a5e798d
--- /dev/null
+++ b/test/wpt/status/user-timing.json
@@ -0,0 +1,11 @@
+{
+  "invoke_with_timing_attributes.worker.js": {
+    "skip": "importScripts not supported"
+  },
+  "performance-measure-invalid.worker.js": {
+    "skip": "importScripts not supported"
+  },
+  "idlharness.any.js": {
+    "skip": "idlharness cannot recognize Node.js environment"
+  }
+}
diff --git a/test/wpt/test-performance-timeline.js b/test/wpt/test-performance-timeline.js
new file mode 100644
index 00000000000000..36d13297ba57cc
--- /dev/null
+++ b/test/wpt/test-performance-timeline.js
@@ -0,0 +1,27 @@
+'use strict';
+require('../common');
+const { WPTRunner } = require('../common/wpt');
+
+const runner = new WPTRunner('performance-timeline');
+
+// Needed to access DOMException.
+runner.setFlags(['--expose-internals']);
+
+runner.setInitScript(`
+  const {
+    PerformanceMark,
+    PerformanceMeasure,
+    PerformanceObserver,
+    performance,
+  } = require('perf_hooks');
+  global.PerformanceMark = PerformanceMark;
+  global.PerformanceMeasure = PerformanceMeasure;
+  global.PerformanceObserver = PerformanceObserver;
+  global.performance = performance;
+
+  const { internalBinding } = require('internal/test/binding');
+  const { DOMException } = internalBinding('messaging');
+  global.DOMException = DOMException;
+`);
+
+runner.runJsTests();
diff --git a/test/wpt/test-user-timing.js b/test/wpt/test-user-timing.js
new file mode 100644
index 00000000000000..36d13297ba57cc
--- /dev/null
+++ b/test/wpt/test-user-timing.js
@@ -0,0 +1,27 @@
+'use strict';
+require('../common');
+const { WPTRunner } = require('../common/wpt');
+
+const runner = new WPTRunner('user-timing');
+
+// Needed to access DOMException.
+runner.setFlags(['--expose-internals']);
+
+runner.setInitScript(`
+  const {
+    PerformanceMark,
+    PerformanceMeasure,
+    PerformanceObserver,
+    performance,
+  } = require('perf_hooks');
+  global.PerformanceMark = PerformanceMark;
+  global.PerformanceMeasure = PerformanceMeasure;
+  global.PerformanceObserver = PerformanceObserver;
+  global.performance = performance;
+
+  const { internalBinding } = require('internal/test/binding');
+  const { DOMException } = internalBinding('messaging');
+  global.DOMException = DOMException;
+`);
+
+runner.runJsTests();
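Both runners rely on the init script to expose the perf_hooks classes under the global names the fixture files expect. A quick way to sanity-check that wiring outside the WPT runner (illustrative only, not part of the patch):

    'use strict';
    // Mirrors what the init script wires up; inside the runner these are globals.
    const { performance, PerformanceMark, PerformanceObserver } = require('perf_hooks');

    const entry = performance.mark('smoke');
    console.log(entry instanceof PerformanceMark);  // true: mark() returns a PerformanceMark
    console.log(typeof PerformanceObserver);        // 'function'
    performance.clearMarks('smoke');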