diff --git a/lib/enu/diagnosticMessages.generated.json.lcg b/lib/enu/diagnosticMessages.generated.json.lcg
index 0b66fd7760cc0..de808d1e48ad5 100644
--- a/lib/enu/diagnosticMessages.generated.json.lcg
+++ b/lib/enu/diagnosticMessages.generated.json.lcg
@@ -141,9 +141,9 @@
@@ -189,12 +189,6 @@
@@ -867,6 +861,12 @@
@@ -1209,6 +1209,12 @@
@@ -1863,6 +1869,12 @@
@@ -2037,6 +2049,12 @@
@@ -2061,12 +2079,6 @@
@@ -2151,6 +2163,12 @@
@@ -2199,6 +2217,12 @@
@@ -2433,6 +2457,12 @@
@@ -2663,7 +2693,7 @@
@@ -2811,6 +2841,24 @@
@@ -3033,18 +3081,6 @@
@@ -3795,6 +3831,12 @@
@@ -3963,6 +4005,12 @@
@@ -4173,6 +4221,12 @@
@@ -4887,6 +4941,12 @@
@@ -5679,6 +5739,12 @@
@@ -5889,6 +5955,12 @@
@@ -5967,6 +6039,12 @@
@@ -5991,6 +6069,12 @@
@@ -6717,6 +6801,12 @@
@@ -7227,6 +7317,12 @@
diff --git a/lib/lib.dom.d.ts b/lib/lib.dom.d.ts
index 7817a0e426704..26664f7d070e0 100644
--- a/lib/lib.dom.d.ts
+++ b/lib/lib.dom.d.ts
@@ -171,7 +171,6 @@ interface ByteLengthChunk {
 }
 
 interface CacheQueryOptions {
-    cacheName?: string;
     ignoreMethod?: boolean;
     ignoreSearch?: boolean;
     ignoreVary?: boolean;
@@ -256,7 +255,7 @@ interface ConstrainDoubleRange extends DoubleRange {
     ideal?: number;
 }
 
-interface ConstrainLongRange extends LongRange {
+interface ConstrainULongRange extends ULongRange {
     exact?: number;
     ideal?: number;
 }
@@ -330,36 +329,41 @@ interface DelayOptions extends AudioNodeOptions {
     maxDelayTime?: number;
 }
 
-interface DeviceAccelerationDict {
+interface DeviceLightEventInit extends EventInit {
+    value?: number;
+}
+
+interface DeviceMotionEventAccelerationInit {
     x?: number | null;
     y?: number | null;
     z?: number | null;
 }
 
-interface DeviceLightEventInit extends EventInit {
-    value?: number;
-}
-
 interface DeviceMotionEventInit extends EventInit {
-    acceleration?: DeviceAccelerationDict | null;
-    accelerationIncludingGravity?: DeviceAccelerationDict | null;
-    interval?: number | null;
-    rotationRate?: DeviceRotationRateDict | null;
+    acceleration?: DeviceMotionEventAccelerationInit;
+    accelerationIncludingGravity?: DeviceMotionEventAccelerationInit;
+    interval?: number;
+    rotationRate?: DeviceMotionEventRotationRateInit;
 }
 
-interface DeviceOrientationEventInit extends EventInit {
-    absolute?: boolean;
+interface DeviceMotionEventRotationRateInit {
     alpha?: number | null;
     beta?: number | null;
     gamma?: number | null;
 }
 
-interface DeviceRotationRateDict {
+interface DeviceOrientationEventInit extends EventInit {
+    absolute?: boolean;
     alpha?: number | null;
     beta?: number | null;
     gamma?: number | null;
 }
 
+interface DevicePermissionDescriptor extends PermissionDescriptor {
+    deviceId?: string;
+    name: "camera" | "microphone" | "speaker";
+}
+
 interface DocumentTimelineOptions {
     originTime?: number;
 }
@@ -412,6 +416,10 @@ interface EffectTiming {
     iterations?: number;
 }
 
+interface ElementCreationOptions {
+    is?: string;
+}
+
 interface ElementDefinitionOptions {
     extends?: string;
 }
@@ -553,11 +561,17 @@ interface IIRFilterOptions extends AudioNodeOptions {
     feedforward: number[];
 }
 
+interface ImageEncodeOptions {
+    quality?: number;
+    type?: string;
+}
+
 interface IntersectionObserverEntryInit {
     boundingClientRect: DOMRectInit;
+    intersectionRatio: number;
     intersectionRect: DOMRectInit;
     isIntersecting: boolean;
-    rootBounds: DOMRectInit;
+    rootBounds: DOMRectInit | null;
     target: Element;
     time: number;
 }
@@ -616,11 +630,6 @@ interface KeyframeEffectOptions extends EffectTiming {
     iterationComposite?: IterationCompositeOperation;
 }
 
-interface LongRange {
-    max?: number;
-    min?: number;
-}
-
 interface MediaElementAudioSourceOptions {
     mediaElement: HTMLMediaElement;
 }
@@ -678,39 +687,45 @@ interface MediaStreamTrackAudioSourceOptions {
 }
 
 interface MediaStreamTrackEventInit extends EventInit {
-    track?: MediaStreamTrack | null;
+    track: MediaStreamTrack;
 }
 
 interface MediaTrackCapabilities {
-    aspectRatio?: number | DoubleRange;
+    aspectRatio?: DoubleRange;
+    autoGainControl?: boolean[];
+    channelCount?: ULongRange;
     deviceId?: string;
     echoCancellation?: boolean[];
-    facingMode?: string;
-    frameRate?: number | DoubleRange;
+    facingMode?: string[];
+    frameRate?: DoubleRange;
     groupId?: string;
-    height?: number | LongRange;
-    sampleRate?: number | LongRange;
-    sampleSize?: number | LongRange;
-    volume?: number | DoubleRange;
-    width?: number | LongRange;
+    height?: ULongRange;
+    latency?: DoubleRange;
+    noiseSuppression?: boolean[];
+    resizeMode?: string[];
+    sampleRate?: ULongRange;
+    sampleSize?: ULongRange;
+    volume?: DoubleRange;
+    width?: ULongRange;
 }
 
 interface MediaTrackConstraintSet {
-    aspectRatio?: number | ConstrainDoubleRange;
-    channelCount?: number | ConstrainLongRange;
-    deviceId?: string | string[] | ConstrainDOMStringParameters;
-    displaySurface?: string | string[] | ConstrainDOMStringParameters;
-    echoCancellation?: boolean | ConstrainBooleanParameters;
-    facingMode?: string | string[] | ConstrainDOMStringParameters;
-    frameRate?: number | ConstrainDoubleRange;
-    groupId?: string | string[] | ConstrainDOMStringParameters;
-    height?: number | ConstrainLongRange;
-    latency?: number | ConstrainDoubleRange;
-    logicalSurface?: boolean | ConstrainBooleanParameters;
-    sampleRate?: number | ConstrainLongRange;
-    sampleSize?: number | ConstrainLongRange;
-    volume?: number | ConstrainDoubleRange;
-    width?: number | ConstrainLongRange;
+    aspectRatio?: ConstrainDouble;
+    autoGainControl?: ConstrainBoolean;
+    channelCount?: ConstrainULong;
+    deviceId?: ConstrainDOMString;
+    echoCancellation?: ConstrainBoolean;
+    facingMode?: ConstrainDOMString;
+    frameRate?: ConstrainDouble;
+    groupId?: ConstrainDOMString;
+    height?: ConstrainULong;
+    latency?: ConstrainDouble;
+    noiseSuppression?: ConstrainBoolean;
+    resizeMode?: ConstrainDOMString;
+    sampleRate?: ConstrainULong;
+    sampleSize?: ConstrainULong;
+    volume?: ConstrainDouble;
+    width?: ConstrainULong;
 }
 
 interface MediaTrackConstraints extends MediaTrackConstraintSet {
@@ -719,12 +734,17 @@ interface MediaTrackSettings {
     aspectRatio?: number;
+    autoGainControl?: boolean;
+    channelCount?: number;
     deviceId?: string;
     echoCancellation?: boolean;
     facingMode?: string;
     frameRate?: number;
     groupId?: string;
     height?: number;
+    latency?: number;
+    noiseSuppression?: boolean;
+    resizeMode?: string;
     sampleRate?: number;
     sampleSize?: number;
     volume?: number;
@@ -733,12 +753,17 @@ interface MediaTrackSupportedConstraints {
     aspectRatio?: boolean;
+    autoGainControl?: boolean;
+    channelCount?: boolean;
     deviceId?: boolean;
     echoCancellation?: boolean;
     facingMode?: boolean;
     frameRate?: boolean;
     groupId?: boolean;
     height?: boolean;
+    latency?: boolean;
+    noiseSuppression?: boolean;
+    resizeMode?: boolean;
     sampleRate?: boolean;
     sampleSize?: boolean;
     volume?: boolean;
@@ -753,6 +778,11 @@ interface MessageEventInit extends EventInit {
     source?: MessageEventSource | null;
 }
 
+interface MidiPermissionDescriptor extends PermissionDescriptor {
+    name: "midi";
+    sysex?: boolean;
+}
+
 interface MouseEventInit extends EventModifierInit {
     button?: number;
     buttons?: number;
@@ -765,6 +795,10 @@ interface MouseEventInit extends EventModifierInit {
     screenY?: number;
 }
 
+interface MultiCacheQueryOptions extends CacheQueryOptions {
+    cacheName?: string;
+}
+
 interface MutationObserverInit {
     attributeFilter?: string[];
     attributeOldValue?: boolean;
@@ -914,7 +948,8 @@ interface Pbkdf2Params extends Algorithm {
 interface PerformanceObserverInit {
     buffered?: boolean;
-    entryTypes: string[];
+    entryTypes?: string[];
+    type?: string;
 }
 
 interface PeriodicWaveConstraints {
@@ -926,10 +961,15 @@ interface PeriodicWaveOptions extends PeriodicWaveConstraints {
     real?: number[] | Float32Array;
 }
 
+interface PermissionDescriptor {
+    name: PermissionName;
+}
+
 interface PipeOptions {
     preventAbort?: boolean;
     preventCancel?: boolean;
     preventClose?: boolean;
+    signal?: AbortSignal;
 }
 
 interface PointerEventInit extends MouseEventInit {
@@ -955,6 +995,10 @@ interface PositionOptions {
     timeout?: number;
 }
 
+interface PostMessageOptions {
+    transfer?: any[];
+}
+
 interface ProgressEventInit extends EventInit {
     lengthComputable?: boolean;
     loaded?: number;
@@ -973,6 +1017,11 @@ interface PropertyIndexedKeyframes {
     [property: string]: string | string[] | number | null | (number | null)[] | undefined;
 }
 
+interface PushPermissionDescriptor extends PermissionDescriptor {
+    name: "push";
+    userVisibleOnly?: boolean;
+}
+
 interface PushSubscriptionJSON {
     endpoint?: string;
     expirationTime?: number | null;
@@ -1519,6 +1568,11 @@ interface TextDecoderOptions {
     ignoreBOM?: boolean;
 }
 
+interface TextEncoderEncodeIntoResult {
+    read?: number;
+    written?: number;
+}
+
 interface TouchEventInit extends EventModifierInit {
     changedTouches?: Touch[];
     targetTouches?: Touch[];
@@ -1566,6 +1620,11 @@ interface UIEventInit extends EventInit {
     view?: Window | null;
 }
 
+interface ULongRange {
+    max?: number;
+    min?: number;
+}
+
 interface UnderlyingByteSource {
     autoAllocateChunkSize?: number;
     cancel?: ReadableStreamErrorCallback;
@@ -1650,6 +1709,8 @@ interface EventListener {
     (evt: Event): void;
 }
 
+type XPathNSResolver = ((prefix: string | null) => string | null) | { lookupNamespaceURI(prefix: string | null): string | null; };
+
 /** The ANGLE_instanced_arrays extension is part of the WebGL API and allows to draw the same object, or groups of similar objects multiple times, if they share the same vertex data, primitive count and type. */
 interface ANGLE_instanced_arrays {
     drawArraysInstancedANGLE(mode: GLenum, first: GLint, count: GLsizei, primcount: GLsizei): void;
@@ -1658,7 +1719,7 @@ interface ANGLE_instanced_arrays {
     readonly VERTEX_ATTRIB_ARRAY_DIVISOR_ANGLE: GLenum;
 }
 
-/** The AbortController interface represents a controller object that allows you to abort one or more DOM requests as and when desired. */
+/** A controller object that allows you to abort one or more DOM requests as and when desired. */
 interface AbortController {
     /**
      * Returns the AbortSignal object associated with this object.
      */
     readonly signal: AbortSignal;
     /**
      * Invoking this method will set this object's AbortSignal's aborted flag and signal to any observers that the associated activity is to be aborted.
      */
     abort(): void;
 }
 
 declare var AbortController: {
     prototype: AbortController;
     new(): AbortController;
 };
@@ -1680,7 +1741,7 @@ interface AbortSignalEventMap {
     "abort": Event;
 }
 
-/** The AbortSignal interface represents a signal object that allows you to communicate with a DOM request (such as a Fetch) and abort it if required via an AbortController object. */
+/** A signal object that allows you to communicate with a DOM request (such as a Fetch) and abort it if required via an AbortController object. */
 interface AbortSignal extends EventTarget {
     /**
      * Returns true if this AbortSignal's AbortController has signaled to abort, and false
@@ -1732,7 +1793,7 @@ interface AesCmacParams extends Algorithm {
     length: number;
 }
 
-/** The AnalyserNode interface represents a node able to provide real-time frequency and time-domain analysis information. It is an AudioNode that passes the audio stream unchanged from the input to the output, but allows you to take the generated data, process it, and create audio visualizations. */
+/** A node able to provide real-time frequency and time-domain analysis information. It is an AudioNode that passes the audio stream unchanged from the input to the output, but allows you to take the generated data, process it, and create audio visualizations. */
 interface AnalyserNode extends AudioNode {
     fftSize: number;
     readonly frequencyBinCount: number;
@@ -1801,7 +1862,7 @@ declare var AnimationEffect: {
     new(): AnimationEffect;
 };
 
-/** The AnimationEvent interface represents events providing information related to animations. */
+/** Events providing information related to animations. */
 interface AnimationEvent extends Event {
     readonly animationName: string;
     readonly elapsedTime: number;
@@ -1891,7 +1952,7 @@ declare var ApplicationCache: {
     readonly UPDATEREADY: number;
 };
 
-/** This type represents a DOM element's attribute as an object. In most DOM methods, you will probably directly retrieve the attribute as a string (e.g., Element.getAttribute(), but certain functions (e.g., Element.getAttributeNode()) or means of iterating give Attr types. */
+/** A DOM element's attribute as an object. In most DOM methods, you will probably directly retrieve the attribute as a string (e.g., Element.getAttribute(), but certain functions (e.g., Element.getAttributeNode()) or means of iterating give Attr types. */
 interface Attr extends Node {
     readonly localName: string;
     readonly name: string;
@@ -1907,7 +1968,7 @@ declare var Attr: {
     new(): Attr;
 };
 
-/** Objects of these types are designed to hold small audio snippets, typically less than 45 s. For longer sounds, objects implementing the MediaElementAudioSourceNode are more suitable. The buffer contains data in the following format:  non-interleaved IEEE754 32-bit linear PCM with a nominal range between -1 and +1, that is, 32bits floating point buffer, with each samples between -1.0 and 1.0. If the AudioBuffer has multiple channels, they are stored in separate buffer. */
+/** A short audio asset residing in memory, created from an audio file using the AudioContext.decodeAudioData() method, or from raw data using AudioContext.createBuffer(). Once put into an AudioBuffer, the audio can then be played by being passed into an AudioBufferSourceNode. */
 interface AudioBuffer {
     readonly duration: number;
     readonly length: number;
@@ -1923,7 +1984,7 @@ declare var AudioBuffer: {
     new(options: AudioBufferOptions): AudioBuffer;
 };
 
-/** The AudioBufferSourceNode interface is an AudioScheduledSourceNode which represents an audio source consisting of in-memory audio data, stored in an AudioBuffer. It's especially useful for playing back audio which has particularly stringent timing accuracy requirements, such as for sounds that must match a specific rhythm and can be kept in memory rather than being played from disk or the network. */
+/** An AudioScheduledSourceNode which represents an audio source consisting of in-memory audio data, stored in an AudioBuffer. It's especially useful for playing back audio which has particularly stringent timing accuracy requirements, such as for sounds that must match a specific rhythm and can be kept in memory rather than being played from disk or the network. */
 interface AudioBufferSourceNode extends AudioScheduledSourceNode {
     buffer: AudioBuffer | null;
     readonly detune: AudioParam;
@@ -1943,7 +2004,7 @@ declare var AudioBufferSourceNode: {
     new(context: BaseAudioContext, options?: AudioBufferSourceOptions): AudioBufferSourceNode;
 };
 
-/** The AudioContext interface represents an audio-processing graph built from audio modules linked together, each represented by an AudioNode. */
+/** An audio-processing graph built from audio modules linked together, each represented by an AudioNode. */
 interface AudioContext extends BaseAudioContext {
     readonly baseLatency: number;
     readonly outputLatency: number;
@@ -1953,6 +2014,7 @@ interface AudioContext extends BaseAudioContext {
     createMediaStreamSource(mediaStream: MediaStream): MediaStreamAudioSourceNode;
     createMediaStreamTrackSource(mediaStreamTrack: MediaStreamTrack): MediaStreamTrackAudioSourceNode;
     getOutputTimestamp(): AudioTimestamp;
+    resume(): Promise<void>;
     suspend(): Promise<void>;
     addEventListener<K extends keyof BaseAudioContextEventMap>(type: K, listener: (this: AudioContext, ev: BaseAudioContextEventMap[K]) => any, options?: boolean | AddEventListenerOptions): void;
     addEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: boolean | AddEventListenerOptions): void;
@@ -1975,7 +2037,7 @@ declare var AudioDestinationNode: {
     new(): AudioDestinationNode;
 };
 
-/** The AudioListener interface represents the position and orientation of the unique person listening to the audio scene, and is used in audio spatialization. All PannerNodes spatialize in relation to the AudioListener stored in the BaseAudioContext.listener attribute. */
+/** The position and orientation of the unique person listening to the audio scene, and is used in audio spatialization. All PannerNodes spatialize in relation to the AudioListener stored in the BaseAudioContext.listener attribute. */
 interface AudioListener {
     readonly forwardX: AudioParam;
     readonly forwardY: AudioParam;
@@ -1997,7 +2059,7 @@ declare var AudioListener: {
     new(): AudioListener;
 };
 
-/** The AudioNode interface is a generic interface for representing an audio processing module. Examples include: */
+/** A generic interface for representing an audio processing module. Examples include: */
 interface AudioNode extends EventTarget {
     channelCount: number;
     channelCountMode: ChannelCountMode;
@@ -2051,7 +2113,7 @@ declare var AudioParamMap: {
     new(): AudioParamMap;
 };
 
-/** The Web Audio API AudioProcessingEvent represents events that occur when a ScriptProcessorNode input buffer is ready to be processed. */
+/** The Web Audio API events that occur when a ScriptProcessorNode input buffer is ready to be processed. */
 interface AudioProcessingEvent extends Event {
     readonly inputBuffer: AudioBuffer;
     readonly outputBuffer: AudioBuffer;
@@ -2082,7 +2144,7 @@ declare var AudioScheduledSourceNode: {
     new(): AudioScheduledSourceNode;
 };
 
-/** The AudioTrack interface represents a single audio track from one of the HTML media elements,