From 2214698d2673b7f1ebbc699e07938458a9f5c775 Mon Sep 17 00:00:00 2001
From: Justin Bay
Date: Sat, 1 Dec 2018 18:58:29 -0500
Subject: [PATCH] fetch web api descriptions from MDN

---
 README.md                           |   1 +
 baselines/dom.generated.d.ts        | 422 ++++++++++++++++++++++++++
 baselines/webworker.generated.d.ts  | 104 +++++++
 inputfiles/README.md                |   4 +-
 inputfiles/mdn/apiDescriptions.json | 442 ++++++++++++++++++++++++++++
 package.json                        |   4 +-
 src/emitter.ts                      |   4 +
 src/{fetcher.ts => idlfetcher.ts}   |   0
 src/index.ts                        |  19 ++
 src/mdnfetcher.ts                   |  44 +++
 src/types.d.ts                      |   1 +
 11 files changed, 1043 insertions(+), 2 deletions(-)
 create mode 100644 inputfiles/mdn/apiDescriptions.json
 rename src/{fetcher.ts => idlfetcher.ts} (100%)
 create mode 100644 src/mdnfetcher.ts

diff --git a/README.md b/README.md
index e95801378..5242487f1 100644
--- a/README.md
+++ b/README.md
@@ -74,6 +74,7 @@ A "Living Standard" ([example](https://xhr.spec.whatwg.org/)) should be added he
 - `browser.webidl.preprocessed.json`: a JSON file generated by Microsoft Edge. **Do not edit this file**.
     - Due to the different update schedules between Edge and TypeScript, this may not be the most up-to-date version of the spec.
+- `mdn/apiDescriptions.json`: a JSON file generated by fetching API descriptions from [MDN](https://developer.mozilla.org/en-US/docs/Web/API). **Do not edit this file**.
 - `addedTypes.json`: types that should exist in either browser or webworker but are missing from the Edge spec. The format of the file mimics that of `browser.webidl.preprocessed.json`.
 - `overridingTypes.json`: types that are defined in the spec file but have better or more up-to-date definitions in the json files.
 - `removedTypes.json`: types that are defined in the spec file but should be removed.

diff --git a/baselines/dom.generated.d.ts b/baselines/dom.generated.d.ts
index e3f3d7702..c88e88c7c 100644
--- a/baselines/dom.generated.d.ts
+++ b/baselines/dom.generated.d.ts
@@ -1609,6 +1609,7 @@ interface EventListener {
     (evt: Event): void;
 }

+/** The ANGLE_instanced_arrays extension is part of the WebGL API and allows drawing the same object, or groups of similar objects, multiple times, if they share the same vertex data, primitive count and type. */
 interface ANGLE_instanced_arrays {
     drawArraysInstancedANGLE(mode: GLenum, first: GLint, count: GLsizei, primcount: GLsizei): void;
     drawElementsInstancedANGLE(mode: GLenum, count: GLsizei, type: GLenum, offset: GLintptr, primcount: GLsizei): void;
@@ -1616,6 +1617,7 @@ interface ANGLE_instanced_arrays {
     readonly VERTEX_ATTRIB_ARRAY_DIVISOR_ANGLE: GLenum;
 }

+/** The AbortController interface represents a controller object that allows you to abort one or more DOM requests as and when desired. */
 interface AbortController {
     /**
      * Returns the AbortSignal object associated with this object.
      */
@@ -1637,6 +1639,7 @@ interface AbortSignalEventMap {
     "abort": ProgressEvent;
 }

+/** The AbortSignal interface represents a signal object that allows you to communicate with a DOM request (such as a Fetch) and abort it if required via an AbortController object. */
 interface AbortSignal extends EventTarget {
     /**
      * Returns true if this AbortSignal's AbortController has signaled to abort, and false
@@ -1688,6 +1691,7 @@ interface AesCmacParams extends Algorithm {
     length: number;
 }

+/** The AnalyserNode interface represents a node able to provide real-time frequency and time-domain analysis information. It is an AudioNode that passes the audio stream unchanged from the input to the output, but allows you to take the generated data, process it, and create audio visualizations. */
 interface AnalyserNode extends AudioNode {
     fftSize: number;
     readonly frequencyBinCount: number;
@@ -1756,6 +1760,7 @@ declare var AnimationEffect: {
     new(): AnimationEffect;
 };

+/** The AnimationEvent interface represents events providing information related to animations. */
 interface AnimationEvent extends Event {
     readonly animationName: string;
     readonly elapsedTime: number;
@@ -1845,6 +1850,7 @@ declare var ApplicationCache: {
     readonly UPDATEREADY: number;
 };

+/** This type represents a DOM element's attribute as an object. In most DOM methods, you will probably directly retrieve the attribute as a string (e.g., Element.getAttribute()), but certain functions (e.g., Element.getAttributeNode()) or means of iterating give Attr types. */
 interface Attr extends Node {
     readonly localName: string;
     readonly name: string;
@@ -1860,6 +1866,7 @@ declare var Attr: {
     new(): Attr;
 };

+/** Objects of these types are designed to hold small audio snippets, typically less than 45 s. For longer sounds, objects implementing the MediaElementAudioSourceNode are more suitable. The buffer contains data in the following format: non-interleaved IEEE754 32-bit linear PCM with a nominal range between -1 and +1, that is, a 32-bit floating-point buffer, with each sample between -1.0 and 1.0. If the AudioBuffer has multiple channels, they are stored in separate buffers. */
 interface AudioBuffer {
     readonly duration: number;
     readonly length: number;
@@ -1875,6 +1882,7 @@ declare var AudioBuffer: {
     new(options: AudioBufferOptions): AudioBuffer;
 };

+/** The AudioBufferSourceNode interface is an AudioScheduledSourceNode which represents an audio source consisting of in-memory audio data, stored in an AudioBuffer. It's especially useful for playing back audio which has particularly stringent timing accuracy requirements, such as for sounds that must match a specific rhythm and can be kept in memory rather than being played from disk or the network. */
 interface AudioBufferSourceNode extends AudioScheduledSourceNode {
     buffer: AudioBuffer | null;
     readonly detune: AudioParam;
@@ -1894,6 +1902,7 @@ declare var AudioBufferSourceNode: {
     new(context: BaseAudioContext, options?: AudioBufferSourceOptions): AudioBufferSourceNode;
 };

+/** The AudioContext interface represents an audio-processing graph built from audio modules linked together, each represented by an AudioNode. */
 interface AudioContext extends BaseAudioContext {
     readonly baseLatency: number;
     readonly outputLatency: number;
@@ -1915,6 +1924,7 @@ declare var AudioContext: {
     new(contextOptions?: AudioContextOptions): AudioContext;
 };

+/** AudioDestinationNode has no output (as it is the output, no more AudioNodes can be linked after it in the audio graph) and one input. The number of channels in the input must be between 0 and the maxChannelCount value or an exception is raised. */
 interface AudioDestinationNode extends AudioNode {
     readonly maxChannelCount: number;
 }
@@ -1924,6 +1934,7 @@ declare var AudioDestinationNode: {
     new(): AudioDestinationNode;
 };

+/** The AudioListener interface represents the position and orientation of the unique person listening to the audio scene, and is used in audio spatialization. All PannerNodes spatialize in relation to the AudioListener stored in the BaseAudioContext.listener attribute. */
 interface AudioListener {
     readonly forwardX: AudioParam;
     readonly forwardY: AudioParam;
@@ -1945,6 +1956,7 @@ declare var AudioListener: {
     new(): AudioListener;
 };

+/** The AudioNode interface is a generic interface for representing an audio processing module. */
 interface AudioNode extends EventTarget {
     channelCount: number;
     channelCountMode: ChannelCountMode;
@@ -1968,6 +1980,7 @@ declare var AudioNode: {
     new(): AudioNode;
 };

+/** The Web Audio API's AudioParam interface represents an audio-related parameter, usually a parameter of an AudioNode (such as GainNode.gain). */
 interface AudioParam {
     automationRate: AutomationRate;
     readonly defaultValue: number;
@@ -1997,6 +2010,7 @@ declare var AudioParamMap: {
     new(): AudioParamMap;
 };

+/** The Web Audio API AudioProcessingEvent represents events that occur when a ScriptProcessorNode input buffer is ready to be processed. */
 interface AudioProcessingEvent extends Event {
     readonly inputBuffer: AudioBuffer;
     readonly outputBuffer: AudioBuffer;
@@ -2027,6 +2041,7 @@ declare var AudioScheduledSourceNode: {
     new(): AudioScheduledSourceNode;
 };

+/** The AudioTrack interface represents a single audio track from one of the HTML media elements,
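
A note on the new fetcher: `src/mdnfetcher.ts` (44 added lines in this patch) is the file that populates `inputfiles/mdn/apiDescriptions.json`, which the emitter then prints as the single-line `/** ... */` comments visible in the baselines above. The sketch below shows roughly how such a fetcher could work; the `?summary&raw` query, the `htmlToText` helper, and the hard-coded interface list are illustrative assumptions, not the patch's actual code.

```ts
// Hypothetical sketch of an MDN description fetcher; not the patch's real mdnfetcher.ts.
import fetch from "node-fetch";
import * as fs from "fs";

// Reduce MDN's HTML summary to the plain text that can be embedded in a JSDoc comment.
function htmlToText(html: string): string {
    return html.replace(/<[^>]+>/g, "").replace(/\s+/g, " ").trim();
}

async function fetchDescription(name: string): Promise<string | undefined> {
    // Assumption: MDN's wiki exposed a raw page summary via query parameters at the time.
    const res = await fetch(`https://developer.mozilla.org/en-US/docs/Web/API/${name}?summary&raw`);
    if (!res.ok) return undefined; // not every interface has an MDN page
    return htmlToText(await res.text());
}

async function main() {
    // In practice this would be every interface name emitted into the baselines.
    const names = ["AbortController", "AnalyserNode", "AudioBuffer"];
    const descriptions: Record<string, string> = {};
    for (const name of names) {
        const summary = await fetchDescription(name);
        if (summary) descriptions[name] = summary;
    }
    fs.writeFileSync("inputfiles/mdn/apiDescriptions.json", JSON.stringify(descriptions, null, 4));
}

main().catch(e => { console.error(e); process.exit(1); });
```

With the descriptions cached in that JSON file, the 4-line change to `src/emitter.ts` presumably only needs to look up each interface name in the map and, when a description exists, emit it as a comment immediately before the interface declaration.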