/*
    This file is part of the WebKit open source project.
    This file has been generated by generate-bindings.pl. DO NOT MODIFY!

    This library is free software; you can redistribute it and/or
    modify it under the terms of the GNU Library General Public
    License as published by the Free Software Foundation; either
    version 2 of the License, or (at your option) any later version.

    This library is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
    Library General Public License for more details.

    You should have received a copy of the GNU Library General Public License
    along with this library; see the file COPYING.LIB. If not, write to
    the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
    Boston, MA 02110-1301, USA.
*/

#include "config.h"

#if ENABLE(WEB_AUDIO)

#include "JSAudioContext.h"

#include "Document.h"
#include "EventNames.h"
#include "JSAnalyserNode.h"
#include "JSAudioBuffer.h"
#include "JSAudioBufferCallback.h"
#include "JSAudioBufferSourceNode.h"
#include "JSAudioDestinationNode.h"
#include "JSAudioListener.h"
#include "JSBiquadFilterNode.h"
#include "JSChannelMergerNode.h"
#include "JSChannelSplitterNode.h"
#include "JSConvolverNode.h"
#include "JSDOMAttribute.h"
#include "JSDOMBinding.h"
#include "JSDOMConstructor.h"
#include "JSDOMConvertBoolean.h"
#include "JSDOMConvertBufferSource.h"
#include "JSDOMConvertCallbacks.h"
#include "JSDOMConvertInterface.h"
#include "JSDOMConvertNullable.h"
#include "JSDOMConvertNumbers.h"
#include "JSDOMExceptionHandling.h"
#include "JSDOMGlobalObject.h"
#include "JSDOMOperation.h"
#include "JSDOMOperationReturningPromise.h"
#include "JSDOMWrapperCache.h"
#include "JSDelayNode.h"
#include "JSDynamicsCompressorNode.h"
#include "JSEventListener.h"
#include "JSGainNode.h"
#include "JSOscillatorNode.h"
#include "JSPannerNode.h"
#include "JSPeriodicWave.h"
#include "JSScriptProcessorNode.h"
#include "JSWaveShaperNode.h"
#include "ScriptExecutionContext.h"
#include <JavaScriptCore/HeapSnapshotBuilder.h>
#include <JavaScriptCore/JSCInlines.h>
#include <JavaScriptCore/JSString.h>
#include <wtf/GetPtr.h>
#include <wtf/PointerPreparations.h>
#include <wtf/URL.h>

#if ENABLE(MEDIA_STREAM)
#include "JSMediaStream.h"
#include "JSMediaStreamAudioDestinationNode.h"
#include "JSMediaStreamAudioSourceNode.h"
#endif

#if ENABLE(VIDEO)
#include "JSHTMLMediaElement.h"
#include "JSMediaElementAudioSourceNode.h"
#endif


namespace WebCore {
using namespace JSC;

String convertEnumerationToString(AudioContext::State enumerationValue)
{
    static const NeverDestroyed<String> values[] = {
        MAKE_STATIC_STRING_IMPL("suspended"),
        MAKE_STATIC_STRING_IMPL("running"),
        MAKE_STATIC_STRING_IMPL("interrupted"),
        MAKE_STATIC_STRING_IMPL("closed"),
    };
    static_assert(static_cast<size_t>(AudioContext::State::Suspended) == 0, "AudioContext::State::Suspended is not 0 as expected");
    static_assert(static_cast<size_t>(AudioContext::State::Running) == 1, "AudioContext::State::Running is not 1 as expected");
    static_assert(static_cast<size_t>(AudioContext::State::Interrupted) == 2, "AudioContext::State::Interrupted is not 2 as expected");
    static_assert(static_cast<size_t>(AudioContext::State::Closed) == 3, "AudioContext::State::Closed is not 3 as expected");
    ASSERT(static_cast<size_t>(enumerationValue) < WTF_ARRAY_LENGTH(values));
    return values[static_cast<size_t>(enumerationValue)];
}

template<> JSString* convertEnumerationToJS(ExecState& state, AudioContext::State enumerationValue)
{
    return jsStringWithCache(&state, convertEnumerationToString(enumerationValue));
}

template<> Optional<AudioContext::State> parseEnumeration<AudioContext::State>(ExecState& state, JSValue value)
{
    auto stringValue = value.toWTFString(&state);
    if (stringValue == "suspended")
        return AudioContext::State::Suspended;
    if (stringValue == "running")
        return AudioContext::State::Running;
    if (stringValue == "interrupted")
        return AudioContext::State::Interrupted;
    if (stringValue == "closed")
        return AudioContext::State::Closed;
    return WTF::nullopt;
}

template<> const char* expectedEnumerationValues<AudioContext::State>()
{
    return "\"suspended\", \"running\", \"interrupted\", \"closed\"";
}
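
// Note: values[] in convertEnumerationToString() must stay in the declaration order of
// AudioContext::State (the static_asserts above guard that), and parseEnumeration() reports an
// unrecognized string by returning WTF::nullopt; the enumeration converter is then expected to
// raise a TypeError built from expectedEnumerationValues().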

// Functions

JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionSuspend(JSC::ExecState*);
JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionResume(JSC::ExecState*);
JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionClose(JSC::ExecState*);
JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateBuffer(JSC::ExecState*);
JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionDecodeAudioData(JSC::ExecState*);
JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateBufferSource(JSC::ExecState*);
#if ENABLE(VIDEO)
JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateMediaElementSource(JSC::ExecState*);
#endif
#if ENABLE(MEDIA_STREAM)
JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateMediaStreamSource(JSC::ExecState*);
#endif
#if ENABLE(MEDIA_STREAM)
JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateMediaStreamDestination(JSC::ExecState*);
#endif
JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateGain(JSC::ExecState*);
JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateDelay(JSC::ExecState*);
JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateBiquadFilter(JSC::ExecState*);
JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateWaveShaper(JSC::ExecState*);
JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreatePanner(JSC::ExecState*);
JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateConvolver(JSC::ExecState*);
JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateDynamicsCompressor(JSC::ExecState*);
JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateAnalyser(JSC::ExecState*);
JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateScriptProcessor(JSC::ExecState*);
JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateOscillator(JSC::ExecState*);
JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreatePeriodicWave(JSC::ExecState*);
JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateChannelSplitter(JSC::ExecState*);
JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateChannelMerger(JSC::ExecState*);
JSC::EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionStartRendering(JSC::ExecState*);

// Attributes

JSC::EncodedJSValue jsAudioContextConstructor(JSC::ExecState*, JSC::EncodedJSValue, JSC::PropertyName);
bool setJSAudioContextConstructor(JSC::ExecState*, JSC::EncodedJSValue, JSC::EncodedJSValue);
JSC::EncodedJSValue jsAudioContextDestination(JSC::ExecState*, JSC::EncodedJSValue, JSC::PropertyName);
JSC::EncodedJSValue jsAudioContextCurrentTime(JSC::ExecState*, JSC::EncodedJSValue, JSC::PropertyName);
JSC::EncodedJSValue jsAudioContextSampleRate(JSC::ExecState*, JSC::EncodedJSValue, JSC::PropertyName);
JSC::EncodedJSValue jsAudioContextListener(JSC::ExecState*, JSC::EncodedJSValue, JSC::PropertyName);
JSC::EncodedJSValue jsAudioContextState(JSC::ExecState*, JSC::EncodedJSValue, JSC::PropertyName);
JSC::EncodedJSValue jsAudioContextOnstatechange(JSC::ExecState*, JSC::EncodedJSValue, JSC::PropertyName);
bool setJSAudioContextOnstatechange(JSC::ExecState*, JSC::EncodedJSValue, JSC::EncodedJSValue);
JSC::EncodedJSValue jsAudioContextActiveSourceCount(JSC::ExecState*, JSC::EncodedJSValue, JSC::PropertyName);
JSC::EncodedJSValue jsAudioContextOncomplete(JSC::ExecState*, JSC::EncodedJSValue, JSC::PropertyName);
bool setJSAudioContextOncomplete(JSC::ExecState*, JSC::EncodedJSValue, JSC::EncodedJSValue);

class JSAudioContextPrototype : public JSC::JSNonFinalObject {
public:
    using Base = JSC::JSNonFinalObject;
    static JSAudioContextPrototype* create(JSC::VM& vm, JSDOMGlobalObject* globalObject, JSC::Structure* structure)
    {
        JSAudioContextPrototype* ptr = new (NotNull, JSC::allocateCell<JSAudioContextPrototype>(vm.heap)) JSAudioContextPrototype(vm, globalObject, structure);
        ptr->finishCreation(vm);
        return ptr;
    }

    DECLARE_INFO;
    static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype)
    {
        return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::ObjectType, StructureFlags), info());
    }

private:
    JSAudioContextPrototype(JSC::VM& vm, JSC::JSGlobalObject*, JSC::Structure* structure)
        : JSC::JSNonFinalObject(vm, structure)
    {
    }

    void finishCreation(JSC::VM&);
};

using JSAudioContextConstructor = JSDOMConstructor<JSAudioContext>;

template<> EncodedJSValue JSC_HOST_CALL JSAudioContextConstructor::construct(ExecState* state)
{
    VM& vm = state->vm();
    auto throwScope = DECLARE_THROW_SCOPE(vm);
    UNUSED_PARAM(throwScope);
    auto* castedThis = jsCast<JSAudioContextConstructor*>(state->jsCallee());
    ASSERT(castedThis);
    auto* context = castedThis->scriptExecutionContext();
    if (UNLIKELY(!context))
        return throwConstructorScriptExecutionContextUnavailableError(*state, throwScope, "webkitAudioContext");
    ASSERT(context->isDocument());
    auto& document = downcast<Document>(*context);
    auto object = AudioContext::create(document);
    return JSValue::encode(toJSNewlyCreated<IDLInterface<AudioContext>>(*state, *castedThis->globalObject(), WTFMove(object)));
}

template<> JSValue JSAudioContextConstructor::prototypeForStructure(JSC::VM& vm, const JSDOMGlobalObject& globalObject)
{
    return JSEventTarget::getConstructor(vm, &globalObject);
}

template<> void JSAudioContextConstructor::initializeProperties(VM& vm, JSDOMGlobalObject& globalObject)
{
    putDirect(vm, vm.propertyNames->prototype, JSAudioContext::prototype(vm, globalObject), JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::DontEnum);
    putDirect(vm, vm.propertyNames->name, jsNontrivialString(&vm, String("webkitAudioContext"_s)), JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::DontEnum);
    putDirect(vm, vm.propertyNames->length, jsNumber(0), JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::DontEnum);
}

template<> const ClassInfo JSAudioContextConstructor::s_info = { "webkitAudioContext", &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSAudioContextConstructor) };

/* Hash table for prototype */

static const HashTableValue JSAudioContextPrototypeTableValues[] =
{
    { "constructor", static_cast<unsigned>(JSC::PropertyAttribute::DontEnum), NoIntrinsic, { (intptr_t)static_cast<PropertySlot::GetValueFunc>(jsAudioContextConstructor), (intptr_t) static_cast<PutPropertySlot::PutValueFunc>(setJSAudioContextConstructor) } },
    { "destination", static_cast<unsigned>(JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::CustomAccessor | JSC::PropertyAttribute::DOMAttribute), NoIntrinsic, { (intptr_t)static_cast<PropertySlot::GetValueFunc>(jsAudioContextDestination), (intptr_t) static_cast<PutPropertySlot::PutValueFunc>(0) } },
    { "currentTime", static_cast<unsigned>(JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::CustomAccessor | JSC::PropertyAttribute::DOMAttribute), NoIntrinsic, { (intptr_t)static_cast<PropertySlot::GetValueFunc>(jsAudioContextCurrentTime), (intptr_t) static_cast<PutPropertySlot::PutValueFunc>(0) } },
    { "sampleRate", static_cast<unsigned>(JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::CustomAccessor | JSC::PropertyAttribute::DOMAttribute), NoIntrinsic, { (intptr_t)static_cast<PropertySlot::GetValueFunc>(jsAudioContextSampleRate), (intptr_t) static_cast<PutPropertySlot::PutValueFunc>(0) } },
    { "listener", static_cast<unsigned>(JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::CustomAccessor | JSC::PropertyAttribute::DOMAttribute), NoIntrinsic, { (intptr_t)static_cast<PropertySlot::GetValueFunc>(jsAudioContextListener), (intptr_t) static_cast<PutPropertySlot::PutValueFunc>(0) } },
    { "state", static_cast<unsigned>(JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::CustomAccessor | JSC::PropertyAttribute::DOMAttribute), NoIntrinsic, { (intptr_t)static_cast<PropertySlot::GetValueFunc>(jsAudioContextState), (intptr_t) static_cast<PutPropertySlot::PutValueFunc>(0) } },
    { "onstatechange", static_cast<unsigned>(JSC::PropertyAttribute::CustomAccessor | JSC::PropertyAttribute::DOMAttribute), NoIntrinsic, { (intptr_t)static_cast<PropertySlot::GetValueFunc>(jsAudioContextOnstatechange), (intptr_t) static_cast<PutPropertySlot::PutValueFunc>(setJSAudioContextOnstatechange) } },
    { "activeSourceCount", static_cast<unsigned>(JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::CustomAccessor | JSC::PropertyAttribute::DOMAttribute), NoIntrinsic, { (intptr_t)static_cast<PropertySlot::GetValueFunc>(jsAudioContextActiveSourceCount), (intptr_t) static_cast<PutPropertySlot::PutValueFunc>(0) } },
    { "oncomplete", static_cast<unsigned>(JSC::PropertyAttribute::CustomAccessor | JSC::PropertyAttribute::DOMAttribute), NoIntrinsic, { (intptr_t)static_cast<PropertySlot::GetValueFunc>(jsAudioContextOncomplete), (intptr_t) static_cast<PutPropertySlot::PutValueFunc>(setJSAudioContextOncomplete) } },
    { "suspend", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionSuspend), (intptr_t) (0) } },
    { "resume", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionResume), (intptr_t) (0) } },
    { "close", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionClose), (intptr_t) (0) } },
    { "createBuffer", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionCreateBuffer), (intptr_t) (2) } },
    { "decodeAudioData", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionDecodeAudioData), (intptr_t) (2) } },
    { "createBufferSource", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionCreateBufferSource), (intptr_t) (0) } },
#if ENABLE(VIDEO)
    { "createMediaElementSource", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionCreateMediaElementSource), (intptr_t) (1) } },
#else
    { 0, 0, NoIntrinsic, { 0, 0 } },
#endif
#if ENABLE(MEDIA_STREAM)
    { "createMediaStreamSource", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionCreateMediaStreamSource), (intptr_t) (1) } },
#else
    { 0, 0, NoIntrinsic, { 0, 0 } },
#endif
#if ENABLE(MEDIA_STREAM)
    { "createMediaStreamDestination", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionCreateMediaStreamDestination), (intptr_t) (0) } },
#else
    { 0, 0, NoIntrinsic, { 0, 0 } },
#endif
    { "createGain", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionCreateGain), (intptr_t) (0) } },
    { "createDelay", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionCreateDelay), (intptr_t) (0) } },
    { "createBiquadFilter", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionCreateBiquadFilter), (intptr_t) (0) } },
    { "createWaveShaper", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionCreateWaveShaper), (intptr_t) (0) } },
    { "createPanner", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionCreatePanner), (intptr_t) (0) } },
    { "createConvolver", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionCreateConvolver), (intptr_t) (0) } },
    { "createDynamicsCompressor", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionCreateDynamicsCompressor), (intptr_t) (0) } },
    { "createAnalyser", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionCreateAnalyser), (intptr_t) (0) } },
    { "createScriptProcessor", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionCreateScriptProcessor), (intptr_t) (0) } },
    { "createOscillator", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionCreateOscillator), (intptr_t) (0) } },
    { "createPeriodicWave", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionCreatePeriodicWave), (intptr_t) (2) } },
    { "createChannelSplitter", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionCreateChannelSplitter), (intptr_t) (0) } },
    { "createChannelMerger", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionCreateChannelMerger), (intptr_t) (0) } },
    { "startRendering", static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { (intptr_t)static_cast<RawNativeFunction>(jsAudioContextPrototypeFunctionStartRendering), (intptr_t) (0) } },
};
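
// The "{ 0, 0, NoIntrinsic, { 0, 0 } }" rows above are placeholders for the properties that are
// compiled out when ENABLE(VIDEO) or ENABLE(MEDIA_STREAM) is disabled, keeping the table the same
// size in every configuration.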

const ClassInfo JSAudioContextPrototype::s_info = { "webkitAudioContextPrototype", &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSAudioContextPrototype) };

void JSAudioContextPrototype::finishCreation(VM& vm)
{
    Base::finishCreation(vm);
    reifyStaticProperties(vm, JSAudioContext::info(), JSAudioContextPrototypeTableValues, *this);
}

const ClassInfo JSAudioContext::s_info = { "webkitAudioContext", &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSAudioContext) };

JSAudioContext::JSAudioContext(Structure* structure, JSDOMGlobalObject& globalObject, Ref<AudioContext>&& impl)
    : JSEventTarget(structure, globalObject, WTFMove(impl))
{
}

void JSAudioContext::finishCreation(VM& vm)
{
    Base::finishCreation(vm);
    ASSERT(inherits(vm, info()));
}

JSObject* JSAudioContext::createPrototype(VM& vm, JSDOMGlobalObject& globalObject)
{
    return JSAudioContextPrototype::create(vm, &globalObject, JSAudioContextPrototype::createStructure(vm, &globalObject, JSEventTarget::prototype(vm, globalObject)));
}

JSObject* JSAudioContext::prototype(VM& vm, JSDOMGlobalObject& globalObject)
{
    return getDOMPrototype<JSAudioContext>(vm, globalObject);
}

JSValue JSAudioContext::getConstructor(VM& vm, const JSGlobalObject* globalObject)
{
    return getDOMConstructor<JSAudioContextConstructor>(vm, *jsCast<const JSDOMGlobalObject*>(globalObject));
}
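
// createPrototype() chains JSAudioContextPrototype to JSEventTarget::prototype(), and the
// constructor's prototypeForStructure() returns JSEventTarget::getConstructor(), mirroring the
// AudioContext : EventTarget inheritance on the JavaScript side.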

template<> inline JSAudioContext* IDLAttribute<JSAudioContext>::cast(ExecState& state, EncodedJSValue thisValue)
{
    return jsDynamicCast<JSAudioContext*>(state.vm(), JSValue::decode(thisValue));
}

template<> inline JSAudioContext* IDLOperation<JSAudioContext>::cast(ExecState& state)
{
    return jsDynamicCast<JSAudioContext*>(state.vm(), state.thisValue());
}

EncodedJSValue jsAudioContextConstructor(ExecState* state, EncodedJSValue thisValue, PropertyName)
{
    VM& vm = state->vm();
    auto throwScope = DECLARE_THROW_SCOPE(vm);
    auto* prototype = jsDynamicCast<JSAudioContextPrototype*>(vm, JSValue::decode(thisValue));
    if (UNLIKELY(!prototype))
        return throwVMTypeError(state, throwScope);
    return JSValue::encode(JSAudioContext::getConstructor(state->vm(), prototype->globalObject()));
}

bool setJSAudioContextConstructor(ExecState* state, EncodedJSValue thisValue, EncodedJSValue encodedValue)
{
    VM& vm = state->vm();
    auto throwScope = DECLARE_THROW_SCOPE(vm);
    auto* prototype = jsDynamicCast<JSAudioContextPrototype*>(vm, JSValue::decode(thisValue));
    if (UNLIKELY(!prototype)) {
        throwVMTypeError(state, throwScope);
        return false;
    }
    // Shadowing a built-in constructor
    return prototype->putDirect(vm, vm.propertyNames->constructor, JSValue::decode(encodedValue));
}

static inline JSValue jsAudioContextDestinationGetter(ExecState& state, JSAudioContext& thisObject, ThrowScope& throwScope)
{
    UNUSED_PARAM(throwScope);
    UNUSED_PARAM(state);
    auto& impl = thisObject.wrapped();
    JSValue result = toJS<IDLInterface<AudioDestinationNode>>(state, *thisObject.globalObject(), throwScope, impl.destination());
    return result;
}

EncodedJSValue jsAudioContextDestination(ExecState* state, EncodedJSValue thisValue, PropertyName)
{
    return IDLAttribute<JSAudioContext>::get<jsAudioContextDestinationGetter, CastedThisErrorBehavior::Assert>(*state, thisValue, "destination");
}

static inline JSValue jsAudioContextCurrentTimeGetter(ExecState& state, JSAudioContext& thisObject, ThrowScope& throwScope)
{
    UNUSED_PARAM(throwScope);
    UNUSED_PARAM(state);
    auto& impl = thisObject.wrapped();
    JSValue result = toJS<IDLUnrestrictedDouble>(state, throwScope, impl.currentTime());
    return result;
}

EncodedJSValue jsAudioContextCurrentTime(ExecState* state, EncodedJSValue thisValue, PropertyName)
{
    return IDLAttribute<JSAudioContext>::get<jsAudioContextCurrentTimeGetter, CastedThisErrorBehavior::Assert>(*state, thisValue, "currentTime");
}

static inline JSValue jsAudioContextSampleRateGetter(ExecState& state, JSAudioContext& thisObject, ThrowScope& throwScope)
{
    UNUSED_PARAM(throwScope);
    UNUSED_PARAM(state);
    auto& impl = thisObject.wrapped();
    JSValue result = toJS<IDLUnrestrictedFloat>(state, throwScope, impl.sampleRate());
    return result;
}

EncodedJSValue jsAudioContextSampleRate(ExecState* state, EncodedJSValue thisValue, PropertyName)
{
    return IDLAttribute<JSAudioContext>::get<jsAudioContextSampleRateGetter, CastedThisErrorBehavior::Assert>(*state, thisValue, "sampleRate");
}

static inline JSValue jsAudioContextListenerGetter(ExecState& state, JSAudioContext& thisObject, ThrowScope& throwScope)
{
    UNUSED_PARAM(throwScope);
    UNUSED_PARAM(state);
    auto& impl = thisObject.wrapped();
    JSValue result = toJS<IDLInterface<AudioListener>>(state, *thisObject.globalObject(), throwScope, impl.listener());
    return result;
}

EncodedJSValue jsAudioContextListener(ExecState* state, EncodedJSValue thisValue, PropertyName)
{
    return IDLAttribute<JSAudioContext>::get<jsAudioContextListenerGetter, CastedThisErrorBehavior::Assert>(*state, thisValue, "listener");
}

static inline JSValue jsAudioContextStateGetter(ExecState& state, JSAudioContext& thisObject, ThrowScope& throwScope)
{
    UNUSED_PARAM(throwScope);
    UNUSED_PARAM(state);
    auto& impl = thisObject.wrapped();
    JSValue result = toJS<IDLEnumeration<AudioContext::State>>(state, throwScope, impl.state());
    return result;
}

EncodedJSValue jsAudioContextState(ExecState* state, EncodedJSValue thisValue, PropertyName)
{
    return IDLAttribute<JSAudioContext>::get<jsAudioContextStateGetter, CastedThisErrorBehavior::Assert>(*state, thisValue, "state");
}

static inline JSValue jsAudioContextOnstatechangeGetter(ExecState& state, JSAudioContext& thisObject, ThrowScope& throwScope)
{
    UNUSED_PARAM(throwScope);
    UNUSED_PARAM(state);
    return eventHandlerAttribute(thisObject.wrapped(), eventNames().statechangeEvent, worldForDOMObject(thisObject));
}

EncodedJSValue jsAudioContextOnstatechange(ExecState* state, EncodedJSValue thisValue, PropertyName)
{
    return IDLAttribute<JSAudioContext>::get<jsAudioContextOnstatechangeGetter, CastedThisErrorBehavior::Assert>(*state, thisValue, "onstatechange");
}

static inline bool setJSAudioContextOnstatechangeSetter(ExecState& state, JSAudioContext& thisObject, JSValue value, ThrowScope& throwScope)
{
    UNUSED_PARAM(throwScope);
    setEventHandlerAttribute(state, thisObject, thisObject.wrapped(), eventNames().statechangeEvent, value);
    return true;
}

bool setJSAudioContextOnstatechange(ExecState* state, EncodedJSValue thisValue, EncodedJSValue encodedValue)
{
    return IDLAttribute<JSAudioContext>::set<setJSAudioContextOnstatechangeSetter>(*state, thisValue, encodedValue, "onstatechange");
}

static inline JSValue jsAudioContextActiveSourceCountGetter(ExecState& state, JSAudioContext& thisObject, ThrowScope& throwScope)
{
    UNUSED_PARAM(throwScope);
    UNUSED_PARAM(state);
    auto& impl = thisObject.wrapped();
    JSValue result = toJS<IDLUnsignedLong>(state, throwScope, impl.activeSourceCount());
    return result;
}

EncodedJSValue jsAudioContextActiveSourceCount(ExecState* state, EncodedJSValue thisValue, PropertyName)
{
    return IDLAttribute<JSAudioContext>::get<jsAudioContextActiveSourceCountGetter, CastedThisErrorBehavior::Assert>(*state, thisValue, "activeSourceCount");
}

static inline JSValue jsAudioContextOncompleteGetter(ExecState& state, JSAudioContext& thisObject, ThrowScope& throwScope)
{
    UNUSED_PARAM(throwScope);
    UNUSED_PARAM(state);
    return eventHandlerAttribute(thisObject.wrapped(), eventNames().completeEvent, worldForDOMObject(thisObject));
}

EncodedJSValue jsAudioContextOncomplete(ExecState* state, EncodedJSValue thisValue, PropertyName)
{
    return IDLAttribute<JSAudioContext>::get<jsAudioContextOncompleteGetter, CastedThisErrorBehavior::Assert>(*state, thisValue, "oncomplete");
}

static inline bool setJSAudioContextOncompleteSetter(ExecState& state, JSAudioContext& thisObject, JSValue value, ThrowScope& throwScope)
{
    UNUSED_PARAM(throwScope);
    setEventHandlerAttribute(state, thisObject, thisObject.wrapped(), eventNames().completeEvent, value);
    return true;
}

bool setJSAudioContextOncomplete(ExecState* state, EncodedJSValue thisValue, EncodedJSValue encodedValue)
{
    return IDLAttribute<JSAudioContext>::set<setJSAudioContextOncompleteSetter>(*state, thisValue, encodedValue, "oncomplete");
}

static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionSuspendBody(JSC::ExecState* state, typename IDLOperationReturningPromise<JSAudioContext>::ClassParameter castedThis, Ref<DeferredPromise>&& promise, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    impl.suspend(WTFMove(promise));
    return JSValue::encode(jsUndefined());
}

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionSuspend(ExecState* state)
{
    return IDLOperationReturningPromise<JSAudioContext>::call<jsAudioContextPrototypeFunctionSuspendBody, PromiseExecutionScope::WindowOnly>(*state, "suspend");
}

static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionResumeBody(JSC::ExecState* state, typename IDLOperationReturningPromise<JSAudioContext>::ClassParameter castedThis, Ref<DeferredPromise>&& promise, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    impl.resume(WTFMove(promise));
    return JSValue::encode(jsUndefined());
}

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionResume(ExecState* state)
{
    return IDLOperationReturningPromise<JSAudioContext>::call<jsAudioContextPrototypeFunctionResumeBody, PromiseExecutionScope::WindowOnly>(*state, "resume");
}

static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionCloseBody(JSC::ExecState* state, typename IDLOperationReturningPromise<JSAudioContext>::ClassParameter castedThis, Ref<DeferredPromise>&& promise, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    impl.close(WTFMove(promise));
    return JSValue::encode(jsUndefined());
}

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionClose(ExecState* state)
{
    return IDLOperationReturningPromise<JSAudioContext>::call<jsAudioContextPrototypeFunctionCloseBody, PromiseExecutionScope::WindowOnly>(*state, "close");
}
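
// suspend(), resume() and close() are promise-returning operations: IDLOperationReturningPromise
// hands each body a DeferredPromise for the implementation to settle later, and the host function
// itself just returns undefined.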

static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionCreateBuffer1Body(JSC::ExecState* state, typename IDLOperation<JSAudioContext>::ClassParameter castedThis, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    auto numberOfChannels = convert<IDLUnsignedLong>(*state, state->uncheckedArgument(0));
    RETURN_IF_EXCEPTION(throwScope, encodedJSValue());
    auto numberOfFrames = convert<IDLUnsignedLong>(*state, state->uncheckedArgument(1));
    RETURN_IF_EXCEPTION(throwScope, encodedJSValue());
    auto sampleRate = convert<IDLUnrestrictedFloat>(*state, state->uncheckedArgument(2));
    RETURN_IF_EXCEPTION(throwScope, encodedJSValue());
    return JSValue::encode(toJS<IDLInterface<AudioBuffer>>(*state, *castedThis->globalObject(), throwScope, impl.createBuffer(WTFMove(numberOfChannels), WTFMove(numberOfFrames), WTFMove(sampleRate))));
}

static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionCreateBuffer2Body(JSC::ExecState* state, typename IDLOperation<JSAudioContext>::ClassParameter castedThis, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    auto buffer = convert<IDLArrayBuffer>(*state, state->uncheckedArgument(0), [](JSC::ExecState& state, JSC::ThrowScope& scope) { throwArgumentTypeError(state, scope, 0, "buffer", "webkitAudioContext", "createBuffer", "ArrayBuffer"); });
    RETURN_IF_EXCEPTION(throwScope, encodedJSValue());
    auto mixToMono = convert<IDLBoolean>(*state, state->uncheckedArgument(1));
    RETURN_IF_EXCEPTION(throwScope, encodedJSValue());
    return JSValue::encode(toJS<IDLInterface<AudioBuffer>>(*state, *castedThis->globalObject(), throwScope, impl.createBuffer(*buffer, WTFMove(mixToMono))));
}

static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionCreateBufferOverloadDispatcher(JSC::ExecState* state, typename IDLOperation<JSAudioContext>::ClassParameter castedThis, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    VM& vm = state->vm();
    UNUSED_PARAM(vm);
    size_t argsCount = std::min<size_t>(3, state->argumentCount());
    if (argsCount == 2) {
        return jsAudioContextPrototypeFunctionCreateBuffer2Body(state, castedThis, throwScope);
    }
    if (argsCount == 3) {
        return jsAudioContextPrototypeFunctionCreateBuffer1Body(state, castedThis, throwScope);
    }
    return argsCount < 2 ? throwVMError(state, throwScope, createNotEnoughArgumentsError(state)) : throwVMTypeError(state, throwScope);
}
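
// Overload resolution for createBuffer(): two arguments selects the legacy
// createBuffer(ArrayBuffer, mixToMono) form, three selects
// createBuffer(numberOfChannels, numberOfFrames, sampleRate); fewer than two arguments is a
// "not enough arguments" error and any other count is a TypeError.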

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateBuffer(ExecState* state)
{
    return IDLOperation<JSAudioContext>::call<jsAudioContextPrototypeFunctionCreateBufferOverloadDispatcher>(*state, "createBuffer");
}

static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionDecodeAudioDataBody(JSC::ExecState* state, typename IDLOperation<JSAudioContext>::ClassParameter castedThis, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    if (UNLIKELY(state->argumentCount() < 2))
        return throwVMError(state, throwScope, createNotEnoughArgumentsError(state));
    auto audioData = convert<IDLArrayBuffer>(*state, state->uncheckedArgument(0), [](JSC::ExecState& state, JSC::ThrowScope& scope) { throwArgumentTypeError(state, scope, 0, "audioData", "webkitAudioContext", "decodeAudioData", "ArrayBuffer"); });
    RETURN_IF_EXCEPTION(throwScope, encodedJSValue());
    auto successCallback = convert<IDLNullable<IDLCallbackFunction<JSAudioBufferCallback>>>(*state, state->uncheckedArgument(1), *castedThis->globalObject(), [](JSC::ExecState& state, JSC::ThrowScope& scope) { throwArgumentMustBeFunctionError(state, scope, 1, "successCallback", "webkitAudioContext", "decodeAudioData"); });
    RETURN_IF_EXCEPTION(throwScope, encodedJSValue());
    auto errorCallback = convert<IDLNullable<IDLCallbackFunction<JSAudioBufferCallback>>>(*state, state->argument(2), *castedThis->globalObject(), [](JSC::ExecState& state, JSC::ThrowScope& scope) { throwArgumentMustBeFunctionError(state, scope, 2, "errorCallback", "webkitAudioContext", "decodeAudioData"); });
    RETURN_IF_EXCEPTION(throwScope, encodedJSValue());
    impl.decodeAudioData(*audioData, WTFMove(successCallback), WTFMove(errorCallback));
    return JSValue::encode(jsUndefined());
}

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionDecodeAudioData(ExecState* state)
{
    return IDLOperation<JSAudioContext>::call<jsAudioContextPrototypeFunctionDecodeAudioDataBody>(*state, "decodeAudioData");
}

static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionCreateBufferSourceBody(JSC::ExecState* state, typename IDLOperation<JSAudioContext>::ClassParameter castedThis, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    return JSValue::encode(toJS<IDLInterface<AudioBufferSourceNode>>(*state, *castedThis->globalObject(), throwScope, impl.createBufferSource()));
}

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateBufferSource(ExecState* state)
{
    return IDLOperation<JSAudioContext>::call<jsAudioContextPrototypeFunctionCreateBufferSourceBody>(*state, "createBufferSource");
}

#if ENABLE(VIDEO)
static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionCreateMediaElementSourceBody(JSC::ExecState* state, typename IDLOperation<JSAudioContext>::ClassParameter castedThis, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    if (UNLIKELY(state->argumentCount() < 1))
        return throwVMError(state, throwScope, createNotEnoughArgumentsError(state));
    auto mediaElement = convert<IDLInterface<HTMLMediaElement>>(*state, state->uncheckedArgument(0), [](JSC::ExecState& state, JSC::ThrowScope& scope) { throwArgumentTypeError(state, scope, 0, "mediaElement", "webkitAudioContext", "createMediaElementSource", "HTMLMediaElement"); });
    RETURN_IF_EXCEPTION(throwScope, encodedJSValue());
    return JSValue::encode(toJS<IDLInterface<MediaElementAudioSourceNode>>(*state, *castedThis->globalObject(), throwScope, impl.createMediaElementSource(*mediaElement)));
}

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateMediaElementSource(ExecState* state)
{
    return IDLOperation<JSAudioContext>::call<jsAudioContextPrototypeFunctionCreateMediaElementSourceBody>(*state, "createMediaElementSource");
}

#endif

#if ENABLE(MEDIA_STREAM)
static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionCreateMediaStreamSourceBody(JSC::ExecState* state, typename IDLOperation<JSAudioContext>::ClassParameter castedThis, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    if (UNLIKELY(state->argumentCount() < 1))
        return throwVMError(state, throwScope, createNotEnoughArgumentsError(state));
    auto mediaStream = convert<IDLInterface<MediaStream>>(*state, state->uncheckedArgument(0), [](JSC::ExecState& state, JSC::ThrowScope& scope) { throwArgumentTypeError(state, scope, 0, "mediaStream", "webkitAudioContext", "createMediaStreamSource", "MediaStream"); });
    RETURN_IF_EXCEPTION(throwScope, encodedJSValue());
    return JSValue::encode(toJS<IDLInterface<MediaStreamAudioSourceNode>>(*state, *castedThis->globalObject(), throwScope, impl.createMediaStreamSource(*mediaStream)));
}

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateMediaStreamSource(ExecState* state)
{
    return IDLOperation<JSAudioContext>::call<jsAudioContextPrototypeFunctionCreateMediaStreamSourceBody>(*state, "createMediaStreamSource");
}

#endif

#if ENABLE(MEDIA_STREAM)
static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionCreateMediaStreamDestinationBody(JSC::ExecState* state, typename IDLOperation<JSAudioContext>::ClassParameter castedThis, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    return JSValue::encode(toJS<IDLInterface<MediaStreamAudioDestinationNode>>(*state, *castedThis->globalObject(), throwScope, impl.createMediaStreamDestination()));
}

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateMediaStreamDestination(ExecState* state)
{
    return IDLOperation<JSAudioContext>::call<jsAudioContextPrototypeFunctionCreateMediaStreamDestinationBody>(*state, "createMediaStreamDestination");
}

#endif

static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionCreateGainBody(JSC::ExecState* state, typename IDLOperation<JSAudioContext>::ClassParameter castedThis, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    return JSValue::encode(toJS<IDLInterface<GainNode>>(*state, *castedThis->globalObject(), throwScope, impl.createGain()));
}

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateGain(ExecState* state)
{
    return IDLOperation<JSAudioContext>::call<jsAudioContextPrototypeFunctionCreateGainBody>(*state, "createGain");
}

static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionCreateDelayBody(JSC::ExecState* state, typename IDLOperation<JSAudioContext>::ClassParameter castedThis, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    auto maxDelayTime = state->argument(0).isUndefined() ? 1 : convert<IDLUnrestrictedDouble>(*state, state->uncheckedArgument(0));
    RETURN_IF_EXCEPTION(throwScope, encodedJSValue());
    return JSValue::encode(toJS<IDLInterface<DelayNode>>(*state, *castedThis->globalObject(), throwScope, impl.createDelay(WTFMove(maxDelayTime))));
}

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateDelay(ExecState* state)
{
    return IDLOperation<JSAudioContext>::call<jsAudioContextPrototypeFunctionCreateDelayBody>(*state, "createDelay");
}

static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionCreateBiquadFilterBody(JSC::ExecState* state, typename IDLOperation<JSAudioContext>::ClassParameter castedThis, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    return JSValue::encode(toJS<IDLInterface<BiquadFilterNode>>(*state, *castedThis->globalObject(), throwScope, impl.createBiquadFilter()));
}

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateBiquadFilter(ExecState* state)
{
    return IDLOperation<JSAudioContext>::call<jsAudioContextPrototypeFunctionCreateBiquadFilterBody>(*state, "createBiquadFilter");
}

static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionCreateWaveShaperBody(JSC::ExecState* state, typename IDLOperation<JSAudioContext>::ClassParameter castedThis, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    return JSValue::encode(toJS<IDLInterface<WaveShaperNode>>(*state, *castedThis->globalObject(), throwScope, impl.createWaveShaper()));
}

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateWaveShaper(ExecState* state)
{
    return IDLOperation<JSAudioContext>::call<jsAudioContextPrototypeFunctionCreateWaveShaperBody>(*state, "createWaveShaper");
}

static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionCreatePannerBody(JSC::ExecState* state, typename IDLOperation<JSAudioContext>::ClassParameter castedThis, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    return JSValue::encode(toJS<IDLInterface<PannerNode>>(*state, *castedThis->globalObject(), throwScope, impl.createPanner()));
}

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreatePanner(ExecState* state)
{
    return IDLOperation<JSAudioContext>::call<jsAudioContextPrototypeFunctionCreatePannerBody>(*state, "createPanner");
}

static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionCreateConvolverBody(JSC::ExecState* state, typename IDLOperation<JSAudioContext>::ClassParameter castedThis, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    return JSValue::encode(toJS<IDLInterface<ConvolverNode>>(*state, *castedThis->globalObject(), throwScope, impl.createConvolver()));
}

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateConvolver(ExecState* state)
{
    return IDLOperation<JSAudioContext>::call<jsAudioContextPrototypeFunctionCreateConvolverBody>(*state, "createConvolver");
}

static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionCreateDynamicsCompressorBody(JSC::ExecState* state, typename IDLOperation<JSAudioContext>::ClassParameter castedThis, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    return JSValue::encode(toJS<IDLInterface<DynamicsCompressorNode>>(*state, *castedThis->globalObject(), throwScope, impl.createDynamicsCompressor()));
}

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateDynamicsCompressor(ExecState* state)
{
    return IDLOperation<JSAudioContext>::call<jsAudioContextPrototypeFunctionCreateDynamicsCompressorBody>(*state, "createDynamicsCompressor");
}

static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionCreateAnalyserBody(JSC::ExecState* state, typename IDLOperation<JSAudioContext>::ClassParameter castedThis, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    return JSValue::encode(toJS<IDLInterface<AnalyserNode>>(*state, *castedThis->globalObject(), throwScope, impl.createAnalyser()));
}

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateAnalyser(ExecState* state)
{
    return IDLOperation<JSAudioContext>::call<jsAudioContextPrototypeFunctionCreateAnalyserBody>(*state, "createAnalyser");
}

static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionCreateScriptProcessorBody(JSC::ExecState* state, typename IDLOperation<JSAudioContext>::ClassParameter castedThis, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    auto bufferSize = convert<IDLUnsignedLong>(*state, state->argument(0));
    RETURN_IF_EXCEPTION(throwScope, encodedJSValue());
    auto numberOfInputChannels = state->argument(1).isUndefined() ? 2 : convert<IDLUnsignedLong>(*state, state->uncheckedArgument(1));
    RETURN_IF_EXCEPTION(throwScope, encodedJSValue());
    auto numberOfOutputChannels = state->argument(2).isUndefined() ? 2 : convert<IDLUnsignedLong>(*state, state->uncheckedArgument(2));
    RETURN_IF_EXCEPTION(throwScope, encodedJSValue());
    return JSValue::encode(toJS<IDLInterface<ScriptProcessorNode>>(*state, *castedThis->globalObject(), throwScope, impl.createScriptProcessor(WTFMove(bufferSize), WTFMove(numberOfInputChannels), WTFMove(numberOfOutputChannels))));
}

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateScriptProcessor(ExecState* state)
{
    return IDLOperation<JSAudioContext>::call<jsAudioContextPrototypeFunctionCreateScriptProcessorBody>(*state, "createScriptProcessor");
}

static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionCreateOscillatorBody(JSC::ExecState* state, typename IDLOperation<JSAudioContext>::ClassParameter castedThis, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    return JSValue::encode(toJS<IDLInterface<OscillatorNode>>(*state, *castedThis->globalObject(), throwScope, impl.createOscillator()));
}

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateOscillator(ExecState* state)
{
    return IDLOperation<JSAudioContext>::call<jsAudioContextPrototypeFunctionCreateOscillatorBody>(*state, "createOscillator");
}

static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionCreatePeriodicWaveBody(JSC::ExecState* state, typename IDLOperation<JSAudioContext>::ClassParameter castedThis, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    if (UNLIKELY(state->argumentCount() < 2))
        return throwVMError(state, throwScope, createNotEnoughArgumentsError(state));
    auto real = convert<IDLFloat32Array>(*state, state->uncheckedArgument(0), [](JSC::ExecState& state, JSC::ThrowScope& scope) { throwArgumentTypeError(state, scope, 0, "real", "webkitAudioContext", "createPeriodicWave", "Float32Array"); });
    RETURN_IF_EXCEPTION(throwScope, encodedJSValue());
    auto imag = convert<IDLFloat32Array>(*state, state->uncheckedArgument(1), [](JSC::ExecState& state, JSC::ThrowScope& scope) { throwArgumentTypeError(state, scope, 1, "imag", "webkitAudioContext", "createPeriodicWave", "Float32Array"); });
    RETURN_IF_EXCEPTION(throwScope, encodedJSValue());
    return JSValue::encode(toJS<IDLInterface<PeriodicWave>>(*state, *castedThis->globalObject(), throwScope, impl.createPeriodicWave(real.releaseNonNull(), imag.releaseNonNull())));
}

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreatePeriodicWave(ExecState* state)
{
    return IDLOperation<JSAudioContext>::call<jsAudioContextPrototypeFunctionCreatePeriodicWaveBody>(*state, "createPeriodicWave");
}

static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionCreateChannelSplitterBody(JSC::ExecState* state, typename IDLOperation<JSAudioContext>::ClassParameter castedThis, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    auto numberOfOutputs = state->argument(0).isUndefined() ? 6 : convert<IDLUnsignedLong>(*state, state->uncheckedArgument(0));
    RETURN_IF_EXCEPTION(throwScope, encodedJSValue());
    return JSValue::encode(toJS<IDLInterface<ChannelSplitterNode>>(*state, *castedThis->globalObject(), throwScope, impl.createChannelSplitter(WTFMove(numberOfOutputs))));
}

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateChannelSplitter(ExecState* state)
{
    return IDLOperation<JSAudioContext>::call<jsAudioContextPrototypeFunctionCreateChannelSplitterBody>(*state, "createChannelSplitter");
}

static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionCreateChannelMergerBody(JSC::ExecState* state, typename IDLOperation<JSAudioContext>::ClassParameter castedThis, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    auto numberOfInputs = state->argument(0).isUndefined() ? 6 : convert<IDLUnsignedLong>(*state, state->uncheckedArgument(0));
    RETURN_IF_EXCEPTION(throwScope, encodedJSValue());
    return JSValue::encode(toJS<IDLInterface<ChannelMergerNode>>(*state, *castedThis->globalObject(), throwScope, impl.createChannelMerger(WTFMove(numberOfInputs))));
}

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionCreateChannelMerger(ExecState* state)
{
    return IDLOperation<JSAudioContext>::call<jsAudioContextPrototypeFunctionCreateChannelMergerBody>(*state, "createChannelMerger");
}

static inline JSC::EncodedJSValue jsAudioContextPrototypeFunctionStartRenderingBody(JSC::ExecState* state, typename IDLOperation<JSAudioContext>::ClassParameter castedThis, JSC::ThrowScope& throwScope)
{
    UNUSED_PARAM(state);
    UNUSED_PARAM(throwScope);
    auto& impl = castedThis->wrapped();
    impl.startRendering();
    return JSValue::encode(jsUndefined());
}

EncodedJSValue JSC_HOST_CALL jsAudioContextPrototypeFunctionStartRendering(ExecState* state)
{
    return IDLOperation<JSAudioContext>::call<jsAudioContextPrototypeFunctionStartRenderingBody>(*state, "startRendering");
}

void JSAudioContext::heapSnapshot(JSCell* cell, HeapSnapshotBuilder& builder)
{
    auto* thisObject = jsCast<JSAudioContext*>(cell);
    builder.setWrappedObjectForCell(cell, &thisObject->wrapped());
    if (thisObject->scriptExecutionContext())
        builder.setLabelForCell(cell, "url " + thisObject->scriptExecutionContext()->url().string());
    Base::heapSnapshot(cell, builder);
}

bool JSAudioContextOwner::isReachableFromOpaqueRoots(JSC::Handle<JSC::Unknown> handle, void*, SlotVisitor& visitor, const char** reason)
{
    auto* jsAudioContext = jsCast<JSAudioContext*>(handle.slot()->asCell());
    if (jsAudioContext->wrapped().hasPendingActivity()) {
        if (UNLIKELY(reason))
            *reason = "ActiveDOMObject with pending activity";
        return true;
    }
    if (jsAudioContext->wrapped().isFiringEventListeners()) {
        if (UNLIKELY(reason))
            *reason = "EventTarget firing event listeners";
        return true;
    }
    UNUSED_PARAM(visitor);
    UNUSED_PARAM(reason);
    return false;
}
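
// Keeping the wrapper reachable while the AudioContext has pending activity or is firing event
// listeners prevents the garbage collector from collecting a wrapper whose underlying context is
// still observable from script.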

void JSAudioContextOwner::finalize(JSC::Handle<JSC::Unknown> handle, void* context)
{
    auto* jsAudioContext = static_cast<JSAudioContext*>(handle.slot()->asCell());
    auto& world = *static_cast<DOMWrapperWorld*>(context);
    uncacheWrapper(world, &jsAudioContext->wrapped(), jsAudioContext);
}

#if ENABLE(BINDING_INTEGRITY)
#if PLATFORM(WIN)
#pragma warning(disable: 4483)
extern "C" { extern void (*const __identifier("??_7AudioContext@WebCore@@6B@")[])(); }
#else
extern "C" { extern void* _ZTVN7WebCore12AudioContextE[]; }
#endif
#endif

JSC::JSValue toJSNewlyCreated(JSC::ExecState*, JSDOMGlobalObject* globalObject, Ref<AudioContext>&& impl)
{

#if ENABLE(BINDING_INTEGRITY)
    void* actualVTablePointer = *(reinterpret_cast<void**>(impl.ptr()));
#if PLATFORM(WIN)
    void* expectedVTablePointer = WTF_PREPARE_VTBL_POINTER_FOR_INSPECTION(__identifier("??_7AudioContext@WebCore@@6B@"));
#else
    void* expectedVTablePointer = WTF_PREPARE_VTBL_POINTER_FOR_INSPECTION(&_ZTVN7WebCore12AudioContextE[2]);
#endif

    // If this fails, AudioContext does not have a vtable, so you need to add the
    // ImplementationLacksVTable attribute to the interface definition.
    static_assert(std::is_polymorphic<AudioContext>::value, "AudioContext is not polymorphic");

    // If you hit this assertion you either have a use-after-free bug, or
    // AudioContext has subclasses. If AudioContext has subclasses that get passed
    // to toJS(), we currently require you to opt AudioContext out of binding hardening
    // by adding the SkipVTableValidation attribute to the interface IDL definition.
    RELEASE_ASSERT(actualVTablePointer == expectedVTablePointer);
#endif
    return createWrapper<AudioContext>(globalObject, WTFMove(impl));
}

JSC::JSValue toJS(JSC::ExecState* state, JSDOMGlobalObject* globalObject, AudioContext& impl)
{
    return wrap(state, globalObject, impl);
}
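
// toJS() goes through wrap(), which returns the cached wrapper for this AudioContext when one
// already exists and otherwise creates a new one via toJSNewlyCreated() above.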

AudioContext* JSAudioContext::toWrapped(JSC::VM& vm, JSC::JSValue value)
{
    if (auto* wrapper = jsDynamicCast<JSAudioContext*>(vm, value))
        return &wrapper->wrapped();
    return nullptr;
}

}

#endif // ENABLE(WEB_AUDIO)