/*
 * Copyright (C) 2014 Igalia S.L
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"
#include "AudioSourceProviderGStreamer.h"

#if ENABLE(WEB_AUDIO) && ENABLE(VIDEO) && USE(GSTREAMER)

#include "AudioBus.h"
#include "AudioSourceProviderClient.h"
#include <gst/app/gstappsink.h>
#include <gst/audio/audio-info.h>
#include <gst/base/gstadapter.h>

#if ENABLE(MEDIA_STREAM) && USE(LIBWEBRTC)
#include "GStreamerAudioData.h"
#include "GStreamerMediaStreamSource.h"
#endif

namespace WebCore {

// For now the provider supports only stereo streams at a fixed sample rate.
static const int gNumberOfChannels = 2;
static const float gSampleBitRate = 44100;

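// Relays appsink's new-sample callback to the provider instance that was
// registered as user data.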
static GstFlowReturn onAppsinkNewBufferCallback(GstAppSink* sink, gpointer userData)
{
    return static_cast<AudioSourceProviderGStreamer*>(userData)->handleAudioBuffer(sink);
}

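// Trampolines relaying the deinterleave element's pad-added, no-more-pads and
// pad-removed signals to the provider.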
static void onGStreamerDeinterleavePadAddedCallback(GstElement*, GstPad* pad, AudioSourceProviderGStreamer* provider)
{
    provider->handleNewDeinterleavePad(pad);
}

static void onGStreamerDeinterleaveReadyCallback(GstElement*, AudioSourceProviderGStreamer* provider)
{
    provider->deinterleavePadsConfigured();
}

static void onGStreamerDeinterleavePadRemovedCallback(GstElement*, GstPad* pad, AudioSourceProviderGStreamer* provider)
{
    provider->handleRemovedDeinterleavePad(pad);
}

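// Pad probe that clears the channel adapters whenever a flush-stop event
// reaches an appsink, so stale samples are never handed to WebAudio.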
static GstPadProbeReturn onAppsinkFlushCallback(GstPad*, GstPadProbeInfo* info, gpointer userData)
{
    if (GST_PAD_PROBE_INFO_TYPE(info) & (GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM | GST_PAD_PROBE_TYPE_EVENT_FLUSH)) {
        GstEvent* event = GST_PAD_PROBE_INFO_EVENT(info);
        if (GST_EVENT_TYPE(event) == GST_EVENT_FLUSH_STOP) {
            AudioSourceProviderGStreamer* provider = reinterpret_cast<AudioSourceProviderGStreamer*>(userData);
            provider->clearAdapters();
        }
    }
    return GST_PAD_PROBE_OK;
}

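// Copies framesToProcess frames from one channel's adapter into the matching
// AudioBus channel, zeroing the whole bus when the adapter does not yet hold
// enough data.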
static void copyGStreamerBuffersToAudioChannel(GstAdapter* adapter, AudioBus* bus, int channelNumber, size_t framesToProcess)
{
    if (!gst_adapter_available(adapter)) {
        bus->zero();
        return;
    }

    size_t bytes = framesToProcess * sizeof(float);
    if (gst_adapter_available(adapter) >= bytes) {
        gst_adapter_copy(adapter, bus->channel(channelNumber)->mutableData(), 0, bytes);
        gst_adapter_flush(adapter, bytes);
    } else
        bus->zero();
}

AudioSourceProviderGStreamer::AudioSourceProviderGStreamer()
    : m_notifier(MainThreadNotifier<MainThreadNotification>::create())
    , m_client(nullptr)
    , m_deinterleaveSourcePads(0)
    , m_deinterleavePadAddedHandlerId(0)
    , m_deinterleaveNoMorePadsHandlerId(0)
    , m_deinterleavePadRemovedHandlerId(0)
{
    m_frontLeftAdapter = gst_adapter_new();
    m_frontRightAdapter = gst_adapter_new();
}

#if ENABLE(MEDIA_STREAM) && USE(LIBWEBRTC)
AudioSourceProviderGStreamer::AudioSourceProviderGStreamer(MediaStreamTrackPrivate& source)
    : m_notifier(MainThreadNotifier<MainThreadNotification>::create())
    , m_client(nullptr)
    , m_deinterleaveSourcePads(0)
    , m_deinterleavePadAddedHandlerId(0)
    , m_deinterleaveNoMorePadsHandlerId(0)
    , m_deinterleavePadRemovedHandlerId(0)
{
    m_frontLeftAdapter = gst_adapter_new();
    m_frontRightAdapter = gst_adapter_new();
    auto pipelineName = makeString("WebAudioProvider_MediaStreamTrack_", source.id());
    m_pipeline = adoptGRef(GST_ELEMENT(g_object_ref_sink(gst_element_factory_make("pipeline", pipelineName.utf8().data()))));
    auto src = webkitMediaStreamSrcNew();
    webkitMediaStreamSrcAddTrack(WEBKIT_MEDIA_STREAM_SRC(src), &source, true);

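    // The sink bin holds only a tee for now; setClient() plugs the
    // deinterleave branch into it once a WebAudio consumer shows up.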
    m_audioSinkBin = adoptGRef(GST_ELEMENT(g_object_ref_sink(gst_parse_bin_from_description("tee name=audioTee", true, nullptr))));

    gst_bin_add_many(GST_BIN(m_pipeline.get()), src, m_audioSinkBin.get(), nullptr);
    gst_element_link(src, m_audioSinkBin.get());

    connectSimpleBusMessageCallback(m_pipeline.get());
}
#endif

AudioSourceProviderGStreamer::~AudioSourceProviderGStreamer()
{
    m_notifier->invalidate();

    GRefPtr<GstElement> deinterleave = adoptGRef(gst_bin_get_by_name(GST_BIN(m_audioSinkBin.get()), "deinterleave"));
    if (deinterleave && m_client) {
        g_signal_handler_disconnect(deinterleave.get(), m_deinterleavePadAddedHandlerId);
        g_signal_handler_disconnect(deinterleave.get(), m_deinterleaveNoMorePadsHandlerId);
        g_signal_handler_disconnect(deinterleave.get(), m_deinterleavePadRemovedHandlerId);
    }

    if (m_pipeline)
        gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);

    g_object_unref(m_frontLeftAdapter);
    g_object_unref(m_frontRightAdapter);
}

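// Builds the local playback branch of the audio sink bin: tee ! queue !
// audioconvert ! audioresample ! volume ! audioconvert ! audioresample !
// autoaudiosink. The same tee later feeds the WebAudio branch created in
// setClient().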
void AudioSourceProviderGStreamer::configureAudioBin(GstElement* audioBin, GstElement* teePredecessor)
{
    m_audioSinkBin = audioBin;

    GstElement* audioTee = gst_element_factory_make("tee", "audioTee");
    GstElement* audioQueue = gst_element_factory_make("queue", nullptr);
    GstElement* audioConvert = gst_element_factory_make("audioconvert", nullptr);
    GstElement* audioConvert2 = gst_element_factory_make("audioconvert", nullptr);
    GstElement* audioResample = gst_element_factory_make("audioresample", nullptr);
    GstElement* audioResample2 = gst_element_factory_make("audioresample", nullptr);
    GstElement* volumeElement = gst_element_factory_make("volume", "volume");
    GstElement* audioSink = gst_element_factory_make("autoaudiosink", nullptr);

    gst_bin_add_many(GST_BIN(m_audioSinkBin.get()), audioTee, audioQueue, audioConvert, audioResample, volumeElement, audioConvert2, audioResample2, audioSink, nullptr);

    // In cases where the audio-sink needs elements before the tee (such as
    // scaletempo) they need to be linked to the tee, which in this case
    // doesn't need a ghost pad. It is assumed that the teePredecessor chain
    // already configured a ghost pad.
    if (teePredecessor)
        gst_element_link_pads_full(teePredecessor, "src", audioTee, "sink", GST_PAD_LINK_CHECK_NOTHING);
    else {
        // Add a ghost pad to the bin so it can proxy to the tee.
        GRefPtr<GstPad> audioTeeSinkPad = adoptGRef(gst_element_get_static_pad(audioTee, "sink"));
        gst_element_add_pad(m_audioSinkBin.get(), gst_ghost_pad_new("sink", audioTeeSinkPad.get()));
    }

    // Link a new src pad from the tee to queue ! audioconvert !
    // audioresample ! volume ! audioconvert ! audioresample !
    // autoaudiosink. The audioresample and audioconvert elements are needed
    // to ensure the audio sink receives buffers in the correct format.
    gst_element_link_pads_full(audioTee, "src_%u", audioQueue, "sink", GST_PAD_LINK_CHECK_NOTHING);
    gst_element_link_pads_full(audioQueue, "src", audioConvert, "sink", GST_PAD_LINK_CHECK_NOTHING);
    gst_element_link_pads_full(audioConvert, "src", audioResample, "sink", GST_PAD_LINK_CHECK_NOTHING);
    gst_element_link_pads_full(audioResample, "src", volumeElement, "sink", GST_PAD_LINK_CHECK_NOTHING);
    gst_element_link_pads_full(volumeElement, "src", audioConvert2, "sink", GST_PAD_LINK_CHECK_NOTHING);
    gst_element_link_pads_full(audioConvert2, "src", audioResample2, "sink", GST_PAD_LINK_CHECK_NOTHING);
    gst_element_link_pads_full(audioResample2, "src", audioSink, "sink", GST_PAD_LINK_CHECK_NOTHING);
}

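// Called from the WebAudio rendering thread: pulls buffered samples for the
// left and right channels out of the adapters into the destination bus.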
void AudioSourceProviderGStreamer::provideInput(AudioBus* bus, size_t framesToProcess)
{
    auto locker = holdLock(m_adapterMutex);
    copyGStreamerBuffersToAudioChannel(m_frontLeftAdapter, bus, 0, framesToProcess);
    copyGStreamerBuffersToAudioChannel(m_frontRightAdapter, bus, 1, framesToProcess);
}

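// Invoked on the appsink streaming thread each time a deinterleaved,
// single-channel sample becomes available.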
GstFlowReturn AudioSourceProviderGStreamer::handleAudioBuffer(GstAppSink* sink)
{
    if (!m_client)
        return GST_FLOW_OK;

    // Pull a buffer from appsink and store it in the adapter for the audio
    // channel it represents.
    GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_sample(sink));
    if (!sample)
        return gst_app_sink_is_eos(sink) ? GST_FLOW_EOS : GST_FLOW_ERROR;

    GstBuffer* buffer = gst_sample_get_buffer(sample.get());
    if (!buffer)
        return GST_FLOW_ERROR;

    GstCaps* caps = gst_sample_get_caps(sample.get());
    if (!caps)
        return GST_FLOW_ERROR;

    GstAudioInfo info;
    if (!gst_audio_info_from_caps(&info, caps))
        return GST_FLOW_ERROR;

    auto locker = holdLock(m_adapterMutex);

    // Check the first audio channel. The buffer is supposed to store
    // data of a single channel anyway.
    switch (GST_AUDIO_INFO_POSITION(&info, 0)) {
    case GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT:
    case GST_AUDIO_CHANNEL_POSITION_MONO:
        gst_adapter_push(m_frontLeftAdapter, gst_buffer_ref(buffer));
        break;
    case GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT:
        gst_adapter_push(m_frontRightAdapter, gst_buffer_ref(buffer));
        break;
    default:
        break;
    }

    return GST_FLOW_OK;
}

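// Attaches the WebAudio consumer: mutes local playback through the volume
// element and plugs tee ! queue ! audioconvert ! audioresample ! capsfilter !
// deinterleave into the sink bin so each channel can be captured by an
// appsink.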
void AudioSourceProviderGStreamer::setClient(AudioSourceProviderClient* client)
{
    if (m_client)
        return;

    ASSERT(client);
    m_client = client;

    if (m_pipeline)
        gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);

    // The volume element is used to mute audio playback towards the
    // autoaudiosink. This is needed to avoid double playback of audio from
    // our audio sink and from the WebAudio AudioDestination node, which the
    // application is expected to have configured already.
    GRefPtr<GstElement> volumeElement = adoptGRef(gst_bin_get_by_name(GST_BIN(m_audioSinkBin.get()), "volume"));

    if (volumeElement)
        g_object_set(volumeElement.get(), "mute", TRUE, nullptr);

    // The audioconvert and audioresample elements are needed to ensure
    // deinterleave and the sinks downstream receive buffers in the format
    // specified by the capsfilter.
    GstElement* audioQueue = gst_element_factory_make("queue", nullptr);
    GstElement* audioConvert = gst_element_factory_make("audioconvert", nullptr);
    GstElement* audioResample = gst_element_factory_make("audioresample", nullptr);
    GstElement* capsFilter = gst_element_factory_make("capsfilter", nullptr);
    GstElement* deInterleave = gst_element_factory_make("deinterleave", "deinterleave");

    g_object_set(deInterleave, "keep-positions", TRUE, nullptr);
    m_deinterleavePadAddedHandlerId = g_signal_connect(deInterleave, "pad-added", G_CALLBACK(onGStreamerDeinterleavePadAddedCallback), this);
    m_deinterleaveNoMorePadsHandlerId = g_signal_connect(deInterleave, "no-more-pads", G_CALLBACK(onGStreamerDeinterleaveReadyCallback), this);
    m_deinterleavePadRemovedHandlerId = g_signal_connect(deInterleave, "pad-removed", G_CALLBACK(onGStreamerDeinterleavePadRemovedCallback), this);

    GstCaps* caps = gst_caps_new_simple("audio/x-raw", "rate", G_TYPE_INT, static_cast<int>(gSampleBitRate),
        "channels", G_TYPE_INT, gNumberOfChannels,
        "format", G_TYPE_STRING, GST_AUDIO_NE(F32),
        "layout", G_TYPE_STRING, "interleaved", nullptr);

    g_object_set(capsFilter, "caps", caps, nullptr);
    gst_caps_unref(caps);

    gst_bin_add_many(GST_BIN(m_audioSinkBin.get()), audioQueue, audioConvert, audioResample, capsFilter, deInterleave, nullptr);

    GRefPtr<GstElement> audioTee = adoptGRef(gst_bin_get_by_name(GST_BIN(m_audioSinkBin.get()), "audioTee"));

    // Link a new src pad from the tee to queue ! audioconvert !
    // audioresample ! capsfilter ! deinterleave. Later on each
    // deinterleaved planar audio channel will be routed to an appsink for
    // data extraction and processing.
    gst_element_link_pads_full(audioTee.get(), "src_%u", audioQueue, "sink", GST_PAD_LINK_CHECK_NOTHING);
    gst_element_link_pads_full(audioQueue, "src", audioConvert, "sink", GST_PAD_LINK_CHECK_NOTHING);
    gst_element_link_pads_full(audioConvert, "src", audioResample, "sink", GST_PAD_LINK_CHECK_NOTHING);
    gst_element_link_pads_full(audioResample, "src", capsFilter, "sink", GST_PAD_LINK_CHECK_NOTHING);
    gst_element_link_pads_full(capsFilter, "src", deInterleave, "sink", GST_PAD_LINK_CHECK_NOTHING);

    gst_element_sync_state_with_parent(audioQueue);
    gst_element_sync_state_with_parent(audioConvert);
    gst_element_sync_state_with_parent(audioResample);
    gst_element_sync_state_with_parent(capsFilter);
    gst_element_sync_state_with_parent(deInterleave);
}

void AudioSourceProviderGStreamer::handleNewDeinterleavePad(GstPad* pad)
{
    m_deinterleaveSourcePads++;

    if (m_deinterleaveSourcePads > 2) {
        g_warning("The AudioSourceProvider supports only mono and stereo audio. Silencing this new channel.");
        GstElement* queue = gst_element_factory_make("queue", nullptr);
        GstElement* sink = gst_element_factory_make("fakesink", nullptr);
        g_object_set(sink, "async", FALSE, nullptr);
        gst_bin_add_many(GST_BIN(m_audioSinkBin.get()), queue, sink, nullptr);

        GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(queue, "sink"));
        gst_pad_link_full(pad, sinkPad.get(), GST_PAD_LINK_CHECK_NOTHING);

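        // Remember the downstream sink pad so handleRemovedDeinterleavePad()
        // can locate and tear this branch down later.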
        GQuark quark = g_quark_from_static_string("peer");
        g_object_set_qdata(G_OBJECT(pad), quark, sinkPad.get());
        gst_element_link_pads_full(queue, "src", sink, "sink", GST_PAD_LINK_CHECK_NOTHING);
        gst_element_sync_state_with_parent(queue);
        gst_element_sync_state_with_parent(sink);
        return;
    }

    // A new pad for a planar channel was added in deinterleave. Plug in an
    // appsink so we can pull the data from each channel. The pipeline looks
    // like:
    // ... deinterleave ! queue ! appsink.
    GstElement* queue = gst_element_factory_make("queue", nullptr);
    GstElement* sink = gst_element_factory_make("appsink", nullptr);

    // Zero-initialize the callbacks structure so any slots added by newer
    // GStreamer versions (and the reserved padding) are null rather than
    // uninitialized.
    GstAppSinkCallbacks callbacks = { };
    callbacks.new_sample = onAppsinkNewBufferCallback;
    gst_app_sink_set_callbacks(GST_APP_SINK(sink), &callbacks, this, nullptr);

    g_object_set(sink, "async", FALSE, nullptr);

    GRefPtr<GstCaps> caps = adoptGRef(gst_caps_new_simple("audio/x-raw", "rate", G_TYPE_INT, static_cast<int>(gSampleBitRate),
        "channels", G_TYPE_INT, 1,
        "format", G_TYPE_STRING, GST_AUDIO_NE(F32),
        "layout", G_TYPE_STRING, "interleaved", nullptr));

    gst_app_sink_set_caps(GST_APP_SINK(sink), caps.get());

    gst_bin_add_many(GST_BIN(m_audioSinkBin.get()), queue, sink, nullptr);

    GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(queue, "sink"));
    gst_pad_link_full(pad, sinkPad.get(), GST_PAD_LINK_CHECK_NOTHING);

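    // As above, stash the queue's sink pad on the deinterleave pad so the
    // branch can be found and removed when the pad goes away.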
    GQuark quark = g_quark_from_static_string("peer");
    g_object_set_qdata(G_OBJECT(pad), quark, sinkPad.get());

    gst_element_link_pads_full(queue, "src", sink, "sink", GST_PAD_LINK_CHECK_NOTHING);

    sinkPad = adoptGRef(gst_element_get_static_pad(sink, "sink"));
    gst_pad_add_probe(sinkPad.get(), GST_PAD_PROBE_TYPE_EVENT_FLUSH, onAppsinkFlushCallback, this, nullptr);

    gst_element_sync_state_with_parent(queue);
    gst_element_sync_state_with_parent(sink);
}

void AudioSourceProviderGStreamer::handleRemovedDeinterleavePad(GstPad* pad)
{
    m_deinterleaveSourcePads--;

    // Remove the queue ! appsink chain downstream of deinterleave.
    GQuark quark = g_quark_from_static_string("peer");
    GstPad* sinkPad = GST_PAD_CAST(g_object_get_qdata(G_OBJECT(pad), quark));
    if (!sinkPad)
        return;

    GRefPtr<GstElement> queue = adoptGRef(gst_pad_get_parent_element(sinkPad));
    GRefPtr<GstPad> queueSrcPad = adoptGRef(gst_element_get_static_pad(queue.get(), "src"));
    GRefPtr<GstPad> appsinkSinkPad = adoptGRef(gst_pad_get_peer(queueSrcPad.get()));
    GRefPtr<GstElement> sink = adoptGRef(gst_pad_get_parent_element(appsinkSinkPad.get()));
    gst_element_set_state(sink.get(), GST_STATE_NULL);
    gst_element_set_state(queue.get(), GST_STATE_NULL);
    gst_element_unlink(queue.get(), sink.get());
    gst_bin_remove_many(GST_BIN(m_audioSinkBin.get()), queue.get(), sink.get(), nullptr);
}

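// Called once deinterleave has exposed all of its source pads: notifies the
// client, on the main thread, of the channel count and sample rate to expect.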
void AudioSourceProviderGStreamer::deinterleavePadsConfigured()
{
    m_notifier->notify(MainThreadNotification::DeinterleavePadsConfigured, [this] {
        ASSERT(m_client);
        ASSERT(m_deinterleaveSourcePads == gNumberOfChannels);

        m_client->setFormat(m_deinterleaveSourcePads, gSampleBitRate);
    });
}

void AudioSourceProviderGStreamer::clearAdapters()
{
    auto locker = holdLock(m_adapterMutex);
    gst_adapter_clear(m_frontLeftAdapter);
    gst_adapter_clear(m_frontRightAdapter);
}

} // WebCore

#endif // ENABLE(WEB_AUDIO) && ENABLE(VIDEO) && USE(GSTREAMER)