/*
 * Copyright (C) 2018 Metrological Group B.V.
 * Author: Thibault Saunier <tsaunier@igalia.com>
 * Author: Alejandro G. Castro <alex@igalia.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB. If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#include "config.h"

#if ENABLE(VIDEO) && ENABLE(MEDIA_STREAM) && USE(LIBWEBRTC) && USE(GSTREAMER)
#include "GStreamerMediaStreamSource.h"

#include "AudioTrackPrivate.h"
#include "GStreamerAudioData.h"
#include "GStreamerCommon.h"
#include "GStreamerVideoCaptureSource.h"
#include "MediaSampleGStreamer.h"
#include "VideoTrackPrivate.h"

#include <gst/app/gstappsrc.h>
#include <gst/base/gstflowcombiner.h>

#if GST_CHECK_VERSION(1, 10, 0)

namespace WebCore {

static void webkitMediaStreamSrcPushVideoSample(WebKitMediaStreamSrc* self, GstSample* gstsample);
static void webkitMediaStreamSrcPushAudioSample(WebKitMediaStreamSrc* self, GstSample* gstsample);
static void webkitMediaStreamSrcTrackEnded(WebKitMediaStreamSrc* self, MediaStreamTrackPrivate&);
static void webkitMediaStreamSrcRemoveTrackByType(WebKitMediaStreamSrc* self, RealtimeMediaSource::Type trackType);

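/* Sometimes pad templates for the source pads this bin can expose: a video pad
 * carrying raw or encoded (H.264/VP8) video and an audio pad carrying raw audio
 * with any caps features. Ghost pads are added dynamically as tracks are attached. */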
static GstStaticPadTemplate videoSrcTemplate = GST_STATIC_PAD_TEMPLATE("video_src",
    GST_PAD_SRC,
    GST_PAD_SOMETIMES,
    GST_STATIC_CAPS("video/x-raw;video/x-h264;video/x-vp8"));

static GstStaticPadTemplate audioSrcTemplate = GST_STATIC_PAD_TEMPLATE("audio_src",
    GST_PAD_SRC,
    GST_PAD_SOMETIMES,
    GST_STATIC_CAPS("audio/x-raw(ANY);"));

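/* Builds a GstTagList describing a track: its label as the title, its kind
 * (main audio/video track) and, for video capture tracks, the capture width
 * and height. Ownership of the returned tag list is transferred to the caller. */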
static GstTagList* mediaStreamTrackPrivateGetTags(MediaStreamTrackPrivate* track)
{
    auto taglist = gst_tag_list_new_empty();

    if (!track->label().isEmpty()) {
        gst_tag_list_add(taglist, GST_TAG_MERGE_APPEND,
            GST_TAG_TITLE, track->label().utf8().data(), nullptr);
    }

    if (track->type() == RealtimeMediaSource::Type::Audio) {
        gst_tag_list_add(taglist, GST_TAG_MERGE_APPEND, WEBKIT_MEDIA_TRACK_TAG_KIND,
            static_cast<int>(AudioTrackPrivate::Kind::Main), nullptr);
    } else if (track->type() == RealtimeMediaSource::Type::Video) {
        gst_tag_list_add(taglist, GST_TAG_MERGE_APPEND, WEBKIT_MEDIA_TRACK_TAG_KIND,
            static_cast<int>(VideoTrackPrivate::Kind::Main), nullptr);

        if (track->isCaptureTrack()) {
            GStreamerVideoCaptureSource& source = static_cast<GStreamerVideoCaptureSource&>(
                track->source());

            gst_tag_list_add(taglist, GST_TAG_MERGE_APPEND,
                WEBKIT_MEDIA_TRACK_TAG_WIDTH, source.size().width(),
                WEBKIT_MEDIA_TRACK_TAG_HEIGHT, source.size().height(), nullptr);
        }
    }

    return taglist;
}

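/* Wraps a WebKit track into a GstStream (for use in a GstStreamCollection),
 * with caps matching the corresponding pad template and the tags built above. */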
GstStream* webkitMediaStreamNew(MediaStreamTrackPrivate* track)
{
    GRefPtr<GstCaps> caps;
    GstStreamType type;

    if (track->type() == RealtimeMediaSource::Type::Audio) {
        caps = adoptGRef(gst_static_pad_template_get_caps(&audioSrcTemplate));
        type = GST_STREAM_TYPE_AUDIO;
    } else if (track->type() == RealtimeMediaSource::Type::Video) {
        caps = adoptGRef(gst_static_pad_template_get_caps(&videoSrcTemplate));
        type = GST_STREAM_TYPE_VIDEO;
    } else {
        GST_FIXME("Handle %d type", static_cast<int>(track->type()));

        return nullptr;
    }

    auto gststream = (GstStream*)gst_stream_new(track->id().utf8().data(),
        caps.get(), type, GST_STREAM_FLAG_SELECT);
    auto tags = adoptGRef(mediaStreamTrackPrivateGetTags(track));
    gst_stream_set_tags(gststream, tags.get());

    return gststream;
}

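/* Observes a single MediaStreamTrackPrivate and forwards its media into the
 * element: video and audio samples are pushed into the corresponding appsrc,
 * and the end of the track is translated into an EOS on the matching pad. */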
class WebKitMediaStreamTrackObserver
    : public MediaStreamTrackPrivate::Observer {
public:
    virtual ~WebKitMediaStreamTrackObserver() { };
    WebKitMediaStreamTrackObserver(WebKitMediaStreamSrc* src)
        : m_mediaStreamSrc(src) { }
    void trackStarted(MediaStreamTrackPrivate&) final { };

    void trackEnded(MediaStreamTrackPrivate& track) final
    {
        webkitMediaStreamSrcTrackEnded(m_mediaStreamSrc, track);
    }

    void trackMutedChanged(MediaStreamTrackPrivate&) final { };
    void trackSettingsChanged(MediaStreamTrackPrivate&) final { };
    void trackEnabledChanged(MediaStreamTrackPrivate&) final { };
    void readyStateChanged(MediaStreamTrackPrivate&) final { };

    void sampleBufferUpdated(MediaStreamTrackPrivate&, MediaSample& sample) final
    {
        auto gstsample = static_cast<MediaSampleGStreamer*>(&sample)->platformSample().sample.gstSample;

        webkitMediaStreamSrcPushVideoSample(m_mediaStreamSrc, gstsample);
    }

    void audioSamplesAvailable(MediaStreamTrackPrivate&, const MediaTime&, const PlatformAudioData& audioData, const AudioStreamDescription&, size_t) final
    {
        auto audiodata = static_cast<const GStreamerAudioData&>(audioData);

        webkitMediaStreamSrcPushAudioSample(m_mediaStreamSrc, audiodata.getSample());
    }

private:
    WebKitMediaStreamSrc* m_mediaStreamSrc;
};

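/* Observes the MediaStreamPrivate itself, adding or removing sources in the
 * bin as tracks are added to or removed from the stream. */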
class WebKitMediaStreamObserver
    : public MediaStreamPrivate::Observer {
public:
    virtual ~WebKitMediaStreamObserver() { };
    WebKitMediaStreamObserver(WebKitMediaStreamSrc* src)
        : m_mediaStreamSrc(src) { }

    void characteristicsChanged() final { GST_DEBUG_OBJECT(m_mediaStreamSrc.get(), "renegotiation should happen"); }
    void activeStatusChanged() final { }

    void didAddTrack(MediaStreamTrackPrivate& track) final
    {
        webkitMediaStreamSrcAddTrack(m_mediaStreamSrc.get(), &track, false);
    }

    void didRemoveTrack(MediaStreamTrackPrivate& track) final
    {
        webkitMediaStreamSrcRemoveTrackByType(m_mediaStreamSrc.get(), track.type());
    }

private:
    GRefPtr<WebKitMediaStreamSrc> m_mediaStreamSrc;
};

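/* Instance structure of the element: a GstBin holding one appsrc per track
 * type, the observers defined above, and the flow combiner used to aggregate
 * flow returns from the exposed source pads. */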
typedef struct _WebKitMediaStreamSrcClass WebKitMediaStreamSrcClass;
struct _WebKitMediaStreamSrc {
    GstBin parent_instance;

    gchar* uri;

    GstElement* audioSrc;
    GstClockTime firstAudioBufferPts;
    GstElement* videoSrc;
    GstClockTime firstFramePts;

    std::unique_ptr<WebKitMediaStreamTrackObserver> mediaStreamTrackObserver;
    std::unique_ptr<WebKitMediaStreamObserver> mediaStreamObserver;
    volatile gint npads;
    RefPtr<MediaStreamPrivate> stream;
    RefPtr<MediaStreamTrackPrivate> track;

    GstFlowCombiner* flowCombiner;
    GRefPtr<GstStreamCollection> streamCollection;
};

struct _WebKitMediaStreamSrcClass {
    GstBinClass parent_class;
};

enum {
    PROP_0,
    PROP_IS_LIVE,
    PROP_LAST
};

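/* GstURIHandler implementation: the element registers the "mediastream"
 * protocol so it can be selected from a mediastream:// URI (e.g. by a
 * playbin-based pipeline). */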
static GstURIType webkit_media_stream_src_uri_get_type(GType)
{
    return GST_URI_SRC;
}

static const gchar* const* webkit_media_stream_src_uri_get_protocols(GType)
{
    static const gchar* protocols[] = { "mediastream", nullptr };

    return protocols;
}

static gchar* webkit_media_stream_src_uri_get_uri(GstURIHandler* handler)
{
    WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC(handler);

    /* FIXME: make thread-safe */
    return g_strdup(self->uri);
}

static gboolean webkitMediaStreamSrcUriSetUri(GstURIHandler* handler, const gchar* uri,
    GError**)
{
    WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC(handler);
    self->uri = g_strdup(uri);

    return TRUE;
}

static void webkitMediaStreamSrcUriHandlerInit(gpointer g_iface, gpointer)
{
    GstURIHandlerInterface* iface = (GstURIHandlerInterface*)g_iface;

    iface->get_type = webkit_media_stream_src_uri_get_type;
    iface->get_protocols = webkit_media_stream_src_uri_get_protocols;
    iface->get_uri = webkit_media_stream_src_uri_get_uri;
    iface->set_uri = webkitMediaStreamSrcUriSetUri;
}

GST_DEBUG_CATEGORY_STATIC(webkitMediaStreamSrcDebug);
#define GST_CAT_DEFAULT webkitMediaStreamSrcDebug

#define doInit \
    G_IMPLEMENT_INTERFACE(GST_TYPE_URI_HANDLER, webkitMediaStreamSrcUriHandlerInit); \
    GST_DEBUG_CATEGORY_INIT(webkitMediaStreamSrcDebug, "webkitwebmediastreamsrc", 0, "mediastreamsrc element"); \
    gst_tag_register_static(WEBKIT_MEDIA_TRACK_TAG_WIDTH, GST_TAG_FLAG_META, G_TYPE_INT, "Webkit MediaStream width", "Webkit MediaStream width", gst_tag_merge_use_first); \
    gst_tag_register_static(WEBKIT_MEDIA_TRACK_TAG_HEIGHT, GST_TAG_FLAG_META, G_TYPE_INT, "Webkit MediaStream height", "Webkit MediaStream height", gst_tag_merge_use_first); \
    gst_tag_register_static(WEBKIT_MEDIA_TRACK_TAG_KIND, GST_TAG_FLAG_META, G_TYPE_INT, "Webkit MediaStream Kind", "Webkit MediaStream Kind", gst_tag_merge_use_first);

G_DEFINE_TYPE_WITH_CODE(WebKitMediaStreamSrc, webkit_media_stream_src, GST_TYPE_BIN, doInit);

static void webkitMediaStreamSrcSetProperty(GObject* object, guint prop_id,
    const GValue*, GParamSpec* pspec)
{
    switch (prop_id) {
    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        break;
    }
}

static void webkitMediaStreamSrcGetProperty(GObject* object, guint prop_id, GValue* value,
    GParamSpec* pspec)
{
    switch (prop_id) {
    case PROP_IS_LIVE:
        g_value_set_boolean(value, TRUE);
        break;
    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        break;
    }
}

static void webkitMediaStreamSrcDispose(GObject* object)
{
    WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC(object);

    if (self->audioSrc) {
        gst_bin_remove(GST_BIN(self), self->audioSrc);
        self->audioSrc = nullptr;
    }

    if (self->videoSrc) {
        gst_bin_remove(GST_BIN(self), self->videoSrc);
        self->videoSrc = nullptr;
    }

    // Chain up so that the parent classes can release their own resources.
    G_OBJECT_CLASS(webkit_media_stream_src_parent_class)->dispose(object);
}

static void webkitMediaStreamSrcFinalize(GObject* object)
{
    WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC(object);

    GST_OBJECT_LOCK(self);
    if (self->stream) {
        for (auto& track : self->stream->tracks())
            track->removeObserver(*self->mediaStreamTrackObserver.get());

        self->stream->removeObserver(*self->mediaStreamObserver);
        self->stream = nullptr;
    }
    GST_OBJECT_UNLOCK(self);

    g_clear_pointer(&self->uri, g_free);
    gst_flow_combiner_free(self->flowCombiner);

    // Chain up so that the parent classes can finalize their own fields.
    G_OBJECT_CLASS(webkit_media_stream_src_parent_class)->finalize(object);
}

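/* Live sources do not preroll: track observers are detached when leaving
 * PAUSED, and READY->PAUSED reports GST_STATE_CHANGE_NO_PREROLL so downstream
 * does not wait for a preroll buffer. */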
static GstStateChangeReturn webkitMediaStreamSrcChangeState(GstElement* element, GstStateChange transition)
{
    GstStateChangeReturn result;
    auto* self = WEBKIT_MEDIA_STREAM_SRC(element);

    if (transition == GST_STATE_CHANGE_PAUSED_TO_READY) {
        GST_OBJECT_LOCK(self);
        if (self->stream) {
            for (auto& track : self->stream->tracks())
                track->removeObserver(*self->mediaStreamTrackObserver.get());
        } else if (self->track)
            self->track->removeObserver(*self->mediaStreamTrackObserver.get());
        GST_OBJECT_UNLOCK(self);
    }

    result = GST_ELEMENT_CLASS(webkit_media_stream_src_parent_class)->change_state(element, transition);

    if (transition == GST_STATE_CHANGE_READY_TO_PAUSED)
        result = GST_STATE_CHANGE_NO_PREROLL;

    return result;
}

static void webkit_media_stream_src_class_init(WebKitMediaStreamSrcClass* klass)
{
    GObjectClass* gobject_class = G_OBJECT_CLASS(klass);
    GstElementClass* gstelement_klass = GST_ELEMENT_CLASS(klass);

    gobject_class->finalize = webkitMediaStreamSrcFinalize;
    gobject_class->dispose = webkitMediaStreamSrcDispose;
    gobject_class->get_property = webkitMediaStreamSrcGetProperty;
    gobject_class->set_property = webkitMediaStreamSrcSetProperty;

    g_object_class_install_property(gobject_class, PROP_IS_LIVE,
        g_param_spec_boolean("is-live", "Is Live",
            "Let playbin3 know we are a live source.",
            TRUE, (GParamFlags)(G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));

    gstelement_klass->change_state = webkitMediaStreamSrcChangeState;
    gst_element_class_add_pad_template(gstelement_klass,
        gst_static_pad_template_get(&videoSrcTemplate));
    gst_element_class_add_pad_template(gstelement_klass,
        gst_static_pad_template_get(&audioSrcTemplate));
}

static void webkit_media_stream_src_init(WebKitMediaStreamSrc* self)
{
    self->mediaStreamTrackObserver = std::make_unique<WebKitMediaStreamTrackObserver>(self);
    self->mediaStreamObserver = std::make_unique<WebKitMediaStreamObserver>(self);
    self->flowCombiner = gst_flow_combiner_new();
    self->firstAudioBufferPts = GST_CLOCK_TIME_NONE;
    self->firstFramePts = GST_CLOCK_TIME_NONE;
}

typedef struct {
    WebKitMediaStreamSrc* self;
    RefPtr<MediaStreamTrackPrivate> track;
    GstStaticPadTemplate* pad_template;
} ProbeData;

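/* Chain function installed on the internal proxy pads: flow returns from the
 * exposed source pads are combined so that a single flushing pad does not
 * shut down the whole element. */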
static GstFlowReturn webkitMediaStreamSrcChain(GstPad* pad, GstObject* parent, GstBuffer* buffer)
{
    GstFlowReturn result, chain_result;
    GRefPtr<WebKitMediaStreamSrc> self = adoptGRef(WEBKIT_MEDIA_STREAM_SRC(gst_object_get_parent(parent)));

    chain_result = gst_proxy_pad_chain_default(pad, GST_OBJECT(self.get()), buffer);
    result = gst_flow_combiner_update_pad_flow(self.get()->flowCombiner, pad, chain_result);

    if (result == GST_FLOW_FLUSHING)
        return chain_result;

    return result;
}

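/* Exposes a new ghost pad (named "src_%u") on the bin, targeting the given
 * appsrc source pad, and registers its proxy pad with the flow combiner. */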
static void webkitMediaStreamSrcAddPad(WebKitMediaStreamSrc* self, GstPad* target, GstStaticPadTemplate* pad_template)
{
    auto padname = makeString("src_", g_atomic_int_add(&(self->npads), 1));
    auto ghostpad = gst_ghost_pad_new_from_template(padname.utf8().data(), target,
        gst_static_pad_template_get(pad_template));

    GST_DEBUG_OBJECT(self, "%s Ghosting %" GST_PTR_FORMAT,
        gst_object_get_path_string(GST_OBJECT_CAST(self)),
        target);

    auto proxypad = adoptGRef(GST_PAD(gst_proxy_pad_get_internal(GST_PROXY_PAD(ghostpad))));
    gst_pad_set_active(ghostpad, TRUE);
    if (!gst_element_add_pad(GST_ELEMENT(self), GST_PAD(ghostpad))) {
        GST_ERROR_OBJECT(self, "Could not add pad %s:%s", GST_DEBUG_PAD_NAME(ghostpad));
        ASSERT_NOT_REACHED();

        return;
    }

    gst_flow_combiner_add_pad(self->flowCombiner, proxypad.get());
    gst_pad_set_chain_function(proxypad.get(),
        static_cast<GstPadChainFunction>(webkitMediaStreamSrcChain));
}

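/* Downstream event probe installed on each appsrc source pad. The initial
 * STREAM_START event is replaced by one carrying the WebKit track id as
 * stream id, followed by the track tags, and the ghost pad is only exposed on
 * the bin at that point. */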
static GstPadProbeReturn webkitMediaStreamSrcPadProbeCb(GstPad* pad, GstPadProbeInfo* info, ProbeData* data)
{
    GstEvent* event = GST_PAD_PROBE_INFO_EVENT(info);
    WebKitMediaStreamSrc* self = data->self;

    switch (GST_EVENT_TYPE(event)) {
    case GST_EVENT_STREAM_START: {
        const gchar* stream_id;

        gst_event_parse_stream_start(event, &stream_id);
        if (!g_strcmp0(stream_id, data->track->id().utf8().data())) {
            GST_INFO_OBJECT(pad, "Stream start event for this track is already sticky");
            return GST_PAD_PROBE_OK;
        }

        auto stream_start = gst_event_new_stream_start(data->track->id().utf8().data());
        gst_event_set_group_id(stream_start, 1);
        gst_event_unref(event);

        gst_pad_push_event(pad, stream_start);
        gst_pad_push_event(pad, gst_event_new_tag(mediaStreamTrackPrivateGetTags(data->track.get())));

        webkitMediaStreamSrcAddPad(self, pad, data->pad_template);

        return GST_PAD_PROBE_HANDLED;
    }
    default:
        break;
    }

    return GST_PAD_PROBE_OK;
}

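/* Adds a source element for a track to the bin. When the track is part of a
 * full stream, pad exposure is deferred to the stream-start probe above; when
 * the element wraps a single track (onlyTrack), the ghost pad is added right away. */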
static gboolean webkitMediaStreamSrcSetupSrc(WebKitMediaStreamSrc* self,
    MediaStreamTrackPrivate* track, GstElement* element,
    GstStaticPadTemplate* pad_template, gboolean observe_track,
    bool onlyTrack)
{
    auto pad = adoptGRef(gst_element_get_static_pad(element, "src"));

    gst_bin_add(GST_BIN(self), element);

    if (!onlyTrack) {
        ProbeData* data = new ProbeData;
        data->self = WEBKIT_MEDIA_STREAM_SRC(self);
        data->pad_template = pad_template;
        data->track = track;

        gst_pad_add_probe(pad.get(), (GstPadProbeType)GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
            (GstPadProbeCallback)webkitMediaStreamSrcPadProbeCb, data,
            [](gpointer data) {
                delete (ProbeData*)data;
            });
    } else
        webkitMediaStreamSrcAddPad(self, pad.get(), pad_template);

    if (observe_track)
        track->addObserver(*self->mediaStreamTrackObserver.get());

    gst_element_sync_state_with_parent(element);
    return TRUE;
}

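/* Creates the appsrc feeding a track: it is configured as a live source
 * operating in GST_FORMAT_TIME, so the samples pushed from the track observer
 * keep their original timestamps. */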
static gboolean webkitMediaStreamSrcSetupAppSrc(WebKitMediaStreamSrc* self,
    MediaStreamTrackPrivate* track, GstElement** element,
    GstStaticPadTemplate* pad_template, bool onlyTrack)
{
    *element = gst_element_factory_make("appsrc", nullptr);
    g_object_set(*element, "is-live", true, "format", GST_FORMAT_TIME, nullptr);

    return webkitMediaStreamSrcSetupSrc(self, track, *element, pad_template, TRUE, onlyTrack);
}

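/* Publishes a GstStreamCollection describing the tracks of the stream, so
 * that stream-aware pipelines (playbin3) can select among them. */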
static void webkitMediaStreamSrcPostStreamCollection(WebKitMediaStreamSrc* self, MediaStreamPrivate* stream)
{
    GST_OBJECT_LOCK(self);
    self->streamCollection = adoptGRef(gst_stream_collection_new(stream->id().utf8().data()));
    for (auto& track : stream->tracks()) {
        auto gststream = webkitMediaStreamNew(track.get());

        gst_stream_collection_add_stream(self->streamCollection.get(), gststream);
    }
    GST_OBJECT_UNLOCK(self);

    gst_element_post_message(GST_ELEMENT(self),
        gst_message_new_stream_collection(GST_OBJECT(self), self->streamCollection.get()));
}

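/* Adds a source for a single track, either as part of a stream set through
 * webkitMediaStreamSrcSetStream() or standalone (onlyTrack == true). */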
bool webkitMediaStreamSrcAddTrack(WebKitMediaStreamSrc* self, MediaStreamTrackPrivate* track, bool onlyTrack)
{
    bool res = false;
    if (track->type() == RealtimeMediaSource::Type::Audio)
        res = webkitMediaStreamSrcSetupAppSrc(self, track, &self->audioSrc, &audioSrcTemplate, onlyTrack);
    else if (track->type() == RealtimeMediaSource::Type::Video)
        res = webkitMediaStreamSrcSetupAppSrc(self, track, &self->videoSrc, &videoSrcTemplate, onlyTrack);
    else
        GST_INFO("Unsupported track type: %d", static_cast<int>(track->type()));

    if (onlyTrack && res)
        self->track = track;

    return res;
}

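/* Tears down the source element associated with a track type: the appsrc is
 * set to the NULL state and removed from the bin. */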
static void webkitMediaStreamSrcRemoveTrackByType(WebKitMediaStreamSrc* self, RealtimeMediaSource::Type trackType)
{
    if (trackType == RealtimeMediaSource::Type::Audio) {
        if (self->audioSrc) {
            gst_element_set_state(self->audioSrc, GST_STATE_NULL);
            gst_bin_remove(GST_BIN(self), self->audioSrc);
            self->audioSrc = nullptr;
        }
    } else if (trackType == RealtimeMediaSource::Type::Video) {
        if (self->videoSrc) {
            gst_element_set_state(self->videoSrc, GST_STATE_NULL);
            gst_bin_remove(GST_BIN(self), self->videoSrc);
            self->videoSrc = nullptr;
        }
    } else
        GST_INFO("Unsupported track type: %d", static_cast<int>(trackType));
}

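/* Attaches a full MediaStreamPrivate to the element: any previous per-type
 * sources are dropped, the stream collection is posted, and a source is
 * created for each track of the stream. */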
bool webkitMediaStreamSrcSetStream(WebKitMediaStreamSrc* self, MediaStreamPrivate* stream)
{
    ASSERT(WEBKIT_IS_MEDIA_STREAM_SRC(self));

    webkitMediaStreamSrcRemoveTrackByType(self, RealtimeMediaSource::Type::Audio);
    webkitMediaStreamSrcRemoveTrackByType(self, RealtimeMediaSource::Type::Video);

    webkitMediaStreamSrcPostStreamCollection(self, stream);

    self->stream = stream;
    self->stream->addObserver(*self->mediaStreamObserver.get());
    for (auto& track : stream->tracks())
        webkitMediaStreamSrcAddTrack(self, track.get(), false);

    return true;
}

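/* Sample push helpers. The PTS of the first buffer for each appsrc is recorded
 * and applied as a negative pad offset, so that the stream exposed downstream
 * starts at running time zero regardless of the capture clock. */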
static void webkitMediaStreamSrcPushVideoSample(WebKitMediaStreamSrc* self, GstSample* gstsample)
{
    if (self->videoSrc) {
        if (!GST_CLOCK_TIME_IS_VALID(self->firstFramePts)) {
            auto buffer = gst_sample_get_buffer(gstsample);

            self->firstFramePts = GST_BUFFER_PTS(buffer);
            auto pad = adoptGRef(gst_element_get_static_pad(self->videoSrc, "src"));
            gst_pad_set_offset(pad.get(), -self->firstFramePts);
        }

        gst_app_src_push_sample(GST_APP_SRC(self->videoSrc), gstsample);
    }
}

static void webkitMediaStreamSrcPushAudioSample(WebKitMediaStreamSrc* self, GstSample* gstsample)
{
    if (self->audioSrc) {
        if (!GST_CLOCK_TIME_IS_VALID(self->firstAudioBufferPts)) {
            auto buffer = gst_sample_get_buffer(gstsample);

            self->firstAudioBufferPts = GST_BUFFER_PTS(buffer);
            auto pad = adoptGRef(gst_element_get_static_pad(self->audioSrc, "src"));
            gst_pad_set_offset(pad.get(), -self->firstAudioBufferPts);
        }
        gst_app_src_push_sample(GST_APP_SRC(self->audioSrc), gstsample);
    }
}

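/* Handles the end of a track: the source pad whose sticky stream-start event
 * carries the track id is looked up, the stream collection is re-posted (so
 * that video.videoWidth gets reset to 0), updated tags are pushed and the pad
 * is sent EOS. */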
static void webkitMediaStreamSrcTrackEnded(WebKitMediaStreamSrc* self,
    MediaStreamTrackPrivate& track)
{
    GRefPtr<GstPad> pad = nullptr;

    GST_OBJECT_LOCK(self);
    for (auto tmp = GST_ELEMENT(self)->srcpads; tmp; tmp = tmp->next) {
        GstPad* tmppad = GST_PAD(tmp->data);
        const gchar* stream_id;

        GstEvent* stream_start = gst_pad_get_sticky_event(tmppad, GST_EVENT_STREAM_START, 0);
        if (!stream_start)
            continue;

        gst_event_parse_stream_start(stream_start, &stream_id);
        if (String(stream_id) == track.id()) {
            pad = tmppad;
            break;
        }
    }
    GST_OBJECT_UNLOCK(self);

    if (!pad) {
        GST_ERROR_OBJECT(self, "No pad found for %s", track.id().utf8().data());

        return;
    }

    // Make sure that the video.videoWidth is reset to 0.
    webkitMediaStreamSrcPostStreamCollection(self, self->stream.get());
    auto tags = mediaStreamTrackPrivateGetTags(&track);
    gst_pad_push_event(pad.get(), gst_event_new_tag(tags));
    gst_pad_push_event(pad.get(), gst_event_new_eos());
}

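/* Factory helper creating an instance of the element. */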
GstElement* webkitMediaStreamSrcNew(void)
{
    return GST_ELEMENT(g_object_new(webkit_media_stream_src_get_type(), nullptr));
}

} // namespace WebCore

#endif // GST_CHECK_VERSION(1, 10, 0)
#endif // ENABLE(VIDEO) && ENABLE(MEDIA_STREAM) && USE(LIBWEBRTC) && USE(GSTREAMER)
619