1 | /* |
2 | * Copyright (C) 2014, 2015 Sebastian Dröge <sebastian@centricular.com> |
3 | * Copyright (C) 2016 Metrological Group B.V. |
4 | * Copyright (C) 2016 Igalia S.L |
5 | * |
6 | * This library is free software; you can redistribute it and/or |
7 | * modify it under the terms of the GNU Library General Public |
8 | * License as published by the Free Software Foundation; either |
9 | * version 2 of the License, or (at your option) any later version. |
10 | * |
11 | * This library is distributed in the hope that it will be useful, |
12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
14 | * Library General Public License for more details. |
15 | * |
16 | * You should have received a copy of the GNU Library General Public License |
 * along with this library; see the file COPYING.LIB. If not, write to
18 | * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, |
19 | * Boston, MA 02110-1301, USA. |
20 | */ |
21 | |
22 | #include "config.h" |
23 | #include "PlaybackPipeline.h" |
24 | |
25 | #if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE) |
26 | |
27 | #include "AudioTrackPrivateGStreamer.h" |
28 | #include "GStreamerCommon.h" |
29 | #include "MediaSampleGStreamer.h" |
30 | #include "MediaSample.h" |
31 | #include "SourceBufferPrivateGStreamer.h" |
32 | #include "VideoTrackPrivateGStreamer.h" |
33 | |
34 | #include <gst/app/gstappsrc.h> |
35 | #include <gst/gst.h> |
36 | #include <wtf/MainThread.h> |
37 | #include <wtf/RefCounted.h> |
38 | #include <wtf/glib/GRefPtr.h> |
39 | #include <wtf/glib/GUniquePtr.h> |
40 | #include <wtf/text/AtomicString.h> |
41 | |
42 | GST_DEBUG_CATEGORY_EXTERN(webkit_mse_debug); |
43 | #define GST_CAT_DEFAULT webkit_mse_debug |
44 | |
45 | static Stream* getStreamByTrackId(WebKitMediaSrc*, AtomicString); |
46 | static Stream* getStreamBySourceBufferPrivate(WebKitMediaSrc*, WebCore::SourceBufferPrivateGStreamer*); |
47 | |
48 | static Stream* getStreamByTrackId(WebKitMediaSrc* source, AtomicString trackIdString) |
49 | { |
50 | // WebKitMediaSrc should be locked at this point. |
51 | for (Stream* stream : source->priv->streams) { |
52 | if (stream->type != WebCore::Invalid |
53 | && ((stream->audioTrack && stream->audioTrack->id() == trackIdString) |
            || (stream->videoTrack && stream->videoTrack->id() == trackIdString))) {
55 | return stream; |
56 | } |
57 | } |
58 | return nullptr; |
59 | } |
60 | |
61 | static Stream* getStreamBySourceBufferPrivate(WebKitMediaSrc* source, WebCore::SourceBufferPrivateGStreamer* sourceBufferPrivate) |
62 | { |
63 | for (Stream* stream : source->priv->streams) { |
64 | if (stream->sourceBuffer == sourceBufferPrivate) |
65 | return stream; |
66 | } |
67 | return nullptr; |
68 | } |
69 | |
70 | // FIXME: Use gst_app_src_push_sample() instead when we switch to the appropriate GStreamer version. |
71 | static GstFlowReturn pushSample(GstAppSrc* appsrc, GstSample* sample) |
72 | { |
73 | g_return_val_if_fail(GST_IS_SAMPLE(sample), GST_FLOW_ERROR); |
74 | |
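    // Setting the caps on the appsrc before pushing the buffer makes appsrc emit a CAPS event first,
    // so downstream can reconfigure if the format changes between appended samples.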
75 | GstCaps* caps = gst_sample_get_caps(sample); |
76 | if (caps) |
77 | gst_app_src_set_caps(appsrc, caps); |
78 | else |
79 | GST_WARNING_OBJECT(appsrc, "received sample without caps" ); |
80 | |
81 | GstBuffer* buffer = gst_sample_get_buffer(sample); |
82 | if (UNLIKELY(!buffer)) { |
83 | GST_WARNING_OBJECT(appsrc, "received sample without buffer" ); |
84 | return GST_FLOW_OK; |
85 | } |
86 | |
87 | // gst_app_src_push_buffer() steals the reference, we need an additional one. |
88 | return gst_app_src_push_buffer(appsrc, gst_buffer_ref(buffer)); |
89 | } |
90 | |
91 | namespace WebCore { |
92 | |
93 | void PlaybackPipeline::setWebKitMediaSrc(WebKitMediaSrc* webKitMediaSrc) |
94 | { |
95 | GST_DEBUG("webKitMediaSrc=%p" , webKitMediaSrc); |
96 | m_webKitMediaSrc = webKitMediaSrc; |
97 | } |
98 | |
99 | WebKitMediaSrc* PlaybackPipeline::webKitMediaSrc() |
100 | { |
101 | return m_webKitMediaSrc.get(); |
102 | } |
103 | |
104 | MediaSourcePrivate::AddStatus PlaybackPipeline::addSourceBuffer(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate) |
105 | { |
106 | WebKitMediaSrcPrivate* priv = m_webKitMediaSrc->priv; |
107 | |
108 | if (priv->allTracksConfigured) { |
109 | GST_ERROR_OBJECT(m_webKitMediaSrc.get(), "Adding new source buffers after first data not supported yet" ); |
110 | return MediaSourcePrivate::NotSupported; |
111 | } |
112 | |
113 | GST_DEBUG_OBJECT(m_webKitMediaSrc.get(), "State %d" , int(GST_STATE(m_webKitMediaSrc.get()))); |
114 | |
115 | Stream* stream = new Stream{ }; |
116 | stream->parent = m_webKitMediaSrc.get(); |
117 | stream->appsrc = gst_element_factory_make("appsrc" , nullptr); |
118 | stream->appsrcNeedDataFlag = false; |
119 | stream->sourceBuffer = sourceBufferPrivate.get(); |
120 | |
121 | // No track has been attached yet. |
122 | stream->type = Invalid; |
123 | stream->caps = nullptr; |
124 | stream->audioTrack = nullptr; |
125 | stream->videoTrack = nullptr; |
126 | stream->presentationSize = WebCore::FloatSize(); |
127 | stream->lastEnqueuedTime = MediaTime::invalidTime(); |
128 | |
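    // Hook this appsrc up to the callbacks provided by the WebKitMediaSrc element (need-data,
    // enough-data and seek-data notifications), passing the parent element as user data.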
129 | gst_app_src_set_callbacks(GST_APP_SRC(stream->appsrc), &enabledAppsrcCallbacks, stream->parent, nullptr); |
130 | gst_app_src_set_emit_signals(GST_APP_SRC(stream->appsrc), FALSE); |
131 | gst_app_src_set_stream_type(GST_APP_SRC(stream->appsrc), GST_APP_STREAM_TYPE_SEEKABLE); |
132 | |
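    // Limit the appsrc internal queue to 2 MB, emit need-data when the queued data drops below 20% of
    // that limit, never block the pushing thread when the queue is full, and operate in time format.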
133 | gst_app_src_set_max_bytes(GST_APP_SRC(stream->appsrc), 2 * WTF::MB); |
134 | g_object_set(G_OBJECT(stream->appsrc), "block" , FALSE, "min-percent" , 20, "format" , GST_FORMAT_TIME, nullptr); |
135 | |
136 | GST_OBJECT_LOCK(m_webKitMediaSrc.get()); |
137 | priv->streams.append(stream); |
138 | GST_OBJECT_UNLOCK(m_webKitMediaSrc.get()); |
139 | |
140 | gst_bin_add(GST_BIN(m_webKitMediaSrc.get()), stream->appsrc); |
141 | gst_element_sync_state_with_parent(stream->appsrc); |
142 | |
143 | return MediaSourcePrivate::Ok; |
144 | } |
145 | |
146 | void PlaybackPipeline::removeSourceBuffer(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate) |
147 | { |
148 | ASSERT(WTF::isMainThread()); |
149 | |
150 | GST_DEBUG_OBJECT(m_webKitMediaSrc.get(), "Element removed from MediaSource" ); |
151 | GST_OBJECT_LOCK(m_webKitMediaSrc.get()); |
152 | WebKitMediaSrcPrivate* priv = m_webKitMediaSrc->priv; |
153 | Stream* stream = getStreamBySourceBufferPrivate(m_webKitMediaSrc.get(), sourceBufferPrivate.get()); |
154 | if (stream) |
155 | priv->streams.removeFirst(stream); |
156 | GST_OBJECT_UNLOCK(m_webKitMediaSrc.get()); |
157 | |
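    // Tear the stream down without holding the object lock; freeing it may remove the appsrc from the
    // bin and release the track references.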
158 | if (stream) |
159 | webKitMediaSrcFreeStream(m_webKitMediaSrc.get(), stream); |
160 | } |
161 | |
162 | void PlaybackPipeline::attachTrack(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate, RefPtr<TrackPrivateBase> trackPrivate, GstCaps* caps) |
163 | { |
164 | WebKitMediaSrc* webKitMediaSrc = m_webKitMediaSrc.get(); |
165 | |
166 | GST_OBJECT_LOCK(webKitMediaSrc); |
167 | Stream* stream = getStreamBySourceBufferPrivate(webKitMediaSrc, sourceBufferPrivate.get()); |
168 | GST_OBJECT_UNLOCK(webKitMediaSrc); |
169 | |
170 | ASSERT(stream); |
171 | |
172 | GST_OBJECT_LOCK(webKitMediaSrc); |
173 | unsigned padId = stream->parent->priv->numberOfPads; |
174 | stream->parent->priv->numberOfPads++; |
175 | GST_OBJECT_UNLOCK(webKitMediaSrc); |
176 | |
177 | const char* mediaType = capsMediaType(caps); |
178 | GST_DEBUG_OBJECT(webKitMediaSrc, "Configured track %s: appsrc=%s, padId=%u, mediaType=%s" , trackPrivate->id().string().utf8().data(), GST_ELEMENT_NAME(stream->appsrc), padId, mediaType); |
179 | |
180 | GST_OBJECT_LOCK(webKitMediaSrc); |
181 | stream->type = Unknown; |
182 | GST_OBJECT_UNLOCK(webKitMediaSrc); |
183 | |
    GRefPtr<GstPad> sourcePad = adoptGRef(gst_element_get_static_pad(stream->appsrc, "src"));
185 | ASSERT(sourcePad); |
186 | |
187 | // FIXME: Is padId the best way to identify the Stream? What about trackId? |
188 | g_object_set_data(G_OBJECT(sourcePad.get()), "padId" , GINT_TO_POINTER(padId)); |
189 | webKitMediaSrcLinkSourcePad(sourcePad.get(), caps, stream); |
190 | |
191 | ASSERT(stream->parent->priv->mediaPlayerPrivate); |
192 | int signal = -1; |
193 | |
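    // Classify the stream from the caps media type and pick the signal announcing that the
    // corresponding set of streams changed.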
194 | GST_OBJECT_LOCK(webKitMediaSrc); |
195 | if (doCapsHaveType(caps, GST_AUDIO_CAPS_TYPE_PREFIX)) { |
196 | stream->type = Audio; |
197 | stream->parent->priv->numberOfAudioStreams++; |
198 | signal = SIGNAL_AUDIO_CHANGED; |
199 | stream->audioTrack = RefPtr<WebCore::AudioTrackPrivateGStreamer>(static_cast<WebCore::AudioTrackPrivateGStreamer*>(trackPrivate.get())); |
200 | } else if (doCapsHaveType(caps, GST_VIDEO_CAPS_TYPE_PREFIX)) { |
201 | stream->type = Video; |
202 | stream->parent->priv->numberOfVideoStreams++; |
203 | signal = SIGNAL_VIDEO_CHANGED; |
204 | stream->videoTrack = RefPtr<WebCore::VideoTrackPrivateGStreamer>(static_cast<WebCore::VideoTrackPrivateGStreamer*>(trackPrivate.get())); |
205 | } else if (doCapsHaveType(caps, GST_TEXT_CAPS_TYPE_PREFIX)) { |
206 | stream->type = Text; |
207 | stream->parent->priv->numberOfTextStreams++; |
208 | signal = SIGNAL_TEXT_CHANGED; |
209 | |
210 | // FIXME: Support text tracks. |
211 | } |
212 | GST_OBJECT_UNLOCK(webKitMediaSrc); |
213 | |
214 | if (signal != -1) |
215 | g_signal_emit(G_OBJECT(stream->parent), webKitMediaSrcSignals[signal], 0, nullptr); |
216 | } |
217 | |
218 | void PlaybackPipeline::reattachTrack(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate, RefPtr<TrackPrivateBase> trackPrivate, GstCaps* caps) |
219 | { |
220 | GST_DEBUG("Re-attaching track" ); |
221 | |
    // FIXME: Maybe remove this method. The caps change is now handled by pushSample() in enqueueSample()
    // and flushAndEnqueueNonDisplayingSamples().
224 | |
225 | WebKitMediaSrc* webKitMediaSrc = m_webKitMediaSrc.get(); |
226 | |
227 | GST_OBJECT_LOCK(webKitMediaSrc); |
228 | Stream* stream = getStreamBySourceBufferPrivate(webKitMediaSrc, sourceBufferPrivate.get()); |
229 | GST_OBJECT_UNLOCK(webKitMediaSrc); |
230 | |
231 | ASSERT(stream && stream->type != Invalid); |
232 | |
233 | int signal = -1; |
234 | |
235 | GST_OBJECT_LOCK(webKitMediaSrc); |
236 | if (doCapsHaveType(caps, GST_AUDIO_CAPS_TYPE_PREFIX)) { |
237 | ASSERT(stream->type == Audio); |
238 | signal = SIGNAL_AUDIO_CHANGED; |
239 | stream->audioTrack = RefPtr<WebCore::AudioTrackPrivateGStreamer>(static_cast<WebCore::AudioTrackPrivateGStreamer*>(trackPrivate.get())); |
240 | } else if (doCapsHaveType(caps, GST_VIDEO_CAPS_TYPE_PREFIX)) { |
241 | ASSERT(stream->type == Video); |
242 | signal = SIGNAL_VIDEO_CHANGED; |
243 | stream->videoTrack = RefPtr<WebCore::VideoTrackPrivateGStreamer>(static_cast<WebCore::VideoTrackPrivateGStreamer*>(trackPrivate.get())); |
244 | } else if (doCapsHaveType(caps, GST_TEXT_CAPS_TYPE_PREFIX)) { |
245 | ASSERT(stream->type == Text); |
246 | signal = SIGNAL_TEXT_CHANGED; |
247 | |
248 | // FIXME: Support text tracks. |
249 | } |
250 | GST_OBJECT_UNLOCK(webKitMediaSrc); |
251 | |
252 | if (signal != -1) |
253 | g_signal_emit(G_OBJECT(stream->parent), webKitMediaSrcSignals[signal], 0, nullptr); |
254 | } |
255 | |
256 | void PlaybackPipeline::notifyDurationChanged() |
257 | { |
258 | gst_element_post_message(GST_ELEMENT(m_webKitMediaSrc.get()), gst_message_new_duration_changed(GST_OBJECT(m_webKitMediaSrc.get()))); |
259 | // WebKitMediaSrc will ask MediaPlayerPrivateGStreamerMSE for the new duration later, when somebody asks for it. |
260 | } |
261 | |
262 | void PlaybackPipeline::markEndOfStream(MediaSourcePrivate::EndOfStreamStatus) |
263 | { |
264 | WebKitMediaSrcPrivate* priv = m_webKitMediaSrc->priv; |
265 | |
266 | GST_DEBUG_OBJECT(m_webKitMediaSrc.get(), "Have EOS" ); |
267 | |
268 | GST_OBJECT_LOCK(m_webKitMediaSrc.get()); |
269 | bool allTracksConfigured = priv->allTracksConfigured; |
270 | if (!allTracksConfigured) |
271 | priv->allTracksConfigured = true; |
272 | GST_OBJECT_UNLOCK(m_webKitMediaSrc.get()); |
273 | |
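    // If end of stream is reached before every track has been configured, no more pads will appear:
    // announce that and complete the pending async state change.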
274 | if (!allTracksConfigured) { |
275 | gst_element_no_more_pads(GST_ELEMENT(m_webKitMediaSrc.get())); |
276 | webKitMediaSrcDoAsyncDone(m_webKitMediaSrc.get()); |
277 | } |
278 | |
279 | Vector<GstAppSrc*> appsrcs; |
280 | |
281 | GST_OBJECT_LOCK(m_webKitMediaSrc.get()); |
282 | for (Stream* stream : priv->streams) { |
283 | if (stream->appsrc) |
284 | appsrcs.append(GST_APP_SRC(stream->appsrc)); |
285 | } |
286 | GST_OBJECT_UNLOCK(m_webKitMediaSrc.get()); |
287 | |
288 | for (GstAppSrc* appsrc : appsrcs) |
289 | gst_app_src_end_of_stream(appsrc); |
290 | } |
291 | |
292 | void PlaybackPipeline::flush(AtomicString trackId) |
293 | { |
294 | ASSERT(WTF::isMainThread()); |
295 | |
296 | GST_DEBUG("flush: trackId=%s" , trackId.string().utf8().data()); |
297 | |
298 | GST_OBJECT_LOCK(m_webKitMediaSrc.get()); |
299 | Stream* stream = getStreamByTrackId(m_webKitMediaSrc.get(), trackId); |
300 | |
301 | if (!stream) { |
302 | GST_OBJECT_UNLOCK(m_webKitMediaSrc.get()); |
303 | return; |
304 | } |
305 | |
306 | stream->lastEnqueuedTime = MediaTime::invalidTime(); |
307 | GstElement* appsrc = stream->appsrc; |
308 | GST_OBJECT_UNLOCK(m_webKitMediaSrc.get()); |
309 | |
310 | if (!appsrc) |
311 | return; |
312 | |
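    // Only flush if the pipeline already reports a valid playback position; if the position can't be
    // queried (e.g. while still prerolling) there is nothing useful to flush yet.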
313 | gint64 position = GST_CLOCK_TIME_NONE; |
314 | GRefPtr<GstQuery> query = adoptGRef(gst_query_new_position(GST_FORMAT_TIME)); |
315 | if (gst_element_query(pipeline(), query.get())) |
316 | gst_query_parse_position(query.get(), 0, &position); |
317 | |
318 | GST_TRACE("Position: %" GST_TIME_FORMAT, GST_TIME_ARGS(position)); |
319 | |
320 | if (static_cast<guint64>(position) == GST_CLOCK_TIME_NONE) { |
321 | GST_DEBUG("Can't determine position, avoiding flush" ); |
322 | return; |
323 | } |
324 | |
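    // flush-stop is sent with reset_time=FALSE so that the running time is preserved and later buffers
    // keep their original timestamps.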
    if (!gst_element_send_event(GST_ELEMENT(appsrc), gst_event_new_flush_start()))
        GST_WARNING("Failed to send flush-start event for trackId=%s", trackId.string().utf8().data());

    if (!gst_element_send_event(GST_ELEMENT(appsrc), gst_event_new_flush_stop(false)))
        GST_WARNING("Failed to send flush-stop event for trackId=%s", trackId.string().utf8().data());
332 | |
333 | GST_DEBUG("trackId=%s flushed" , trackId.string().utf8().data()); |
334 | } |
335 | |
336 | void PlaybackPipeline::enqueueSample(Ref<MediaSample>&& mediaSample) |
337 | { |
338 | ASSERT(WTF::isMainThread()); |
339 | |
340 | AtomicString trackId = mediaSample->trackID(); |
341 | |
342 | GST_TRACE("enqueing sample trackId=%s PTS=%f presentationSize=%.0fx%.0f at %" GST_TIME_FORMAT " duration: %" GST_TIME_FORMAT, |
343 | trackId.string().utf8().data(), mediaSample->presentationTime().toFloat(), |
344 | mediaSample->presentationSize().width(), mediaSample->presentationSize().height(), |
345 | GST_TIME_ARGS(WebCore::toGstClockTime(mediaSample->presentationTime())), |
346 | GST_TIME_ARGS(WebCore::toGstClockTime(mediaSample->duration()))); |
347 | |
    // No need to lock to access the Stream here: the only potential conflict with this read and with the
    // use of the stream fields below would be the deletion of the stream, but that can only happen on the
    // main thread, and we are already on it. Locking would only penalize readers running on other threads.
352 | Stream* stream = getStreamByTrackId(m_webKitMediaSrc.get(), trackId); |
353 | |
354 | if (!stream) { |
355 | GST_WARNING("No stream!" ); |
356 | return; |
357 | } |
358 | |
359 | if (!stream->sourceBuffer->isReadyForMoreSamples(trackId)) { |
360 | GST_DEBUG("enqueueSample: skip adding new sample for trackId=%s, SB is not ready yet" , trackId.string().utf8().data()); |
361 | return; |
362 | } |
363 | |
364 | // This field doesn't change after creation, no need to lock. |
365 | GstElement* appsrc = stream->appsrc; |
366 | |
367 | // Only modified by the main thread, no need to lock. |
368 | MediaTime lastEnqueuedTime = stream->lastEnqueuedTime; |
369 | |
370 | ASSERT(mediaSample->platformSample().type == PlatformSample::GStreamerSampleType); |
371 | GRefPtr<GstSample> gstSample = mediaSample->platformSample().sample.gstSample; |
372 | if (gstSample && gst_sample_get_buffer(gstSample.get())) { |
373 | GstBuffer* buffer = gst_sample_get_buffer(gstSample.get()); |
374 | lastEnqueuedTime = mediaSample->presentationTime(); |
375 | |
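        // Make sure the sinks will actually present the frame: clear any decode-only flag left on the buffer.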
376 | GST_BUFFER_FLAG_UNSET(buffer, GST_BUFFER_FLAG_DECODE_ONLY); |
377 | pushSample(GST_APP_SRC(appsrc), gstSample.get()); |
        // pushSample() doesn't consume the gstSample reference: the buffer gets an extra ref before being pushed.
379 | |
380 | stream->lastEnqueuedTime = lastEnqueuedTime; |
381 | } |
382 | } |
383 | |
384 | void PlaybackPipeline::allSamplesInTrackEnqueued(const AtomicString& trackId) |
385 | { |
    Stream* stream = getStreamByTrackId(m_webKitMediaSrc.get(), trackId);
    ASSERT(stream && stream->appsrc);
    if (stream && stream->appsrc)
        gst_app_src_end_of_stream(GST_APP_SRC(stream->appsrc));
388 | } |
389 | |
390 | GstElement* PlaybackPipeline::pipeline() |
391 | { |
392 | if (!m_webKitMediaSrc || !GST_ELEMENT_PARENT(GST_ELEMENT(m_webKitMediaSrc.get()))) |
393 | return nullptr; |
394 | |
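    // The WebKitMediaSrc is expected to be plugged inside an intermediate bin (e.g. urisourcebin) owned
    // by the playback pipeline, hence the two levels of parent traversal.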
395 | return GST_ELEMENT_PARENT(GST_ELEMENT_PARENT(GST_ELEMENT(m_webKitMediaSrc.get()))); |
396 | } |
397 | |
} // namespace WebCore
399 | |
#endif // ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
401 | |