/*
 * Copyright (C) 2007, 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2007 Collabora Ltd. All rights reserved.
 * Copyright (C) 2007 Alp Toker <alp@atoker.com>
 * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
 * Copyright (C) 2009, 2010, 2011, 2012, 2013, 2016, 2017 Igalia S.L
 * Copyright (C) 2015 Sebastian Dröge <sebastian@centricular.com>
 * Copyright (C) 2015, 2016, 2017 Metrological Group B.V.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB. If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#include "config.h"
#include "MediaPlayerPrivateGStreamerMSE.h"

#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)

#include "AppendPipeline.h"
#include "AudioTrackPrivateGStreamer.h"
#include "GStreamerCommon.h"
#include "GStreamerRegistryScannerMSE.h"
#include "InbandTextTrackPrivateGStreamer.h"
#include "MIMETypeRegistry.h"
#include "MediaDescription.h"
#include "MediaPlayer.h"
#include "NotImplemented.h"
#include "PlaybackPipeline.h"
#include "SourceBufferPrivateGStreamer.h"
#include "TimeRanges.h"
#include "VideoTrackPrivateGStreamer.h"

#include <gst/app/gstappsink.h>
#include <gst/app/gstappsrc.h>
#include <gst/gst.h>
#include <gst/pbutils/pbutils.h>
#include <gst/video/video.h>
#include <wtf/Condition.h>
#include <wtf/HashSet.h>
#include <wtf/NeverDestroyed.h>
#include <wtf/StringPrintStream.h>
#include <wtf/URL.h>
#include <wtf/text/AtomicString.h>
#include <wtf/text/AtomicStringHash.h>

static const char* dumpReadyState(WebCore::MediaPlayer::ReadyState readyState)
{
    switch (readyState) {
    case WebCore::MediaPlayer::HaveNothing: return "HaveNothing";
    case WebCore::MediaPlayer::HaveMetadata: return "HaveMetadata";
    case WebCore::MediaPlayer::HaveCurrentData: return "HaveCurrentData";
    case WebCore::MediaPlayer::HaveFutureData: return "HaveFutureData";
    case WebCore::MediaPlayer::HaveEnoughData: return "HaveEnoughData";
    default: return "(unknown)";
    }
}

GST_DEBUG_CATEGORY(webkit_mse_debug);
#define GST_CAT_DEFAULT webkit_mse_debug

namespace WebCore {

void MediaPlayerPrivateGStreamerMSE::registerMediaEngine(MediaEngineRegistrar registrar)
{
    initializeGStreamerAndRegisterWebKitElements();
    GST_DEBUG_CATEGORY_INIT(webkit_mse_debug, "webkitmse", 0, "WebKit MSE media player");
    if (isAvailable()) {
        registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateGStreamerMSE>(player); },
            getSupportedTypes, supportsType, nullptr, nullptr, nullptr, supportsKeySystem);
    }
}

MediaPlayerPrivateGStreamerMSE::MediaPlayerPrivateGStreamerMSE(MediaPlayer* player)
    : MediaPlayerPrivateGStreamer(player)
{
    GST_TRACE("creating the player (%p)", this);
}

MediaPlayerPrivateGStreamerMSE::~MediaPlayerPrivateGStreamerMSE()
{
    GST_TRACE("destroying the player (%p)", this);

    // Clear the AppendPipeline map. This should cause the destruction of all the AppendPipelines, since there
    // should be no live references to them at this point.
#ifndef NDEBUG
    for (auto iterator : m_appendPipelinesMap)
        ASSERT(iterator.value->hasOneRef());
#endif
    m_appendPipelinesMap.clear();

    if (m_source) {
        webKitMediaSrcSetMediaPlayerPrivate(WEBKIT_MEDIA_SRC(m_source.get()), nullptr);
        g_signal_handlers_disconnect_by_data(m_source.get(), this);
    }

    if (m_playbackPipeline)
        m_playbackPipeline->setWebKitMediaSrc(nullptr);
}

void MediaPlayerPrivateGStreamerMSE::load(const String& urlString)
{
    if (!urlString.startsWith("mediasource")) {
        // Fail cleanly so that the global MediaPlayer falls back to the next MediaPlayerPrivate.
        m_networkState = MediaPlayer::FormatError;
        m_player->networkStateChanged();
        return;
    }

    if (!m_playbackPipeline)
        m_playbackPipeline = PlaybackPipeline::create();

    MediaPlayerPrivateGStreamer::load(urlString);
}

void MediaPlayerPrivateGStreamerMSE::load(const String& url, MediaSourcePrivateClient* mediaSource)
{
    m_mediaSource = mediaSource;
    load(makeString("mediasource", url));
}

void MediaPlayerPrivateGStreamerMSE::pause()
{
    m_paused = true;
    MediaPlayerPrivateGStreamer::pause();
}

MediaTime MediaPlayerPrivateGStreamerMSE::durationMediaTime() const
{
    if (UNLIKELY(!m_pipeline || m_errorOccured))
        return MediaTime();

    return m_mediaTimeDuration;
}

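// Entry point for seeks requested by the MediaPlayer. A few cheap fast paths are handled here before any real
// work: a seek to the current position only re-notifies timeChanged(), live streams are never seeked, and a new
// target arriving while a seek is already pending simply replaces the pending target. Everything else is handed
// over to doSeek().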
void MediaPlayerPrivateGStreamerMSE::seek(const MediaTime& time)
{
    if (UNLIKELY(!m_pipeline || m_errorOccured))
        return;

    GST_INFO("[Seek] seek attempt to %s secs", toString(time).utf8().data());

    // Avoid useless seeking.
    MediaTime current = currentMediaTime();
    if (time == current) {
        if (!m_seeking)
            timeChanged();
        return;
    }

    if (isLiveStream())
        return;

    if (m_seeking && m_seekIsPending) {
        m_seekTime = time;
        return;
    }

    GST_DEBUG("Seeking from %s to %s seconds", toString(current).utf8().data(), toString(time).utf8().data());

    MediaTime previousSeekTime = m_seekTime;
    m_seekTime = time;

    if (!doSeek()) {
        m_seekTime = previousSeekTime;
        GST_WARNING("Seeking to %s failed", toString(time).utf8().data());
        return;
    }

    m_isEndReached = false;
    GST_DEBUG("m_seeking=%s, m_seekTime=%s", boolForPrinting(m_seeking), toString(m_seekTime).utf8().data());
}

void MediaPlayerPrivateGStreamerMSE::configurePlaySink()
{
    MediaPlayerPrivateGStreamer::configurePlaySink();

    GRefPtr<GstElement> playsink = adoptGRef(gst_bin_get_by_name(GST_BIN(m_pipeline.get()), "playsink"));
    if (playsink) {
        // The default value (0) means "send events to all the sinks", instead
        // of "only to the first that returns true". This is needed for MSE seek.
        g_object_set(G_OBJECT(playsink.get()), "send-event-mode", 0, nullptr);
    }
}

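// While a seek is in progress the seek machinery owns the pipeline state: doSeek() and maybeFinishSeek() drive
// the pipeline themselves, so unrelated state change requests are acknowledged (returning true) but not applied
// until the seek completes.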
bool MediaPlayerPrivateGStreamerMSE::changePipelineState(GstState newState)
{
    if (seeking()) {
        GST_DEBUG("Rejected state change to %s while seeking",
            gst_element_state_get_name(newState));
        return true;
    }

    return MediaPlayerPrivateGStreamer::changePipelineState(newState);
}

void MediaPlayerPrivateGStreamerMSE::notifySeekNeedsDataForTime(const MediaTime& seekTime)
{
    // Reenqueue the samples needed to resume playback at the new position.
    m_mediaSource->seekToTime(seekTime);

    GST_DEBUG("MSE seek to %s finished", toString(seekTime).utf8().data());

    if (!m_gstSeekCompleted) {
        m_gstSeekCompleted = true;
        maybeFinishSeek();
    }
}

bool MediaPlayerPrivateGStreamerMSE::doSeek(const MediaTime&, float, GstSeekFlags)
{
    // Use the no-argument doSeek() instead. If anybody calls this version of doSeek(), something is wrong.
    ASSERT_NOT_REACHED();
    return false;
}

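// An MSE seek has two halves that can finish in either order: a flushing, accurate seek on the GStreamer playback
// pipeline (tracked by m_gstSeekCompleted) and the MSE-level re-enqueue of samples around the target time
// (tracked by m_mseSeekCompleted, driven by waitForSeekCompleted()/seekCompleted()). doSeek() starts both halves,
// deferring the pipeline seek while the target is not buffered or the pipeline is not ready, and
// maybeFinishSeek() reports the seek as finished only once both flags are set again.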
bool MediaPlayerPrivateGStreamerMSE::doSeek()
{
    MediaTime seekTime = m_seekTime;
    double rate = m_player->rate();
    GstSeekFlags seekType = static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE);

    // Always move to the seeking state, to report the correct 'currentTime' while waiting for the actual seek to complete.
    m_seeking = true;

    // Check if the playback pipeline is ready for the seek.
    GstState state, newState;
    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &newState, 0);
    if (getStateResult == GST_STATE_CHANGE_FAILURE || getStateResult == GST_STATE_CHANGE_NO_PREROLL) {
        GST_DEBUG("[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult));
        webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
        m_seeking = false;
        return false;
    }
    if ((getStateResult == GST_STATE_CHANGE_ASYNC
        && !(state == GST_STATE_PLAYING && newState == GST_STATE_PAUSED))
        || state < GST_STATE_PAUSED
        || m_isEndReached
        || !m_gstSeekCompleted) {
        CString reason = "Unknown reason";
        if (getStateResult == GST_STATE_CHANGE_ASYNC) {
            reason = makeString("In async change ",
                gst_element_state_get_name(state), " --> ",
                gst_element_state_get_name(newState)).utf8();
        } else if (state < GST_STATE_PAUSED)
            reason = "State less than PAUSED";
        else if (m_isEndReached)
            reason = "End reached";
        else if (!m_gstSeekCompleted)
            reason = "Previous seek is not finished yet";

        GST_DEBUG("[Seek] Delaying the seek: %s", reason.data());

        m_seekIsPending = true;

        if (m_isEndReached) {
            GST_DEBUG("[Seek] reset pipeline");
            m_resetPipeline = true;
            m_seeking = false;
            if (!changePipelineState(GST_STATE_PAUSED))
                loadingFailed(MediaPlayer::Empty);
            else
                m_seeking = true;
        }

        return m_seeking;
    }

    // Stop accepting new samples until the actual seek is finished.
    webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), false);

    // Correct the seek time if it helps to fix a small gap.
    if (!isTimeBuffered(seekTime)) {
        // Check if a near future time (< 0.1 sec) is buffered and, if so, use it as the seek target time.
        if (m_mediaSource) {
            const MediaTime miniGap = MediaTime(1, 10);
            MediaTime nearest = m_mediaSource->buffered()->nearest(seekTime);
            if (nearest.isValid() && nearest > seekTime && (nearest - seekTime) <= miniGap && isTimeBuffered(nearest + miniGap)) {
                GST_DEBUG("[Seek] Changed the seek target time from %s to %s, a near point in the future", toString(seekTime).utf8().data(), toString(nearest).utf8().data());
                seekTime = nearest;
            }
        }
    }

    // Check if MSE has samples for the requested time, and defer the actual seek if needed.
    if (!isTimeBuffered(seekTime)) {
        GST_DEBUG("[Seek] Delaying the seek: MSE is not ready");
        GstStateChangeReturn setStateResult = gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
        if (setStateResult == GST_STATE_CHANGE_FAILURE) {
            GST_DEBUG("[Seek] Cannot seek, failed to pause playback pipeline.");
            webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
            m_seeking = false;
            return false;
        }
        m_readyState = MediaPlayer::HaveMetadata;
        notifySeekNeedsDataForTime(seekTime);
        ASSERT(!m_mseSeekCompleted);
        return true;
    }

    // Complete the previous MSE seek if needed.
    if (!m_mseSeekCompleted) {
        m_mediaSource->monitorSourceBuffers();
        ASSERT(m_mseSeekCompleted);
        // Note: seekCompleted will recursively call us.
        return m_seeking;
    }

    GST_DEBUG("We can seek now");

    MediaTime startTime = seekTime, endTime = MediaTime::invalidTime();

    if (rate < 0) {
        startTime = MediaTime::zeroTime();
        endTime = seekTime;
    }

    if (!rate)
        rate = 1;

    GST_DEBUG("Actual seek to %s, end time: %s, rate: %f", toString(startTime).utf8().data(), toString(endTime).utf8().data(), rate);

    // This will call notifySeekNeedsData() after some time to signal that the pipeline is ready for sample enqueuing.
    webKitMediaSrcPrepareSeek(WEBKIT_MEDIA_SRC(m_source.get()), seekTime);

    m_gstSeekCompleted = false;
    if (!gst_element_seek(m_pipeline.get(), rate, GST_FORMAT_TIME, seekType, GST_SEEK_TYPE_SET, toGstClockTime(startTime), GST_SEEK_TYPE_SET, toGstClockTime(endTime))) {
        webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
        m_seeking = false;
        m_gstSeekCompleted = true;
        GST_DEBUG("doSeek(): gst_element_seek() failed, returning false");
        return false;
    }

    // The samples will be enqueued in notifySeekNeedsData().
    GST_DEBUG("doSeek(): gst_element_seek() succeeded, returning true");
    return true;
}

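// Called whenever a piece of an in-flight seek makes progress (MSE seek completion, GStreamer async state change
// done, a state update). The seek is only reported as finished once the MSE and GStreamer halves are both
// complete and the pipeline is not in the middle of an unrelated async state change; if another seek target was
// queued meanwhile, it is committed here instead.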
void MediaPlayerPrivateGStreamerMSE::maybeFinishSeek()
{
    if (!m_seeking || !m_mseSeekCompleted || !m_gstSeekCompleted)
        return;

    GstState state, newState;
    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &newState, 0);

    if (getStateResult == GST_STATE_CHANGE_ASYNC
        && !(state == GST_STATE_PLAYING && newState == GST_STATE_PAUSED)) {
        GST_DEBUG("[Seek] Delaying seek finish");
        return;
    }

    if (m_seekIsPending) {
        GST_DEBUG("[Seek] Committing pending seek to %s", toString(m_seekTime).utf8().data());
        m_seekIsPending = false;
        if (!doSeek()) {
            GST_WARNING("[Seek] Seeking to %s failed", toString(m_seekTime).utf8().data());
            m_cachedPosition = MediaTime::invalidTime();
        }
        return;
    }

    GST_DEBUG("[Seek] Seeked to %s", toString(m_seekTime).utf8().data());

    webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
    m_seeking = false;
    m_cachedPosition = MediaTime::invalidTime();
    // The pipeline can still have a pending state. In this case a position query will fail.
    // Right now we can use m_seekTime as a fallback.
    m_canFallBackToLastFinishedSeekPosition = true;
    timeChanged();
}

void MediaPlayerPrivateGStreamerMSE::updatePlaybackRate()
{
    notImplemented();
}

bool MediaPlayerPrivateGStreamerMSE::seeking() const
{
    return m_seeking;
}

// FIXME: MediaPlayerPrivateGStreamer manages the ReadyState on its own. We shouldn't change it manually.
void MediaPlayerPrivateGStreamerMSE::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (readyState == m_readyState)
        return;

    if (seeking()) {
        GST_DEBUG("Skip ready state change (%s -> %s) due to seek", dumpReadyState(m_readyState), dumpReadyState(readyState));
        return;
    }

    GST_DEBUG("Ready State Changed manually from %u to %u", m_readyState, readyState);
    MediaPlayer::ReadyState oldReadyState = m_readyState;
    m_readyState = readyState;
    GST_DEBUG("m_readyState: %s -> %s", dumpReadyState(oldReadyState), dumpReadyState(m_readyState));

    if (oldReadyState < MediaPlayer::HaveCurrentData && m_readyState >= MediaPlayer::HaveCurrentData) {
        GST_DEBUG("[Seek] Reporting load state changed to trigger seek continuation");
        loadStateChanged();
    }
    m_player->readyStateChanged();

    GstState pipelineState;
    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &pipelineState, nullptr, 250 * GST_NSECOND);
    bool isPlaying = (getStateResult == GST_STATE_CHANGE_SUCCESS && pipelineState == GST_STATE_PLAYING);

    if (m_readyState == MediaPlayer::HaveMetadata && oldReadyState > MediaPlayer::HaveMetadata && isPlaying) {
        GST_TRACE("Changing pipeline to PAUSED...");
        bool ok = changePipelineState(GST_STATE_PAUSED);
        GST_TRACE("Changed pipeline to PAUSED: %s", ok ? "Success" : "Error");
    }
}

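// The MSE half of the seek: waitForSeekCompleted() marks the MSE seek as pending when the media source still
// needs data for the seek target, and seekCompleted() marks it as done once enough data has been appended; it
// then retries doSeek() and, if nothing is pending anymore, resumes playback and reports the time change.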
void MediaPlayerPrivateGStreamerMSE::waitForSeekCompleted()
{
    if (!m_seeking)
        return;

    GST_DEBUG("Waiting for MSE seek completed");
    m_mseSeekCompleted = false;
}

void MediaPlayerPrivateGStreamerMSE::seekCompleted()
{
    if (m_mseSeekCompleted)
        return;

    GST_DEBUG("MSE seek completed");
    m_mseSeekCompleted = true;

    doSeek();

    if (!seeking() && m_readyState >= MediaPlayer::HaveFutureData)
        changePipelineState(GST_STATE_PLAYING);

    if (!seeking())
        m_player->timeChanged();
}

void MediaPlayerPrivateGStreamerMSE::setRate(float)
{
    notImplemented();
}

std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateGStreamerMSE::buffered() const
{
    return m_mediaSource ? m_mediaSource->buffered() : std::make_unique<PlatformTimeRanges>();
}

void MediaPlayerPrivateGStreamerMSE::sourceSetup(GstElement* sourceElement)
{
    m_source = sourceElement;

    ASSERT(WEBKIT_IS_MEDIA_SRC(m_source.get()));

    m_playbackPipeline->setWebKitMediaSrc(WEBKIT_MEDIA_SRC(m_source.get()));

    MediaSourceGStreamer::open(*m_mediaSource.get(), *this);
    g_signal_connect_swapped(m_source.get(), "video-changed", G_CALLBACK(videoChangedCallback), this);
    g_signal_connect_swapped(m_source.get(), "audio-changed", G_CALLBACK(audioChangedCallback), this);
    g_signal_connect_swapped(m_source.get(), "text-changed", G_CALLBACK(textChangedCallback), this);
    webKitMediaSrcSetMediaPlayerPrivate(WEBKIT_MEDIA_SRC(m_source.get()), this);
}

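// Maps the current GStreamer pipeline state (and any state change in progress) to the MediaPlayer ready and
// network states: NULL/READY report HaveNothing/HaveMetadata, PAUSED/PLAYING report HaveCurrentData or better
// depending on buffering and download progress, and a seek in progress pins the ready state to HaveMetadata
// until it finishes. This is also where deferred operations (restarting after buffering, pausing for buffering,
// finishing a seek) are triggered once the pipeline has settled.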
void MediaPlayerPrivateGStreamerMSE::updateStates()
{
    if (UNLIKELY(!m_pipeline || m_errorOccured))
        return;

    MediaPlayer::NetworkState oldNetworkState = m_networkState;
    MediaPlayer::ReadyState oldReadyState = m_readyState;
    GstState state, pending;

    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &pending, 250 * GST_NSECOND);

    bool shouldUpdatePlaybackState = false;
    switch (getStateResult) {
    case GST_STATE_CHANGE_SUCCESS: {
        GST_DEBUG("State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));

        // Do nothing if on EOS and the state changed to READY, to avoid recreating the player
        // on HTMLMediaElement and to properly generate the video 'ended' event.
        if (m_isEndReached && state == GST_STATE_READY)
            break;

        m_resetPipeline = (state <= GST_STATE_READY);
        if (m_resetPipeline)
            m_mediaTimeDuration = MediaTime::zeroTime();

        // Update ready and network states.
        switch (state) {
        case GST_STATE_NULL:
            m_readyState = MediaPlayer::HaveNothing;
            GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
            m_networkState = MediaPlayer::Empty;
            break;
        case GST_STATE_READY:
            m_readyState = MediaPlayer::HaveMetadata;
            GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
            m_networkState = MediaPlayer::Empty;
            break;
        case GST_STATE_PAUSED:
        case GST_STATE_PLAYING:
            if (seeking()) {
                m_readyState = MediaPlayer::HaveMetadata;
                // FIXME: Should we manage NetworkState too?
                GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
            } else if (m_buffering) {
                if (m_bufferingPercentage == 100) {
                    GST_DEBUG("[Buffering] Complete.");
                    m_buffering = false;
                    m_readyState = MediaPlayer::HaveEnoughData;
                    GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
                    m_networkState = m_downloadFinished ? MediaPlayer::Idle : MediaPlayer::Loading;
                } else {
                    m_readyState = MediaPlayer::HaveCurrentData;
                    GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
                    m_networkState = MediaPlayer::Loading;
                }
            } else if (m_downloadFinished) {
                m_readyState = MediaPlayer::HaveEnoughData;
                GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
                m_networkState = MediaPlayer::Loaded;
            } else {
                m_readyState = MediaPlayer::HaveFutureData;
                GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
                m_networkState = MediaPlayer::Loading;
            }

            if (m_eosMarked && state == GST_STATE_PLAYING)
                m_eosPending = true;

            break;
        default:
            ASSERT_NOT_REACHED();
            break;
        }

        // Sync states where needed.
        if (state == GST_STATE_PAUSED) {
            if (!m_volumeAndMuteInitialized) {
                notifyPlayerOfVolumeChange();
                notifyPlayerOfMute();
                m_volumeAndMuteInitialized = true;
            }

            if (!seeking() && !m_buffering && !m_paused && m_playbackRate) {
                GST_DEBUG("[Buffering] Restarting playback.");
                changePipelineState(GST_STATE_PLAYING);
            }
        } else if (state == GST_STATE_PLAYING) {
            m_paused = false;

            if ((m_buffering && !isLiveStream()) || !m_playbackRate) {
                GST_DEBUG("[Buffering] Pausing stream for buffering.");
                changePipelineState(GST_STATE_PAUSED);
            }
        } else
            m_paused = true;

        if (m_requestedState == GST_STATE_PAUSED && state == GST_STATE_PAUSED) {
            shouldUpdatePlaybackState = true;
            GST_DEBUG("Requested state change to %s was completed", gst_element_state_get_name(state));
        }

        break;
    }
    case GST_STATE_CHANGE_ASYNC:
        GST_DEBUG("Async: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
        // Change in progress.
        break;
    case GST_STATE_CHANGE_FAILURE:
        GST_WARNING("Failure: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
        // Change failed.
        return;
    case GST_STATE_CHANGE_NO_PREROLL:
        GST_DEBUG("No preroll: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));

        // Live pipelines go to PAUSED without prerolling.
        m_isStreaming = true;

        if (state == GST_STATE_READY) {
            m_readyState = MediaPlayer::HaveNothing;
            GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
        } else if (state == GST_STATE_PAUSED) {
            m_readyState = MediaPlayer::HaveEnoughData;
            GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
            m_paused = true;
        } else if (state == GST_STATE_PLAYING)
            m_paused = false;

        if (!m_paused && m_playbackRate)
            changePipelineState(GST_STATE_PLAYING);

        m_networkState = MediaPlayer::Loading;
        break;
    default:
        GST_DEBUG("Else: %d", getStateResult);
        break;
    }

    m_requestedState = GST_STATE_VOID_PENDING;

    if (shouldUpdatePlaybackState)
        m_player->playbackStateChanged();

    if (m_networkState != oldNetworkState) {
        GST_DEBUG("Network State Changed from %u to %u", oldNetworkState, m_networkState);
        m_player->networkStateChanged();
    }
    if (m_readyState != oldReadyState) {
        GST_DEBUG("Ready State Changed from %u to %u", oldReadyState, m_readyState);
        m_player->readyStateChanged();
    }

    if (getStateResult == GST_STATE_CHANGE_SUCCESS && state >= GST_STATE_PAUSED) {
        updatePlaybackRate();
        maybeFinishSeek();
    }
}

void MediaPlayerPrivateGStreamerMSE::asyncStateChangeDone()
{
    if (UNLIKELY(!m_pipeline || m_errorOccured))
        return;

    if (m_seeking)
        maybeFinishSeek();
    else
        updateStates();
}

bool MediaPlayerPrivateGStreamerMSE::isTimeBuffered(const MediaTime& time) const
{
    bool result = m_mediaSource && m_mediaSource->buffered()->contain(time);
    GST_DEBUG("Time %s buffered? %s", toString(time).utf8().data(), boolForPrinting(result));
    return result;
}

void MediaPlayerPrivateGStreamerMSE::setMediaSourceClient(Ref<MediaSourceClientGStreamerMSE> client)
{
    m_mediaSourceClient = client.ptr();
}

RefPtr<MediaSourceClientGStreamerMSE> MediaPlayerPrivateGStreamerMSE::mediaSourceClient()
{
    return m_mediaSourceClient;
}

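// Duration updates can be batched: while changes are blocked, durationChanged() only records that a notification
// is owed, and unblockDurationChanges() emits a single durationChanged()/notifyDurationChanged() pair if anything
// changed in between. Both calls must happen on the main thread.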
void MediaPlayerPrivateGStreamerMSE::blockDurationChanges()
{
    ASSERT(isMainThread());
    m_areDurationChangesBlocked = true;
    m_shouldReportDurationWhenUnblocking = false;
}

void MediaPlayerPrivateGStreamerMSE::unblockDurationChanges()
{
    ASSERT(isMainThread());
    if (m_shouldReportDurationWhenUnblocking) {
        m_player->durationChanged();
        m_playbackPipeline->notifyDurationChanged();
        m_shouldReportDurationWhenUnblocking = false;
    }

    m_areDurationChangesBlocked = false;
}

void MediaPlayerPrivateGStreamerMSE::durationChanged()
{
    ASSERT(isMainThread());
    if (!m_mediaSourceClient) {
        GST_DEBUG("m_mediaSourceClient is null, doing nothing");
        return;
    }

    MediaTime previousDuration = m_mediaTimeDuration;
    m_mediaTimeDuration = m_mediaSourceClient->duration();

    GST_TRACE("previous=%s, new=%s", toString(previousDuration).utf8().data(), toString(m_mediaTimeDuration).utf8().data());

    // Avoid emitting durationChanged in the case where the previous duration was 0, because that case is already
    // handled by the HTMLMediaElement.
    if (m_mediaTimeDuration != previousDuration && m_mediaTimeDuration.isValid() && previousDuration.isValid()) {
        if (!m_areDurationChangesBlocked) {
            m_player->durationChanged();
            m_playbackPipeline->notifyDurationChanged();
        } else
            m_shouldReportDurationWhenUnblocking = true;
        m_mediaSource->durationChanged(m_mediaTimeDuration);
    }
}

void MediaPlayerPrivateGStreamerMSE::trackDetected(RefPtr<AppendPipeline> appendPipeline, RefPtr<WebCore::TrackPrivateBase> newTrack, bool firstTrackDetected)
{
    ASSERT(appendPipeline->track() == newTrack);

    GstCaps* caps = appendPipeline->appsinkCaps();
    ASSERT(caps);
    GST_DEBUG("track ID: %s, caps: %" GST_PTR_FORMAT, newTrack->id().string().latin1().data(), caps);

    if (doCapsHaveType(caps, GST_VIDEO_CAPS_TYPE_PREFIX)) {
        Optional<FloatSize> size = getVideoResolutionFromCaps(caps);
        if (size.hasValue())
            m_videoSize = size.value();
    }

    if (firstTrackDetected)
        m_playbackPipeline->attachTrack(appendPipeline->sourceBufferPrivate(), newTrack, caps);
    else
        m_playbackPipeline->reattachTrack(appendPipeline->sourceBufferPrivate(), newTrack, caps);
}

void MediaPlayerPrivateGStreamerMSE::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
{
    auto& gstRegistryScanner = GStreamerRegistryScannerMSE::singleton();
    types = gstRegistryScanner.mimeTypeSet();
}

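// Decides how confidently this engine can claim support for an MSE content type. For example, a query such as
// video/webm; codecs="vp9" yields IsSupported only when the registry scanner reports both the container and
// every listed codec as playable, while the same container type without a codecs string yields at most
// MayBeSupported.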
MediaPlayer::SupportsType MediaPlayerPrivateGStreamerMSE::supportsType(const MediaEngineSupportParameters& parameters)
{
    MediaPlayer::SupportsType result = MediaPlayer::IsNotSupported;
    if (!parameters.isMediaSource)
        return result;

    auto containerType = parameters.type.containerType();

    // YouTube TV provides empty types for some videos and we want to be selected as the best media engine for them.
    if (containerType.isEmpty()) {
        result = MediaPlayer::MayBeSupported;
        GST_DEBUG("mime-type \"%s\" supported: %s", parameters.type.raw().utf8().data(), convertEnumerationToString(result).utf8().data());
        return result;
    }

    GST_DEBUG("Checking mime-type \"%s\"", parameters.type.raw().utf8().data());
    auto& gstRegistryScanner = GStreamerRegistryScannerMSE::singleton();
    // The spec says we should not return "probably" if the codecs string is empty.
    if (gstRegistryScanner.isContainerTypeSupported(containerType)) {
        Vector<String> codecs = parameters.type.codecs();
        result = codecs.isEmpty() ? MediaPlayer::MayBeSupported : (gstRegistryScanner.areAllCodecsSupported(codecs) ? MediaPlayer::IsSupported : MediaPlayer::IsNotSupported);
    }

    auto finalResult = extendedSupportsType(parameters, result);
    GST_DEBUG("Supported: %s", convertEnumerationToString(finalResult).utf8().data());
    return finalResult;
}

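// Invoked when the media source signals end of stream without error. The flag is only latched here;
// updateStates() turns it into m_eosPending once the pipeline is actually PLAYING, and currentMediaTime()
// finally reports the end of playback when the position reaches the duration.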
void MediaPlayerPrivateGStreamerMSE::markEndOfStream(MediaSourcePrivate::EndOfStreamStatus status)
{
    if (status != MediaSourcePrivate::EosNoError)
        return;

    GST_DEBUG("Marking end of stream");
    m_eosMarked = true;
    updateStates();
}

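// Reports the playback position from the base class, but also detects the end of playback: once end of stream is
// pending and the position has caught up with the duration (or the element is paused), the network state is
// forced to Loaded, the cached position is pinned to the duration and a time change is reported so that
// HTMLMediaElement can fire 'ended'.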
MediaTime MediaPlayerPrivateGStreamerMSE::currentMediaTime() const
{
    MediaTime position = MediaPlayerPrivateGStreamer::currentMediaTime();

    if (m_eosPending && (paused() || (position >= durationMediaTime()))) {
        if (m_networkState != MediaPlayer::Loaded) {
            m_networkState = MediaPlayer::Loaded;
            m_player->networkStateChanged();
        }

        m_eosPending = false;
        m_isEndReached = true;
        m_cachedPosition = m_mediaTimeDuration;
        m_player->timeChanged();
    }
    return position;
}

MediaTime MediaPlayerPrivateGStreamerMSE::maxMediaTimeSeekable() const
{
    if (UNLIKELY(m_errorOccured))
        return MediaTime::zeroTime();

    GST_DEBUG("maxMediaTimeSeekable");
    MediaTime result = durationMediaTime();
    // Infinite duration means live stream.
    if (result.isPositiveInfinite()) {
        MediaTime maxBufferedTime = buffered()->maximumBufferedTime();
        // Return the highest end time reported by the buffered attribute.
        result = maxBufferedTime.isValid() ? maxBufferedTime : MediaTime::zeroTime();
    }

    return result;
}

} // namespace WebCore.

#endif // ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)