/*
 * Copyright (C) 2007, 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2007 Collabora Ltd. All rights reserved.
 * Copyright (C) 2007 Alp Toker <alp@atoker.com>
 * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
 * Copyright (C) 2009, 2010, 2015, 2016 Igalia S.L
 * Copyright (C) 2015, 2016 Metrological Group B.V.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB. If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#include "config.h"
#include "MediaPlayerPrivateGStreamerBase.h"

#if ENABLE(VIDEO) && USE(GSTREAMER)

#include "GStreamerCommon.h"
#include "GraphicsContext.h"
#include "ImageGStreamer.h"
#include "ImageOrientation.h"
#include "IntRect.h"
#include "Logging.h"
#include "MediaPlayer.h"
#include "NotImplemented.h"
#include "VideoSinkGStreamer.h"
#include "WebKitWebSourceGStreamer.h"
#include <wtf/MathExtras.h>
#include <wtf/StringPrintStream.h>
#include <wtf/glib/GUniquePtr.h>
#include <wtf/text/AtomicString.h>
#include <wtf/text/CString.h>

#include <gst/audio/streamvolume.h>
#include <gst/video/gstvideometa.h>

#if ENABLE(ENCRYPTED_MEDIA)
#include "CDMInstance.h"
#include "GStreamerEMEUtilities.h"
#include "SharedBuffer.h"
#include "WebKitCommonEncryptionDecryptorGStreamer.h"
#endif

#if USE(GSTREAMER_GL)
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
#define GST_GL_CAPS_FORMAT "{ BGRx, BGRA }"
#define TEXTURE_MAPPER_COLOR_CONVERT_FLAG TextureMapperGL::ShouldConvertTextureBGRAToRGBA
#define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::ConvertBGRAToRGBA
#else
#define GST_GL_CAPS_FORMAT "{ xRGB, ARGB }"
#define TEXTURE_MAPPER_COLOR_CONVERT_FLAG TextureMapperGL::ShouldConvertTextureARGBToRGBA
#define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::ConvertARGBToRGBA
#endif

#include <gst/app/gstappsink.h>

#if USE(LIBEPOXY)
// Include the <epoxy/gl.h> header before <gst/gl/gl.h>.
#include <epoxy/gl.h>

// Work around a build issue with the RPi userland GLESv2 headers and libepoxy <https://webkit.org/b/185639>.
#if !GST_CHECK_VERSION(1, 14, 0)
#include <gst/gl/gstglconfig.h>
#if defined(GST_GL_HAVE_WINDOW_DISPMANX) && GST_GL_HAVE_WINDOW_DISPMANX
#define __gl2_h_
#undef GST_GL_HAVE_GLSYNC
#define GST_GL_HAVE_GLSYNC 1
#endif
#endif // !GST_CHECK_VERSION(1, 14, 0)
#endif // USE(LIBEPOXY)

#define GST_USE_UNSTABLE_API
#include <gst/gl/gl.h>
#undef GST_USE_UNSTABLE_API

#include "GLContext.h"
#if USE(GLX)
#include "GLContextGLX.h"
#include <gst/gl/x11/gstgldisplay_x11.h>
#endif

#if USE(EGL)
#include "GLContextEGL.h"
#include <gst/gl/egl/gstgldisplay_egl.h>
#endif

#if PLATFORM(X11)
#include "PlatformDisplayX11.h"
#endif

#if PLATFORM(WAYLAND)
#include "PlatformDisplayWayland.h"
#elif PLATFORM(WPE)
#include "PlatformDisplayLibWPE.h"
#endif

// gstglapi.h may include eglplatform.h, which in turn includes X.h; X.h
// defines None, breaking the MediaPlayer::None enum.
#if PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
#undef None
#endif // PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
#include "VideoTextureCopierGStreamer.h"
#endif // USE(GSTREAMER_GL)

#if USE(TEXTURE_MAPPER_GL)
#include "BitmapTextureGL.h"
#include "BitmapTexturePool.h"
#include "GraphicsContext3D.h"
#include "TextureMapperContextAttributes.h"
#include "TextureMapperPlatformLayerBuffer.h"
#include "TextureMapperPlatformLayerProxy.h"
#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
#include <cairo-gl.h>
#endif
#endif // USE(TEXTURE_MAPPER_GL)

GST_DEBUG_CATEGORY(webkit_media_player_debug);
#define GST_CAT_DEFAULT webkit_media_player_debug

namespace WebCore {
using namespace std;

#if USE(GSTREAMER_HOLEPUNCH)
static const FloatSize s_holePunchDefaultFrameSize(1280, 720);
#endif

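// Euclid's algorithm, used below to reduce the display aspect ratio and avoid overflows.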
static int greatestCommonDivisor(int a, int b)
{
    while (b) {
        int temp = a;
        a = b;
        b = temp % b;
    }

    return ABS(a);
}

#if USE(TEXTURE_MAPPER_GL)
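// Keeps a GstVideoFrame mapped (and thus its backing GstBuffer alive) for as long as the
// TextureMapper holds the corresponding platform layer buffer. For GL memory the mapping
// exposes the texture ID; otherwise it exposes system-memory pixels for CPU upload.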
class GstVideoFrameHolder : public TextureMapperPlatformLayerBuffer::UnmanagedBufferDataHolder {
public:
    explicit GstVideoFrameHolder(GstSample* sample, TextureMapperGL::Flags flags, bool gstGLEnabled)
    {
        GstVideoInfo videoInfo;
        if (UNLIKELY(!getSampleVideoInfo(sample, videoInfo)))
            return;

        m_size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
        m_hasAlphaChannel = GST_VIDEO_INFO_HAS_ALPHA(&videoInfo);
        m_buffer = gst_sample_get_buffer(sample);
        if (UNLIKELY(!GST_IS_BUFFER(m_buffer)))
            return;

#if USE(GSTREAMER_GL)
        m_flags = flags | (m_hasAlphaChannel ? TextureMapperGL::ShouldBlend : 0);

        if (gstGLEnabled) {
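            // Mapping with GST_MAP_READ | GST_MAP_GL does not download pixels: plane 0 of the
            // mapped frame carries the GL texture name instead of system memory.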
            m_isMapped = gst_video_frame_map(&m_videoFrame, &videoInfo, m_buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL));
            if (m_isMapped)
                m_textureID = *reinterpret_cast<GLuint*>(m_videoFrame.data[0]);
        } else
#else
        UNUSED_PARAM(flags);
        UNUSED_PARAM(gstGLEnabled);
#endif // USE(GSTREAMER_GL)

        {
            m_textureID = 0;
            m_isMapped = gst_video_frame_map(&m_videoFrame, &videoInfo, m_buffer, GST_MAP_READ);
            if (m_isMapped) {
                // Right now the TextureMapper only supports chroma formats laid out in a single plane.
                ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);
            }
        }
    }

    virtual ~GstVideoFrameHolder()
    {
        if (UNLIKELY(!m_isMapped))
            return;

        gst_video_frame_unmap(&m_videoFrame);
    }

    const IntSize& size() const { return m_size; }
    bool hasAlphaChannel() const { return m_hasAlphaChannel; }
    TextureMapperGL::Flags flags() const { return m_flags; }
    GLuint textureID() const { return m_textureID; }

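    // Uploads the mapped frame into the given texture: the zero-copy
    // GstVideoGLTextureUploadMeta path is tried first, with a CPU copy of plane 0 as fallback.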
    void updateTexture(BitmapTextureGL& texture)
    {
        ASSERT(!m_textureID);
        GstVideoGLTextureUploadMeta* meta;
        if ((meta = gst_buffer_get_video_gl_texture_upload_meta(m_buffer))) {
            if (meta->n_textures == 1) { // BGRx & BGRA formats use only one texture.
                guint ids[4] = { texture.id(), 0, 0, 0 };

                if (gst_video_gl_texture_upload_meta_upload(meta, ids))
                    return;
            }
        }

        int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&m_videoFrame, 0);
        const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&m_videoFrame, 0);
        texture.updateContents(srcData, WebCore::IntRect(0, 0, m_size.width(), m_size.height()), WebCore::IntPoint(0, 0), stride);
    }

private:
    GstBuffer* m_buffer;
    GstVideoFrame m_videoFrame { };
    IntSize m_size;
    bool m_hasAlphaChannel;
    TextureMapperGL::Flags m_flags { };
    GLuint m_textureID { 0 };
    bool m_isMapped { false };
};
#endif

void MediaPlayerPrivateGStreamerBase::initializeDebugCategory()
{
    GST_DEBUG_CATEGORY_INIT(webkit_media_player_debug, "webkitmediaplayer", 0, "WebKit media player");
}

MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* player)
    : m_notifier(MainThreadNotifier<MainThreadNotification>::create())
    , m_player(player)
    , m_fpsSink(nullptr)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_networkState(MediaPlayer::Empty)
    , m_drawTimer(RunLoop::main(), this, &MediaPlayerPrivateGStreamerBase::repaint)
#if USE(TEXTURE_MAPPER_GL)
#if USE(NICOSIA)
    , m_nicosiaLayer(Nicosia::ContentLayer::create(Nicosia::ContentLayerTextureMapperImpl::createFactory(*this)))
#else
    , m_platformLayerProxy(adoptRef(new TextureMapperPlatformLayerProxy()))
#endif
#endif
{
}

MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
{
#if USE(GSTREAMER_GL)
    if (m_videoDecoderPlatform == WebKitGstVideoDecoderPlatform::Video4Linux)
        flushCurrentBuffer();
#endif
#if USE(TEXTURE_MAPPER_GL) && USE(NICOSIA)
    downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).invalidateClient();
#endif

#if ENABLE(ENCRYPTED_MEDIA)
    m_protectionCondition.notifyAll();
#endif
    m_notifier->invalidate();

    if (m_videoSink) {
        g_signal_handlers_disconnect_matched(m_videoSink.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
#if USE(GSTREAMER_GL)
        if (GST_IS_BIN(m_videoSink.get())) {
            GRefPtr<GstElement> appsink = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_videoSink.get()), "webkit-gl-video-sink"));
            g_signal_handlers_disconnect_by_data(appsink.get(), this);
        }
#endif
    }

    if (m_volumeElement)
        g_signal_handlers_disconnect_matched(m_volumeElement.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);

    // This will release the GStreamer thread from m_drawCondition in non-AC mode in case there's an ongoing triggerRepaint call
    // waiting there, and ensure that any triggerRepaint call reaching the lock won't wait on m_drawCondition.
    cancelRepaint(true);

    // The change to GST_STATE_NULL is always synchronous, so after this call we don't need to worry
    // about handlers running in the GStreamer thread.
    if (m_pipeline)
        gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);

    m_player = nullptr;
}

void MediaPlayerPrivateGStreamerBase::setPipeline(GstElement* pipeline)
{
    m_pipeline = pipeline;

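    // The sync handler runs on the thread posting the message (usually a streaming thread);
    // messages handled there are dropped before they reach any asynchronous bus watch.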
    GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
    gst_bus_set_sync_handler(bus.get(), [](GstBus*, GstMessage* message, gpointer userData) {
        auto& player = *static_cast<MediaPlayerPrivateGStreamerBase*>(userData);

        if (player.handleSyncMessage(message)) {
            gst_message_unref(message);
            return GST_BUS_DROP;
        }

        return GST_BUS_PASS;
    }, this, nullptr);
}

bool MediaPlayerPrivateGStreamerBase::handleSyncMessage(GstMessage* message)
{
    if (GST_MESSAGE_TYPE(message) != GST_MESSAGE_NEED_CONTEXT)
        return false;

    const gchar* contextType;
    gst_message_parse_context_type(message, &contextType);
    GST_DEBUG_OBJECT(pipeline(), "Handling %s need-context message for %s", contextType, GST_MESSAGE_SRC_NAME(message));

    if (!g_strcmp0(contextType, WEBKIT_WEB_SRC_PLAYER_CONTEXT_TYPE_NAME)) {
        GRefPtr<GstContext> context = adoptGRef(gst_context_new(WEBKIT_WEB_SRC_PLAYER_CONTEXT_TYPE_NAME, FALSE));
        GstStructure* contextStructure = gst_context_writable_structure(context.get());

        ASSERT(m_player);
        gst_structure_set(contextStructure, "player", G_TYPE_POINTER, m_player, nullptr);
        gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context.get());
        return true;
    }

#if USE(GSTREAMER_GL)
    GRefPtr<GstContext> elementContext = adoptGRef(requestGLContext(contextType));
    if (elementContext) {
        gst_element_set_context(GST_ELEMENT(message->src), elementContext.get());
        return true;
    }
#endif // USE(GSTREAMER_GL)

#if ENABLE(ENCRYPTED_MEDIA)
    if (!g_strcmp0(contextType, "drm-preferred-decryption-system-id")) {
        if (isMainThread()) {
            GST_ERROR("can't handle drm-preferred-decryption-system-id need-context messages in the main thread");
            ASSERT_NOT_REACHED();
            return false;
        }
        GST_DEBUG_OBJECT(pipeline(), "handling drm-preferred-decryption-system-id need-context message");
        LockHolder lock(m_protectionMutex);
        ProtectionSystemEvents protectionSystemEvents(message);
        GST_TRACE("found %zu protection events, %zu decryptors available", protectionSystemEvents.events().size(), protectionSystemEvents.availableSystems().size());
        InitData initData;

        for (auto& event : protectionSystemEvents.events()) {
            const char* eventKeySystemId = nullptr;
            GstBuffer* data = nullptr;
            gst_event_parse_protection(event.get(), &eventKeySystemId, &data, nullptr);

            initData.append({eventKeySystemId, data});
            m_handledProtectionEvents.add(GST_EVENT_SEQNUM(event.get()));
        }

        initializationDataEncountered(WTFMove(initData));

        GST_INFO_OBJECT(pipeline(), "waiting for a CDM instance");
        m_protectionCondition.waitFor(m_protectionMutex, Seconds(4), [this] {
            return this->m_cdmInstance;
        });

        if (m_cdmInstance && !m_cdmInstance->keySystem().isEmpty()) {
            const char* preferredKeySystemUuid = GStreamerEMEUtilities::keySystemToUuid(m_cdmInstance->keySystem());
            GST_INFO_OBJECT(pipeline(), "working with key system %s, continuing with key system %s on %s", m_cdmInstance->keySystem().utf8().data(), preferredKeySystemUuid, GST_MESSAGE_SRC_NAME(message));

            GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-preferred-decryption-system-id", FALSE));
            GstStructure* contextStructure = gst_context_writable_structure(context.get());
            gst_structure_set(contextStructure, "decryption-system-id", G_TYPE_STRING, preferredKeySystemUuid, nullptr);
            gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context.get());
        } else
            GST_WARNING("CDM instance not initialized");

        return true;
    }
#endif // ENABLE(ENCRYPTED_MEDIA)

    return false;
}

#if USE(GSTREAMER_GL)
GstContext* MediaPlayerPrivateGStreamerBase::requestGLContext(const char* contextType)
{
    if (!ensureGstGLContext())
        return nullptr;

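    // GStreamer GL elements request two context types: the wrapped GstGLDisplay
    // (GST_GL_DISPLAY_CONTEXT_TYPE) and the application GL context ("gst.gl.app_context").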
    if (!g_strcmp0(contextType, GST_GL_DISPLAY_CONTEXT_TYPE)) {
        GstContext* displayContext = gst_context_new(GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
        gst_context_set_gl_display(displayContext, gstGLDisplay());
        return displayContext;
    }

    if (!g_strcmp0(contextType, "gst.gl.app_context")) {
        GstContext* appContext = gst_context_new("gst.gl.app_context", TRUE);
        GstStructure* structure = gst_context_writable_structure(appContext);
#if GST_CHECK_VERSION(1, 11, 0)
        gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, gstGLContext(), nullptr);
#else
        gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, gstGLContext(), nullptr);
#endif
        return appContext;
    }

    return nullptr;
}

bool MediaPlayerPrivateGStreamerBase::ensureGstGLContext()
{
    if (m_glContext)
        return true;

    auto& sharedDisplay = PlatformDisplay::sharedDisplayForCompositing();

    // The floating ref removal support was added in https://bugzilla.gnome.org/show_bug.cgi?id=743062.
    bool shouldAdoptRef = webkitGstCheckVersion(1, 14, 0);
    if (!m_glDisplay) {
#if PLATFORM(X11)
#if USE(GLX)
        if (is<PlatformDisplayX11>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating X11 shared GL display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native()));
        }
#elif USE(EGL)
        if (is<PlatformDisplayX11>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating X11 shared EGL display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay()));
        }
#endif
#endif

#if PLATFORM(WAYLAND)
        if (is<PlatformDisplayWayland>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating Wayland shared display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay()));
        }
#endif

#if PLATFORM(WPE)
        ASSERT(is<PlatformDisplayLibWPE>(sharedDisplay));
        GST_DEBUG_OBJECT(pipeline(), "Creating WPE shared EGL display");
        if (shouldAdoptRef)
            m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayLibWPE>(sharedDisplay).eglDisplay())));
        else
            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayLibWPE>(sharedDisplay).eglDisplay()));
#endif

        ASSERT(m_glDisplay);
    }

    GLContext* webkitContext = sharedDisplay.sharingGLContext();
    // EGL and GLX are mutually exclusive, no need for ifdefs here.
    GstGLPlatform glPlatform = webkitContext->isEGLContext() ? GST_GL_PLATFORM_EGL : GST_GL_PLATFORM_GLX;

#if USE(OPENGL_ES)
    GstGLAPI glAPI = GST_GL_API_GLES2;
#elif USE(OPENGL)
    GstGLAPI glAPI = GST_GL_API_OPENGL;
#else
    ASSERT_NOT_REACHED();
#endif

    PlatformGraphicsContext3D contextHandle = webkitContext->platformContext();
    if (!contextHandle)
        return false;

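    // Wrap WebKit's sharing GL context instead of creating a new one, so GStreamer GL
    // elements share state and resources with the compositor's context.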
    if (shouldAdoptRef)
        m_glContext = adoptGRef(gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI));
    else
        m_glContext = gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI);

    return true;
}
#endif // USE(GSTREAMER_GL)

// Returns the natural size of the video, i.e. the frame size adjusted by the pixel aspect ratio.
FloatSize MediaPlayerPrivateGStreamerBase::naturalSize() const
{
#if USE(GSTREAMER_HOLEPUNCH)
    // When using the hole-punch mechanism we may not be able to get the video frame size, so
    // we can't use it. But we need to report some non-empty naturalSize for the player's
    // GraphicsLayer to be properly created.
    return s_holePunchDefaultFrameSize;
#endif

    if (!hasVideo())
        return FloatSize();

    if (!m_videoSize.isEmpty())
        return m_videoSize;

    auto sampleLocker = holdLock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return FloatSize();

    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    if (!caps)
        return FloatSize();

    // TODO: handle possible clean aperture data. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596571
    // TODO: handle possible transformation matrix. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596326

    // Get the video PAR and original size; if this fails, the video sink has likely not yet
    // negotiated its caps.
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    IntSize originalSize;
    GstVideoFormat format;
    if (!getVideoSizeAndFormatFromCaps(caps, originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
        return FloatSize();

#if USE(TEXTURE_MAPPER_GL)
    // When using accelerated compositing, if the video is tagged as rotated 90 or 270 degrees, swap width and height.
    if (m_renderingCanBeAccelerated) {
        if (m_videoSourceOrientation.usesWidthAsHeight())
            originalSize = originalSize.transposedSize();
    }
#endif

    GST_DEBUG_OBJECT(pipeline(), "Original video size: %dx%d", originalSize.width(), originalSize.height());
    GST_DEBUG_OBJECT(pipeline(), "Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);

    // Calculate DAR based on PAR and video size.
    int displayWidth = originalSize.width() * pixelAspectRatioNumerator;
    int displayHeight = originalSize.height() * pixelAspectRatioDenominator;

    // Divide display width and height by their GCD to avoid possible overflows.
    int displayAspectRatioGCD = greatestCommonDivisor(displayWidth, displayHeight);
    displayWidth /= displayAspectRatioGCD;
    displayHeight /= displayAspectRatioGCD;

    // Apply DAR to the original video size. This is the same behavior as in xvimagesink's setcaps function.
    guint64 width = 0, height = 0;
    if (!(originalSize.height() % displayHeight)) {
        GST_DEBUG_OBJECT(pipeline(), "Keeping video original height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    } else if (!(originalSize.width() % displayWidth)) {
        GST_DEBUG_OBJECT(pipeline(), "Keeping video original width");
        height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth);
        width = static_cast<guint64>(originalSize.width());
    } else {
        GST_DEBUG_OBJECT(pipeline(), "Approximating while keeping original video height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    }

    GST_DEBUG_OBJECT(pipeline(), "Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
    m_videoSize = FloatSize(static_cast<int>(width), static_cast<int>(height));
    return m_videoSize;
}

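// Volumes are set and read on the cubic scale (GST_STREAM_VOLUME_FORMAT_CUBIC), which
// tracks perceived loudness more closely than a linear scale.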
void MediaPlayerPrivateGStreamerBase::setVolume(float volume)
{
    if (!m_volumeElement)
        return;

    GST_DEBUG_OBJECT(pipeline(), "Setting volume: %f", volume);
    gst_stream_volume_set_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC, static_cast<double>(volume));
}

float MediaPlayerPrivateGStreamerBase::volume() const
{
    if (!m_volumeElement)
        return 0;

    return gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
}

void MediaPlayerPrivateGStreamerBase::notifyPlayerOfVolumeChange()
{
    if (!m_player || !m_volumeElement)
        return;

    // gst_stream_volume_get_volume() can return values greater than 1.0 if the user applies
    // software gain through a third-party application (GNOME volume control, for instance).
    double volume = gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
    volume = CLAMP(volume, 0.0, 1.0);
    m_player->volumeChanged(static_cast<float>(volume));
}

void MediaPlayerPrivateGStreamerBase::volumeChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::volume signal.
    GST_DEBUG_OBJECT(player->pipeline(), "Volume changed to: %f", player->volume());

    player->m_notifier->notify(MainThreadNotification::VolumeChanged, [player] {
        player->notifyPlayerOfVolumeChange();
    });
}

MediaPlayer::NetworkState MediaPlayerPrivateGStreamerBase::networkState() const
{
    return m_networkState;
}

MediaPlayer::ReadyState MediaPlayerPrivateGStreamerBase::readyState() const
{
    return m_readyState;
}

void MediaPlayerPrivateGStreamerBase::sizeChanged()
{
    notImplemented();
}

void MediaPlayerPrivateGStreamerBase::setMuted(bool mute)
{
    if (!m_volumeElement)
        return;

    bool currentValue = muted();
    if (currentValue == mute)
        return;

    GST_INFO_OBJECT(pipeline(), "Set muted to %s", toString(mute).utf8().data());
    g_object_set(m_volumeElement.get(), "mute", mute, nullptr);
}

bool MediaPlayerPrivateGStreamerBase::muted() const
{
    if (!m_volumeElement)
        return false;

    gboolean muted;
    g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
    GST_INFO_OBJECT(pipeline(), "Player is muted: %s", toString(static_cast<bool>(muted)).utf8().data());
    return muted;
}

void MediaPlayerPrivateGStreamerBase::notifyPlayerOfMute()
{
    if (!m_player || !m_volumeElement)
        return;

    gboolean muted;
    g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
    m_player->muteChanged(static_cast<bool>(muted));
}

void MediaPlayerPrivateGStreamerBase::muteChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::mute signal.
    player->m_notifier->notify(MainThreadNotification::MuteChanged, [player] {
        player->notifyPlayerOfMute();
    });
}

void MediaPlayerPrivateGStreamerBase::acceleratedRenderingStateChanged()
{
    m_renderingCanBeAccelerated = m_player && m_player->client().mediaPlayerAcceleratedCompositingEnabled();
}

#if USE(TEXTURE_MAPPER_GL)
PlatformLayer* MediaPlayerPrivateGStreamerBase::platformLayer() const
{
#if USE(NICOSIA)
    return m_nicosiaLayer.ptr();
#else
    return const_cast<MediaPlayerPrivateGStreamerBase*>(this);
#endif
}

#if USE(NICOSIA)
void MediaPlayerPrivateGStreamerBase::swapBuffersIfNeeded()
{
#if USE(GSTREAMER_HOLEPUNCH)
    pushNextHolePunchBuffer();
#endif
}
#else
RefPtr<TextureMapperPlatformLayerProxy> MediaPlayerPrivateGStreamerBase::proxy() const
{
    return m_platformLayerProxy.copyRef();
}

void MediaPlayerPrivateGStreamerBase::swapBuffersIfNeeded()
{
#if USE(GSTREAMER_HOLEPUNCH)
    pushNextHolePunchBuffer();
#endif
}
#endif

void MediaPlayerPrivateGStreamerBase::pushTextureToCompositor()
{
    auto sampleLocker = holdLock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    auto proxyOperation =
        [this](TextureMapperPlatformLayerProxy& proxy)
        {
            LockHolder holder(proxy.lock());

            if (!proxy.isActive())
                return;

            std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), m_textureMapperFlags, !m_usingFallbackVideoSink);

            GLuint textureID = frameHolder->textureID();
            std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer;
            if (textureID) {
                layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(textureID, frameHolder->size(), frameHolder->flags(), GraphicsContext3D::RGBA);
                layerBuffer->setUnmanagedBufferDataHolder(WTFMove(frameHolder));
            } else {
                layerBuffer = proxy.getAvailableBuffer(frameHolder->size(), GL_DONT_CARE);
                if (UNLIKELY(!layerBuffer)) {
                    auto texture = BitmapTextureGL::create(TextureMapperContextAttributes::get());
                    texture->reset(frameHolder->size(), frameHolder->hasAlphaChannel() ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
                    layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(WTFMove(texture));
                }
                frameHolder->updateTexture(layerBuffer->textureGL());
                layerBuffer->setExtraFlags(m_textureMapperFlags | (frameHolder->hasAlphaChannel() ? TextureMapperGL::ShouldBlend : 0));
            }
            proxy.pushNextBuffer(WTFMove(layerBuffer));
        };

#if USE(NICOSIA)
    proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
#else
    proxyOperation(*m_platformLayerProxy);
#endif
}
#endif // USE(TEXTURE_MAPPER_GL)

void MediaPlayerPrivateGStreamerBase::repaint()
{
    ASSERT(m_sample);
    ASSERT(isMainThread());

    m_player->repaint();

    LockHolder lock(m_drawMutex);
    m_drawCondition.notifyOne();
}

void MediaPlayerPrivateGStreamerBase::triggerRepaint(GstSample* sample)
{
    bool triggerResize;
    {
        auto sampleLocker = holdLock(m_sampleMutex);
        triggerResize = !m_sample;
        m_sample = sample;
    }

    if (triggerResize) {
        GST_DEBUG_OBJECT(pipeline(), "First sample reached the sink, triggering video dimensions update");
        m_notifier->notify(MainThreadNotification::SizeChanged, [this] {
            m_player->sizeChanged();
        });
    }

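    // In non-accelerated mode the GStreamer thread schedules a repaint on the main thread and
    // blocks on m_drawCondition until the draw completes; cancelRepaint() releases this wait.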
    if (!m_renderingCanBeAccelerated) {
        LockHolder locker(m_drawMutex);
        if (m_destroying)
            return;
        m_drawTimer.startOneShot(0_s);
        m_drawCondition.wait(m_drawMutex);
        return;
    }

#if USE(TEXTURE_MAPPER_GL)
    if (m_usingFallbackVideoSink) {
        LockHolder lock(m_drawMutex);
        auto proxyOperation =
            [this](TextureMapperPlatformLayerProxy& proxy)
            {
                return proxy.scheduleUpdateOnCompositorThread([this] { this->pushTextureToCompositor(); });
            };
#if USE(NICOSIA)
        if (!proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy()))
            return;
#else
        if (!proxyOperation(*m_platformLayerProxy))
            return;
#endif
        m_drawTimer.startOneShot(0_s);
        m_drawCondition.wait(m_drawMutex);
    } else
        pushTextureToCompositor();
#endif // USE(TEXTURE_MAPPER_GL)
}

void MediaPlayerPrivateGStreamerBase::repaintCallback(MediaPlayerPrivateGStreamerBase* player, GstSample* sample)
{
    player->triggerRepaint(sample);
}

void MediaPlayerPrivateGStreamerBase::cancelRepaint(bool destroying)
{
    // The goal of this function is to release the GStreamer thread from m_drawCondition in triggerRepaint() in the non-AC case,
    // to avoid a deadlock if the player gets paused while waiting for drawing (see https://bugs.webkit.org/show_bug.cgi?id=170003):
    // the main thread is waiting for the GStreamer thread to pause, but the GStreamer thread is locked waiting for the
    // main thread to draw. This deadlock doesn't happen when using AC because the sample is processed (not painted) in the compositor
    // thread, so the main thread can request the pause and wait if the GStreamer thread is waiting for the compositor thread.
    //
    // This function is also used when destroying the player (destroying parameter is true), to release the GStreamer thread from
    // m_drawCondition and to ensure that new triggerRepaint calls won't wait on m_drawCondition.
    if (!m_renderingCanBeAccelerated) {
        LockHolder locker(m_drawMutex);
        m_drawTimer.stop();
        m_destroying = destroying;
        m_drawCondition.notifyOne();
    }
}

void MediaPlayerPrivateGStreamerBase::repaintCancelledCallback(MediaPlayerPrivateGStreamerBase* player)
{
    player->cancelRepaint();
}

#if USE(GSTREAMER_GL)
GstFlowReturn MediaPlayerPrivateGStreamerBase::newSampleCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
{
    GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_sample(GST_APP_SINK(sink)));
    player->triggerRepaint(sample.get());
    return GST_FLOW_OK;
}

GstFlowReturn MediaPlayerPrivateGStreamerBase::newPrerollCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
{
    GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_preroll(GST_APP_SINK(sink)));
    player->triggerRepaint(sample.get());
    return GST_FLOW_OK;
}

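// Drops the current sample, keeping only its caps, and asks the compositor-side proxy to drop
// its buffer too. Called on DRAIN queries, on flush-start events and, for V4L2 decoders, at
// destruction time, so no decoder-owned memory stays referenced.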
void MediaPlayerPrivateGStreamerBase::flushCurrentBuffer()
{
    auto sampleLocker = holdLock(m_sampleMutex);

    if (m_sample) {
        // Replace the current sample with a new one that carries only the caps, so the dummy
        // sample can still be used to get the dimensions. This prevents resizing problems when
        // the video changes its quality and a DRAIN is performed.
        const GstStructure* info = gst_sample_get_info(m_sample.get());
        m_sample = adoptGRef(gst_sample_new(nullptr, gst_sample_get_caps(m_sample.get()),
            gst_sample_get_segment(m_sample.get()), info ? gst_structure_copy(info) : nullptr));
    }

    bool shouldWait = m_videoDecoderPlatform == WebKitGstVideoDecoderPlatform::Video4Linux;
    auto proxyOperation = [shouldWait, pipeline = pipeline()](TextureMapperPlatformLayerProxy& proxy) {
        GST_DEBUG_OBJECT(pipeline, "Flushing video sample %s", shouldWait ? "synchronously" : "");
        LockHolder locker(!shouldWait ? &proxy.lock() : nullptr);

        if (proxy.isActive())
            proxy.dropCurrentBufferWhilePreservingTexture(shouldWait);
    };

#if USE(NICOSIA)
    proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
#else
    proxyOperation(*m_platformLayerProxy);
#endif
}
#endif

void MediaPlayerPrivateGStreamerBase::setSize(const IntSize& size)
{
    m_size = size;
}

void MediaPlayerPrivateGStreamerBase::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (context.paintingDisabled())
        return;

    if (!m_player->visible())
        return;

    auto sampleLocker = holdLock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    ImagePaintingOptions paintingOptions(CompositeCopy);
    if (m_renderingCanBeAccelerated)
        paintingOptions.m_orientationDescription.setImageOrientationEnum(m_videoSourceOrientation);

    auto gstImage = ImageGStreamer::createImage(m_sample.get());
    if (!gstImage)
        return;

    context.drawImage(gstImage->image(), rect, gstImage->rect(), paintingOptions);
}

#if USE(GSTREAMER_GL)
bool MediaPlayerPrivateGStreamerBase::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    UNUSED_PARAM(context);

    if (m_usingFallbackVideoSink)
        return false;

    if (premultiplyAlpha)
        return false;

    auto sampleLocker = holdLock(m_sampleMutex);

    if (!GST_IS_SAMPLE(m_sample.get()))
        return false;

    std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), m_textureMapperFlags, true);

    auto textureID = frameHolder->textureID();
    if (!textureID)
        return false;

    auto size = frameHolder->size();
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);

    return m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, outputTexture, outputTarget, level, internalFormat, format, type, flipY, m_videoSourceOrientation);
}

NativeImagePtr MediaPlayerPrivateGStreamerBase::nativeImageForCurrentTime()
{
#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
    if (m_usingFallbackVideoSink)
        return nullptr;

    auto sampleLocker = holdLock(m_sampleMutex);

    if (!GST_IS_SAMPLE(m_sample.get()))
        return nullptr;

    std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), m_textureMapperFlags, true);

    auto textureID = frameHolder->textureID();
    if (!textureID)
        return nullptr;

    auto size = frameHolder->size();
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();

    GLContext* context = PlatformDisplay::sharedDisplayForCompositing().sharingGLContext();
    context->makeContextCurrent();

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);

    if (!m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, 0, GraphicsContext3D::TEXTURE_2D, 0, GraphicsContext3D::RGBA, GraphicsContext3D::RGBA, GraphicsContext3D::UNSIGNED_BYTE, false, m_videoSourceOrientation))
        return nullptr;

    return adoptRef(cairo_gl_surface_create_for_texture(context->cairoDevice(), CAIRO_CONTENT_COLOR_ALPHA, m_videoTextureCopier->resultTexture(), size.width(), size.height()));
#else
    return nullptr;
#endif
}
#endif // USE(GSTREAMER_GL)

void MediaPlayerPrivateGStreamerBase::setVideoSourceOrientation(const ImageOrientation& orientation)
{
    if (m_videoSourceOrientation == orientation)
        return;

    m_videoSourceOrientation = orientation;
#if USE(TEXTURE_MAPPER_GL)
    updateTextureMapperFlags();
#endif
}

#if USE(TEXTURE_MAPPER_GL)
void MediaPlayerPrivateGStreamerBase::updateTextureMapperFlags()
{
    switch (m_videoSourceOrientation) {
    case DefaultImageOrientation:
        m_textureMapperFlags = 0;
        break;
    case OriginRightTop:
        m_textureMapperFlags = TextureMapperGL::ShouldRotateTexture90;
        break;
    case OriginBottomRight:
        m_textureMapperFlags = TextureMapperGL::ShouldRotateTexture180;
        break;
    case OriginLeftBottom:
        m_textureMapperFlags = TextureMapperGL::ShouldRotateTexture270;
        break;
    default:
        // FIXME: Handle OriginTopRight, OriginBottomLeft, OriginLeftTop and OriginRightBottom?
        m_textureMapperFlags = 0;
        break;
    }

#if USE(GSTREAMER_GL)
    // When the imxvpudecoder is used, the texture sampling of the
    // directviv-uploaded texture returns an RGB value, so there's no need to
    // convert it.
    if (m_videoDecoderPlatform != WebKitGstVideoDecoderPlatform::ImxVPU)
        m_textureMapperFlags |= TEXTURE_MAPPER_COLOR_CONVERT_FLAG;
#endif
}
#endif

bool MediaPlayerPrivateGStreamerBase::supportsFullscreen() const
{
    return true;
}

MediaPlayer::MovieLoadType MediaPlayerPrivateGStreamerBase::movieLoadType() const
{
    if (m_readyState == MediaPlayer::HaveNothing)
        return MediaPlayer::Unknown;

    if (isLiveStream())
        return MediaPlayer::LiveStream;

    return MediaPlayer::Download;
}

#if USE(GSTREAMER_GL)
GstElement* MediaPlayerPrivateGStreamerBase::createGLAppSink()
{
    if (!webkitGstCheckVersion(1, 8, 0))
        return nullptr;

    GstElement* appsink = gst_element_factory_make("appsink", "webkit-gl-video-sink");
    if (!appsink)
        return nullptr;

    g_object_set(appsink, "enable-last-sample", FALSE, "emit-signals", TRUE, "max-buffers", 1, nullptr);
    g_signal_connect(appsink, "new-sample", G_CALLBACK(newSampleCallback), this);
    g_signal_connect(appsink, "new-preroll", G_CALLBACK(newPrerollCallback), this);

    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(appsink, "sink"));
    gst_pad_add_probe(pad.get(), static_cast<GstPadProbeType>(GST_PAD_PROBE_TYPE_PUSH | GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM | GST_PAD_PROBE_TYPE_EVENT_FLUSH), [] (GstPad*, GstPadProbeInfo* info, gpointer userData) -> GstPadProbeReturn {
        // In some platforms (e.g. OpenMAX on the Raspberry Pi) when a resolution change occurs the
        // pipeline has to be drained before a frame with the new resolution can be decoded.
        // In this context, it's important that we don't hold references to any previous frame
        // (e.g. m_sample) so that decoding can continue.
        // We are also not supposed to keep the original frame after a flush.
        if (info->type & GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM) {
            if (GST_QUERY_TYPE(GST_PAD_PROBE_INFO_QUERY(info)) != GST_QUERY_DRAIN)
                return GST_PAD_PROBE_OK;
            GST_DEBUG("Acting upon DRAIN query");
        }
        if (info->type & GST_PAD_PROBE_TYPE_EVENT_FLUSH) {
            if (GST_EVENT_TYPE(GST_PAD_PROBE_INFO_EVENT(info)) != GST_EVENT_FLUSH_START)
                return GST_PAD_PROBE_OK;
            GST_DEBUG("Acting upon flush-start event");
        }

        auto* player = static_cast<MediaPlayerPrivateGStreamerBase*>(userData);
        player->flushCurrentBuffer();
        return GST_PAD_PROBE_OK;
    }, this, nullptr);

    return appsink;
}

GstElement* MediaPlayerPrivateGStreamerBase::createVideoSinkGL()
{
    if (!webkitGstCheckVersion(1, 8, 0))
        return nullptr;

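    // The GL sink is a bin of glupload ! glcolorconvert ! appsink behind a ghost pad, so the
    // appsink receives GL memory in one of the formats listed in GST_GL_CAPS_FORMAT.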
    gboolean result = TRUE;
    GstElement* videoSink = gst_bin_new(nullptr);
    GstElement* upload = gst_element_factory_make("glupload", nullptr);
    GstElement* colorconvert = gst_element_factory_make("glcolorconvert", nullptr);
    GstElement* appsink = createGLAppSink();

    if (!appsink || !upload || !colorconvert) {
        GST_WARNING("Failed to create GstGL elements");
        gst_object_unref(videoSink);

        if (upload)
            gst_object_unref(upload);
        if (colorconvert)
            gst_object_unref(colorconvert);
        if (appsink)
            gst_object_unref(appsink);

        g_warning("WebKit wasn't able to find the GStreamer opengl plugin. Hardware-accelerated zero-copy video rendering can't be enabled without this plugin.");
        return nullptr;
    }

    gst_bin_add_many(GST_BIN(videoSink), upload, colorconvert, appsink, nullptr);

    GRefPtr<GstCaps> caps = adoptGRef(gst_caps_from_string("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY "), format = (string) " GST_GL_CAPS_FORMAT));

    result &= gst_element_link_pads(upload, "src", colorconvert, "sink");
    result &= gst_element_link_pads_filtered(colorconvert, "src", appsink, "sink", caps.get());

    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(upload, "sink"));
    gst_element_add_pad(videoSink, gst_ghost_pad_new("sink", pad.get()));

    if (!result) {
        GST_WARNING("Failed to link GstGL elements");
        gst_object_unref(videoSink);
        videoSink = nullptr;
    }
    return videoSink;
}

void MediaPlayerPrivateGStreamerBase::ensureGLVideoSinkContext()
{
    if (!m_glDisplayElementContext)
        m_glDisplayElementContext = adoptGRef(requestGLContext(GST_GL_DISPLAY_CONTEXT_TYPE));

    if (m_glDisplayElementContext)
        gst_element_set_context(m_videoSink.get(), m_glDisplayElementContext.get());

    if (!m_glAppElementContext)
        m_glAppElementContext = adoptGRef(requestGLContext("gst.gl.app_context"));

    if (m_glAppElementContext)
        gst_element_set_context(m_videoSink.get(), m_glAppElementContext.get());
}
#endif // USE(GSTREAMER_GL)

#if USE(GSTREAMER_HOLEPUNCH)
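// With the hole-punch mechanism the video is rendered by an independent (usually hardware)
// plane below the page; WebKit only pushes a placeholder buffer that relays the video
// rectangle to the video sink.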
static void setRectangleToVideoSink(GstElement* videoSink, const IntRect& rect)
{
    // Here goes the platform-dependent code to set the size and position of the video
    // rendering window to the videoSink. Mark the parameters unused by default.
    UNUSED_PARAM(videoSink);
    UNUSED_PARAM(rect);
}

class GStreamerHolePunchClient : public TextureMapperPlatformLayerBuffer::HolePunchClient {
public:
    GStreamerHolePunchClient(GRefPtr<GstElement>&& videoSink) : m_videoSink(WTFMove(videoSink)) { }
    void setVideoRectangle(const IntRect& rect) final { setRectangleToVideoSink(m_videoSink.get(), rect); }
private:
    GRefPtr<GstElement> m_videoSink;
};

GstElement* MediaPlayerPrivateGStreamerBase::createHolePunchVideoSink()
{
    // Here goes the platform-dependent code to create the videoSink. By default we use a
    // fakevideosink so nothing is drawn to the page.
    GstElement* videoSink = gst_element_factory_make("fakevideosink", nullptr);

    return videoSink;
}

void MediaPlayerPrivateGStreamerBase::pushNextHolePunchBuffer()
{
    auto proxyOperation =
        [this](TextureMapperPlatformLayerProxy& proxy)
        {
            LockHolder holder(proxy.lock());
            std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(0, m_size, TextureMapperGL::ShouldNotBlend, GL_DONT_CARE);
            std::unique_ptr<GStreamerHolePunchClient> holePunchClient = std::make_unique<GStreamerHolePunchClient>(m_videoSink.get());
            layerBuffer->setHolePunchClient(WTFMove(holePunchClient));
            proxy.pushNextBuffer(WTFMove(layerBuffer));
        };

#if USE(NICOSIA)
    proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
#else
    proxyOperation(*m_platformLayerProxy);
#endif
}
#endif

GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink()
{
    acceleratedRenderingStateChanged();

#if USE(GSTREAMER_HOLEPUNCH)
    m_videoSink = createHolePunchVideoSink();
    pushNextHolePunchBuffer();
    return m_videoSink.get();
#endif

#if USE(GSTREAMER_GL)
    if (m_renderingCanBeAccelerated)
        m_videoSink = createVideoSinkGL();
#endif

    if (!m_videoSink) {
        m_usingFallbackVideoSink = true;
        m_videoSink = webkitVideoSinkNew();
        g_signal_connect_swapped(m_videoSink.get(), "repaint-requested", G_CALLBACK(repaintCallback), this);
        g_signal_connect_swapped(m_videoSink.get(), "repaint-cancelled", G_CALLBACK(repaintCancelledCallback), this);
    }

    GstElement* videoSink = nullptr;
#if ENABLE(MEDIA_STATISTICS)
    m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
    if (m_fpsSink) {
        g_object_set(m_fpsSink.get(), "silent", TRUE, nullptr);

        // Turn off text overlay unless tracing is enabled.
        if (gst_debug_category_get_threshold(webkit_media_player_debug) < GST_LEVEL_TRACE)
            g_object_set(m_fpsSink.get(), "text-overlay", FALSE, nullptr);

        if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink.get()), "video-sink")) {
            g_object_set(m_fpsSink.get(), "video-sink", m_videoSink.get(), nullptr);
            videoSink = m_fpsSink.get();
        } else
            m_fpsSink = nullptr;
    }
#endif

    if (!m_fpsSink)
        videoSink = m_videoSink.get();

    ASSERT(videoSink);

    return videoSink;
}

void MediaPlayerPrivateGStreamerBase::setStreamVolumeElement(GstStreamVolume* volume)
{
    ASSERT(!m_volumeElement);
    m_volumeElement = volume;

    // We don't set the initial volume because we trust the sink to keep it for us. See
    // https://bugs.webkit.org/show_bug.cgi?id=118974 for more information.
    if (!m_player->platformVolumeConfigurationRequired()) {
        GST_DEBUG_OBJECT(pipeline(), "Setting stream volume to %f", m_player->volume());
        g_object_set(m_volumeElement.get(), "volume", m_player->volume(), nullptr);
    } else
        GST_DEBUG_OBJECT(pipeline(), "Not setting stream volume, trusting system one");

    GST_DEBUG_OBJECT(pipeline(), "Setting stream muted %s", toString(m_player->muted()).utf8().data());
    g_object_set(m_volumeElement.get(), "mute", m_player->muted(), nullptr);

    g_signal_connect_swapped(m_volumeElement.get(), "notify::volume", G_CALLBACK(volumeChangedCallback), this);
    g_signal_connect_swapped(m_volumeElement.get(), "notify::mute", G_CALLBACK(muteChangedCallback), this);
}

unsigned MediaPlayerPrivateGStreamerBase::decodedFrameCount() const
{
    guint64 decodedFrames = 0;
    if (m_fpsSink)
        g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, nullptr);
    return static_cast<unsigned>(decodedFrames);
}

unsigned MediaPlayerPrivateGStreamerBase::droppedFrameCount() const
{
    guint64 framesDropped = 0;
    if (m_fpsSink)
        g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, nullptr);
    return static_cast<unsigned>(framesDropped);
}

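// Decoded byte counts are approximated with a position query in GST_FORMAT_BYTES on the
// corresponding sink, which reports how many bytes it has consumed so far.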
unsigned MediaPlayerPrivateGStreamerBase::audioDecodedByteCount() const
{
    GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
    gint64 position = 0;

    if (audioSink() && gst_element_query(audioSink(), query))
        gst_query_parse_position(query, 0, &position);

    gst_query_unref(query);
    return static_cast<unsigned>(position);
}

unsigned MediaPlayerPrivateGStreamerBase::videoDecodedByteCount() const
{
    GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
    gint64 position = 0;

    if (gst_element_query(m_videoSink.get(), query))
        gst_query_parse_position(query, 0, &position);

    gst_query_unref(query);
    return static_cast<unsigned>(position);
}

#if ENABLE(ENCRYPTED_MEDIA)
void MediaPlayerPrivateGStreamerBase::initializationDataEncountered(InitData&& initData)
{
    ASSERT(!isMainThread());

    RunLoop::main().dispatch([weakThis = makeWeakPtr(*this), initData = WTFMove(initData)] {
        if (!weakThis)
            return;

        GST_DEBUG("scheduling initializationDataEncountered event of size %zu", initData.payload()->size());
        GST_MEMDUMP("init data", reinterpret_cast<const uint8_t*>(initData.payload()->data()), initData.payload()->size());
        weakThis->m_player->initializationDataEncountered(initData.payloadContainerType(), initData.payload()->tryCreateArrayBuffer());
    });
}

void MediaPlayerPrivateGStreamerBase::cdmInstanceAttached(CDMInstance& instance)
{
    ASSERT(isMainThread());

    if (m_cdmInstance == &instance)
        return;

    if (!m_pipeline) {
        GST_ERROR("no pipeline yet");
        ASSERT_NOT_REACHED();
        return;
    }

    m_cdmInstance = &instance;

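    // Distribute the CDM instance to the whole pipeline (notably the CENC decryptors) through
    // a pipeline-wide GstContext, and wake up any streaming thread blocked in
    // handleSyncMessage() waiting for it.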
    GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-cdm-instance", FALSE));
    GstStructure* contextStructure = gst_context_writable_structure(context.get());
    gst_structure_set(contextStructure, "cdm-instance", G_TYPE_POINTER, m_cdmInstance.get(), nullptr);
    gst_element_set_context(GST_ELEMENT(m_pipeline.get()), context.get());

    GST_DEBUG_OBJECT(m_pipeline.get(), "CDM instance %p dispatched as context", m_cdmInstance.get());

    m_protectionCondition.notifyAll();
}

void MediaPlayerPrivateGStreamerBase::cdmInstanceDetached(CDMInstance& instance)
{
    ASSERT(isMainThread());

    if (m_cdmInstance != &instance) {
        GST_WARNING("passed CDMInstance %p is different from the stored one %p", &instance, m_cdmInstance.get());
        ASSERT_NOT_REACHED();
        return;
    }

    ASSERT(m_pipeline);

    GST_DEBUG_OBJECT(m_pipeline.get(), "detaching CDM instance %p, setting empty context", m_cdmInstance.get());
    m_cdmInstance = nullptr;

    GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-cdm-instance", FALSE));
    gst_element_set_context(GST_ELEMENT(m_pipeline.get()), context.get());

    m_protectionCondition.notifyAll();
}

void MediaPlayerPrivateGStreamerBase::attemptToDecryptWithInstance(CDMInstance& instance)
{
    ASSERT(m_cdmInstance.get() == &instance);
    GST_TRACE("instance %p, currently stored %p", &instance, m_cdmInstance.get());
    attemptToDecryptWithLocalInstance();
}

void MediaPlayerPrivateGStreamerBase::attemptToDecryptWithLocalInstance()
{
    bool eventHandled = gst_element_send_event(pipeline(), gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM_OOB, gst_structure_new_empty("attempt-to-decrypt")));
    GST_DEBUG("attempting to decrypt, event handled %s", boolForPrinting(eventHandled));
}

void MediaPlayerPrivateGStreamerBase::handleProtectionEvent(GstEvent* event)
{
    if (m_handledProtectionEvents.contains(GST_EVENT_SEQNUM(event))) {
        GST_DEBUG_OBJECT(pipeline(), "event %u already handled", GST_EVENT_SEQNUM(event));
        return;
    }
    GST_DEBUG_OBJECT(pipeline(), "handling event %u from MSE", GST_EVENT_SEQNUM(event));
    const char* eventKeySystemUUID = nullptr;
    GstBuffer* initData = nullptr;
    gst_event_parse_protection(event, &eventKeySystemUUID, &initData, nullptr);
    initializationDataEncountered({eventKeySystemUUID, initData});
}

void MediaPlayerPrivateGStreamerBase::setWaitingForKey(bool waitingForKey)
{
    // We bail out if the value did not change or if we are asked to stop waiting while there are still waiting decryptors.
    GST_TRACE("waitingForKey %s, m_waitingForKey %s", boolForPrinting(waitingForKey), boolForPrinting(m_waitingForKey));
    if (waitingForKey == m_waitingForKey || (!waitingForKey && this->waitingForKey()))
        return;

    m_waitingForKey = waitingForKey;
    GST_DEBUG("waiting for key changed %s", boolForPrinting(m_waitingForKey));
    m_player->waitingForKeyChanged();
}

bool MediaPlayerPrivateGStreamerBase::waitingForKey() const
{
    if (!m_pipeline)
        return false;

    GstState state;
    gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);

    bool result = false;
    GRefPtr<GstQuery> query = adoptGRef(gst_query_new_custom(GST_QUERY_CUSTOM, gst_structure_new_empty("any-decryptor-waiting-for-key")));
    if (state >= GST_STATE_PAUSED) {
        result = gst_element_query(m_pipeline.get(), query.get());
        GST_TRACE("query result %s, on %s", boolForPrinting(result), gst_element_state_get_name(state));
    } else if (state >= GST_STATE_READY) {
        // Running a query on the pipeline is easier, but it only works when the pipeline is set up
        // and running; otherwise we need to inspect the pipeline and ask the decryptors directly.
        GUniquePtr<GstIterator> iterator(gst_bin_iterate_recurse(GST_BIN(m_pipeline.get())));
        GstIteratorResult iteratorResult;
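        // The fold stops early (returning GST_ITERATOR_OK) as soon as a CENC decryptor answers
        // the query, i.e. at least one decryptor is waiting for a key; GST_ITERATOR_DONE means
        // the whole bin was visited without finding one.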
        do {
            iteratorResult = gst_iterator_fold(iterator.get(), [](const GValue* item, GValue*, gpointer data) -> gboolean {
                GstElement* element = GST_ELEMENT(g_value_get_object(item));
                GstQuery* query = GST_QUERY(data);
                return !WEBKIT_IS_MEDIA_CENC_DECRYPT(element) || !gst_element_query(element, query);
            }, nullptr, query.get());
            if (iteratorResult == GST_ITERATOR_RESYNC)
                gst_iterator_resync(iterator.get());
        } while (iteratorResult == GST_ITERATOR_RESYNC);
        if (iteratorResult == GST_ITERATOR_ERROR)
            GST_WARNING("iterator returned an error");
        result = iteratorResult == GST_ITERATOR_OK;
        GST_TRACE("iterator result %d, waiting %s", iteratorResult, boolForPrinting(result));
    }

    return result;
}
#endif

bool MediaPlayerPrivateGStreamerBase::supportsKeySystem(const String& keySystem, const String& mimeType)
{
    bool result = false;

#if ENABLE(ENCRYPTED_MEDIA)
    result = GStreamerEMEUtilities::isClearKeyKeySystem(keySystem);
#endif

    GST_DEBUG("checking for KeySystem support with %s and type %s: %s", keySystem.utf8().data(), mimeType.utf8().data(), boolForPrinting(result));
    return result;
}

MediaPlayer::SupportsType MediaPlayerPrivateGStreamerBase::extendedSupportsType(const MediaEngineSupportParameters& parameters, MediaPlayer::SupportsType result)
{
    UNUSED_PARAM(parameters);
    return result;
}

}

#endif // ENABLE(VIDEO) && USE(GSTREAMER)