/*
 * Copyright (C) 2012, 2015, 2016 Igalia S.L
 * Copyright (C) 2015, 2016 Metrological Group B.V.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"
#include "GStreamerCommon.h"

#if USE(GSTREAMER)

#include "GstAllocatorFastMalloc.h"
#include "IntSize.h"
#include "SharedBuffer.h"
#include <gst/audio/audio-info.h>
#include <gst/gst.h>
#include <mutex>
#include <wtf/glib/GLibUtilities.h>
#include <wtf/glib/GUniquePtr.h>
#include <wtf/glib/RunLoopSourcePriority.h>

#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
#define GST_USE_UNSTABLE_API
#include <gst/mpegts/mpegts.h>
#undef GST_USE_UNSTABLE_API
#endif

#if ENABLE(MEDIA_SOURCE)
#include "WebKitMediaSourceGStreamer.h"
#endif

#if ENABLE(MEDIA_STREAM) && GST_CHECK_VERSION(1, 10, 0)
#include "GStreamerMediaStreamSource.h"
#endif

#if ENABLE(ENCRYPTED_MEDIA)
#include "WebKitClearKeyDecryptorGStreamer.h"
#endif

#if ENABLE(VIDEO)
#include "WebKitWebSourceGStreamer.h"
#endif

namespace WebCore {

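// Wraps a static pad template in a ghost pad. When a target pad is given the ghost pad
// proxies it; otherwise a target-less ghost pad is created from the template.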
GstPad* webkitGstGhostPadFromStaticTemplate(GstStaticPadTemplate* staticPadTemplate, const gchar* name, GstPad* target)
{
    GstPad* pad;
    GstPadTemplate* padTemplate = gst_static_pad_template_get(staticPadTemplate);

    if (target)
        pad = gst_ghost_pad_new_from_template(name, target, padTemplate);
    else
        pad = gst_ghost_pad_new_no_target_from_template(name, padTemplate);

    gst_object_unref(padTemplate);

    return pad;
}

#if ENABLE(VIDEO)
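// Extracts the frame size, pixel format, pixel aspect ratio and plane stride from video caps.
// Encrypted caps do not carry a full GstVideoInfo, so for them the format is reported as
// GST_VIDEO_FORMAT_ENCODED, the stride as 0, and only width, height and pixel-aspect-ratio
// are read from the caps structure.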
bool getVideoSizeAndFormatFromCaps(GstCaps* caps, WebCore::IntSize& size, GstVideoFormat& format, int& pixelAspectRatioNumerator, int& pixelAspectRatioDenominator, int& stride)
{
    if (!doCapsHaveType(caps, GST_VIDEO_CAPS_TYPE_PREFIX)) {
        GST_WARNING("Failed to get the video size and format, the caps are not video caps");
        return false;
    }

    if (areEncryptedCaps(caps)) {
        GstStructure* structure = gst_caps_get_structure(caps, 0);
        format = GST_VIDEO_FORMAT_ENCODED;
        stride = 0;
        int width = 0, height = 0;
        gst_structure_get_int(structure, "width", &width);
        gst_structure_get_int(structure, "height", &height);
        if (!gst_structure_get_fraction(structure, "pixel-aspect-ratio", &pixelAspectRatioNumerator, &pixelAspectRatioDenominator)) {
            pixelAspectRatioNumerator = 1;
            pixelAspectRatioDenominator = 1;
        }

        size.setWidth(width);
        size.setHeight(height);
    } else {
        GstVideoInfo info;
        gst_video_info_init(&info);
        if (!gst_video_info_from_caps(&info, caps))
            return false;

        format = GST_VIDEO_INFO_FORMAT(&info);
        size.setWidth(GST_VIDEO_INFO_WIDTH(&info));
        size.setHeight(GST_VIDEO_INFO_HEIGHT(&info));
        pixelAspectRatioNumerator = GST_VIDEO_INFO_PAR_N(&info);
        pixelAspectRatioDenominator = GST_VIDEO_INFO_PAR_D(&info);
        stride = GST_VIDEO_INFO_PLANE_STRIDE(&info, 0);
    }

    return true;
}

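// Returns the display resolution described by the caps, with the height adjusted by the
// pixel aspect ratio, or WTF::nullopt if the caps are not video caps.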
Optional<FloatSize> getVideoResolutionFromCaps(const GstCaps* caps)
{
    if (!doCapsHaveType(caps, GST_VIDEO_CAPS_TYPE_PREFIX)) {
        GST_WARNING("Failed to get the video resolution, the caps are not video caps");
        return WTF::nullopt;
    }

    int width = 0, height = 0;
    int pixelAspectRatioNumerator = 1, pixelAspectRatioDenominator = 1;

    if (areEncryptedCaps(caps)) {
        GstStructure* structure = gst_caps_get_structure(caps, 0);
        gst_structure_get_int(structure, "width", &width);
        gst_structure_get_int(structure, "height", &height);
        gst_structure_get_fraction(structure, "pixel-aspect-ratio", &pixelAspectRatioNumerator, &pixelAspectRatioDenominator);
    } else {
        GstVideoInfo info;
        gst_video_info_init(&info);
        if (!gst_video_info_from_caps(&info, caps))
            return WTF::nullopt;

        width = GST_VIDEO_INFO_WIDTH(&info);
        height = GST_VIDEO_INFO_HEIGHT(&info);
        pixelAspectRatioNumerator = GST_VIDEO_INFO_PAR_N(&info);
        pixelAspectRatioDenominator = GST_VIDEO_INFO_PAR_D(&info);
    }

    return makeOptional(FloatSize(width, height * (static_cast<float>(pixelAspectRatioDenominator) / static_cast<float>(pixelAspectRatioNumerator))));
}

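// Initializes videoInfo from the caps attached to the sample. Returns false if the sample is
// not a GstSample, carries no caps, or the caps cannot be parsed as video.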
bool getSampleVideoInfo(GstSample* sample, GstVideoInfo& videoInfo)
{
    if (!GST_IS_SAMPLE(sample))
        return false;

    GstCaps* caps = gst_sample_get_caps(sample);
    if (!caps)
        return false;

    gst_video_info_init(&videoInfo);
    if (!gst_video_info_from_caps(&videoInfo, caps))
        return false;

    return true;
}
#endif

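// Returns the media type name of the first structure of the caps. For encrypted caps the
// original (pre-encryption) media type stored in "original-media-type" is returned instead.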
const char* capsMediaType(const GstCaps* caps)
{
    ASSERT(caps);
    GstStructure* structure = gst_caps_get_structure(caps, 0);
    if (!structure) {
        GST_WARNING("caps are empty");
        return nullptr;
    }
#if ENABLE(ENCRYPTED_MEDIA)
    if (gst_structure_has_name(structure, "application/x-cenc") || gst_structure_has_name(structure, "application/x-webm-enc"))
        return gst_structure_get_string(structure, "original-media-type");
#endif
    return gst_structure_get_name(structure);
}

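// Returns true if the media type reported by capsMediaType() starts with the given prefix.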
bool doCapsHaveType(const GstCaps* caps, const char* type)
{
    const char* mediaType = capsMediaType(caps);
    if (!mediaType) {
        GST_WARNING("Failed to get MediaType");
        return false;
    }
    return g_str_has_prefix(mediaType, type);
}

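// Returns true if the caps describe encrypted content, that is, if their first structure is
// named "application/x-cenc" or "application/x-webm-enc". Always false when ENCRYPTED_MEDIA
// is disabled.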
bool areEncryptedCaps(const GstCaps* caps)
{
    ASSERT(caps);
#if ENABLE(ENCRYPTED_MEDIA)
    GstStructure* structure = gst_caps_get_structure(caps, 0);
    if (!structure) {
        GST_WARNING("caps are empty");
        return false;
    }
    return gst_structure_has_name(structure, "application/x-cenc") || gst_structure_has_name(structure, "application/x-webm-enc");
#else
    UNUSED_PARAM(caps);
    return false;
#endif
}

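// Collects the --gst* options passed on the process command line (read from
// /proc/self/cmdline) so they can be forwarded to gst_init_check().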
Vector<String> extractGStreamerOptionsFromCommandLine()
{
    GUniqueOutPtr<char> contents;
    gsize length;
    if (!g_file_get_contents("/proc/self/cmdline", &contents.outPtr(), &length, nullptr))
        return { };

    Vector<String> options;
    auto optionsString = String::fromUTF8(contents.get(), length);
    optionsString.split('\0', [&options](StringView item) {
        if (item.startsWith("--gst"))
            options.append(item.toString());
    });
    return options;
}

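// Initializes GStreamer exactly once. The options, if not provided by the caller, are taken
// from the process command line. Also installs the FastMalloc-based GstAllocator as the
// default allocator unless WEBKIT_GST_DISABLE_FAST_MALLOC is set.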
bool initializeGStreamer(Optional<Vector<String>>&& options)
{
    static std::once_flag onceFlag;
    static bool isGStreamerInitialized;
    std::call_once(onceFlag, [options = WTFMove(options)] {
        isGStreamerInitialized = false;

        // USE_PLAYBIN3 is dangerous for us because its potential sneaky effect
        // is to register the playbin3 element under the playbin namespace. We
        // can't allow this: when we create playbin, we want playbin2, not
        // playbin3.
        if (g_getenv("USE_PLAYBIN3"))
            WTFLogAlways("The USE_PLAYBIN3 variable was detected in the environment. Expect playback issues or please unset it.");

#if ENABLE(VIDEO) || ENABLE(WEB_AUDIO)
        Vector<String> parameters = options.valueOr(extractGStreamerOptionsFromCommandLine());
        char** argv = g_new0(char*, parameters.size() + 2);
        int argc = parameters.size() + 1;
        argv[0] = g_strdup(getCurrentExecutableName().data());
        for (unsigned i = 0; i < parameters.size(); i++)
            argv[i + 1] = g_strdup(parameters[i].utf8().data());

        GUniqueOutPtr<GError> error;
        isGStreamerInitialized = gst_init_check(&argc, &argv, &error.outPtr());
        ASSERT_WITH_MESSAGE(isGStreamerInitialized, "GStreamer initialization failed: %s", error ? error->message : "unknown error occurred");
        g_strfreev(argv);

        if (isFastMallocEnabled()) {
            const char* disableFastMalloc = getenv("WEBKIT_GST_DISABLE_FAST_MALLOC");
            if (!disableFastMalloc || !strcmp(disableFastMalloc, "0"))
                gst_allocator_set_default(GST_ALLOCATOR(g_object_new(gst_allocator_fast_malloc_get_type(), nullptr)));
        }

#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
        if (isGStreamerInitialized)
            gst_mpegts_initialize();
#endif
#endif
    });
    return isGStreamerInitialized;
}

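// Initializes GStreamer and, exactly once, registers the WebKit-provided GStreamer elements
// (ClearKey decryptor, MediaStream source, MSE source and web source), depending on the
// build options.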
bool initializeGStreamerAndRegisterWebKitElements()
{
    if (!initializeGStreamer())
        return false;

    static std::once_flag onceFlag;
    std::call_once(onceFlag, [] {
#if ENABLE(ENCRYPTED_MEDIA)
        gst_element_register(nullptr, "webkitclearkey", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_MEDIA_CK_DECRYPT);
#endif

#if ENABLE(MEDIA_STREAM) && GST_CHECK_VERSION(1, 10, 0)
        if (webkitGstCheckVersion(1, 10, 0))
            gst_element_register(nullptr, "mediastreamsrc", GST_RANK_PRIMARY, WEBKIT_TYPE_MEDIA_STREAM_SRC);
#endif

#if ENABLE(MEDIA_SOURCE)
        gst_element_register(nullptr, "webkitmediasrc", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_MEDIA_SRC);
#endif

#if ENABLE(VIDEO)
        gst_element_register(nullptr, "webkitwebsrc", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_WEB_SRC);
#endif
    });
    return true;
}

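// Looks up a flag of the GstPlayFlags enumeration by its nickname and returns its numeric
// value, or 0 if the flag is unknown.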
unsigned getGstPlayFlag(const char* nick)
{
    static GFlagsClass* flagsClass = static_cast<GFlagsClass*>(g_type_class_ref(g_type_from_name("GstPlayFlags")));
    ASSERT(flagsClass);

    GFlagsValue* flag = g_flags_get_value_by_nick(flagsClass, nick);
    if (!flag)
        return 0;

    return flag->value;
}

// Convert a MediaTime in seconds to a GstClockTime. Note that we can get MediaTime objects with a time scale that isn't GST_SECOND, since they can come to
// us through the internal testing API, the DOM and internally. It would be nice to assert the format of the incoming time, but all the media APIs assume time
// is passed around in fractional seconds, so we'll just have to assume the same.
uint64_t toGstUnsigned64Time(const MediaTime& mediaTime)
{
    MediaTime time = mediaTime.toTimeScale(GST_SECOND);
    if (time.isInvalid())
        return GST_CLOCK_TIME_NONE;
    return time.timeValue();
}

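// Debugging bus watch: on errors and on state changes of the top-level pipeline it logs the
// message and dumps the pipeline topology to a GraphViz dot file.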
static void simpleBusMessageCallback(GstBus*, GstMessage* message, GstBin* pipeline)
{
    switch (GST_MESSAGE_TYPE(message)) {
    case GST_MESSAGE_ERROR:
        GST_ERROR_OBJECT(pipeline, "Got message: %" GST_PTR_FORMAT, message);
        {
            WTF::String dotFileName = makeString(GST_OBJECT_NAME(pipeline), "_error");
            GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(pipeline, GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.utf8().data());
        }
        break;
    case GST_MESSAGE_STATE_CHANGED:
        if (GST_MESSAGE_SRC(message) == GST_OBJECT(pipeline)) {
            GstState oldState, newState, pending;
            gst_message_parse_state_changed(message, &oldState, &newState, &pending);

            GST_INFO_OBJECT(pipeline, "State changed (old: %s, new: %s, pending: %s)",
                gst_element_state_get_name(oldState),
                gst_element_state_get_name(newState),
                gst_element_state_get_name(pending));

            WTF::String dotFileName = makeString(
                GST_OBJECT_NAME(pipeline), '_',
                gst_element_state_get_name(oldState), '_',
                gst_element_state_get_name(newState));

            GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.utf8().data());
        }
        break;
    default:
        break;
    }
}

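// Detaches simpleBusMessageCallback from the bus of the given pipeline.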
void disconnectSimpleBusMessageCallback(GstElement* pipeline)
{
    GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(pipeline)));
    g_signal_handlers_disconnect_by_func(bus.get(), reinterpret_cast<gpointer>(simpleBusMessageCallback), pipeline);
}

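// Adds a signal watch to the bus of the given pipeline and routes its messages to
// simpleBusMessageCallback, dispatching them at run loop priority.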
void connectSimpleBusMessageCallback(GstElement* pipeline)
{
    GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(pipeline)));
    gst_bus_add_signal_watch_full(bus.get(), RunLoopSourcePriority::RunLoopDispatcher);
    g_signal_connect(bus.get(), "message", G_CALLBACK(simpleBusMessageCallback), pipeline);
}

Ref<SharedBuffer> GstMappedBuffer::createSharedBuffer()
{
    // SharedBuffer provides a read-only view on what it expects are
    // immutable data. Do not create one if the mapped data is writable,
    // and hence mutable.
    RELEASE_ASSERT(isSharable());

    return SharedBuffer::create(*this);
}

}

#endif // USE(GSTREAMER)