1 | /* |
2 | * Copyright (C) 2018 Metrological Group B.V. |
3 | * Copyright (C) 2018 Igalia S.L. All rights reserved. |
4 | * |
5 | * This library is free software; you can redistribute it and/or |
6 | * modify it under the terms of the GNU Library General Public |
7 | * License as published by the Free Software Foundation; either |
8 | * version 2 of the License, or (at your option) any later version. |
9 | * |
10 | * This library is distributed in the hope that it will be useful, |
11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
13 | * Library General Public License for more details. |
14 | * |
15 | * You should have received a copy of the GNU Library General Public License |
 * along with this library; see the file COPYING.LIB. If not, write to
17 | * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, |
18 | * Boston, MA 02110-1301, USA. |
19 | */ |
20 | |
21 | #include "config.h" |
22 | |
23 | #if ENABLE(VIDEO) && ENABLE(MEDIA_STREAM) && USE(LIBWEBRTC) && USE(GSTREAMER) |
24 | #include "GStreamerVideoDecoderFactory.h" |
25 | |
26 | #include "GStreamerVideoFrameLibWebRTC.h" |
27 | #include "webrtc/common_video/h264/h264_common.h" |
28 | #include "webrtc/common_video/h264/profile_level_id.h" |
29 | #include "webrtc/media/base/codec.h" |
30 | #include "webrtc/modules/video_coding/codecs/h264/include/h264.h" |
31 | #include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h" |
32 | #include "webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h" |
33 | #include "webrtc/modules/video_coding/include/video_codec_interface.h" |
34 | #include <gst/app/gstappsink.h> |
35 | #include <gst/app/gstappsrc.h> |
36 | #include <gst/video/video.h> |
37 | #include <mutex> |
38 | #include <wtf/Lock.h> |
39 | #include <wtf/StdMap.h> |
40 | #include <wtf/glib/RunLoopSourcePriority.h> |
41 | #include <wtf/text/WTFString.h> |
42 | |
43 | GST_DEBUG_CATEGORY(webkit_webrtcdec_debug); |
44 | #define GST_CAT_DEFAULT webkit_webrtcdec_debug |
45 | |
46 | namespace WebCore { |
47 | |
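// Timestamps attached to an input frame. They are kept in m_dtsPtsMap while the
// frame travels through the pipeline so they can be restored on the decoded output.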
48 | typedef struct { |
49 | uint64_t timestamp; |
50 | int64_t renderTimeMs; |
51 | } InputTimestamps; |
52 | |
53 | class GStreamerVideoDecoder : public webrtc::VideoDecoder { |
54 | public: |
    GStreamerVideoDecoder()
        : m_pictureId(0)
        , m_width(0)
        , m_height(0)
        , m_src(nullptr)
        , m_imageReadyCb(nullptr)
        , m_firstBufferPts(GST_CLOCK_TIME_NONE)
        , m_firstBufferDts(GST_CLOCK_TIME_NONE)
    {
    }
63 | |
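    // Links decodebin's dynamically added source pad to the capsfilter sink pad.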
64 | static void decodebinPadAddedCb(GstElement*, |
65 | GstPad* srcpad, |
66 | GstPad* sinkpad) |
67 | { |
68 | GST_INFO_OBJECT(srcpad, "connecting pad with %" GST_PTR_FORMAT, sinkpad); |
69 | if (gst_pad_link(srcpad, sinkpad) != GST_PAD_LINK_OK) |
70 | ASSERT_NOT_REACHED(); |
71 | } |
72 | |
73 | GstElement* pipeline() |
74 | { |
75 | return m_pipeline.get(); |
76 | } |
77 | |
78 | GstElement* makeElement(const gchar* factoryName) |
79 | { |
        GUniquePtr<char> name(g_strdup_printf("%s_dec_%s_%p", Name(), factoryName, this));
81 | |
82 | return gst_element_factory_make(factoryName, name.get()); |
83 | } |
84 | |
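    // Builds and starts the decoding pipeline:
    // appsrc ! decodebin ! capsfilter ! appsink, with decoded frames handed
    // back to LibWebRTC from the appsink "new-sample" callback.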
85 | int32_t InitDecode(const webrtc::VideoCodec* codecSettings, int32_t) override |
86 | { |
        m_src = makeElement("appsrc");
88 | |
89 | auto capsfilter = CreateFilter(); |
        auto decoder = makeElement("decodebin");
91 | |
92 | if (codecSettings) { |
93 | m_width = codecSettings->width; |
94 | m_height = codecSettings->height; |
95 | } |
96 | |
97 | // Make the decoder output "parsed" frames only and let the main decodebin |
98 | // do the real decoding. This allows us to have optimized decoding/rendering |
99 | // happening in the main pipeline. |
        g_object_set(decoder, "caps", adoptGRef(gst_caps_from_string(Caps())).get(), nullptr);
        auto sinkpad = adoptGRef(gst_element_get_static_pad(capsfilter, "sink"));
        g_signal_connect(decoder, "pad-added", G_CALLBACK(decodebinPadAddedCb), sinkpad.get());
103 | |
        m_pipeline = makeElement("pipeline");
105 | connectSimpleBusMessageCallback(m_pipeline.get()); |
106 | |
        auto sink = makeElement("appsink");
        gst_app_sink_set_emit_signals(GST_APP_SINK(sink), true);
        g_signal_connect(sink, "new-sample", G_CALLBACK(newSampleCallbackTramp), this);
        // This is a decoder, everything should happen as fast as possible and not
        // be synced on the clock.
        g_object_set(sink, "sync", false, nullptr);
113 | |
114 | gst_bin_add_many(GST_BIN(pipeline()), m_src, decoder, capsfilter, sink, nullptr); |
115 | if (!gst_element_link(m_src, decoder)) { |
116 | GST_ERROR_OBJECT(pipeline(), "Could not link src to decoder." ); |
117 | return WEBRTC_VIDEO_CODEC_ERROR; |
118 | } |
119 | |
120 | if (!gst_element_link(capsfilter, sink)) { |
121 | GST_ERROR_OBJECT(pipeline(), "Could not link capsfilter to sink." ); |
122 | return WEBRTC_VIDEO_CODEC_ERROR; |
123 | } |
124 | |
125 | if (gst_element_set_state(pipeline(), GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) { |
126 | GST_ERROR_OBJECT(pipeline(), "Could not set state to PLAYING." ); |
127 | return WEBRTC_VIDEO_CODEC_ERROR; |
128 | } |
129 | |
130 | return WEBRTC_VIDEO_CODEC_OK; |
131 | } |
132 | |
133 | int32_t RegisterDecodeCompleteCallback(webrtc::DecodedImageCallback* callback) |
134 | { |
135 | m_imageReadyCb = callback; |
136 | |
137 | return WEBRTC_VIDEO_CODEC_OK; |
138 | } |
139 | |
140 | virtual GstElement* CreateFilter() |
141 | { |
        return makeElement("identity");
143 | } |
144 | |
145 | int32_t Release() final |
146 | { |
147 | if (m_pipeline.get()) { |
148 | GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get()))); |
149 | gst_bus_set_sync_handler(bus.get(), nullptr, nullptr, nullptr); |
150 | |
151 | gst_element_set_state(m_pipeline.get(), GST_STATE_NULL); |
152 | m_src = nullptr; |
153 | m_pipeline = nullptr; |
154 | } |
155 | |
156 | return WEBRTC_VIDEO_CODEC_OK; |
157 | } |
158 | |
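    // Wraps the encoded image in a GstBuffer, rebases its timestamps against the
    // first buffer and pushes it into appsrc; the decoded frame comes back
    // asynchronously through newSampleCallback().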
159 | int32_t Decode(const webrtc::EncodedImage& inputImage, |
160 | bool, |
161 | const webrtc::CodecSpecificInfo*, |
162 | int64_t renderTimeMs) override |
163 | { |
164 | if (!m_src) { |
165 | GST_ERROR("No source set, can't decode." ); |
166 | |
167 | return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
168 | } |
169 | |
        if (!GST_CLOCK_TIME_IS_VALID(m_firstBufferPts)) {
            // Remember the first buffer timestamps so that following buffers can
            // be offset against them, keeping the GStreamer timeline starting at 0.
            m_firstBufferPts = static_cast<guint64>(renderTimeMs) * GST_MSECOND;
            m_firstBufferDts = static_cast<guint64>(inputImage.Timestamp()) * GST_MSECOND;
        }
175 | |
        // FIXME: Use a GstBufferPool.
177 | auto buffer = adoptGRef(gst_buffer_new_wrapped(g_memdup(inputImage._buffer, inputImage._size), |
178 | inputImage._size)); |
179 | GST_BUFFER_DTS(buffer.get()) = (static_cast<guint64>(inputImage.Timestamp()) * GST_MSECOND) - m_firstBufferDts; |
180 | GST_BUFFER_PTS(buffer.get()) = (static_cast<guint64>(renderTimeMs) * GST_MSECOND) - m_firstBufferPts; |
181 | { |
182 | auto locker = holdLock(m_bufferMapLock); |
183 | InputTimestamps timestamps = {inputImage.Timestamp(), renderTimeMs}; |
184 | m_dtsPtsMap[GST_BUFFER_PTS(buffer.get())] = timestamps; |
185 | } |
186 | |
187 | GST_LOG_OBJECT(pipeline(), "%ld Decoding: %" GST_PTR_FORMAT, renderTimeMs, buffer.get()); |
188 | auto sample = adoptGRef(gst_sample_new(buffer.get(), GetCapsForFrame(inputImage), nullptr, nullptr)); |
189 | switch (gst_app_src_push_sample(GST_APP_SRC(m_src), sample.get())) { |
190 | case GST_FLOW_OK: |
191 | return WEBRTC_VIDEO_CODEC_OK; |
192 | case GST_FLOW_FLUSHING: |
193 | return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
194 | default: |
195 | return WEBRTC_VIDEO_CODEC_ERROR; |
196 | } |
197 | } |
198 | |
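    // The caps are cached on first use; width/height fall back to the values
    // from InitDecode() when the encoded image does not carry its own.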
199 | virtual GstCaps* GetCapsForFrame(const webrtc::EncodedImage& image) |
200 | { |
201 | if (!m_caps) { |
202 | m_caps = adoptGRef(gst_caps_new_simple(Caps(), |
203 | "width" , G_TYPE_INT, image._encodedWidth ? image._encodedWidth : m_width, |
204 | "height" , G_TYPE_INT, image._encodedHeight ? image._encodedHeight : m_height, |
205 | nullptr)); |
206 | } |
207 | |
208 | return m_caps.get(); |
209 | } |
210 | |
    void AddDecoderIfSupported(std::vector<webrtc::SdpVideoFormat>& codecList)
212 | { |
213 | if (HasGstDecoder()) { |
214 | webrtc::SdpVideoFormat format = ConfigureSupportedDecoder(); |
215 | |
216 | codecList.push_back(format); |
217 | } |
218 | } |
219 | |
220 | virtual webrtc::SdpVideoFormat ConfigureSupportedDecoder() |
221 | { |
222 | return webrtc::SdpVideoFormat(Name()); |
223 | } |
224 | |
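    // Returns the best (highest rank, MARGINAL or above) GStreamer decoder
    // factory that can handle the given caps, or an empty GRefPtr if there is none.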
    static GRefPtr<GstElementFactory> GstDecoderFactory(const char* capsStr)
    {
        auto allDecoders = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER,
            GST_RANK_MARGINAL);
        auto caps = adoptGRef(gst_caps_from_string(capsStr));
        auto decoders = gst_element_factory_list_filter(allDecoders,
            caps.get(), GST_PAD_SINK, FALSE);

        gst_plugin_feature_list_free(allDecoders);
234 | GRefPtr<GstElementFactory> res; |
235 | if (decoders) |
236 | res = GST_ELEMENT_FACTORY(decoders->data); |
237 | gst_plugin_feature_list_free(decoders); |
238 | |
239 | return res; |
240 | } |
241 | |
242 | bool HasGstDecoder() |
243 | { |
244 | return GstDecoderFactory(Caps()); |
245 | } |
246 | |
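    // Called for each decoded sample coming out of the appsink: restores the
    // original input timestamps and delivers the frame to LibWebRTC.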
247 | GstFlowReturn newSampleCallback(GstElement* sink) |
248 | { |
249 | auto sample = gst_app_sink_pull_sample(GST_APP_SINK(sink)); |
250 | auto buffer = gst_sample_get_buffer(sample); |
251 | |
        InputTimestamps timestamps;
        {
            auto locker = holdLock(m_bufferMapLock);
            // Make sure that frame.timestamp() == the previously input
            // frame's _timeStamp, as required by the VideoDecoder base class.
            timestamps = m_dtsPtsMap[GST_BUFFER_PTS(buffer)];
            m_dtsPtsMap.erase(GST_BUFFER_PTS(buffer));
        }
258 | |
259 | auto frame(LibWebRTCVideoFrameFromGStreamerSample(sample, webrtc::kVideoRotation_0, |
260 | timestamps.timestamp, timestamps.renderTimeMs)); |
261 | |
262 | GST_BUFFER_DTS(buffer) = GST_CLOCK_TIME_NONE; |
263 | GST_LOG_OBJECT(pipeline(), "Output decoded frame! %d -> %" GST_PTR_FORMAT, |
264 | frame->timestamp(), buffer); |
265 | |
266 | m_imageReadyCb->Decoded(*frame.get(), absl::optional<int32_t>(), absl::optional<uint8_t>()); |
267 | |
268 | return GST_FLOW_OK; |
269 | } |
270 | |
271 | virtual const gchar* Caps() = 0; |
272 | virtual webrtc::VideoCodecType CodecType() = 0; |
    const char* ImplementationName() const { return "GStreamer"; }
274 | virtual const gchar* Name() = 0; |
275 | |
276 | protected: |
277 | GRefPtr<GstCaps> m_caps; |
278 | gint m_pictureId; |
279 | gint m_width; |
280 | gint m_height; |
281 | |
282 | private: |
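    // Trampoline marshalling the appsink "new-sample" signal onto the member function.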
    static GstFlowReturn newSampleCallbackTramp(GstElement* sink, GStreamerVideoDecoder* decoder)
    {
        return decoder->newSampleCallback(sink);
    }
287 | |
288 | GRefPtr<GstElement> m_pipeline; |
289 | GstElement* m_src; |
290 | |
291 | GstVideoInfo m_info; |
292 | webrtc::DecodedImageCallback* m_imageReadyCb; |
293 | |
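    // Maps each buffer PTS back to the input timestamps, guarded by m_bufferMapLock.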
294 | Lock m_bufferMapLock; |
295 | StdMap<GstClockTime, InputTimestamps> m_dtsPtsMap; |
296 | GstClockTime m_firstBufferPts; |
297 | GstClockTime m_firstBufferDts; |
298 | }; |
299 | |
300 | class H264Decoder : public GStreamerVideoDecoder { |
301 | public: |
302 | H264Decoder() { } |
303 | |
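    // Maps the negotiated LibWebRTC H.264 profile to the matching GStreamer
    // caps profile string. Note that m_profile is currently only stored, it is
    // not yet reflected in GetCapsForFrame().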
304 | int32_t InitDecode(const webrtc::VideoCodec* codecInfo, int32_t nCores) final |
305 | { |
306 | if (codecInfo && codecInfo->codecType != webrtc::kVideoCodecH264) |
307 | return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
308 | |
309 | m_profile = nullptr; |
310 | if (codecInfo) { |
311 | auto h264Info = codecInfo->H264(); |
312 | |
            switch (h264Info.profile) {
            case webrtc::H264::kProfileConstrainedBaseline:
                m_profile = "constrained-baseline";
                break;
            case webrtc::H264::kProfileBaseline:
                m_profile = "baseline";
                break;
            case webrtc::H264::kProfileMain:
                m_profile = "main";
                break;
            case webrtc::H264::kProfileConstrainedHigh:
                m_profile = "constrained-high";
                break;
            case webrtc::H264::kProfileHigh:
                m_profile = "high";
                break;
            }
330 | } |
331 | |
332 | return GStreamerVideoDecoder::InitDecode(codecInfo, nCores); |
333 | } |
334 | |
335 | GstCaps* GetCapsForFrame(const webrtc::EncodedImage& image) final |
336 | { |
337 | if (!m_caps) { |
338 | m_caps = adoptGRef(gst_caps_new_simple(Caps(), |
339 | "width" , G_TYPE_INT, image._encodedWidth ? image._encodedWidth : m_width, |
340 | "height" , G_TYPE_INT, image._encodedHeight ? image._encodedHeight : m_height, |
341 | "alignment" , G_TYPE_STRING, "au" , |
342 | nullptr)); |
343 | } |
344 | |
345 | return m_caps.get(); |
346 | } |
347 | const gchar* Caps() final { return "video/x-h264" ; } |
348 | const gchar* Name() final { return cricket::kH264CodecName; } |
349 | webrtc::VideoCodecType CodecType() final { return webrtc::kVideoCodecH264; } |
350 | |
351 | private: |
352 | const gchar* m_profile; |
353 | }; |
354 | |
355 | class VP8Decoder : public GStreamerVideoDecoder { |
356 | public: |
357 | VP8Decoder() { } |
358 | const gchar* Caps() final { return "video/x-vp8" ; } |
359 | const gchar* Name() final { return cricket::kVp8CodecName; } |
360 | webrtc::VideoCodecType CodecType() final { return webrtc::kVideoCodecVP8; } |
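    // GStreamer's vp8dec wraps the same libvpx that LibWebRTC already bundles,
    // so if it is the best decoder available, prefer LibWebRTC's built-in one.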
361 | static std::unique_ptr<webrtc::VideoDecoder> Create() |
362 | { |
        auto factory = GstDecoderFactory("video/x-vp8");
364 | |
        if (factory && !g_strcmp0(GST_OBJECT_NAME(GST_OBJECT(factory.get())), "vp8dec")) {
            GST_INFO("Our best GStreamer VP8 decoder is vp8dec, use the one from LibWebRTC instead.");
367 | |
368 | return std::unique_ptr<webrtc::VideoDecoder>(new webrtc::LibvpxVp8Decoder()); |
369 | } |
370 | |
371 | return std::unique_ptr<webrtc::VideoDecoder>(new VP8Decoder()); |
372 | } |
373 | }; |
374 | |
std::unique_ptr<webrtc::VideoDecoder> GStreamerVideoDecoderFactory::CreateVideoDecoder(const webrtc::SdpVideoFormat& format)
{
    if (format.name == cricket::kH264CodecName)
        return std::unique_ptr<webrtc::VideoDecoder>(new H264Decoder());

    if (format.name == cricket::kVp8CodecName)
        return VP8Decoder::Create();

    GST_ERROR("Could not create decoder for %s", format.name.c_str());

    return nullptr;
}
391 | |
392 | GStreamerVideoDecoderFactory::GStreamerVideoDecoderFactory() |
393 | { |
394 | static std::once_flag debugRegisteredFlag; |
395 | |
396 | std::call_once(debugRegisteredFlag, [] { |
        GST_DEBUG_CATEGORY_INIT(webkit_webrtcdec_debug, "webkitlibwebrtcvideodecoder", 0, "WebKit WebRTC video decoder");
398 | }); |
399 | } |

std::vector<webrtc::SdpVideoFormat> GStreamerVideoDecoderFactory::GetSupportedFormats() const
401 | { |
402 | std::vector<webrtc::SdpVideoFormat> formats; |
403 | |
404 | VP8Decoder().AddDecoderIfSupported(formats); |
405 | H264Decoder().AddDecoderIfSupported(formats); |
406 | |
407 | return formats; |
408 | } |
409 | } |
410 | #endif |
411 | |