/*
 * Copyright (C) 2018 Metrological Group B.V.
 * Copyright (C) 2018 Igalia S.L. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB. If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#include "config.h"

#if ENABLE(VIDEO) && ENABLE(MEDIA_STREAM) && USE(LIBWEBRTC) && USE(GSTREAMER)
#include "GStreamerVideoEncoderFactory.h"

#include "GStreamerVideoEncoder.h"
#include "GStreamerVideoFrameLibWebRTC.h"
#include "webrtc/common_video/h264/h264_common.h"
#include "webrtc/common_video/h264/profile_level_id.h"
#include "webrtc/media/base/codec.h"
#include "webrtc/modules/video_coding/codecs/h264/include/h264.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h"
#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/utility/simulcast_utility.h"

#include <gst/app/gstappsink.h>
#include <gst/app/gstappsrc.h>
#define GST_USE_UNSTABLE_API 1
#include <gst/codecparsers/gsth264parser.h>
#undef GST_USE_UNSTABLE_API
#include <gst/pbutils/encoding-profile.h>
#include <gst/video/video.h>
#include <wtf/HashMap.h>
#include <wtf/HexNumber.h>
#include <wtf/Lock.h>
#include <wtf/StdMap.h>
// Required for unified builds
#ifdef GST_CAT_DEFAULT
#undef GST_CAT_DEFAULT
#endif

GST_DEBUG_CATEGORY(webkit_webrtcenc_debug);
#define GST_CAT_DEFAULT webkit_webrtcenc_debug

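// kbit/s to bit/s conversion factor; the unit of the "bitrate" property
// depends on the encoder element behind webrtcvideoencoder.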
#define KBIT_TO_BIT 1024

namespace WebCore {

class GStreamerVideoEncoder : public webrtc::VideoEncoder {
public:
    GStreamerVideoEncoder(const webrtc::SdpVideoFormat&)
        : GStreamerVideoEncoder()
    {
    }

    GStreamerVideoEncoder()
        : m_firstFramePts(GST_CLOCK_TIME_NONE)
        , m_restrictionCaps(adoptGRef(gst_caps_new_empty_simple("video/x-raw")))
        , m_adapter(adoptGRef(gst_adapter_new()))
    {
    }

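    // Called dynamically by libwebrtc when the bandwidth estimate changes: the
    // framerate is propagated through the restriction caps and the new bitrate
    // is forwarded to the encoder element via its "bitrate" property.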
    int SetRates(uint32_t newBitrate, uint32_t frameRate) override
    {
        GST_INFO_OBJECT(m_pipeline.get(), "New bitrate: %d - framerate is %d",
            newBitrate, frameRate);

        auto caps = adoptGRef(gst_caps_copy(m_restrictionCaps.get()));
        gst_caps_set_simple(caps.get(), "framerate", GST_TYPE_FRACTION, static_cast<int>(frameRate), 1, nullptr);

        SetRestrictionCaps(WTFMove(caps));

        if (m_encoder)
            g_object_set(m_encoder, "bitrate", newBitrate, nullptr);

        return WEBRTC_VIDEO_CODEC_OK;
    }

    GstElement* pipeline()
    {
        return m_pipeline.get();
    }

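    // Creates a pipeline element with a unique per-instance name so several
    // encoders can coexist and GST_DEBUG dot dumps stay readable.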
    GstElement* makeElement(const gchar* factoryName)
    {
        auto name = makeString(Name(), "_enc_", factoryName, "_0x", hex(reinterpret_cast<uintptr_t>(this)));
        auto elem = gst_element_factory_make(factoryName, name.utf8().data());

        return elem;
    }

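    // Builds and starts the encoding pipeline:
    //   appsrc ! videoconvert ! capsfilter ! webrtcvideoencoder ! appsink
    // Raw frames are pushed into appsrc from Encode() and the encoded output is
    // pulled back synchronously from appsink.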
    int32_t InitEncode(const webrtc::VideoCodec* codecSettings, int32_t, size_t)
    {
        g_return_val_if_fail(codecSettings, WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
        g_return_val_if_fail(codecSettings->codecType == CodecType(), WEBRTC_VIDEO_CODEC_ERR_PARAMETER);

        if (webrtc::SimulcastUtility::NumberOfSimulcastStreams(*codecSettings) > 1) {
            GST_ERROR("Simulcast not supported.");

            return WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED;
        }

        m_encodedFrame._size = codecSettings->width * codecSettings->height * 3;
        m_encodedFrame._buffer = new uint8_t[m_encodedFrame._size];
        m_encodedImageBuffer.reset(m_encodedFrame._buffer);
        m_encodedFrame._completeFrame = true;
        m_encodedFrame._encodedWidth = 0;
        m_encodedFrame._encodedHeight = 0;
        m_encodedFrame._length = 0;

        m_pipeline = makeElement("pipeline");

        connectSimpleBusMessageCallback(m_pipeline.get());
        auto encoder = createEncoder();
        ASSERT(encoder);
        m_encoder = encoder.get();

        g_object_set(m_encoder, "keyframe-interval", KeyframeInterval(codecSettings), nullptr);

        m_src = makeElement("appsrc");
        g_object_set(m_src, "is-live", true, "format", GST_FORMAT_TIME, nullptr);

        auto videoconvert = makeElement("videoconvert");
        m_sink = makeElement("appsink");
        g_object_set(m_sink, "sync", FALSE, nullptr);

        auto name = makeString(Name(), "_enc_rawcapsfilter_0x", hex(reinterpret_cast<uintptr_t>(this)));
        m_capsFilter = gst_element_factory_make("capsfilter", name.utf8().data());
        if (m_restrictionCaps)
            g_object_set(m_capsFilter, "caps", m_restrictionCaps.get(), nullptr);

        gst_bin_add_many(GST_BIN(m_pipeline.get()), m_src, videoconvert, m_capsFilter, encoder.leakRef(), m_sink, nullptr);
        if (!gst_element_link_many(m_src, videoconvert, m_capsFilter, m_encoder, m_sink, nullptr)) {
            GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_VERBOSE, "webkit-webrtc-encoder.error");

            ASSERT_NOT_REACHED();
        }

        gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);

        return WEBRTC_VIDEO_CODEC_OK;
    }

    bool SupportsNativeHandle() const final
    {
        return true;
    }

    int32_t RegisterEncodeCompleteCallback(webrtc::EncodedImageCallback* callback) final
    {
        m_imageReadyCb = callback;

        return WEBRTC_VIDEO_CODEC_OK;
    }

    int32_t Release() final
    {
        m_encodedFrame._buffer = nullptr;
        m_encodedImageBuffer.reset();
        if (m_pipeline) {
            GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
            gst_bus_set_sync_handler(bus.get(), nullptr, nullptr, nullptr);

            gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);
            m_src = nullptr;
            m_encoder = nullptr;
            m_capsFilter = nullptr;
            m_sink = nullptr;
            m_pipeline = nullptr;
        }

        return WEBRTC_VIDEO_CODEC_OK;
    }

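    // Maps a GstFlowReturn from appsrc to the closest WEBRTC_VIDEO_CODEC_* status.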
    int32_t returnFromFlowReturn(GstFlowReturn flow)
    {
        switch (flow) {
        case GST_FLOW_OK:
            return WEBRTC_VIDEO_CODEC_OK;
        case GST_FLOW_FLUSHING:
            return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
        default:
            return WEBRTC_VIDEO_CODEC_ERROR;
        }
    }

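    // Feeds one raw frame through the pipeline and synchronously hands the
    // encoded result back to libwebrtc: push the sample into appsrc, honor
    // keyframe requests with a force-key-unit event, pull the encoded sample
    // from appsink, fragmentize it and invoke the registered callback.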
    int32_t Encode(const webrtc::VideoFrame& frame,
        const webrtc::CodecSpecificInfo*,
        const std::vector<webrtc::FrameType>* frameTypes) final
    {
        int32_t res;

        if (!m_imageReadyCb) {
            GST_INFO_OBJECT(m_pipeline.get(), "No encoded callback set yet!");

            return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
        }

        if (!m_src) {
            GST_INFO_OBJECT(m_pipeline.get(), "No source set yet!");

            return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
        }

        auto sample = GStreamerSampleFromLibWebRTCVideoFrame(frame);
        auto buffer = gst_sample_get_buffer(sample.get());

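        // Remember the PTS of the first frame and apply it as a negative pad
        // offset so the stream fed to the encoder starts at running time 0.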
        if (!GST_CLOCK_TIME_IS_VALID(m_firstFramePts)) {
            m_firstFramePts = GST_BUFFER_PTS(buffer);
            auto pad = adoptGRef(gst_element_get_static_pad(m_src, "src"));
            gst_pad_set_offset(pad.get(), -m_firstFramePts);
        }

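        // When libwebrtc requests a keyframe, send a downstream force-key-unit
        // event so the encoder emits a keyframe for the frame being pushed.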
        for (auto frameType : *frameTypes) {
            if (frameType == webrtc::kVideoFrameKey) {
                auto pad = adoptGRef(gst_element_get_static_pad(m_src, "src"));
                auto forceKeyUnit = gst_video_event_new_downstream_force_key_unit(GST_CLOCK_TIME_NONE,
                    GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, FALSE, 1);
                GST_INFO_OBJECT(m_pipeline.get(), "Requesting KEYFRAME!");

                if (!gst_pad_push_event(pad.get(), forceKeyUnit))
                    GST_WARNING_OBJECT(pipeline(), "Could not send ForceKeyUnit event");

                break;
            }
        }

        res = returnFromFlowReturn(gst_app_src_push_sample(GST_APP_SRC(m_src), sample.get()));
        if (res != WEBRTC_VIDEO_CODEC_OK)
            return res;

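        // The encode path is synchronous from libwebrtc's point of view: wait
        // up to 5 seconds for the encoded sample; a longer wait most likely
        // means the pipeline is stalled.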
        auto encodedSample = adoptGRef(gst_app_sink_try_pull_sample(GST_APP_SINK(m_sink), 5 * GST_SECOND));
        if (!encodedSample) {
            GST_ERROR("Didn't get any encodedSample");
            return WEBRTC_VIDEO_CODEC_ERROR;
        }

        auto encodedBuffer = gst_sample_get_buffer(encodedSample.get());
        auto encodedCaps = gst_sample_get_caps(encodedSample.get());

        webrtc::RTPFragmentationHeader fragmentationInfo;

        Fragmentize(&m_encodedFrame, &m_encodedImageBuffer, &m_encodedImageBufferSize, encodedBuffer, &fragmentationInfo);
        if (!m_encodedFrame._size)
            return WEBRTC_VIDEO_CODEC_OK;

        gst_structure_get(gst_caps_get_structure(encodedCaps, 0),
            "width", G_TYPE_INT, &m_encodedFrame._encodedWidth,
            "height", G_TYPE_INT, &m_encodedFrame._encodedHeight,
            nullptr);

        m_encodedFrame._frameType = GST_BUFFER_FLAG_IS_SET(encodedBuffer, GST_BUFFER_FLAG_DELTA_UNIT) ? webrtc::kVideoFrameDelta : webrtc::kVideoFrameKey;
        m_encodedFrame._completeFrame = true;
        m_encodedFrame.capture_time_ms_ = frame.render_time_ms();
        m_encodedFrame.SetTimestamp(frame.timestamp());

        GST_LOG_OBJECT(m_pipeline.get(), "Got buffer capture_time_ms: %" G_GINT64_FORMAT " _timestamp: %u",
            m_encodedFrame.capture_time_ms_, m_encodedFrame.Timestamp());

        webrtc::CodecSpecificInfo codecInfo;
        PopulateCodecSpecific(&codecInfo, encodedBuffer);
        webrtc::EncodedImageCallback::Result result = m_imageReadyCb->OnEncodedImage(m_encodedFrame, &codecInfo, &fragmentationInfo);
        if (result.error != webrtc::EncodedImageCallback::Result::OK)
            GST_ERROR_OBJECT(m_pipeline.get(), "Encode callback failed: %d", result.error);

        return WEBRTC_VIDEO_CODEC_OK;
    }

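    // The webrtcvideoencoder wrapper element selects a suitable GStreamer
    // encoder for the caps set through its "format" property; the selected
    // element is exposed through its "encoder" property, which reads back as
    // nullptr when no encoder is available for the format.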
    GRefPtr<GstElement> createEncoder(void)
    {
        GRefPtr<GstElement> encoder = nullptr;
        // Take ownership of the floating reference so the element is not
        // leaked when no encoder is found.
        auto webrtcencoder = adoptGRef(GST_ELEMENT(g_object_ref_sink(gst_element_factory_make("webrtcvideoencoder", nullptr))));

        g_object_set(webrtcencoder.get(), "format", adoptGRef(gst_caps_from_string(Caps())).get(), nullptr);
        g_object_get(webrtcencoder.get(), "encoder", &encoder.outPtr(), nullptr);

        if (!encoder) {
            GST_INFO("No encoder found for %s", Caps());

            return nullptr;
        }

        return webrtcencoder;
    }

    void AddCodecIfSupported(std::vector<webrtc::SdpVideoFormat>* supportedFormats)
    {
        if (auto encoder = createEncoder())
            supportedFormats->push_back(ConfigureSupportedCodec(encoder.get()));
    }

    virtual const gchar* Caps()
    {
        return nullptr;
    }

    virtual webrtc::VideoCodecType CodecType() = 0;
    virtual webrtc::SdpVideoFormat ConfigureSupportedCodec(GstElement*)
    {
        return webrtc::SdpVideoFormat(Name());
    }

    virtual void PopulateCodecSpecific(webrtc::CodecSpecificInfo*, GstBuffer*) = 0;

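    // Copies the encoded bytes out of the GstBuffer into encodedImage, growing
    // the backing store on demand, and fills the RTP fragmentation header. This
    // base version treats the whole buffer as a single fragment; codecs with
    // richer framing (H.264) override it.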
    virtual void Fragmentize(webrtc::EncodedImage* encodedImage, std::unique_ptr<uint8_t[]>* encodedImageBuffer,
        size_t* bufferSize, GstBuffer* buffer, webrtc::RTPFragmentationHeader* fragmentationInfo)
    {
        auto map = GstMappedBuffer::create(buffer, GST_MAP_READ);

        if (*bufferSize < map->size()) {
            encodedImage->_size = map->size();
            encodedImage->_buffer = new uint8_t[encodedImage->_size];
            encodedImageBuffer->reset(encodedImage->_buffer);
            *bufferSize = map->size();
        }

        memcpy(encodedImage->_buffer, map->data(), map->size());
        encodedImage->_length = map->size();
        encodedImage->_size = map->size();

        fragmentationInfo->VerifyAndAllocateFragmentationHeader(1);
        fragmentationInfo->fragmentationOffset[0] = 0;
        fragmentationInfo->fragmentationLength[0] = map->size();
        fragmentationInfo->fragmentationPlType[0] = 0;
        fragmentationInfo->fragmentationTimeDiff[0] = 0;
    }

    const char* ImplementationName() const
    {
        GRefPtr<GstElement> encoderImplementation;
        g_return_val_if_fail(m_encoder, nullptr);

        g_object_get(m_encoder, "encoder", &encoderImplementation.outPtr(), nullptr);

        return GST_OBJECT_NAME(gst_element_get_factory(encoderImplementation.get()));
    }

    virtual const gchar* Name() = 0;
    virtual int KeyframeInterval(const webrtc::VideoCodec* codecSettings) = 0;

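    // Applies the given caps to the capsfilter sitting in front of the encoder,
    // restricting the raw video format (e.g. framerate) that it receives.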
    void SetRestrictionCaps(GRefPtr<GstCaps> caps)
    {
        if (m_capsFilter)
            g_object_set(m_capsFilter, "caps", caps.get(), nullptr);

        m_restrictionCaps = WTFMove(caps);
    }

private:
    GRefPtr<GstElement> m_pipeline;
    GstElement* m_src { nullptr };
    GstElement* m_encoder { nullptr };
    GstElement* m_capsFilter { nullptr };

    webrtc::EncodedImageCallback* m_imageReadyCb { nullptr };
    GstClockTime m_firstFramePts;
    GRefPtr<GstCaps> m_restrictionCaps;
    webrtc::EncodedImage m_encodedFrame;
    std::unique_ptr<uint8_t[]> m_encodedImageBuffer;
    size_t m_encodedImageBufferSize { 0 };

    Lock m_bufferMapLock;
    GRefPtr<GstAdapter> m_adapter;
    GstElement* m_sink { nullptr };
};

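// H.264 specialization: the encoder is expected to emit an Annex B bytestream,
// which Fragmentize() splits into NAL units with GstH264NalParser so each unit
// can be described in the RTP fragmentation header.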
class GStreamerH264Encoder : public GStreamerVideoEncoder {
public:
    GStreamerH264Encoder() { }

    GStreamerH264Encoder(const webrtc::SdpVideoFormat& format)
        : m_parser(gst_h264_nal_parser_new())
        , packetizationMode(webrtc::H264PacketizationMode::SingleNalUnit)
    {
        auto it = format.parameters.find(cricket::kH264FmtpPacketizationMode);

        if (it != format.parameters.end() && it->second == "1")
            packetizationMode = webrtc::H264PacketizationMode::NonInterleaved;
    }

    int KeyframeInterval(const webrtc::VideoCodec* codecSettings) final
    {
        return codecSettings->H264().keyFrameInterval;
    }

    // FIXME: Make this threadsafe.
    void Fragmentize(webrtc::EncodedImage* encodedImage, std::unique_ptr<uint8_t[]>* encodedImageBuffer, size_t* bufferSize,
        GstBuffer* gstbuffer, webrtc::RTPFragmentationHeader* fragmentationHeader) final
    {
        GstH264NalUnit nalu;
        auto parserResult = GST_H264_PARSER_OK;

        gsize offset = 0;
        size_t requiredSize = 0;

        std::vector<GstH264NalUnit> nals;

        const uint8_t startCode[4] = { 0, 0, 0, 1 };
        auto map = GstMappedBuffer::create(gstbuffer, GST_MAP_READ);
        while (parserResult == GST_H264_PARSER_OK) {
            parserResult = gst_h264_parser_identify_nalu(m_parser, map->data(), offset, map->size(), &nalu);

            // The parser is fed an Annex B bytestream, so assume 4-byte start codes.
            nalu.sc_offset = offset;
            nalu.offset = offset + sizeof(startCode);
            if (parserResult != GST_H264_PARSER_OK && parserResult != GST_H264_PARSER_NO_NAL_END)
                break;

            requiredSize += nalu.size + sizeof(startCode);
            nals.push_back(nalu);
            offset = nalu.offset + nalu.size;
        }

        if (encodedImage->_size < requiredSize) {
            encodedImage->_size = requiredSize;
            encodedImage->_buffer = new uint8_t[encodedImage->_size];
            encodedImageBuffer->reset(encodedImage->_buffer);
            *bufferSize = requiredSize;
        }

        // Iterate over the NAL units and fill the fragmentation info.
        fragmentationHeader->VerifyAndAllocateFragmentationHeader(nals.size());
        size_t fragmentIndex = 0;
        encodedImage->_length = 0;
        for (std::vector<GstH264NalUnit>::iterator nal = nals.begin(); nal != nals.end(); ++nal, fragmentIndex++) {
            ASSERT(map->data()[nal->sc_offset + 0] == startCode[0]);
            ASSERT(map->data()[nal->sc_offset + 1] == startCode[1]);
            ASSERT(map->data()[nal->sc_offset + 2] == startCode[2]);
            ASSERT(map->data()[nal->sc_offset + 3] == startCode[3]);

            fragmentationHeader->fragmentationOffset[fragmentIndex] = nal->offset;
            fragmentationHeader->fragmentationLength[fragmentIndex] = nal->size;

            memcpy(encodedImage->_buffer + encodedImage->_length, &map->data()[nal->sc_offset],
                sizeof(startCode) + nal->size);
            encodedImage->_length += nal->size + sizeof(startCode);
        }
    }


    webrtc::SdpVideoFormat ConfigureSupportedCodec(GstElement*) final
    {
        // FIXME: Create this from the encoder src pad caps template.
        return webrtc::SdpVideoFormat(cricket::kH264CodecName,
            { { cricket::kH264FmtpProfileLevelId, cricket::kH264ProfileLevelConstrainedBaseline },
                { cricket::kH264FmtpLevelAsymmetryAllowed, "1" },
                { cricket::kH264FmtpPacketizationMode, "1" } });
    }

    const gchar* Caps() final { return "video/x-h264"; }
    const gchar* Name() final { return cricket::kH264CodecName; }
    GstH264NalParser* m_parser { nullptr };
    webrtc::VideoCodecType CodecType() final { return webrtc::kVideoCodecH264; }

    void PopulateCodecSpecific(webrtc::CodecSpecificInfo* codecSpecificInfos, GstBuffer*) final
    {
        codecSpecificInfos->codecType = CodecType();
        codecSpecificInfos->codec_name = ImplementationName();
        webrtc::CodecSpecificInfoH264* h264Info = &(codecSpecificInfos->codecSpecific.H264);
        h264Info->packetization_mode = packetizationMode;
    }

    webrtc::H264PacketizationMode packetizationMode;
};

class GStreamerVP8Encoder : public GStreamerVideoEncoder {
public:
    GStreamerVP8Encoder() { }
    GStreamerVP8Encoder(const webrtc::SdpVideoFormat&) { }
    const gchar* Caps() final { return "video/x-vp8"; }
    const gchar* Name() final { return cricket::kVp8CodecName; }
    webrtc::VideoCodecType CodecType() final { return webrtc::kVideoCodecVP8; }

    int KeyframeInterval(const webrtc::VideoCodec* codecSettings) final
    {
        return codecSettings->VP8().keyFrameInterval;
    }

    void PopulateCodecSpecific(webrtc::CodecSpecificInfo* codecSpecificInfos, GstBuffer* buffer) final
    {
        codecSpecificInfos->codecType = webrtc::kVideoCodecVP8;
        codecSpecificInfos->codec_name = ImplementationName();
        webrtc::CodecSpecificInfoVP8* vp8Info = &(codecSpecificInfos->codecSpecific.VP8);
        vp8Info->temporalIdx = 0;

        vp8Info->keyIdx = webrtc::kNoKeyIdx;
        vp8Info->nonReference = GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
    }
};

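// Prefer a GStreamer-provided VP8 encoder when one is available and fall back
// to libwebrtc's libvpx implementation otherwise; H.264 always goes through
// the GStreamer path.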
std::unique_ptr<webrtc::VideoEncoder> GStreamerVideoEncoderFactory::CreateVideoEncoder(const webrtc::SdpVideoFormat& format)
{
    if (format.name == cricket::kVp8CodecName) {
        auto webrtcencoder = adoptGRef(GST_ELEMENT(g_object_ref_sink(gst_element_factory_make("webrtcvideoencoder", nullptr))));
        GRefPtr<GstElement> encoder = nullptr;

        g_object_set(webrtcencoder.get(), "format", adoptGRef(gst_caps_from_string("video/x-vp8")).get(), nullptr);
        g_object_get(webrtcencoder.get(), "encoder", &encoder.outPtr(), nullptr);

        if (encoder)
            return std::make_unique<GStreamerVP8Encoder>(format);

        GST_INFO("Using VP8 Encoder from LibWebRTC.");
        return std::make_unique<webrtc::LibvpxVp8Encoder>();
    }

    if (format.name == cricket::kH264CodecName)
        return std::make_unique<GStreamerH264Encoder>(format);

    return nullptr;
}

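// Registers the custom webrtcvideoencoder element and the debug category only
// once per process, no matter how many factories are created.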
GStreamerVideoEncoderFactory::GStreamerVideoEncoderFactory()
{
    static std::once_flag debugRegisteredFlag;

    std::call_once(debugRegisteredFlag, [] {
        GST_DEBUG_CATEGORY_INIT(webkit_webrtcenc_debug, "webkitlibwebrtcvideoencoder", 0, "WebKit WebRTC video encoder");
        gst_element_register(nullptr, "webrtcvideoencoder", GST_RANK_PRIMARY, GST_TYPE_WEBRTC_VIDEO_ENCODER);
    });
}

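// VP8 is always advertised because the libvpx fallback guarantees an encoder;
// H.264 is only added when a GStreamer encoder for it actually exists.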
std::vector<webrtc::SdpVideoFormat> GStreamerVideoEncoderFactory::GetSupportedFormats() const
{
    std::vector<webrtc::SdpVideoFormat> supportedCodecs;

    supportedCodecs.push_back(webrtc::SdpVideoFormat(cricket::kVp8CodecName));
    GStreamerH264Encoder().AddCodecIfSupported(&supportedCodecs);

    return supportedCodecs;
}

} // namespace WebCore
#endif