/*
 * Copyright (C) 2012, 2015, 2016, 2018 Igalia S.L
 * Copyright (C) 2015, 2016, 2018 Metrological Group B.V.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include "config.h"

#if USE(GSTREAMER) && USE(LIBWEBRTC)
#include "GStreamerVideoFrameLibWebRTC.h"

#include <thread>

namespace WebCore {

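// Converts a libwebrtc video frame into a GstSample. If the frame already wraps a
// native GStreamer buffer, the existing sample is reused; otherwise the frame is
// converted to I420 and its planes are wrapped into a new GstBuffer without copying.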
const GRefPtr<GstSample> GStreamerSampleFromLibWebRTCVideoFrame(const webrtc::VideoFrame& frame)
{
    if (frame.video_frame_buffer()->type() == webrtc::VideoFrameBuffer::Type::kNative) {
        auto* framebuffer = static_cast<GStreamerVideoFrameLibWebRTC*>(frame.video_frame_buffer().get());
        auto gstsample = framebuffer->getSample();

        GST_LOG("Reusing native GStreamer sample: %p", gstsample.get());

        return gstsample;
    }

    auto webrtcbuffer = frame.video_frame_buffer()->ToI420();
    // FIXME: Check the lifetime of these buffers.
    const uint8_t* comps[3] = {
        webrtcbuffer->DataY(),
        webrtcbuffer->DataU(),
        webrtcbuffer->DataV()
    };

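    // Wrap each I420 plane (Y, U, V) as a read-only GstMemory appended to the same
    // GstBuffer, so the sample references the libwebrtc plane data directly.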
    GstVideoInfo info;
    gst_video_info_set_format(&info, GST_VIDEO_FORMAT_I420, frame.width(), frame.height());
    auto buffer = adoptGRef(gst_buffer_new());
    for (gint i = 0; i < 3; i++) {
        gsize compsize = GST_VIDEO_INFO_COMP_STRIDE(&info, i) * GST_VIDEO_INFO_COMP_HEIGHT(&info, i);

        GstMemory* comp = gst_memory_new_wrapped(
            static_cast<GstMemoryFlags>(GST_MEMORY_FLAG_PHYSICALLY_CONTIGUOUS | GST_MEMORY_FLAG_READONLY),
            const_cast<gpointer>(reinterpret_cast<const void*>(comps[i])), compsize, 0, compsize, webrtcbuffer, nullptr);
        gst_buffer_append_memory(buffer.get(), comp);
    }

    auto caps = adoptGRef(gst_video_info_to_caps(&info));
    auto sample = adoptGRef(gst_sample_new(buffer.get(), caps.get(), nullptr, nullptr));
    return sample;
}

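// Creates a kNative VideoFrameBuffer backed by the given sample. The sample caps are
// expected to describe a raw video format; otherwise the assertion below fires.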
rtc::scoped_refptr<webrtc::VideoFrameBuffer> GStreamerVideoFrameLibWebRTC::create(GstSample* sample)
{
    GstVideoInfo info;

    if (!gst_video_info_from_caps(&info, gst_sample_get_caps(sample)))
        ASSERT_NOT_REACHED();

    return rtc::scoped_refptr<webrtc::VideoFrameBuffer>(new GStreamerVideoFrameLibWebRTC(sample, info));
}

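// Wraps a GstSample as a libwebrtc VideoFrame, keeping the GStreamer data as the
// native buffer so consumers only pay for an I420 conversion when they request one.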
std::unique_ptr<webrtc::VideoFrame> LibWebRTCVideoFrameFromGStreamerSample(GstSample* sample, webrtc::VideoRotation rotation,
    int64_t timestamp, int64_t renderTimeMs)
{
    auto frameBuffer(GStreamerVideoFrameLibWebRTC::create(sample));

    return std::unique_ptr<webrtc::VideoFrame>(
        new webrtc::VideoFrame(frameBuffer, timestamp, renderTimeMs, rotation));
}

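// This buffer is native (backed by a GstSample) rather than a plain I420 buffer.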
webrtc::VideoFrameBuffer::Type GStreamerVideoFrameLibWebRTC::type() const
{
    return Type::kNative;
}

GRefPtr<GstSample> GStreamerVideoFrameLibWebRTC::getSample()
{
    return m_sample;
}

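// Maps the underlying sample and returns its contents as an I420 buffer, converting
// with GstVideoConverter when the sample is not already in I420 format.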
rtc::scoped_refptr<webrtc::I420BufferInterface> GStreamerVideoFrameLibWebRTC::ToI420()
{
    GstMappedFrame inFrame(m_sample, GST_MAP_READ);

    if (!inFrame) {
        GST_WARNING("Could not map frame");

        return nullptr;
    }

    auto newBuffer = m_bufferPool.CreateBuffer(inFrame.width(), inFrame.height());
    ASSERT(newBuffer);
    if (!newBuffer) {
        GST_WARNING("GStreamerVideoFrameLibWebRTC::ToI420: unable to allocate buffer for conversion to YUV");
        return nullptr;
    }

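    // When the sample is not already I420, convert it directly into the pooled
    // buffer's memory with GstVideoConverter.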
    if (inFrame.format() != GST_VIDEO_FORMAT_I420) {
        GstVideoInfo outInfo;

        gst_video_info_set_format(&outInfo, GST_VIDEO_FORMAT_I420, inFrame.width(),
            inFrame.height());
        auto info = inFrame.info();
        outInfo.fps_n = info->fps_n;
        outInfo.fps_d = info->fps_d;

        // Wrap the pooled buffer's storage so the converter writes its I420 output
        // straight into it.
        GRefPtr<GstBuffer> buffer = adoptGRef(gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_NO_SHARE, newBuffer->MutableDataY(),
            outInfo.size, 0, outInfo.size, nullptr, nullptr));

        GstMappedFrame outFrame(buffer.get(), outInfo, GST_MAP_WRITE);

        // Use one converter thread per available core, falling back to a single
        // thread when the core count cannot be determined.
        GUniquePtr<GstVideoConverter> videoConverter(gst_video_converter_new(inFrame.info(),
            &outInfo, gst_structure_new("GstVideoConvertConfig",
                GST_VIDEO_CONVERTER_OPT_THREADS, G_TYPE_UINT, std::max(1u, std::thread::hardware_concurrency()), nullptr)));

        ASSERT(videoConverter);

        gst_video_converter_frame(videoConverter.get(), inFrame.get(), outFrame.get());

        return newBuffer;
    }

    // The sample is already I420: return a copy of its planes. webrtc::I420Buffer::Copy()
    // is a static factory that allocates and fills a new I420 buffer.
    return webrtc::I420Buffer::Copy(
        inFrame.width(),
        inFrame.height(),
        inFrame.ComponentData(0),
        inFrame.ComponentStride(0),
        inFrame.ComponentData(1),
        inFrame.ComponentStride(1),
        inFrame.ComponentData(2),
        inFrame.ComponentStride(2));
}

} // namespace WebCore

#endif // USE(GSTREAMER) && USE(LIBWEBRTC)