/*
 * Copyright (C) 2018 Metrological Group B.V.
 * Author: Thibault Saunier <tsaunier@igalia.com>
 * Author: Alejandro G. Castro <alex@igalia.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB. If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#include "config.h"

#if ENABLE(MEDIA_STREAM) && USE(LIBWEBRTC) && USE(GSTREAMER)
#include "GStreamerVideoCaptureSource.h"

#include "GStreamerCaptureDeviceManager.h"
#include "MediaSampleGStreamer.h"

#include <gst/app/gstappsink.h>
#include <webrtc/api/mediastreaminterface.h>
#include <webrtc/api/peerconnectioninterface.h>
#include <webrtc/media/base/videocommon.h>
#include <webrtc/media/engine/webrtcvideocapturer.h>
#include <webrtc/media/engine/webrtcvideocapturerfactory.h>
#include <webrtc/modules/video_capture/video_capture_defines.h>

namespace WebCore {

const static int defaultWidth = 640;
const static int defaultHeight = 480;

GST_DEBUG_CATEGORY(webkit_video_capture_source_debug);
#define GST_CAT_DEFAULT webkit_video_capture_source_debug

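// Registers the "webkitvideocapturesource" debug category on first use only;
// std::call_once guards it because several capture sources may be created.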
static void initializeGStreamerDebug()
{
    static std::once_flag debugRegisteredFlag;
    std::call_once(debugRegisteredFlag, [] {
        GST_DEBUG_CATEGORY_INIT(webkit_video_capture_source_debug, "webkitvideocapturesource", 0,
            "WebKit Video Capture Source.");
    });
}

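// A VideoPreset backed by GStreamer device caps: one native resolution paired
// with the frame rate ranges the device advertises for it.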
class GStreamerVideoPreset : public VideoPreset {
public:
    static Ref<GStreamerVideoPreset> create(IntSize size, Vector<FrameRateRange>&& framerates)
    {
        return adoptRef(*new GStreamerVideoPreset(size, WTFMove(framerates)));
    }

    GStreamerVideoPreset(IntSize size, Vector<FrameRateRange>&& frameRateRanges)
        : VideoPreset(size, WTFMove(frameRateRanges), GStreamer)
    {
    }
};

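// Factory handing out camera capture sources backed by GStreamer; device lookup
// goes through GStreamerVideoCaptureDeviceManager.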
class GStreamerVideoCaptureSourceFactory final : public VideoCaptureFactory {
public:
    CaptureSourceOrError createVideoCaptureSource(const CaptureDevice& device, String&& hashSalt, const MediaConstraints* constraints) final
    {
        return GStreamerVideoCaptureSource::create(String { device.persistentId() }, WTFMove(hashSalt), constraints);
    }
private:
    CaptureDeviceManager& videoCaptureDeviceManager() final { return GStreamerVideoCaptureDeviceManager::singleton(); }
};

VideoCaptureFactory& libWebRTCVideoCaptureSourceFactory()
{
    static NeverDestroyed<GStreamerVideoCaptureSourceFactory> factory;
    return factory.get();
}

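// Display (screen) capture is not implemented yet: the factory below only exposes
// the display device manager and returns an empty source.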
class GStreamerDisplayCaptureSourceFactory final : public DisplayCaptureFactory {
public:
    CaptureSourceOrError createDisplayCaptureSource(const CaptureDevice&, const MediaConstraints*) final
    {
        // FIXME: Implement this.
        return { };
    }
private:
    CaptureDeviceManager& displayCaptureDeviceManager() final { return GStreamerDisplayCaptureDeviceManager::singleton(); }
};

DisplayCaptureFactory& libWebRTCDisplayCaptureSourceFactory()
{
    static NeverDestroyed<GStreamerDisplayCaptureSourceFactory> factory;
    return factory.get();
}

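// Looks up the GStreamer device matching the persistent deviceID, wraps it in a
// capture source and applies the initial constraints, reporting the first
// constraint that cannot be satisfied.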
CaptureSourceOrError GStreamerVideoCaptureSource::create(String&& deviceID, String&& hashSalt, const MediaConstraints* constraints)
{
    auto device = GStreamerVideoCaptureDeviceManager::singleton().gstreamerDeviceWithUID(deviceID);
    if (!device) {
        auto errorMessage = makeString("GStreamerVideoCaptureSource::create(): GStreamer did not find the device: ", deviceID, '.');
        return CaptureSourceOrError(WTFMove(errorMessage));
    }

    auto source = adoptRef(*new GStreamerVideoCaptureSource(device.value(), WTFMove(hashSalt)));

    if (constraints) {
        if (auto result = source->applyConstraints(*constraints))
            return WTFMove(result->badConstraint);
    }
    return CaptureSourceOrError(WTFMove(source));
}

VideoCaptureFactory& GStreamerVideoCaptureSource::factory()
{
    return libWebRTCVideoCaptureSourceFactory();
}

DisplayCaptureFactory& GStreamerVideoCaptureSource::displayFactory()
{
    return libWebRTCDisplayCaptureSourceFactory();
}

GStreamerVideoCaptureSource::GStreamerVideoCaptureSource(String&& deviceID, String&& name, String&& hashSalt, const gchar* sourceFactory)
    : RealtimeVideoSource(WTFMove(deviceID), WTFMove(name), WTFMove(hashSalt))
    , m_capturer(std::make_unique<GStreamerVideoCapturer>(sourceFactory))
{
    initializeGStreamerDebug();
}

GStreamerVideoCaptureSource::GStreamerVideoCaptureSource(GStreamerCaptureDevice device, String&& hashSalt)
    : RealtimeVideoSource(String { device.persistentId() }, String { device.label() }, WTFMove(hashSalt))
    , m_capturer(std::make_unique<GStreamerVideoCapturer>(device))
{
    initializeGStreamerDebug();
}

GStreamerVideoCaptureSource::~GStreamerVideoCaptureSource()
{
}

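// Propagates size and frame rate changes negotiated by WebCore down to the
// underlying GStreamer capturer.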
void GStreamerVideoCaptureSource::settingsDidChange(OptionSet<RealtimeMediaSourceSettings::Flag> settings)
{
    if (settings.containsAny({ RealtimeMediaSourceSettings::Flag::Width, RealtimeMediaSourceSettings::Flag::Height }))
        m_capturer->setSize(size().width(), size().height());
    if (settings.contains(RealtimeMediaSourceSettings::Flag::FrameRate))
        m_capturer->setFrameRate(frameRate());
}

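// Builds the capture pipeline, applies the current size and frame rate, hooks up
// the appsink "new-sample" signal and starts playback.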
void GStreamerVideoCaptureSource::startProducingData()
{
    m_capturer->setupPipeline();
    m_capturer->setSize(size().width(), size().height());
    m_capturer->setFrameRate(frameRate());
    g_signal_connect(m_capturer->sink(), "new-sample", G_CALLBACK(newSampleCallback), this);
    m_capturer->play();
}

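// Runs on a GStreamer streaming thread whenever the appsink holds a new sample;
// the sample is wrapped into a MediaSampleGStreamer and handed over to the main
// thread, where videoSampleAvailable() is called.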
GstFlowReturn GStreamerVideoCaptureSource::newSampleCallback(GstElement* sink, GStreamerVideoCaptureSource* source)
{
    auto gstSample = adoptGRef(gst_app_sink_pull_sample(GST_APP_SINK(sink)));
    auto mediaSample = MediaSampleGStreamer::create(WTFMove(gstSample), WebCore::FloatSize(), String());

    // FIXME: Check how presentationSize is supposed to be used here.
    callOnMainThread([protectedThis = makeRef(*source), mediaSample = WTFMove(mediaSample)] {
        protectedThis->videoSampleAvailable(mediaSample.get());
    });

    return GST_FLOW_OK;
}

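// Disconnects the appsink callback, stops the pipeline and resets the reported
// size so stale dimensions are not kept around.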
void GStreamerVideoCaptureSource::stopProducingData()
{
    g_signal_handlers_disconnect_by_func(m_capturer->sink(), reinterpret_cast<gpointer>(newSampleCallback), this);
    m_capturer->stop();

    GST_INFO("Reset height and width after stopping source");
    setSize({ 0, 0 });
}

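// Capabilities are recomputed on every call: updateCapabilities() fills in the
// ranges derived from the generated presets, and the facing mode is reported as
// Unknown.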
const RealtimeMediaSourceCapabilities& GStreamerVideoCaptureSource::capabilities()
{
    RealtimeMediaSourceCapabilities capabilities(settings().supportedConstraints());

    capabilities.setDeviceId(hashedId());
    updateCapabilities(capabilities);

    capabilities.addFacingMode(RealtimeMediaSourceSettings::Unknown);

    m_capabilities = WTFMove(capabilities);

    return m_capabilities.value();
}

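// The supported-constraints part of the settings is built once and cached; width,
// height, frame rate, aspect ratio and facing mode are refreshed on every call.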
const RealtimeMediaSourceSettings& GStreamerVideoCaptureSource::settings()
{
    if (!m_currentSettings) {
        RealtimeMediaSourceSettings settings;
        settings.setDeviceId(hashedId());

        RealtimeMediaSourceSupportedConstraints supportedConstraints;
        supportedConstraints.setSupportsDeviceId(true);
        supportedConstraints.setSupportsFacingMode(true);
        supportedConstraints.setSupportsWidth(true);
        supportedConstraints.setSupportsHeight(true);
        supportedConstraints.setSupportsAspectRatio(true);
        supportedConstraints.setSupportsFrameRate(true);
        settings.setSupportedConstraints(supportedConstraints);

        m_currentSettings = WTFMove(settings);
    }

    m_currentSettings->setWidth(size().width());
    m_currentSettings->setHeight(size().height());
    m_currentSettings->setFrameRate(frameRate());
    m_currentSettings->setAspectRatio(aspectRatio());
    m_currentSettings->setFacingMode(facingMode());
    return m_currentSettings.value();
}

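// Walks the raw video caps advertised by the device and turns every width/height
// entry into a preset, keeping the frame rates whether they are expressed as a
// range, a single fraction or a list of fractions. If nothing usable is found,
// fall back to the standard video sizes with an unbounded frame rate range.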
void GStreamerVideoCaptureSource::generatePresets()
{
    Vector<Ref<VideoPreset>> presets;
    GRefPtr<GstCaps> caps = adoptGRef(m_capturer->caps());
    for (unsigned i = 0; i < gst_caps_get_size(caps.get()); i++) {
        GstStructure* str = gst_caps_get_structure(caps.get(), i);

        // Only accept raw video for now.
        if (!gst_structure_has_name(str, "video/x-raw"))
            continue;

        int32_t width, height;
        if (!gst_structure_get(str, "width", G_TYPE_INT, &width, "height", G_TYPE_INT, &height, nullptr)) {
            GST_INFO("Could not find discrete height and width values in %" GST_PTR_FORMAT, str);
            continue;
        }

        IntSize size = { width, height };
        double framerate;
        Vector<FrameRateRange> frameRates;
        int32_t minFrameRateNumerator, minFrameRateDenominator, maxFrameRateNumerator, maxFrameRateDenominator, framerateNumerator, framerateDenominator;
        if (gst_structure_get(str, "framerate", GST_TYPE_FRACTION_RANGE, &minFrameRateNumerator, &minFrameRateDenominator, &maxFrameRateNumerator, &maxFrameRateDenominator, nullptr)) {
            FrameRateRange range;

            gst_util_fraction_to_double(minFrameRateNumerator, minFrameRateDenominator, &range.minimum);
            gst_util_fraction_to_double(maxFrameRateNumerator, maxFrameRateDenominator, &range.maximum);

            frameRates.append(range);
        } else if (gst_structure_get(str, "framerate", GST_TYPE_FRACTION, &framerateNumerator, &framerateDenominator, nullptr)) {
            gst_util_fraction_to_double(framerateNumerator, framerateDenominator, &framerate);
            frameRates.append({ framerate, framerate });
        } else {
            const GValue* frameRateValues(gst_structure_get_value(str, "framerate"));
            unsigned frameRatesLength = static_cast<unsigned>(gst_value_list_get_size(frameRateValues));

            for (unsigned j = 0; j < frameRatesLength; j++) {
                const GValue* val = gst_value_list_get_value(frameRateValues, j);

                ASSERT(val && G_VALUE_TYPE(val) == GST_TYPE_FRACTION);
                gst_util_fraction_to_double(gst_value_get_fraction_numerator(val),
                    gst_value_get_fraction_denominator(val), &framerate);

                frameRates.append({ framerate, framerate });
            }
        }

        presets.append(GStreamerVideoPreset::create(size, WTFMove(frameRates)));
    }

    if (presets.isEmpty()) {
        GST_INFO("Could not find any presets for caps: %" GST_PTR_FORMAT ", falling back to the standard video sizes.", caps.get());

        for (auto& size : standardVideoSizes()) {
            Vector<FrameRateRange> frameRates;

            frameRates.append({ 0, G_MAXDOUBLE });
            presets.append(GStreamerVideoPreset::create(size, WTFMove(frameRates)));
        }
    }

    setSupportedPresets(WTFMove(presets));
}

} // namespace WebCore

#endif // ENABLE(MEDIA_STREAM) && USE(LIBWEBRTC) && USE(GSTREAMER)