1/*
2 * Copyright (C) 2011 Google Inc. All rights reserved.
3 * Copyright (C) 2011, 2015 Ericsson AB. All rights reserved.
4 * Copyright (C) 2013-2019 Apple Inc. All rights reserved.
5 * Copyright (C) 2013 Nokia Corporation and/or its subsidiary(-ies).
6 *
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
9 * are met:
10 * 1. Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in the
14 * documentation and/or other materials provided with the distribution.
15 *
16 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY
17 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19 * DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY
20 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
21 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
22 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
23 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 */
27
28#include "config.h"
29#include "MediaStreamTrack.h"
30
31#if ENABLE(MEDIA_STREAM)
32
33#include "Document.h"
34#include "Event.h"
35#include "EventNames.h"
36#include "JSOverconstrainedError.h"
37#include "MediaConstraints.h"
38#include "MediaStream.h"
39#include "MediaStreamPrivate.h"
40#include "NotImplemented.h"
41#include "OverconstrainedError.h"
42#include "Page.h"
43#include "RealtimeMediaSourceCenter.h"
44#include "ScriptExecutionContext.h"
45#include <wtf/CompletionHandler.h>
46#include <wtf/IsoMallocInlines.h>
47#include <wtf/NeverDestroyed.h>
48
49namespace WebCore {
50
51WTF_MAKE_ISO_ALLOCATED_IMPL(MediaStreamTrack);
52
53Ref<MediaStreamTrack> MediaStreamTrack::create(ScriptExecutionContext& context, Ref<MediaStreamTrackPrivate>&& privateTrack)
54{
55 return adoptRef(*new MediaStreamTrack(context, WTFMove(privateTrack)));
56}
57
// Constructs a track wrapping the given private track: registers as its
// observer, joins the document's audio producers, and—for capture tracks—
// adopts the page's current muted state so a track created while the page
// is muted starts out muted.
MediaStreamTrack::MediaStreamTrack(ScriptExecutionContext& context, Ref<MediaStreamTrackPrivate>&& privateTrack)
    : ActiveDOMObject(&context)
    , m_private(WTFMove(privateTrack))
#if !RELEASE_LOG_DISABLED
    // NOTE(review): document() is dereferenced unconditionally here, which
    // assumes the context is always a Document — confirm no worker callers.
    , m_logger(document()->logger())
    , m_logIdentifier(uniqueLogIdentifier())
#endif
    , m_taskQueue(context)
{
    ALWAYS_LOG(LOGIDENTIFIER);
    suspendIfNeeded();

#if !RELEASE_LOG_DISABLED
    m_private->setLogger(logger(), logIdentifier());
#endif
    m_private->addObserver(*this);

    if (auto document = this->document()) {
        document->addAudioProducer(*this);
        // Only capture tracks mirror the page-level mute switch.
        if (isCaptureTrack() && document->page() && document->page()->mutedState())
            setMuted(document->page()->mutedState());
    }
}
81
// Unregisters everything the constructor registered (private-track observer,
// document audio producer).
MediaStreamTrack::~MediaStreamTrack()
{
    m_private->removeObserver(*this);

    if (auto document = this->document())
        document->removeAudioProducer(*this);
}
89
90const AtomicString& MediaStreamTrack::kind() const
91{
92 static NeverDestroyed<AtomicString> audioKind("audio", AtomicString::ConstructFromLiteral);
93 static NeverDestroyed<AtomicString> videoKind("video", AtomicString::ConstructFromLiteral);
94
95 if (m_private->type() == RealtimeMediaSource::Type::Audio)
96 return audioKind;
97 return videoKind;
98}
99
// MediaStreamTrack.id attribute; the identifier lives on the private track.
const String& MediaStreamTrack::id() const
{
    return m_private->id();
}
104
// MediaStreamTrack.label attribute, forwarded from the private track.
const String& MediaStreamTrack::label() const
{
    return m_private->label();
}
109
// MediaStreamTrack.contentHint getter: maps the private track's stored hint
// enum back to its spec-defined string form ("" when no hint is set).
const AtomicString& MediaStreamTrack::contentHint() const
{
    static NeverDestroyed<const AtomicString> speechHint("speech", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> musicHint("music", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> detailHint("detail", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> textHint("text", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> motionHint("motion", AtomicString::ConstructFromLiteral);

    switch (m_private->contentHint()) {
    case MediaStreamTrackPrivate::HintValue::Empty:
        return emptyAtom();
    case MediaStreamTrackPrivate::HintValue::Speech:
        return speechHint;
    case MediaStreamTrackPrivate::HintValue::Music:
        return musicHint;
    case MediaStreamTrackPrivate::HintValue::Motion:
        return motionHint;
    case MediaStreamTrackPrivate::HintValue::Detail:
        return detailHint;
    case MediaStreamTrackPrivate::HintValue::Text:
        return textHint;
    default:
        // All HintValue cases are covered above; default keeps compilers quiet
        // if the enum ever grows.
        return emptyAtom();
    }
}
135
136void MediaStreamTrack::setContentHint(const String& hintValue)
137{
138 MediaStreamTrackPrivate::HintValue value;
139 if (m_private->type() == RealtimeMediaSource::Type::Audio) {
140 if (hintValue == "")
141 value = MediaStreamTrackPrivate::HintValue::Empty;
142 else if (hintValue == "speech")
143 value = MediaStreamTrackPrivate::HintValue::Speech;
144 else if (hintValue == "music")
145 value = MediaStreamTrackPrivate::HintValue::Music;
146 else
147 return;
148 } else {
149 if (hintValue == "")
150 value = MediaStreamTrackPrivate::HintValue::Empty;
151 else if (hintValue == "detail")
152 value = MediaStreamTrackPrivate::HintValue::Detail;
153 else if (hintValue == "motion")
154 value = MediaStreamTrackPrivate::HintValue::Motion;
155 else if (hintValue == "text")
156 value = MediaStreamTrackPrivate::HintValue::Text;
157 else
158 return;
159 }
160 m_private->setContentHint(value);
161}
162
// MediaStreamTrack.enabled getter; state lives on the private track.
bool MediaStreamTrack::enabled() const
{
    return m_private->enabled();
}
167
// MediaStreamTrack.enabled setter, forwarded to the private track.
void MediaStreamTrack::setEnabled(bool enabled)
{
    m_private->setEnabled(enabled);
}
172
// MediaStreamTrack.muted getter, forwarded to the private track.
bool MediaStreamTrack::muted() const
{
    return m_private->muted();
}
177
// Derives this track's muted flag from the page-level muted-state flags,
// selecting the flag bit that applies to this track's capture device type.
void MediaStreamTrack::setMuted(MediaProducer::MutedStateFlags state)
{
    bool trackMuted = false;
    switch (source().deviceType()) {
    case CaptureDevice::DeviceType::Microphone:
    case CaptureDevice::DeviceType::Camera:
        // Microphone and camera share one page mute bit.
        trackMuted = state & AudioAndVideoCaptureIsMuted;
        break;
    case CaptureDevice::DeviceType::Screen:
    case CaptureDevice::DeviceType::Window:
        // Display capture has its own mute bit.
        trackMuted = state & ScreenCaptureIsMuted;
        break;
    case CaptureDevice::DeviceType::Unknown:
        ASSERT_NOT_REACHED();
        break;
    }

    m_private->setMuted(trackMuted);
}
197
198auto MediaStreamTrack::readyState() const -> State
199{
200 return ended() ? State::Ended : State::Live;
201}
202
// A track counts as ended if either our own flag is set (e.g. stop() was
// called) or the underlying private track reports ended.
bool MediaStreamTrack::ended() const
{
    return m_ended || m_private->ended();
}
207
208RefPtr<MediaStreamTrack> MediaStreamTrack::clone()
209{
210 if (!scriptExecutionContext())
211 return nullptr;
212
213 return MediaStreamTrack::create(*scriptExecutionContext(), m_private->clone());
214}
215
// Ends the track. With StopMode::Silently, m_ended is set BEFORE endTrack()
// runs so that trackEnded() sees it and suppresses the 'ended' event.
void MediaStreamTrack::stopTrack(StopMode mode)
{
    // NOTE: this method is called when the "stop" method is called from JS, using the "ImplementedAs" IDL attribute.
    // This is done because ActiveDOMObject requires a "stop" method.

    if (ended())
        return;

    // An 'ended' event is not posted if m_ended is true when trackEnded is called, so set it now if we are
    // not supposed to post the event.
    if (mode == StopMode::Silently)
        m_ended = true;

    m_private->endTrack();
    // In the non-silent path m_ended must only be set after endTrack(), so
    // the resulting trackEnded() callback still fires the 'ended' event.
    m_ended = true;

    configureTrackRendering();
}
234
// MediaStreamTrack.getSettings(): copies each setting the source supports
// into the dictionary; unsupported settings stay unset (undefined in JS).
MediaStreamTrack::TrackSettings MediaStreamTrack::getSettings() const
{
    auto& settings = m_private->settings();
    TrackSettings result;
    if (settings.supportsWidth())
        result.width = settings.width();
    if (settings.supportsHeight())
        result.height = settings.height();
    if (settings.supportsAspectRatio() && settings.aspectRatio()) // FIXME: Why the check for zero here?
        result.aspectRatio = settings.aspectRatio();
    if (settings.supportsFrameRate())
        result.frameRate = settings.frameRate();
    if (settings.supportsFacingMode())
        result.facingMode = RealtimeMediaSourceSettings::facingMode(settings.facingMode());
    if (settings.supportsVolume())
        result.volume = settings.volume();
    if (settings.supportsSampleRate())
        result.sampleRate = settings.sampleRate();
    if (settings.supportsSampleSize())
        result.sampleSize = settings.sampleSize();
    if (settings.supportsEchoCancellation())
        result.echoCancellation = settings.echoCancellation();
    if (settings.supportsDeviceId())
        result.deviceId = settings.deviceId();
    if (settings.supportsGroupId())
        result.groupId = settings.groupId();

    // FIXME: shouldn't this include displaySurface and logicalSurface?

    return result;
}
266
267static DoubleRange capabilityDoubleRange(const CapabilityValueOrRange& value)
268{
269 DoubleRange range;
270 switch (value.type()) {
271 case CapabilityValueOrRange::Double:
272 range.min = value.value().asDouble;
273 range.max = range.min;
274 break;
275 case CapabilityValueOrRange::DoubleRange:
276 range.min = value.rangeMin().asDouble;
277 range.max = value.rangeMax().asDouble;
278 break;
279 case CapabilityValueOrRange::Undefined:
280 case CapabilityValueOrRange::ULong:
281 case CapabilityValueOrRange::ULongRange:
282 ASSERT_NOT_REACHED();
283 }
284 return range;
285}
286
287static LongRange capabilityIntRange(const CapabilityValueOrRange& value)
288{
289 LongRange range;
290 switch (value.type()) {
291 case CapabilityValueOrRange::ULong:
292 range.min = value.value().asInt;
293 range.max = range.min;
294 break;
295 case CapabilityValueOrRange::ULongRange:
296 range.min = value.rangeMin().asInt;
297 range.max = value.rangeMax().asInt;
298 break;
299 case CapabilityValueOrRange::Undefined:
300 case CapabilityValueOrRange::Double:
301 case CapabilityValueOrRange::DoubleRange:
302 ASSERT_NOT_REACHED();
303 }
304 return range;
305}
306
307static Vector<String> capabilityStringVector(const Vector<RealtimeMediaSourceSettings::VideoFacingMode>& modes)
308{
309 Vector<String> result;
310 result.reserveCapacity(modes.size());
311 for (auto& mode : modes)
312 result.uncheckedAppend(RealtimeMediaSourceSettings::facingMode(mode));
313 return result;
314}
315
316static Vector<bool> capabilityBooleanVector(RealtimeMediaSourceCapabilities::EchoCancellation cancellation)
317{
318 Vector<bool> result;
319 result.reserveCapacity(2);
320 result.uncheckedAppend(true);
321 result.uncheckedAppend(cancellation == RealtimeMediaSourceCapabilities::EchoCancellation::ReadWrite);
322 return result;
323}
324
// MediaStreamTrack.getCapabilities(): copies each capability the source
// supports into the dictionary, converting value-or-range payloads to the
// spec's range/vector representations; unsupported entries stay unset.
MediaStreamTrack::TrackCapabilities MediaStreamTrack::getCapabilities() const
{
    auto capabilities = m_private->capabilities();
    TrackCapabilities result;
    if (capabilities.supportsWidth())
        result.width = capabilityIntRange(capabilities.width());
    if (capabilities.supportsHeight())
        result.height = capabilityIntRange(capabilities.height());
    if (capabilities.supportsAspectRatio())
        result.aspectRatio = capabilityDoubleRange(capabilities.aspectRatio());
    if (capabilities.supportsFrameRate())
        result.frameRate = capabilityDoubleRange(capabilities.frameRate());
    if (capabilities.supportsFacingMode())
        result.facingMode = capabilityStringVector(capabilities.facingMode());
    if (capabilities.supportsVolume())
        result.volume = capabilityDoubleRange(capabilities.volume());
    if (capabilities.supportsSampleRate())
        result.sampleRate = capabilityIntRange(capabilities.sampleRate());
    if (capabilities.supportsSampleSize())
        result.sampleSize = capabilityIntRange(capabilities.sampleSize());
    if (capabilities.supportsEchoCancellation())
        result.echoCancellation = capabilityBooleanVector(capabilities.echoCancellation());
    if (capabilities.supportsDeviceId())
        result.deviceId = capabilities.deviceId();
    if (capabilities.supportsGroupId())
        result.groupId = capabilities.groupId();
    return result;
}
353
354static MediaConstraints createMediaConstraints(const Optional<MediaTrackConstraints>& constraints)
355{
356 if (!constraints) {
357 MediaConstraints validConstraints;
358 validConstraints.isValid = true;
359 return validConstraints;
360 }
361 return createMediaConstraints(constraints.value());
362}
363
// MediaStreamTrack.applyConstraints(): asks the private track to apply the
// constraints asynchronously, resolving the promise on success or rejecting
// with OverconstrainedError on failure. The applied constraints are only
// remembered (m_constraints) once they succeed.
// NOTE(review): a second call overwrites m_promise without settling the
// previous one; the old promise is dropped unresolved — confirm intended.
void MediaStreamTrack::applyConstraints(const Optional<MediaTrackConstraints>& constraints, DOMPromiseDeferred<void>&& promise)
{
    m_promise = WTFMove(promise);

    // weakThis guards against the track being destroyed before completion.
    auto completionHandler = [this, weakThis = makeWeakPtr(*this), constraints](auto&& error) mutable {
        if (!weakThis || !m_promise)
            return;
        if (error) {
            m_promise->rejectType<IDLInterface<OverconstrainedError>>(OverconstrainedError::create(WTFMove(error->badConstraint), WTFMove(error->message)));
            return;
        }
        m_promise->resolve();
        m_constraints = constraints.valueOr(MediaTrackConstraints { });
    };
    m_private->applyConstraints(createMediaConstraints(constraints), WTFMove(completionHandler));
}
380
// Registers an observer to be notified via trackDidEnd(); the observer must
// outlive its registration (raw pointer storage).
void MediaStreamTrack::addObserver(Observer& observer)
{
    m_observers.append(&observer);
}
385
// Removes a previously registered observer (first matching entry).
void MediaStreamTrack::removeObserver(Observer& observer)
{
    m_observers.removeFirst(&observer);
}
390
391void MediaStreamTrack::pageMutedStateDidChange()
392{
393 if (m_ended || !isCaptureTrack())
394 return;
395
396 Document* document = this->document();
397 if (!document || !document->page())
398 return;
399
400 setMuted(document->page()->mutedState());
401}
402
// Reports this track's contribution to the page's media state for capture
// UI (e.g. the "page is using the camera" indicator). Non-capture and ended
// tracks report nothing. Precedence per branch: interrupted (while not
// muted) > muted > actively producing.
MediaProducer::MediaStateFlags MediaStreamTrack::mediaState() const
{
    if (m_ended || !isCaptureTrack())
        return IsNotPlaying;

    Document* document = this->document();
    if (!document || !document->page())
        return IsNotPlaying;

    if (source().type() == RealtimeMediaSource::Type::Audio) {
        if (source().interrupted() && !source().muted())
            return HasInterruptedAudioCaptureDevice;
        if (muted())
            return HasMutedAudioCaptureDevice;
        if (m_private->isProducingData())
            return HasActiveAudioCaptureDevice;
    } else {
        // Video capture distinguishes camera from display (screen/window)
        // devices so the UI can show different indicators.
        auto deviceType = source().deviceType();
        ASSERT(deviceType == CaptureDevice::DeviceType::Camera || deviceType == CaptureDevice::DeviceType::Screen || deviceType == CaptureDevice::DeviceType::Window);
        if (source().interrupted() && !source().muted())
            return deviceType == CaptureDevice::DeviceType::Camera ? HasInterruptedVideoCaptureDevice : HasInterruptedDisplayCaptureDevice;
        if (muted())
            return deviceType == CaptureDevice::DeviceType::Camera ? HasMutedVideoCaptureDevice : HasMutedDisplayCaptureDevice;
        if (m_private->isProducingData())
            return deviceType == CaptureDevice::DeviceType::Camera ? HasActiveVideoCaptureDevice : HasActiveDisplayCaptureDevice;
    }

    return IsNotPlaying;
}
432
// MediaStreamTrackPrivate::Observer: source started producing; refresh the
// document's is-playing-media state.
void MediaStreamTrack::trackStarted(MediaStreamTrackPrivate&)
{
    configureTrackRendering();
}
437
// MediaStreamTrackPrivate::Observer: the underlying track ended. Implements
// the spec's end-of-track steps; skipped entirely when m_ended is already
// set (i.e. stop()/stopTrack(Silently) was used, which must not fire 'ended').
void MediaStreamTrack::trackEnded(MediaStreamTrackPrivate&)
{
    // http://w3c.github.io/mediacapture-main/#life-cycle
    // When a MediaStreamTrack track ends for any reason other than the stop() method being invoked, the User Agent must queue a task that runs the following steps:
    // 1. If the track's readyState attribute has the value ended already, then abort these steps.
    if (m_ended)
        return;

    // 2. Set track's readyState attribute to ended.
    m_ended = true;

    // NOTE(review): scriptExecutionContext() is dereferenced without a null
    // check here — confirm this callback cannot arrive after context teardown.
    if (scriptExecutionContext()->activeDOMObjectsAreSuspended() || scriptExecutionContext()->activeDOMObjectsAreStopped())
        return;

    // 3. Notify track's source that track is ended so that the source may be stopped, unless other MediaStreamTrack objects depend on it.
    // 4. Fire a simple event named ended at the object.
    dispatchEvent(Event::create(eventNames().endedEvent, Event::CanBubble::No, Event::IsCancelable::No));

    for (auto& observer : m_observers)
        observer->trackDidEnd();

    configureTrackRendering();
}
461
// MediaStreamTrackPrivate::Observer: mute state flipped. Queues a task to
// fire 'mute' or 'unmute' (the current muted() value is captured at enqueue
// time so the event matches the state change that triggered it).
void MediaStreamTrack::trackMutedChanged(MediaStreamTrackPrivate&)
{
    if (scriptExecutionContext()->activeDOMObjectsAreSuspended() || scriptExecutionContext()->activeDOMObjectsAreStopped() || m_ended)
        return;

    m_eventTaskQueue.enqueueTask([this, muted = this->muted()] {
        AtomicString eventType = muted ? eventNames().muteEvent : eventNames().unmuteEvent;
        dispatchEvent(Event::create(eventType, Event::CanBubble::No, Event::IsCancelable::No));
    });

    configureTrackRendering();
}
474
// MediaStreamTrackPrivate::Observer: settings changed; refresh media state.
void MediaStreamTrack::trackSettingsChanged(MediaStreamTrackPrivate&)
{
    configureTrackRendering();
}
479
// MediaStreamTrackPrivate::Observer: enabled flag changed; refresh media state.
void MediaStreamTrack::trackEnabledChanged(MediaStreamTrackPrivate&)
{
    configureTrackRendering();
}
484
// Asynchronously asks the document to recompute its is-playing-media state
// (which aggregates mediaState() across producers). Queued so multiple
// track changes in one turn coalesce and nothing runs after m_taskQueue is
// closed in stop().
void MediaStreamTrack::configureTrackRendering()
{
    m_taskQueue.enqueueTask([this] {
        if (auto document = this->document())
            document->updateIsPlayingMedia();
    });

    // 4.3.1
    // ... media from the source only flows when a MediaStreamTrack object is both unmuted and enabled
}
495
// ActiveDOMObject::stop(): end the track (with the default stop mode) and
// close the task queue so no queued rendering updates run after teardown.
void MediaStreamTrack::stop()
{
    stopTrack();
    m_taskQueue.close();
}
501
// ActiveDOMObject: debug/diagnostic name for this object type.
const char* MediaStreamTrack::activeDOMObjectName() const
{
    return "MediaStreamTrack";
}
506
// ActiveDOMObject: the document may only suspend (e.g. enter the page cache)
// when this track has no pending activity, i.e. it has already ended.
bool MediaStreamTrack::canSuspendForDocumentSuspension() const
{
    return !hasPendingActivity();
}
511
// A live (not-ended) track keeps its wrapper alive for future events.
bool MediaStreamTrack::hasPendingActivity() const
{
    return !m_ended;
}
516
// Exposes the private track's audio provider (e.g. for Web Audio); may be null.
AudioSourceProvider* MediaStreamTrack::audioSourceProvider()
{
    return m_private->audioSourceProvider();
}
521
// The owning document, or null once the context is cleared.
// NOTE(review): downcast assumes the context is always a Document (never a
// worker scope) — consistent with the constructor's document() use; confirm.
Document* MediaStreamTrack::document() const
{
    return downcast<Document>(scriptExecutionContext());
}
526
527#if !RELEASE_LOG_DISABLED
// LoggerHelper: all MediaStreamTrack logging goes to the WebRTC channel.
WTFLogChannel& MediaStreamTrack::logChannel() const
{
    return LogWebRTC;
}
532#endif
533
534} // namespace WebCore
535
536#endif // ENABLE(MEDIA_STREAM)
537