1/*
2 * Copyright (C) 2011 Google Inc. All rights reserved.
3 * Copyright (C) 2011, 2015 Ericsson AB. All rights reserved.
4 * Copyright (C) 2013-2019 Apple Inc. All rights reserved.
5 * Copyright (C) 2013 Nokia Corporation and/or its subsidiary(-ies).
6 *
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
9 * are met:
10 * 1. Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in the
14 * documentation and/or other materials provided with the distribution.
15 *
16 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY
17 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19 * DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY
20 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
21 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
22 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
23 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 */
27
28#include "config.h"
29#include "MediaStreamTrack.h"
30
31#if ENABLE(MEDIA_STREAM)
32
33#include "Document.h"
34#include "Event.h"
35#include "EventNames.h"
36#include "JSOverconstrainedError.h"
37#include "MediaConstraints.h"
38#include "MediaStream.h"
39#include "MediaStreamPrivate.h"
40#include "NotImplemented.h"
41#include "OverconstrainedError.h"
42#include "Page.h"
43#include "RealtimeMediaSourceCenter.h"
44#include "ScriptExecutionContext.h"
45#include <wtf/CompletionHandler.h>
46#include <wtf/IsoMallocInlines.h>
47#include <wtf/NeverDestroyed.h>
48
49namespace WebCore {
50
51WTF_MAKE_ISO_ALLOCATED_IMPL(MediaStreamTrack);
52
53Ref<MediaStreamTrack> MediaStreamTrack::create(ScriptExecutionContext& context, Ref<MediaStreamTrackPrivate>&& privateTrack)
54{
55 return adoptRef(*new MediaStreamTrack(context, WTFMove(privateTrack)));
56}
57
// Constructs a track wrapping the given private track. Registers this object
// as an observer of the private track and, when the context is a Document, as
// an audio producer so page-level mute state is reflected onto capture tracks.
MediaStreamTrack::MediaStreamTrack(ScriptExecutionContext& context, Ref<MediaStreamTrackPrivate>&& privateTrack)
    : ActiveDOMObject(&context)
    , m_private(WTFMove(privateTrack))
    , m_taskQueue(context)
    , m_isCaptureTrack(m_private->isCaptureTrack())
{
    ALWAYS_LOG(LOGIDENTIFIER);
    suspendIfNeeded();

    // Observe the private track so life-cycle callbacks (trackStarted, trackEnded,
    // trackMutedChanged, ...) reach this object.
    m_private->addObserver(*this);

    if (auto document = this->document()) {
        document->addAudioProducer(*this);
        // A capture track created while the page is already muted must start muted.
        if (isCaptureTrack() && document->page() && document->page()->mutedState())
            setMuted(document->page()->mutedState());
    }
}
75
// Unregisters everything the constructor registered: the private-track observer
// and, if the context is still a Document, the audio-producer registration.
MediaStreamTrack::~MediaStreamTrack()
{
    m_private->removeObserver(*this);

    if (auto document = this->document())
        document->removeAudioProducer(*this);
}
83
84const AtomString& MediaStreamTrack::kind() const
85{
86 static NeverDestroyed<AtomString> audioKind("audio", AtomString::ConstructFromLiteral);
87 static NeverDestroyed<AtomString> videoKind("video", AtomString::ConstructFromLiteral);
88
89 if (m_private->type() == RealtimeMediaSource::Type::Audio)
90 return audioKind;
91 return videoKind;
92}
93
94const String& MediaStreamTrack::id() const
95{
96 return m_private->id();
97}
98
99const String& MediaStreamTrack::label() const
100{
101 return m_private->label();
102}
103
104const AtomString& MediaStreamTrack::contentHint() const
105{
106 static NeverDestroyed<const AtomString> speechHint("speech", AtomString::ConstructFromLiteral);
107 static NeverDestroyed<const AtomString> musicHint("music", AtomString::ConstructFromLiteral);
108 static NeverDestroyed<const AtomString> detailHint("detail", AtomString::ConstructFromLiteral);
109 static NeverDestroyed<const AtomString> textHint("text", AtomString::ConstructFromLiteral);
110 static NeverDestroyed<const AtomString> motionHint("motion", AtomString::ConstructFromLiteral);
111
112 switch (m_private->contentHint()) {
113 case MediaStreamTrackPrivate::HintValue::Empty:
114 return emptyAtom();
115 case MediaStreamTrackPrivate::HintValue::Speech:
116 return speechHint;
117 case MediaStreamTrackPrivate::HintValue::Music:
118 return musicHint;
119 case MediaStreamTrackPrivate::HintValue::Motion:
120 return motionHint;
121 case MediaStreamTrackPrivate::HintValue::Detail:
122 return detailHint;
123 case MediaStreamTrackPrivate::HintValue::Text:
124 return textHint;
125 default:
126 return emptyAtom();
127 }
128}
129
130void MediaStreamTrack::setContentHint(const String& hintValue)
131{
132 MediaStreamTrackPrivate::HintValue value;
133 if (m_private->type() == RealtimeMediaSource::Type::Audio) {
134 if (hintValue == "")
135 value = MediaStreamTrackPrivate::HintValue::Empty;
136 else if (hintValue == "speech")
137 value = MediaStreamTrackPrivate::HintValue::Speech;
138 else if (hintValue == "music")
139 value = MediaStreamTrackPrivate::HintValue::Music;
140 else
141 return;
142 } else {
143 if (hintValue == "")
144 value = MediaStreamTrackPrivate::HintValue::Empty;
145 else if (hintValue == "detail")
146 value = MediaStreamTrackPrivate::HintValue::Detail;
147 else if (hintValue == "motion")
148 value = MediaStreamTrackPrivate::HintValue::Motion;
149 else if (hintValue == "text")
150 value = MediaStreamTrackPrivate::HintValue::Text;
151 else
152 return;
153 }
154 m_private->setContentHint(value);
155}
156
157bool MediaStreamTrack::enabled() const
158{
159 return m_private->enabled();
160}
161
162void MediaStreamTrack::setEnabled(bool enabled)
163{
164 m_private->setEnabled(enabled);
165}
166
167bool MediaStreamTrack::muted() const
168{
169 return m_private->muted();
170}
171
172void MediaStreamTrack::setMuted(MediaProducer::MutedStateFlags state)
173{
174 bool trackMuted = false;
175 switch (source().deviceType()) {
176 case CaptureDevice::DeviceType::Microphone:
177 case CaptureDevice::DeviceType::Camera:
178 trackMuted = state & AudioAndVideoCaptureIsMuted;
179 break;
180 case CaptureDevice::DeviceType::Screen:
181 case CaptureDevice::DeviceType::Window:
182 trackMuted = state & ScreenCaptureIsMuted;
183 break;
184 case CaptureDevice::DeviceType::Unknown:
185 ASSERT_NOT_REACHED();
186 break;
187 }
188
189 m_private->setMuted(trackMuted);
190}
191
192auto MediaStreamTrack::readyState() const -> State
193{
194 return ended() ? State::Ended : State::Live;
195}
196
197bool MediaStreamTrack::ended() const
198{
199 return m_ended || m_private->ended();
200}
201
202RefPtr<MediaStreamTrack> MediaStreamTrack::clone()
203{
204 if (!scriptExecutionContext())
205 return nullptr;
206
207 return MediaStreamTrack::create(*scriptExecutionContext(), m_private->clone());
208}
209
// Ends the track. With StopMode::Silently no 'ended' event is fired; with the
// default mode the event is dispatched from the trackEnded() observer callback
// that m_private->endTrack() triggers synchronously.
void MediaStreamTrack::stopTrack(StopMode mode)
{
    // NOTE: this method is called when the "stop" method is called from JS, using the "ImplementedAs" IDL attribute.
    // This is done because ActiveDOMObject requires a "stop" method.

    if (ended())
        return;

    // An 'ended' event is not posted if m_ended is true when trackEnded is called, so set it now if we are
    // not supposed to post the event.
    if (mode == StopMode::Silently)
        m_ended = true;

    // endTrack() notifies observers, including our own trackEnded(); the flag
    // is (re)set afterwards so the track is ended even if no callback fired.
    m_private->endTrack();
    m_ended = true;

    configureTrackRendering();
}
228
229MediaStreamTrack::TrackSettings MediaStreamTrack::getSettings() const
230{
231 auto& settings = m_private->settings();
232 TrackSettings result;
233 if (settings.supportsWidth())
234 result.width = settings.width();
235 if (settings.supportsHeight())
236 result.height = settings.height();
237 if (settings.supportsAspectRatio() && settings.aspectRatio()) // FIXME: Why the check for zero here?
238 result.aspectRatio = settings.aspectRatio();
239 if (settings.supportsFrameRate())
240 result.frameRate = settings.frameRate();
241 if (settings.supportsFacingMode())
242 result.facingMode = RealtimeMediaSourceSettings::facingMode(settings.facingMode());
243 if (settings.supportsVolume())
244 result.volume = settings.volume();
245 if (settings.supportsSampleRate())
246 result.sampleRate = settings.sampleRate();
247 if (settings.supportsSampleSize())
248 result.sampleSize = settings.sampleSize();
249 if (settings.supportsEchoCancellation())
250 result.echoCancellation = settings.echoCancellation();
251 if (settings.supportsDeviceId())
252 result.deviceId = settings.deviceId();
253 if (settings.supportsGroupId())
254 result.groupId = settings.groupId();
255
256 // FIXME: shouldn't this include displaySurface and logicalSurface?
257
258 return result;
259}
260
261static DoubleRange capabilityDoubleRange(const CapabilityValueOrRange& value)
262{
263 DoubleRange range;
264 switch (value.type()) {
265 case CapabilityValueOrRange::Double:
266 range.min = value.value().asDouble;
267 range.max = range.min;
268 break;
269 case CapabilityValueOrRange::DoubleRange:
270 range.min = value.rangeMin().asDouble;
271 range.max = value.rangeMax().asDouble;
272 break;
273 case CapabilityValueOrRange::Undefined:
274 case CapabilityValueOrRange::ULong:
275 case CapabilityValueOrRange::ULongRange:
276 ASSERT_NOT_REACHED();
277 }
278 return range;
279}
280
281static LongRange capabilityIntRange(const CapabilityValueOrRange& value)
282{
283 LongRange range;
284 switch (value.type()) {
285 case CapabilityValueOrRange::ULong:
286 range.min = value.value().asInt;
287 range.max = range.min;
288 break;
289 case CapabilityValueOrRange::ULongRange:
290 range.min = value.rangeMin().asInt;
291 range.max = value.rangeMax().asInt;
292 break;
293 case CapabilityValueOrRange::Undefined:
294 case CapabilityValueOrRange::Double:
295 case CapabilityValueOrRange::DoubleRange:
296 ASSERT_NOT_REACHED();
297 }
298 return range;
299}
300
301static Vector<String> capabilityStringVector(const Vector<RealtimeMediaSourceSettings::VideoFacingMode>& modes)
302{
303 Vector<String> result;
304 result.reserveCapacity(modes.size());
305 for (auto& mode : modes)
306 result.uncheckedAppend(RealtimeMediaSourceSettings::facingMode(mode));
307 return result;
308}
309
310static Vector<bool> capabilityBooleanVector(RealtimeMediaSourceCapabilities::EchoCancellation cancellation)
311{
312 Vector<bool> result;
313 result.reserveCapacity(2);
314 result.uncheckedAppend(true);
315 result.uncheckedAppend(cancellation == RealtimeMediaSourceCapabilities::EchoCancellation::ReadWrite);
316 return result;
317}
318
319MediaStreamTrack::TrackCapabilities MediaStreamTrack::getCapabilities() const
320{
321 auto capabilities = m_private->capabilities();
322 TrackCapabilities result;
323 if (capabilities.supportsWidth())
324 result.width = capabilityIntRange(capabilities.width());
325 if (capabilities.supportsHeight())
326 result.height = capabilityIntRange(capabilities.height());
327 if (capabilities.supportsAspectRatio())
328 result.aspectRatio = capabilityDoubleRange(capabilities.aspectRatio());
329 if (capabilities.supportsFrameRate())
330 result.frameRate = capabilityDoubleRange(capabilities.frameRate());
331 if (capabilities.supportsFacingMode())
332 result.facingMode = capabilityStringVector(capabilities.facingMode());
333 if (capabilities.supportsVolume())
334 result.volume = capabilityDoubleRange(capabilities.volume());
335 if (capabilities.supportsSampleRate())
336 result.sampleRate = capabilityIntRange(capabilities.sampleRate());
337 if (capabilities.supportsSampleSize())
338 result.sampleSize = capabilityIntRange(capabilities.sampleSize());
339 if (capabilities.supportsEchoCancellation())
340 result.echoCancellation = capabilityBooleanVector(capabilities.echoCancellation());
341 if (capabilities.supportsDeviceId())
342 result.deviceId = capabilities.deviceId();
343 if (capabilities.supportsGroupId())
344 result.groupId = capabilities.groupId();
345 return result;
346}
347
348static MediaConstraints createMediaConstraints(const Optional<MediaTrackConstraints>& constraints)
349{
350 if (!constraints) {
351 MediaConstraints validConstraints;
352 validConstraints.isValid = true;
353 return validConstraints;
354 }
355 return createMediaConstraints(constraints.value());
356}
357
// https://w3c.github.io/mediacapture-main/#dom-mediastreamtrack-applyconstraints
// Asks the private track to apply the constraints; resolves the promise on
// success, rejects it with an OverconstrainedError on failure.
// NOTE(review): a still-pending promise from an earlier call is silently
// replaced by the new one here — confirm this is the intended behavior.
void MediaStreamTrack::applyConstraints(const Optional<MediaTrackConstraints>& constraints, DOMPromiseDeferred<void>&& promise)
{
    m_promise = WTFMove(promise);

    auto completionHandler = [this, weakThis = makeWeakPtr(*this), constraints](auto&& error) mutable {
        // weakThis guards against the track being destroyed before the source
        // finishes applying the constraints.
        if (!weakThis || !m_promise)
            return;
        if (error) {
            m_promise->rejectType<IDLInterface<OverconstrainedError>>(OverconstrainedError::create(WTFMove(error->badConstraint), WTFMove(error->message)));
            return;
        }
        m_promise->resolve();
        // Remember the successfully applied dictionary (an empty one when none
        // was provided), presumably for later constraint queries — verify.
        m_constraints = constraints.valueOr(MediaTrackConstraints { });
    };
    m_private->applyConstraints(createMediaConstraints(constraints), WTFMove(completionHandler));
}
374
375void MediaStreamTrack::addObserver(Observer& observer)
376{
377 m_observers.append(&observer);
378}
379
380void MediaStreamTrack::removeObserver(Observer& observer)
381{
382 m_observers.removeFirst(&observer);
383}
384
385void MediaStreamTrack::pageMutedStateDidChange()
386{
387 if (m_ended || !isCaptureTrack())
388 return;
389
390 Document* document = this->document();
391 if (!document || !document->page())
392 return;
393
394 setMuted(document->page()->mutedState());
395}
396
// Reports this track's contribution to the page's media state. Only live
// capture tracks attached to a page contribute; everything else is
// IsNotPlaying. The interrupted/muted/producing checks are ordered: an
// interruption (while not muted) takes precedence over muted, which takes
// precedence over actively-producing.
MediaProducer::MediaStateFlags MediaStreamTrack::mediaState() const
{
    if (m_ended || !isCaptureTrack())
        return IsNotPlaying;

    Document* document = this->document();
    if (!document || !document->page())
        return IsNotPlaying;

    if (source().type() == RealtimeMediaSource::Type::Audio) {
        if (source().interrupted() && !source().muted())
            return HasInterruptedAudioCaptureDevice;
        if (muted())
            return HasMutedAudioCaptureDevice;
        if (m_private->isProducingData())
            return HasActiveAudioCaptureDevice;
    } else {
        // Video-kind capture is either a camera or a display (screen/window)
        // source; the two families report distinct flags.
        auto deviceType = source().deviceType();
        ASSERT(deviceType == CaptureDevice::DeviceType::Camera || deviceType == CaptureDevice::DeviceType::Screen || deviceType == CaptureDevice::DeviceType::Window);
        if (source().interrupted() && !source().muted())
            return deviceType == CaptureDevice::DeviceType::Camera ? HasInterruptedVideoCaptureDevice : HasInterruptedDisplayCaptureDevice;
        if (muted())
            return deviceType == CaptureDevice::DeviceType::Camera ? HasMutedVideoCaptureDevice : HasMutedDisplayCaptureDevice;
        if (m_private->isProducingData())
            return deviceType == CaptureDevice::DeviceType::Camera ? HasActiveVideoCaptureDevice : HasActiveDisplayCaptureDevice;
    }

    return IsNotPlaying;
}
426
// Observer callback: the private track started producing data; let the
// document re-evaluate its playing-media state.
void MediaStreamTrack::trackStarted(MediaStreamTrackPrivate&)
{
    configureTrackRendering();
}
431
// Observer callback: the private track ended for a reason other than stop().
void MediaStreamTrack::trackEnded(MediaStreamTrackPrivate&)
{
    // http://w3c.github.io/mediacapture-main/#life-cycle
    // When a MediaStreamTrack track ends for any reason other than the stop() method being invoked, the User Agent must queue a task that runs the following steps:
    // 1. If the track's readyState attribute has the value ended already, then abort these steps.
    if (m_ended)
        return;

    // 2. Set track's readyState attribute to ended.
    m_ended = true;

    // No event or observer notification while the context is suspended/stopped.
    // NOTE(review): scriptExecutionContext() is dereferenced without a null
    // check here — confirm it cannot be null when this callback fires.
    if (scriptExecutionContext()->activeDOMObjectsAreSuspended() || scriptExecutionContext()->activeDOMObjectsAreStopped())
        return;

    // 3. Notify track's source that track is ended so that the source may be stopped, unless other MediaStreamTrack objects depend on it.
    // 4. Fire a simple event named ended at the object.
    dispatchEvent(Event::create(eventNames().endedEvent, Event::CanBubble::No, Event::IsCancelable::No));

    for (auto& observer : m_observers)
        observer->trackDidEnd();

    configureTrackRendering();
}
455
// Observer callback: the private track's muted state flipped. Queues a task to
// fire 'mute' or 'unmute' (the state is captured now so the event matches the
// transition even if it flips again before the task runs).
void MediaStreamTrack::trackMutedChanged(MediaStreamTrackPrivate&)
{
    if (scriptExecutionContext()->activeDOMObjectsAreSuspended() || scriptExecutionContext()->activeDOMObjectsAreStopped() || m_ended)
        return;

    m_eventTaskQueue.enqueueTask([this, muted = this->muted()] {
        AtomString eventType = muted ? eventNames().muteEvent : eventNames().unmuteEvent;
        dispatchEvent(Event::create(eventType, Event::CanBubble::No, Event::IsCancelable::No));
    });

    configureTrackRendering();
}
468
// Observer callback: the private track's settings changed; let the document
// re-evaluate its playing-media state.
void MediaStreamTrack::trackSettingsChanged(MediaStreamTrackPrivate&)
{
    configureTrackRendering();
}
473
// Observer callback: the private track was enabled or disabled; let the
// document re-evaluate its playing-media state.
void MediaStreamTrack::trackEnabledChanged(MediaStreamTrackPrivate&)
{
    configureTrackRendering();
}
478
// Asynchronously asks the document to recompute whether media is playing
// (which folds in this track's mediaState()). Runs on m_taskQueue so it is
// dropped once stop() closes the queue.
void MediaStreamTrack::configureTrackRendering()
{
    m_taskQueue.enqueueTask([this] {
        if (auto document = this->document())
            document->updateIsPlayingMedia();
    });

    // 4.3.1
    // ... media from the source only flows when a MediaStreamTrack object is both unmuted and enabled
}
489
// ActiveDOMObject override: the context is being stopped. Ends the track first
// (with its default, event-firing mode), then closes the task queue so no
// further configureTrackRendering() tasks run.
void MediaStreamTrack::stop()
{
    stopTrack();
    m_taskQueue.close();
}
495
// ActiveDOMObject override: human-readable name used in debugging/diagnostics.
const char* MediaStreamTrack::activeDOMObjectName() const
{
    return "MediaStreamTrack";
}
500
// ActiveDOMObject override: the document may suspend us only once the track no
// longer has pending activity (i.e. it has ended).
bool MediaStreamTrack::canSuspendForDocumentSuspension() const
{
    return !hasPendingActivity();
}
505
// A live (not-yet-ended) track counts as pending activity, keeping the wrapper
// alive for future events.
bool MediaStreamTrack::hasPendingActivity() const
{
    return !m_ended;
}
510
// Exposes the private track's audio source provider (may be null), e.g. for
// routing track audio into Web Audio.
AudioSourceProvider* MediaStreamTrack::audioSourceProvider()
{
    return m_private->audioSourceProvider();
}
515
// Convenience accessor: the script execution context as a Document.
// NOTE(review): this downcast assumes the context is always a Document (or
// null) — confirm tracks are never created in worker contexts.
Document* MediaStreamTrack::document() const
{
    return downcast<Document>(scriptExecutionContext());
}
520
521#if !RELEASE_LOG_DISABLED
// Logging support: all MediaStreamTrack logging goes to the WebRTC channel.
WTFLogChannel& MediaStreamTrack::logChannel() const
{
    return LogWebRTC;
}
526#endif
527
528} // namespace WebCore
529
530#endif // ENABLE(MEDIA_STREAM)
531