MediaStream Note
MediaStream from getUserMedia
Call flow of getUserMedia:
void
Navigator::MozGetUserMedia(const MediaStreamConstraints& aConstraints,
                           NavigatorUserMediaSuccessCallback& aOnSuccess,
                           NavigatorUserMediaErrorCallback& aOnError,
                           ErrorResult& aRv)
{
  ...
  MediaManager* manager = MediaManager::Get();
  aRv = manager->GetUserMedia(privileged, mWindow, aConstraints,
                              onsuccess, onerror);
}
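The entry point itself does little: it fetches the process-wide MediaManager singleton and forwards the request. A rough standalone sketch of that delegation shape (illustrative names and simplified signatures, not Gecko code):

#include <iostream>

struct MediaStreamConstraints {
  bool mAudio = false;
  bool mVideo = false;
};

class MediaManager {
public:
  // Lazily created, process-wide singleton, mirroring MediaManager::Get().
  static MediaManager* Get() {
    static MediaManager sSingleton;
    return &sSingleton;
  }
  void GetUserMedia(bool aPrivileged, const MediaStreamConstraints& aC) {
    std::cout << "GetUserMedia: privileged=" << aPrivileged
              << " audio=" << aC.mAudio << " video=" << aC.mVideo << "\n";
  }
};

class Navigator {
public:
  void MozGetUserMedia(const MediaStreamConstraints& aConstraints) {
    // Delegate straight to the manager, as the excerpt above does.
    MediaManager::Get()->GetUserMedia(/* aPrivileged = */ false, aConstraints);
  }
};

int main() {
  MediaStreamConstraints c;
  c.mAudio = true;
  c.mVideo = true;
  Navigator().MozGetUserMedia(c);
}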
nsresult
MediaManager::GetUserMedia(bool aPrivileged,
  nsPIDOMWindow* aWindow, const MediaStreamConstraints& aConstraints,
  nsIDOMGetUserMediaSuccessCallback* aOnSuccess,
  nsIDOMGetUserMediaErrorCallback* aOnError)
{
  ...
  StreamListeners* listeners = GetActiveWindows()->Get(windowID);
  if (!listeners) {
    listeners = new StreamListeners;
    GetActiveWindows()->Put(windowID, listeners);
  }
  ...
  // Create a disabled listener to act as a placeholder.
  GetUserMediaCallbackMediaStreamListener* listener =
    new GetUserMediaCallbackMediaStreamListener(mediaThread, windowID);
  // No need for locking because we always do this on the main thread.
  listeners->AppendElement(listener);
  ...
  // Pass callbacks and MediaStreamListener along to GetUserMediaRunnable.
  nsRefPtr<GetUserMediaRunnable> runnable;
  if (c.mFake) {
    // Fake stream from the default backend.
    runnable = new GetUserMediaRunnable(c, onSuccess.forget(),
      onError.forget(), windowID, listener, mPrefs, new MediaEngineDefault());
  } else {
    // Stream from the default device of the WebRTC backend.
    runnable = new GetUserMediaRunnable(c, onSuccess.forget(),
      onError.forget(), windowID, listener, mPrefs);
  }
  ...
#ifdef MOZ_B2G_CAMERA
  if (mCameraManager == nullptr) {
    mCameraManager = nsDOMCameraManager::CreateInstance(aWindow);
  }
#endif
  ...
  if (aPrivileged ||
      (c.mFake && !Preferences::GetBool("media.navigator.permission.fake"))) {
    mMediaThread->Dispatch(runnable, NS_DISPATCH_NORMAL);
  } ...
}
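Conceptually, the active-windows table above maps a window ID to the set of listeners that page has created, so everything can be torn down together when the window goes away. A simplified sketch of that registry shape, using standard containers as stand-ins for nsClassHashtable and StreamListeners (hypothetical types; main-thread-only, hence no locking, as the comment in the excerpt notes):

#include <cstdint>
#include <memory>
#include <unordered_map>
#include <vector>

// Stand-in for GetUserMediaCallbackMediaStreamListener.
struct StreamListener {
  bool mActive = false;  // created disabled, as a placeholder
};

using StreamListeners = std::vector<std::shared_ptr<StreamListener>>;

class ActiveWindowRegistry {
public:
  // Find-or-create the listener array for a window, then append a new
  // placeholder listener -- the Get/Put/AppendElement steps above.
  std::shared_ptr<StreamListener> AddListener(uint64_t aWindowID) {
    StreamListeners& listeners = mActiveWindows[aWindowID];
    auto listener = std::make_shared<StreamListener>();
    listeners.push_back(listener);
    return listener;
  }
  // On navigation or close, dropping the entry releases the listeners.
  void InvalidateWindow(uint64_t aWindowID) {
    mActiveWindows.erase(aWindowID);
  }
private:
  std::unordered_map<uint64_t, StreamListeners> mActiveWindows;
};

int main() {
  ActiveWindowRegistry registry;
  auto listener = registry.AddListener(/* windowID */ 42);
  listener->mActive = true;       // later activated once the stream flows
  registry.InvalidateWindow(42);  // page went away
}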
NS_IMETHOD
GetUserMediaRunnable::Run()
{
  MediaEngine* backend = mBackend;
  // Was a backend provided?
  if (!backend) {
    // backend will become MediaEngineWebRTC.
    backend = mManager->GetBackend(mWindowID);
  }
  // Was a device provided?
  if (!mDeviceChosen) {
    // SelectDevice will call MediaEngineWebRTC::EnumerateVideoDevices(
    //   MediaSourceType aMediaSource,
    //   nsTArray<nsRefPtr<MediaEngineVideoSource> >* aVSources).
    // That allocates MediaEngineWebRTCVideoSource objects and appends them
    // to aVSources; SelectDevice then picks the first device.
    nsresult rv = SelectDevice(backend);
    if (rv != NS_OK) {
      return rv;
    }
  }
  ...
  ProcessGetUserMedia(((IsOn(mConstraints.mAudio) && mAudioDevice) ?
                       mAudioDevice->GetSource() : nullptr),
                      ((IsOn(mConstraints.mVideo) && mVideoDevice) ?
                       mVideoDevice->GetSource() : nullptr));
  return NS_OK;
}
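SelectDevice's enumerate-then-pick-first step can be sketched in isolation. The following is a hedged approximation, with VideoSource standing in for MediaEngineVideoSource; the real code also consults the supplied constraints rather than blindly taking the first entry:

#include <memory>
#include <string>
#include <vector>

// Stand-in for MediaEngineVideoSource.
struct VideoSource {
  std::string mName;
};

// Stand-in for MediaEngineWebRTC::EnumerateVideoDevices: allocate one
// source per capture device and append it to aVSources.
void EnumerateVideoDevices(std::vector<std::shared_ptr<VideoSource>>* aVSources) {
  aVSources->push_back(std::make_shared<VideoSource>(VideoSource{"camera-0"}));
  aVSources->push_back(std::make_shared<VideoSource>(VideoSource{"camera-1"}));
}

// Stand-in for SelectDevice: enumerate, fail if nothing was found,
// otherwise take the first entry ("then pick the first device").
std::shared_ptr<VideoSource> SelectDevice() {
  std::vector<std::shared_ptr<VideoSource>> sources;
  EnumerateVideoDevices(&sources);
  if (sources.empty()) {
    return nullptr;  // corresponds to returning an NS_ERROR_* code
  }
  return sources.front();
}

int main() {
  std::shared_ptr<VideoSource> device = SelectDevice();
  return device ? 0 : 1;
}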
void
ProcessGetUserMedia(MediaEngineAudioSource* aAudioSource,
                    MediaEngineVideoSource* aVideoSource)
{
  ...
  if (aVideoSource) {
    rv = aVideoSource->Allocate(GetInvariant(mConstraints.mVideo), mPrefs);
    ...
  }
  ...
  NS_DispatchToMainThread(new GetUserMediaStreamRunnable(
    mSuccess, mError, mWindowID, mListener, aAudioSource, aVideoSource,
    peerIdentity
  ));
  ...
}
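Note the thread hop here: Allocate() runs on the media thread, and success is reported by dispatching a runnable back to the main thread via NS_DispatchToMainThread. A minimal sketch of that dispatch pattern, with a hand-rolled task queue standing in for XPCOM's event targets (std::thread/std::function instead of nsIRunnable; illustrative only):

#include <functional>
#include <iostream>
#include <mutex>
#include <queue>
#include <thread>

// Tiny stand-in for an XPCOM event target: a queue of runnables that
// one designated thread drains.
class EventQueue {
public:
  void Dispatch(std::function<void()> aTask) {
    std::lock_guard<std::mutex> lock(mMutex);
    mTasks.push(std::move(aTask));
  }
  void DrainOne() {
    std::function<void()> task;
    {
      std::lock_guard<std::mutex> lock(mMutex);
      if (mTasks.empty()) return;
      task = std::move(mTasks.front());
      mTasks.pop();
    }
    task();
  }
private:
  std::mutex mMutex;
  std::queue<std::function<void()>> mTasks;
};

int main() {
  EventQueue mainQueue;
  // "Media thread": allocate the device, then dispatch the success step
  // back to the main thread, as ProcessGetUserMedia does above.
  std::thread mediaThread([&] {
    std::cout << "media thread: Allocate() video source\n";
    mainQueue.Dispatch([] {
      std::cout << "main thread: GetUserMediaStreamRunnable::Run()\n";
    });
  });
  mediaThread.join();
  mainQueue.DrainOne();  // the real main thread spins an event loop
}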
NS_IMETHOD
GetUserMediaStreamRunnable::Run()
{
  ...
  // Create a media stream.
  nsRefPtr<nsDOMUserMediaStream> trackunion =
    nsDOMUserMediaStream::CreateTrackUnionStream(window, mAudioSource,
                                                 mVideoSource);
  ...
  MediaStreamGraph* gm = MediaStreamGraph::GetInstance();
  nsRefPtr<SourceMediaStream> stream = gm->CreateSourceStream(nullptr);
  ...
  trackunion->mSourceStream = stream;
  ...
  // The listener was added at the beginning in an inactive state.
  // Activate our listener. We'll call Start() on the source when we get a
  // callback that the MediaStream has started consuming. The listener is
  // freed when the page is invalidated (on navigation or close).
  mListener->Activate(stream.forget(), mAudioSource, mVideoSource);
  ...
  // This will end up calling
  // nsresult MediaEngineWebRTCVideoSource::Start(SourceMediaStream* aStream,
  //                                              TrackID aID).
  nsRefPtr<MediaOperationRunnable> runnable(
    new MediaOperationRunnable(MEDIA_START, mListener, trackunion,
                               tracksAvailableCallback,
                               mAudioSource, mVideoSource, false, mWindowID,
                               mError.forget()));
  mediaThread->Dispatch(runnable, NS_DISPATCH_NORMAL);
  // We won't need mError now.
  mError = nullptr;
  return NS_OK;
}
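mError.forget() transfers the callback's reference into the MediaOperationRunnable: the runnable now owns it and the member is left null (the explicit mError = nullptr just makes that obvious). The same handoff expressed with std::unique_ptr and std::move, as a sketch with hypothetical stand-in types:

#include <iostream>
#include <memory>
#include <utility>

// Stand-in for the error callback whose ownership is transferred.
struct ErrorCallback {
  void OnError() { std::cout << "onError fired\n"; }
};

// Stand-in for MediaOperationRunnable: takes over the callback reference,
// much as passing mError.forget() transfers the nsRefPtr's reference.
class MediaOperation {
public:
  explicit MediaOperation(std::unique_ptr<ErrorCallback> aOnError)
    : mOnError(std::move(aOnError)) {}
  void RunOnFailure() { if (mOnError) mOnError->OnError(); }
private:
  std::unique_ptr<ErrorCallback> mOnError;
};

int main() {
  std::unique_ptr<ErrorCallback> error(new ErrorCallback());
  MediaOperation op(std::move(error));  // like mError.forget(): 'error' is
                                        // null afterwards; op owns the callback
  op.RunOnFailure();
}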
SourceMediaStream*
MediaStreamGraph::CreateSourceStream(DOMMediaStream* aWrapper)
{
  SourceMediaStream* stream = new SourceMediaStream(aWrapper);
  NS_ADDREF(stream);
  MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
  stream->SetGraphImpl(graph);
  graph->AppendMessage(new CreateMessage(stream));
  return stream;
}
virtual void CreateMessage::Run() MOZ_OVERRIDE
{
  mStream->GraphImpl()->AddStream(mStream); // i.e. mStreams.AppendElement(aStream)
  mStream->Init();
}
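The point of CreateMessage is that CreateSourceStream runs on the main thread while AddStream()/Init() must run on the graph thread, so the work travels as a queued ControlMessage. A condensed sketch of that handoff (simplified: the graph pointer is passed explicitly rather than recovered via mStream->GraphImpl(), and shutdown handling is omitted):

#include <iostream>
#include <memory>
#include <mutex>
#include <vector>

class MediaStream {
public:
  void Init() { std::cout << "Init() on graph thread\n"; }
};

// Stand-in for ControlMessage: created on the main thread, Run() later
// on the graph thread.
class ControlMessage {
public:
  explicit ControlMessage(MediaStream* aStream) : mStream(aStream) {}
  virtual ~ControlMessage() {}
  virtual void Run() = 0;
protected:
  MediaStream* mStream;
};

class GraphImpl {
public:
  void AppendMessage(std::unique_ptr<ControlMessage> aMsg) {  // main thread
    std::lock_guard<std::mutex> lock(mMutex);
    mMessages.push_back(std::move(aMsg));
  }
  void AddStream(MediaStream* aStream) { mStreams.push_back(aStream); }
  void RunMessages() {                 // start of each graph-thread iteration
    std::vector<std::unique_ptr<ControlMessage>> messages;
    {
      std::lock_guard<std::mutex> lock(mMutex);
      messages.swap(mMessages);
    }
    for (auto& msg : messages) {
      msg->Run();
    }
  }
private:
  std::mutex mMutex;
  std::vector<std::unique_ptr<ControlMessage>> mMessages;
  std::vector<MediaStream*> mStreams;
};

// Mirrors CreateMessage above: the graph thread adds the stream to
// mStreams and initializes it.
class CreateMessage : public ControlMessage {
public:
  CreateMessage(GraphImpl* aGraph, MediaStream* aStream)
    : ControlMessage(aStream), mGraph(aGraph) {}
  virtual void Run() { mGraph->AddStream(mStream); mStream->Init(); }
private:
  GraphImpl* mGraph;
};

int main() {
  GraphImpl graph;
  MediaStream stream;
  // Main thread: queue the message, as CreateSourceStream does.
  graph.AppendMessage(
    std::unique_ptr<ControlMessage>(new CreateMessage(&graph, &stream)));
  // Graph thread (simulated here): drain and run pending messages.
  graph.RunMessages();
}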
MediaStreamGraphImpl::RunThread
void
MediaStreamGraphImpl::RunThread()
{
  ...
  // Update mCurrentTime to the min of the playing audio times, or use the
  // wall-clock time change if no audio is playing.
  UpdateCurrentTime();
  ...
  // Grab pending stream input.
  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
    SourceMediaStream* is = mStreams[i]->AsSourceStream();
    if (is) {
      UpdateConsumptionState(is);
      ExtractPendingInput(is, endBlockingDecisions, &ensureNextIteration);
    }
  }
  ...
  // Figure out what each stream wants to do.
  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
    MediaStream* stream = mStreams[i];
    if (!doneAllProducing) {
      ProcessedMediaStream* ps = stream->AsProcessedStream();
      if (ps) {
        AudioNodeStream* n = stream->AsAudioNodeStream();
        if (n) {
          // Since an AudioNodeStream is present, go ahead and
          // produce audio block by block for all the rest of the streams.
          ProduceDataForStreamsBlockByBlock(i, n->SampleRate(),
                                            prevComputedTime,
                                            mStateComputedTime);
          ticksProcessed += TimeToTicksRoundDown(n->SampleRate(),
                                                 mStateComputedTime - prevComputedTime);
          doneAllProducing = true;
        } else {
          ps->ProcessInput(prevComputedTime, mStateComputedTime,
                           ProcessedMediaStream::ALLOW_FINISH);
          NS_WARN_IF_FALSE(stream->mBuffer.GetEnd() >=
                           GraphTimeToStreamTime(stream, mStateComputedTime),
                           "Stream did not produce enough data");
        }
      }
    }
    NotifyHasCurrentData(stream);
    if (mRealtime) {
      // Only play back audio and video in real-time mode.
      CreateOrDestroyAudioStreams(prevComputedTime, stream);
      TrackTicks ticksPlayedForThisStream =
        PlayAudio(stream, prevComputedTime, mStateComputedTime);
      if (!ticksPlayed) {
        ticksPlayed = ticksPlayedForThisStream;
      } else {
        MOZ_ASSERT(!ticksPlayedForThisStream ||
                   ticksPlayedForThisStream == ticksPlayed,
                   "Each stream should have played the same number of frames.");
      }
      PlayVideo(stream);
    }
    SourceMediaStream* is = stream->AsSourceStream();
    if (is) {
      UpdateBufferSufficiencyState(is);
    }
    GraphTime end;
    if (!stream->mBlocked.GetAt(mCurrentTime, &end) || end < GRAPH_TIME_MAX) {
      allBlockedForever = false;
    }
  }
}
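Stripped of the details, each RunThread() iteration follows a fixed shape: advance the clock, pull pending input from source streams, let processed streams produce output for the computed interval, and, in real-time mode, play it back. A compressed sketch of just that control flow, with no-op hypothetical stand-ins for the real steps:

#include <vector>

struct Stream {
  bool isSource = false;
  bool isProcessed = false;
};

struct Graph {
  std::vector<Stream*> streams;

  void UpdateCurrentTime() {}           // min of audio clocks, else wall clock
  void ExtractPendingInput(Stream*) {}  // pull queued media from a source
  void ProcessInput(Stream*) {}         // processed stream computes its output
  void PlayAudio(Stream*) {}            // real-time mode only
  void PlayVideo(Stream*) {}

  void OneIteration(bool realtime) {
    UpdateCurrentTime();
    for (Stream* s : streams) {         // grab pending stream input
      if (s->isSource) {
        ExtractPendingInput(s);
      }
    }
    for (Stream* s : streams) {         // figure out what each stream does
      if (s->isProcessed) {
        ProcessInput(s);
      }
      if (realtime) {                   // playback only in real-time mode
        PlayAudio(s);
        PlayVideo(s);
      }
    }
  }
};

int main() {
  Graph g;
  Stream source;
  source.isSource = true;
  Stream consumer;
  consumer.isProcessed = true;
  g.streams = { &source, &consumer };
  g.OneIteration(/* realtime = */ true);
}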